Compare commits

..

5 Commits

Author SHA1 Message Date
Ashlie Martinez
6463afb348 Bug showcase 1 2024-02-14 08:14:29 -08:00
ryanfkeepers
3ea2e4734d isolate the actual error from this query
I couldn't find the exact error, so this is the best approximation I could come up with.
2024-02-13 17:23:08 -07:00
ryanfkeepers
368a246596 allow skipping error cases in exchange backup
utilize the previously introduced canSkipItemFailure interface to create
skip records for items during backup.
2024-02-13 16:39:03 -07:00
ryanfkeepers
dd71a5528a add canSkipItemFailure handler func
adds a new func to the exchange backup handler: canSkipItemFailure.
This interface allows any handler to evaluate the provided error and runtime
config to decide whether that error can be marked as skipped instead of
returned as a recoverable error, as is standard.
2024-02-13 16:10:03 -07:00
ryanfkeepers
60438d9e60 add new control opt for skipping event 503s
adds a new control option for skipping certain event item 503 failures.
Also adds a skip cause for that case, and exports the skipCause value in
preparation for future use.

Next PR will make use of these values.
2024-02-13 16:04:57 -07:00
79 changed files with 656 additions and 3556 deletions
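Taken together, the three skip-handling commits above describe a small decision hook: a control option gates whether event 503 failures may be skipped, and the backup handler's canSkipItemFailure func evaluates each error against that runtime config, recording a skip (with an exported skipCause) instead of returning a recoverable error. A minimal sketch in Go of that flow — only the names canSkipItemFailure and skipCause come from the commit messages; the option name, handler struct, and error type below are hypothetical stand-ins, not the real corso types:

package exchange

import (
	"errors"
	"net/http"
)

// Options stands in for the runtime control options; the new flag gates
// whether event 503 failures may be skipped. (Hypothetical name.)
type Options struct {
	SkipEventsOn503 bool
}

// SkipCause labels why an item was skipped, mirroring the exported skipCause.
type SkipCause string

const SkipCauseEvent503 SkipCause = "event-503-skip" // illustrative value

// graphError is an illustrative error type carrying an HTTP status code.
type graphError struct{ status int }

func (e *graphError) Error() string { return "graph API request failed" }

type backupHandler struct {
	opts Options
}

// canSkipItemFailure evaluates the provided error and runtime config to
// decide whether the failure may be recorded as a skip instead of being
// returned as a recoverable error.
func (h backupHandler) canSkipItemFailure(err error) (SkipCause, bool) {
	var ge *graphError
	if !errors.As(err, &ge) {
		return "", false
	}

	if h.opts.SkipEventsOn503 && ge.status == http.StatusServiceUnavailable {
		return SkipCauseEvent503, true
	}

	return "", false
}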

View File

@@ -1,5 +1,4 @@
 name: Backup Restore Test
-description: Run various backup/restore/export tests for a service.
 inputs:
   service:

View File

@@ -1,5 +1,4 @@
 name: Setup and Cache Golang
-description: Build golang binaries for later use in CI.
 # clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml
 #

View File

@@ -1,5 +1,4 @@
 name: Publish Binary
-description: Publish binary artifacts.
 inputs:
   version:

View File

@@ -1,5 +1,4 @@
 name: Publish Website
-description: Publish website artifacts.
 inputs:
   aws-iam-role:

View File

@@ -1,5 +1,4 @@
 name: Purge M365 User Data
-description: Deletes M365 data generated during CI tests.
 # Hard deletion of an m365 user's data. Our CI processes create a lot
 # of data churn (creation and immediate deletion) of files, the likes
@@ -31,19 +30,12 @@ inputs:
     description: Secret value of for AZURE_CLIENT_ID
   azure-client-secret:
     description: Secret value of for AZURE_CLIENT_SECRET
-  azure-pnp-client-id:
-    description: Secret value of AZURE_PNP_CLIENT_ID
-  azure-pnp-client-cert:
-    description: Base64 encoded private certificate for the azure-pnp-client-id (Secret value of AZURE_PNP_CLIENT_CERT)
   azure-tenant-id:
-    description: Secret value of AZURE_TENANT_ID
+    description: Secret value of for AZURE_TENANT_ID
   m365-admin-user:
     description: Secret value of for M365_TENANT_ADMIN_USER
   m365-admin-password:
     description: Secret value of for M365_TENANT_ADMIN_PASSWORD
-  tenant-domain:
-    description: The domain of the tenant (ex. 10rqc2.onmicrosft.com)
-    required: true

 runs:
   using: composite
@@ -61,13 +53,7 @@ runs:
         AZURE_CLIENT_ID: ${{ inputs.azure-client-id }}
         AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
         AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
-      run: |
-        for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
-        {
-          if (./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
-            break
-          }
-        }
+      run: ./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}

     # TODO(ashmrtn): Re-enable when we figure out errors we're seeing with Get-Mailbox call.
     #- name: Reset retention for all mailboxes to 0
@@ -88,16 +74,10 @@ runs:
       shell: pwsh
       working-directory: ./src/cmd/purge/scripts
       env:
-        AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
-        AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
-        TENANT_DOMAIN: ${{ inputs.tenant-domain }}
+        M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
+        M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
       run: |
-        for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
-        {
-          if (./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
-            break
-          }
-        }
+        ./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}

 ################################################################################################################
 # Sharepoint
@@ -108,14 +88,6 @@ runs:
       shell: pwsh
       working-directory: ./src/cmd/purge/scripts
       env:
-        AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
-        AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
-        TENANT_DOMAIN: ${{ inputs.tenant-domain }}
-      run: |
-        for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
-        {
-          if (./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
-            break
-          }
-        }
+        M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
+        M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
+      run: ./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}

View File

@@ -1,5 +1,4 @@
 name: Send a message to Teams
-description: Send messages to communication apps.
 inputs:
   msg:

View File

@@ -1,5 +1,4 @@
 name: Lint Website
-description: Lint website content.
 inputs:
   version:

View File

@@ -40,5 +40,5 @@ jobs:
       if: failure()
       uses: ./.github/actions/teams-message
       with:
-        msg: "[CORSO FAILED] Publishing Binary"
+        msg: "[FAILED] Publishing Binary"
         teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@@ -12,7 +12,7 @@ jobs:
     continue-on-error: true
     strategy:
       matrix:
-        user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, ""]
+        user: [ CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, '' ]
     steps:
       - uses: actions/checkout@v4
@@ -33,15 +33,12 @@ jobs:
           azure-tenant-id: ${{ secrets.TENANT_ID }}
           m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
           m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
-          azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
-          azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
-          tenant-domain: ${{ vars.TENANT_DOMAIN }}

       - name: Notify failure in teams
         if: failure()
         uses: ./.github/actions/teams-message
         with:
-          msg: "[CORSO FAILED] ${{ vars[matrix.user] }} CI Cleanup"
+          msg: "[FAILED] ${{ vars[matrix.user] }} CI Cleanup"
           teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

   Test-Site-Data-Cleanup:
@@ -50,7 +47,7 @@ jobs:
     continue-on-error: true
     strategy:
       matrix:
-        site: [CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL]
+        site: [ CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL ]
     steps:
       - uses: actions/checkout@v4
@@ -73,13 +70,10 @@ jobs:
           azure-tenant-id: ${{ secrets.TENANT_ID }}
           m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
           m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
-          azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
-          azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
-          tenant-domain: ${{ vars.TENANT_DOMAIN }}

       - name: Notify failure in teams
         if: failure()
         uses: ./.github/actions/teams-message
         with:
-          msg: "[CORSO FAILED] ${{ vars[matrix.site] }} CI Cleanup"
+          msg: "[FAILED] ${{ vars[matrix.site] }} CI Cleanup"
           teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@@ -155,6 +155,3 @@ jobs:
           azure-tenant-id: ${{ secrets.TENANT_ID }}
           m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
           m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
-          azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
-          azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
-          tenant-domain: ${{ vars.TENANT_DOMAIN }}

View File

@@ -6,7 +6,7 @@ on:
   workflow_dispatch:
     inputs:
       user:
-        description: "User to run longevity test on"
+        description: 'User to run longevity test on'

 permissions:
   # required to retrieve AWS credentials
@@ -23,7 +23,7 @@ jobs:
     uses: alcionai/corso/.github/workflows/accSelector.yaml@main

   Longevity-Tests:
-    needs: [SetM365App]
+    needs: [ SetM365App ]
     environment: Testing
     runs-on: ubuntu-latest
     env:
@@ -37,7 +37,7 @@ jobs:
       CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-longevity.log
       RESTORE_DEST_PFX: Corso_Test_Longevity_
       TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
-      PREFIX: "longevity"
+      PREFIX: 'longevity'

       # Options for retention.
       RETENTION_MODE: GOVERNANCE
@@ -46,7 +46,7 @@ jobs:
     defaults:
       run:
         working-directory: src

 ############################################################################
 # setup
     steps:
@@ -78,7 +78,7 @@ jobs:
     - run: go build -o corso
       timeout-minutes: 10

     - run: mkdir ${CORSO_LOG_DIR}

     # Use shorter-lived credentials obtained from assume-role since these
@@ -163,7 +163,7 @@ jobs:
         data=$( echo $resultjson | jq -r '.[0] | .id' )
         echo result=$data >> $GITHUB_OUTPUT

 ##########################################################################
 # Onedrive
@@ -328,7 +328,7 @@ jobs:
           --hide-progress \
           --force \
           --json \
           2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt

     - name: Maintenance test Weekly
       id: maintenance-test-weekly
@@ -392,5 +392,5 @@ jobs:
       if: failure()
       uses: ./.github/actions/teams-message
       with:
-        msg: "[CORSO FAILED] Longevity Test"
+        msg: "[FAILED] Longevity Test"
         teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@@ -48,7 +48,7 @@ jobs:
   # ----------------------------------------------------------------------------------------------------
   Test-Suite-Trusted:
-    needs: [Checkout, SetM365App]
+    needs: [ Checkout, SetM365App]
     environment: Testing
     runs-on: ubuntu-latest
     defaults:
@@ -100,9 +100,9 @@ jobs:
           -timeout 2h \
           ./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests

 ##########################################################################################################################################
 # Logging & Notifications

     # Upload the original go test output as an artifact for later review.
     - name: Upload test log
@@ -118,5 +118,5 @@ jobs:
       if: failure()
       uses: ./.github/actions/teams-message
       with:
-        msg: "[COROS FAILED] Nightly Checks"
+        msg: "[FAILED] Nightly Checks"
         teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@@ -6,7 +6,7 @@ on:
   workflow_dispatch:
     inputs:
       user:
-        description: "User to run sanity test on"
+        description: 'User to run sanity test on'

 permissions:
   # required to retrieve AWS credentials
@@ -23,7 +23,7 @@ jobs:
     uses: alcionai/corso/.github/workflows/accSelector.yaml@main

   Sanity-Tests:
-    needs: [SetM365App]
+    needs: [ SetM365App ]
     environment: Testing
     runs-on: ubuntu-latest
     env:
@@ -43,11 +43,12 @@ jobs:
     defaults:
       run:
         working-directory: src
+
 ##########################################################################################################################################
 # setup
     steps:
     - uses: actions/checkout@v4
     - name: Setup Golang with cache
@@ -63,9 +64,9 @@ jobs:
     - run: mkdir ${CORSO_LOG_DIR}

 ##########################################################################################################################################
 # Pre-Run cleanup

     # unlike CI tests, sanity tests are not expected to run concurrently.
     # however, the sanity yaml concurrency is set to a maximum of 1 run, preferring
@@ -90,9 +91,6 @@ jobs:
         azure-tenant-id: ${{ secrets.TENANT_ID }}
         m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
         m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
-        azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
-        azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
-        tenant-domain: ${{ vars.TENANT_DOMAIN }}

     - name: Purge CI-Produced Folders for Sites
       timeout-minutes: 30
@@ -101,20 +99,17 @@ jobs:
       with:
         site: ${{ vars.CORSO_M365_TEST_SITE_URL }}
         folder-prefix: ${{ env.RESTORE_DEST_PFX }}
         libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
         older-than: ${{ env.NOW }}
         azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
         azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
         azure-tenant-id: ${{ secrets.TENANT_ID }}
         m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
         m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
-        azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
-        azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
-        tenant-domain: ${{ vars.TENANT_DOMAIN }}

 ##########################################################################################################################################
 # Repository commands

     - name: Version Test
       timeout-minutes: 10
@@ -174,9 +169,9 @@ jobs:
           --mode complete \
           2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-maintenance.log

 ##########################################################################################################################################
 # Exchange

     # generate new entries to roll into the next load test
     # only runs if the test was successful
@@ -198,8 +193,8 @@ jobs:
         service: exchange
         kind: first-backup
         backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
-        restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
-        restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
+        restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+        restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
         log-dir: ${{ env.CORSO_LOG_DIR }}
         with-export: true
@@ -211,8 +206,8 @@ jobs:
         service: exchange
         kind: incremental
         backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
-        restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
-        restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
+        restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+        restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
         backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
         log-dir: ${{ env.CORSO_LOG_DIR }}
         with-export: true
@@ -225,8 +220,8 @@ jobs:
         service: exchange
         kind: non-delta
         backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta'
-        restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
-        restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
+        restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+        restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
         backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
         log-dir: ${{ env.CORSO_LOG_DIR }}
         with-export: true
@@ -239,15 +234,16 @@ jobs:
         service: exchange
         kind: non-delta-incremental
         backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
-        restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
-        restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
+        restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+        restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
         backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
         log-dir: ${{ env.CORSO_LOG_DIR }}
         with-export: true
+
 ##########################################################################################################################################
 # Onedrive

 # generate new entries for test
     - name: OneDrive - Create new data
@@ -274,8 +270,8 @@ jobs:
         service: onedrive
         kind: first-backup
         backup-args: '--user "${{ env.TEST_USER }}"'
-        restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
-        restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
+        restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
+        restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
         log-dir: ${{ env.CORSO_LOG_DIR }}
         with-export: true
@@ -299,14 +295,14 @@ jobs:
         service: onedrive
         kind: incremental
         backup-args: '--user "${{ env.TEST_USER }}"'
-        restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
-        restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
+        restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
+        restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
         log-dir: ${{ env.CORSO_LOG_DIR }}
         with-export: true

 ##########################################################################################################################################
 # Sharepoint Library

     # generate new entries for test
     - name: SharePoint - Create new data
@@ -334,8 +330,8 @@ jobs:
         service: sharepoint
         kind: first-backup
         backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
-        restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
-        restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
+        restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
+        restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
         log-dir: ${{ env.CORSO_LOG_DIR }}
         with-export: true
         category: libraries
@@ -361,15 +357,15 @@ jobs:
         service: sharepoint
         kind: incremental
         backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
-        restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
-        restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
+        restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
+        restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
         log-dir: ${{ env.CORSO_LOG_DIR }}
         with-export: true
         category: libraries

 ##########################################################################################################################################
 # Sharepoint Lists

 # generate new entries for test
 # The `awk | tr | sed` command chain is used to get a comma separated list of SharePoint list names.
@@ -422,7 +418,7 @@ jobs:
       working-directory: ./src/cmd/factory
       run: |
         suffix=$(date +"%Y-%m-%d_%H-%M-%S")

         go run . sharepoint lists \
           --site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
           --user ${{ env.TEST_USER }} \
@@ -458,9 +454,9 @@ jobs:
         category: lists
         on-collision: copy

 ##########################################################################################################################################
 # Groups and Teams

     # generate new entries for test
     - name: Groups - Create new data
@@ -487,8 +483,8 @@ jobs:
       with:
         service: groups
         kind: first-backup
-        backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
-        restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
+        backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
+        restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
         log-dir: ${{ env.CORSO_LOG_DIR }}
         with-export: true
@@ -512,15 +508,15 @@ jobs:
       with:
         service: groups
         kind: incremental
-        backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
+        backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
         restore-args: '--site "${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }}" --folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
-        restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
+        restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
         log-dir: ${{ env.CORSO_LOG_DIR }}
         with-export: true

 ##########################################################################################################################################
 # Logging & Notifications

     # Upload the original go test output as an artifact for later review.
     - name: Upload test log
@@ -536,5 +532,5 @@ jobs:
       if: failure()
       uses: ./.github/actions/teams-message
       with:
-        msg: "[CORSO FAILED] Sanity Tests"
+        msg: "[FAILED] Sanity Tests"
         teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@@ -11,7 +11,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Emails attached within other emails are now correctly exported
 - Gracefully handle email and post attachments without name when exporting to eml
 - Use correct timezone for event start and end times in Exchange exports (helps fix issues in relative recurrence patterns)
-- Fixed an issue causing exports dealing with calendar data to have high memory usage

 ## [v0.19.0] (beta) - 2024-02-06

View File

@@ -1,6 +1,3 @@
-> [!NOTE]
-> **The Corso project is no longer actively maintained and has been archived**.
-
 <p align="center">
   <img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
 </p>

View File

@@ -151,10 +151,7 @@ func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.
 	t.Log("backup results", result)

 	// As an offhand check: the result should contain the m365 user's email.
-	assert.Contains(
-		t,
-		strings.ToLower(result),
-		strings.ToLower(suite.m365.User.Provider.Name()))
+	assert.Contains(t, result, suite.m365.User.Provider.Name())
 }

 func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_ServiceNotEnabled_email() {
@@ -187,10 +184,7 @@ func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, categ
 	t.Log("backup results", result)

 	// As an offhand check: the result should contain the m365 user's email.
-	assert.Contains(
-		t,
-		strings.ToLower(result),
-		strings.ToLower(suite.m365.User.Provider.Name()))
+	assert.Contains(t, result, suite.m365.User.Provider.Name())
 }

 func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_email() {
@@ -289,10 +283,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_fromConfigFile() {
 	t.Log("backup results", result)

 	// As an offhand check: the result should contain the m365 user's email.
-	assert.Contains(
-		t,
-		strings.ToLower(result),
-		strings.ToLower(suite.m365.User.Provider.Name()))
+	assert.Contains(t, result, suite.m365.User.Provider.Name())
 }

 // AWS flags

View File

@@ -114,8 +114,6 @@ func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() {
 }

 func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_conversations() {
-	// skip
-	suite.T().Skip("CorsoCITeam group mailbox backup is broken")
 	runGroupsBackupCategoryTest(suite, flags.DataConversations)
 }

@@ -219,9 +217,6 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()
 }

 func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
-	// Skip
-	suite.T().Skip("CorsoCITeam group mailbox backup is broken")
 	t := suite.T()
 	ctx, flush := tester.NewContext(t)
 	ctx = config.SetViper(ctx, suite.dpnd.vpr)
@@ -305,10 +300,7 @@ func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
 		ins  = idname.NewCache(map[string]string{suite.m365.Group.ID: suite.m365.Group.ID})
 		cats = []path.CategoryType{
 			path.ChannelMessagesCategory,
-			// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
-			// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
-			// we go fix the group mailbox.
-			// path.ConversationPostsCategory,
+			path.ConversationPostsCategory,
 			path.LibrariesCategory,
 		}
 	)
@@ -462,8 +454,6 @@ func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages(
 }

 func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_conversations() {
-	// skip
-	suite.T().Skip("CorsoCITeam group mailbox backup is broken")
 	runGroupsDetailsCmdTest(suite, path.ConversationPostsCategory)
 }

View File

@@ -6,6 +6,12 @@ Param (
     [Parameter(Mandatory = $False, HelpMessage = "Site for which to delete folders in SharePoint")]
     [String]$Site,

+    [Parameter(Mandatory = $False, HelpMessage = "Exchange Admin email")]
+    [String]$AdminUser = $ENV:M365_TENANT_ADMIN_USER,
+
+    [Parameter(Mandatory = $False, HelpMessage = "Exchange Admin password")]
+    [String]$AdminPwd = $ENV:M365_TENANT_ADMIN_PASSWORD,
+
     [Parameter(Mandatory = $False, HelpMessage = "Document library root. Can add multiple comma-separated values")]
     [String[]]$LibraryNameList = @(),

@@ -16,16 +22,7 @@ Param (
     [String[]]$FolderPrefixPurgeList,

     [Parameter(Mandatory = $False, HelpMessage = "Delete document libraries with this prefix")]
-    [String[]]$LibraryPrefixDeleteList = @(),
-
-    [Parameter(Mandatory = $False, HelpMessage = "Tenant domain")]
-    [String]$TenantDomain = $ENV:TENANT_DOMAIN,
-
-    [Parameter(Mandatory = $False, HelpMessage = "Azure ClientId")]
-    [String]$ClientId = $ENV:AZURE_CLIENT_ID,
-
-    [Parameter(Mandatory = $False, HelpMessage = "Azure AppCert")]
-    [String]$AppCert = $ENV:AZURE_APP_CERT
+    [String[]]$LibraryPrefixDeleteList = @()
 )

 Set-StrictMode -Version 2.0
@@ -40,7 +37,7 @@ function Get-TimestampFromFolderName {
     $name = $folder.Name

     #fallback on folder create time
     [datetime]$timestamp = $folder.TimeCreated

     try {
@@ -69,7 +66,7 @@ function Get-TimestampFromListName {
     $name = $list.Title

     #fallback on list create time
     [datetime]$timestamp = $list.LastItemUserModifiedDate

     try {
@@ -109,9 +106,8 @@ function Purge-Library {
     Write-Host "`nPurging library: $LibraryName"

     $foldersToPurge = @()
     $folders = Get-PnPFolderItem -FolderSiteRelativeUrl $LibraryName -ItemType Folder
-    Write-Host "`nFolders: $folders"

     foreach ($f in $folders) {
         $folderName = $f.Name
         $createTime = Get-TimestampFromFolderName -Folder $f
@@ -163,7 +159,7 @@ function Delete-LibraryByPrefix {
     Write-Host "`nDeleting library: $LibraryNamePrefix"

     $listsToDelete = @()
     $lists = Get-PnPList

     foreach ($l in $lists) {
         $listName = $l.Title
@@ -187,7 +183,7 @@ function Delete-LibraryByPrefix {
         Write-Host "Deleting list: "$l.Title
         try {
             $listInfo = Get-PnPList -Identity $l.Id | Select-Object -Property Hidden

             # Check if the 'hidden' property is true
             if ($listInfo.Hidden) {
                 Write-Host "List: $($l.Title) is hidden. Skipping..."
@@ -213,8 +209,8 @@ if (-not (Get-Module -ListAvailable -Name PnP.PowerShell)) {
 }

-if ([string]::IsNullOrEmpty($ClientId) -or [string]::IsNullOrEmpty($AppCert)) {
-    Write-Host "ClientId and AppCert required as arguments or environment variables."
+if ([string]::IsNullOrEmpty($AdminUser) -or [string]::IsNullOrEmpty($AdminPwd)) {
+    Write-Host "Admin user name and password required as arguments or environment variables."
     Exit
 }
@@ -255,8 +251,12 @@ else {
     Exit
 }

+$password = convertto-securestring -String $AdminPwd -AsPlainText -Force
+$cred = New-Object -TypeName System.Management.Automation.PSCredential -ArgumentList $AdminUser, $password
+
 Write-Host "`nAuthenticating and connecting to $SiteUrl"
-Connect-PnPOnline -Url $siteUrl -ClientId $ClientId -CertificateBase64Encoded $AppCert -Tenant $TenantDomain
+Connect-PnPOnline -Url $siteUrl -Credential $cred
 Write-Host "Connected to $siteUrl`n"

 # ensure that there are no unexpanded entries in the list of parameters

View File

@@ -5,7 +5,6 @@ import (
 	"github.com/alcionai/clues"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
-	"golang.org/x/exp/slices"

 	"github.com/alcionai/corso/src/cmd/sanity_test/common"
 	"github.com/alcionai/corso/src/internal/common/ptr"
@@ -21,20 +20,19 @@ const (
 // this increases the chance that we'll run into a race collision with
 // the cleanup script. Sometimes that's okay (deleting old data that
 // isn't scrutinized in the test), other times it's not. We mark whether
-// that's okay to do or not by specifying the folders being
-// scrutinized for the test. Any errors within those folders should cause
-// a fatal exit. Errors outside of those folders get ignored.
+// that's okay to do or not by specifying the folder that's being
+// scrutinized for the test. Any errors within that folder should cause
+// a fatal exit. Errors outside of that folder get ignored.
 //
-// since we're using folder names, mustPopulateFolders will
+// since we're using folder names, requireNoErrorsWithinFolderName will
 // work best (ie: have the fewest collisions/side-effects) if the folder
-// names are very specific. Standard sanity tests should include timestamps,
+// name is very specific. Standard sanity tests should include timestamps,
 // which should help ensure that. Be warned if you try to use it with
 // a more generic name: unintended effects could occur.
 func populateSanitree(
 	ctx context.Context,
 	ac api.Client,
-	driveID string,
-	mustPopulateFolders []string,
+	driveID, requireNoErrorsWithinFolderName string,
 ) *common.Sanitree[models.DriveItemable, models.DriveItemable] {
 	common.Infof(ctx, "building sanitree for drive: %s", driveID)
@@ -58,8 +56,8 @@ func populateSanitree(
 		ac,
 		driveID,
 		stree.Name+"/",
-		mustPopulateFolders,
-		slices.Contains(mustPopulateFolders, rootName),
+		requireNoErrorsWithinFolderName,
+		rootName == requireNoErrorsWithinFolderName,
 		stree)

 	return stree
@@ -68,9 +66,7 @@ func populateSanitree(
 func recursivelyBuildTree(
 	ctx context.Context,
 	ac api.Client,
-	driveID string,
-	location string,
-	mustPopulateFolders []string,
+	driveID, location, requireNoErrorsWithinFolderName string,
 	isChildOfFolderRequiringNoErrors bool,
 	stree *common.Sanitree[models.DriveItemable, models.DriveItemable],
 ) {
@@ -84,9 +80,9 @@ func recursivelyBuildTree(
 		common.Infof(
 			ctx,
-			"ignoring error getting children in directory %q because it is not within directory set %v\nerror: %s\n%+v",
+			"ignoring error getting children in directory %q because it is not within directory %q\nerror: %s\n%+v",
 			location,
-			mustPopulateFolders,
+			requireNoErrorsWithinFolderName,
 			err.Error(),
 			clues.ToCore(err))
@@ -103,12 +99,11 @@ func recursivelyBuildTree(
 		// currently we don't restore blank folders.
 		// skip permission check for empty folders
 		if ptr.Val(driveItem.GetFolder().GetChildCount()) == 0 {
-			common.Infof(ctx, "skipped empty folder: %s%s", location, itemName)
+			common.Infof(ctx, "skipped empty folder: %s/%s", location, itemName)
 			continue
 		}

-		cannotAllowErrors := isChildOfFolderRequiringNoErrors ||
-			slices.Contains(mustPopulateFolders, itemName)
+		cannotAllowErrors := isChildOfFolderRequiringNoErrors || itemName == requireNoErrorsWithinFolderName

 		branch := &common.Sanitree[models.DriveItemable, models.DriveItemable]{
 			Parent: stree,
@@ -129,7 +124,7 @@ func recursivelyBuildTree(
 			ac,
 			driveID,
 			location+branch.Name+"/",
-			mustPopulateFolders,
+			requireNoErrorsWithinFolderName,
 			cannotAllowErrors,
 			branch)
 	}

View File

@@ -32,7 +32,7 @@ func CheckExport(
 		ctx,
 		ac,
 		driveID,
-		[]string{envs.SourceContainer})
+		envs.RestoreContainer)

 	sourceTree, ok := root.Children[envs.SourceContainer]
 	common.Assert(

View File

@@ -45,14 +45,7 @@ func CheckRestoration(
 		"drive_id", driveID,
 		"drive_name", driveName)

-	root := populateSanitree(
-		ctx,
-		ac,
-		driveID,
-		[]string{
-			envs.SourceContainer,
-			envs.RestoreContainer,
-		})
+	root := populateSanitree(ctx, ac, driveID, envs.RestoreContainer)

 	sourceTree, ok := root.Children[envs.SourceContainer]
 	common.Assert(

View File

@@ -3,7 +3,7 @@ module github.com/alcionai/corso/src
 go 1.21

 replace (
-	github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4
+	github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe

 	// Alcion fork removes the validation of email addresses as we might get incomplete email addresses
 	github.com/xhit/go-simple-mail/v2 v2.16.0 => github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a

View File

@@ -23,8 +23,8 @@ github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c h1:QtARFaqYKtGjmEej
 github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c/go.mod h1:1YJwJy3W6GGsC2UiDAEWABUjgvT8OZHjKs8OoaXeKbw=
 github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a h1:4nhM0NM1qtUT1s55rQ+D0Xw1Re5mIU9/crjEl6KdE+k=
 github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a/go.mod h1:b7P5ygho6SYE+VIqpxA6QkYfv4teeyG4MKqB3utRu98=
-github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4 h1:3YZ70H3mkUgwiHLiNvukrqh2awRgfl1RAkbV0IoUqqk=
-github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4/go.mod h1:QFRSOUQzZfKE3hKVBHP7hxOn5WyrEmdBtfN5wkib/eA=
+github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe h1:nLS5pxhm04Jz4+qeipNlxdyPGxqNWpBu8UGkRYpWoIw=
+github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe/go.mod h1:QFRSOUQzZfKE3hKVBHP7hxOn5WyrEmdBtfN5wkib/eA=
 github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
 github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=

View File

@@ -10,7 +10,6 @@ import (
 	"github.com/alcionai/corso/src/pkg/dttm"
 	"github.com/alcionai/corso/src/pkg/export"
-	"github.com/alcionai/corso/src/pkg/logger"
 )

 const (
@@ -57,22 +56,12 @@ func ZipExportCollection(
 		defer wr.Close()

 		buf := make([]byte, ZipCopyBufferSize)
-		counted := 0
-
-		log := logger.Ctx(ctx).
-			With("collection_count", len(expCollections))

 		for _, ec := range expCollections {
 			folder := ec.BasePath()
 			items := ec.Items(ctx)

 			for item := range items {
-				counted++
-
-				// Log every 1000 items that are processed
-				if counted%1000 == 0 {
-					log.Infow("progress zipping export items", "count_items", counted)
-				}

 				err := item.Error
 				if err != nil {
 					writer.CloseWithError(clues.Wrap(err, "getting export item").With("id", item.ID))
@@ -99,12 +88,8 @@ func ZipExportCollection(
 					writer.CloseWithError(clues.Wrap(err, "writing zip entry").With("name", name).With("id", item.ID))
 					return
 				}
-
-				item.Body.Close()
 			}
 		}
-
-		log.Infow("completed zipping export items", "count_items", counted)
 	}()

 	return zipCollection{reader}, nil

View File

@@ -1,13 +1,10 @@
 package jwt

 import (
-	"context"
 	"time"

 	"github.com/alcionai/clues"
 	jwt "github.com/golang-jwt/jwt/v5"
-
-	"github.com/alcionai/corso/src/pkg/logger"
 )

 // IsJWTExpired checks if the JWT token is past expiry by analyzing the
// IsJWTExpired checks if the JWT token is past expiry by analyzing the // IsJWTExpired checks if the JWT token is past expiry by analyzing the
@@ -40,51 +37,3 @@ func IsJWTExpired(
 	return expired, nil
 }
-
-// GetJWTLifetime returns the issued at(iat) and expiration time(exp) claims
-// present in the JWT token. These are optional claims and may not be present
-// in the token. Absence is not reported as an error.
-//
-// An error is returned if the supplied token is malformed. Times are returned
-// in UTC to have parity with graph responses.
-func GetJWTLifetime(
-	ctx context.Context,
-	rawToken string,
-) (time.Time, time.Time, error) {
-	var (
-		issuedAt  time.Time
-		expiresAt time.Time
-	)
-
-	p := jwt.NewParser()
-
-	token, _, err := p.ParseUnverified(rawToken, &jwt.RegisteredClaims{})
-	if err != nil {
-		logger.CtxErr(ctx, err).Debug("parsing jwt token")
-		return time.Time{}, time.Time{}, clues.Wrap(err, "invalid jwt")
-	}
-
-	exp, err := token.Claims.GetExpirationTime()
-	if err != nil {
-		logger.CtxErr(ctx, err).Debug("extracting exp claim")
-		return time.Time{}, time.Time{}, clues.Wrap(err, "getting token expiry time")
-	}
-
-	iat, err := token.Claims.GetIssuedAt()
-	if err != nil {
-		logger.CtxErr(ctx, err).Debug("extracting iat claim")
-		return time.Time{}, time.Time{}, clues.Wrap(err, "getting token issued at time")
-	}
-
-	// Absence of iat or exp claims is not reported as an error by jwt library as these
-	// are optional as per spec.
-	if iat != nil {
-		issuedAt = iat.UTC()
-	}
-
-	if exp != nil {
-		expiresAt = exp.UTC()
-	}
-
-	return issuedAt, expiresAt, nil
-}

View File

@@ -113,134 +113,3 @@ func (suite *JWTUnitSuite) TestIsJWTExpired() {
 		})
 	}
 }
-
-func (suite *JWTUnitSuite) TestGetJWTLifetime() {
-	// Set of time values to be used in the tests.
-	// Truncate to seconds for comparisons since jwt tokens have second
-	// level precision.
-	idToTime := map[string]time.Time{
-		"T0": time.Now().UTC().Add(-time.Hour).Truncate(time.Second),
-		"T1": time.Now().UTC().Truncate(time.Second),
-		"T2": time.Now().UTC().Add(time.Hour).Truncate(time.Second),
-	}
-
-	table := []struct {
-		name       string
-		getToken   func() (string, error)
-		expectFunc func(t *testing.T, iat time.Time, exp time.Time)
-		expectErr  assert.ErrorAssertionFunc
-	}{
-		{
-			name: "alive token",
-			getToken: func() (string, error) {
-				return createJWTToken(
-					jwt.RegisteredClaims{
-						IssuedAt:  jwt.NewNumericDate(idToTime["T0"]),
-						ExpiresAt: jwt.NewNumericDate(idToTime["T1"]),
-					})
-			},
-			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
-				assert.Equal(t, idToTime["T0"], iat)
-				assert.Equal(t, idToTime["T1"], exp)
-			},
-			expectErr: assert.NoError,
-		},
-		// Test with a token which is not generated using the go-jwt lib.
-		// This is a long lived token which is valid for 100 years.
-		{
-			name: "alive raw token with iat and exp claims",
-			getToken: func() (string, error) {
-				return rawToken, nil
-			},
-			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
-				assert.Less(t, iat, time.Now(), "iat should be in the past")
-				assert.Greater(t, exp, time.Now(), "exp should be in the future")
-			},
-			expectErr: assert.NoError,
-		},
-		// Regardless of whether the token is expired or not, we should be able to
-		// extract the iat and exp claims from it without error.
-		{
-			name: "expired token",
-			getToken: func() (string, error) {
-				return createJWTToken(
-					jwt.RegisteredClaims{
-						IssuedAt:  jwt.NewNumericDate(idToTime["T1"]),
-						ExpiresAt: jwt.NewNumericDate(idToTime["T0"]),
-					})
-			},
-			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
-				assert.Equal(t, idToTime["T1"], iat)
-				assert.Equal(t, idToTime["T0"], exp)
-			},
-			expectErr: assert.NoError,
-		},
-		{
-			name: "missing iat claim",
-			getToken: func() (string, error) {
-				return createJWTToken(
-					jwt.RegisteredClaims{
-						ExpiresAt: jwt.NewNumericDate(idToTime["T2"]),
-					})
-			},
-			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
-				assert.Equal(t, time.Time{}, iat)
-				assert.Equal(t, idToTime["T2"], exp)
-			},
-			expectErr: assert.NoError,
-		},
-		{
-			name: "missing exp claim",
-			getToken: func() (string, error) {
-				return createJWTToken(
-					jwt.RegisteredClaims{
-						IssuedAt: jwt.NewNumericDate(idToTime["T0"]),
-					})
-			},
-			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
-				assert.Equal(t, idToTime["T0"], iat)
-				assert.Equal(t, time.Time{}, exp)
-			},
-			expectErr: assert.NoError,
-		},
-		{
-			name: "both claims missing",
-			getToken: func() (string, error) {
-				return createJWTToken(jwt.RegisteredClaims{})
-			},
-			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
-				assert.Equal(t, time.Time{}, iat)
-				assert.Equal(t, time.Time{}, exp)
-			},
-			expectErr: assert.NoError,
-		},
-		{
-			name: "malformed token",
-			getToken: func() (string, error) {
-				return "header.claims.signature", nil
-			},
-			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
-				assert.Equal(t, time.Time{}, iat)
-				assert.Equal(t, time.Time{}, exp)
-			},
-			expectErr: assert.Error,
-		},
-	}
-
-	for _, test := range table {
-		suite.Run(test.name, func() {
-			t := suite.T()
-
-			ctx, flush := tester.NewContext(t)
-			defer flush()
-
-			token, err := test.getToken()
-			require.NoError(t, err)
-
-			iat, exp, err := GetJWTLifetime(ctx, token)
-			test.expectErr(t, err)
-			test.expectFunc(t, iat, exp)
-		})
-	}
-}

View File

@@ -59,19 +59,6 @@ func First(vs ...string) string {
 	return ""
 }

-// FirstIn returns the first entry in the map with a non-zero value
-// when iterating the provided list of keys.
-func FirstIn(m map[string]any, keys ...string) string {
-	for _, key := range keys {
-		v, err := AnyValueToString(key, m)
-		if err == nil && len(v) > 0 {
-			return v
-		}
-	}
-
-	return ""
-}
-
 // Preview reduces the string to the specified size.
 // If the string is longer than the size, the last three
 // characters are replaced with an ellipsis. Size < 4

View File

@@ -118,96 +118,3 @@ func TestGenerateHash(t *testing.T) {
 		})
 	}
 }
-
-func TestFirstIn(t *testing.T) {
-	table := []struct {
-		name   string
-		m      map[string]any
-		keys   []string
-		expect string
-	}{
-		{
-			name:   "nil map",
-			keys:   []string{"foo", "bar"},
-			expect: "",
-		},
-		{
-			name:   "empty map",
-			m:      map[string]any{},
-			keys:   []string{"foo", "bar"},
-			expect: "",
-		},
-		{
-			name: "no match",
-			m: map[string]any{
-				"baz": "baz",
-			},
-			keys:   []string{"foo", "bar"},
-			expect: "",
-		},
-		{
-			name: "no keys",
-			m: map[string]any{
-				"baz": "baz",
-			},
-			keys:   []string{},
-			expect: "",
-		},
-		{
-			name: "nil match",
-			m: map[string]any{
-				"foo": nil,
-			},
-			keys:   []string{"foo", "bar"},
-			expect: "",
-		},
-		{
-			name: "empty match",
-			m: map[string]any{
-				"foo": "",
-			},
-			keys:   []string{"foo", "bar"},
-			expect: "",
-		},
-		{
-			name: "matches first key",
-			m: map[string]any{
-				"foo": "fnords",
-			},
-			keys:   []string{"foo", "bar"},
-			expect: "fnords",
-		},
-		{
-			name: "matches second key",
-			m: map[string]any{
-				"bar": "smarf",
-			},
-			keys:   []string{"foo", "bar"},
-			expect: "smarf",
-		},
-		{
-			name: "matches second key with nil first match",
-			m: map[string]any{
-				"foo": nil,
-				"bar": "smarf",
-			},
-			keys:   []string{"foo", "bar"},
-			expect: "smarf",
-		},
-		{
-			name: "matches second key with empty first match",
-			m: map[string]any{
-				"foo": "",
-				"bar": "smarf",
-			},
-			keys:   []string{"foo", "bar"},
-			expect: "smarf",
-		},
-	}
-
-	for _, test := range table {
-		t.Run(test.name, func(t *testing.T) {
-			result := FirstIn(test.m, test.keys...)
-			assert.Equal(t, test.expect, result)
-		})
-	}
-}

View File

@ -484,14 +484,7 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
desc := replacer.Replace(description) desc := replacer.Replace(description)
iCalEvent.AddProperty("X-ALT-DESC", desc, ics.WithFmtType("text/html")) iCalEvent.AddProperty("X-ALT-DESC", desc, ics.WithFmtType("text/html"))
} else { } else {
// Disable auto wrap, causes huge memory spikes stripped, err := html2text.FromString(description, html2text.Options{PrettyTables: true})
// https://github.com/jaytaylor/html2text/issues/48
prettyTablesOptions := html2text.NewPrettyTablesOptions()
prettyTablesOptions.AutoWrapText = false
stripped, err := html2text.FromString(
description,
html2text.Options{PrettyTables: true, PrettyTablesOptions: prettyTablesOptions})
if err != nil { if err != nil {
return clues.Wrap(err, "converting html to text"). return clues.Wrap(err, "converting html to text").
With("description_length", len(description)) With("description_length", len(description))

View File

@ -59,15 +59,6 @@ const (
minEpochDurationUpperBound = 7 * 24 * time.Hour minEpochDurationUpperBound = 7 * 24 * time.Hour
) )
// allValidCompressors is the set of compression algorithms that are either
// currently in use or were previously used. Use it during the config verify
// command to avoid spurious errors. We can revisit whether we want to update
// the config in those old repos at a later time.
var allValidCompressors = map[compression.Name]struct{}{
compression.Name(defaultCompressor): {},
compression.Name("s2-default"): {},
}
var ( var (
ErrSettingDefaultConfig = clues.New("setting default repo config values") ErrSettingDefaultConfig = clues.New("setting default repo config values")
ErrorRepoAlreadyExists = clues.New("repo already exists") ErrorRepoAlreadyExists = clues.New("repo already exists")
@ -777,7 +768,7 @@ func (w *conn) verifyDefaultPolicyConfigOptions(
ctx = clues.Add(ctx, "current_global_policy", globalPol.String()) ctx = clues.Add(ctx, "current_global_policy", globalPol.String())
if _, ok := allValidCompressors[globalPol.CompressionPolicy.CompressorName]; !ok { if globalPol.CompressionPolicy.CompressorName != defaultCompressor {
errs.AddAlert(ctx, fault.NewAlert( errs.AddAlert(ctx, fault.NewAlert(
"unexpected compressor", "unexpected compressor",
corsoWrapperAlertNamespace, corsoWrapperAlertNamespace,
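The allowlist above reduces to a set-membership check at verify time. A minimal sketch; the compressor names are hypothetical stand-ins for defaultCompressor and older shipped values:

package repo

import "fmt"

// validCompressors mirrors allValidCompressors above: both the current
// default and previously used compressors pass config verification.
var validCompressors = map[string]struct{}{
	"current-default": {}, // placeholder for defaultCompressor
	"s2-default":      {},
}

func verifyCompressor(name string) error {
	if _, ok := validCompressors[name]; !ok {
		return fmt.Errorf("unexpected compressor %q", name)
	}

	return nil
}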

View File

@ -891,20 +891,6 @@ func (suite *ConnRetentionIntegrationSuite) TestVerifyDefaultConfigOptions() {
}, },
expectAlerts: 1, expectAlerts: 1,
}, },
{
name: "OldValidCompressor",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
pol, err := con.getGlobalPolicyOrEmpty(ctx)
require.NoError(t, err, clues.ToCore(err))
_, err = updateCompressionOnPolicy("s2-default", pol)
require.NoError(t, err, clues.ToCore(err))
err = con.writeGlobalPolicy(ctx, "test", pol)
require.NoError(t, err, clues.ToCore(err))
},
expectAlerts: 0,
},
{ {
name: "NonDefaultCompression", name: "NonDefaultCompression",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) { setupRepo: func(ctx context.Context, t *testing.T, con *conn) {

View File

@ -366,7 +366,7 @@ func downloadContent(
itemID := ptr.Val(item.GetId()) itemID := ptr.Val(item.GetId())
ctx = clues.Add(ctx, "item_id", itemID) ctx = clues.Add(ctx, "item_id", itemID)
content, err := downloadItem(ctx, iaag, driveID, item) content, err := downloadItem(ctx, iaag, item)
if err == nil { if err == nil {
return content, nil return content, nil
} else if !graph.IsErrUnauthorizedOrBadToken(err) { } else if !graph.IsErrUnauthorizedOrBadToken(err) {
@ -395,7 +395,7 @@ func downloadContent(
cdi := custom.ToCustomDriveItem(di) cdi := custom.ToCustomDriveItem(di)
content, err = downloadItem(ctx, iaag, driveID, cdi) content, err = downloadItem(ctx, iaag, cdi)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "content download retry") return nil, clues.Wrap(err, "content download retry")
} }
@ -426,7 +426,7 @@ func readItemContents(
return nil, core.ErrNotFound return nil, core.ErrNotFound
} }
rc, err := downloadFile(ctx, iaag, props.downloadURL, false) rc, err := downloadFile(ctx, iaag, props.downloadURL)
if graph.IsErrUnauthorizedOrBadToken(err) { if graph.IsErrUnauthorizedOrBadToken(err) {
logger.CtxErr(ctx, err).Debug("stale item in cache") logger.CtxErr(ctx, err).Debug("stale item in cache")
} }
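The retry shape preserved by this hunk: attempt the download, and only on an unauthorized or bad-token error refresh the item's URL and try once more. A simplified sketch; download and refreshDownloadURL are stand-ins for the real graph calls, not the actual API:

package drive

import (
	"context"
	"errors"
	"io"
	"strings"
)

var errBadToken = errors.New("unauthorized or bad token")

// Stand-ins for the real graph client calls.
func download(ctx context.Context, url string) (io.ReadCloser, error) {
	return io.NopCloser(strings.NewReader("content")), nil
}

func refreshDownloadURL(ctx context.Context, itemID string) (string, error) {
	return "https://example.invalid/fresh-url", nil
}

// downloadWithRefresh retries a failed download exactly once, and only
// when the failure looks like an expired or revoked download token.
func downloadWithRefresh(ctx context.Context, itemID, cachedURL string) (io.ReadCloser, error) {
	rc, err := download(ctx, cachedURL)
	if err == nil {
		return rc, nil
	}

	// Any other error is returned as-is; only token problems are retried.
	if !errors.Is(err, errBadToken) {
		return nil, err
	}

	freshURL, err := refreshDownloadURL(ctx, itemID)
	if err != nil {
		return nil, err
	}

	return download(ctx, freshURL)
}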

View File

@ -795,12 +795,7 @@ func (h mockBackupHandler[T]) AugmentItemInfo(
return h.ItemInfo return h.ItemInfo
} }
func (h *mockBackupHandler[T]) Get( func (h *mockBackupHandler[T]) Get(context.Context, string, map[string]string) (*http.Response, error) {
context.Context,
string,
map[string]string,
bool,
) (*http.Response, error) {
c := h.getCall c := h.getCall
h.getCall++ h.getCall++

View File

@ -21,10 +21,8 @@ import (
) )
const ( const (
acceptHeaderKey = "Accept" acceptHeaderKey = "Accept"
acceptHeaderValue = "*/*" acceptHeaderValue = "*/*"
gigabyte = 1024 * 1024 * 1024
largeFileDownloadLimit = 15 * gigabyte
) )
// downloadUrlKeys is used to find the download URL in a DriveItem response. // downloadUrlKeys is used to find the download URL in a DriveItem response.
@ -35,8 +33,7 @@ var downloadURLKeys = []string{
func downloadItem( func downloadItem(
ctx context.Context, ctx context.Context,
getter api.Getter, ag api.Getter,
driveID string,
item *custom.DriveItem, item *custom.DriveItem,
) (io.ReadCloser, error) { ) (io.ReadCloser, error) {
if item == nil { if item == nil {
@ -44,37 +41,36 @@ func downloadItem(
} }
var ( var (
// very large file content needs to be downloaded through a different endpoint, or else rc io.ReadCloser
// the download could take longer than the lifespan of the download token in the cached isFile = item.GetFile() != nil
// url, which will cause us to time out on every download request, even if we refresh the err error
// download url right before the query.
url = "https://graph.microsoft.com/v1.0/drives/" + driveID + "/items/" + ptr.Val(item.GetId()) + "/content"
reader io.ReadCloser
err error
isLargeFile = ptr.Val(item.GetSize()) > largeFileDownloadLimit
) )
// if this isn't a file, no content is available for download if isFile {
if item.GetFile() == nil { var (
return reader, nil url string
ad = item.GetAdditionalData()
)
for _, key := range downloadURLKeys {
if v, err := str.AnyValueToString(key, ad); err == nil {
url = v
break
}
}
rc, err = downloadFile(ctx, ag, url)
if err != nil {
return nil, clues.Stack(err)
}
} }
// smaller files will maintain our current behavior (prefetching the download url with the return rc, nil
// url cache). That pattern works for us in general, and we only need to deviate for very
// large file sizes.
if !isLargeFile {
url = str.FirstIn(item.GetAdditionalData(), downloadURLKeys...)
}
reader, err = downloadFile(ctx, getter, url, isLargeFile)
return reader, clues.StackWC(ctx, err).OrNil()
} }
type downloadWithRetries struct { type downloadWithRetries struct {
getter api.Getter getter api.Getter
requireAuth bool url string
url string
} }
func (dg *downloadWithRetries) SupportsRange() bool { func (dg *downloadWithRetries) SupportsRange() bool {
@ -90,7 +86,7 @@ func (dg *downloadWithRetries) Get(
// wouldn't work without it (get 416 responses instead of 206). // wouldn't work without it (get 416 responses instead of 206).
headers[acceptHeaderKey] = acceptHeaderValue headers[acceptHeaderKey] = acceptHeaderValue
resp, err := dg.getter.Get(ctx, dg.url, headers, dg.requireAuth) resp, err := dg.getter.Get(ctx, dg.url, headers)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "getting file") return nil, clues.Wrap(err, "getting file")
} }
@ -100,7 +96,7 @@ func (dg *downloadWithRetries) Get(
resp.Body.Close() resp.Body.Close()
} }
return nil, clues.NewWC(ctx, "malware detected").Label(graph.LabelsMalware) return nil, clues.New("malware detected").Label(graph.LabelsMalware)
} }
if resp != nil && (resp.StatusCode/100) != 2 { if resp != nil && (resp.StatusCode/100) != 2 {
@ -111,7 +107,7 @@ func (dg *downloadWithRetries) Get(
// upstream error checks can compare the status with // upstream error checks can compare the status with
// clues.HasLabel(err, graph.LabelStatus(http.KnownStatusCode)) // clues.HasLabel(err, graph.LabelStatus(http.KnownStatusCode))
return nil, clues. return nil, clues.
Wrap(clues.NewWC(ctx, resp.Status), "non-2xx http response"). Wrap(clues.New(resp.Status), "non-2xx http response").
Label(graph.LabelStatus(resp.StatusCode)) Label(graph.LabelStatus(resp.StatusCode))
} }
@ -122,7 +118,6 @@ func downloadFile(
ctx context.Context, ctx context.Context,
ag api.Getter, ag api.Getter,
url string, url string,
requireAuth bool,
) (io.ReadCloser, error) { ) (io.ReadCloser, error) {
if len(url) == 0 { if len(url) == 0 {
return nil, clues.NewWC(ctx, "empty file url") return nil, clues.NewWC(ctx, "empty file url")
@ -146,9 +141,8 @@ func downloadFile(
rc, err := readers.NewResetRetryHandler( rc, err := readers.NewResetRetryHandler(
ctx, ctx,
&downloadWithRetries{ &downloadWithRetries{
getter: ag, getter: ag,
requireAuth: requireAuth, url: url,
url: url,
}) })
return rc, clues.Stack(err).OrNil() return rc, clues.Stack(err).OrNil()
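Reduced to its core, the large-file branch above is a size check that swaps the cached pre-authenticated URL for the /content endpoint. A sketch reusing the constants and threshold from this file:

package drive

const (
	gigabyte               = 1024 * 1024 * 1024
	largeFileDownloadLimit = 15 * gigabyte
)

// pickDownloadURL: very large files go through the /content endpoint so
// the transfer cannot outlive the cached URL's download token; smaller
// files keep the prefetched, cached URL.
func pickDownloadURL(driveID, itemID, cachedURL string, size int64) string {
	if size > largeFileDownloadLimit {
		return "https://graph.microsoft.com/v1.0/drives/" + driveID +
			"/items/" + itemID + "/content"
	}

	return cachedURL
}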

View File

@ -109,11 +109,7 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
} }
// Read data for the file // Read data for the file
itemData, err := downloadItem( itemData, err := downloadItem(ctx, bh, custom.ToCustomDriveItem(driveItem))
ctx,
bh,
suite.m365.User.DriveID,
custom.ToCustomDriveItem(driveItem))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
size, err := io.Copy(io.Discard, itemData) size, err := io.Copy(io.Discard, itemData)
@ -296,7 +292,6 @@ func (m mockGetter) Get(
ctx context.Context, ctx context.Context,
url string, url string,
headers map[string]string, headers map[string]string,
requireAuth bool,
) (*http.Response, error) { ) (*http.Response, error) {
return m.GetFunc(ctx, url) return m.GetFunc(ctx, url)
} }
@ -384,7 +379,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
return nil, clues.New("test error") return nil, clues.New("test error")
}, },
errorExpected: require.Error, errorExpected: require.Error,
rcExpected: require.NotNil, rcExpected: require.Nil,
}, },
{ {
name: "download url is empty", name: "download url is empty",
@ -421,7 +416,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
}, nil }, nil
}, },
errorExpected: require.Error, errorExpected: require.Error,
rcExpected: require.NotNil, rcExpected: require.Nil,
}, },
{ {
name: "non-2xx http response", name: "non-2xx http response",
@ -440,7 +435,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
}, nil }, nil
}, },
errorExpected: require.Error, errorExpected: require.Error,
rcExpected: require.NotNil, rcExpected: require.Nil,
}, },
} }
@ -453,78 +448,9 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
mg := mockGetter{ mg := mockGetter{
GetFunc: test.GetFunc, GetFunc: test.GetFunc,
} }
rc, err := downloadItem( rc, err := downloadItem(ctx, mg, custom.ToCustomDriveItem(test.itemFunc()))
ctx,
mg,
"driveID",
custom.ToCustomDriveItem(test.itemFunc()))
test.errorExpected(t, err, clues.ToCore(err)) test.errorExpected(t, err, clues.ToCore(err))
test.rcExpected(t, rc, "reader should only be nil if item is nil") test.rcExpected(t, rc)
})
}
}
func (suite *ItemUnitTestSuite) TestDownloadItem_urlByFileSize() {
var (
testRc = io.NopCloser(bytes.NewReader([]byte("test")))
url = "https://example.com"
okResp = &http.Response{
StatusCode: http.StatusOK,
Body: testRc,
}
)
table := []struct {
name string
itemFunc func() models.DriveItemable
GetFunc func(ctx context.Context, url string) (*http.Response, error)
errorExpected require.ErrorAssertionFunc
rcExpected require.ValueAssertionFunc
label string
}{
{
name: "big file",
itemFunc: func() models.DriveItemable {
di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{"@microsoft.graph.downloadUrl": url})
di.SetSize(ptr.To[int64](20 * gigabyte))
return di
},
GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
assert.Contains(suite.T(), url, "/content")
return okResp, nil
},
},
{
name: "small file",
itemFunc: func() models.DriveItemable {
di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{"@microsoft.graph.downloadUrl": url})
di.SetSize(ptr.To[int64](2 * gigabyte))
return di
},
GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
assert.NotContains(suite.T(), url, "/content")
return okResp, nil
},
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
_, err := downloadItem(
ctx,
mockGetter{GetFunc: test.GetFunc},
"driveID",
custom.ToCustomDriveItem(test.itemFunc()))
require.NoError(t, err, clues.ToCore(err))
}) })
} }
} }
@ -581,11 +507,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem_ConnectionResetErrorOnFirstRead
mg := mockGetter{ mg := mockGetter{
GetFunc: GetFunc, GetFunc: GetFunc,
} }
rc, err := downloadItem( rc, err := downloadItem(ctx, mg, custom.ToCustomDriveItem(itemFunc()))
ctx,
mg,
"driveID",
custom.ToCustomDriveItem(itemFunc()))
errorExpected(t, err, clues.ToCore(err)) errorExpected(t, err, clues.ToCore(err))
rcExpected(t, rc) rcExpected(t, rc)

View File

@ -93,9 +93,8 @@ func (h siteBackupHandler) Get(
ctx context.Context, ctx context.Context,
url string, url string,
headers map[string]string, headers map[string]string,
requireAuth bool,
) (*http.Response, error) { ) (*http.Response, error) {
return h.ac.Get(ctx, url, headers, requireAuth) return h.ac.Get(ctx, url, headers)
} }
func (h siteBackupHandler) PathPrefix( func (h siteBackupHandler) PathPrefix(

View File

@ -154,8 +154,7 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
http.MethodGet, http.MethodGet,
props.downloadURL, props.downloadURL,
nil, nil,
nil, nil)
false)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, resp) require.NotNil(t, resp)

View File

@ -93,9 +93,8 @@ func (h userDriveBackupHandler) Get(
ctx context.Context, ctx context.Context,
url string, url string,
headers map[string]string, headers map[string]string,
requireAuth bool,
) (*http.Response, error) { ) (*http.Response, error) {
return h.ac.Get(ctx, url, headers, requireAuth) return h.ac.Get(ctx, url, headers)
} }
func (h userDriveBackupHandler) PathPrefix( func (h userDriveBackupHandler) PathPrefix(

View File

@ -89,9 +89,10 @@ func (bh mockBackupHandler) previewIncludeContainers() []string { return
func (bh mockBackupHandler) previewExcludeContainers() []string { return bh.previewExcludes } func (bh mockBackupHandler) previewExcludeContainers() []string { return bh.previewExcludes }
func (bh mockBackupHandler) CanSkipItemFailure( func (bh mockBackupHandler) CanSkipItemFailure(
err error, error,
resourceID string, string,
opts control.Options, string,
control.Options,
) (fault.SkipCause, bool) { ) (fault.SkipCause, bool) {
return "", false return "", false
} }

View File

@ -287,7 +287,8 @@ func (col *prefetchCollection) streamItems(
cause, canSkip := col.skipChecker.CanSkipItemFailure( cause, canSkip := col.skipChecker.CanSkipItemFailure(
err, err,
user, user,
col.Opts()) id,
col.BaseCollection.Opts())
// Handle known error cases // Handle known error cases
switch { switch {
@ -427,7 +428,6 @@ func (col *lazyFetchCollection) streamItems(
success int64 success int64
progressMessage chan<- struct{} progressMessage chan<- struct{}
user = col.user user = col.user
el = errs.Local()
) )
defer func() { defer func() {
@ -439,7 +439,7 @@ func (col *lazyFetchCollection) streamItems(
int(success), int(success),
0, 0,
col.FullPath().Folder(false), col.FullPath().Folder(false),
el.Failure()) errs.Failure())
}() }()
if len(col.added)+len(col.removed) > 0 { if len(col.added)+len(col.removed) > 0 {
@ -465,7 +465,7 @@ func (col *lazyFetchCollection) streamItems(
// add any new items // add any new items
for id, modTime := range col.added { for id, modTime := range col.added {
if el.Failure() != nil { if errs.Failure() != nil {
break break
} }
@ -481,18 +481,18 @@ func (col *lazyFetchCollection) streamItems(
&lazyItemGetter{ &lazyItemGetter{
userID: user, userID: user,
itemID: id, itemID: id,
category: col.Category(), category: col.FullPath().Category(),
getter: col.getter, getter: col.getter,
modTime: modTime, modTime: modTime,
immutableIDs: col.Opts().ToggleFeatures.ExchangeImmutableIDs, immutableIDs: col.Opts().ToggleFeatures.ExchangeImmutableIDs,
parentPath: parentPath, parentPath: parentPath,
skipChecker: col.skipChecker, skipChecker: col.skipChecker,
opts: col.Opts(), opts: col.BaseCollection.Opts(),
}, },
id, id,
modTime, modTime,
col.counter, col.counter,
el) errs)
atomic.AddInt64(&success, 1) atomic.AddInt64(&success, 1)
@ -526,23 +526,22 @@ func (lig *lazyItemGetter) GetData(
lig.immutableIDs, lig.immutableIDs,
lig.parentPath) lig.parentPath)
if err != nil { if err != nil {
if lig.skipChecker != nil { cause, canSkip := lig.skipChecker.CanSkipItemFailure(
cause, canSkip := lig.skipChecker.CanSkipItemFailure( err,
err, lig.userID,
lig.userID, lig.itemID,
lig.opts) lig.opts)
if canSkip { if canSkip {
errs.AddSkip(ctx, fault.FileSkip( errs.AddSkip(ctx, fault.FileSkip(
cause, cause,
lig.category.String(), lig.category.String(),
lig.itemID, lig.itemID,
lig.itemID, lig.itemID,
nil)) nil))
return nil, nil, false, clues. return nil, nil, false, clues.
NewWC(ctx, "error marked as skippable by handler"). NewWC(ctx, "error marked as skippable by handler").
Label(graph.LabelsSkippable) Label(graph.LabelsSkippable)
}
} }
// If an item was deleted then return an empty file so we don't fail // If an item was deleted then return an empty file so we don't fail
@ -559,7 +558,7 @@ func (lig *lazyItemGetter) GetData(
err = clues.Stack(err) err = clues.Stack(err)
errs.AddRecoverable(ctx, err) errs.AddRecoverable(ctx, err)
return nil, nil, false, clues.Stack(err) return nil, nil, false, err
} }
// Update the mod time to what we already told kopia about. This is required // Update the mod time to what we already told kopia about. This is required

View File

@ -28,7 +28,6 @@ import (
"github.com/alcionai/corso/src/pkg/errs/core" "github.com/alcionai/corso/src/pkg/errs/core"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph" "github.com/alcionai/corso/src/pkg/services/m365/api/graph"
graphTD "github.com/alcionai/corso/src/pkg/services/m365/api/graph/testdata" graphTD "github.com/alcionai/corso/src/pkg/services/m365/api/graph/testdata"
) )
@ -338,30 +337,29 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items() {
func (suite *CollectionUnitSuite) TestPrefetchCollection_Items_skipFailure() { func (suite *CollectionUnitSuite) TestPrefetchCollection_Items_skipFailure() {
var ( var (
t = suite.T()
start = time.Now().Add(-time.Second) start = time.Now().Add(-time.Second)
statusUpdater = func(*support.ControllerOperationStatus) {} statusUpdater = func(*support.ControllerOperationStatus) {}
) )
fullPath, err := path.Build("t", "pr", path.ExchangeService, path.EmailCategory, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
locPath, err := path.Build("t", "pr", path.ExchangeService, path.EmailCategory, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
table := []struct { table := []struct {
name string name string
category path.CategoryType
handler backupHandler
added map[string]time.Time added map[string]time.Time
removed map[string]struct{} removed map[string]struct{}
expectItemCount int expectItemCount int
expectSkippedCount int expectSkippedCount int
expectErr assert.ErrorAssertionFunc
}{ }{
{ {
name: "no items", name: "no items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
expectErr: assert.NoError,
}, },
{ {
name: "events only added items", name: "only added items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{ added: map[string]time.Time{
"fisher": {}, "fisher": {},
"flannigan": {}, "flannigan": {},
@ -369,12 +367,9 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items_skipFailure() {
}, },
expectItemCount: 0, expectItemCount: 0,
expectSkippedCount: 3, expectSkippedCount: 3,
expectErr: assert.NoError,
}, },
{ {
name: "events only removed items", name: "only removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
removed: map[string]struct{}{ removed: map[string]struct{}{
"princess": {}, "princess": {},
"poppy": {}, "poppy": {},
@ -382,12 +377,9 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items_skipFailure() {
}, },
expectItemCount: 3, expectItemCount: 3,
expectSkippedCount: 0, expectSkippedCount: 0,
expectErr: assert.NoError,
}, },
{ {
name: "events added and removed items", name: "added and removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{ added: map[string]time.Time{
"general": {}, "general": {},
}, },
@ -400,95 +392,6 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items_skipFailure() {
// not 1, because general is removed from the added // not 1, because general is removed from the added
// map due to being in the removed map // map due to being in the removed map
expectSkippedCount: 0, expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "contacts only added items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": {},
"flannigan": {},
"fitzbog": {},
},
expectItemCount: 0,
expectSkippedCount: 0,
expectErr: assert.Error,
},
{
name: "contacts only removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "contacts added and removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "mail only added items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": {},
"flannigan": {},
"fitzbog": {},
},
expectItemCount: 0,
expectSkippedCount: 0,
expectErr: assert.Error,
},
{
name: "mail only removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "mail added and removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: assert.NoError,
}, },
} }
@ -503,29 +406,19 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items_skipFailure() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
fullPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
locPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
opts := control.DefaultOptions()
opts.SkipEventsOnInstance503ForResources = map[string]struct{}{}
opts.SkipEventsOnInstance503ForResources["pr"] = struct{}{}
col := NewCollection( col := NewCollection(
data.NewBaseCollection( data.NewBaseCollection(
fullPath, fullPath,
nil, nil,
locPath.ToBuilder(), locPath.ToBuilder(),
opts, control.DefaultOptions(),
false, false,
count.New()), count.New()),
"pr", "",
&mock.ItemGetSerialize{ &mock.ItemGetSerialize{
SerializeErr: graph.ErrServiceUnavailableEmptyResp, SerializeErr: assert.AnError,
}, },
test.handler, mock.AlwaysCanSkipFailChecker(),
test.added, test.added,
maps.Keys(test.removed), maps.Keys(test.removed),
false, false,
@ -537,10 +430,10 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items_skipFailure() {
_, rok := test.removed[item.ID()] _, rok := test.removed[item.ID()]
if rok { if rok {
assert.True(t, item.Deleted(), "removals should be marked as deleted")
dimt, ok := item.(data.ItemModTime) dimt, ok := item.(data.ItemModTime)
require.True(t, ok, "item implements data.ItemModTime") require.True(t, ok, "item implements data.ItemModTime")
assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()") assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
assert.True(t, item.Deleted(), "removals should be marked as deleted")
} }
_, aok := test.added[item.ID()] _, aok := test.added[item.ID()]
@ -551,7 +444,7 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items_skipFailure() {
assert.True(t, aok || rok, "item must be either added or removed: %q", item.ID()) assert.True(t, aok || rok, "item must be either added or removed: %q", item.ID())
} }
test.expectErr(t, errs.Failure()) assert.NoError(t, errs.Failure())
assert.Equal( assert.Equal(
t, t,
test.expectItemCount, test.expectItemCount,
@ -708,7 +601,6 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
expectItemCount: 3, expectItemCount: 3,
expectReads: []string{ expectReads: []string{
"fisher", "fisher",
"flannigan",
"fitzbog", "fitzbog",
}, },
}, },
@ -773,10 +665,10 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
_, rok := test.removed[item.ID()] _, rok := test.removed[item.ID()]
if rok { if rok {
assert.True(t, item.Deleted(), "removals should be marked as deleted")
dimt, ok := item.(data.ItemModTime) dimt, ok := item.(data.ItemModTime)
require.True(t, ok, "item implements data.ItemModTime") require.True(t, ok, "item implements data.ItemModTime")
assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()") assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
assert.True(t, item.Deleted(), "removals should be marked as deleted")
} }
modTime, aok := test.added[item.ID()] modTime, aok := test.added[item.ID()]
@ -785,6 +677,7 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
// initializer. // initializer.
assert.Implements(t, (*data.ItemModTime)(nil), item) assert.Implements(t, (*data.ItemModTime)(nil), item)
assert.Equal(t, modTime, item.(data.ItemModTime).ModTime(), "item mod time") assert.Equal(t, modTime, item.(data.ItemModTime).ModTime(), "item mod time")
assert.False(t, item.Deleted(), "additions should not be marked as deleted") assert.False(t, item.Deleted(), "additions should not be marked as deleted")
// Check if the test wants us to read the item's data so the lazy // Check if the test wants us to read the item's data so the lazy
@ -804,8 +697,6 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
// collection initializer. // collection initializer.
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, modTime, info.Modified(), "ItemInfo mod time") assert.Equal(t, modTime, info.Modified(), "ItemInfo mod time")
} else {
assert.Fail(t, "unexpected read on item %s", item.ID())
} }
} }
@ -824,60 +715,44 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_skipFailure() { func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_skipFailure() {
var ( var (
t = suite.T()
start = time.Now().Add(-time.Second) start = time.Now().Add(-time.Second)
statusUpdater = func(*support.ControllerOperationStatus) {} statusUpdater = func(*support.ControllerOperationStatus) {}
expectSkip = func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
assert.ErrorContains(t, err, "skip")
assert.True(t, clues.HasLabel(err, graph.LabelsSkippable), clues.ToCore(err))
}
expectNotSkipped = func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
assert.NotContains(t, err.Error(), "skip")
}
) )
fullPath, err := path.Build("t", "pr", path.ExchangeService, path.EmailCategory, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
locPath, err := path.Build("t", "pr", path.ExchangeService, path.EmailCategory, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
table := []struct { table := []struct {
name string name string
added map[string]time.Time added map[string]time.Time
removed map[string]struct{} removed map[string]struct{}
category path.CategoryType
handler backupHandler
expectItemCount int expectItemCount int
expectSkippedCount int expectSkippedCount int
expectReads []string expectReads []string
expectErr func(t *testing.T, err error)
expectFailure assert.ErrorAssertionFunc
}{ }{
{ {
name: "no items", name: "no items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
expectFailure: assert.NoError,
}, },
{ {
name: "events only added items", name: "only added items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{ added: map[string]time.Time{
"fisher": start.Add(time.Minute), "fisher": start.Add(time.Minute),
"flannigan": start.Add(2 * time.Minute), "flannigan": start.Add(2 * time.Minute),
"fitzbog": start.Add(3 * time.Minute), "fitzbog": start.Add(3 * time.Minute),
}, },
expectItemCount: 3, expectItemCount: 3,
expectSkippedCount: 3, expectSkippedCount: 2,
expectReads: []string{ expectReads: []string{
"fisher", "fisher",
"flannigan",
"fitzbog", "fitzbog",
}, },
expectErr: expectSkip,
expectFailure: assert.NoError,
}, },
{ {
name: "events only removed items", name: "only removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
removed: map[string]struct{}{ removed: map[string]struct{}{
"princess": {}, "princess": {},
"poppy": {}, "poppy": {},
@ -885,13 +760,9 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_skipFailure() {
}, },
expectItemCount: 3, expectItemCount: 3,
expectSkippedCount: 0, expectSkippedCount: 0,
expectErr: expectSkip,
expectFailure: assert.NoError,
}, },
{ {
name: "events added and removed items", name: "added and removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{ added: map[string]time.Time{
"general": {}, "general": {},
}, },
@ -904,112 +775,6 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_skipFailure() {
// not 1, because general is removed from the added // not 1, because general is removed from the added
// map due to being in the removed map // map due to being in the removed map
expectSkippedCount: 0, expectSkippedCount: 0,
expectErr: expectSkip,
expectFailure: assert.NoError,
},
{
name: "contacts only added items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": start.Add(time.Minute),
"flannigan": start.Add(2 * time.Minute),
"fitzbog": start.Add(3 * time.Minute),
},
expectItemCount: 3,
expectSkippedCount: 0,
expectReads: []string{
"fisher",
"flannigan",
"fitzbog",
},
expectErr: expectNotSkipped,
expectFailure: assert.Error,
},
{
name: "contacts only removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: expectNotSkipped,
expectFailure: assert.NoError,
},
{
name: "contacts added and removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: expectNotSkipped,
expectFailure: assert.NoError,
},
{
name: "mail only added items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": start.Add(time.Minute),
"flannigan": start.Add(2 * time.Minute),
"fitzbog": start.Add(3 * time.Minute),
},
expectItemCount: 3,
expectSkippedCount: 0,
expectReads: []string{
"fisher",
"flannigan",
"fitzbog",
},
expectErr: expectNotSkipped,
expectFailure: assert.Error,
},
{
name: "mail only removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: expectNotSkipped,
expectFailure: assert.NoError,
},
{
name: "mail added and removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: expectNotSkipped,
expectFailure: assert.NoError,
}, },
} }
@ -1017,41 +782,31 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_skipFailure() {
suite.Run(test.name, func() { suite.Run(test.name, func() {
var ( var (
t = suite.T() t = suite.T()
errs = fault.New(false) errs = fault.New(true)
itemCount int itemCount int
) )
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
fullPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
locPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
mlg := &mockLazyItemGetterSerializer{ mlg := &mockLazyItemGetterSerializer{
ItemGetSerialize: &mock.ItemGetSerialize{ ItemGetSerialize: &mock.ItemGetSerialize{
SerializeErr: graph.ErrServiceUnavailableEmptyResp, SerializeErr: assert.AnError,
}, },
} }
defer mlg.check(t, test.expectReads) defer mlg.check(t, test.expectReads)
opts := control.DefaultOptions()
opts.SkipEventsOnInstance503ForResources = map[string]struct{}{}
opts.SkipEventsOnInstance503ForResources["pr"] = struct{}{}
col := NewCollection( col := NewCollection(
data.NewBaseCollection( data.NewBaseCollection(
fullPath, fullPath,
nil, nil,
locPath.ToBuilder(), locPath.ToBuilder(),
opts, control.DefaultOptions(),
false, false,
count.New()), count.New()),
"pr", "",
mlg, mlg,
test.handler, mock.AlwaysCanSkipFailChecker(),
test.added, test.added,
maps.Keys(test.removed), maps.Keys(test.removed),
true, true,
@ -1063,10 +818,10 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_skipFailure() {
_, rok := test.removed[item.ID()] _, rok := test.removed[item.ID()]
if rok { if rok {
assert.True(t, item.Deleted(), "removals should be marked as deleted")
dimt, ok := item.(data.ItemModTime) dimt, ok := item.(data.ItemModTime)
require.True(t, ok, "item implements data.ItemModTime") require.True(t, ok, "item implements data.ItemModTime")
assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()") assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
assert.True(t, item.Deleted(), "removals should be marked as deleted")
} }
modTime, aok := test.added[item.ID()] modTime, aok := test.added[item.ID()]
@ -1075,6 +830,7 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_skipFailure() {
// initializer. // initializer.
assert.Implements(t, (*data.ItemModTime)(nil), item) assert.Implements(t, (*data.ItemModTime)(nil), item)
assert.Equal(t, modTime, item.(data.ItemModTime).ModTime(), "item mod time") assert.Equal(t, modTime, item.(data.ItemModTime).ModTime(), "item mod time")
assert.False(t, item.Deleted(), "additions should not be marked as deleted") assert.False(t, item.Deleted(), "additions should not be marked as deleted")
// Check if the test wants us to read the item's data so the lazy // Check if the test wants us to read the item's data so the lazy
@ -1083,23 +839,18 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_skipFailure() {
r := item.ToReader() r := item.ToReader()
_, err := io.ReadAll(r) _, err := io.ReadAll(r)
test.expectErr(t, err) assert.Error(t, err, clues.ToCore(err))
assert.ErrorContains(t, err, "marked as skippable", clues.ToCore(err))
assert.True(t, clues.HasLabel(err, graph.LabelsSkippable), clues.ToCore(err))
r.Close() r.Close()
} else {
assert.Fail(t, "unexpected read on item %s", item.ID())
} }
} }
assert.True(t, aok || rok, "item must be either added or removed: %q", item.ID()) assert.True(t, aok || rok, "item must be either added or removed: %q", item.ID())
} }
failure := errs.Failure() assert.NoError(t, errs.Failure())
if failure == nil && len(errs.Recovered()) > 0 {
failure = errs.Recovered()[0]
}
test.expectFailure(t, failure, clues.ToCore(failure))
assert.Equal( assert.Equal(
t, t,
test.expectItemCount, test.expectItemCount,

View File

@ -56,9 +56,9 @@ func (h contactBackupHandler) NewContainerCache(
} }
func (h contactBackupHandler) CanSkipItemFailure( func (h contactBackupHandler) CanSkipItemFailure(
err error, error,
resourceID string, string, string,
opts control.Options, control.Options,
) (fault.SkipCause, bool) { ) (fault.SkipCause, bool) {
return "", false return "", false
} }

View File

@ -3,7 +3,6 @@ package exchange
import ( import (
"testing" "testing"
"github.com/google/uuid"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
@ -22,62 +21,11 @@ func TestContactsBackupHandlerUnitSuite(t *testing.T) {
} }
func (suite *ContactsBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() { func (suite *ContactsBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() {
resourceID := uuid.NewString() t := suite.T()
table := []struct { h := newContactBackupHandler(api.Client{})
name string cause, result := h.CanSkipItemFailure(nil, "", "", control.Options{})
err error
opts control.Options
expect assert.BoolAssertionFunc
expectCause fault.SkipCause
}{
{
name: "no config",
err: assert.AnError,
opts: control.Options{},
expect: assert.False,
},
{
name: "false when map is empty",
err: assert.AnError,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{},
},
expect: assert.False,
},
{
name: "false on nil error",
err: nil,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{
resourceID: {},
},
},
expect: assert.False,
},
{
name: "false even if resource matches",
err: assert.AnError,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{
resourceID: {},
},
},
expect: assert.False,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
h := newContactBackupHandler(api.Client{}) assert.False(t, result)
cause, result := h.CanSkipItemFailure( assert.Equal(t, fault.SkipCause(""), cause)
test.err,
resourceID,
test.opts)
test.expect(t, result)
assert.Equal(t, test.expectCause, cause)
})
}
} }

View File

@ -3,6 +3,7 @@ package exchange
import ( import (
"errors" "errors"
"net/http" "net/http"
"slices"
"github.com/alcionai/clues" "github.com/alcionai/clues"
@ -66,25 +67,23 @@ func (h eventBackupHandler) NewContainerCache(
// built into the func. // built into the func.
func (h eventBackupHandler) CanSkipItemFailure( func (h eventBackupHandler) CanSkipItemFailure(
err error, err error,
resourceID string, resourceID, itemID string,
opts control.Options, opts control.Options,
) (fault.SkipCause, bool) { ) (fault.SkipCause, bool) {
if err == nil { if err == nil {
return "", false return "", false
} }
// this is a bit overly cautious. we do know that we get 503s with empty response bodies
// due to failures when getting too many instances. We don't know for sure if we get
// generic, well-formed 503s. But since we're working with specific resources and item
// IDs in the first place, that extra caution will help make sure an unexpected error doesn't
// slip through the cracks on us.
if !errors.Is(err, graph.ErrServiceUnavailableEmptyResp) && if !errors.Is(err, graph.ErrServiceUnavailableEmptyResp) &&
!clues.HasLabel(err, graph.LabelStatus(http.StatusServiceUnavailable)) { !clues.HasLabel(err, graph.LabelStatus(http.StatusServiceUnavailable)) {
return "", false return "", false
} }
_, ok := opts.SkipEventsOnInstance503ForResources[resourceID] itemIDs, ok := opts.SkipTheseEventsOnInstance503[resourceID]
if !ok {
return "", false
}
// strict equals required here. ids are case sensitive. // strict equals required here. ids are case sensitive.
return fault.SkipKnownEventInstance503s, ok return fault.SkipKnownEventInstance503s, slices.Contains(itemIDs, itemID)
} }
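One side of this hunk keys the skip list by resource only; the other keys it by resource and event ID. A hedged usage sketch of the map-of-IDs form, assuming it lives alongside the exchange package's imports; the IDs are placeholders:

// wouldSkip is illustrative only: placeholder IDs, any handler that
// implements canSkipItemFailurer.
func wouldSkip(h canSkipItemFailurer, err error) bool {
	opts := control.DefaultOptions()
	opts.SkipTheseEventsOnInstance503 = map[string][]string{
		"resource-id": {"event-id-1", "event-id-2"},
	}

	// Both the resource and the specific event must match; IDs are
	// case sensitive, per the comment above.
	_, canSkip := h.CanSkipItemFailure(err, "resource-id", "event-id-1", opts)

	return canSkip
}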

View File

@ -25,7 +25,10 @@ func TestEventsBackupHandlerUnitSuite(t *testing.T) {
} }
func (suite *EventsBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() { func (suite *EventsBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() {
resourceID := uuid.NewString() var (
resourceID = uuid.NewString()
itemID = uuid.NewString()
)
table := []struct { table := []struct {
name string name string
@ -35,27 +38,25 @@ func (suite *EventsBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() {
expectCause fault.SkipCause expectCause fault.SkipCause
}{ }{
{ {
name: "no config", name: "no config",
err: graph.ErrServiceUnavailableEmptyResp, err: nil,
opts: control.Options{}, opts: control.Options{},
expect: assert.False, expect: assert.False,
expectCause: fault.SkipKnownEventInstance503s,
}, },
{ {
name: "empty skip on 503", name: "empty skip on 503",
err: graph.ErrServiceUnavailableEmptyResp, err: nil,
opts: control.Options{ opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{}, SkipTheseEventsOnInstance503: map[string][]string{},
}, },
expect: assert.False, expect: assert.False,
expectCause: fault.SkipKnownEventInstance503s,
}, },
{ {
name: "nil error", name: "nil error",
err: nil, err: nil,
opts: control.Options{ opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{ SkipTheseEventsOnInstance503: map[string][]string{
resourceID: {}, "foo": {"bar", "baz"},
}, },
}, },
expect: assert.False, expect: assert.False,
@ -64,19 +65,30 @@ func (suite *EventsBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() {
name: "non-matching resource", name: "non-matching resource",
err: graph.ErrServiceUnavailableEmptyResp, err: graph.ErrServiceUnavailableEmptyResp,
opts: control.Options{ opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{ SkipTheseEventsOnInstance503: map[string][]string{
"foo": {}, "foo": {"bar", "baz"},
}, },
}, },
expect: assert.False, expect: assert.False,
},
{
name: "non-matching item",
err: graph.ErrServiceUnavailableEmptyResp,
opts: control.Options{
SkipTheseEventsOnInstance503: map[string][]string{
resourceID: {"bar", "baz"},
},
},
expect: assert.False,
// the item won't match, but we still return this as the cause
expectCause: fault.SkipKnownEventInstance503s, expectCause: fault.SkipKnownEventInstance503s,
}, },
{ {
name: "match on instance 503 empty resp", name: "match on instance 503 empty resp",
err: graph.ErrServiceUnavailableEmptyResp, err: graph.ErrServiceUnavailableEmptyResp,
opts: control.Options{ opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{ SkipTheseEventsOnInstance503: map[string][]string{
resourceID: {}, resourceID: {"bar", itemID},
}, },
}, },
expect: assert.True, expect: assert.True,
@ -87,8 +99,8 @@ func (suite *EventsBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() {
err: clues.New("arbitrary error"). err: clues.New("arbitrary error").
Label(graph.LabelStatus(http.StatusServiceUnavailable)), Label(graph.LabelStatus(http.StatusServiceUnavailable)),
opts: control.Options{ opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{ SkipTheseEventsOnInstance503: map[string][]string{
resourceID: {}, resourceID: {"bar", itemID},
}, },
}, },
expect: assert.True, expect: assert.True,
@ -103,6 +115,7 @@ func (suite *EventsBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() {
cause, result := h.CanSkipItemFailure( cause, result := h.CanSkipItemFailure(
test.err, test.err,
resourceID, resourceID,
itemID,
test.opts) test.opts)
test.expect(t, result) test.expect(t, result)

View File

@ -62,7 +62,7 @@ func BackupHandlers(ac api.Client) map[path.CategoryType]backupHandler {
type canSkipItemFailurer interface { type canSkipItemFailurer interface {
CanSkipItemFailure( CanSkipItemFailure(
err error, err error,
resourceID string, resourceID, itemID string,
opts control.Options, opts control.Options,
) (fault.SkipCause, bool) ) (fault.SkipCause, bool)
} }
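On the consuming side, the (cause, ok) pair decides between a skip record and a recoverable error, mirroring the collection changes elsewhere in this compare. A sketch under the same package assumptions; the "container" argument is a placeholder for the real container path segment:

// recordOrSkip converts a skippable failure into a skip record, and
// anything else into a recoverable error.
func recordOrSkip(
	ctx context.Context,
	checker canSkipItemFailurer,
	err error,
	resourceID, itemID string,
	opts control.Options,
	errs *fault.Bus,
) {
	cause, canSkip := checker.CanSkipItemFailure(err, resourceID, itemID, opts)
	if canSkip {
		// A skip record replaces the recoverable error.
		errs.AddSkip(ctx, fault.FileSkip(cause, "container", itemID, itemID, nil))
		return
	}

	errs.AddRecoverable(ctx, err)
}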

View File

@ -62,8 +62,12 @@ func (h mailBackupHandler) NewContainerCache(
func (h mailBackupHandler) CanSkipItemFailure( func (h mailBackupHandler) CanSkipItemFailure(
err error, err error,
resourceID string, resourceID string, itemID string,
opts control.Options, opts control.Options,
) (fault.SkipCause, bool) { ) (fault.SkipCause, bool) {
return "", false if err == nil {
return "", false
}
return fault.SkipKnownEventInstance503s, true
} }

View File

@ -3,7 +3,6 @@ package exchange
import ( import (
"testing" "testing"
"github.com/google/uuid"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
@ -22,62 +21,11 @@ func TestMailBackupHandlerUnitSuite(t *testing.T) {
} }
func (suite *MailBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() { func (suite *MailBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() {
resourceID := uuid.NewString() t := suite.T()
table := []struct { h := newMailBackupHandler(api.Client{})
name string cause, result := h.CanSkipItemFailure(nil, "", "", control.Options{})
err error
opts control.Options
expect assert.BoolAssertionFunc
expectCause fault.SkipCause
}{
{
name: "no config",
err: assert.AnError,
opts: control.Options{},
expect: assert.False,
},
{
name: "false when map is empty",
err: assert.AnError,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{},
},
expect: assert.False,
},
{
name: "false on nil error",
err: nil,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{
resourceID: {},
},
},
expect: assert.False,
},
{
name: "false even if resource matches",
err: assert.AnError,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{
resourceID: {},
},
},
expect: assert.False,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
h := newMailBackupHandler(api.Client{}) assert.False(t, result)
cause, result := h.CanSkipItemFailure( assert.Equal(t, fault.SkipCause(""), cause)
test.err,
resourceID,
test.opts)
test.expect(t, result)
assert.Equal(t, test.expectCause, cause)
})
}
} }

View File

@ -59,9 +59,10 @@ type canSkipFailChecker struct {
} }
func (m canSkipFailChecker) CanSkipItemFailure( func (m canSkipFailChecker) CanSkipItemFailure(
err error, error,
resourceID string, string,
opts control.Options, string,
control.Options,
) (fault.SkipCause, bool) { ) (fault.SkipCause, bool) {
return fault.SkipCause("testing"), m.canSkip return fault.SkipCause("testing"), m.canSkip
} }
@ -69,3 +70,7 @@ func (m canSkipFailChecker) CanSkipItemFailure(
func NeverCanSkipFailChecker() *canSkipFailChecker { func NeverCanSkipFailChecker() *canSkipFailChecker {
return &canSkipFailChecker{} return &canSkipFailChecker{}
} }
func AlwaysCanSkipFailChecker() *canSkipFailChecker {
return &canSkipFailChecker{true}
}

View File

@ -197,12 +197,7 @@ func (h BackupHandler[T]) AugmentItemInfo(
return h.ItemInfo return h.ItemInfo
} }
func (h *BackupHandler[T]) Get( func (h *BackupHandler[T]) Get(context.Context, string, map[string]string) (*http.Response, error) {
context.Context,
string,
map[string]string,
bool,
) (*http.Response, error) {
c := h.getCall c := h.getCall
h.getCall++ h.getCall++

View File

@ -420,9 +420,6 @@ func (suite *BackupOpUnitSuite) TestNewBackupOperation_configuredOptionsMatchInp
MaxPages: 46, MaxPages: 46,
Enabled: true, Enabled: true,
}, },
SkipEventsOnInstance503ForResources: map[string]struct{}{
"resource": {},
},
} }
t := suite.T() t := suite.T()

View File

@ -305,10 +305,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead int itemsRead int
itemsWritten int itemsWritten int
nonMetaItemsWritten int nonMetaItemsWritten int
// TODO: Temporary mechanism to skip permissions
// related tests. Remove once we figure out the issue.
skipChecks bool
}{ }{
{ {
name: "clean incremental, no changes", name: "clean incremental, no changes",
@ -357,7 +353,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead: 1, // .data file for newitem itemsRead: 1, // .data file for newitem
itemsWritten: 3, // .meta for newitem, .dirmeta for parent (.data is not written as it is not updated) itemsWritten: 3, // .meta for newitem, .dirmeta for parent (.data is not written as it is not updated)
nonMetaItemsWritten: 0, // none because the file is considered cached instead of written. nonMetaItemsWritten: 0, // none because the file is considered cached instead of written.
skipChecks: true,
}, },
{ {
name: "remove permission from new file", name: "remove permission from new file",
@ -377,7 +372,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead: 1, // .data file for newitem itemsRead: 1, // .data file for newitem
itemsWritten: 3, // .meta for newitem, .dirmeta for parent (.data is not written as it is not updated) itemsWritten: 3, // .meta for newitem, .dirmeta for parent (.data is not written as it is not updated)
nonMetaItemsWritten: 0, // none because the file is considered cached instead of written. nonMetaItemsWritten: 0, // none because the file is considered cached instead of written.
skipChecks: true,
}, },
{ {
name: "add permission to container", name: "add permission to container",
@ -398,7 +392,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead: 0, itemsRead: 0,
itemsWritten: 2, // .dirmeta for collection itemsWritten: 2, // .dirmeta for collection
nonMetaItemsWritten: 0, // no files updated as update on container nonMetaItemsWritten: 0, // no files updated as update on container
skipChecks: true,
}, },
{ {
name: "remove permission from container", name: "remove permission from container",
@ -419,7 +412,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead: 0, itemsRead: 0,
itemsWritten: 2, // .dirmeta for collection itemsWritten: 2, // .dirmeta for collection
nonMetaItemsWritten: 0, // no files updated nonMetaItemsWritten: 0, // no files updated
skipChecks: true,
}, },
{ {
name: "update contents of a file", name: "update contents of a file",
@ -749,11 +741,9 @@ func RunIncrementalDriveishBackupTest(
assertReadWrite = assert.LessOrEqual assertReadWrite = assert.LessOrEqual
} }
if !test.skipChecks { assertReadWrite(t, expectWrites, incBO.Results.ItemsWritten, "incremental items written")
assertReadWrite(t, expectWrites, incBO.Results.ItemsWritten, "incremental items written") assertReadWrite(t, expectNonMetaWrites, incBO.Results.NonMetaItemsWritten, "incremental non-meta items written")
assertReadWrite(t, expectNonMetaWrites, incBO.Results.NonMetaItemsWritten, "incremental non-meta items written") assertReadWrite(t, expectReads, incBO.Results.ItemsRead, "incremental items read")
assertReadWrite(t, expectReads, incBO.Results.ItemsRead, "incremental items read")
}
assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure())) assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors") assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors")

View File

@ -175,7 +175,7 @@ func runGroupsIncrementalBackupTests(
suite, suite,
opts, opts,
m365.Group.ID, m365.Group.ID,
m365.SecondaryGroup.ID, // more reliable than user m365.User.ID,
path.GroupsService, path.GroupsService,
path.LibrariesCategory, path.LibrariesCategory,
ic, ic,
@ -201,12 +201,8 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
sel.Include( sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel), selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel)) selTD.GroupsBackupChannelScope(sel),
selTD.GroupsBackupConversationScope(sel))
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
// we go fix the group mailbox.
// selTD.GroupsBackupConversationScope(sel))
bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter) bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
defer bod.Close(t, ctx) defer bod.Close(t, ctx)
@ -330,12 +326,8 @@ func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9MergeBas
sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID}) sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
sel.Include( sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel), selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel)) selTD.GroupsBackupChannelScope(sel),
selTD.GroupsBackupConversationScope(sel))
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conv backups while
// we go fix the group mailbox.
// selTD.GroupsBackupConversationScope(sel))
RunMergeBaseGroupsUpdate(suite, sel.Selector, false) RunMergeBaseGroupsUpdate(suite, sel.Selector, false)
} }
@ -344,12 +336,8 @@ func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9AssistBa
sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID}) sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
sel.Include( sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel), selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel)) selTD.GroupsBackupChannelScope(sel),
selTD.GroupsBackupConversationScope(sel))
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conv backups while
// we go fix the group mailbox.
// selTD.GroupsBackupConversationScope(sel))
RunDriveAssistBaseGroupsUpdate(suite, sel.Selector, false) RunDriveAssistBaseGroupsUpdate(suite, sel.Selector, false)
} }

View File

@ -28,10 +28,8 @@ type Options struct {
// had already backed up. // had already backed up.
PreviewLimits PreviewItemLimits `json:"previewItemLimits"` PreviewLimits PreviewItemLimits `json:"previewItemLimits"`
// specifying a resource tuple in this map allows that resource to produce // resourceID -> []eventID
// a Skip instead of a recoverable error in case of a failure due to 503 when SkipTheseEventsOnInstance503 map[string][]string
// retrieving calendar event item data.
SkipEventsOnInstance503ForResources map[string]struct{}
} }
// RateLimiter is the set of options applied to any external service facing rate // RateLimiter is the set of options applied to any external service facing rate

View File

@@ -10,7 +10,6 @@ import (
 	"github.com/alcionai/corso/src/internal/observe"
 	"github.com/alcionai/corso/src/pkg/fault"
-	"github.com/alcionai/corso/src/pkg/logger"
 )

 func ConsumeExportCollections(
@@ -20,10 +19,6 @@ func ConsumeExportCollections(
 	errs *fault.Bus,
 ) error {
 	el := errs.Local()
-	counted := 0
-
-	log := logger.Ctx(ctx).
-		With("export_location", exportLocation,
-			"collection_count", len(expColl))

 	for _, col := range expColl {
 		if el.Failure() != nil {
@@ -34,13 +29,6 @@ func ConsumeExportCollections(
 		ictx := clues.Add(ctx, "dir_name", folder)

 		for item := range col.Items(ictx) {
-			counted++
-
-			// Log every 1000 items that are processed
-			if counted%1000 == 0 {
-				log.Infow("progress writing export items", "count_items", counted)
-			}
-
 			if item.Error != nil {
 				el.AddRecoverable(ictx, clues.Wrap(item.Error, "getting item"))
 				continue
@@ -54,8 +42,6 @@ func ConsumeExportCollections(
 		}
 	}

-	log.Infow("completed writing export items", "count_items", counted)
-
 	return el.Failure()
 }


@@ -47,7 +47,7 @@ func (c Access) GetToken(
 			c.Credentials.AzureClientSecret))
 	)

-	resp, err := c.Post(ctx, rawURL, headers, body, false)
+	resp, err := c.Post(ctx, rawURL, headers, body)
 	if err != nil {
 		return clues.Stack(err)
 	}


@@ -63,14 +63,7 @@ func NewClient(
 		return Client{}, err
 	}

-	azureAuth, err := graph.NewAzureAuth(creds)
-	if err != nil {
-		return Client{}, clues.Wrap(err, "generating azure authorizer")
-	}
-
-	rqr := graph.NewNoTimeoutHTTPWrapper(
-		counter,
-		graph.AuthorizeRequester(azureAuth))
+	rqr := graph.NewNoTimeoutHTTPWrapper(counter)

 	if co.DeltaPageSize < 1 || co.DeltaPageSize > maxDeltaPageSize {
 		co.DeltaPageSize = maxDeltaPageSize
@@ -131,7 +124,11 @@ func newLargeItemService(
 	counter *count.Bus,
 ) (*graph.Service, error) {
 	a, err := NewService(creds, counter, graph.NoTimeout())
-	return a, clues.Wrap(err, "generating no-timeout graph adapter").OrNil()
+	if err != nil {
+		return nil, clues.Wrap(err, "generating no-timeout graph adapter")
+	}
+
+	return a, nil
 }

 type Getter interface {
@@ -139,7 +136,6 @@ type Getter interface {
 		ctx context.Context,
 		url string,
 		headers map[string]string,
-		requireAuth bool,
 	) (*http.Response, error)
 }

@@ -148,9 +144,8 @@ func (c Client) Get(
 	ctx context.Context,
 	url string,
 	headers map[string]string,
-	requireAuth bool,
 ) (*http.Response, error) {
-	return c.Requester.Request(ctx, http.MethodGet, url, nil, headers, requireAuth)
+	return c.Requester.Request(ctx, http.MethodGet, url, nil, headers)
 }

 // Get performs an ad-hoc get request using its graph.Requester
@@ -159,9 +154,8 @@ func (c Client) Post(
 	url string,
 	headers map[string]string,
 	body io.Reader,
-	requireAuth bool,
 ) (*http.Response, error) {
-	return c.Requester.Request(ctx, http.MethodGet, url, body, headers, requireAuth)
+	return c.Requester.Request(ctx, http.MethodGet, url, body, headers)
 }

 // ---------------------------------------------------------------------------

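One detail worth flagging in the hunk above: on both sides of the change, the Post helper forwards http.MethodGet to Requester.Request, and the doc comment above it still describes a get. If that is unintended, the fix is a one-constant change. A self-contained sketch with stand-in types (the package name and the minimal Requester/Client shapes are assumptions for illustration, modeled on the post-change signature in the diff):

```go
package sketch

import (
	"context"
	"io"
	"net/http"
)

// Requester matches the shape used in the diff after the requireAuth
// parameter was dropped.
type Requester interface {
	Request(
		ctx context.Context,
		method, url string,
		body io.Reader,
		headers map[string]string,
	) (*http.Response, error)
}

type Client struct {
	Requester Requester
}

// Post performs an ad-hoc post request using its Requester. Unlike the
// helper in the hunk above, it forwards http.MethodPost.
func (c Client) Post(
	ctx context.Context,
	url string,
	headers map[string]string,
	body io.Reader,
) (*http.Response, error) {
	return c.Requester.Request(ctx, http.MethodPost, url, body, headers)
}
```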

@@ -32,9 +32,6 @@ func (suite *ConversationsPagerIntgSuite) SetupSuite() {
 }

 func (suite *ConversationsPagerIntgSuite) TestEnumerateConversations_withThreadsAndPosts() {
-	// Skip
-	suite.T().Skip("CorsoCITeam group mailbox backup is broken")
-
 	var (
 		t  = suite.T()
 		ac = suite.its.ac.Conversations()


@@ -1,94 +0,0 @@
package graph
import (
"context"
"net/http"
"net/url"
"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
"github.com/alcionai/clues"
abstractions "github.com/microsoft/kiota-abstractions-go"
kauth "github.com/microsoft/kiota-authentication-azure-go"
"github.com/alcionai/corso/src/pkg/account"
)
func GetAuth(tenant, client, secret string) (*kauth.AzureIdentityAuthenticationProvider, error) {
// Client Provider: Uses Secret for access to tenant-level data
cred, err := azidentity.NewClientSecretCredential(tenant, client, secret, nil)
if err != nil {
return nil, clues.Wrap(err, "creating m365 client identity")
}
auth, err := kauth.NewAzureIdentityAuthenticationProviderWithScopes(
cred,
[]string{"https://graph.microsoft.com/.default"})
if err != nil {
return nil, clues.Wrap(err, "creating azure authentication")
}
return auth, nil
}
// ---------------------------------------------------------------------------
// requester authorization
// ---------------------------------------------------------------------------
type authorizer interface {
addAuthToHeaders(
ctx context.Context,
urlStr string,
headers http.Header,
) error
}
// consumed by kiota
type authenticateRequester interface {
AuthenticateRequest(
ctx context.Context,
request *abstractions.RequestInformation,
additionalAuthenticationContext map[string]any,
) error
}
// ---------------------------------------------------------------------------
// Azure Authorizer
// ---------------------------------------------------------------------------
type azureAuth struct {
auth authenticateRequester
}
func NewAzureAuth(creds account.M365Config) (*azureAuth, error) {
auth, err := GetAuth(
creds.AzureTenantID,
creds.AzureClientID,
creds.AzureClientSecret)
return &azureAuth{auth}, clues.Stack(err).OrNil()
}
func (aa azureAuth) addAuthToHeaders(
ctx context.Context,
urlStr string,
headers http.Header,
) error {
requestInfo := abstractions.NewRequestInformation()
uri, err := url.Parse(urlStr)
if err != nil {
return clues.WrapWC(ctx, err, "parsing url").OrNil()
}
requestInfo.SetUri(*uri)
err = aa.auth.AuthenticateRequest(ctx, requestInfo, nil)
for _, k := range requestInfo.Headers.ListKeys() {
for _, v := range requestInfo.Headers.Get(k) {
headers.Add(k, v)
}
}
return clues.WrapWC(ctx, err, "authorizing request").OrNil()
}


@@ -240,7 +240,7 @@ func (mw *RateLimiterMiddleware) Intercept(
 	middlewareIndex int,
 	req *http.Request,
 ) (*http.Response, error) {
-	QueueRequest(getReqCtx(req))
+	QueueRequest(req.Context())
 	return pipeline.Next(req, middlewareIndex)
 }

@@ -339,7 +339,7 @@ func (mw *throttlingMiddleware) Intercept(
 	middlewareIndex int,
 	req *http.Request,
 ) (*http.Response, error) {
-	err := mw.tf.Block(getReqCtx(req))
+	err := mw.tf.Block(req.Context())
 	if err != nil {
 		return nil, err
 	}


@@ -701,48 +701,10 @@ func (ode oDataErr) errMessageMatchesAllFilters(err error, fs ...filters.Filter)
 // ---------------------------------------------------------------------------
 // other helpers
 // ---------------------------------------------------------------------------

-const (
-	// JWTQueryParam is a query param embed in graph download URLs which holds
-	// JWT token.
-	JWTQueryParam = "tempauth"
-
-	// base64 encoded json header. Contains {"alg":"HS256","typ":"JWT"}
-	//
-	// Hardcoding this instead of generating it every time on the fly.
-	// The algorithm doesn't matter as we are not verifying the token.
-	jwtHeader = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9"
-)
-
-func sanitizeToken(rawToken string) string {
-	segments := strings.Split(rawToken, ".")
-
-	// Check if the token has the old format, in which it has 3 segments and
-	// conforms to jwt spec. Format is seg1.seg2.seg3.
-	if len(segments) == 3 {
-		return rawToken
-	}
-
-	// Check if it is a msft proprietary token in which it has 4 segments and
-	// doesn't meet jwt spec. Format is v1.seg1.seg2.seg3. Return a token which
-	// meets jwt spec.
-	//
-	// In this proprietary token, there is no jwt header segment. Also, the claims
-	// section is split into first and segments. The first segment contains the
-	// `exp` claim that we are interested in.
-	//
-	// The second segment contains the rest of the claims, but likely encrypted.
-	// We don't need it so discard it. The last segment contains the signature which
-	// we don't care about either, as we are not verifying the token. So append it as is.
-	//
-	// It's okay if the sanitized token still doesn't meet jwt spec. It'll fail decoding
-	// later and we have fallbacks for that.
-	if len(segments) == 4 && segments[0] == "v1" {
-		return jwtHeader + "." + segments[1] + "." + segments[3]
-	}
-
-	// If MSFT change the token format again on us, just return empty string and let caller
-	// handle it as an error.
-	return ""
-}
+// JWTQueryParam is a query param embed in graph download URLs which holds
+// JWT token.
+const JWTQueryParam = "tempauth"

 // IsURLExpired inspects the jwt token embed in the item download url
 // and returns true if it is expired.
@@ -753,20 +715,12 @@ func IsURLExpired(
 	expiredErr error,
 	err error,
 ) {
-	ctx = clues.Add(ctx, "checked_url", urlStr)
-
 	// Extract the raw JWT string from the download url.
 	rawJWT, err := common.GetQueryParamFromURL(urlStr, JWTQueryParam)
 	if err != nil {
 		return nil, clues.WrapWC(ctx, err, "jwt query param not found")
 	}

-	// Token may have a proprietary format. Try to sanitize it to jwt format.
-	rawJWT = sanitizeToken(rawJWT)
-	if len(rawJWT) == 0 {
-		return nil, clues.WrapWC(ctx, err, "sanitizing jwt")
-	}
-
 	expired, err := jwt.IsJWTExpired(rawJWT)
 	if err != nil {
 		return nil, clues.WrapWC(ctx, err, "checking jwt expiry")

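Background on the two hunks above: sanitizeToken rewrote Microsoft's proprietary four-segment v1.seg1.seg2.seg3 download tokens into three-segment JWT shape so the expiry check could decode them; with it removed, only spec-shaped tokens decode cleanly. As a rough illustration of that style of check, this sketch reads a token's exp claim without verifying the signature. The helper name and sample token are illustrative, not Corso's jwt package:

```go
package main

import (
	"encoding/base64"
	"encoding/json"
	"fmt"
	"strings"
	"time"
)

// expiryOf decodes the payload (second segment) of a JWT and returns the
// expiry time from its "exp" claim. No signature verification is done;
// only the expiry is of interest.
func expiryOf(rawJWT string) (time.Time, error) {
	segments := strings.Split(rawJWT, ".")
	if len(segments) != 3 {
		// A v1.seg1.seg2.seg3 proprietary token would fail here, which is
		// what the removed sanitizeToken guarded against.
		return time.Time{}, fmt.Errorf("expected 3 segments, got %d", len(segments))
	}

	payload, err := base64.RawURLEncoding.DecodeString(segments[1])
	if err != nil {
		return time.Time{}, fmt.Errorf("decoding payload: %w", err)
	}

	var claims struct {
		Exp int64 `json:"exp"`
	}
	if err := json.Unmarshal(payload, &claims); err != nil {
		return time.Time{}, fmt.Errorf("parsing claims: %w", err)
	}

	return time.Unix(claims.Exp, 0), nil
}

func main() {
	// Hypothetical unsigned token: header.payload.signature with exp=2000000000.
	payload := base64.RawURLEncoding.EncodeToString([]byte(`{"exp":2000000000}`))
	token := "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9." + payload + ".sig"

	exp, err := expiryOf(token)
	if err != nil {
		panic(err)
	}

	fmt.Println("expired:", time.Now().After(exp))
}
```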

@@ -36,7 +36,6 @@ type Requester interface {
 		method, url string,
 		body io.Reader,
 		headers map[string]string,
-		requireAuth bool,
 	) (*http.Response, error)
 }

@@ -59,8 +58,12 @@ func NewHTTPWrapper(
 			transport: defaultTransport(),
 		},
 	}

+	redirect = func(req *http.Request, via []*http.Request) error {
+		return http.ErrUseLastResponse
+	}
+
 	hc = &http.Client{
-		Transport: rt,
+		CheckRedirect: redirect,
+		Transport:     rt,
 	}
 )
@@ -97,7 +100,6 @@ func (hw httpWrapper) Request(
 	method, url string,
 	body io.Reader,
 	headers map[string]string,
-	requireAuth bool,
 ) (*http.Response, error) {
 	req, err := http.NewRequestWithContext(ctx, method, url, body)
 	if err != nil {
@@ -113,17 +115,6 @@ func (hw httpWrapper) Request(
 	// See https://learn.microsoft.com/en-us/sharepoint/dev/general-development/how-to-avoid-getting-throttled-or-blocked-in-sharepoint-online#how-to-decorate-your-http-traffic
 	req.Header.Set("User-Agent", "ISV|Alcion|Corso/"+version.Version)

-	if requireAuth {
-		if hw.config.requesterAuth == nil {
-			return nil, clues.Wrap(err, "http wrapper misconfigured: missing required authorization")
-		}
-
-		err := hw.config.requesterAuth.addAuthToHeaders(ctx, url, req.Header)
-		if err != nil {
-			return nil, clues.Wrap(err, "setting request auth headers")
-		}
-	}
-
 	retriedErrors := []string{}

 	var e error
@@ -146,7 +137,7 @@ func (hw httpWrapper) Request(
 		resp, err := hw.client.Do(req)
 		if err == nil {
-			logResp(ictx, resp, req)
+			logResp(ictx, resp)
 			return resp, nil
 		}

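For context on the CheckRedirect addition in the first hunk above: returning http.ErrUseLastResponse makes net/http stop following redirects and hand the 3xx response itself back to the caller, Location header and all. A standalone demonstration against a local test server:

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

func main() {
	// Test server that redirects / to /target.
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Path == "/" {
			http.Redirect(w, r, "/target", http.StatusFound)
			return
		}
		fmt.Fprint(w, "landed")
	}))
	defer srv.Close()

	// With ErrUseLastResponse, the client surfaces the 302 instead of
	// transparently following it.
	client := &http.Client{
		CheckRedirect: func(req *http.Request, via []*http.Request) error {
			return http.ErrUseLastResponse
		},
	}

	resp, err := client.Get(srv.URL + "/")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	fmt.Println(resp.StatusCode, resp.Header.Get("Location")) // 302 /target
}
```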

@@ -40,10 +40,9 @@ func (suite *HTTPWrapperIntgSuite) TestNewHTTPWrapper() {
 	resp, err := hw.Request(
 		ctx,
 		http.MethodGet,
-		"https://www.google.com",
+		"https://www.corsobackup.io",
 		nil,
-		nil,
-		false)
+		nil)
 	require.NoError(t, err, clues.ToCore(err))

 	defer resp.Body.Close()

@@ -77,56 +76,6 @@ func (mw *mwForceResp) Intercept(
 	return mw.resp, mw.err
 }
func (suite *HTTPWrapperIntgSuite) TestHTTPWrapper_Request_withAuth() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
a := tconfig.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
azureAuth, err := NewAzureAuth(m365)
require.NoError(t, err, clues.ToCore(err))
hw := NewHTTPWrapper(count.New(), AuthorizeRequester(azureAuth))
// any request that requires authorization will do
resp, err := hw.Request(
ctx,
http.MethodGet,
"https://graph.microsoft.com/v1.0/users",
nil,
nil,
true)
require.NoError(t, err, clues.ToCore(err))
defer resp.Body.Close()
require.NotNil(t, resp)
require.Equal(t, http.StatusOK, resp.StatusCode)
// also validate that non-auth'd endpoints succeed
resp, err = hw.Request(
ctx,
http.MethodGet,
"https://www.google.com",
nil,
nil,
true)
require.NoError(t, err, clues.ToCore(err))
defer resp.Body.Close()
require.NotNil(t, resp)
require.Equal(t, http.StatusOK, resp.StatusCode)
}
// ---------------------------------------------------------------------------
// unit
// ---------------------------------------------------------------------------
 type HTTPWrapperUnitSuite struct {
 	tester.Suite
 }

@@ -135,25 +84,26 @@ func TestHTTPWrapperUnitSuite(t *testing.T) {
 	suite.Run(t, &HTTPWrapperUnitSuite{Suite: tester.NewUnitSuite(t)})
 }

-func (suite *HTTPWrapperUnitSuite) TestHTTPWrapper_Request_redirect() {
+func (suite *HTTPWrapperUnitSuite) TestNewHTTPWrapper_redirectMiddleware() {
 	t := suite.T()

 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	respHdr := http.Header{}
-	respHdr.Set("Location", "localhost:99999999/smarfs")
+	url := "https://graph.microsoft.com/fnords/beaux/regard"
+
+	hdr := http.Header{}
+	hdr.Set("Location", "localhost:99999999/smarfs")

 	toResp := &http.Response{
 		StatusCode: http.StatusFound,
-		Header:     respHdr,
+		Header:     hdr,
 	}

 	mwResp := mwForceResp{
 		resp: toResp,
 		alternate: func(req *http.Request) (bool, *http.Response, error) {
 			if strings.HasSuffix(req.URL.String(), "smarfs") {
-				assert.Equal(t, req.Header.Get("X-Test-Val"), "should-be-copied-to-redirect")
 				return true, &http.Response{StatusCode: http.StatusOK}, nil
 			}

@@ -163,22 +113,17 @@ func (suite *HTTPWrapperUnitSuite) TestHTTPWrapper_Request_redirect() {
 	hw := NewHTTPWrapper(count.New(), appendMiddleware(&mwResp))

-	resp, err := hw.Request(
-		ctx,
-		http.MethodGet,
-		"https://graph.microsoft.com/fnords/beaux/regard",
-		nil,
-		map[string]string{"X-Test-Val": "should-be-copied-to-redirect"},
-		false)
+	resp, err := hw.Request(ctx, http.MethodGet, url, nil, nil)
 	require.NoError(t, err, clues.ToCore(err))

 	defer resp.Body.Close()

 	require.NotNil(t, resp)
-	// require.Equal(t, 1, calledCorrectly, "test server was called with expected path")
 	require.Equal(t, http.StatusOK, resp.StatusCode)
 }

-func (suite *HTTPWrapperUnitSuite) TestHTTPWrapper_Request_http2StreamErrorRetries() {
+func (suite *HTTPWrapperUnitSuite) TestNewHTTPWrapper_http2StreamErrorRetries() {
 	var (
 		url       = "https://graph.microsoft.com/fnords/beaux/regard"
 		streamErr = http2.StreamError{
@@ -243,7 +188,7 @@ func (suite *HTTPWrapperUnitSuite) TestHTTPWrapper_Request_http2StreamErrorRetri
 			// the test middleware.
 			hw.retryDelay = 0

-			_, err := hw.Request(ctx, http.MethodGet, url, nil, nil, false)
+			_, err := hw.Request(ctx, http.MethodGet, url, nil, nil)
 			require.ErrorAs(t, err, &http2.StreamError{}, clues.ToCore(err))
 			require.Equal(t, test.expectRetries, tries, "count of retries")
 		})


@@ -5,13 +5,7 @@ import (
 	"net/http"
 	"net/http/httputil"
 	"os"
-	"strings"
-	"time"

-	"github.com/alcionai/clues"
-
-	"github.com/alcionai/corso/src/internal/common/jwt"
-	"github.com/alcionai/corso/src/internal/common/pii"
 	"github.com/alcionai/corso/src/pkg/logger"
 )

@@ -31,7 +25,7 @@ func shouldLogRespBody(resp *http.Response) bool {
 		resp.StatusCode > 399
 }

-func logResp(ctx context.Context, resp *http.Response, req *http.Request) {
+func logResp(ctx context.Context, resp *http.Response) {
 	var (
 		log       = logger.Ctx(ctx)
 		respClass = resp.StatusCode / 100
@@ -48,25 +42,6 @@ func logResp(ctx context.Context, resp *http.Response, req *http.Request) {
 		return
 	}
// Log bearer token iat and exp claims if we hit 401s. This is purely for
// debugging purposes and will be removed in the future.
if resp.StatusCode == http.StatusUnauthorized {
errs := []any{"graph api error: " + resp.Status}
// As per MSFT docs, the token may have a special format and may not always
// validate as a JWT. Hence log token lifetime in a best effort manner only.
iat, exp, err := getTokenLifetime(ctx, req)
if err != nil {
errs = append(errs, " getting token lifetime: ", err)
}
log.With("response", getRespDump(ctx, resp, logBody)).
With("token issued at", iat, "token expires at", exp).
Error(errs...)
return
}
 	// Log api calls according to api debugging configurations.
 	switch respClass {
 	case 2:
@@ -94,51 +69,3 @@ func getRespDump(ctx context.Context, resp *http.Response, getBody bool) string
 	return string(respDump)
 }
func getReqCtx(req *http.Request) context.Context {
if req == nil {
return context.Background()
}
var logURL pii.SafeURL
if req.URL != nil {
logURL = LoggableURL(req.URL.String())
}
return clues.AddTraceName(
req.Context(),
"graph-http-middleware",
"method", req.Method,
"url", logURL,
"request_content_len", req.ContentLength)
}
// GetTokenLifetime extracts the JWT token embedded in the request and returns
// the token's issue and expiration times. The token is expected to be in the
// "Authorization" header, with a "Bearer " prefix. If the token is not present
// or is malformed, an error is returned.
func getTokenLifetime(
ctx context.Context,
req *http.Request,
) (time.Time, time.Time, error) {
if req == nil {
return time.Time{}, time.Time{}, clues.New("nil request")
}
// Don't throw an error if auth header is absent. This is to prevent
// unnecessary noise in the logs for requests served by the http requestor
// client. These requests may be preauthenticated and may not carry auth headers.
rawToken := req.Header.Get("Authorization")
if len(rawToken) == 0 {
return time.Time{}, time.Time{}, nil
}
// Strip the "Bearer " prefix from the token. This prefix is guaranteed to be
// present as per msft docs. But even if it's not, the jwt lib will handle
// malformed tokens gracefully and return an error.
rawToken = strings.TrimPrefix(rawToken, "Bearer ")
iat, exp, err := jwt.GetJWTLifetime(ctx, rawToken)
return iat, exp, clues.Stack(err).OrNil()
}


@@ -125,12 +125,15 @@ func (mw *LoggingMiddleware) Intercept(
 	}

 	ctx := clues.Add(
-		getReqCtx(req),
+		req.Context(),
+		"method", req.Method,
+		"url", LoggableURL(req.URL.String()),
+		"request_content_len", req.ContentLength,
 		"resp_status", resp.Status,
 		"resp_status_code", resp.StatusCode,
 		"resp_content_len", resp.ContentLength)

-	logResp(ctx, resp, req)
+	logResp(ctx, resp)

 	return resp, err
 }

@@ -153,7 +156,7 @@ func (mw RetryMiddleware) Intercept(
 	middlewareIndex int,
 	req *http.Request,
 ) (*http.Response, error) {
-	ctx := getReqCtx(req)
+	ctx := req.Context()
 	resp, err := pipeline.Next(req, middlewareIndex)

 	retriable := IsErrTimeout(err) ||
@@ -232,11 +235,7 @@ func (mw RetryMiddleware) retryRequest(
 	case <-ctx.Done():
 		// Don't retry if the context is marked as done, it will just error out
 		// when we attempt to send the retry anyway.
-		err := clues.StackWC(ctx, ctx.Err())
-		logger.CtxErr(ctx, err).Info("request context marked done")
-
-		return resp, err
+		return resp, clues.StackWC(ctx, ctx.Err())
 	case <-timer.C:
 	}

@@ -250,9 +249,7 @@ func (mw RetryMiddleware) retryRequest(
 			return resp, Wrap(ctx, err, "resetting request body reader")
 		}
 	} else {
-		logger.
-			Ctx(getReqCtx(req)).
-			Error("body is not an io.Seeker: unable to reset request body")
+		logger.Ctx(ctx).Error("body is not an io.Seeker: unable to reset request body")
 	}
 }

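The retryRequest hunk above keeps the timer/context select that bounds each retry wait. Isolated as a runnable sketch (the helper name and durations are arbitrary):

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// waitBeforeRetry sleeps for delay, or aborts early if ctx is done --
// the same select shape as the retryRequest hunk above.
func waitBeforeRetry(ctx context.Context, delay time.Duration) error {
	timer := time.NewTimer(delay)
	defer timer.Stop()

	select {
	case <-ctx.Done():
		// Don't retry: the caller's context was canceled or timed out,
		// so resending the request would just error out anyway.
		return ctx.Err()
	case <-timer.C:
		return nil
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 50*time.Millisecond)
	defer cancel()

	// The 200ms backoff loses to the 50ms context deadline.
	if err := waitBeforeRetry(ctx, 200*time.Millisecond); err != nil {
		fmt.Println("aborting retries:", err)
	}
}
```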

@@ -505,95 +505,3 @@ func (suite *MiddlewareUnitSuite) TestLimiterConsumption() {
 		})
 	}
 }
} }
const (
// Raw test token valid for 100 years.
rawToken = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9." +
"eyJuYmYiOiIxNjkxODE5NTc5IiwiZXhwIjoiMzk0NTUyOTE3OSIsImVuZHBvaW50dXJsTGVuZ3RoIjoiMTYw" +
"IiwiaXNsb29wYmFjayI6IlRydWUiLCJ2ZXIiOiJoYXNoZWRwcm9vZnRva2VuIiwicm9sZXMiOiJhbGxmaWxl" +
"cy53cml0ZSBhbGxzaXRlcy5mdWxsY29udHJvbCBhbGxwcm9maWxlcy5yZWFkIiwidHQiOiIxIiwiYWxnIjoi" +
"SFMyNTYifQ" +
".signature"
)
// Tests getTokenLifetime
func (suite *MiddlewareUnitSuite) TestGetTokenLifetime() {
table := []struct {
name string
request *http.Request
expectErr assert.ErrorAssertionFunc
}{
{
name: "nil request",
request: nil,
expectErr: assert.Error,
},
// Test that we don't throw an error if auth header is absent.
// This is to prevent unnecessary noise in logs for requestor http client.
{
name: "no authorization header",
request: &http.Request{
Header: http.Header{},
},
expectErr: assert.NoError,
},
{
name: "well formed auth header with token",
request: &http.Request{
Header: http.Header{
"Authorization": []string{"Bearer " + rawToken},
},
},
expectErr: assert.NoError,
},
{
name: "Missing Bearer prefix but valid token",
request: &http.Request{
Header: http.Header{
"Authorization": []string{rawToken},
},
},
expectErr: assert.NoError,
},
{
name: "invalid token",
request: &http.Request{
Header: http.Header{
"Authorization": []string{"Bearer " + "invalid"},
},
},
expectErr: assert.Error,
},
{
name: "valid prefix but empty token",
request: &http.Request{
Header: http.Header{
"Authorization": []string{"Bearer "},
},
},
expectErr: assert.Error,
},
{
name: "Invalid prefix but valid token",
request: &http.Request{
Header: http.Header{
"Authorization": []string{"Bearer" + rawToken},
},
},
expectErr: assert.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
// iat, exp specific tests are in jwt package.
_, _, err := getTokenLifetime(ctx, test.request)
test.expectErr(t, err, clues.ToCore(err))
})
}
}


@@ -6,9 +6,11 @@ import (
 	"net/http"
 	"time"

+	"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
 	"github.com/alcionai/clues"
 	abstractions "github.com/microsoft/kiota-abstractions-go"
 	"github.com/microsoft/kiota-abstractions-go/serialization"
+	kauth "github.com/microsoft/kiota-authentication-azure-go"
 	khttp "github.com/microsoft/kiota-http-go"
 	msgraphsdkgo "github.com/microsoftgraph/msgraph-sdk-go"
 	msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core"
@@ -125,6 +127,23 @@ func CreateAdapter(
 	return wrapAdapter(adpt, cc), nil
 }

+func GetAuth(tenant string, client string, secret string) (*kauth.AzureIdentityAuthenticationProvider, error) {
+	// Client Provider: Uses Secret for access to tenant-level data
+	cred, err := azidentity.NewClientSecretCredential(tenant, client, secret, nil)
+	if err != nil {
+		return nil, clues.Wrap(err, "creating m365 client identity")
+	}
+
+	auth, err := kauth.NewAzureIdentityAuthenticationProviderWithScopes(
+		cred,
+		[]string{"https://graph.microsoft.com/.default"})
+	if err != nil {
+		return nil, clues.Wrap(err, "creating azure authentication")
+	}
+
+	return auth, nil
+}
+
 // KiotaHTTPClient creates a httpClient with middlewares and timeout configured
 // for use in the graph adapter.
 //
@@ -181,11 +200,6 @@ type clientConfig struct {
 	maxRetries int
 	// The minimum delay in seconds between retries
 	minDelay time.Duration

-	// requesterAuth sets the authorization step for requester-compliant clients.
-	// if non-nil, it will ensure calls are authorized before querying.
-	// does not get consumed by the standard graph client, which already comes
-	// packaged with an auth protocol.
-	requesterAuth authorizer

 	appendMiddleware []khttp.Middleware
 }

@@ -273,12 +287,6 @@ func MaxConnectionRetries(max int) Option {
 	}
 }

-func AuthorizeRequester(a authorizer) Option {
-	return func(c *clientConfig) {
-		c.requesterAuth = a
-	}
-}
-
 // ---------------------------------------------------------------------------
 // Middleware Control
 // ---------------------------------------------------------------------------


@@ -77,8 +77,7 @@ func (iw *largeItemWriter) Write(p []byte) (int, error) {
 		http.MethodPut,
 		iw.url,
 		bytes.NewReader(p),
-		headers,
-		false)
+		headers)
 	if err != nil {
 		return 0, clues.Wrap(err, "uploading item").With(
 			"upload_id", iw.parentID,


@@ -55,8 +55,7 @@ func makeAC(
 	cli, err := api.NewClient(
 		creds,
 		control.DefaultOptions(),
-		count.New(),
-		opts...)
+		count.New())
 	if err != nil {
 		return api.Client{}, clues.WrapWC(ctx, err, "constructing api client")
 	}


@@ -76,11 +76,20 @@ func (suite *siteIntegrationSuite) TestSites_GetByID() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	site, err := suite.cli.SiteByID(ctx, suite.m365.Site.ID)
-	require.NoError(t, err, clues.ToCore(err))
-	assert.NotEmpty(t, site.WebURL)
-	assert.NotEmpty(t, site.ID)
-	assert.NotEmpty(t, site.OwnerType)
+	sites, err := suite.cli.Sites(ctx, fault.New(true))
+	assert.NoError(t, err, clues.ToCore(err))
+	assert.NotEmpty(t, sites)
+
+	for _, s := range sites {
+		suite.Run("site_"+s.ID, func() {
+			t := suite.T()
+
+			site, err := suite.cli.SiteByID(ctx, s.ID)
+			require.NoError(t, err, clues.ToCore(err))
+			assert.NotEmpty(t, site.WebURL)
+			assert.NotEmpty(t, site.ID)
+			assert.NotEmpty(t, site.OwnerType)
+		})
+	}
 }

 // ---------------------------------------------------------------------------

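The rewritten test fans out into one named subtest per site via suite.Run. A minimal, self-contained illustration of that testify pattern (the suite and item names are placeholders):

```go
package example

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/suite"
)

type ItemsSuite struct {
	suite.Suite
}

func TestItemsSuite(t *testing.T) {
	suite.Run(t, new(ItemsSuite))
}

// One parent test, one named subtest per item: a failure in one subtest
// doesn't halt the others, and each is reported separately.
func (s *ItemsSuite) TestEachItem() {
	for _, id := range []string{"alpha", "beta"} {
		s.Run("item_"+id, func() {
			assert.NotEmpty(s.T(), id)
		})
	}
}
```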

@@ -21,8 +21,7 @@ application to connect to your *M365 tenant* and transfer data during backup and

 ## Corso concepts {#corso-concepts}

 * **Repository** refers to the storage location where Corso securely and efficiently stores encrypted *backups* of your
-  *M365 Service*'s data. See [Repositories](../repos) for more information.
-* **Backup** is a copy of a resource of your *M365 Service*'s data to be used for restores in case of deletion, loss,
-  or corruption of the original data. Corso performs backups incrementally, and each backup only captures data that has
-  changed between backup iterations.
+  *M365 Services* data. See [Repositories](../repos) for more information.
+* **Backup** is a copy of your *M365 Services* data to be used for restores in case of deletion, loss, or corruption of the
+  original data. Corso performs backups incrementally, and each backup only captures data that has changed between backup iterations.


@@ -79,23 +79,12 @@ const config = {
          srcDark: 'img/corso_horizontal_logo_white.svg',
        },
        items: [
-         {
-           type: 'doc',
-           docId: 'quickstart',
-           position: 'left',
-           label: 'Quick Start',
-         },
          {
            type: 'doc',
            docId: 'intro',
            position: 'left',
            label: 'Docs',
          },
-         {
-           href: 'https://discord.gg/63DTTSnuhT',
-           label: 'Community',
-           position: 'left',
-         },
          {
            to: '/blog',
            label: 'Blog',
@@ -117,12 +106,30 @@ const config = {
        },
        links: [
          {
-           title: 'Open Source',
+           title: 'Resources',
            items: [
              {
                label: 'Docs',
                to: '/docs/intro',
              },
+           ],
+         },
+         {
+           title: 'Community',
+           items: [
+             {
+               label: 'Discord',
+               href: 'https://discord.gg/63DTTSnuhT',
+             },
+             {
+               label: 'Twitter',
+               href: 'https://twitter.com/CorsoBackup',
+             },
+           ],
+         },
+         {
+           title: 'More',
+           items: [
              {
                label: 'Blog',
                to: '/blog',
@@ -131,26 +138,6 @@ const config = {
                label: 'GitHub',
                href: 'https://github.com/alcionai/corso',
              },
-             {
-               label: 'Corso Discord',
-               href: 'https://discord.gg/63DTTSnuhT',
-             },
-           ],
-         },
-         {
-           title: ' ',
-         },
-         {
-           title: 'Alcion, Powered by Corso',
-           items: [
-             {
-               label: 'Backup as a Service',
-               href: 'https://www.alcion.ai',
-             },
-             {
-               label: 'Alcion Discord',
-               href: 'https://www.alcion.ai/discord',
-             },
            ],
          },
        ],

website/package-lock.json (generated; 1,811 lines changed)

File diff suppressed because it is too large.


@@ -15,7 +15,7 @@
   },
   "dependencies": {
     "@docusaurus/core": "3.1.1",
-    "@docusaurus/plugin-google-gtag": "^3.5.1",
+    "@docusaurus/plugin-google-gtag": "^3.1.1",
     "@docusaurus/preset-classic": "3.1.1",
     "@loadable/component": "^5.16.3",
     "@mdx-js/react": "^3.0.0",
@@ -26,17 +26,17 @@
     "feather-icons": "^4.29.1",
     "jarallax": "^2.2.0",
     "mdx-mermaid": "^2.0.0",
-    "mermaid": "^10.9.0",
+    "mermaid": "^10.8.0",
     "prism-react-renderer": "^2.1.0",
     "react": "^18.2.0",
-    "react-dom": "^18.3.0",
-    "sass": "^1.79.1",
+    "react-dom": "^18.2.0",
+    "sass": "^1.70.0",
     "tiny-slider": "^2.9.4",
     "tw-elements": "1.0.0-alpha13",
     "wow.js": "^1.2.2"
   },
   "devDependencies": {
-    "@docusaurus/module-type-aliases": "3.5.1",
+    "@docusaurus/module-type-aliases": "3.1.1",
     "@iconify/react": "^4.1.1",
     "autoprefixer": "^10.4.17",
     "postcss": "^8.4.33",


@@ -33,7 +33,7 @@ export default function CTA() {
               <br /> Microsoft 365 Data!
             </h3>
             <h6 className="text-white/50 text-lg font-semibold">
-              Corso (Free and Open Source) or <br/> Alcion (Managed Backup as a Service)
+              Corso is Free and Open Source
             </h6>
           </div>
         </div>
@@ -41,24 +41,15 @@ export default function CTA() {
         <div className="mt-8">
           <div className="section-title text-md-start">
             <p className="text-white/50 max-w-xl mx-auto mb-2">
-              Whether you want to self-host or use a managed service, we have you covered!
+              Follow our quick-start guide to start protecting your
+              business-critical Microsoft 365 data in just a few
+              minutes.
             </p>
-            <a
-              href="https://www.alcion.ai/"
-              className="!text-white !no-underline flex flex-row items-center !hover:text-white"
-            >
-              Try Alcion{" "}
-              <Icon
-                icon="uim:angle-right-b"
-                className="align-middle"
-              />
-            </a>
-            <p></p>
             <a
               href="docs/quickstart/"
               className="!text-white !no-underline flex flex-row items-center !hover:text-white"
             >
-              Corso Quickstart{" "}
+              Get Started{" "}
              <Icon
                icon="uim:angle-right-b"
                className="align-middle"


@@ -34,17 +34,10 @@ export default function Hero() {
         <div className="mt-12 !z-10 mb-6 flex flex-col 2xs:flex-row items-center justify-center 2xs:space-y-0 space-y-4 2xs:space-x-4">
           <a
-            href="https://github.com/alcionai/corso/releases" target="_blank"
+            href="../docs/quickstart/"
             className="text-2xl !z-10 !no-underline hover:text-white py-2 px-6 font-bold btn bg-indigo-800 hover:bg-indigo-900 border-indigo-800 hover:border-indigo-900 text-white rounded-md"
           >
-            Download
-          </a>
-          <a
-            href="https://www.alcion.ai/"
-            className="text-2xl !z-10 !no-underline hover:text-white py-2 px-6 font-bold btn bg-indigo-200 hover:bg-indigo-400 border-indigo-600 hover:border-indigo-800 text-blue rounded-md"
-          >
-            Try Alcion (Corso SaaS)
+            Quick Start
           </a>
         </div>


@@ -213,9 +213,9 @@ export default function KeyLoveFAQ() {
               Community
             </h3>
             <p className="text-slate-400">
-              The Corso community provides a venue for Microsoft 365 admins to share and
+              The Corso community provides a venue for M365 admins to share and
               learn about the importance of data protection as well as best
-              practices around Microsoft 365 secure configuration and compliance
+              practices around M365 secure configuration and compliance
               management.
             </p>
             <ul className="list-none text-slate-400 mt-4">
@@ -279,7 +279,8 @@ export default function KeyLoveFAQ() {
             </h3>
             <p className="text-slate-400">
               Corso provides secure data backup that protects customers against
-              accidental data loss and service provider downtime.
+              accidental data loss, service provider downtime, and malicious
+              threats including ransomware attacks.
             </p>
             <ul className="list-none text-slate-400 mt-4">
               <li className="mb-1 flex">
@@ -330,7 +331,7 @@ export default function KeyLoveFAQ() {
               Robust Backups
             </h3>
             <p className="text-slate-400">
-              Corso, purpose-built for Microsoft 365 protection, provides easy-to-use
+              Corso, purpose-built for M365 protection, provides easy-to-use
               comprehensive backup and restore workflows that reduces backup
               time, improve time-to-recovery, reduce admin overhead, and replace
               unreliable scripts or workarounds.
@@ -341,7 +342,7 @@ export default function KeyLoveFAQ() {
                   className="text-indigo-600 text-xl mr-2"
                   icon="material-symbols:check-circle-outline"
                 />{" "}
-                Constantly updated Microsoft 365 Graph Data engine
+                Constantly updated M365 Graph Data engine
               </li>
               <li className="mb-1 flex">
                 <Icon
@@ -461,7 +462,7 @@ export default function KeyLoveFAQ() {
         <div className="md:col-span-6">
           <div className="accordion space-y-3" id="accordionExample">
             <div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
               <h2
                 className="accordion-header mb-0 !cursor-pointer font-semibold"
                 id="headingOne"
@@ -474,7 +475,7 @@ export default function KeyLoveFAQ() {
                 aria-expanded="false"
                 aria-controls="collapseOne"
               >
-                <span>How do I choose between Corso and Alcion, powered by Corso?</span>
+                <span>What platforms does Corso run on?</span>
               </button>
             </h2>
             <div
@@ -485,7 +486,8 @@ export default function KeyLoveFAQ() {
             >
               <div className="accordion-body p-5">
                 <p className="text-slate-400 !visible dark:text-gray-400">
-                  Corso is a good fit for basic backup while Alcion is a better fit if you need increased reliability, security, and support.
+                  Corso has both native binaries and container images for
+                  Windows, Linux, and macOS.
                 </p>
               </div>
             </div>
@@ -503,7 +505,9 @@ export default function KeyLoveFAQ() {
                 aria-expanded="false"
                 aria-controls="collapse2"
               >
-                <span>What platforms does Corso run on?</span>
+                <span>
+                  What Microsoft 365 services can I backup using Corso?
+                </span>
               </button>
             </h2>
             <div
@@ -514,8 +518,8 @@ export default function KeyLoveFAQ() {
             >
               <div className="accordion-body p-5">
                 <p className="text-slate-400 !visible dark:text-gray-400">
-                  Corso has both native binaries and container images for
-                  Windows, Linux, and macOS.
+                  Corso currently supports OneDrive, Exchange, SharePoint,
+                  and Teams.
                 </p>
               </div>
             </div>
@@ -533,9 +537,7 @@ export default function KeyLoveFAQ() {
                 aria-expanded="false"
                 aria-controls="collapse3"
               >
-                <span>
-                  What Microsoft 365 services can I backup using Corso?
-                </span>
+                <span>What object storage does Corso support?</span>
               </button>
             </h2>
             <div
@@ -543,36 +545,6 @@
               className="accordion-collapse collapse"
               aria-labelledby="heading3"
               data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">
<p className="text-slate-400 !visible dark:text-gray-400">
Corso currently supports OneDrive, Exchange, SharePoint,
and Teams.
</p>
</div>
</div>
</div>
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 !cursor-pointer font-semibold"
id="heading4"
>
<button
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
type="button"
data-bs-toggle="collapse"
data-bs-target="#collapse4"
aria-expanded="false"
aria-controls="collapse4"
>
<span>What object storage does Corso support?</span>
</button>
</h2>
<div
id="collapse4"
className="accordion-collapse collapse"
aria-labelledby="heading4"
data-bs-parent="#accordionExample"
> >
<div className="accordion-body p-5"> <div className="accordion-body p-5">
<p className="text-slate-400 dark:text-gray-400 !visible"> <p className="text-slate-400 dark:text-gray-400 !visible">
@ -587,23 +559,23 @@ export default function KeyLoveFAQ() {
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden"> <div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2 <h2
className="accordion-header mb-0 font-semibold" className="accordion-header mb-0 font-semibold"
id="heading5" id="heading4"
> >
<button <button
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left" className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
type="button" type="button"
data-bs-toggle="collapse" data-bs-toggle="collapse"
data-bs-target="#collapse5" data-bs-target="#collapse4"
aria-expanded="false" aria-expanded="false"
aria-controls="collapse5" aria-controls="collapse4"
> >
<span>How can I get help for Corso?</span> <span>How can I get help for Corso?</span>
</button> </button>
</h2> </h2>
<div <div
id="collapse5" id="collapse4"
className="accordion-collapse collapse" className="accordion-collapse collapse"
aria-labelledby="heading5" aria-labelledby="heading4"
data-bs-parent="#accordionExample" data-bs-parent="#accordionExample"
> >
<div className="accordion-body p-5"> <div className="accordion-body p-5">
@@ -633,23 +605,23 @@ export default function KeyLoveFAQ() {
             <div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
               <h2
                 className="accordion-header mb-0 !cursor-pointer font-semibold"
-                id="heading6"
+                id="heading5"
               >
                 <button
                   className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
                   type="button"
                   data-bs-toggle="collapse"
-                  data-bs-target="#collapse6"
+                  data-bs-target="#collapse5"
                   aria-expanded="false"
-                  aria-controls="collapse6"
+                  aria-controls="collapse5"
                 >
                   <span>What is Corso's open-source license?</span>
                 </button>
               </h2>
               <div
-                id="collapse6"
+                id="collapse5"
                 className="accordion-collapse collapse"
-                aria-labelledby="heading6"
+                aria-labelledby="heading5"
                 data-bs-parent="#accordionExample"
               >
                 <div className="accordion-body p-5">
@@ -663,23 +635,23 @@ export default function KeyLoveFAQ() {
             <div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
               <h2
                 className="accordion-header mb-0 !cursor-pointer font-semibold"
-                id="heading7"
+                id="heading6"
               >
                 <button
                   className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
                   type="button"
                   data-bs-toggle="collapse"
-                  data-bs-target="#collapse7"
+                  data-bs-target="#collapse6"
                   aria-expanded="false"
-                  aria-controls="collapse7"
+                  aria-controls="collapse6"
                 >
                   <span>How do I request a new feature?</span>
                 </button>
               </h2>
               <div
-                id="collapse7"
+                id="collapse6"
                 className="accordion-collapse collapse"
-                aria-labelledby="heading7"
+                aria-labelledby="heading6"
                 data-bs-parent="#accordionExample"
               >
                 <div className="accordion-body p-5">

website/static/img/corso_horizontal_logo.svg (96 lines changed; mode: normal file → executable file)

File diff suppressed because one or more lines are too long. Image size: 23 KiB before, 3.5 KiB after.

website/static/img/corso_horizontal_logo_white.svg (96 lines changed; mode: normal file → executable file)

@@ -1,95 +1 @@
The 95-line Adobe Illustrator SVG source (8.2 KiB) was replaced with a single-line minified SVG (3.5 KiB); the raw path data is omitted here.