Compare commits


2 Commits

Author SHA1 Message Date
aviator-app[bot]
b043ec667b
Merge branch 'main' into dependabot/go_modules/src/github.com/jhillyerd/enmime-1.2.0 2024-02-12 18:16:29 +00:00
dependabot[bot]
2eb304337d
⬆️ Bump github.com/jhillyerd/enmime from 1.1.0 to 1.2.0 in /src
Bumps [github.com/jhillyerd/enmime](https://github.com/jhillyerd/enmime) from 1.1.0 to 1.2.0.
- [Release notes](https://github.com/jhillyerd/enmime/releases)
- [Commits](https://github.com/jhillyerd/enmime/compare/v1.1.0...v1.2.0)

---
updated-dependencies:
- dependency-name: github.com/jhillyerd/enmime
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-02-12 05:10:10 +00:00
85 changed files with 560 additions and 4390 deletions

View File

@@ -1,5 +1,4 @@
name: Backup Restore Test
description: Run various backup/restore/export tests for a service.
inputs:
service:

View File

@@ -1,5 +1,4 @@
name: Setup and Cache Golang
description: Build golang binaries for later use in CI.
# clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml
#

View File

@@ -1,5 +1,4 @@
name: Publish Binary
description: Publish binary artifacts.
inputs:
version:

View File

@@ -1,5 +1,4 @@
name: Publish Website
description: Publish website artifacts.
inputs:
aws-iam-role:

View File

@@ -1,5 +1,4 @@
name: Purge M365 User Data
description: Deletes M365 data generated during CI tests.
# Hard deletion of an m365 user's data. Our CI processes create a lot
# of data churn (creation and immediate deletion) of files, the likes
@@ -31,19 +30,12 @@ inputs:
description: Secret value of for AZURE_CLIENT_ID
azure-client-secret:
description: Secret value of for AZURE_CLIENT_SECRET
azure-pnp-client-id:
description: Secret value of AZURE_PNP_CLIENT_ID
azure-pnp-client-cert:
description: Base64 encoded private certificate for the azure-pnp-client-id (Secret value of AZURE_PNP_CLIENT_CERT)
azure-tenant-id:
description: Secret value of AZURE_TENANT_ID
description: Secret value of for AZURE_TENANT_ID
m365-admin-user:
description: Secret value of for M365_TENANT_ADMIN_USER
m365-admin-password:
description: Secret value of for M365_TENANT_ADMIN_PASSWORD
tenant-domain:
description: The domain of the tenant (ex. 10rqc2.onmicrosoft.com)
required: true
runs:
using: composite
@@ -61,13 +53,7 @@ runs:
AZURE_CLIENT_ID: ${{ inputs.azure-client-id }}
AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
run: ./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
# TODO(ashmrtn): Re-enable when we figure out errors we're seeing with Get-Mailbox call.
#- name: Reset retention for all mailboxes to 0
@@ -88,16 +74,10 @@ runs:
shell: pwsh
working-directory: ./src/cmd/purge/scripts
env:
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
################################################################################################################
# Sharepoint
@@ -108,14 +88,6 @@ runs:
shell: pwsh
working-directory: ./src/cmd/purge/scripts
env:
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
run: ./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}
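One side of the hunks above wraps each purge script in a bounded three-attempt retry loop, while the other side invokes the script once. For reference, a minimal Go sketch of the same bounded-retry pattern (the function names here are hypothetical, not part of the diff):

package main

import (
	"errors"
	"fmt"
)

// runWithRetries mirrors the retry loop shown in the diff: call fn up to
// maxAttempts times and stop at the first success.
func runWithRetries(maxAttempts int, fn func() error) error {
	var err error

	for attempt := 1; attempt <= maxAttempts; attempt++ {
		if err = fn(); err == nil {
			return nil
		}

		fmt.Printf("attempt %d failed: %v\n", attempt, err)
	}

	return fmt.Errorf("all %d attempts failed: %w", maxAttempts, err)
}

func main() {
	calls := 0

	// Simulate a purge that succeeds on the third try.
	err := runWithRetries(3, func() error {
		calls++
		if calls < 3 {
			return errors.New("transient failure")
		}
		return nil
	})

	fmt.Println("calls:", calls, "err:", err)
}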

View File

@@ -1,5 +1,4 @@
name: Send a message to Teams
description: Send messages to communication apps.
inputs:
msg:

View File

@@ -1,5 +1,4 @@
name: Lint Website
description: Lint website content.
inputs:
version:

View File

@@ -40,5 +40,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Publishing Binary"
msg: "[FAILED] Publishing Binary"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@@ -463,7 +463,7 @@ jobs:
go-version-file: src/go.mod
- name: Go Lint
uses: golangci/golangci-lint-action@v4
uses: golangci/golangci-lint-action@v3
with:
# Keep pinned to a version as sometimes updates will add new lint
# failures in unchanged code.

View File

@@ -12,7 +12,7 @@ jobs:
continue-on-error: true
strategy:
matrix:
user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, ""]
user: [ CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, '' ]
steps:
- uses: actions/checkout@v4
@@ -33,15 +33,12 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] ${{ vars[matrix.user] }} CI Cleanup"
msg: "[FAILED] ${{ vars[matrix.user] }} CI Cleanup"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
Test-Site-Data-Cleanup:
@@ -50,7 +47,7 @@ jobs:
continue-on-error: true
strategy:
matrix:
site: [CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL]
site: [ CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL ]
steps:
- uses: actions/checkout@v4
@@ -73,13 +70,10 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] ${{ vars[matrix.site] }} CI Cleanup"
msg: "[FAILED] ${{ vars[matrix.site] }} CI Cleanup"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@@ -155,6 +155,3 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}

View File

@@ -6,7 +6,7 @@ on:
workflow_dispatch:
inputs:
user:
description: "User to run longevity test on"
description: 'User to run longevity test on'
permissions:
# required to retrieve AWS credentials
@@ -23,7 +23,7 @@ jobs:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
Longevity-Tests:
needs: [SetM365App]
needs: [ SetM365App ]
environment: Testing
runs-on: ubuntu-latest
env:
@@ -37,7 +37,7 @@ jobs:
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-longevity.log
RESTORE_DEST_PFX: Corso_Test_Longevity_
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
PREFIX: "longevity"
PREFIX: 'longevity'
# Options for retention.
RETENTION_MODE: GOVERNANCE
@@ -46,7 +46,7 @@ jobs:
defaults:
run:
working-directory: src
############################################################################
# setup
steps:
@@ -78,7 +78,7 @@ jobs:
- run: go build -o corso
timeout-minutes: 10
- run: mkdir ${CORSO_LOG_DIR}
# Use shorter-lived credentials obtained from assume-role since these
@@ -163,7 +163,7 @@ jobs:
data=$( echo $resultjson | jq -r '.[0] | .id' )
echo result=$data >> $GITHUB_OUTPUT
##########################################################################
# Onedrive
@@ -328,7 +328,7 @@ jobs:
--hide-progress \
--force \
--json \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt
- name: Maintenance test Weekly
id: maintenance-test-weekly
@@ -392,5 +392,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Longevity Test"
msg: "[FAILED] Longevity Test"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@@ -48,7 +48,7 @@ jobs:
# ----------------------------------------------------------------------------------------------------
Test-Suite-Trusted:
needs: [Checkout, SetM365App]
needs: [ Checkout, SetM365App]
environment: Testing
runs-on: ubuntu-latest
defaults:
@@ -100,9 +100,9 @@ jobs:
-timeout 2h \
./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests
##########################################################################################################################################
##########################################################################################################################################
# Logging & Notifications
# Logging & Notifications
# Upload the original go test output as an artifact for later review.
- name: Upload test log
@@ -118,5 +118,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[COROS FAILED] Nightly Checks"
msg: "[FAILED] Nightly Checks"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@@ -6,7 +6,7 @@ on:
workflow_dispatch:
inputs:
user:
description: "User to run sanity test on"
description: 'User to run sanity test on'
permissions:
# required to retrieve AWS credentials
@@ -23,7 +23,7 @@ jobs:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
Sanity-Tests:
needs: [SetM365App]
needs: [ SetM365App ]
environment: Testing
runs-on: ubuntu-latest
env:
@@ -43,11 +43,12 @@ jobs:
defaults:
run:
working-directory: src
##########################################################################################################################################
##########################################################################################################################################
# setup
# setup
steps:
- uses: actions/checkout@v4
- name: Setup Golang with cache
@@ -63,9 +64,9 @@ jobs:
- run: mkdir ${CORSO_LOG_DIR}
##########################################################################################################################################
##########################################################################################################################################
# Pre-Run cleanup
# Pre-Run cleanup
# unlike CI tests, sanity tests are not expected to run concurrently.
# however, the sanity yaml concurrency is set to a maximum of 1 run, preferring
@@ -90,9 +91,6 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Purge CI-Produced Folders for Sites
timeout-minutes: 30
@@ -101,20 +99,17 @@ jobs:
with:
site: ${{ vars.CORSO_M365_TEST_SITE_URL }}
folder-prefix: ${{ env.RESTORE_DEST_PFX }}
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
older-than: ${{ env.NOW }}
azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
##########################################################################################################################################
##########################################################################################################################################
# Repository commands
# Repository commands
- name: Version Test
timeout-minutes: 10
@@ -174,9 +169,9 @@ jobs:
--mode complete \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-maintenance.log
##########################################################################################################################################
##########################################################################################################################################
# Exchange
# Exchange
# generate new entries to roll into the next load test
# only runs if the test was successful
@@ -198,8 +193,8 @@ jobs:
service: exchange
kind: first-backup
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@@ -211,8 +206,8 @@ jobs:
service: exchange
kind: incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@@ -225,8 +220,8 @@ jobs:
service: exchange
kind: non-delta
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@@ -239,15 +234,16 @@ jobs:
service: exchange
kind: non-delta-incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
# Onedrive
##########################################################################################################################################
# Onedrive
# generate new entries for test
- name: OneDrive - Create new data
@@ -274,8 +270,8 @@ jobs:
service: onedrive
kind: first-backup
backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@@ -299,14 +295,14 @@ jobs:
service: onedrive
kind: incremental
backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
##########################################################################################################################################
# Sharepoint Library
# Sharepoint Library
# generate new entries for test
- name: SharePoint - Create new data
@@ -334,8 +330,8 @@ jobs:
service: sharepoint
kind: first-backup
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: libraries
@@ -361,15 +357,15 @@ jobs:
service: sharepoint
kind: incremental
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: libraries
##########################################################################################################################################
##########################################################################################################################################
# Sharepoint Lists
# Sharepoint Lists
# generate new entries for test
# The `awk | tr | sed` command chain is used to get a comma separated list of SharePoint list names.
@@ -422,7 +418,7 @@ jobs:
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint lists \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
@@ -458,9 +454,9 @@ jobs:
category: lists
on-collision: copy
##########################################################################################################################################
##########################################################################################################################################
# Groups and Teams
# Groups and Teams
# generate new entries for test
- name: Groups - Create new data
@@ -487,8 +483,8 @@ jobs:
with:
service: groups
kind: first-backup
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@@ -512,15 +508,15 @@ jobs:
with:
service: groups
kind: incremental
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
restore-args: '--site "${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }}" --folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
##########################################################################################################################################
# Logging & Notifications
# Logging & Notifications
# Upload the original go test output as an artifact for later review.
- name: Upload test log
@@ -536,5 +532,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Sanity Tests"
msg: "[FAILED] Sanity Tests"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@@ -11,7 +11,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Emails attached within other emails are now correctly exported
- Gracefully handle email and post attachments without name when exporting to eml
- Use correct timezone for event start and end times in Exchange exports (helps fix issues in relative recurrence patterns)
- Fixed an issue causing exports dealing with calendar data to have high memory usage
## [v0.19.0] (beta) - 2024-02-06

View File

@@ -1,6 +1,3 @@
> [!NOTE]
> **The Corso project is no longer actively maintained and has been archived**.
<p align="center">
<img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
</p>

View File

@@ -150,11 +150,8 @@ func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.
result := recorder.String()
t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email.
assert.Contains(
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
// as an offhand check: the result should contain the m365 user id
assert.Contains(t, result, suite.m365.User.ID)
}
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_ServiceNotEnabled_email() {
@@ -186,11 +183,8 @@ func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, categ
result := recorder.String()
t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email.
assert.Contains(
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
// as an offhand check: the result should contain the m365 user id
assert.Contains(t, result, suite.m365.User.ID)
}
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_email() {
@@ -288,11 +282,8 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_fromConfigFile() {
result := suite.dpnd.recorder.String()
t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email.
assert.Contains(
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
// as an offhand check: the result should contain the m365 user id
assert.Contains(t, result, suite.m365.User.ID)
}
// AWS flags

View File

@@ -114,8 +114,6 @@ func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() {
}
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_conversations() {
// skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
runGroupsBackupCategoryTest(suite, flags.DataConversations)
}
@@ -219,9 +217,6 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()
}
func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
// Skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
@@ -305,10 +300,7 @@ func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
ins = idname.NewCache(map[string]string{suite.m365.Group.ID: suite.m365.Group.ID})
cats = []path.CategoryType{
path.ChannelMessagesCategory,
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
// we go fix the group mailbox.
// path.ConversationPostsCategory,
path.ConversationPostsCategory,
path.LibrariesCategory,
}
)
@@ -462,8 +454,6 @@ func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages(
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_conversations() {
// skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
runGroupsDetailsCmdTest(suite, path.ConversationPostsCategory)
}

View File

@@ -6,6 +6,12 @@ Param (
[Parameter(Mandatory = $False, HelpMessage = "Site for which to delete folders in SharePoint")]
[String]$Site,
[Parameter(Mandatory = $False, HelpMessage = "Exchange Admin email")]
[String]$AdminUser = $ENV:M365_TENANT_ADMIN_USER,
[Parameter(Mandatory = $False, HelpMessage = "Exchange Admin password")]
[String]$AdminPwd = $ENV:M365_TENANT_ADMIN_PASSWORD,
[Parameter(Mandatory = $False, HelpMessage = "Document library root. Can add multiple comma-separated values")]
[String[]]$LibraryNameList = @(),
@@ -16,16 +22,7 @@ Param (
[String[]]$FolderPrefixPurgeList,
[Parameter(Mandatory = $False, HelpMessage = "Delete document libraries with this prefix")]
[String[]]$LibraryPrefixDeleteList = @(),
[Parameter(Mandatory = $False, HelpMessage = "Tenant domain")]
[String]$TenantDomain = $ENV:TENANT_DOMAIN,
[Parameter(Mandatory = $False, HelpMessage = "Azure ClientId")]
[String]$ClientId = $ENV:AZURE_CLIENT_ID,
[Parameter(Mandatory = $False, HelpMessage = "Azure AppCert")]
[String]$AppCert = $ENV:AZURE_APP_CERT
[String[]]$LibraryPrefixDeleteList = @()
)
Set-StrictMode -Version 2.0
@@ -40,7 +37,7 @@ function Get-TimestampFromFolderName {
$name = $folder.Name
#fallback on folder create time
#fallback on folder create time
[datetime]$timestamp = $folder.TimeCreated
try {
@@ -69,7 +66,7 @@ function Get-TimestampFromListName {
$name = $list.Title
#fallback on list create time
#fallback on list create time
[datetime]$timestamp = $list.LastItemUserModifiedDate
try {
@@ -109,9 +106,8 @@ function Purge-Library {
Write-Host "`nPurging library: $LibraryName"
$foldersToPurge = @()
$folders = Get-PnPFolderItem -FolderSiteRelativeUrl $LibraryName -ItemType Folder
$folders = Get-PnPFolderItem -FolderSiteRelativeUrl $LibraryName -ItemType Folder
Write-Host "`nFolders: $folders"
foreach ($f in $folders) {
$folderName = $f.Name
$createTime = Get-TimestampFromFolderName -Folder $f
@@ -163,7 +159,7 @@ function Delete-LibraryByPrefix {
Write-Host "`nDeleting library: $LibraryNamePrefix"
$listsToDelete = @()
$lists = Get-PnPList
$lists = Get-PnPList
foreach ($l in $lists) {
$listName = $l.Title
@@ -187,7 +183,7 @@ function Delete-LibraryByPrefix {
Write-Host "Deleting list: "$l.Title
try {
$listInfo = Get-PnPList -Identity $l.Id | Select-Object -Property Hidden
# Check if the 'hidden' property is true
if ($listInfo.Hidden) {
Write-Host "List: $($l.Title) is hidden. Skipping..."
@@ -213,8 +209,8 @@ if (-not (Get-Module -ListAvailable -Name PnP.PowerShell)) {
}
if ([string]::IsNullOrEmpty($ClientId) -or [string]::IsNullOrEmpty($AppCert)) {
Write-Host "ClientId and AppCert required as arguments or environment variables."
if ([string]::IsNullOrEmpty($AdminUser) -or [string]::IsNullOrEmpty($AdminPwd)) {
Write-Host "Admin user name and password required as arguments or environment variables."
Exit
}
@@ -255,8 +251,12 @@ else {
Exit
}
$password = convertto-securestring -String $AdminPwd -AsPlainText -Force
$cred = New-Object -TypeName System.Management.Automation.PSCredential -ArgumentList $AdminUser, $password
Write-Host "`nAuthenticating and connecting to $SiteUrl"
Connect-PnPOnline -Url $siteUrl -ClientId $ClientId -CertificateBase64Encoded $AppCert -Tenant $TenantDomain
Connect-PnPOnline -Url $siteUrl -Credential $cred
Write-Host "Connected to $siteUrl`n"
# ensure that there are no unexpanded entries in the list of parameters

View File

@@ -5,7 +5,6 @@ import (
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cmd/sanity_test/common"
"github.com/alcionai/corso/src/internal/common/ptr"
@@ -21,20 +20,19 @@ const (
// this increases the chance that we'll run into a race collision with
// the cleanup script. Sometimes that's okay (deleting old data that
// isn't scrutinized in the test), other times it's not. We mark whether
// that's okay to do or not by specifying the folders being
// scrutinized for the test. Any errors within those folders should cause
// a fatal exit. Errors outside of those folders get ignored.
// that's okay to do or not by specifying the folder that's being
// scrutinized for the test. Any errors within that folder should cause
// a fatal exit. Errors outside of that folder get ignored.
//
// since we're using folder names, mustPopulateFolders will
// since we're using folder names, requireNoErrorsWithinFolderName will
// work best (ie: have the fewest collisions/side-effects) if the folder
// names are very specific. Standard sanity tests should include timestamps,
// name is very specific. Standard sanity tests should include timestamps,
// which should help ensure that. Be warned if you try to use it with
// a more generic name: unintended effects could occur.
func populateSanitree(
ctx context.Context,
ac api.Client,
driveID string,
mustPopulateFolders []string,
driveID, requireNoErrorsWithinFolderName string,
) *common.Sanitree[models.DriveItemable, models.DriveItemable] {
common.Infof(ctx, "building sanitree for drive: %s", driveID)
@@ -58,8 +56,8 @@
ac,
driveID,
stree.Name+"/",
mustPopulateFolders,
slices.Contains(mustPopulateFolders, rootName),
requireNoErrorsWithinFolderName,
rootName == requireNoErrorsWithinFolderName,
stree)
return stree
@@ -68,9 +66,7 @@
func recursivelyBuildTree(
ctx context.Context,
ac api.Client,
driveID string,
location string,
mustPopulateFolders []string,
driveID, location, requireNoErrorsWithinFolderName string,
isChildOfFolderRequiringNoErrors bool,
stree *common.Sanitree[models.DriveItemable, models.DriveItemable],
) {
@@ -84,9 +80,9 @@
common.Infof(
ctx,
"ignoring error getting children in directory %q because it is not within directory set %v\nerror: %s\n%+v",
"ignoring error getting children in directory %q because it is not within directory %q\nerror: %s\n%+v",
location,
mustPopulateFolders,
requireNoErrorsWithinFolderName,
err.Error(),
clues.ToCore(err))
@@ -103,12 +99,11 @@
// currently we don't restore blank folders.
// skip permission check for empty folders
if ptr.Val(driveItem.GetFolder().GetChildCount()) == 0 {
common.Infof(ctx, "skipped empty folder: %s%s", location, itemName)
common.Infof(ctx, "skipped empty folder: %s/%s", location, itemName)
continue
}
cannotAllowErrors := isChildOfFolderRequiringNoErrors ||
slices.Contains(mustPopulateFolders, itemName)
cannotAllowErrors := isChildOfFolderRequiringNoErrors || itemName == requireNoErrorsWithinFolderName
branch := &common.Sanitree[models.DriveItemable, models.DriveItemable]{
Parent: stree,
@@ -129,7 +124,7 @@
ac,
driveID,
location+branch.Name+"/",
mustPopulateFolders,
requireNoErrorsWithinFolderName,
cannotAllowErrors,
branch)
}
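The signature change in this file swaps a set of scrutinized folders (mustPopulateFolders, matched with slices.Contains) for a single scrutinized folder name (matched by string equality). A minimal sketch contrasting the two checks; the folder names are illustrative only:

package main

import (
	"fmt"

	"golang.org/x/exp/slices"
)

func main() {
	itemName := "Corso_Test_Sanity_2024-02-12" // hypothetical folder under scrutiny

	// Set form (one side of the diff): errors are fatal inside any folder in the set.
	mustPopulateFolders := []string{"Corso_Test_Sanity_2024-02-12", "Corso_Restore_Base"}
	fmt.Println(slices.Contains(mustPopulateFolders, itemName)) // true

	// Single-name form (the other side): only one folder is scrutinized.
	requireNoErrorsWithinFolderName := "Corso_Test_Sanity_2024-02-12"
	fmt.Println(itemName == requireNoErrorsWithinFolderName) // true
}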

View File

@@ -32,7 +32,7 @@ func CheckExport(
ctx,
ac,
driveID,
[]string{envs.SourceContainer})
envs.RestoreContainer)
sourceTree, ok := root.Children[envs.SourceContainer]
common.Assert(

View File

@@ -45,14 +45,7 @@ func CheckRestoration(
"drive_id", driveID,
"drive_name", driveName)
root := populateSanitree(
ctx,
ac,
driveID,
[]string{
envs.SourceContainer,
envs.RestoreContainer,
})
root := populateSanitree(ctx, ac, driveID, envs.RestoreContainer)
sourceTree, ok := root.Children[envs.SourceContainer]
common.Assert(

View File

@@ -3,7 +3,7 @@ module github.com/alcionai/corso/src
go 1.21
replace (
github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4
github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe
// Alcion fork removes the validation of email addresses as we might get incomplete email addresses
github.com/xhit/go-simple-mail/v2 v2.16.0 => github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a
@@ -20,7 +20,7 @@ require (
github.com/google/uuid v1.6.0
github.com/h2non/gock v1.2.0
github.com/jaytaylor/html2text v0.0.0-20230321000545-74c2419ad056
github.com/jhillyerd/enmime v1.1.0
github.com/jhillyerd/enmime v1.2.0
github.com/kopia/kopia v0.15.0
github.com/microsoft/kiota-abstractions-go v1.5.4
github.com/microsoft/kiota-authentication-azure-go v1.0.1

View File

@@ -23,8 +23,8 @@ github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c h1:QtARFaqYKtGjmEej
github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c/go.mod h1:1YJwJy3W6GGsC2UiDAEWABUjgvT8OZHjKs8OoaXeKbw=
github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a h1:4nhM0NM1qtUT1s55rQ+D0Xw1Re5mIU9/crjEl6KdE+k=
github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a/go.mod h1:b7P5ygho6SYE+VIqpxA6QkYfv4teeyG4MKqB3utRu98=
github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4 h1:3YZ70H3mkUgwiHLiNvukrqh2awRgfl1RAkbV0IoUqqk=
github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4/go.mod h1:QFRSOUQzZfKE3hKVBHP7hxOn5WyrEmdBtfN5wkib/eA=
github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe h1:nLS5pxhm04Jz4+qeipNlxdyPGxqNWpBu8UGkRYpWoIw=
github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe/go.mod h1:QFRSOUQzZfKE3hKVBHP7hxOn5WyrEmdBtfN5wkib/eA=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@@ -147,8 +147,8 @@ github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jaytaylor/html2text v0.0.0-20230321000545-74c2419ad056 h1:iCHtR9CQyktQ5+f3dMVZfwD2KWJUgm7M0gdL9NGr8KA=
github.com/jaytaylor/html2text v0.0.0-20230321000545-74c2419ad056/go.mod h1:CVKlgaMiht+LXvHG173ujK6JUhZXKb2u/BQtjPDIvyk=
github.com/jhillyerd/enmime v1.1.0 h1:ubaIzg68VY7CMCe2YbHe6nkRvU9vujixTkNz3EBvZOw=
github.com/jhillyerd/enmime v1.1.0/go.mod h1:FRFuUPCLh8PByQv+8xRcLO9QHqaqTqreYhopv5eyk4I=
github.com/jhillyerd/enmime v1.2.0 h1:dIu1IPEymQgoT2dzuB//ttA/xcV40NMPpQtmd4wslHk=
github.com/jhillyerd/enmime v1.2.0/go.mod h1:FRFuUPCLh8PByQv+8xRcLO9QHqaqTqreYhopv5eyk4I=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=

View File

@@ -10,7 +10,6 @@ import (
"github.com/alcionai/corso/src/pkg/dttm"
"github.com/alcionai/corso/src/pkg/export"
"github.com/alcionai/corso/src/pkg/logger"
)
const (
@@ -57,22 +56,12 @@ func ZipExportCollection(
defer wr.Close()
buf := make([]byte, ZipCopyBufferSize)
counted := 0
log := logger.Ctx(ctx).
With("collection_count", len(expCollections))
for _, ec := range expCollections {
folder := ec.BasePath()
items := ec.Items(ctx)
for item := range items {
counted++
// Log every 1000 items that are processed
if counted%1000 == 0 {
log.Infow("progress zipping export items", "count_items", counted)
}
err := item.Error
if err != nil {
writer.CloseWithError(clues.Wrap(err, "getting export item").With("id", item.ID))
@@ -99,12 +88,8 @@
writer.CloseWithError(clues.Wrap(err, "writing zip entry").With("name", name).With("id", item.ID))
return
}
item.Body.Close()
}
}
log.Infow("completed zipping export items", "count_items", counted)
}()
return zipCollection{reader}, nil
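The logging removed from ZipExportCollection uses a counter-modulo progress pattern: increment a counter per item and emit a log line on every 1000th item, plus a final total. A standalone sketch of that pattern:

package main

import "log"

func main() {
	items := make([]struct{}, 3500) // stand-in for export items

	counted := 0
	for range items {
		counted++

		// Log every 1000 items that are processed.
		if counted%1000 == 0 {
			log.Printf("progress zipping export items count_items=%d", counted)
		}
	}

	log.Printf("completed zipping export items count_items=%d", counted)
}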

View File

@@ -1,13 +1,10 @@
package jwt
import (
"context"
"time"
"github.com/alcionai/clues"
jwt "github.com/golang-jwt/jwt/v5"
"github.com/alcionai/corso/src/pkg/logger"
)
// IsJWTExpired checks if the JWT token is past expiry by analyzing the
@@ -40,51 +37,3 @@
return expired, nil
}
// GetJWTLifetime returns the issued at(iat) and expiration time(exp) claims
// present in the JWT token. These are optional claims and may not be present
// in the token. Absence is not reported as an error.
//
// An error is returned if the supplied token is malformed. Times are returned
// in UTC to have parity with graph responses.
func GetJWTLifetime(
ctx context.Context,
rawToken string,
) (time.Time, time.Time, error) {
var (
issuedAt time.Time
expiresAt time.Time
)
p := jwt.NewParser()
token, _, err := p.ParseUnverified(rawToken, &jwt.RegisteredClaims{})
if err != nil {
logger.CtxErr(ctx, err).Debug("parsing jwt token")
return time.Time{}, time.Time{}, clues.Wrap(err, "invalid jwt")
}
exp, err := token.Claims.GetExpirationTime()
if err != nil {
logger.CtxErr(ctx, err).Debug("extracting exp claim")
return time.Time{}, time.Time{}, clues.Wrap(err, "getting token expiry time")
}
iat, err := token.Claims.GetIssuedAt()
if err != nil {
logger.CtxErr(ctx, err).Debug("extracting iat claim")
return time.Time{}, time.Time{}, clues.Wrap(err, "getting token issued at time")
}
// Absence of iat or exp claims is not reported as an error by jwt library as these
// are optional as per spec.
if iat != nil {
issuedAt = iat.UTC()
}
if exp != nil {
expiresAt = exp.UTC()
}
return issuedAt, expiresAt, nil
}
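GetJWTLifetime builds on go-jwt's ParseUnverified and the GetIssuedAt/GetExpirationTime claim accessors. A self-contained sketch of the same claim extraction, signing a throwaway token instead of going through corso's wrapper:

package main

import (
	"fmt"
	"time"

	jwt "github.com/golang-jwt/jwt/v5"
)

func main() {
	// Build a token carrying iat and exp claims.
	claims := jwt.RegisteredClaims{
		IssuedAt:  jwt.NewNumericDate(time.Now().Add(-time.Hour)),
		ExpiresAt: jwt.NewNumericDate(time.Now().Add(time.Hour)),
	}

	raw, err := jwt.NewWithClaims(jwt.SigningMethodHS256, claims).
		SignedString([]byte("test-key")) // hypothetical signing key
	if err != nil {
		panic(err)
	}

	// Read the claims back without verifying the signature, as GetJWTLifetime does.
	token, _, err := jwt.NewParser().ParseUnverified(raw, &jwt.RegisteredClaims{})
	if err != nil {
		panic(err)
	}

	iat, _ := token.Claims.GetIssuedAt()
	exp, _ := token.Claims.GetExpirationTime()
	fmt.Println("iat:", iat.UTC(), "exp:", exp.UTC())
}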

View File

@@ -113,134 +113,3 @@ func (suite *JWTUnitSuite) TestIsJWTExpired() {
})
}
}
func (suite *JWTUnitSuite) TestGetJWTLifetime() {
// Set of time values to be used in the tests.
// Truncate to seconds for comparisons since jwt tokens have second
// level precision.
idToTime := map[string]time.Time{
"T0": time.Now().UTC().Add(-time.Hour).Truncate(time.Second),
"T1": time.Now().UTC().Truncate(time.Second),
"T2": time.Now().UTC().Add(time.Hour).Truncate(time.Second),
}
table := []struct {
name string
getToken func() (string, error)
expectFunc func(t *testing.T, iat time.Time, exp time.Time)
expectErr assert.ErrorAssertionFunc
}{
{
name: "alive token",
getToken: func() (string, error) {
return createJWTToken(
jwt.RegisteredClaims{
IssuedAt: jwt.NewNumericDate(idToTime["T0"]),
ExpiresAt: jwt.NewNumericDate(idToTime["T1"]),
})
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, idToTime["T0"], iat)
assert.Equal(t, idToTime["T1"], exp)
},
expectErr: assert.NoError,
},
// Test with a token which is not generated using the go-jwt lib.
// This is a long lived token which is valid for 100 years.
{
name: "alive raw token with iat and exp claims",
getToken: func() (string, error) {
return rawToken, nil
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Less(t, iat, time.Now(), "iat should be in the past")
assert.Greater(t, exp, time.Now(), "exp should be in the future")
},
expectErr: assert.NoError,
},
// Regardless of whether the token is expired or not, we should be able to
// extract the iat and exp claims from it without error.
{
name: "expired token",
getToken: func() (string, error) {
return createJWTToken(
jwt.RegisteredClaims{
IssuedAt: jwt.NewNumericDate(idToTime["T1"]),
ExpiresAt: jwt.NewNumericDate(idToTime["T0"]),
})
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, idToTime["T1"], iat)
assert.Equal(t, idToTime["T0"], exp)
},
expectErr: assert.NoError,
},
{
name: "missing iat claim",
getToken: func() (string, error) {
return createJWTToken(
jwt.RegisteredClaims{
ExpiresAt: jwt.NewNumericDate(idToTime["T2"]),
})
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, time.Time{}, iat)
assert.Equal(t, idToTime["T2"], exp)
},
expectErr: assert.NoError,
},
{
name: "missing exp claim",
getToken: func() (string, error) {
return createJWTToken(
jwt.RegisteredClaims{
IssuedAt: jwt.NewNumericDate(idToTime["T0"]),
})
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, idToTime["T0"], iat)
assert.Equal(t, time.Time{}, exp)
},
expectErr: assert.NoError,
},
{
name: "both claims missing",
getToken: func() (string, error) {
return createJWTToken(jwt.RegisteredClaims{})
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, time.Time{}, iat)
assert.Equal(t, time.Time{}, exp)
},
expectErr: assert.NoError,
},
{
name: "malformed token",
getToken: func() (string, error) {
return "header.claims.signature", nil
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, time.Time{}, iat)
assert.Equal(t, time.Time{}, exp)
},
expectErr: assert.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
token, err := test.getToken()
require.NoError(t, err)
iat, exp, err := GetJWTLifetime(ctx, token)
test.expectErr(t, err)
test.expectFunc(t, iat, exp)
})
}
}

View File

@@ -59,19 +59,6 @@ func First(vs ...string) string {
return ""
}
// FirstIn returns the first entry in the map with a non-zero value
// when iterating the provided list of keys.
func FirstIn(m map[string]any, keys ...string) string {
for _, key := range keys {
v, err := AnyValueToString(key, m)
if err == nil && len(v) > 0 {
return v
}
}
return ""
}
// Preview reduces the string to the specified size.
// If the string is longer than the size, the last three
// characters are replaced with an ellipsis. Size < 4
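The removed FirstIn helper above is what downloadItem (later in this diff) uses to probe a drive item's additional-data map for the first populated download URL. A simplified standalone sketch, using a plain type assertion in place of AnyValueToString and an assumed second candidate key:

package main

import "fmt"

// firstIn returns the first non-empty string value found in m when
// probing keys in order, echoing the removed str.FirstIn.
func firstIn(m map[string]any, keys ...string) string {
	for _, key := range keys {
		if v, ok := m[key].(string); ok && len(v) > 0 {
			return v
		}
	}

	return ""
}

func main() {
	ad := map[string]any{"@microsoft.graph.downloadUrl": "https://example.com/dl"}

	// Probe candidate keys the way downloadItem probes downloadURLKeys.
	fmt.Println(firstIn(ad, "@microsoft.graph.downloadUrl", "@content.downloadUrl"))
}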

View File

@@ -118,96 +118,3 @@ func TestGenerateHash(t *testing.T) {
}
}
}
func TestFirstIn(t *testing.T) {
table := []struct {
name string
m map[string]any
keys []string
expect string
}{
{
name: "nil map",
keys: []string{"foo", "bar"},
expect: "",
},
{
name: "empty map",
m: map[string]any{},
keys: []string{"foo", "bar"},
expect: "",
},
{
name: "no match",
m: map[string]any{
"baz": "baz",
},
keys: []string{"foo", "bar"},
expect: "",
},
{
name: "no keys",
m: map[string]any{
"baz": "baz",
},
keys: []string{},
expect: "",
},
{
name: "nil match",
m: map[string]any{
"foo": nil,
},
keys: []string{"foo", "bar"},
expect: "",
},
{
name: "empty match",
m: map[string]any{
"foo": "",
},
keys: []string{"foo", "bar"},
expect: "",
},
{
name: "matches first key",
m: map[string]any{
"foo": "fnords",
},
keys: []string{"foo", "bar"},
expect: "fnords",
},
{
name: "matches second key",
m: map[string]any{
"bar": "smarf",
},
keys: []string{"foo", "bar"},
expect: "smarf",
},
{
name: "matches second key with nil first match",
m: map[string]any{
"foo": nil,
"bar": "smarf",
},
keys: []string{"foo", "bar"},
expect: "smarf",
},
{
name: "matches second key with empty first match",
m: map[string]any{
"foo": "",
"bar": "smarf",
},
keys: []string{"foo", "bar"},
expect: "smarf",
},
}
for _, test := range table {
t.Run(test.name, func(t *testing.T) {
result := FirstIn(test.m, test.keys...)
assert.Equal(t, test.expect, result)
})
}
}

View File

@@ -484,14 +484,7 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
desc := replacer.Replace(description)
iCalEvent.AddProperty("X-ALT-DESC", desc, ics.WithFmtType("text/html"))
} else {
// Disable auto wrap, causes huge memory spikes
// https://github.com/jaytaylor/html2text/issues/48
prettyTablesOptions := html2text.NewPrettyTablesOptions()
prettyTablesOptions.AutoWrapText = false
stripped, err := html2text.FromString(
description,
html2text.Options{PrettyTables: true, PrettyTablesOptions: prettyTablesOptions})
stripped, err := html2text.FromString(description, html2text.Options{PrettyTables: true})
if err != nil {
return clues.Wrap(err, "converting html to text").
With("description_length", len(description))

View File

@@ -59,15 +59,6 @@ const (
minEpochDurationUpperBound = 7 * 24 * time.Hour
)
// allValidCompressors is the set of compression algorithms either currently
// being used or that were previously used. Use this during the config verify
// command to avoid spurious errors. We can revisit whether we want to update
// the config in those old repos at a later time.
var allValidCompressors = map[compression.Name]struct{}{
compression.Name(defaultCompressor): {},
compression.Name("s2-default"): {},
}
var (
ErrSettingDefaultConfig = clues.New("setting default repo config values")
ErrorRepoAlreadyExists = clues.New("repo already exists")
@@ -777,7 +768,7 @@ func (w *conn) verifyDefaultPolicyConfigOptions(
ctx = clues.Add(ctx, "current_global_policy", globalPol.String())
if _, ok := allValidCompressors[globalPol.CompressionPolicy.CompressorName]; !ok {
if globalPol.CompressionPolicy.CompressorName != defaultCompressor {
errs.AddAlert(ctx, fault.NewAlert(
"unexpected compressor",
corsoWrapperAlertNamespace,

View File

@@ -891,20 +891,6 @@ func (suite *ConnRetentionIntegrationSuite) TestVerifyDefaultConfigOptions() {
},
expectAlerts: 1,
},
{
name: "OldValidCompressor",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
pol, err := con.getGlobalPolicyOrEmpty(ctx)
require.NoError(t, err, clues.ToCore(err))
_, err = updateCompressionOnPolicy("s2-default", pol)
require.NoError(t, err, clues.ToCore(err))
err = con.writeGlobalPolicy(ctx, "test", pol)
require.NoError(t, err, clues.ToCore(err))
},
expectAlerts: 0,
},
{
name: "NonDefaultCompression",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {

View File

@@ -366,7 +366,7 @@ func downloadContent(
itemID := ptr.Val(item.GetId())
ctx = clues.Add(ctx, "item_id", itemID)
content, err := downloadItem(ctx, iaag, driveID, item)
content, err := downloadItem(ctx, iaag, item)
if err == nil {
return content, nil
} else if !graph.IsErrUnauthorizedOrBadToken(err) {
@@ -395,7 +395,7 @@
cdi := custom.ToCustomDriveItem(di)
content, err = downloadItem(ctx, iaag, driveID, cdi)
content, err = downloadItem(ctx, iaag, cdi)
if err != nil {
return nil, clues.Wrap(err, "content download retry")
}
@@ -426,7 +426,7 @@ func readItemContents(
return nil, core.ErrNotFound
}
rc, err := downloadFile(ctx, iaag, props.downloadURL, false)
rc, err := downloadFile(ctx, iaag, props.downloadURL)
if graph.IsErrUnauthorizedOrBadToken(err) {
logger.CtxErr(ctx, err).Debug("stale item in cache")
}

View File

@@ -795,12 +795,7 @@ func (h mockBackupHandler[T]) AugmentItemInfo(
return h.ItemInfo
}
func (h *mockBackupHandler[T]) Get(
context.Context,
string,
map[string]string,
bool,
) (*http.Response, error) {
func (h *mockBackupHandler[T]) Get(context.Context, string, map[string]string) (*http.Response, error) {
c := h.getCall
h.getCall++

View File

@@ -21,10 +21,8 @@ import (
)
const (
acceptHeaderKey = "Accept"
acceptHeaderValue = "*/*"
gigabyte = 1024 * 1024 * 1024
largeFileDownloadLimit = 15 * gigabyte
acceptHeaderKey = "Accept"
acceptHeaderValue = "*/*"
)
// downloadUrlKeys is used to find the download URL in a DriveItem response.
@@ -35,8 +33,7 @@ var downloadURLKeys = []string{
func downloadItem(
ctx context.Context,
getter api.Getter,
driveID string,
ag api.Getter,
item *custom.DriveItem,
) (io.ReadCloser, error) {
if item == nil {
@@ -44,37 +41,36 @@ }
}
var (
// very large file content needs to be downloaded through a different endpoint, or else
// the download could take longer than the lifespan of the download token in the cached
// url, which will cause us to timeout on every download request, even if we refresh the
// download url right before the query.
url = "https://graph.microsoft.com/v1.0/drives/" + driveID + "/items/" + ptr.Val(item.GetId()) + "/content"
reader io.ReadCloser
err error
isLargeFile = ptr.Val(item.GetSize()) > largeFileDownloadLimit
rc io.ReadCloser
isFile = item.GetFile() != nil
err error
)
// if this isn't a file, no content is available for download
if item.GetFile() == nil {
return reader, nil
if isFile {
var (
url string
ad = item.GetAdditionalData()
)
for _, key := range downloadURLKeys {
if v, err := str.AnyValueToString(key, ad); err == nil {
url = v
break
}
}
rc, err = downloadFile(ctx, ag, url)
if err != nil {
return nil, clues.Stack(err)
}
}
// smaller files will maintain our current behavior (prefetching the download url with the
// url cache). That pattern works for us in general, and we only need to deviate for very
// large file sizes.
if !isLargeFile {
url = str.FirstIn(item.GetAdditionalData(), downloadURLKeys...)
}
reader, err = downloadFile(ctx, getter, url, isLargeFile)
return reader, clues.StackWC(ctx, err).OrNil()
return rc, nil
}
type downloadWithRetries struct {
getter api.Getter
requireAuth bool
url string
getter api.Getter
url string
}
func (dg *downloadWithRetries) SupportsRange() bool {
@@ -90,7 +86,7 @@ func (dg *downloadWithRetries) Get(
// wouldn't work without it (get 416 responses instead of 206).
headers[acceptHeaderKey] = acceptHeaderValue
resp, err := dg.getter.Get(ctx, dg.url, headers, dg.requireAuth)
resp, err := dg.getter.Get(ctx, dg.url, headers)
if err != nil {
return nil, clues.Wrap(err, "getting file")
}
@@ -100,7 +96,7 @@
resp.Body.Close()
}
return nil, clues.NewWC(ctx, "malware detected").Label(graph.LabelsMalware)
return nil, clues.New("malware detected").Label(graph.LabelsMalware)
}
if resp != nil && (resp.StatusCode/100) != 2 {
@@ -111,7 +107,7 @@
// upstream error checks can compare the status with
// clues.HasLabel(err, graph.LabelStatus(http.KnownStatusCode))
return nil, clues.
Wrap(clues.NewWC(ctx, resp.Status), "non-2xx http response").
Wrap(clues.New(resp.Status), "non-2xx http response").
Label(graph.LabelStatus(resp.StatusCode))
}
@@ -122,7 +118,6 @@ func downloadFile(
ctx context.Context,
ag api.Getter,
url string,
requireAuth bool,
) (io.ReadCloser, error) {
if len(url) == 0 {
return nil, clues.NewWC(ctx, "empty file url")
@@ -146,9 +141,8 @@
rc, err := readers.NewResetRetryHandler(
ctx,
&downloadWithRetries{
getter: ag,
requireAuth: requireAuth,
url: url,
getter: ag,
url: url,
})
return rc, clues.Stack(err).OrNil()
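One side of this file's diff routes files larger than largeFileDownloadLimit (15 GB) through the drive's /items/{id}/content endpoint rather than the cached pre-authenticated download URL, since a long-running download can outlive the cached URL's token. A sketch of that selection logic, using the constants and URL shape shown above (the IDs are illustrative):

package main

import "fmt"

const (
	gigabyte               = 1024 * 1024 * 1024
	largeFileDownloadLimit = 15 * gigabyte
)

// pickDownloadURL mirrors the size-based branch in downloadItem: very large
// files use the durable /content endpoint, smaller files keep the cached URL.
func pickDownloadURL(driveID, itemID, cachedURL string, size int64) string {
	if size > largeFileDownloadLimit {
		return "https://graph.microsoft.com/v1.0/drives/" + driveID + "/items/" + itemID + "/content"
	}

	return cachedURL
}

func main() {
	fmt.Println(pickDownloadURL("drive1", "item1", "https://example.com/cached", 20*gigabyte))
	fmt.Println(pickDownloadURL("drive1", "item1", "https://example.com/cached", 2*gigabyte))
}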

View File

@@ -109,11 +109,7 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
}
// Read data for the file
itemData, err := downloadItem(
ctx,
bh,
suite.m365.User.DriveID,
custom.ToCustomDriveItem(driveItem))
itemData, err := downloadItem(ctx, bh, custom.ToCustomDriveItem(driveItem))
require.NoError(t, err, clues.ToCore(err))
size, err := io.Copy(io.Discard, itemData)
@@ -296,7 +292,6 @@ func (m mockGetter) Get(
ctx context.Context,
url string,
headers map[string]string,
requireAuth bool,
) (*http.Response, error) {
return m.GetFunc(ctx, url)
}
@@ -384,7 +379,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
return nil, clues.New("test error")
},
errorExpected: require.Error,
rcExpected: require.NotNil,
rcExpected: require.Nil,
},
{
name: "download url is empty",
@@ -421,7 +416,7 @@
}, nil
},
errorExpected: require.Error,
rcExpected: require.NotNil,
rcExpected: require.Nil,
},
{
name: "non-2xx http response",
@@ -440,7 +435,7 @@
}, nil
},
errorExpected: require.Error,
rcExpected: require.NotNil,
rcExpected: require.Nil,
},
}
@@ -453,78 +448,9 @@
mg := mockGetter{
GetFunc: test.GetFunc,
}
rc, err := downloadItem(
ctx,
mg,
"driveID",
custom.ToCustomDriveItem(test.itemFunc()))
rc, err := downloadItem(ctx, mg, custom.ToCustomDriveItem(test.itemFunc()))
test.errorExpected(t, err, clues.ToCore(err))
test.rcExpected(t, rc, "reader should only be nil if item is nil")
})
}
}
func (suite *ItemUnitTestSuite) TestDownloadItem_urlByFileSize() {
var (
testRc = io.NopCloser(bytes.NewReader([]byte("test")))
url = "https://example.com"
okResp = &http.Response{
StatusCode: http.StatusOK,
Body: testRc,
}
)
table := []struct {
name string
itemFunc func() models.DriveItemable
GetFunc func(ctx context.Context, url string) (*http.Response, error)
errorExpected require.ErrorAssertionFunc
rcExpected require.ValueAssertionFunc
label string
}{
{
name: "big file",
itemFunc: func() models.DriveItemable {
di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{"@microsoft.graph.downloadUrl": url})
di.SetSize(ptr.To[int64](20 * gigabyte))
return di
},
GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
assert.Contains(suite.T(), url, "/content")
return okResp, nil
},
},
{
name: "small file",
itemFunc: func() models.DriveItemable {
di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{"@microsoft.graph.downloadUrl": url})
di.SetSize(ptr.To[int64](2 * gigabyte))
return di
},
GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
assert.NotContains(suite.T(), url, "/content")
return okResp, nil
},
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
rc, err := downloadItem(
ctx,
mockGetter{GetFunc: test.GetFunc},
"driveID",
custom.ToCustomDriveItem(test.itemFunc()))
require.NoError(t, err, clues.ToCore(err))
test.rcExpected(t, rc)
})
}
}
@@ -581,11 +507,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem_ConnectionResetErrorOnFirstRead
mg := mockGetter{
GetFunc: GetFunc,
}
rc, err := downloadItem(
ctx,
mg,
"driveID",
custom.ToCustomDriveItem(itemFunc()))
rc, err := downloadItem(ctx, mg, custom.ToCustomDriveItem(itemFunc()))
errorExpected(t, err, clues.ToCore(err))
rcExpected(t, rc)

View File

@@ -93,9 +93,8 @@ func (h siteBackupHandler) Get(
ctx context.Context,
url string,
headers map[string]string,
requireAuth bool,
) (*http.Response, error) {
return h.ac.Get(ctx, url, headers, requireAuth)
return h.ac.Get(ctx, url, headers)
}
func (h siteBackupHandler) PathPrefix(

View File

@@ -154,8 +154,7 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
http.MethodGet,
props.downloadURL,
nil,
nil,
false)
nil)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, resp)

View File

@@ -93,9 +93,8 @@ func (h userDriveBackupHandler) Get(
ctx context.Context,
url string,
headers map[string]string,
requireAuth bool,
) (*http.Response, error) {
return h.ac.Get(ctx, url, headers, requireAuth)
return h.ac.Get(ctx, url, headers)
}
func (h userDriveBackupHandler) PathPrefix(

View File

@@ -296,7 +296,6 @@ func populateCollections(
cl),
qp.ProtectedResource.ID(),
bh.itemHandler(),
bh,
addAndRem.Added,
addAndRem.Removed,
// TODO: produce a feature flag that allows selective

View File

@@ -88,14 +88,6 @@ func (bh mockBackupHandler) folderGetter() containerGetter { return
func (bh mockBackupHandler) previewIncludeContainers() []string { return bh.previewIncludes }
func (bh mockBackupHandler) previewExcludeContainers() []string { return bh.previewExcludes }
func (bh mockBackupHandler) CanSkipItemFailure(
err error,
resourceID string,
opts control.Options,
) (fault.SkipCause, bool) {
return "", false
}
func (bh mockBackupHandler) NewContainerCache(
userID string,
) (string, graph.ContainerResolver) {

View File

@@ -19,7 +19,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/errs/core"
"github.com/alcionai/corso/src/pkg/fault"
@ -69,21 +68,21 @@ func getItemAndInfo(
ctx context.Context,
getter itemGetterSerializer,
userID string,
itemID string,
id string,
useImmutableIDs bool,
parentPath string,
) ([]byte, *details.ExchangeInfo, error) {
item, info, err := getter.GetItem(
ctx,
userID,
itemID,
id,
fault.New(true)) // temporary way to force a failFast error
if err != nil {
return nil, nil, clues.WrapWC(ctx, err, "fetching item").
Label(fault.LabelForceNoBackupCreation)
}
itemData, err := getter.Serialize(ctx, item, userID, itemID)
itemData, err := getter.Serialize(ctx, item, userID, id)
if err != nil {
return nil, nil, clues.WrapWC(ctx, err, "serializing item")
}
@ -109,7 +108,6 @@ func NewCollection(
bc data.BaseCollection,
user string,
items itemGetterSerializer,
canSkipFailChecker canSkipItemFailurer,
origAdded map[string]time.Time,
origRemoved []string,
validModTimes bool,
@ -142,7 +140,6 @@ func NewCollection(
added: added,
removed: removed,
getter: items,
skipChecker: canSkipFailChecker,
statusUpdater: statusUpdater,
}
}
@ -153,7 +150,6 @@ func NewCollection(
added: added,
removed: removed,
getter: items,
skipChecker: canSkipFailChecker,
statusUpdater: statusUpdater,
counter: counter,
}
@ -171,8 +167,7 @@ type prefetchCollection struct {
// removed is a list of item IDs that were deleted from, or moved out, of a container
removed map[string]struct{}
getter itemGetterSerializer
skipChecker canSkipItemFailurer
getter itemGetterSerializer
statusUpdater support.StatusUpdater
}
@ -199,12 +194,11 @@ func (col *prefetchCollection) streamItems(
wg sync.WaitGroup
progressMessage chan<- struct{}
user = col.user
dataCategory = col.Category().String()
)
ctx = clues.Add(
ctx,
"category", dataCategory)
"category", col.Category().String())
defer func() {
close(stream)
@ -233,7 +227,7 @@ func (col *prefetchCollection) streamItems(
defer close(semaphoreCh)
// delete all removed items
for itemID := range col.removed {
for id := range col.removed {
semaphoreCh <- struct{}{}
wg.Add(1)
@ -253,7 +247,7 @@ func (col *prefetchCollection) streamItems(
if progressMessage != nil {
progressMessage <- struct{}{}
}
}(itemID)
}(id)
}
var (
@ -262,7 +256,7 @@ func (col *prefetchCollection) streamItems(
)
// add any new items
for itemID := range col.added {
for id := range col.added {
if el.Failure() != nil {
break
}
@ -283,23 +277,8 @@ func (col *prefetchCollection) streamItems(
col.Opts().ToggleFeatures.ExchangeImmutableIDs,
parentPath)
if err != nil {
// pulled outside the switch due to multiple return values.
cause, canSkip := col.skipChecker.CanSkipItemFailure(
err,
user,
col.Opts())
// Handle known error cases
switch {
case canSkip:
// this is a special case handler that allows the item to be skipped
// instead of producing an error.
errs.AddSkip(ctx, fault.FileSkip(
cause,
dataCategory,
id,
id,
nil))
case errors.Is(err, core.ErrNotFound):
// Don't report errors for deleted items as there's no way for us to
// back up data that is gone. Record it as a "success", since there's
@ -370,7 +349,7 @@ func (col *prefetchCollection) streamItems(
if progressMessage != nil {
progressMessage <- struct{}{}
}
}(itemID)
}(id)
}
wg.Wait()
@ -398,8 +377,7 @@ type lazyFetchCollection struct {
// removed is a list of item IDs that were deleted from, or moved out, of a container
removed map[string]struct{}
getter itemGetterSerializer
skipChecker canSkipItemFailurer
getter itemGetterSerializer
statusUpdater support.StatusUpdater
@ -426,8 +404,8 @@ func (col *lazyFetchCollection) streamItems(
var (
success int64
progressMessage chan<- struct{}
user = col.user
el = errs.Local()
user = col.user
)
defer func() {
@ -439,7 +417,7 @@ func (col *lazyFetchCollection) streamItems(
int(success),
0,
col.FullPath().Folder(false),
el.Failure())
errs.Failure())
}()
if len(col.added)+len(col.removed) > 0 {
@ -465,7 +443,7 @@ func (col *lazyFetchCollection) streamItems(
// add any new items
for id, modTime := range col.added {
if el.Failure() != nil {
if errs.Failure() != nil {
break
}
@ -481,18 +459,15 @@ func (col *lazyFetchCollection) streamItems(
&lazyItemGetter{
userID: user,
itemID: id,
category: col.Category(),
getter: col.getter,
modTime: modTime,
immutableIDs: col.Opts().ToggleFeatures.ExchangeImmutableIDs,
parentPath: parentPath,
skipChecker: col.skipChecker,
opts: col.Opts(),
},
id,
modTime,
col.counter,
el)
errs)
atomic.AddInt64(&success, 1)
@ -506,12 +481,9 @@ type lazyItemGetter struct {
getter itemGetterSerializer
userID string
itemID string
category path.CategoryType
parentPath string
modTime time.Time
immutableIDs bool
skipChecker canSkipItemFailurer
opts control.Options
}
func (lig *lazyItemGetter) GetData(
@ -526,25 +498,6 @@ func (lig *lazyItemGetter) GetData(
lig.immutableIDs,
lig.parentPath)
if err != nil {
if lig.skipChecker != nil {
cause, canSkip := lig.skipChecker.CanSkipItemFailure(
err,
lig.userID,
lig.opts)
if canSkip {
errs.AddSkip(ctx, fault.FileSkip(
cause,
lig.category.String(),
lig.itemID,
lig.itemID,
nil))
return nil, nil, false, clues.
NewWC(ctx, "error marked as skippable by handler").
Label(graph.LabelsSkippable)
}
}
// If an item was deleted, return an empty file so we don't fail
// the backup, and return a sentinel error when asked for ItemInfo so
// we don't display the item in the backup.
@ -559,7 +512,7 @@ func (lig *lazyItemGetter) GetData(
err = clues.Stack(err)
errs.AddRecoverable(ctx, err)
return nil, nil, false, clues.Stack(err)
return nil, nil, false, err
}
// Update the mod time to what we already told kopia about. This is required

View File

@ -28,7 +28,6 @@ import (
"github.com/alcionai/corso/src/pkg/errs/core"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
graphTD "github.com/alcionai/corso/src/pkg/services/m365/api/graph/testdata"
)
@ -154,7 +153,6 @@ func (suite *CollectionUnitSuite) TestNewCollection_state() {
count.New()),
"u",
mock.DefaultItemGetSerialize(),
mock.NeverCanSkipFailChecker(),
nil,
nil,
colType.validModTimes,
@ -300,7 +298,6 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items() {
count.New()),
"",
&mock.ItemGetSerialize{},
mock.NeverCanSkipFailChecker(),
test.added,
maps.Keys(test.removed),
false,
@ -336,232 +333,6 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items() {
}
}
func (suite *CollectionUnitSuite) TestPrefetchCollection_Items_skipFailure() {
var (
start = time.Now().Add(-time.Second)
statusUpdater = func(*support.ControllerOperationStatus) {}
)
table := []struct {
name string
category path.CategoryType
handler backupHandler
added map[string]time.Time
removed map[string]struct{}
expectItemCount int
expectSkippedCount int
expectErr assert.ErrorAssertionFunc
}{
{
name: "no items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
expectErr: assert.NoError,
},
{
name: "events only added items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": {},
"flannigan": {},
"fitzbog": {},
},
expectItemCount: 0,
expectSkippedCount: 3,
expectErr: assert.NoError,
},
{
name: "events only removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "events added and removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "contacts only added items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": {},
"flannigan": {},
"fitzbog": {},
},
expectItemCount: 0,
expectSkippedCount: 0,
expectErr: assert.Error,
},
{
name: "contacts only removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "contacts added and removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "mail only added items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": {},
"flannigan": {},
"fitzbog": {},
},
expectItemCount: 0,
expectSkippedCount: 0,
expectErr: assert.Error,
},
{
name: "mail only removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "mail added and removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: assert.NoError,
},
}
for _, test := range table {
suite.Run(test.name, func() {
var (
t = suite.T()
errs = fault.New(true)
itemCount int
)
ctx, flush := tester.NewContext(t)
defer flush()
fullPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
locPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
opts := control.DefaultOptions()
opts.SkipEventsOnInstance503ForResources = map[string]struct{}{}
opts.SkipEventsOnInstance503ForResources["pr"] = struct{}{}
col := NewCollection(
data.NewBaseCollection(
fullPath,
nil,
locPath.ToBuilder(),
opts,
false,
count.New()),
"pr",
&mock.ItemGetSerialize{
SerializeErr: graph.ErrServiceUnavailableEmptyResp,
},
test.handler,
test.added,
maps.Keys(test.removed),
false,
statusUpdater,
count.New())
for item := range col.Items(ctx, errs) {
itemCount++
_, rok := test.removed[item.ID()]
if rok {
dimt, ok := item.(data.ItemModTime)
require.True(t, ok, "item implements data.ItemModTime")
assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
assert.True(t, item.Deleted(), "removals should be marked as deleted")
}
_, aok := test.added[item.ID()]
if !rok && aok {
assert.False(t, item.Deleted(), "additions should not be marked as deleted")
}
assert.True(t, aok || rok, "item must be either added or removed: %q", item.ID())
}
test.expectErr(t, errs.Failure())
assert.Equal(
t,
test.expectItemCount,
itemCount,
"should see all expected items")
assert.Len(t, errs.Skipped(), test.expectSkippedCount)
})
}
}
// This test verifies skipped error cases are handled correctly by collection enumeration
func (suite *CollectionUnitSuite) TestCollection_SkippedErrors() {
var (
@ -627,7 +398,6 @@ func (suite *CollectionUnitSuite) TestCollection_SkippedErrors() {
count.New()),
"",
test.itemGetter,
mock.NeverCanSkipFailChecker(),
test.added,
nil,
false,
@ -708,7 +478,6 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
expectItemCount: 3,
expectReads: []string{
"fisher",
"flannigan",
"fitzbog",
},
},
@ -761,7 +530,6 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
count.New()),
"",
mlg,
mock.NeverCanSkipFailChecker(),
test.added,
maps.Keys(test.removed),
true,
@ -773,10 +541,10 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
_, rok := test.removed[item.ID()]
if rok {
assert.True(t, item.Deleted(), "removals should be marked as deleted")
dimt, ok := item.(data.ItemModTime)
require.True(t, ok, "item implements data.ItemModTime")
assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
assert.True(t, item.Deleted(), "removals should be marked as deleted")
}
modTime, aok := test.added[item.ID()]
@ -785,6 +553,7 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
// initializer.
assert.Implements(t, (*data.ItemModTime)(nil), item)
assert.Equal(t, modTime, item.(data.ItemModTime).ModTime(), "item mod time")
assert.False(t, item.Deleted(), "additions should not be marked as deleted")
// Check if the test wants us to read the item's data so the lazy
@ -804,8 +573,6 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
// collection initializer.
assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, modTime, info.Modified(), "ItemInfo mod time")
} else {
assert.Fail(t, "unexpected read on item %s", item.ID())
}
}
@ -822,294 +589,6 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
}
}
func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_skipFailure() {
var (
start = time.Now().Add(-time.Second)
statusUpdater = func(*support.ControllerOperationStatus) {}
expectSkip = func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
assert.ErrorContains(t, err, "skip")
assert.True(t, clues.HasLabel(err, graph.LabelsSkippable), clues.ToCore(err))
}
expectNotSkipped = func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
assert.NotContains(t, err.Error(), "skip")
}
)
table := []struct {
name string
added map[string]time.Time
removed map[string]struct{}
category path.CategoryType
handler backupHandler
expectItemCount int
expectSkippedCount int
expectReads []string
expectErr func(t *testing.T, err error)
expectFailure assert.ErrorAssertionFunc
}{
{
name: "no items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
expectFailure: assert.NoError,
},
{
name: "events only added items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": start.Add(time.Minute),
"flannigan": start.Add(2 * time.Minute),
"fitzbog": start.Add(3 * time.Minute),
},
expectItemCount: 3,
expectSkippedCount: 3,
expectReads: []string{
"fisher",
"flannigan",
"fitzbog",
},
expectErr: expectSkip,
expectFailure: assert.NoError,
},
{
name: "events only removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: expectSkip,
expectFailure: assert.NoError,
},
{
name: "events added and removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: expectSkip,
expectFailure: assert.NoError,
},
{
name: "contacts only added items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": start.Add(time.Minute),
"flannigan": start.Add(2 * time.Minute),
"fitzbog": start.Add(3 * time.Minute),
},
expectItemCount: 3,
expectSkippedCount: 0,
expectReads: []string{
"fisher",
"flannigan",
"fitzbog",
},
expectErr: expectNotSkipped,
expectFailure: assert.Error,
},
{
name: "contacts only removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: expectNotSkipped,
expectFailure: assert.NoError,
},
{
name: "contacts added and removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: expectNotSkipped,
expectFailure: assert.NoError,
},
{
name: "mail only added items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": start.Add(time.Minute),
"flannigan": start.Add(2 * time.Minute),
"fitzbog": start.Add(3 * time.Minute),
},
expectItemCount: 3,
expectSkippedCount: 0,
expectReads: []string{
"fisher",
"flannigan",
"fitzbog",
},
expectErr: expectNotSkipped,
expectFailure: assert.Error,
},
{
name: "mail only removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: expectNotSkipped,
expectFailure: assert.NoError,
},
{
name: "mail added and removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: expectNotSkipped,
expectFailure: assert.NoError,
},
}
for _, test := range table {
suite.Run(test.name, func() {
var (
t = suite.T()
errs = fault.New(false)
itemCount int
)
ctx, flush := tester.NewContext(t)
defer flush()
fullPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
locPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
mlg := &mockLazyItemGetterSerializer{
ItemGetSerialize: &mock.ItemGetSerialize{
SerializeErr: graph.ErrServiceUnavailableEmptyResp,
},
}
defer mlg.check(t, test.expectReads)
opts := control.DefaultOptions()
opts.SkipEventsOnInstance503ForResources = map[string]struct{}{}
opts.SkipEventsOnInstance503ForResources["pr"] = struct{}{}
col := NewCollection(
data.NewBaseCollection(
fullPath,
nil,
locPath.ToBuilder(),
opts,
false,
count.New()),
"pr",
mlg,
test.handler,
test.added,
maps.Keys(test.removed),
true,
statusUpdater,
count.New())
for item := range col.Items(ctx, errs) {
itemCount++
_, rok := test.removed[item.ID()]
if rok {
dimt, ok := item.(data.ItemModTime)
require.True(t, ok, "item implements data.ItemModTime")
assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
assert.True(t, item.Deleted(), "removals should be marked as deleted")
}
modTime, aok := test.added[item.ID()]
if !rok && aok {
// Item's mod time should be what's passed into the collection
// initializer.
assert.Implements(t, (*data.ItemModTime)(nil), item)
assert.Equal(t, modTime, item.(data.ItemModTime).ModTime(), "item mod time")
assert.False(t, item.Deleted(), "additions should not be marked as deleted")
// Check if the test wants us to read the item's data so the lazy
// data fetch is executed.
if slices.Contains(test.expectReads, item.ID()) {
r := item.ToReader()
_, err := io.ReadAll(r)
test.expectErr(t, err)
r.Close()
} else {
assert.Fail(t, "unexpected read on item %s", item.ID())
}
}
assert.True(t, aok || rok, "item must be either added or removed: %q", item.ID())
}
failure := errs.Failure()
if failure == nil && len(errs.Recovered()) > 0 {
failure = errs.Recovered()[0]
}
test.expectFailure(t, failure, clues.ToCore(failure))
assert.Equal(
t,
test.expectItemCount,
itemCount,
"should see all expected items")
assert.Len(t, errs.Skipped(), test.expectSkippedCount)
})
}
}
func (suite *CollectionUnitSuite) TestLazyItem_NoRead_GetInfo_Errors() {
t := suite.T()

View File

@ -1,8 +1,6 @@
package exchange
import (
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)
@ -54,11 +52,3 @@ func (h contactBackupHandler) NewContainerCache(
getter: h.ac,
}
}
func (h contactBackupHandler) CanSkipItemFailure(
err error,
resourceID string,
opts control.Options,
) (fault.SkipCause, bool) {
return "", false
}

View File

@ -1,83 +0,0 @@
package exchange
import (
"testing"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type ContactsBackupHandlerUnitSuite struct {
tester.Suite
}
func TestContactsBackupHandlerUnitSuite(t *testing.T) {
suite.Run(t, &ContactsBackupHandlerUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *ContactsBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() {
resourceID := uuid.NewString()
table := []struct {
name string
err error
opts control.Options
expect assert.BoolAssertionFunc
expectCause fault.SkipCause
}{
{
name: "no config",
err: assert.AnError,
opts: control.Options{},
expect: assert.False,
},
{
name: "false when map is empty",
err: assert.AnError,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{},
},
expect: assert.False,
},
{
name: "false on nil error",
err: nil,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{
resourceID: {},
},
},
expect: assert.False,
},
{
name: "false even if resource matches",
err: assert.AnError,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{
resourceID: {},
},
},
expect: assert.False,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
h := newContactBackupHandler(api.Client{})
cause, result := h.CanSkipItemFailure(
test.err,
resourceID,
test.opts)
test.expect(t, result)
assert.Equal(t, test.expectCause, cause)
})
}
}

View File

@ -1,13 +1,6 @@
package exchange
import (
"errors"
"net/http"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)
@ -59,32 +52,3 @@ func (h eventBackupHandler) NewContainerCache(
getter: h.ac,
}
}
// todo: this could be further improved by specifying the call source and matching that
// with the expected error. Might be necessary if we use this for more than one error.
// But since we only call this in a single place at this time, that additional guard isn't
// built into the func.
func (h eventBackupHandler) CanSkipItemFailure(
err error,
resourceID string,
opts control.Options,
) (fault.SkipCause, bool) {
if err == nil {
return "", false
}
// this is a bit overly cautious. we do know that we get 503s with empty response bodies
// due to failures when getting too many instances. We don't know for sure if we get
// generic, well formed 503s. But since we're working with specific resources and item
// IDs in the first place, that extra caution will help make sure an unexpected error doesn't
// slip through the cracks on us.
if !errors.Is(err, graph.ErrServiceUnavailableEmptyResp) &&
!clues.HasLabel(err, graph.LabelStatus(http.StatusServiceUnavailable)) {
return "", false
}
_, ok := opts.SkipEventsOnInstance503ForResources[resourceID]
// strict equals required here. ids are case sensitive.
return fault.SkipKnownEventInstance503s, ok
}
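On the caller side, a skippable failure becomes a fault skip entry rather than a recoverable error; a minimal sketch assembled from the prefetchCollection handling shown earlier in this diff (variable names illustrative):

// Sketch only: consuming CanSkipItemFailure, mirroring the collection code above.
cause, canSkip := h.CanSkipItemFailure(err, resourceID, opts)
if canSkip {
	// record the item as intentionally skipped instead of failing it
	errs.AddSkip(ctx, fault.FileSkip(cause, category, itemID, itemID, nil))
} else {
	errs.AddRecoverable(ctx, clues.Stack(err))
}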

View File

@ -1,112 +0,0 @@
package exchange
import (
"net/http"
"testing"
"github.com/alcionai/clues"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)
type EventsBackupHandlerUnitSuite struct {
tester.Suite
}
func TestEventsBackupHandlerUnitSuite(t *testing.T) {
suite.Run(t, &EventsBackupHandlerUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *EventsBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() {
resourceID := uuid.NewString()
table := []struct {
name string
err error
opts control.Options
expect assert.BoolAssertionFunc
expectCause fault.SkipCause
}{
{
name: "no config",
err: graph.ErrServiceUnavailableEmptyResp,
opts: control.Options{},
expect: assert.False,
expectCause: fault.SkipKnownEventInstance503s,
},
{
name: "empty skip on 503",
err: graph.ErrServiceUnavailableEmptyResp,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{},
},
expect: assert.False,
expectCause: fault.SkipKnownEventInstance503s,
},
{
name: "nil error",
err: nil,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{
resourceID: {},
},
},
expect: assert.False,
},
{
name: "non-matching resource",
err: graph.ErrServiceUnavailableEmptyResp,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{
"foo": {},
},
},
expect: assert.False,
expectCause: fault.SkipKnownEventInstance503s,
},
{
name: "match on instance 503 empty resp",
err: graph.ErrServiceUnavailableEmptyResp,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{
resourceID: {},
},
},
expect: assert.True,
expectCause: fault.SkipKnownEventInstance503s,
},
{
name: "match on instance 503",
err: clues.New("arbitrary error").
Label(graph.LabelStatus(http.StatusServiceUnavailable)),
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{
resourceID: {},
},
},
expect: assert.True,
expectCause: fault.SkipKnownEventInstance503s,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
h := newEventBackupHandler(api.Client{})
cause, result := h.CanSkipItemFailure(
test.err,
resourceID,
test.opts)
test.expect(t, result)
assert.Equal(t, test.expectCause, cause)
})
}
}

View File

@ -26,8 +26,6 @@ type backupHandler interface {
previewIncludeContainers() []string
previewExcludeContainers() []string
NewContainerCache(userID string) (string, graph.ContainerResolver)
canSkipItemFailurer
}
type addedAndRemovedItemGetter interface {
@ -59,14 +57,6 @@ func BackupHandlers(ac api.Client) map[path.CategoryType]backupHandler {
}
}
type canSkipItemFailurer interface {
CanSkipItemFailure(
err error,
resourceID string,
opts control.Options,
) (fault.SkipCause, bool)
}
// ---------------------------------------------------------------------------
// restore
// ---------------------------------------------------------------------------

View File

@ -1,8 +1,6 @@
package exchange
import (
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)
@ -59,11 +57,3 @@ func (h mailBackupHandler) NewContainerCache(
getter: h.ac,
}
}
func (h mailBackupHandler) CanSkipItemFailure(
err error,
resourceID string,
opts control.Options,
) (fault.SkipCause, bool) {
return "", false
}

View File

@ -1,83 +0,0 @@
package exchange
import (
"testing"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type MailBackupHandlerUnitSuite struct {
tester.Suite
}
func TestMailBackupHandlerUnitSuite(t *testing.T) {
suite.Run(t, &MailBackupHandlerUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *MailBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() {
resourceID := uuid.NewString()
table := []struct {
name string
err error
opts control.Options
expect assert.BoolAssertionFunc
expectCause fault.SkipCause
}{
{
name: "no config",
err: assert.AnError,
opts: control.Options{},
expect: assert.False,
},
{
name: "false when map is empty",
err: assert.AnError,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{},
},
expect: assert.False,
},
{
name: "false on nil error",
err: nil,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{
resourceID: {},
},
},
expect: assert.False,
},
{
name: "false even if resource matches",
err: assert.AnError,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{
resourceID: {},
},
},
expect: assert.False,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
h := newMailBackupHandler(api.Client{})
cause, result := h.CanSkipItemFailure(
test.err,
resourceID,
test.opts)
test.expect(t, result)
assert.Equal(t, test.expectCause, cause)
})
}
}

View File

@ -3,7 +3,6 @@ package exchange
import (
"context"
"errors"
"regexp"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
@ -148,8 +147,6 @@ func restoreMail(
msg = setMessageSVEPs(toMessage(msg))
setReplyTos(msg)
attachments := msg.GetAttachments()
// Item.Attachments --> HasAttachments doesn't always have a value populated when deserialized
msg.SetAttachments([]models.Attachmentable{})
@ -232,38 +229,6 @@ func setMessageSVEPs(msg models.Messageable) models.Messageable {
return msg
}
func setReplyTos(msg models.Messageable) {
var (
replyTos = msg.GetReplyTo()
emailAddress models.EmailAddressable
name, address string
sanitizedReplyTos = make([]models.Recipientable, 0)
)
if len(replyTos) == 0 {
return
}
for _, replyTo := range replyTos {
emailAddress = replyTo.GetEmailAddress()
address = ptr.Val(emailAddress.GetAddress())
name = ptr.Val(emailAddress.GetName())
if isValidEmail(address) || isValidDN(address) {
newEmailAddress := models.NewEmailAddress()
newEmailAddress.SetAddress(ptr.To(address))
newEmailAddress.SetName(ptr.To(name))
sanitizedReplyTo := models.NewRecipient()
sanitizedReplyTo.SetEmailAddress(newEmailAddress)
sanitizedReplyTos = append(sanitizedReplyTos, sanitizedReplyTo)
}
}
msg.SetReplyTo(sanitizedReplyTos)
}
func (h mailRestoreHandler) GetItemsInContainerByCollisionKey(
ctx context.Context,
userID, containerID string,
@ -275,24 +240,3 @@ func (h mailRestoreHandler) GetItemsInContainerByCollisionKey(
return m, nil
}
// TODO: relocate to a common place
func isValidEmail(email string) bool {
emailRegex := `^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$`
r := regexp.MustCompile(emailRegex)
return r.MatchString(email)
}
// isValidDN checks if a given string's format matches that of an MSFT Distinguished Name.
// This regular expression matches strings that start with /o=,
// followed by any characters except /,
// then /ou=, followed by any characters except /,
// then /cn=, followed by any characters except /,
// then /cn= followed by a 32-character hexadecimal string followed by - and any additional characters.
func isValidDN(dn string) bool {
dnRegex := `^/o=[^/]+/ou=[^/]+/cn=[^/]+/cn=[a-fA-F0-9]{32}-[a-zA-Z0-9-]+$`
r := regexp.MustCompile(dnRegex)
return r.MatchString(dn)
}
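Together the two validators decide which reply-to entries survive setReplyTos. As a behavioral sketch on the same fixtures the unit tests below use (not additional assertions):

// Sketch only: expected validator results.
isValidEmail("foo@bar.com") // true
isValidEmail("foo.com")     // false: no @domain part
isValidDN(TestDN)           // true: matches the /o=.../ou=.../cn=.../cn=<hex32>-... shape
isValidDN("random string")  // false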

View File

@ -11,7 +11,6 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
@ -25,127 +24,6 @@ import (
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
//nolint:lll
const TestDN = "/o=ExchangeLabs/ou=Exchange Administrative Group (FYDIBOHF23SPDLT)/cn=Recipients/cn=4eca0d46a2324036b0b326dc58cfc802-user"
type RestoreMailUnitSuite struct {
tester.Suite
}
func TestRestoreMailUnitSuite(t *testing.T) {
suite.Run(t, &RestoreMailUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *RestoreMailUnitSuite) TestIsValidEmail() {
table := []struct {
name string
email string
check assert.BoolAssertionFunc
}{
{
name: "valid email",
email: "foo@bar.com",
check: assert.True,
},
{
name: "invalid email, missing domain",
email: "foo.com",
check: assert.False,
},
{
name: "invalid email, random uuid",
email: "12345678-abcd-90ef-88f8-2d95ef12fb66",
check: assert.False,
},
{
name: "empty email",
email: "",
check: assert.False,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
result := isValidEmail(test.email)
test.check(t, result)
})
}
}
func (suite *RestoreMailUnitSuite) TestIsValidDN() {
table := []struct {
name string
dn string
check assert.BoolAssertionFunc
}{
{
name: "valid DN",
dn: TestDN,
check: assert.True,
},
{
name: "invalid DN",
dn: "random string",
check: assert.False,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
result := isValidDN(test.dn)
test.check(t, result)
})
}
}
func (suite *RestoreMailUnitSuite) TestSetReplyTos() {
t := suite.T()
replyTos := make([]models.Recipientable, 0)
emailAddresses := map[string]string{
"foo.bar": "foo@bar.com",
"foo.com": "foo.com",
"empty": "",
"dn": TestDN,
}
validEmailAddresses := map[string]string{
"foo.bar": "foo@bar.com",
"dn": TestDN,
}
for k, v := range emailAddresses {
emailAddress := models.NewEmailAddress()
emailAddress.SetAddress(ptr.To(v))
emailAddress.SetName(ptr.To(k))
replyTo := models.NewRecipient()
replyTo.SetEmailAddress(emailAddress)
replyTos = append(replyTos, replyTo)
}
mailMessage := models.NewMessage()
mailMessage.SetReplyTo(replyTos)
setReplyTos(mailMessage)
sanitizedReplyTos := mailMessage.GetReplyTo()
require.Len(t, sanitizedReplyTos, len(validEmailAddresses))
for _, sanitizedReplyTo := range sanitizedReplyTos {
emailAddress := sanitizedReplyTo.GetEmailAddress()
assert.Contains(t, validEmailAddresses, ptr.Val(emailAddress.GetName()))
assert.Equal(t, validEmailAddresses[ptr.Val(emailAddress.GetName())], ptr.Val(emailAddress.GetAddress()))
}
}
var _ mailRestorer = &mailRestoreMock{}
type mailRestoreMock struct {

View File

@ -6,15 +6,10 @@ import (
"github.com/microsoft/kiota-abstractions-go/serialization"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// ---------------------------------------------------------------------------
// get and serialize item mock
// ---------------------------------------------------------------------------
type ItemGetSerialize struct {
GetData serialization.Parsable
GetCount int
@ -49,23 +44,3 @@ func (m *ItemGetSerialize) Serialize(
func DefaultItemGetSerialize() *ItemGetSerialize {
return &ItemGetSerialize{}
}
// ---------------------------------------------------------------------------
// can skip item failure mock
// ---------------------------------------------------------------------------
type canSkipFailChecker struct {
canSkip bool
}
func (m canSkipFailChecker) CanSkipItemFailure(
err error,
resourceID string,
opts control.Options,
) (fault.SkipCause, bool) {
return fault.SkipCause("testing"), m.canSkip
}
func NeverCanSkipFailChecker() *canSkipFailChecker {
return &canSkipFailChecker{}
}

View File

@ -197,12 +197,7 @@ func (h BackupHandler[T]) AugmentItemInfo(
return h.ItemInfo
}
func (h *BackupHandler[T]) Get(
context.Context,
string,
map[string]string,
bool,
) (*http.Response, error) {
func (h *BackupHandler[T]) Get(context.Context, string, map[string]string) (*http.Response, error) {
c := h.getCall
h.getCall++

View File

@ -420,9 +420,6 @@ func (suite *BackupOpUnitSuite) TestNewBackupOperation_configuredOptionsMatchInp
MaxPages: 46,
Enabled: true,
},
SkipEventsOnInstance503ForResources: map[string]struct{}{
"resource": {},
},
}
t := suite.T()

View File

@ -305,10 +305,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead int
itemsWritten int
nonMetaItemsWritten int
// TODO: Temporary mechanism to skip permissions
// related tests. Remove once we figure out the issue.
skipChecks bool
}{
{
name: "clean incremental, no changes",
@ -357,7 +353,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead: 1, // .data file for newitem
itemsWritten: 3, // .meta for newitem, .dirmeta for parent (.data is not written as it is not updated)
nonMetaItemsWritten: 0, // none because the file is considered cached instead of written.
skipChecks: true,
},
{
name: "remove permission from new file",
@ -377,7 +372,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead: 1, // .data file for newitem
itemsWritten: 3, // .meta for newitem, .dirmeta for parent (.data is not written as it is not updated)
nonMetaItemsWritten: 0, // none because the file is considered cached instead of written.
skipChecks: true,
},
{
name: "add permission to container",
@ -398,7 +392,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead: 0,
itemsWritten: 2, // .dirmeta for collection
nonMetaItemsWritten: 0, // no files updated as update on container
skipChecks: true,
},
{
name: "remove permission from container",
@ -419,7 +412,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead: 0,
itemsWritten: 2, // .dirmeta for collection
nonMetaItemsWritten: 0, // no files updated
skipChecks: true,
},
{
name: "update contents of a file",
@ -749,11 +741,9 @@ func RunIncrementalDriveishBackupTest(
assertReadWrite = assert.LessOrEqual
}
if !test.skipChecks {
assertReadWrite(t, expectWrites, incBO.Results.ItemsWritten, "incremental items written")
assertReadWrite(t, expectNonMetaWrites, incBO.Results.NonMetaItemsWritten, "incremental non-meta items written")
assertReadWrite(t, expectReads, incBO.Results.ItemsRead, "incremental items read")
}
assertReadWrite(t, expectWrites, incBO.Results.ItemsWritten, "incremental items written")
assertReadWrite(t, expectNonMetaWrites, incBO.Results.NonMetaItemsWritten, "incremental non-meta items written")
assertReadWrite(t, expectReads, incBO.Results.ItemsRead, "incremental items read")
assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors")

View File

@ -175,7 +175,7 @@ func runGroupsIncrementalBackupTests(
suite,
opts,
m365.Group.ID,
m365.SecondaryGroup.ID, // more reliable than user
m365.User.ID,
path.GroupsService,
path.LibrariesCategory,
ic,
@ -201,12 +201,8 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel))
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
// we go fix the group mailbox.
// selTD.GroupsBackupConversationScope(sel))
selTD.GroupsBackupChannelScope(sel),
selTD.GroupsBackupConversationScope(sel))
bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
defer bod.Close(t, ctx)
@ -330,12 +326,8 @@ func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9MergeBas
sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel))
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conv backups while
// we go fix the group mailbox.
// selTD.GroupsBackupConversationScope(sel))
selTD.GroupsBackupChannelScope(sel),
selTD.GroupsBackupConversationScope(sel))
RunMergeBaseGroupsUpdate(suite, sel.Selector, false)
}
@ -344,12 +336,8 @@ func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9AssistBa
sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel))
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conv backups while
// we go fix the group mailbox.
// selTD.GroupsBackupConversationScope(sel))
selTD.GroupsBackupChannelScope(sel),
selTD.GroupsBackupConversationScope(sel))
RunDriveAssistBaseGroupsUpdate(suite, sel.Selector, false)
}

View File

@ -27,11 +27,6 @@ type Options struct {
// backup data until the set limits without paying attention to what the other
// had already backed up.
PreviewLimits PreviewItemLimits `json:"previewItemLimits"`
// specifying a resource tuple in this map allows that resource to produce
// a Skip instead of a recoverable error in case of a failure due to 503 when
// retrieving calendar event item data.
SkipEventsOnInstance503ForResources map[string]struct{}
}
// RateLimiter is the set of options applied to any external service facing rate

View File

@ -10,7 +10,6 @@ import (
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
)
func ConsumeExportCollections(
@ -20,10 +19,6 @@ func ConsumeExportCollections(
errs *fault.Bus,
) error {
el := errs.Local()
counted := 0
log := logger.Ctx(ctx).
With("export_location", exportLocation,
"collection_count", len(expColl))
for _, col := range expColl {
if el.Failure() != nil {
@ -34,13 +29,6 @@ func ConsumeExportCollections(
ictx := clues.Add(ctx, "dir_name", folder)
for item := range col.Items(ictx) {
counted++
// Log every 1000 items that are processed
if counted%1000 == 0 {
log.Infow("progress writing export items", "count_items", counted)
}
if item.Error != nil {
el.AddRecoverable(ictx, clues.Wrap(item.Error, "getting item"))
continue
@ -54,8 +42,6 @@ func ConsumeExportCollections(
}
}
log.Infow("completed writing export items", "count_items", counted)
return el.Failure()
}

View File

@ -12,39 +12,34 @@ type AddSkipper interface {
AddSkip(ctx context.Context, s *Skipped)
}
// SkipCause identifies the well-known conditions to Skip an item. It is
// skipCause identifies the well-known conditions to Skip an item. It is
// important that skip cause enumerations do not overlap with general error
// handling. Skips must be well known, well documented, and consistent.
// Transient failures, undocumented or unknown conditions, and arbitrary
// handling should never produce a skipped item. Those cases should get
// handled as normal errors.
type SkipCause string
type skipCause string
const (
// SkipMalware identifies a malware detection case. Files that graph
// api identifies as malware cannot be downloaded or uploaded, and will
// permanently fail any attempts to backup or restore.
SkipMalware SkipCause = "malware_detected"
SkipMalware skipCause = "malware_detected"
// SkipOneNote identifies that a file was skipped because it
// was a OneNote file that remains inaccessible (503 server response)
// regardless of the number of retries.
//nolint:lll
// https://support.microsoft.com/en-us/office/restrictions-and-limitations-in-onedrive-and-sharepoint-64883a5d-228e-48f5-b3d2-eb39e07630fa#onenotenotebooks
SkipOneNote SkipCause = "inaccessible_one_note_file"
SkipOneNote skipCause = "inaccessible_one_note_file"
// SkipInvalidRecipients identifies that an email was skipped because Exchange
// believes it is not valid and fails any attempt to read it.
SkipInvalidRecipients SkipCause = "invalid_recipients_email"
SkipInvalidRecipients skipCause = "invalid_recipients_email"
// SkipCorruptData identifies that an email was skipped because graph reported
// that the email data was corrupt and failed all attempts to read it.
SkipCorruptData SkipCause = "corrupt_data"
// SkipKnownEventInstance503s identifies cases where we have a pre-configured list
// of event IDs where the events are known to fail with a 503 due to there being
// too many instances to retrieve from graph api.
SkipKnownEventInstance503s SkipCause = "known_event_instance_503"
SkipCorruptData skipCause = "corrupt_data"
)
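Each cause feeds the constructors below; a caller-side sketch of recording a malware-detected file against a fault bus, with placeholder IDs:

// Sketch only: producing a skip entry from a well-known cause.
errs.AddSkip(ctx, fault.FileSkip(
	fault.SkipMalware, // cause from the enumeration above
	"drive-id",        // namespace; values here are placeholders
	"item-id",
	"item-name.docx",
	nil))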
var _ print.Printable = &Skipped{}
@ -75,7 +70,7 @@ func (s *Skipped) String() string {
}
// HasCause compares the underlying cause against the parameter.
func (s *Skipped) HasCause(c SkipCause) bool {
func (s *Skipped) HasCause(c skipCause) bool {
if s == nil {
return false
}
@ -110,27 +105,27 @@ func (s Skipped) Values(bool) []string {
}
// ContainerSkip produces a Container-kind Item for tracking skipped items.
func ContainerSkip(cause SkipCause, namespace, id, name string, addtl map[string]any) *Skipped {
func ContainerSkip(cause skipCause, namespace, id, name string, addtl map[string]any) *Skipped {
return itemSkip(ContainerType, cause, namespace, id, name, addtl)
}
// EmailSkip produces a Email-kind Item for tracking skipped items.
func EmailSkip(cause SkipCause, user, id string, addtl map[string]any) *Skipped {
func EmailSkip(cause skipCause, user, id string, addtl map[string]any) *Skipped {
return itemSkip(EmailType, cause, user, id, "", addtl)
}
// FileSkip produces a File-kind Item for tracking skipped items.
func FileSkip(cause SkipCause, namespace, id, name string, addtl map[string]any) *Skipped {
func FileSkip(cause skipCause, namespace, id, name string, addtl map[string]any) *Skipped {
return itemSkip(FileType, cause, namespace, id, name, addtl)
}
// OwnerSkip produces a ResourceOwner-kind Item for tracking skipped items.
func OwnerSkip(cause SkipCause, namespace, id, name string, addtl map[string]any) *Skipped {
func OwnerSkip(cause skipCause, namespace, id, name string, addtl map[string]any) *Skipped {
return itemSkip(ResourceOwnerType, cause, namespace, id, name, addtl)
}
// itemSkip produces a Item of the provided type for tracking skipped items.
func itemSkip(t ItemType, cause SkipCause, namespace, id, name string, addtl map[string]any) *Skipped {
func itemSkip(t ItemType, cause skipCause, namespace, id, name string, addtl map[string]any) *Skipped {
return &Skipped{
Item: Item{
Namespace: namespace,

View File

@ -47,7 +47,7 @@ func (c Access) GetToken(
c.Credentials.AzureClientSecret))
)
resp, err := c.Post(ctx, rawURL, headers, body, false)
resp, err := c.Post(ctx, rawURL, headers, body)
if err != nil {
return clues.Stack(err)
}

View File

@ -63,14 +63,7 @@ func NewClient(
return Client{}, err
}
azureAuth, err := graph.NewAzureAuth(creds)
if err != nil {
return Client{}, clues.Wrap(err, "generating azure authorizer")
}
rqr := graph.NewNoTimeoutHTTPWrapper(
counter,
graph.AuthorizeRequester(azureAuth))
rqr := graph.NewNoTimeoutHTTPWrapper(counter)
if co.DeltaPageSize < 1 || co.DeltaPageSize > maxDeltaPageSize {
co.DeltaPageSize = maxDeltaPageSize
@ -131,7 +124,11 @@ func newLargeItemService(
counter *count.Bus,
) (*graph.Service, error) {
a, err := NewService(creds, counter, graph.NoTimeout())
return a, clues.Wrap(err, "generating no-timeout graph adapter").OrNil()
if err != nil {
return nil, clues.Wrap(err, "generating no-timeout graph adapter")
}
return a, nil
}
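The contrast in this hunk is the clues OrNil idiom versus the explicit nil check: Wrap(...).OrNil() yields nil when the wrapped error is nil, so the two forms behave identically. A sketch, with buildAdapter standing in for NewService:

// Sketch only: the equivalent forms contrasted above.
a, err := buildAdapter()
return a, clues.Wrap(err, "generating adapter").OrNil() // nil when err == nil

// ...is the same as:
a, err := buildAdapter()
if err != nil {
	return nil, clues.Wrap(err, "generating adapter")
}
return a, nil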
type Getter interface {
@ -139,7 +136,6 @@ type Getter interface {
ctx context.Context,
url string,
headers map[string]string,
requireAuth bool,
) (*http.Response, error)
}
@ -148,9 +144,8 @@ func (c Client) Get(
ctx context.Context,
url string,
headers map[string]string,
requireAuth bool,
) (*http.Response, error) {
return c.Requester.Request(ctx, http.MethodGet, url, nil, headers, requireAuth)
return c.Requester.Request(ctx, http.MethodGet, url, nil, headers)
}
// Get performs an ad-hoc get request using its graph.Requester
@ -159,9 +154,8 @@ func (c Client) Post(
url string,
headers map[string]string,
body io.Reader,
requireAuth bool,
) (*http.Response, error) {
return c.Requester.Request(ctx, http.MethodGet, url, body, headers, requireAuth)
return c.Requester.Request(ctx, http.MethodGet, url, body, headers)
}
// ---------------------------------------------------------------------------

View File

@ -32,9 +32,6 @@ func (suite *ConversationsPagerIntgSuite) SetupSuite() {
}
func (suite *ConversationsPagerIntgSuite) TestEnumerateConversations_withThreadsAndPosts() {
// Skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
var (
t = suite.T()
ac = suite.its.ac.Conversations()

View File

@ -1,94 +0,0 @@
package graph
import (
"context"
"net/http"
"net/url"
"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
"github.com/alcionai/clues"
abstractions "github.com/microsoft/kiota-abstractions-go"
kauth "github.com/microsoft/kiota-authentication-azure-go"
"github.com/alcionai/corso/src/pkg/account"
)
func GetAuth(tenant, client, secret string) (*kauth.AzureIdentityAuthenticationProvider, error) {
// Client Provider: Uses Secret for access to tenant-level data
cred, err := azidentity.NewClientSecretCredential(tenant, client, secret, nil)
if err != nil {
return nil, clues.Wrap(err, "creating m365 client identity")
}
auth, err := kauth.NewAzureIdentityAuthenticationProviderWithScopes(
cred,
[]string{"https://graph.microsoft.com/.default"})
if err != nil {
return nil, clues.Wrap(err, "creating azure authentication")
}
return auth, nil
}
// ---------------------------------------------------------------------------
// requester authorization
// ---------------------------------------------------------------------------
type authorizer interface {
addAuthToHeaders(
ctx context.Context,
urlStr string,
headers http.Header,
) error
}
// consumed by kiota
type authenticateRequester interface {
AuthenticateRequest(
ctx context.Context,
request *abstractions.RequestInformation,
additionalAuthenticationContext map[string]any,
) error
}
// ---------------------------------------------------------------------------
// Azure Authorizer
// ---------------------------------------------------------------------------
type azureAuth struct {
auth authenticateRequester
}
func NewAzureAuth(creds account.M365Config) (*azureAuth, error) {
auth, err := GetAuth(
creds.AzureTenantID,
creds.AzureClientID,
creds.AzureClientSecret)
return &azureAuth{auth}, clues.Stack(err).OrNil()
}
func (aa azureAuth) addAuthToHeaders(
ctx context.Context,
urlStr string,
headers http.Header,
) error {
requestInfo := abstractions.NewRequestInformation()
uri, err := url.Parse(urlStr)
if err != nil {
return clues.WrapWC(ctx, err, "parsing url").OrNil()
}
requestInfo.SetUri(*uri)
err = aa.auth.AuthenticateRequest(ctx, requestInfo, nil)
for _, k := range requestInfo.Headers.ListKeys() {
for _, v := range requestInfo.Headers.Get(k) {
headers.Add(k, v)
}
}
return clues.WrapWC(ctx, err, "authorizing request").OrNil()
}

View File

@ -240,7 +240,7 @@ func (mw *RateLimiterMiddleware) Intercept(
middlewareIndex int,
req *http.Request,
) (*http.Response, error) {
QueueRequest(getReqCtx(req))
QueueRequest(req.Context())
return pipeline.Next(req, middlewareIndex)
}
@ -339,7 +339,7 @@ func (mw *throttlingMiddleware) Intercept(
middlewareIndex int,
req *http.Request,
) (*http.Response, error) {
err := mw.tf.Block(getReqCtx(req))
err := mw.tf.Block(req.Context())
if err != nil {
return nil, err
}
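Both interceptors follow the same kiota middleware shape: do the side work (queueing, throttling) against the request's context, then delegate to pipeline.Next. A skeleton of that pattern; the Pipeline parameter type is assumed from the kiota-http conventions used here:

// Sketch only: the common Intercept shape shared by the middlewares above.
func (mw *exampleMiddleware) Intercept(
	pipeline khttp.Pipeline,
	middlewareIndex int,
	req *http.Request,
) (*http.Response, error) {
	if err := preflight(req.Context()); err != nil { // e.g. rate-limit or throttle gate
		return nil, err
	}
	return pipeline.Next(req, middlewareIndex)
}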

View File

@ -114,14 +114,7 @@ const (
// ErrServiceUnavailableEmptyResp indicates the remote service returned a 503
// with an empty response body. This can sometimes happen if a request times out
// during processing.
//
// TODO(ashmrtn): Either make a separate error struct for empty responses and
// implement Is() on it or start using tags on errors for the different status
// codes.
var (
ErrServiceUnavailableEmptyResp = clues.New("service unavailable and no returned content")
ErrNotFoundEmptyResp = clues.New("not found and no returned content")
)
var ErrServiceUnavailableEmptyResp = clues.New("service unavailable and no returned content")
// ---------------------------------------------------------------------------
// error categorization
@ -156,8 +149,7 @@ func stackWithCoreErr(ctx context.Context, err error, traceDepth int) error {
labels = append(labels, core.LabelRootCauseUnknown)
}
stacked := stackWithDepth(ctx, err, 1+traceDepth).
Label(LabelStatus(ode.Resp.StatusCode))
stacked := stackWithDepth(ctx, err, 1+traceDepth)
// labeling here because we want the context from stackWithDepth first
for _, label := range labels {
@ -418,14 +410,9 @@ func stackReq(
// then all we get from graph SDK is an error saying "content is empty" which
// isn't particularly useful.
if resp != nil &&
resp.ContentLength == 0 {
switch resp.StatusCode {
case http.StatusServiceUnavailable:
e = clues.Stack(ErrServiceUnavailableEmptyResp, e)
case http.StatusNotFound:
e = clues.Stack(ErrNotFoundEmptyResp, e)
}
resp.ContentLength == 0 &&
resp.StatusCode == http.StatusServiceUnavailable {
e = clues.Stack(ErrServiceUnavailableEmptyResp, e)
}
if e == nil {
@ -701,48 +688,10 @@ func (ode oDataErr) errMessageMatchesAllFilters(err error, fs ...filters.Filter)
// ---------------------------------------------------------------------------
// other helpers
// ---------------------------------------------------------------------------
const (
// JWTQueryParam is a query param embed in graph download URLs which holds
// JWT token.
JWTQueryParam = "tempauth"
// base64 encoded json header. Contains {"alg":"HS256","typ":"JWT"}
//
// Hardcoding this instead of generating it every time on the fly.
// The algorithm doesn't matter as we are not verifying the token.
jwtHeader = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9"
)
func sanitizeToken(rawToken string) string {
segments := strings.Split(rawToken, ".")
// Check if the token has the old format, in which it has 3 segments and
// conforms to jwt spec. Format is seg1.seg2.seg3.
if len(segments) == 3 {
return rawToken
}
// Check if it is a msft proprietary token in which it has 4 segments and
// doesn't meet jwt spec. Format is v1.seg1.seg2.seg3. Return a token which
// meets jwt spec.
//
// In this proprietary token, there is no jwt header segment. Also, the claims
// section is split into first and second segments. The first segment contains the
// `exp` claim that we are interested in.
//
// The second segment contains the rest of the claims, but likely encrypted.
// We don't need it so discard it. The last segment contains the signature which
// we don't care about either, as we are not verifying the token. So append it as is.
//
// It's okay if the sanitized token still doesn't meet jwt spec. It'll fail decoding
// later and we have fallbacks for that.
if len(segments) == 4 && segments[0] == "v1" {
return jwtHeader + "." + segments[1] + "." + segments[3]
}
// If MSFT changes the token format again on us, just return an empty string and let the caller
// handle it as an error.
return ""
}
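Behaviorally the sanitizer is a pure string transform over the three shapes described above (segment values below are fake):

// Sketch only: sanitizeToken behavior per the comments above.
sanitizeToken("aaa.bbb.ccc")    // 3 segments, already jwt-shaped: returned unchanged
sanitizeToken("v1.exp.enc.sig") // 4-segment proprietary form: returns jwtHeader + ".exp.sig"
sanitizeToken("unrecognized")   // unknown shape: returns "" for the caller to treat as an error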
// JWTQueryParam is a query param embed in graph download URLs which holds
// JWT token.
const JWTQueryParam = "tempauth"
// IsURLExpired inspects the jwt token embed in the item download url
// and returns true if it is expired.
@ -753,20 +702,12 @@ func IsURLExpired(
expiredErr error,
err error,
) {
ctx = clues.Add(ctx, "checked_url", urlStr)
// Extract the raw JWT string from the download url.
rawJWT, err := common.GetQueryParamFromURL(urlStr, JWTQueryParam)
if err != nil {
return nil, clues.WrapWC(ctx, err, "jwt query param not found")
}
// Token may have a proprietary format. Try to sanitize it to jwt format.
rawJWT = sanitizeToken(rawJWT)
if len(rawJWT) == 0 {
return nil, clues.WrapWC(ctx, err, "sanitizing jwt")
}
expired, err := jwt.IsJWTExpired(rawJWT)
if err != nil {
return nil, clues.WrapWC(ctx, err, "checking jwt expiry")

View File

@ -36,7 +36,6 @@ type Requester interface {
method, url string,
body io.Reader,
headers map[string]string,
requireAuth bool,
) (*http.Response, error)
}
@ -59,8 +58,12 @@ func NewHTTPWrapper(
transport: defaultTransport(),
},
}
redirect = func(req *http.Request, via []*http.Request) error {
return http.ErrUseLastResponse
}
hc = &http.Client{
Transport: rt,
CheckRedirect: redirect,
Transport: rt,
}
)
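The CheckRedirect hook is the standard library's redirect control point: returning http.ErrUseLastResponse makes the client stop and hand the 3xx response back to the caller instead of following Location. A standalone illustration:

// Sketch only: a client that surfaces redirects instead of following them.
hc := &http.Client{
	CheckRedirect: func(req *http.Request, via []*http.Request) error {
		return http.ErrUseLastResponse // return the 3xx as-is, body unconsumed
	},
}
resp, _ := hc.Get("https://example.test/old-path")
// resp.StatusCode may be 301/302; resp.Header.Get("Location") holds the target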
@ -97,7 +100,6 @@ func (hw httpWrapper) Request(
method, url string,
body io.Reader,
headers map[string]string,
requireAuth bool,
) (*http.Response, error) {
req, err := http.NewRequestWithContext(ctx, method, url, body)
if err != nil {
@ -113,17 +115,6 @@ func (hw httpWrapper) Request(
// See https://learn.microsoft.com/en-us/sharepoint/dev/general-development/how-to-avoid-getting-throttled-or-blocked-in-sharepoint-online#how-to-decorate-your-http-traffic
req.Header.Set("User-Agent", "ISV|Alcion|Corso/"+version.Version)
if requireAuth {
if hw.config.requesterAuth == nil {
return nil, clues.Wrap(err, "http wrapper misconfigured: missing required authorization")
}
err := hw.config.requesterAuth.addAuthToHeaders(ctx, url, req.Header)
if err != nil {
return nil, clues.Wrap(err, "setting request auth headers")
}
}
retriedErrors := []string{}
var e error
@ -146,7 +137,7 @@ func (hw httpWrapper) Request(
resp, err := hw.client.Do(req)
if err == nil {
logResp(ictx, resp, req)
logResp(ictx, resp)
return resp, nil
}

View File

@ -40,10 +40,9 @@ func (suite *HTTPWrapperIntgSuite) TestNewHTTPWrapper() {
resp, err := hw.Request(
ctx,
http.MethodGet,
"https://www.google.com",
"https://www.corsobackup.io",
nil,
nil,
false)
nil)
require.NoError(t, err, clues.ToCore(err))
defer resp.Body.Close()
@ -77,56 +76,6 @@ func (mw *mwForceResp) Intercept(
return mw.resp, mw.err
}
func (suite *HTTPWrapperIntgSuite) TestHTTPWrapper_Request_withAuth() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
a := tconfig.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
azureAuth, err := NewAzureAuth(m365)
require.NoError(t, err, clues.ToCore(err))
hw := NewHTTPWrapper(count.New(), AuthorizeRequester(azureAuth))
// any request that requires authorization will do
resp, err := hw.Request(
ctx,
http.MethodGet,
"https://graph.microsoft.com/v1.0/users",
nil,
nil,
true)
require.NoError(t, err, clues.ToCore(err))
defer resp.Body.Close()
require.NotNil(t, resp)
require.Equal(t, http.StatusOK, resp.StatusCode)
// also validate that non-auth'd endpoints succeed
resp, err = hw.Request(
ctx,
http.MethodGet,
"https://www.google.com",
nil,
nil,
true)
require.NoError(t, err, clues.ToCore(err))
defer resp.Body.Close()
require.NotNil(t, resp)
require.Equal(t, http.StatusOK, resp.StatusCode)
}
// ---------------------------------------------------------------------------
// unit
// ---------------------------------------------------------------------------
type HTTPWrapperUnitSuite struct {
tester.Suite
}
@ -135,25 +84,26 @@ func TestHTTPWrapperUnitSuite(t *testing.T) {
suite.Run(t, &HTTPWrapperUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *HTTPWrapperUnitSuite) TestHTTPWrapper_Request_redirect() {
func (suite *HTTPWrapperUnitSuite) TestNewHTTPWrapper_redirectMiddleware() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
respHdr := http.Header{}
respHdr.Set("Location", "localhost:99999999/smarfs")
url := "https://graph.microsoft.com/fnords/beaux/regard"
hdr := http.Header{}
hdr.Set("Location", "localhost:99999999/smarfs")
toResp := &http.Response{
StatusCode: http.StatusFound,
Header: respHdr,
Header: hdr,
}
mwResp := mwForceResp{
resp: toResp,
alternate: func(req *http.Request) (bool, *http.Response, error) {
if strings.HasSuffix(req.URL.String(), "smarfs") {
assert.Equal(t, req.Header.Get("X-Test-Val"), "should-be-copied-to-redirect")
return true, &http.Response{StatusCode: http.StatusOK}, nil
}
@ -163,22 +113,17 @@ func (suite *HTTPWrapperUnitSuite) TestHTTPWrapper_Request_redirect() {
hw := NewHTTPWrapper(count.New(), appendMiddleware(&mwResp))
resp, err := hw.Request(
ctx,
http.MethodGet,
"https://graph.microsoft.com/fnords/beaux/regard",
nil,
map[string]string{"X-Test-Val": "should-be-copied-to-redirect"},
false)
resp, err := hw.Request(ctx, http.MethodGet, url, nil, nil)
require.NoError(t, err, clues.ToCore(err))
defer resp.Body.Close()
require.NotNil(t, resp)
// require.Equal(t, 1, calledCorrectly, "test server was called with expected path")
require.Equal(t, http.StatusOK, resp.StatusCode)
}
func (suite *HTTPWrapperUnitSuite) TestHTTPWrapper_Request_http2StreamErrorRetries() {
func (suite *HTTPWrapperUnitSuite) TestNewHTTPWrapper_http2StreamErrorRetries() {
var (
url = "https://graph.microsoft.com/fnords/beaux/regard"
streamErr = http2.StreamError{
@ -243,7 +188,7 @@ func (suite *HTTPWrapperUnitSuite) TestHTTPWrapper_Request_http2StreamErrorRetri
// the test middleware.
hw.retryDelay = 0
_, err := hw.Request(ctx, http.MethodGet, url, nil, nil, false)
_, err := hw.Request(ctx, http.MethodGet, url, nil, nil)
require.ErrorAs(t, err, &http2.StreamError{}, clues.ToCore(err))
require.Equal(t, test.expectRetries, tries, "count of retries")
})
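The retriability check this test exercises presumably keys off the http2 stream error; a minimal sketch of such a check, assuming golang.org/x/net/http2 and the standard errors package:

// isRetriableStreamErr reports whether err wraps an http2 stream-level
// INTERNAL_ERROR, the transient "stream error ... received from peer"
// failure that is worth retrying.
func isRetriableStreamErr(err error) bool {
	var streamErr http2.StreamError

	if !errors.As(err, &streamErr) {
		return false
	}

	return streamErr.Code == http2.ErrCodeInternal
}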

View File

@ -5,13 +5,7 @@ import (
"net/http"
"net/http/httputil"
"os"
"strings"
"time"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/jwt"
"github.com/alcionai/corso/src/internal/common/pii"
"github.com/alcionai/corso/src/pkg/logger"
)
@ -31,7 +25,7 @@ func shouldLogRespBody(resp *http.Response) bool {
resp.StatusCode > 399
}
func logResp(ctx context.Context, resp *http.Response, req *http.Request) {
func logResp(ctx context.Context, resp *http.Response) {
var (
log = logger.Ctx(ctx)
respClass = resp.StatusCode / 100
@ -48,25 +42,6 @@ func logResp(ctx context.Context, resp *http.Response, req *http.Request) {
return
}
// Log bearer token iat and exp claims if we hit 401s. This is purely for
// debugging purposes and will be removed in the future.
if resp.StatusCode == http.StatusUnauthorized {
errs := []any{"graph api error: " + resp.Status}
// As per MSFT docs, the token may have a special format and may not always
// validate as a JWT. Hence log token lifetime in a best effort manner only.
iat, exp, err := getTokenLifetime(ctx, req)
if err != nil {
errs = append(errs, " getting token lifetime: ", err)
}
log.With("response", getRespDump(ctx, resp, logBody)).
With("token issued at", iat, "token expires at", exp).
Error(errs...)
return
}
// Log api calls according to api debugging configurations.
switch respClass {
case 2:
@ -94,51 +69,3 @@ func getRespDump(ctx context.Context, resp *http.Response, getBody bool) string
return string(respDump)
}
func getReqCtx(req *http.Request) context.Context {
if req == nil {
return context.Background()
}
var logURL pii.SafeURL
if req.URL != nil {
logURL = LoggableURL(req.URL.String())
}
return clues.AddTraceName(
req.Context(),
"graph-http-middleware",
"method", req.Method,
"url", logURL,
"request_content_len", req.ContentLength)
}
// getTokenLifetime extracts the JWT token embedded in the request and returns
// the token's issue and expiration times. The token is expected to be in the
// "Authorization" header, with a "Bearer " prefix. If the token is not present
// or is malformed, an error is returned.
func getTokenLifetime(
ctx context.Context,
req *http.Request,
) (time.Time, time.Time, error) {
if req == nil {
return time.Time{}, time.Time{}, clues.New("nil request")
}
// Don't throw an error if auth header is absent. This is to prevent
// unnecessary noise in the logs for requests served by the http requestor
// client. These requests may be preauthenticated and may not carry auth headers.
rawToken := req.Header.Get("Authorization")
if len(rawToken) == 0 {
return time.Time{}, time.Time{}, nil
}
// Strip the "Bearer " prefix from the token. This prefix is guaranteed to be
// present as per msft docs. But even if it's not, the jwt lib will handle
// malformed tokens gracefully and return an error.
rawToken = strings.TrimPrefix(rawToken, "Bearer ")
iat, exp, err := jwt.GetJWTLifetime(ctx, rawToken)
return iat, exp, clues.Stack(err).OrNil()
}

View File

@ -125,12 +125,15 @@ func (mw *LoggingMiddleware) Intercept(
}
ctx := clues.Add(
getReqCtx(req),
req.Context(),
"method", req.Method,
"url", LoggableURL(req.URL.String()),
"request_content_len", req.ContentLength,
"resp_status", resp.Status,
"resp_status_code", resp.StatusCode,
"resp_content_len", resp.ContentLength)
logResp(ctx, resp, req)
logResp(ctx, resp)
return resp, err
}
@ -153,7 +156,7 @@ func (mw RetryMiddleware) Intercept(
middlewareIndex int,
req *http.Request,
) (*http.Response, error) {
ctx := getReqCtx(req)
ctx := req.Context()
resp, err := pipeline.Next(req, middlewareIndex)
retriable := IsErrTimeout(err) ||
@ -232,11 +235,7 @@ func (mw RetryMiddleware) retryRequest(
case <-ctx.Done():
// Don't retry if the context is marked as done, it will just error out
// when we attempt to send the retry anyway.
err := clues.StackWC(ctx, ctx.Err())
logger.CtxErr(ctx, err).Info("request context marked done")
return resp, err
return resp, clues.StackWC(ctx, ctx.Err())
case <-timer.C:
}
@ -250,9 +249,7 @@ func (mw RetryMiddleware) retryRequest(
return resp, Wrap(ctx, err, "resetting request body reader")
}
} else {
logger.
Ctx(getReqCtx(req)).
Error("body is not an io.Seeker: unable to reset request body")
logger.Ctx(ctx).Error("body is not an io.Seeker: unable to reset request body")
}
}
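For context, a retry has to rewind the request body before resending it. A sketch of the io.Seeker path referenced above, assuming the transport handed us a seekable body:

// resetBody mirrors the rewind logic: bodies that implement io.Seeker get
// rewound to the start; anything else cannot be safely replayed.
func resetBody(req *http.Request) error {
	seeker, ok := req.Body.(io.Seeker)
	if !ok {
		return errors.New("body is not an io.Seeker: unable to reset request body")
	}

	_, err := seeker.Seek(0, io.SeekStart)

	return err
}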

View File

@ -505,95 +505,3 @@ func (suite *MiddlewareUnitSuite) TestLimiterConsumption() {
})
}
}
const (
// Raw test token valid for 100 years.
rawToken = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9." +
"eyJuYmYiOiIxNjkxODE5NTc5IiwiZXhwIjoiMzk0NTUyOTE3OSIsImVuZHBvaW50dXJsTGVuZ3RoIjoiMTYw" +
"IiwiaXNsb29wYmFjayI6IlRydWUiLCJ2ZXIiOiJoYXNoZWRwcm9vZnRva2VuIiwicm9sZXMiOiJhbGxmaWxl" +
"cy53cml0ZSBhbGxzaXRlcy5mdWxsY29udHJvbCBhbGxwcm9maWxlcy5yZWFkIiwidHQiOiIxIiwiYWxnIjoi" +
"SFMyNTYifQ" +
".signature"
)
// Tests getTokenLifetime
func (suite *MiddlewareUnitSuite) TestGetTokenLifetime() {
table := []struct {
name string
request *http.Request
expectErr assert.ErrorAssertionFunc
}{
{
name: "nil request",
request: nil,
expectErr: assert.Error,
},
// Test that we don't throw an error if auth header is absent.
// This is to prevent unnecessary noise in logs for requestor http client.
{
name: "no authorization header",
request: &http.Request{
Header: http.Header{},
},
expectErr: assert.NoError,
},
{
name: "well formed auth header with token",
request: &http.Request{
Header: http.Header{
"Authorization": []string{"Bearer " + rawToken},
},
},
expectErr: assert.NoError,
},
{
name: "Missing Bearer prefix but valid token",
request: &http.Request{
Header: http.Header{
"Authorization": []string{rawToken},
},
},
expectErr: assert.NoError,
},
{
name: "invalid token",
request: &http.Request{
Header: http.Header{
"Authorization": []string{"Bearer " + "invalid"},
},
},
expectErr: assert.Error,
},
{
name: "valid prefix but empty token",
request: &http.Request{
Header: http.Header{
"Authorization": []string{"Bearer "},
},
},
expectErr: assert.Error,
},
{
name: "Invalid prefix but valid token",
request: &http.Request{
Header: http.Header{
"Authorization": []string{"Bearer" + rawToken},
},
},
expectErr: assert.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
// iat, exp specific tests are in jwt package.
_, _, err := getTokenLifetime(ctx, test.request)
test.expectErr(t, err, clues.ToCore(err))
})
}
}

View File

@ -6,9 +6,11 @@ import (
"net/http"
"time"
"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
"github.com/alcionai/clues"
abstractions "github.com/microsoft/kiota-abstractions-go"
"github.com/microsoft/kiota-abstractions-go/serialization"
kauth "github.com/microsoft/kiota-authentication-azure-go"
khttp "github.com/microsoft/kiota-http-go"
msgraphsdkgo "github.com/microsoftgraph/msgraph-sdk-go"
msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core"
@ -125,6 +127,23 @@ func CreateAdapter(
return wrapAdapter(adpt, cc), nil
}
func GetAuth(tenant string, client string, secret string) (*kauth.AzureIdentityAuthenticationProvider, error) {
// Client Provider: Uses Secret for access to tenant-level data
cred, err := azidentity.NewClientSecretCredential(tenant, client, secret, nil)
if err != nil {
return nil, clues.Wrap(err, "creating m365 client identity")
}
auth, err := kauth.NewAzureIdentityAuthenticationProviderWithScopes(
cred,
[]string{"https://graph.microsoft.com/.default"})
if err != nil {
return nil, clues.Wrap(err, "creating azure authentication")
}
return auth, nil
}
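A usage sketch for GetAuth, assuming the stock msgraph-sdk-go constructors; tenantID, clientID, and clientSecret are placeholders:

// newGraphClient wires the credential-backed auth provider into a graph
// adapter and service client.
func newGraphClient(tenantID, clientID, clientSecret string) (*msgraphsdkgo.GraphServiceClient, error) {
	auth, err := GetAuth(tenantID, clientID, clientSecret)
	if err != nil {
		return nil, err
	}

	adapter, err := msgraphsdkgo.NewGraphRequestAdapter(auth)
	if err != nil {
		return nil, clues.Wrap(err, "creating graph adapter")
	}

	return msgraphsdkgo.NewGraphServiceClient(adapter), nil
}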
// KiotaHTTPClient creates a httpClient with middlewares and timeout configured
// for use in the graph adapter.
//
@ -181,11 +200,6 @@ type clientConfig struct {
maxRetries int
// The minimum delay in seconds between retries
minDelay time.Duration
// requesterAuth sets the authorization step for requester-compliant clients.
// if non-nil, it will ensure calls are authorized before querying.
// does not get consumed by the standard graph client, which already comes
// packaged with an auth protocol.
requesterAuth authorizer
appendMiddleware []khttp.Middleware
}
@ -273,12 +287,6 @@ func MaxConnectionRetries(max int) Option {
}
}
func AuthorizeRequester(a authorizer) Option {
return func(c *clientConfig) {
c.requesterAuth = a
}
}
// ---------------------------------------------------------------------------
// Middleware Control
// ---------------------------------------------------------------------------
@ -434,13 +442,6 @@ func (aw *adapterWrap) Send(
// to limit the scope of this fix.
logger.Ctx(ictx).Debug("invalid request")
events.Inc(events.APICall, "invalidgetrequest")
} else if requestInfo.Method.String() == http.MethodGet && errors.Is(err, ErrNotFoundEmptyResp) {
// We've started seeing 404s with no content being returned for messages,
// message attachments, and events. Attempting to manually fetch the items
// succeeds. Therefore we want to retry these to see if we can work around
// the problem.
logger.Ctx(ictx).Debug("404 with no content")
events.Inc(events.APICall, "notfoundnocontent")
} else {
// exit most errors without retry
break

View File

@ -12,7 +12,6 @@ import (
"time"
"github.com/alcionai/clues"
"github.com/h2non/gock"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/users"
"github.com/stretchr/testify/assert"
@ -27,115 +26,6 @@ import (
graphTD "github.com/alcionai/corso/src/pkg/services/m365/api/graph/testdata"
)
// ---------------------------------------------------------------------------
// Unit tests
// ---------------------------------------------------------------------------
type GraphUnitSuite struct {
tester.Suite
}
func TestGraphUnitSuite(t *testing.T) {
suite.Run(t, &GraphUnitSuite{
Suite: tester.NewUnitSuite(t),
})
}
func (suite *GraphUnitSuite) TestNoRetryPostNoContent404() {
const (
host = "https://graph.microsoft.com"
retries = 3
)
t := suite.T()
ctx, flush := tester.NewContext(t)
t.Cleanup(flush)
a := tconfig.NewFakeM365Account(t)
creds, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
// Run with a single retry since 503 retries are exponential and
// the test will take a long time to run.
service, err := NewGockService(
creds,
count.New(),
MaxRetries(1),
MaxConnectionRetries(retries))
require.NoError(t, err, clues.ToCore(err))
t.Cleanup(gock.Off)
gock.New(host).
Post("/v1.0/users").
Reply(http.StatusNotFound).
BodyString("").
Type("text/plain")
// Only GETs that 404 with no content are retried; this POST should fail
// without a retry, so the endpoint we use doesn't matter.
_, err = service.Client().Users().Post(ctx, models.NewUser(), nil)
assert.ErrorIs(t, err, ErrNotFoundEmptyResp)
assert.False(t, gock.IsPending(), "some requests not seen")
}
func (suite *GraphUnitSuite) TestRetryGetNoContent404() {
const (
host = "https://graph.microsoft.com"
retries = 3
emptyUserList = `{
"@odata.context": "https://graph.microsoft.com/v1.0/$metadata#users",
"value": []
}`
)
t := suite.T()
ctx, flush := tester.NewContext(t)
t.Cleanup(flush)
a := tconfig.NewFakeM365Account(t)
creds, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
// Run with a single retry since 503 retries are exponential and
// the test will take a long time to run.
service, err := NewGockService(
creds,
count.New(),
MaxRetries(1),
MaxConnectionRetries(retries))
require.NoError(t, err, clues.ToCore(err))
t.Cleanup(gock.Off)
gock.New(host).
Get("/v1.0/users").
Times(retries - 1).
Reply(http.StatusNotFound).
BodyString("").
Type("text/plain")
gock.New(host).
Get("/v1.0/users").
Reply(http.StatusOK).
JSON(emptyUserList)
// Since we're retrying all GETs that 404 with no content, the endpoint we
// use doesn't matter.
_, err = service.Client().Users().Get(ctx, nil)
assert.NoError(t, err, clues.ToCore(err))
assert.False(t, gock.IsPending(), "some requests not seen")
}
// ---------------------------------------------------------------------------
// Integration tests
// ---------------------------------------------------------------------------
type GraphIntgSuite struct {
tester.Suite
fakeCredentials account.M365Config

View File

@ -77,8 +77,7 @@ func (iw *largeItemWriter) Write(p []byte) (int, error) {
http.MethodPut,
iw.url,
bytes.NewReader(p),
headers,
false)
headers)
if err != nil {
return 0, clues.Wrap(err, "uploading item").With(
"upload_id", iw.parentID,

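For context: the writer above PUTs each chunk of a large item to an upload-session URL. The Graph upload-session convention expects Content-Length and Content-Range headers per chunk; a sketch of their shape (offset and totalSize are hypothetical variables, not fields shown in this diff):

// Headers for one chunk of an upload-session PUT.
headers := map[string]string{
	"Content-Length": strconv.FormatInt(int64(len(p)), 10),
	"Content-Range": fmt.Sprintf(
		"bytes %d-%d/%d",
		offset,                 // first byte index of this chunk
		offset+int64(len(p))-1, // last byte index of this chunk
		totalSize),             // total item size in bytes
}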
View File

@ -55,8 +55,7 @@ func makeAC(
cli, err := api.NewClient(
creds,
control.DefaultOptions(),
count.New(),
opts...)
count.New())
if err != nil {
return api.Client{}, clues.WrapWC(ctx, err, "constructing api client")
}

View File

@ -76,11 +76,20 @@ func (suite *siteIntegrationSuite) TestSites_GetByID() {
ctx, flush := tester.NewContext(t)
defer flush()
site, err := suite.cli.SiteByID(ctx, suite.m365.Site.ID)
require.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, site.WebURL)
assert.NotEmpty(t, site.ID)
assert.NotEmpty(t, site.OwnerType)
sites, err := suite.cli.Sites(ctx, fault.New(true))
assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, sites)
for _, s := range sites {
suite.Run("site_"+s.ID, func() {
t := suite.T()
site, err := suite.cli.SiteByID(ctx, s.ID)
require.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, site.WebURL)
assert.NotEmpty(t, site.ID)
assert.NotEmpty(t, site.OwnerType)
})
}
}
// ---------------------------------------------------------------------------

View File

@ -21,8 +21,7 @@ application to connect to your *M365 tenant* and transfer data during backup and
## Corso concepts {#corso-concepts}
* **Repository** refers to the storage location where Corso securely and efficiently stores encrypted *backups* of your
*M365 Service*'s data. See [Repositories](../repos) for more information.
*M365 Services* data. See [Repositories](../repos) for more information.
* **Backup** is a copy of a resource of your *M365 Service*'s data to be used for restores in case of deletion, loss,
or corruption of the original data. Corso performs backups incrementally, and each backup only captures data that has
changed between backup iterations.
* **Backup** is a copy of your *M365 Services* data to be used for restores in case of deletion, loss, or corruption of the
original data. Corso performs backups incrementally, and each backup only captures data that has changed between backup iterations.

View File

@ -79,23 +79,12 @@ const config = {
srcDark: 'img/corso_horizontal_logo_white.svg',
},
items: [
{
type: 'doc',
docId: 'quickstart',
position: 'left',
label: 'Quick Start',
},
{
type: 'doc',
docId: 'intro',
position: 'left',
label: 'Docs',
},
{
href: 'https://discord.gg/63DTTSnuhT',
label: 'Community',
position: 'left',
},
{
to: '/blog',
label: 'Blog',
@ -117,12 +106,30 @@ const config = {
},
links: [
{
title: 'Open Source',
title: 'Resources',
items: [
{
label: 'Docs',
to: '/docs/intro',
},
],
},
{
title: 'Community',
items: [
{
label: 'Discord',
href: 'https://discord.gg/63DTTSnuhT',
},
{
label: 'Twitter',
href: 'https://twitter.com/CorsoBackup',
},
],
},
{
title: 'More',
items: [
{
label: 'Blog',
to: '/blog',
@ -131,26 +138,6 @@ const config = {
label: 'GitHub',
href: 'https://github.com/alcionai/corso',
},
{
label: 'Corso Discord',
href: 'https://discord.gg/63DTTSnuhT',
},
],
},
{
title: ' ',
},
{
title: 'Alcion, Powered by Corso',
items: [
{
label: 'Backup as a Service',
href: 'https://www.alcion.ai',
},
{
label: 'Alcion Discord',
href: 'https://www.alcion.ai/discord',
},
],
},
],

website/package-lock.json generated

File diff suppressed because it is too large

View File

@ -15,7 +15,7 @@
},
"dependencies": {
"@docusaurus/core": "3.1.1",
"@docusaurus/plugin-google-gtag": "^3.5.1",
"@docusaurus/plugin-google-gtag": "^3.1.1",
"@docusaurus/preset-classic": "3.1.1",
"@loadable/component": "^5.16.3",
"@mdx-js/react": "^3.0.0",
@ -26,17 +26,17 @@
"feather-icons": "^4.29.1",
"jarallax": "^2.2.0",
"mdx-mermaid": "^2.0.0",
"mermaid": "^10.9.0",
"mermaid": "^10.8.0",
"prism-react-renderer": "^2.1.0",
"react": "^18.2.0",
"react-dom": "^18.3.0",
"sass": "^1.79.1",
"react-dom": "^18.2.0",
"sass": "^1.70.0",
"tiny-slider": "^2.9.4",
"tw-elements": "1.0.0-alpha13",
"wow.js": "^1.2.2"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "3.5.1",
"@docusaurus/module-type-aliases": "3.1.1",
"@iconify/react": "^4.1.1",
"autoprefixer": "^10.4.17",
"postcss": "^8.4.33",

View File

@ -33,7 +33,7 @@ export default function CTA() {
<br /> Microsoft 365 Data!
</h3>
<h6 className="text-white/50 text-lg font-semibold">
Corso (Free and Open Source) or <br/> Alcion (Managed Backup as a Service)
Corso is Free and Open Source
</h6>
</div>
</div>
@ -41,24 +41,15 @@ export default function CTA() {
<div className="mt-8">
<div className="section-title text-md-start">
<p className="text-white/50 max-w-xl mx-auto mb-2">
Whether you want to self-host or use a managed service, we have you covered!
Follow our quick-start guide to start protecting your
business-critical Microsoft 365 data in just a few
minutes.
</p>
<a
href="https://www.alcion.ai/"
className="!text-white !no-underline flex flex-row items-center !hover:text-white"
>
Try Alcion{" "}
<Icon
icon="uim:angle-right-b"
className="align-middle"
/>
</a>
<p></p>
<a
href="docs/quickstart/"
className="!text-white !no-underline flex flex-row items-center !hover:text-white"
>
Corso Quickstart{" "}
Get Started{" "}
<Icon
icon="uim:angle-right-b"
className="align-middle"

View File

@ -34,17 +34,10 @@ export default function Hero() {
<div className="mt-12 !z-10 mb-6 flex flex-col 2xs:flex-row items-center justify-center 2xs:space-y-0 space-y-4 2xs:space-x-4">
<a
href="https://github.com/alcionai/corso/releases" target="_blank"
href="../docs/quickstart/"
className="text-2xl !z-10 !no-underline hover:text-white py-2 px-6 font-bold btn bg-indigo-800 hover:bg-indigo-900 border-indigo-800 hover:border-indigo-900 text-white rounded-md"
>
Download
</a>
<a
href="https://www.alcion.ai/"
className="text-2xl !z-10 !no-underline hover:text-white py-2 px-6 font-bold btn bg-indigo-200 hover:bg-indigo-400 border-indigo-600 hover:border-indigo-800 text-blue rounded-md"
>
Try Alcion (Corso SaaS)
Quick Start
</a>
</div>

View File

@ -213,9 +213,9 @@ export default function KeyLoveFAQ() {
Community
</h3>
<p className="text-slate-400">
The Corso community provides a venue for Microsoft 365 admins to share and
The Corso community provides a venue for M365 admins to share and
learn about the importance of data protection as well as best
practices around Microsoft 365 secure configuration and compliance
practices around M365 secure configuration and compliance
management.
</p>
<ul className="list-none text-slate-400 mt-4">
@ -279,7 +279,8 @@ export default function KeyLoveFAQ() {
</h3>
<p className="text-slate-400">
Corso provides secure data backup that protects customers against
accidental data loss and service provider downtime.
accidental data loss, service provider downtime, and malicious
threats including ransomware attacks.
</p>
<ul className="list-none text-slate-400 mt-4">
<li className="mb-1 flex">
@ -330,7 +331,7 @@ export default function KeyLoveFAQ() {
Robust Backups
</h3>
<p className="text-slate-400">
Corso, purpose-built for Microsoft 365 protection, provides easy-to-use
Corso, purpose-built for M365 protection, provides easy-to-use
comprehensive backup and restore workflows that reduce backup
time, improve time-to-recovery, reduce admin overhead, and replace
unreliable scripts or workarounds.
@ -341,7 +342,7 @@ export default function KeyLoveFAQ() {
className="text-indigo-600 text-xl mr-2"
icon="material-symbols:check-circle-outline"
/>{" "}
Constantly updated Microsoft 365 Graph Data engine
Constantly updated M365 Graph Data engine
</li>
<li className="mb-1 flex">
<Icon
@ -461,7 +462,7 @@ export default function KeyLoveFAQ() {
<div className="md:col-span-6">
<div className="accordion space-y-3" id="accordionExample">
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 !cursor-pointer font-semibold"
id="headingOne"
@ -474,7 +475,7 @@ export default function KeyLoveFAQ() {
aria-expanded="false"
aria-controls="collapseOne"
>
<span>How do I choose between Corso and Alcion, powered by Corso?</span>
<span>What platforms does Corso run on?</span>
</button>
</h2>
<div
@ -485,7 +486,8 @@ export default function KeyLoveFAQ() {
>
<div className="accordion-body p-5">
<p className="text-slate-400 !visible dark:text-gray-400">
Corso is a good fit for basic backup while Alcion is a better fit if you need increased reliability, security, and support.
Corso has both native binaries and container images for
Windows, Linux, and macOS.
</p>
</div>
</div>
@ -503,7 +505,9 @@ export default function KeyLoveFAQ() {
aria-expanded="false"
aria-controls="collapse2"
>
<span>What platforms does Corso run on?</span>
<span>
What Microsoft 365 services can I backup using Corso?
</span>
</button>
</h2>
<div
@ -514,8 +518,8 @@ export default function KeyLoveFAQ() {
>
<div className="accordion-body p-5">
<p className="text-slate-400 !visible dark:text-gray-400">
Corso has both native binaries and container images for
Windows, Linux, and macOS.
Corso currently supports OneDrive, Exchange, SharePoint,
and Teams.
</p>
</div>
</div>
@ -533,9 +537,7 @@ export default function KeyLoveFAQ() {
aria-expanded="false"
aria-controls="collapse3"
>
<span>
What Microsoft 365 services can I backup using Corso?
</span>
<span>What object storage does Corso support?</span>
</button>
</h2>
<div
@ -543,36 +545,6 @@ export default function KeyLoveFAQ() {
className="accordion-collapse collapse"
aria-labelledby="heading3"
data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">
<p className="text-slate-400 !visible dark:text-gray-400">
Corso currently supports OneDrive, Exchange, SharePoint,
and Teams.
</p>
</div>
</div>
</div>
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 !cursor-pointer font-semibold"
id="heading4"
>
<button
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
type="button"
data-bs-toggle="collapse"
data-bs-target="#collapse4"
aria-expanded="false"
aria-controls="collapse4"
>
<span>What object storage does Corso support?</span>
</button>
</h2>
<div
id="collapse4"
className="accordion-collapse collapse"
aria-labelledby="heading4"
data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">
<p className="text-slate-400 dark:text-gray-400 !visible">
@ -587,23 +559,23 @@ export default function KeyLoveFAQ() {
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 font-semibold"
id="heading5"
id="heading4"
>
<button
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
type="button"
data-bs-toggle="collapse"
data-bs-target="#collapse5"
data-bs-target="#collapse4"
aria-expanded="false"
aria-controls="collapse5"
aria-controls="collapse4"
>
<span>How can I get help for Corso?</span>
</button>
</h2>
<div
id="collapse5"
id="collapse4"
className="accordion-collapse collapse"
aria-labelledby="heading5"
aria-labelledby="heading4"
data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">
@ -633,23 +605,23 @@ export default function KeyLoveFAQ() {
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 !cursor-pointer font-semibold"
id="heading6"
id="heading5"
>
<button
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
type="button"
data-bs-toggle="collapse"
data-bs-target="#collapse6"
data-bs-target="#collapse5"
aria-expanded="false"
aria-controls="collapse6"
aria-controls="collapse5"
>
<span>What is Corso's open-source license?</span>
</button>
</h2>
<div
id="collapse6"
id="collapse5"
className="accordion-collapse collapse"
aria-labelledby="heading6"
aria-labelledby="heading5"
data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">
@ -663,23 +635,23 @@ export default function KeyLoveFAQ() {
<div className="accordion-item !text-white relative shadow dark:shadow-gray-800 rounded-md overflow-hidden">
<h2
className="accordion-header mb-0 !cursor-pointer font-semibold"
id="heading7"
id="heading6"
>
<button
className="transition accordion-button-custom text-white !text-base !cursor-pointer border-none outline-none collapsed focus:outline-none !bg-transparent flex justify-between items-center p-5 w-full font-bold text-left"
type="button"
data-bs-toggle="collapse"
data-bs-target="#collapse7"
data-bs-target="#collapse6"
aria-expanded="false"
aria-controls="collapse7"
aria-controls="collapse6"
>
<span>How do I request a new feature?</span>
</button>
</h2>
<div
id="collapse7"
id="collapse6"
className="accordion-collapse collapse"
aria-labelledby="heading7"
aria-labelledby="heading6"
data-bs-parent="#accordionExample"
>
<div className="accordion-body p-5">

website/static/img/corso_horizontal_logo.svg Normal file → Executable file

File diff suppressed because one or more lines are too long

Before: 23 KiB · After: 3.5 KiB

website/static/img/corso_horizontal_logo_white.svg Normal file → Executable file
View File

@ -1,95 +1 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 28.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 1920 632.51" style="enable-background:new 0 0 1920 632.51;" xml:space="preserve">
<style type="text/css">
.st0{clip-path:url(#SVGID_00000065045999731516100160000007329899648576828572_);fill:#FFFFFF;}
.st1{fill:#FFFFFF;}
</style>
<g id="Layer_1">
<g>
<g>
<defs>
<rect id="SVGID_1_" y="2.64" width="1920" height="523"/>
</defs>
<clipPath id="SVGID_00000147923114548510084520000017867003880147110077_">
<use xlink:href="#SVGID_1_" style="overflow:visible;"/>
</clipPath>
<path style="clip-path:url(#SVGID_00000147923114548510084520000017867003880147110077_);fill:#FFFFFF;" d="M134.51,132.57
c28.63,0,54.6,7.95,75.81,22.79c11.66,7.95,14.31,23.33,6.36,36.58c-7.42,12.19-25.98,12.73-37.64,5.83
c-12.73-7.42-28.63-12.19-44.53-12.19c-41.35,0-77.93,30.22-77.93,76.34c0,46.12,36.58,75.81,77.93,75.81
c15.91,0,31.81-4.77,44.53-12.19c11.66-6.89,30.22-6.36,37.64,5.83c7.95,13.26,5.3,28.63-6.36,36.58
c-21.21,14.85-47.19,22.8-75.81,22.8C63.47,390.76,2.5,340.39,2.5,261.93C2.5,183.47,63.47,132.57,134.51,132.57"/>
<path style="clip-path:url(#SVGID_00000147923114548510084520000017867003880147110077_);fill:#FFFFFF;" d="M261.22,260.87
c0-78.46,58.85-128.29,128.83-128.29c70.51,0,129.89,49.83,129.89,128.29s-59.37,129.89-129.89,129.89
C320.06,390.76,261.22,339.33,261.22,260.87 M465.86,260.87c0-45.59-34.46-75.28-75.81-75.28c-40.82,0-74.75,29.69-74.75,75.28
c0,46.66,33.93,76.87,74.75,76.87C431.4,337.74,465.86,307.52,465.86,260.87"/>
<path style="clip-path:url(#SVGID_00000147923114548510084520000017867003880147110077_);fill:#FFFFFF;" d="M633.91,296.39v64.15
c0,14.85-12.19,27.57-28.1,27.57c-14.84,0-26.51-12.72-26.51-27.57V162.79c0-14.85,11.67-27.57,26.51-27.57
c15.91,0,28.1,12.72,28.1,27.57v31.81c12.73-44,37.11-62.02,67.86-62.02c7.95,0,15.91,0.53,23.33,2.12
c13.79,3.18,22.8,16.97,19.62,31.28c-4.77,23.86-28.63,18.03-44.53,18.03C653.53,184,633.91,230.65,633.91,296.39"/>
<path style="clip-path:url(#SVGID_00000147923114548510084520000017867003880147110077_);fill:#FFFFFF;" d="M788.19,304.87
c13.25-5.3,23.33,1.59,27.57,10.6c10.08,19.09,29.16,29.69,53.55,29.69c24.92,0,42.94-11.13,42.94-29.69
c0-15.9-15.38-22.79-33.4-27.03l-33.4-7.95c-52.48-14.32-71.57-42.94-68.39-82.71c3.18-36.58,42.94-65.21,88.53-65.21
c32.87,0,63.09,10.6,79.53,36.58c7.42,12.72,3.71,25.44-4.77,31.81c-9.01,7.42-20.15,6.89-31.81-3.18
c-13.78-12.19-29.69-16.97-42.41-16.97c-13.79,0-29.16,4.77-34.46,13.25c-4.24,6.89-4.77,13.78-2.12,21.21
c3.18,9.54,18.03,14.31,31.28,18.02l38.17,9.54c53.54,13.25,64.68,48.24,64.68,73.16c0,47.71-41.88,74.75-98.61,74.75
c-38.17,0-76.87-20.15-90.13-56.2C770.69,321.31,776.53,309.12,788.19,304.87"/>
<path style="clip-path:url(#SVGID_00000147923114548510084520000017867003880147110077_);fill:#FFFFFF;" d="M1006.61,260.87
c0-78.46,58.85-128.29,128.83-128.29c70.51,0,129.89,49.83,129.89,128.29s-59.37,129.89-129.89,129.89
C1065.46,390.76,1006.61,339.33,1006.61,260.87 M1211.25,260.87c0-45.59-34.46-75.28-75.81-75.28
c-40.82,0-74.75,29.69-74.75,75.28c0,46.66,33.93,76.87,74.75,76.87C1176.79,337.74,1211.25,307.52,1211.25,260.87"/>
<path style="clip-path:url(#SVGID_00000147923114548510084520000017867003880147110077_);fill:#FFFFFF;" d="M1658.37,523.34
c-33.98,0-65.93-13.24-89.96-37.26l-221.94-221.93l221.94-221.93c24.03-24.03,55.98-37.27,89.96-37.27
c33.98,0,65.93,13.24,89.96,37.27l131.98,131.97c49.6,49.61,49.6,130.31,0,179.92l-131.98,131.98
C1724.3,510.1,1692.35,523.34,1658.37,523.34 M1393.9,264.14l198.22,198.22c36.54,36.53,95.97,36.52,132.5,0l131.98-131.98
c36.53-36.53,36.53-95.96,0-132.5L1724.61,65.92c-17.7-17.7-41.22-27.44-66.25-27.44c-25.03,0-48.55,9.75-66.25,27.44
L1393.9,264.14z"/>
<path style="clip-path:url(#SVGID_00000147923114548510084520000017867003880147110077_);fill:#FFFFFF;" d="M1813.99,263.46
l-6.63-41.44c-1.01-7.18-6.37-11.15-14.81-11.15l-49.44-0.14c-7.95,0-13.8-3.97-14.81-11.15l-0.17-1.18
c-1.61-11.39-9.6-20.86-20.58-24.34l-66.42-24.32c-1.66-0.53-3.19-1.38-4.51-2.5c-0.48-0.4-0.8-0.96-0.97-1.57l-12.42-42.67
c-0.12-0.43-0.36-0.83-0.7-1.12c-3.65-3-9.24-1.95-11.5,2.32l-24.5,54.88c-0.89,1.68-1.4,3.55-1.5,5.45l3.01,25.01
c-0.14,2.64-1.08,5.2-2.7,7.3l-48.86,48.59c-5.2,5.2-16.08,16.08-16.08,16.08l136.84,136.85l12.2-50.1
c1.05-5.87,5.91-10.29,11.84-10.77l25.89-2.09c80.88-3.46,81.72-26.8,104.89-63.08c1.35-2.12,2.07-4.58,2.07-7.08
C1814.12,264.67,1814.07,264.06,1813.99,263.46 M1707.54,223.11c-5.96,1.5-22.58,0.54-24.08-5.43
c-1.5-5.95,12.71-14.66,18.66-16.15c5.96-1.5,12,2.12,13.5,8.09C1717.11,215.57,1713.49,221.62,1707.54,223.11"/>
<path style="clip-path:url(#SVGID_00000147923114548510084520000017867003880147110077_);fill:#FFFFFF;" d="M1680.04,156.45
L1667,107.48c-0.14-0.53-0.47-1.01-0.92-1.33c-2.89-2.07-7.06-1.18-8.79,2.09l-16.37,33.9L1680.04,156.45z"/>
<path style="clip-path:url(#SVGID_00000147923114548510084520000017867003880147110077_);fill:#FFFFFF;" d="M1655.16,406.81
l-0.85,3.47c-1.93,7.89-11.75,10.65-17.49,4.9l-123.3-123.3l-11.74-11.74l13.35-13.35l11.74,11.74L1655.16,406.81z"/>
</g>
<g>
<path class="st1" d="M523.8,510.47c5.57-9.64,17.49-14.55,30.17-14.55c24.41,0,44.78,17.77,44.78,46.11
c0,27.78-20.76,45.93-44.97,45.93c-12.11,0-24.41-5.29-29.98-14.74v3.97c0,5.29-4.42,9.83-10.19,9.83
c-5.38,0-9.61-4.54-9.61-9.83v-118.5c0-5.29,4.23-9.83,9.61-9.83c5.77,0,10.19,4.54,10.19,9.83V510.47z M551.48,569.06
c14.99,0,27.1-11.15,27.1-27.4s-12.11-26.84-27.1-26.84c-13.45,0-27.48,9.45-27.48,26.84
C523.99,558.85,536.87,569.06,551.48,569.06z"/>
<path class="st1" d="M645.84,623.3c-2.11,4.91-7.11,7.56-12.3,6.24s-8.07-7.37-6.15-12.28l14.61-35.34l-30.56-72.38
c-2.11-4.91,0.96-10.96,6.15-12.29c5.19-1.32,10.19,1.32,12.3,6.24l22.68,54.81l22.87-54.81c2.11-4.91,7.11-7.56,12.3-6.24
c5.19,1.32,8.26,7.37,6.15,12.29L645.84,623.3z"/>
<path class="st1" d="M828.41,573.4c-5.96,9.64-19.03,14.55-30.17,14.55c-24.22,0-45.55-17.95-45.55-46.11
s21.33-45.93,45.55-45.93c10.76,0,24.02,4.35,30.17,14.36v-3.59c0-5.29,4.23-9.83,9.61-9.83c5.77,0,10.19,4.54,10.19,9.83v70.5
c0,5.29-4.42,9.83-10.19,9.83c-5.38,0-9.61-4.54-9.61-9.83V573.4z M800.55,569.06c14.61,0,27.67-10.02,27.67-27.4
s-14.22-26.84-27.67-26.84c-14.99,0-27.48,10.58-27.48,26.84S785.56,569.06,800.55,569.06z"/>
<path class="st1" d="M894.91,577.18c0,5.29-4.42,9.83-10.19,9.83c-5.38,0-9.61-4.54-9.61-9.83v-118.5c0-5.29,4.23-9.83,9.61-9.83
c5.77,0,10.19,4.54,10.19,9.83V577.18z"/>
<path class="st1" d="M964.67,495.91c10.38,0,19.8,2.83,27.48,8.13c4.23,2.83,5.19,8.32,2.31,13.04
c-2.69,4.35-9.42,4.54-13.65,2.08c-4.61-2.65-10.38-4.35-16.14-4.35c-14.99,0-28.25,10.77-28.25,27.21s13.26,27.03,28.25,27.03
c5.77,0,11.53-1.7,16.14-4.35c4.23-2.46,10.96-2.27,13.65,2.08c2.88,4.72,1.92,10.21-2.31,13.04c-7.69,5.29-17.1,8.13-27.48,8.13
c-25.75,0-47.85-17.95-47.85-45.93C916.82,514.06,938.92,495.91,964.67,495.91z"/>
<path class="st1" d="M1026.55,449.8c7.3,0,13.07,5.29,13.07,12.28c0,6.99-5.77,12.29-13.07,12.29c-7.11,0-13.26-5.29-13.26-12.29
C1013.29,455.09,1019.44,449.8,1026.55,449.8z M1036.55,506.69c0-5.29-4.42-9.83-10.19-9.83c-5.38,0-9.61,4.54-9.61,9.83v70.5
c0,5.29,4.23,9.83,9.61,9.83c5.77,0,10.19-4.54,10.19-9.83V506.69z"/>
<path class="st1" d="M1058.07,541.65c0-27.97,21.33-45.74,46.7-45.74c25.56,0,47.08,17.77,47.08,45.74
c0,27.97-21.52,46.3-47.08,46.3C1079.4,587.95,1058.07,569.62,1058.07,541.65z M1132.25,541.65c0-16.25-12.49-26.84-27.48-26.84
c-14.8,0-27.1,10.58-27.1,26.84c0,16.63,12.3,27.4,27.1,27.4C1119.76,569.06,1132.25,558.28,1132.25,541.65z"/>
<path class="st1" d="M1173.38,506.69c0-5.29,4.42-9.83,10.19-9.83c5.38,0,9.61,4.54,9.61,9.83v4.35
c5.19-10.21,17.49-15.12,27.48-15.12c21.72,0,34.21,13.8,34.21,38.74v42.52c0,5.29-4.42,9.83-10.19,9.83
c-5.38,0-9.61-4.54-9.61-9.83v-40.26c0-13.99-7.3-21.92-18.83-21.92c-11.72,0-23.06,6.24-23.06,23.62v38.55
c0,5.29-4.42,9.83-10.19,9.83c-5.38,0-9.61-4.54-9.61-9.83V506.69z"/>
</g>
</g>
</g>
<g id="Layer_2">
</g>
</svg>
<?xml version="1.0" encoding="UTF-8"?><svg id="Layer_1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1920 523"><defs><style>.cls-1{fill:#fff;}</style></defs><g><path class="cls-1" d="M134.51,129.94c28.63,0,54.6,7.95,75.81,22.79,11.67,7.95,14.31,23.33,6.36,36.58-7.42,12.19-25.98,12.73-37.64,5.83-12.73-7.42-28.63-12.19-44.53-12.19-41.35,0-77.93,30.22-77.93,76.34s36.58,75.81,77.93,75.81c15.91,0,31.81-4.77,44.53-12.19,11.66-6.89,30.22-6.36,37.64,5.83,7.95,13.25,5.3,28.63-6.36,36.58-21.21,14.84-47.18,22.8-75.81,22.8C63.47,388.12,2.5,337.76,2.5,259.29S63.47,129.94,134.51,129.94Z"/><path class="cls-1" d="M261.22,258.23c0-78.46,58.85-128.3,128.83-128.3s129.88,49.83,129.88,128.3-59.37,129.89-129.88,129.89-128.83-51.43-128.83-129.89Zm204.64,0c0-45.59-34.46-75.28-75.81-75.28s-74.75,29.69-74.75,75.28,33.93,76.87,74.75,76.87,75.81-30.22,75.81-76.87Z"/><path class="cls-1" d="M633.91,293.75v64.15c0,14.84-12.19,27.57-28.1,27.57-14.84,0-26.51-12.72-26.51-27.57V160.15c0-14.84,11.67-27.57,26.51-27.57,15.91,0,28.1,12.72,28.1,27.57v31.81c12.73-44,37.11-62.03,67.86-62.03,7.95,0,15.91,.53,23.33,2.12,13.79,3.18,22.8,16.97,19.62,31.28-4.77,23.86-28.63,18.03-44.53,18.03-46.65,0-66.27,46.65-66.27,112.39Z"/><path class="cls-1" d="M788.19,302.24c13.25-5.3,23.33,1.59,27.57,10.6,10.08,19.09,29.16,29.69,53.55,29.69s42.94-11.13,42.94-29.69c0-15.9-15.38-22.79-33.4-27.03l-33.4-7.95c-52.48-14.32-71.57-42.94-68.39-82.7,3.18-36.58,42.94-65.21,88.53-65.21,32.87,0,63.09,10.6,79.53,36.58,7.42,12.72,3.71,25.44-4.77,31.81-9.01,7.42-20.15,6.89-31.81-3.18-13.78-12.19-29.69-16.97-42.41-16.97-13.79,0-29.16,4.77-34.46,13.25-4.24,6.89-4.77,13.78-2.12,21.21,3.18,9.54,18.02,14.31,31.28,18.02l38.17,9.54c53.54,13.25,64.68,48.24,64.68,73.16,0,47.71-41.88,74.75-98.61,74.75-38.17,0-76.87-20.15-90.13-56.2-4.24-13.25,1.59-25.44,13.25-29.68Z"/><path class="cls-1" d="M1006.61,258.23c0-78.46,58.85-128.3,128.83-128.3s129.88,49.83,129.88,128.3-59.37,129.89-129.88,129.89-128.83-51.43-128.83-129.89Zm204.64,0c0-45.59-34.46-75.28-75.81-75.28s-74.75,29.69-74.75,75.28,33.93,76.87,74.75,76.87,75.81-30.22,75.81-76.87Z"/></g><path class="cls-1" d="M1658.37,520.7c-33.98,0-65.93-13.23-89.96-37.26l-221.94-221.93,221.94-221.93c24.03-24.03,55.98-37.27,89.96-37.27s65.93,13.23,89.96,37.27l131.98,131.97c49.6,49.61,49.6,130.31,0,179.92l-131.98,131.98c-24.03,24.03-55.98,37.26-89.96,37.26Zm-264.47-259.2l198.22,198.22c36.53,36.53,95.97,36.52,132.5,0l131.98-131.98c36.53-36.53,36.53-95.97,0-132.5l-131.98-131.97c-17.7-17.7-41.22-27.44-66.25-27.44s-48.55,9.75-66.25,27.44l-198.22,198.22Z"/><g><path class="cls-1" d="M1813.99,260.82l-6.63-41.44c-1.01-7.17-6.37-11.15-14.81-11.15l-49.44-.14c-7.95,0-13.8-3.97-14.81-11.15l-.17-1.18c-1.61-11.39-9.61-20.86-20.58-24.34l-66.42-24.32c-1.66-.53-3.19-1.38-4.51-2.5-.48-.41-.8-.96-.97-1.57l-12.42-42.67c-.13-.43-.36-.83-.7-1.12-3.65-3-9.25-1.95-11.5,2.32l-24.5,54.88c-.89,1.68-1.4,3.54-1.5,5.45l3.01,25.01c-.14,2.64-1.08,5.2-2.7,7.3l-48.86,48.59c-5.2,5.2-16.08,16.08-16.08,16.08l136.84,136.85,12.2-50.1c1.05-5.87,5.91-10.29,11.84-10.77l25.89-2.09c80.88-3.46,81.72-26.8,104.9-63.08,1.35-2.12,2.07-4.58,2.07-7.08,0-.6-.04-1.21-.13-1.81Zm-106.45-40.35c-5.96,1.5-22.58,.54-24.08-5.43-1.5-5.95,12.71-14.66,18.66-16.15,5.96-1.5,12,2.12,13.5,8.08,1.49,5.95-2.13,12-8.08,13.49Z"/><path class="cls-1" d="M1680.04,153.81l-13.04-48.97c-.14-.53-.47-1.01-.92-1.33-2.89-2.07-7.06-1.18-8.79,2.09l-16.37,33.9,39.12,14.32Z"/><path class="cls-1" 
d="M1655.16,404.17l-.85,3.47c-1.93,7.9-11.75,10.65-17.49,4.9l-123.3-123.3-11.74-11.74,13.35-13.35,11.74,11.74,128.28,128.28Z"/></g></svg>

Before: 8.2 KiB · After: 3.5 KiB