Compare commits

..

No commits in common. "main" and "v0.19.0" have entirely different histories.

138 changed files with 2317 additions and 10457 deletions

View File

@ -1,5 +1,4 @@
name: Backup Restore Test
description: Run various backup/restore/export tests for a service.
inputs:
service:

View File

@ -1,5 +1,4 @@
name: Setup and Cache Golang
description: Build golang binaries for later use in CI.
# clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml
#

View File

@ -1,5 +1,4 @@
name: Publish Binary
description: Publish binary artifacts.
inputs:
version:

View File

@ -1,5 +1,4 @@
name: Publish Website
description: Publish website artifacts.
inputs:
aws-iam-role:

View File

@ -1,5 +1,4 @@
name: Purge M365 User Data
description: Deletes M365 data generated during CI tests.
# Hard deletion of an m365 user's data. Our CI processes create a lot
# of data churn (creation and immediate deletion) of files, the likes
@ -31,19 +30,12 @@ inputs:
description: Secret value of for AZURE_CLIENT_ID
azure-client-secret:
description: Secret value of for AZURE_CLIENT_SECRET
azure-pnp-client-id:
description: Secret value of AZURE_PNP_CLIENT_ID
azure-pnp-client-cert:
description: Base64 encoded private certificate for the azure-pnp-client-id (Secret value of AZURE_PNP_CLIENT_CERT)
azure-tenant-id:
description: Secret value of AZURE_TENANT_ID
description: Secret value of for AZURE_TENANT_ID
m365-admin-user:
description: Secret value of for M365_TENANT_ADMIN_USER
m365-admin-password:
description: Secret value of for M365_TENANT_ADMIN_PASSWORD
tenant-domain:
description: The domain of the tenant (ex. 10rqc2.onmicrosft.com)
required: true
runs:
using: composite
@ -61,13 +53,7 @@ runs:
AZURE_CLIENT_ID: ${{ inputs.azure-client-id }}
AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
run: ./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
# TODO(ashmrtn): Re-enable when we figure out errors we're seeing with Get-Mailbox call.
#- name: Reset retention for all mailboxes to 0
@ -88,16 +74,10 @@ runs:
shell: pwsh
working-directory: ./src/cmd/purge/scripts
env:
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
################################################################################################################
# Sharepoint
@ -108,14 +88,6 @@ runs:
shell: pwsh
working-directory: ./src/cmd/purge/scripts
env:
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
run: ./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}

View File

@ -1,5 +1,4 @@
name: Send a message to Teams
description: Send messages to communication apps.
inputs:
msg:

View File

@ -1,5 +1,4 @@
name: Lint Website
description: Lint website content.
inputs:
version:

View File

@ -40,5 +40,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Publishing Binary"
msg: "[FAILED] Publishing Binary"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -463,7 +463,7 @@ jobs:
go-version-file: src/go.mod
- name: Go Lint
uses: golangci/golangci-lint-action@v4
uses: golangci/golangci-lint-action@v3
with:
# Keep pinned to a verson as sometimes updates will add new lint
# failures in unchanged code.

View File

@ -12,7 +12,7 @@ jobs:
continue-on-error: true
strategy:
matrix:
user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, ""]
user: [ CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, '' ]
steps:
- uses: actions/checkout@v4
@ -33,15 +33,12 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] ${{ vars[matrix.user] }} CI Cleanup"
msg: "[FAILED] ${{ vars[matrix.user] }} CI Cleanup"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
Test-Site-Data-Cleanup:
@ -50,7 +47,7 @@ jobs:
continue-on-error: true
strategy:
matrix:
site: [CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL]
site: [ CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL ]
steps:
- uses: actions/checkout@v4
@ -73,13 +70,10 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] ${{ vars[matrix.site] }} CI Cleanup"
msg: "[FAILED] ${{ vars[matrix.site] }} CI Cleanup"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -155,6 +155,3 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}

View File

@ -6,7 +6,7 @@ on:
workflow_dispatch:
inputs:
user:
description: "User to run longevity test on"
description: 'User to run longevity test on'
permissions:
# required to retrieve AWS credentials
@ -23,7 +23,7 @@ jobs:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
Longevity-Tests:
needs: [SetM365App]
needs: [ SetM365App ]
environment: Testing
runs-on: ubuntu-latest
env:
@ -37,7 +37,7 @@ jobs:
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-longevity.log
RESTORE_DEST_PFX: Corso_Test_Longevity_
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
PREFIX: "longevity"
PREFIX: 'longevity'
# Options for retention.
RETENTION_MODE: GOVERNANCE
@ -46,7 +46,7 @@ jobs:
defaults:
run:
working-directory: src
############################################################################
# setup
steps:
@ -78,7 +78,7 @@ jobs:
- run: go build -o corso
timeout-minutes: 10
- run: mkdir ${CORSO_LOG_DIR}
# Use shorter-lived credentials obtained from assume-role since these
@ -113,6 +113,7 @@ jobs:
--extend-retention \
--prefix ${{ env.PREFIX }} \
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
--succeed-if-exists \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
if grep -q 'Failed to' ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
@ -163,7 +164,7 @@ jobs:
data=$( echo $resultjson | jq -r '.[0] | .id' )
echo result=$data >> $GITHUB_OUTPUT
##########################################################################
# Onedrive
@ -328,7 +329,7 @@ jobs:
--hide-progress \
--force \
--json \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt
- name: Maintenance test Weekly
id: maintenance-test-weekly
@ -392,5 +393,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Longevity Test"
msg: "[FAILED] Longevity Test"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -48,7 +48,7 @@ jobs:
# ----------------------------------------------------------------------------------------------------
Test-Suite-Trusted:
needs: [Checkout, SetM365App]
needs: [ Checkout, SetM365App]
environment: Testing
runs-on: ubuntu-latest
defaults:
@ -100,9 +100,9 @@ jobs:
-timeout 2h \
./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests
##########################################################################################################################################
##########################################################################################################################################
# Logging & Notifications
# Logging & Notifications
# Upload the original go test output as an artifact for later review.
- name: Upload test log
@ -118,5 +118,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[COROS FAILED] Nightly Checks"
msg: "[FAILED] Nightly Checks"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -6,7 +6,7 @@ on:
workflow_dispatch:
inputs:
user:
description: "User to run sanity test on"
description: 'User to run sanity test on'
permissions:
# required to retrieve AWS credentials
@ -23,7 +23,7 @@ jobs:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
Sanity-Tests:
needs: [SetM365App]
needs: [ SetM365App ]
environment: Testing
runs-on: ubuntu-latest
env:
@ -43,11 +43,12 @@ jobs:
defaults:
run:
working-directory: src
##########################################################################################################################################
##########################################################################################################################################
# setup
# setup
steps:
- uses: actions/checkout@v4
- name: Setup Golang with cache
@ -63,9 +64,9 @@ jobs:
- run: mkdir ${CORSO_LOG_DIR}
##########################################################################################################################################
##########################################################################################################################################
# Pre-Run cleanup
# Pre-Run cleanup
# unlike CI tests, sanity tests are not expected to run concurrently.
# however, the sanity yaml concurrency is set to a maximum of 1 run, preferring
@ -90,9 +91,6 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Purge CI-Produced Folders for Sites
timeout-minutes: 30
@ -101,20 +99,17 @@ jobs:
with:
site: ${{ vars.CORSO_M365_TEST_SITE_URL }}
folder-prefix: ${{ env.RESTORE_DEST_PFX }}
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
older-than: ${{ env.NOW }}
azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
##########################################################################################################################################
##########################################################################################################################################
# Repository commands
# Repository commands
- name: Version Test
timeout-minutes: 10
@ -174,9 +169,9 @@ jobs:
--mode complete \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-maintenance.log
##########################################################################################################################################
##########################################################################################################################################
# Exchange
# Exchange
# generate new entries to roll into the next load test
# only runs if the test was successful
@ -198,8 +193,8 @@ jobs:
service: exchange
kind: first-backup
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@ -211,8 +206,8 @@ jobs:
service: exchange
kind: incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@ -225,8 +220,8 @@ jobs:
service: exchange
kind: non-delta
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@ -239,15 +234,16 @@ jobs:
service: exchange
kind: non-delta-incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
# Onedrive
##########################################################################################################################################
# Onedrive
# generate new entries for test
- name: OneDrive - Create new data
@ -274,8 +270,8 @@ jobs:
service: onedrive
kind: first-backup
backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@ -299,14 +295,14 @@ jobs:
service: onedrive
kind: incremental
backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
##########################################################################################################################################
# Sharepoint Library
# Sharepoint Library
# generate new entries for test
- name: SharePoint - Create new data
@ -334,8 +330,8 @@ jobs:
service: sharepoint
kind: first-backup
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: libraries
@ -361,15 +357,15 @@ jobs:
service: sharepoint
kind: incremental
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: libraries
##########################################################################################################################################
##########################################################################################################################################
# Sharepoint Lists
# Sharepoint Lists
# generate new entries for test
# The `awk | tr | sed` command chain is used to get a comma separated list of SharePoint list names.
@ -422,7 +418,7 @@ jobs:
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint lists \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
@ -458,9 +454,9 @@ jobs:
category: lists
on-collision: copy
##########################################################################################################################################
##########################################################################################################################################
# Groups and Teams
# Groups and Teams
# generate new entries for test
- name: Groups - Create new data
@ -487,8 +483,8 @@ jobs:
with:
service: groups
kind: first-backup
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@ -512,15 +508,15 @@ jobs:
with:
service: groups
kind: incremental
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
restore-args: '--site "${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }}" --folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
##########################################################################################################################################
# Logging & Notifications
# Logging & Notifications
# Upload the original go test output as an artifact for later review.
- name: Upload test log
@ -536,5 +532,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Sanity Tests"
msg: "[FAILED] Sanity Tests"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -6,14 +6,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased] (beta)
### Fixed
- Handle the case where an email or event cannot be retrieved from Exchange due to an `ErrorCorruptData` error. Corso will skip over the item but report it in the backup summary.
- Emails attached within other emails are now correctly exported
- Gracefully handle email and post attachments without name when exporting to eml
- Use correct timezone for event start and end times in Exchange exports (helps fix issues in relative recurrence patterns)
- Fixed an issue causing exports dealing with calendar data to have high memory usage
## [v0.19.0] (beta) - 2024-02-06
### Added
- Events can now be exported from Exchange backups as .ics files.
@ -502,8 +494,7 @@ this case, Corso will skip over the item but report this in the backup summary.
- Miscellaneous
- Optional usage statistics reporting ([RM-35](https://github.com/alcionai/corso-roadmap/issues/35))
[Unreleased]: https://github.com/alcionai/corso/compare/v0.19.0...HEAD
[v0.19.0]: https://github.com/alcionai/corso/compare/v0.18.0...v0.19.0
[Unreleased]: https://github.com/alcionai/corso/compare/v0.18.0...HEAD
[v0.18.0]: https://github.com/alcionai/corso/compare/v0.17.0...v0.18.0
[v0.17.0]: https://github.com/alcionai/corso/compare/v0.16.0...v0.17.0
[v0.16.0]: https://github.com/alcionai/corso/compare/v0.15.0...v0.16.0

View File

@ -1,6 +1,3 @@
> [!NOTE]
> **The Corso project is no longer actively maintained and has been archived**.
<p align="center">
<img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
</p>

View File

@ -18,7 +18,6 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path"
@ -40,7 +39,7 @@ var (
type NoBackupExchangeE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestNoBackupExchangeE2ESuite(t *testing.T) {
@ -55,7 +54,7 @@ func (suite *NoBackupExchangeE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
}
@ -94,7 +93,7 @@ func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_noBackups() {
type BackupExchangeE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestBackupExchangeE2ESuite(t *testing.T) {
@ -109,7 +108,7 @@ func (suite *BackupExchangeE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
}
@ -139,7 +138,7 @@ func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.
cmd, ctx := buildExchangeBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.m365.User.ID,
suite.its.user.ID,
category.String(),
&recorder)
@ -150,11 +149,8 @@ func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.
result := recorder.String()
t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email.
assert.Contains(
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
// as an offhand check: the result should contain the m365 user id
assert.Contains(t, result, suite.its.user.ID)
}
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_ServiceNotEnabled_email() {
@ -177,7 +173,7 @@ func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, categ
cmd, ctx := buildExchangeBackupCmd(
ctx,
suite.dpnd.configFilePath,
fmt.Sprintf("%s,%s", tconfig.UnlicensedM365UserID(suite.T()), suite.m365.User.ID),
fmt.Sprintf("%s,%s", tconfig.UnlicensedM365UserID(suite.T()), suite.its.user.ID),
category.String(),
&recorder)
err := cmd.ExecuteContext(ctx)
@ -186,11 +182,8 @@ func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, categ
result := recorder.String()
t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email.
assert.Contains(
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
// as an offhand check: the result should contain the m365 user id
assert.Contains(t, result, suite.its.user.ID)
}
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_email() {
@ -249,7 +242,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAzureClientIDFl
cmd := cliTD.StubRootCmd(
"backup", "create", "exchange",
"--user", suite.m365.User.ID,
"--user", suite.its.user.ID,
"--azure-client-id", "invalid-value")
cli.BuildCommandTree(cmd)
@ -273,7 +266,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_fromConfigFile() {
cmd := cliTD.StubRootCmd(
"backup", "create", "exchange",
"--user", suite.m365.User.ID,
"--user", suite.its.user.ID,
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
@ -288,11 +281,8 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_fromConfigFile() {
result := suite.dpnd.recorder.String()
t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email.
assert.Contains(
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
// as an offhand check: the result should contain the m365 user id
assert.Contains(t, result, suite.its.user.ID)
}
// AWS flags
@ -306,7 +296,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAWSFlags() {
cmd := cliTD.StubRootCmd(
"backup", "create", "exchange",
"--user", suite.m365.User.ID,
"--user", suite.its.user.ID,
"--aws-access-key", "invalid-value",
"--aws-secret-access-key", "some-invalid-value")
cli.BuildCommandTree(cmd)
@ -329,7 +319,7 @@ type PreparedBackupExchangeE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestPreparedBackupExchangeE2ESuite(t *testing.T) {
@ -346,13 +336,13 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
suite.backupOps = make(map[path.CategoryType]string)
var (
users = []string{suite.m365.User.ID}
ins = idname.NewCache(map[string]string{suite.m365.User.ID: suite.m365.User.ID})
users = []string{suite.its.user.ID}
ins = idname.NewCache(map[string]string{suite.its.user.ID: suite.its.user.ID})
)
for _, set := range []path.CategoryType{email, contacts, events} {

View File

@ -20,7 +20,6 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path"
@ -36,7 +35,7 @@ import (
type NoBackupGroupsE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestNoBackupGroupsE2ESuite(t *testing.T) {
@ -51,7 +50,7 @@ func (suite *NoBackupGroupsE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
}
@ -90,7 +89,7 @@ func (suite *NoBackupGroupsE2ESuite) TestGroupsBackupListCmd_noBackups() {
type BackupGroupsE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestBackupGroupsE2ESuite(t *testing.T) {
@ -105,7 +104,7 @@ func (suite *BackupGroupsE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
}
@ -114,8 +113,6 @@ func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() {
}
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_conversations() {
// skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
runGroupsBackupCategoryTest(suite, flags.DataConversations)
}
@ -137,7 +134,7 @@ func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category string) {
cmd, ctx := buildGroupsBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.m365.Group.ID,
suite.its.group.ID,
category,
&recorder)
@ -205,7 +202,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()
cmd := cliTD.StubRootCmd(
"backup", "create", "groups",
"--group", suite.m365.Group.ID,
"--group", suite.its.group.ID,
"--azure-client-id", "invalid-value")
cli.BuildCommandTree(cmd)
@ -219,9 +216,6 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()
}
func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
// Skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
@ -232,7 +226,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
cmd := cliTD.StubRootCmd(
"backup", "create", "groups",
"--group", suite.m365.Group.ID,
"--group", suite.its.group.ID,
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
@ -256,7 +250,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAWSFlags() {
cmd := cliTD.StubRootCmd(
"backup", "create", "groups",
"--group", suite.m365.Group.ID,
"--group", suite.its.group.ID,
"--aws-access-key", "invalid-value",
"--aws-secret-access-key", "some-invalid-value")
cli.BuildCommandTree(cmd)
@ -279,7 +273,7 @@ type PreparedBackupGroupsE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestPreparedBackupGroupsE2ESuite(t *testing.T) {
@ -296,19 +290,16 @@ func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
suite.backupOps = make(map[path.CategoryType]string)
var (
groups = []string{suite.m365.Group.ID}
ins = idname.NewCache(map[string]string{suite.m365.Group.ID: suite.m365.Group.ID})
groups = []string{suite.its.group.ID}
ins = idname.NewCache(map[string]string{suite.its.group.ID: suite.its.group.ID})
cats = []path.CategoryType{
path.ChannelMessagesCategory,
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
// we go fix the group mailbox.
// path.ConversationPostsCategory,
path.ConversationPostsCategory,
path.LibrariesCategory,
}
)
@ -462,8 +453,6 @@ func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages(
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_conversations() {
// skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
runGroupsDetailsCmdTest(suite, path.ConversationPostsCategory)
}

View File

@ -14,16 +14,141 @@ import (
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/storage"
"github.com/alcionai/corso/src/pkg/storage/testdata"
)
// ---------------------------------------------------------------------------
// Gockable client
// ---------------------------------------------------------------------------
// gockClient produces a new exchange api client whose underlying http
// services can be intercepted and mocked using gock.
func gockClient(creds account.M365Config, counter *count.Bus) (api.Client, error) {
	stable, err := graph.NewGockService(creds, counter)
	if err != nil {
		return api.Client{}, err
	}

	largeItem, err := graph.NewGockService(creds, counter, graph.NoTimeout())
	if err != nil {
		return api.Client{}, err
	}

	return api.Client{
		Credentials: creds,
		Stable:      stable,
		LargeItem:   largeItem,
	}, nil
}
// ---------------------------------------------------------------------------
// Suite Setup
// ---------------------------------------------------------------------------
// ids bundles the identifiers used to address a single m365 resource in
// integration tests.
type ids struct {
	ID                string // resource (user/site/group/team) identifier
	DriveID           string // the resource's default drive; populated for user and site only in the current setup
	DriveRootFolderID string // root folder of the default drive; populated for user and site only in the current setup
}
// intgTesterSetup holds the shared fixtures for m365 integration tests:
// the account under test, a live api client, a gock-mockable client,
// and the ids of the test user, site, group, and team.
type intgTesterSetup struct {
	acct   account.Account
	ac     api.Client
	gockAC api.Client // client whose http layer can be intercepted with gock
	user   ids
	site   ids
	group  ids
	team   ids
}
// newIntegrationTesterSetup initializes the shared fixtures used by the
// m365 integration suites: the test account, a live api client, a
// gock-mockable client, and drive identifiers for the test user, site,
// group, and team. Any setup failure fails the test immediately.
func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
	ctx, flush := tester.NewContext(t)
	defer flush()

	setup := intgTesterSetup{}

	graph.InitializeConcurrencyLimiter(ctx, true, 4)

	setup.acct = tconfig.NewM365Account(t)

	creds, err := setup.acct.M365Config()
	require.NoError(t, err, clues.ToCore(err))

	setup.ac, err = api.NewClient(
		creds,
		control.DefaultOptions(),
		count.New())
	require.NoError(t, err, clues.ToCore(err))

	setup.gockAC, err = gockClient(creds, count.New())
	require.NoError(t, err, clues.ToCore(err))

	// user: id, default drive, and the drive's root folder
	user := ids{ID: tconfig.M365UserID(t)}

	userDrive, err := setup.ac.Users().GetDefaultDrive(ctx, user.ID)
	require.NoError(t, err, clues.ToCore(err))

	user.DriveID = ptr.Val(userDrive.GetId())

	userRoot, err := setup.ac.Drives().GetRootFolder(ctx, user.DriveID)
	require.NoError(t, err, clues.ToCore(err))

	user.DriveRootFolderID = ptr.Val(userRoot.GetId())
	setup.user = user

	// site: id, default drive, and the drive's root folder
	site := ids{ID: tconfig.M365SiteID(t)}

	siteDrive, err := setup.ac.Sites().GetDefaultDrive(ctx, site.ID)
	require.NoError(t, err, clues.ToCore(err))

	site.DriveID = ptr.Val(siteDrive.GetId())

	siteRoot, err := setup.ac.Drives().GetRootFolder(ctx, site.DriveID)
	require.NoError(t, err, clues.ToCore(err))

	site.DriveRootFolderID = ptr.Val(siteRoot.GetId())
	setup.site = site

	// group: use of the TeamID is intentional here, so that we are
	// assured the group has full usage of the teams api.
	setup.group = ids{ID: tconfig.M365TeamID(t)}

	// team
	setup.team = ids{ID: tconfig.M365TeamID(t)}

	return setup
}
type dependencies struct {
st storage.Storage
repo repository.Repositoryer

View File

@ -20,7 +20,6 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/config"
@ -90,7 +89,7 @@ func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() {
type BackupSharepointE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestBackupSharepointE2ESuite(t *testing.T) {
@ -105,7 +104,7 @@ func (suite *BackupSharepointE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
}
@ -129,7 +128,7 @@ func runSharepointBackupCategoryTest(suite *BackupSharepointE2ESuite, category s
cmd, ctx := buildSharepointBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.m365.Site.ID,
suite.its.site.ID,
category,
&recorder)
@ -188,7 +187,7 @@ type PreparedBackupSharepointE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestPreparedBackupSharepointE2ESuite(t *testing.T) {
@ -205,13 +204,13 @@ func (suite *PreparedBackupSharepointE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
suite.backupOps = make(map[path.CategoryType]string)
var (
sites = []string{suite.m365.Site.ID}
ins = idname.NewCache(map[string]string{suite.m365.Site.ID: suite.m365.Site.ID})
sites = []string{suite.its.site.ID}
ins = idname.NewCache(map[string]string{suite.its.site.ID: suite.its.site.ID})
cats = []path.CategoryType{
path.ListsCategory,
}

View File

@ -20,7 +20,6 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path"
@ -36,7 +35,7 @@ import (
type NoBackupTeamsChatsE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestNoBackupTeamsChatsE2ESuite(t *testing.T) {
@ -52,7 +51,7 @@ func (suite *NoBackupTeamsChatsE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
}
@ -91,7 +90,7 @@ func (suite *NoBackupTeamsChatsE2ESuite) TestTeamsChatsBackupListCmd_noBackups()
type BackupTeamsChatsE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestBackupTeamsChatsE2ESuite(t *testing.T) {
@ -107,7 +106,7 @@ func (suite *BackupTeamsChatsE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
}
@ -129,7 +128,7 @@ func runTeamsChatsBackupCategoryTest(suite *BackupTeamsChatsE2ESuite, category s
cmd, ctx := buildTeamsChatsBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.m365.User.ID,
suite.its.user.ID,
category,
&recorder)
@ -189,7 +188,7 @@ func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_badAzureClient
cmd := cliTD.StubRootCmd(
"backup", "create", "chats",
"--teamschat", suite.m365.User.ID,
"--teamschat", suite.its.user.ID,
"--azure-client-id", "invalid-value")
cli.BuildCommandTree(cmd)
@ -213,7 +212,7 @@ func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_fromConfigFile
cmd := cliTD.StubRootCmd(
"backup", "create", "chats",
"--teamschat", suite.m365.User.ID,
"--teamschat", suite.its.user.ID,
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
@ -237,7 +236,7 @@ func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_badAWSFlags()
cmd := cliTD.StubRootCmd(
"backup", "create", "chats",
"--teamschat", suite.m365.User.ID,
"--teamschat", suite.its.user.ID,
"--aws-access-key", "invalid-value",
"--aws-secret-access-key", "some-invalid-value")
cli.BuildCommandTree(cmd)
@ -260,7 +259,7 @@ type PreparedBackupTeamsChatsE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestPreparedBackupTeamsChatsE2ESuite(t *testing.T) {
@ -278,13 +277,13 @@ func (suite *PreparedBackupTeamsChatsE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
suite.backupOps = make(map[path.CategoryType]string)
var (
teamschats = []string{suite.m365.User.ID}
ins = idname.NewCache(map[string]string{suite.m365.User.ID: suite.m365.User.ID})
teamschats = []string{suite.its.user.ID}
ins = idname.NewCache(map[string]string{suite.its.user.ID: suite.its.user.ID})
cats = []path.CategoryType{
path.ChatsCategory,
}

View File

@ -6,6 +6,12 @@ Param (
[Parameter(Mandatory = $False, HelpMessage = "Site for which to delete folders in SharePoint")]
[String]$Site,
[Parameter(Mandatory = $False, HelpMessage = "Exchange Admin email")]
[String]$AdminUser = $ENV:M365_TENANT_ADMIN_USER,
[Parameter(Mandatory = $False, HelpMessage = "Exchange Admin password")]
[String]$AdminPwd = $ENV:M365_TENANT_ADMIN_PASSWORD,
[Parameter(Mandatory = $False, HelpMessage = "Document library root. Can add multiple comma-separated values")]
[String[]]$LibraryNameList = @(),
@ -16,16 +22,7 @@ Param (
[String[]]$FolderPrefixPurgeList,
[Parameter(Mandatory = $False, HelpMessage = "Delete document libraries with this prefix")]
[String[]]$LibraryPrefixDeleteList = @(),
[Parameter(Mandatory = $False, HelpMessage = "Tenant domain")]
[String]$TenantDomain = $ENV:TENANT_DOMAIN,
[Parameter(Mandatory = $False, HelpMessage = "Azure ClientId")]
[String]$ClientId = $ENV:AZURE_CLIENT_ID,
[Parameter(Mandatory = $False, HelpMessage = "Azure AppCert")]
[String]$AppCert = $ENV:AZURE_APP_CERT
[String[]]$LibraryPrefixDeleteList = @()
)
Set-StrictMode -Version 2.0
@ -40,7 +37,7 @@ function Get-TimestampFromFolderName {
$name = $folder.Name
#fallback on folder create time
#fallback on folder create time
[datetime]$timestamp = $folder.TimeCreated
try {
@ -69,7 +66,7 @@ function Get-TimestampFromListName {
$name = $list.Title
#fallback on list create time
#fallback on list create time
[datetime]$timestamp = $list.LastItemUserModifiedDate
try {
@ -109,9 +106,8 @@ function Purge-Library {
Write-Host "`nPurging library: $LibraryName"
$foldersToPurge = @()
$folders = Get-PnPFolderItem -FolderSiteRelativeUrl $LibraryName -ItemType Folder
$folders = Get-PnPFolderItem -FolderSiteRelativeUrl $LibraryName -ItemType Folder
Write-Host "`nFolders: $folders"
foreach ($f in $folders) {
$folderName = $f.Name
$createTime = Get-TimestampFromFolderName -Folder $f
@ -163,7 +159,7 @@ function Delete-LibraryByPrefix {
Write-Host "`nDeleting library: $LibraryNamePrefix"
$listsToDelete = @()
$lists = Get-PnPList
$lists = Get-PnPList
foreach ($l in $lists) {
$listName = $l.Title
@ -187,7 +183,7 @@ function Delete-LibraryByPrefix {
Write-Host "Deleting list: "$l.Title
try {
$listInfo = Get-PnPList -Identity $l.Id | Select-Object -Property Hidden
# Check if the 'hidden' property is true
if ($listInfo.Hidden) {
Write-Host "List: $($l.Title) is hidden. Skipping..."
@ -213,8 +209,8 @@ if (-not (Get-Module -ListAvailable -Name PnP.PowerShell)) {
}
if ([string]::IsNullOrEmpty($ClientId) -or [string]::IsNullOrEmpty($AppCert)) {
Write-Host "ClientId and AppCert required as arguments or environment variables."
if ([string]::IsNullOrEmpty($AdminUser) -or [string]::IsNullOrEmpty($AdminPwd)) {
Write-Host "Admin user name and password required as arguments or environment variables."
Exit
}
@ -255,8 +251,12 @@ else {
Exit
}
$password = convertto-securestring -String $AdminPwd -AsPlainText -Force
$cred = New-Object -TypeName System.Management.Automation.PSCredential -ArgumentList $AdminUser, $password
Write-Host "`nAuthenticating and connecting to $SiteUrl"
Connect-PnPOnline -Url $siteUrl -ClientId $ClientId -CertificateBase64Encoded $AppCert -Tenant $TenantDomain
Connect-PnPOnline -Url $siteUrl -Credential $cred
Write-Host "Connected to $siteUrl`n"
# ensure that there are no unexpanded entries in the list of parameters

View File

@ -5,7 +5,6 @@ import (
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cmd/sanity_test/common"
"github.com/alcionai/corso/src/internal/common/ptr"
@ -21,20 +20,19 @@ const (
// this increases the chance that we'll run into a race collision with
// the cleanup script. Sometimes that's okay (deleting old data that
// isn't scrutinized in the test), other times it's not. We mark whether
// that's okay to do or not by specifying the folders being
// scrutinized for the test. Any errors within those folders should cause
// a fatal exit. Errors outside of those folders get ignored.
// that's okay to do or not by specifying the folder that's being
// scrutinized for the test. Any errors within that folder should cause
// a fatal exit. Errors outside of that folder get ignored.
//
// since we're using folder names, mustPopulateFolders will
// since we're using folder names, requireNoErrorsWithinFolderName will
// work best (ie: have the fewest collisions/side-effects) if the folder
// names are very specific. Standard sanity tests should include timestamps,
// name is very specific. Standard sanity tests should include timestamps,
// which should help ensure that. Be warned if you try to use it with
// a more generic name: unintended effects could occur.
func populateSanitree(
ctx context.Context,
ac api.Client,
driveID string,
mustPopulateFolders []string,
driveID, requireNoErrorsWithinFolderName string,
) *common.Sanitree[models.DriveItemable, models.DriveItemable] {
common.Infof(ctx, "building sanitree for drive: %s", driveID)
@ -58,8 +56,8 @@ func populateSanitree(
ac,
driveID,
stree.Name+"/",
mustPopulateFolders,
slices.Contains(mustPopulateFolders, rootName),
requireNoErrorsWithinFolderName,
rootName == requireNoErrorsWithinFolderName,
stree)
return stree
@ -68,9 +66,7 @@ func populateSanitree(
func recursivelyBuildTree(
ctx context.Context,
ac api.Client,
driveID string,
location string,
mustPopulateFolders []string,
driveID, location, requireNoErrorsWithinFolderName string,
isChildOfFolderRequiringNoErrors bool,
stree *common.Sanitree[models.DriveItemable, models.DriveItemable],
) {
@ -84,9 +80,9 @@ func recursivelyBuildTree(
common.Infof(
ctx,
"ignoring error getting children in directory %q because it is not within directory set %v\nerror: %s\n%+v",
"ignoring error getting children in directory %q because it is not within directory %q\nerror: %s\n%+v",
location,
mustPopulateFolders,
requireNoErrorsWithinFolderName,
err.Error(),
clues.ToCore(err))
@ -103,12 +99,11 @@ func recursivelyBuildTree(
// currently we don't restore blank folders.
// skip permission check for empty folders
if ptr.Val(driveItem.GetFolder().GetChildCount()) == 0 {
common.Infof(ctx, "skipped empty folder: %s%s", location, itemName)
common.Infof(ctx, "skipped empty folder: %s/%s", location, itemName)
continue
}
cannotAllowErrors := isChildOfFolderRequiringNoErrors ||
slices.Contains(mustPopulateFolders, itemName)
cannotAllowErrors := isChildOfFolderRequiringNoErrors || itemName == requireNoErrorsWithinFolderName
branch := &common.Sanitree[models.DriveItemable, models.DriveItemable]{
Parent: stree,
@ -129,7 +124,7 @@ func recursivelyBuildTree(
ac,
driveID,
location+branch.Name+"/",
mustPopulateFolders,
requireNoErrorsWithinFolderName,
cannotAllowErrors,
branch)
}

View File

@ -32,7 +32,7 @@ func CheckExport(
ctx,
ac,
driveID,
[]string{envs.SourceContainer})
envs.RestoreContainer)
sourceTree, ok := root.Children[envs.SourceContainer]
common.Assert(

View File

@ -45,14 +45,7 @@ func CheckRestoration(
"drive_id", driveID,
"drive_name", driveName)
root := populateSanitree(
ctx,
ac,
driveID,
[]string{
envs.SourceContainer,
envs.RestoreContainer,
})
root := populateSanitree(ctx, ac, driveID, envs.RestoreContainer)
sourceTree, ok := root.Children[envs.SourceContainer]
common.Assert(

View File

@ -3,7 +3,7 @@ module github.com/alcionai/corso/src
go 1.21
replace (
github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4
github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe
// Alcion fork removes the validation of email addresses as we might get incomplete email addresses
github.com/xhit/go-simple-mail/v2 v2.16.0 => github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a
@ -121,7 +121,7 @@ require (
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
github.com/microsoft/kiota-serialization-text-go v1.0.0
github.com/minio/md5-simd v1.1.2 // indirect
github.com/minio/minio-go/v7 v7.0.67
github.com/minio/minio-go/v7 v7.0.66
github.com/minio/sha256-simd v1.0.1 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect

View File

@ -23,8 +23,8 @@ github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c h1:QtARFaqYKtGjmEej
github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c/go.mod h1:1YJwJy3W6GGsC2UiDAEWABUjgvT8OZHjKs8OoaXeKbw=
github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a h1:4nhM0NM1qtUT1s55rQ+D0Xw1Re5mIU9/crjEl6KdE+k=
github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a/go.mod h1:b7P5ygho6SYE+VIqpxA6QkYfv4teeyG4MKqB3utRu98=
github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4 h1:3YZ70H3mkUgwiHLiNvukrqh2awRgfl1RAkbV0IoUqqk=
github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4/go.mod h1:QFRSOUQzZfKE3hKVBHP7hxOn5WyrEmdBtfN5wkib/eA=
github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe h1:nLS5pxhm04Jz4+qeipNlxdyPGxqNWpBu8UGkRYpWoIw=
github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe/go.mod h1:QFRSOUQzZfKE3hKVBHP7hxOn5WyrEmdBtfN5wkib/eA=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@ -219,8 +219,8 @@ github.com/microsoftgraph/msgraph-sdk-go-core v1.0.1 h1:uq4qZD8VXLiNZY0t4NoRpLDo
github.com/microsoftgraph/msgraph-sdk-go-core v1.0.1/go.mod h1:HUITyuFN556+0QZ/IVfH5K4FyJM7kllV6ExKi2ImKhE=
github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
github.com/minio/minio-go/v7 v7.0.67 h1:BeBvZWAS+kRJm1vGTMJYVjKUNoo0FoEt/wUWdUtfmh8=
github.com/minio/minio-go/v7 v7.0.67/go.mod h1:+UXocnUeZ3wHvVh5s95gcrA4YjMIbccT6ubB+1m054A=
github.com/minio/minio-go/v7 v7.0.66 h1:bnTOXOHjOqv/gcMuiVbN9o2ngRItvqE774dG9nq0Dzw=
github.com/minio/minio-go/v7 v7.0.66/go.mod h1:DHAgmyQEGdW3Cif0UooKOyrT3Vxs82zNdV6tkKhRtbs=
github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM=
github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=

View File

@ -10,7 +10,6 @@ import (
"github.com/alcionai/corso/src/pkg/dttm"
"github.com/alcionai/corso/src/pkg/export"
"github.com/alcionai/corso/src/pkg/logger"
)
const (
@ -57,22 +56,12 @@ func ZipExportCollection(
defer wr.Close()
buf := make([]byte, ZipCopyBufferSize)
counted := 0
log := logger.Ctx(ctx).
With("collection_count", len(expCollections))
for _, ec := range expCollections {
folder := ec.BasePath()
items := ec.Items(ctx)
for item := range items {
counted++
// Log every 1000 items that are processed
if counted%1000 == 0 {
log.Infow("progress zipping export items", "count_items", counted)
}
err := item.Error
if err != nil {
writer.CloseWithError(clues.Wrap(err, "getting export item").With("id", item.ID))
@ -99,12 +88,8 @@ func ZipExportCollection(
writer.CloseWithError(clues.Wrap(err, "writing zip entry").With("name", name).With("id", item.ID))
return
}
item.Body.Close()
}
}
log.Infow("completed zipping export items", "count_items", counted)
}()
return zipCollection{reader}, nil

View File

@ -1,13 +1,10 @@
package jwt
import (
"context"
"time"
"github.com/alcionai/clues"
jwt "github.com/golang-jwt/jwt/v5"
"github.com/alcionai/corso/src/pkg/logger"
)
// IsJWTExpired checks if the JWT token is past expiry by analyzing the
@ -40,51 +37,3 @@ func IsJWTExpired(
return expired, nil
}
// GetJWTLifetime extracts the issued-at (iat) and expiration (exp)
// claims from the supplied JWT. Both claims are optional per the JWT
// spec, so their absence is not reported as an error; a missing claim
// yields a zero time.Time. Returned times are normalized to UTC to have
// parity with graph responses. An error is returned only when the
// supplied token is malformed or a claim cannot be read.
func GetJWTLifetime(
	ctx context.Context,
	rawToken string,
) (time.Time, time.Time, error) {
	token, _, err := jwt.NewParser().ParseUnverified(rawToken, &jwt.RegisteredClaims{})
	if err != nil {
		logger.CtxErr(ctx, err).Debug("parsing jwt token")
		return time.Time{}, time.Time{}, clues.Wrap(err, "invalid jwt")
	}

	exp, err := token.Claims.GetExpirationTime()
	if err != nil {
		logger.CtxErr(ctx, err).Debug("extracting exp claim")
		return time.Time{}, time.Time{}, clues.Wrap(err, "getting token expiry time")
	}

	iat, err := token.Claims.GetIssuedAt()
	if err != nil {
		logger.CtxErr(ctx, err).Debug("extracting iat claim")
		return time.Time{}, time.Time{}, clues.Wrap(err, "getting token issued at time")
	}

	var issuedAt, expiresAt time.Time

	// Absence of iat or exp is not an error in the jwt library since
	// both claims are optional as per spec; a nil claim simply leaves
	// the corresponding result at its zero value.
	if iat != nil {
		issuedAt = iat.UTC()
	}

	if exp != nil {
		expiresAt = exp.UTC()
	}

	return issuedAt, expiresAt, nil
}

View File

@ -113,134 +113,3 @@ func (suite *JWTUnitSuite) TestIsJWTExpired() {
})
}
}
// TestGetJWTLifetime verifies iat/exp claim extraction for live,
// expired, externally-generated, claim-less, and malformed tokens.
func (suite *JWTUnitSuite) TestGetJWTLifetime() {
	// Set of time values to be used in the tests.
	// Truncate to seconds for comparisons since jwt tokens have second
	// level precision.
	idToTime := map[string]time.Time{
		"T0": time.Now().UTC().Add(-time.Hour).Truncate(time.Second),
		"T1": time.Now().UTC().Truncate(time.Second),
		"T2": time.Now().UTC().Add(time.Hour).Truncate(time.Second),
	}

	table := []struct {
		name       string
		getToken   func() (string, error)
		expectFunc func(t *testing.T, iat time.Time, exp time.Time)
		expectErr  assert.ErrorAssertionFunc
	}{
		{
			name: "alive token",
			getToken: func() (string, error) {
				return createJWTToken(
					jwt.RegisteredClaims{
						IssuedAt:  jwt.NewNumericDate(idToTime["T0"]),
						ExpiresAt: jwt.NewNumericDate(idToTime["T1"]),
					})
			},
			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
				assert.Equal(t, idToTime["T0"], iat)
				assert.Equal(t, idToTime["T1"], exp)
			},
			expectErr: assert.NoError,
		},
		// Test with a token which is not generated using the go-jwt lib.
		// This is a long lived token which is valid for 100 years.
		{
			name: "alive raw token with iat and exp claims",
			getToken: func() (string, error) {
				return rawToken, nil
			},
			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
				assert.Less(t, iat, time.Now(), "iat should be in the past")
				assert.Greater(t, exp, time.Now(), "exp should be in the future")
			},
			expectErr: assert.NoError,
		},
		// Regardless of whether the token is expired or not, we should be able to
		// extract the iat and exp claims from it without error.
		{
			name: "expired token",
			getToken: func() (string, error) {
				return createJWTToken(
					jwt.RegisteredClaims{
						IssuedAt:  jwt.NewNumericDate(idToTime["T1"]),
						ExpiresAt: jwt.NewNumericDate(idToTime["T0"]),
					})
			},
			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
				assert.Equal(t, idToTime["T1"], iat)
				assert.Equal(t, idToTime["T0"], exp)
			},
			expectErr: assert.NoError,
		},
		{
			name: "missing iat claim",
			getToken: func() (string, error) {
				return createJWTToken(
					jwt.RegisteredClaims{
						ExpiresAt: jwt.NewNumericDate(idToTime["T2"]),
					})
			},
			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
				// absent claims come back as zero times, not errors
				assert.Equal(t, time.Time{}, iat)
				assert.Equal(t, idToTime["T2"], exp)
			},
			expectErr: assert.NoError,
		},
		{
			name: "missing exp claim",
			getToken: func() (string, error) {
				return createJWTToken(
					jwt.RegisteredClaims{
						IssuedAt: jwt.NewNumericDate(idToTime["T0"]),
					})
			},
			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
				assert.Equal(t, idToTime["T0"], iat)
				assert.Equal(t, time.Time{}, exp)
			},
			expectErr: assert.NoError,
		},
		{
			name: "both claims missing",
			getToken: func() (string, error) {
				return createJWTToken(jwt.RegisteredClaims{})
			},
			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
				assert.Equal(t, time.Time{}, iat)
				assert.Equal(t, time.Time{}, exp)
			},
			expectErr: assert.NoError,
		},
		{
			name: "malformed token",
			getToken: func() (string, error) {
				return "header.claims.signature", nil
			},
			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
				// on parse failure both times are zero
				assert.Equal(t, time.Time{}, iat)
				assert.Equal(t, time.Time{}, exp)
			},
			expectErr: assert.Error,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			token, err := test.getToken()
			require.NoError(t, err)

			iat, exp, err := GetJWTLifetime(ctx, token)
			test.expectErr(t, err)
			test.expectFunc(t, iat, exp)
		})
	}
}

View File

@ -59,19 +59,6 @@ func First(vs ...string) string {
return ""
}
// FirstIn walks keys in order and returns the stringified value of the
// first key whose entry in m resolves to a non-empty string. When no
// key matches, an empty string is returned.
func FirstIn(m map[string]any, keys ...string) string {
	for _, k := range keys {
		if v, err := AnyValueToString(k, m); err == nil && len(v) > 0 {
			return v
		}
	}

	return ""
}
// Preview reduces the string to the specified size.
// If the string is longer than the size, the last three
// characters are replaced with an ellipsis. Size < 4

View File

@ -118,96 +118,3 @@ func TestGenerateHash(t *testing.T) {
}
}
}
// TestFirstIn exercises FirstIn across nil/empty maps, missing keys,
// nil and empty values, and first/second key matches.
func TestFirstIn(t *testing.T) {
	table := []struct {
		name   string
		m      map[string]any
		keys   []string
		expect string
	}{
		{
			name:   "nil map",
			keys:   []string{"foo", "bar"},
			expect: "",
		},
		{
			name:   "empty map",
			m:      map[string]any{},
			keys:   []string{"foo", "bar"},
			expect: "",
		},
		{
			name: "no match",
			m: map[string]any{
				"baz": "baz",
			},
			keys:   []string{"foo", "bar"},
			expect: "",
		},
		{
			name: "no keys",
			m: map[string]any{
				"baz": "baz",
			},
			keys:   []string{},
			expect: "",
		},
		{
			// nil values are skipped, not stringified
			name: "nil match",
			m: map[string]any{
				"foo": nil,
			},
			keys:   []string{"foo", "bar"},
			expect: "",
		},
		{
			// empty-string values do not count as a match
			name: "empty match",
			m: map[string]any{
				"foo": "",
			},
			keys:   []string{"foo", "bar"},
			expect: "",
		},
		{
			name: "matches first key",
			m: map[string]any{
				"foo": "fnords",
			},
			keys:   []string{"foo", "bar"},
			expect: "fnords",
		},
		{
			name: "matches second key",
			m: map[string]any{
				"bar": "smarf",
			},
			keys:   []string{"foo", "bar"},
			expect: "smarf",
		},
		{
			name: "matches second key with nil first match",
			m: map[string]any{
				"foo": nil,
				"bar": "smarf",
			},
			keys:   []string{"foo", "bar"},
			expect: "smarf",
		},
		{
			name: "matches second key with empty first match",
			m: map[string]any{
				"foo": "",
				"bar": "smarf",
			},
			keys:   []string{"foo", "bar"},
			expect: "smarf",
		},
	}
	for _, test := range table {
		t.Run(test.name, func(t *testing.T) {
			result := FirstIn(test.m, test.keys...)
			assert.Equal(t, test.expect, result)
		})
	}
}

View File

@ -143,121 +143,6 @@ func getICalData(ctx context.Context, data models.Messageable) (string, error) {
return ics.FromEventable(ctx, event)
}
// getFileAttachment converts a graph fileAttachment into a mail.File
// for embedding in an eml. A nil, nil return means the attachment was
// logged and intentionally skipped (no content bytes) rather than
// treated as an error.
func getFileAttachment(ctx context.Context, attachment models.Attachmentable) (*mail.File, error) {
	kind := ptr.Val(attachment.GetContentType())

	bytes, err := attachment.GetBackingStore().Get("contentBytes")
	if err != nil {
		return nil, clues.WrapWC(ctx, err, "failed to get attachment bytes").
			With("kind", kind)
	}

	if bytes == nil {
		// TODO(meain): Handle non file attachments
		// https://github.com/alcionai/corso/issues/4772
		logger.Ctx(ctx).
			With("attachment_id", ptr.Val(attachment.GetId()),
				"attachment_type", ptr.Val(attachment.GetOdataType())).
			Info("no contentBytes for attachment")

		return nil, nil
	}

	bts, ok := bytes.([]byte)
	if !ok {
		// BUG FIX: err is always nil on this path (the Get above
		// succeeded), so wrapping it could mask the failure. Build a
		// fresh error instead.
		return nil, clues.NewWC(ctx, "invalid content bytes").
			With("kind", kind).
			With("interface_type", fmt.Sprintf("%T", bytes))
	}

	name := ptr.Val(attachment.GetName())
	if len(name) == 0 {
		// Graph as of now does not let us create any attachments
		// without a name, but we have run into instances where we see
		// attachments without a name, possibly from old data. This is
		// for those cases.
		name = "Unnamed"
	}

	contentID, err := attachment.GetBackingStore().Get("contentId")
	if err != nil {
		return nil, clues.WrapWC(ctx, err, "getting content id for attachment").
			With("kind", kind)
	}

	if contentID != nil {
		cids, _ := str.AnyToString(contentID)
		if len(cids) > 0 {
			name = cids
		}
	}

	return &mail.File{
		// cannot use filename as inline attachment will not get mapped properly
		Name:     name,
		MimeType: kind,
		Data:     bts,
		Inline:   ptr.Val(attachment.GetIsInline()),
	}, nil
}
// getItemAttachment converts a graph itemAttachment (an attached
// outlook item, e.g. a nested email) into a mail.File. Only message
// items are handled; other item types are logged and skipped by
// returning nil, nil.
func getItemAttachment(ctx context.Context, attachment models.Attachmentable) (*mail.File, error) {
	it, err := attachment.GetBackingStore().Get("item")
	if err != nil {
		return nil, clues.WrapWC(ctx, err, "getting item for attachment").
			With("attachment_id", ptr.Val(attachment.GetId()))
	}

	name := ptr.Val(attachment.GetName())
	if len(name) == 0 {
		// Graph as of now does not let us create any attachments
		// without a name, but we have run into instances where we see
		// attachments without a name, possibly from old data. This is
		// for those cases.
		name = "Unnamed"
	}

	switch it := it.(type) {
	case *models.Message:
		// nested message: render it to eml recursively and attach it
		// with the standard embedded-message mime type.
		cb, err := FromMessageable(ctx, it)
		if err != nil {
			return nil, clues.WrapWC(ctx, err, "converting item attachment to eml").
				With("attachment_id", ptr.Val(attachment.GetId()))
		}

		return &mail.File{
			Name:     name,
			MimeType: "message/rfc822",
			Data:     []byte(cb),
		}, nil
	default:
		logger.Ctx(ctx).
			With("attachment_id", ptr.Val(attachment.GetId()),
				"attachment_type", ptr.Val(attachment.GetOdataType())).
			Info("unknown item attachment type")
	}

	// unknown item types fall through to a silent skip
	return nil, nil
}
// getMailAttachment dispatches an attachment to the matching converter
// based on its odata type. Unrecognized types are logged and skipped
// via a nil, nil return.
func getMailAttachment(ctx context.Context, att models.Attachmentable) (*mail.File, error) {
	odataType := ptr.Val(att.GetOdataType())

	if odataType == "#microsoft.graph.fileAttachment" {
		return getFileAttachment(ctx, att)
	}

	if odataType == "#microsoft.graph.itemAttachment" {
		return getItemAttachment(ctx, att)
	}

	logger.Ctx(ctx).
		With("attachment_id", ptr.Val(att.GetId()),
			"attachment_type", odataType).
		Info("unknown attachment type")

	return nil, nil
}
// FromJSON converts a Messageable (as json) to .eml format
func FromJSON(ctx context.Context, body []byte) (string, error) {
ctx = clues.Add(ctx, "body_len", len(body))
@ -267,11 +152,6 @@ func FromJSON(ctx context.Context, body []byte) (string, error) {
return "", clues.WrapWC(ctx, err, "converting to messageble")
}
return FromMessageable(ctx, data)
}
// Converts a Messageable to .eml format
func FromMessageable(ctx context.Context, data models.Messageable) (string, error) {
ctx = clues.Add(ctx, "item_id", ptr.Val(data.GetId()))
email := mail.NewMSG()
@ -349,16 +229,54 @@ func FromMessageable(ctx context.Context, data models.Messageable) (string, erro
if data.GetAttachments() != nil {
for _, attachment := range data.GetAttachments() {
att, err := getMailAttachment(ctx, attachment)
kind := ptr.Val(attachment.GetContentType())
bytes, err := attachment.GetBackingStore().Get("contentBytes")
if err != nil {
return "", clues.WrapWC(ctx, err, "getting mail attachment")
return "", clues.WrapWC(ctx, err, "failed to get attachment bytes").
With("kind", kind)
}
// There are known cases where we just wanna log and
// ignore instead of erroring out
if att != nil {
email.Attach(att)
if bytes == nil {
// TODO(meain): Handle non file attachments
// https://github.com/alcionai/corso/issues/4772
logger.Ctx(ctx).
With("attachment_id", ptr.Val(attachment.GetId()),
"attachment_type", ptr.Val(attachment.GetOdataType())).
Info("no contentBytes for attachment")
continue
}
bts, ok := bytes.([]byte)
if !ok {
return "", clues.WrapWC(ctx, err, "invalid content bytes").
With("kind", kind).
With("interface_type", fmt.Sprintf("%T", bytes))
}
name := ptr.Val(attachment.GetName())
contentID, err := attachment.GetBackingStore().Get("contentId")
if err != nil {
return "", clues.WrapWC(ctx, err, "getting content id for attachment").
With("kind", kind)
}
if contentID != nil {
cids, _ := str.AnyToString(contentID)
if len(cids) > 0 {
name = cids
}
}
email.Attach(&mail.File{
// cannot use filename as inline attachment will not get mapped properly
Name: name,
MimeType: kind,
Data: bts,
Inline: ptr.Val(attachment.GetIsInline()),
})
}
}
@ -380,7 +298,7 @@ func FromMessageable(ctx context.Context, data models.Messageable) (string, erro
}
}
if err := email.GetError(); err != nil {
if err = email.GetError(); err != nil {
return "", clues.WrapWC(ctx, err, "converting to eml")
}
@ -488,9 +406,6 @@ func FromJSONPostToEML(
}
name := ptr.Val(attachment.GetName())
if len(name) == 0 {
name = "Unnamed"
}
contentID, err := attachment.GetBackingStore().Get("contentId")
if err != nil {

View File

@ -137,11 +137,6 @@ func (suite *EMLUnitSuite) TestConvert_messageble_to_eml() {
}
func (suite *EMLUnitSuite) TestConvert_edge_cases() {
bodies := []string{
testdata.EmailWithAttachments,
testdata.EmailWithinEmail,
}
tests := []struct {
name string
transform func(models.Messageable)
@ -167,75 +162,35 @@ func (suite *EMLUnitSuite) TestConvert_edge_cases() {
require.NoError(suite.T(), err, "setting attachment content")
},
},
{
name: "attachment without name",
transform: func(msg models.Messageable) {
attachments := msg.GetAttachments()
attachments[1].SetName(ptr.To(""))
// This test has to be run on a non inline attachment
// as inline attachments use contentID instead of name
// even when there is a name.
assert.False(suite.T(), ptr.Val(attachments[1].GetIsInline()))
},
},
{
name: "attachment with nil name",
transform: func(msg models.Messageable) {
attachments := msg.GetAttachments()
attachments[1].SetName(nil)
// This test has to be run on a non inline attachment
// as inline attachments use contentID instead of name
// even when there is a name.
assert.False(suite.T(), ptr.Val(attachments[1].GetIsInline()))
},
},
{
name: "multiple attachments without name",
transform: func(msg models.Messageable) {
attachments := msg.GetAttachments()
attachments[1].SetName(ptr.To(""))
attachments[2].SetName(ptr.To(""))
// This test has to be run on a non inline attachment
// as inline attachments use contentID instead of name
// even when there is a name.
assert.False(suite.T(), ptr.Val(attachments[1].GetIsInline()))
assert.False(suite.T(), ptr.Val(attachments[2].GetIsInline()))
},
},
}
for _, b := range bodies {
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
ctx, flush := tester.NewContext(t)
defer flush()
body := []byte(b)
body := []byte(testdata.EmailWithAttachments)
msg, err := api.BytesToMessageable(body)
require.NoError(t, err, "creating message")
msg, err := api.BytesToMessageable(body)
require.NoError(t, err, "creating message")
test.transform(msg)
test.transform(msg)
writer := kjson.NewJsonSerializationWriter()
writer := kjson.NewJsonSerializationWriter()
defer writer.Close()
defer writer.Close()
err = writer.WriteObjectValue("", msg)
require.NoError(t, err, "serializing message")
err = writer.WriteObjectValue("", msg)
require.NoError(t, err, "serializing message")
nbody, err := writer.GetSerializedContent()
require.NoError(t, err, "getting serialized content")
nbody, err := writer.GetSerializedContent()
require.NoError(t, err, "getting serialized content")
_, err = FromJSON(ctx, nbody)
assert.NoError(t, err, "converting to eml")
})
}
_, err = FromJSON(ctx, nbody)
assert.NoError(t, err, "converting to eml")
})
}
}
@ -273,11 +228,11 @@ func (suite *EMLUnitSuite) TestConvert_eml_ics() {
assert.Equal(
t,
msg.GetCreatedDateTime().Format(ics.ICalDateTimeFormatUTC),
msg.GetCreatedDateTime().Format(ics.ICalDateTimeFormat),
event.GetProperty(ical.ComponentPropertyCreated).Value)
assert.Equal(
t,
msg.GetLastModifiedDateTime().Format(ics.ICalDateTimeFormatUTC),
msg.GetLastModifiedDateTime().Format(ics.ICalDateTimeFormat),
event.GetProperty(ical.ComponentPropertyLastModified).Value)
st, err := ics.GetUTCTime(
@ -292,11 +247,11 @@ func (suite *EMLUnitSuite) TestConvert_eml_ics() {
assert.Equal(
t,
st.Format(ics.ICalDateTimeFormatUTC),
st.Format(ics.ICalDateTimeFormat),
event.GetProperty(ical.ComponentPropertyDtStart).Value)
assert.Equal(
t,
et.Format(ics.ICalDateTimeFormatUTC),
et.Format(ics.ICalDateTimeFormat),
event.GetProperty(ical.ComponentPropertyDtEnd).Value)
tos := msg.GetToRecipients()
@ -443,48 +398,3 @@ func (suite *EMLUnitSuite) TestConvert_postable_to_eml() {
assert.Equal(t, source, target)
}
// Tests an ics within an eml within another eml
func (suite *EMLUnitSuite) TestConvert_message_in_messageble_to_eml() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	source := []byte(testdata.EmailWithinEmail)

	out, err := FromJSON(ctx, source)
	assert.NoError(t, err, "converting to eml")

	msg, err := api.BytesToMessageable(source)
	require.NoError(t, err, "creating message")

	outer, err := enmime.ReadEnvelope(strings.NewReader(out))
	require.NoError(t, err, "reading created eml")

	// Outer message headers must survive the round trip.
	assert.Equal(t, ptr.Val(msg.GetSubject()), outer.GetHeader("Subject"))
	assert.Equal(t, msg.GetSentDateTime().Format(time.RFC1123Z), outer.GetHeader("Date"))
	assert.Equal(t, formatAddress(msg.GetFrom().GetEmailAddress()), outer.GetHeader("From"))

	outerAttachments := outer.Attachments
	assert.Equal(t, 3, len(outerAttachments), "attachment count in parent email")

	// The first attachment is itself an eml; parse and verify it too.
	inner, err := enmime.ReadEnvelope(strings.NewReader(string(outerAttachments[0].Content)))
	require.NoError(t, err, "reading created eml")

	item, err := msg.GetAttachments()[0].GetBackingStore().Get("item")
	require.NoError(t, err, "getting item from message")

	innerMsg := item.(*models.Message)

	assert.Equal(t, ptr.Val(innerMsg.GetSubject()), inner.GetHeader("Subject"))
	assert.Equal(t, innerMsg.GetSentDateTime().Format(time.RFC1123Z), inner.GetHeader("Date"))
	assert.Equal(t, formatAddress(innerMsg.GetFrom().GetEmailAddress()), inner.GetHeader("From"))

	innerAttachments := inner.Attachments
	assert.Equal(t, 1, len(innerAttachments), "attachment count in child email")

	// Known from testdata
	assert.Contains(t, string(innerAttachments[0].Content), "X-LIC-LOCATION:Africa/Abidjan")
}

View File

@ -104,19 +104,6 @@
"contentId": null,
"contentLocation": null,
"contentBytes": "W1BhdGhzXQpQcmVmaXggPSAuLgo="
},
{
"@odata.type": "#microsoft.graph.fileAttachment",
"@odata.mediaContentType": "application/octet-stream",
"id": "ZZMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAEwbDEWAAABEgAQAD3rU0iyzCdHgz0xmOrWc9g=",
"lastModifiedDateTime": "2023-11-16T05:42:47Z",
"name": "qt2.conf",
"contentType": "application/octet-stream",
"size": 156,
"isInline": false,
"contentId": null,
"contentLocation": null,
"contentBytes": "Z1BhdGhzXQpQcmVmaXggPSAuLgo="
}
]
}

View File

@ -1,268 +0,0 @@
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAA=",
"@odata.type": "#microsoft.graph.message",
"@odata.context": "https://graph.microsoft.com/v1.0/$metadata#users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages/$entity",
"@odata.etag": "W/\"CQAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFnDeBl\"",
"categories": [],
"changeKey": "CQAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFnDeBl",
"createdDateTime": "2024-02-05T09:33:23Z",
"lastModifiedDateTime": "2024-02-05T09:33:48Z",
"attachments": [
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAABEgAQAEUyH0VS3HJBgHDlZdWZl0k=",
"@odata.type": "#microsoft.graph.itemAttachment",
"item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')",
"item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')/$ref",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Purpose of life",
"size": 11840,
"item": {
"id": "",
"@odata.type": "#microsoft.graph.message",
"createdDateTime": "2024-02-05T09:33:24Z",
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"attachments": [
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAACEgAQAEUyH0VS3HJBgHDlZdWZl0kSABAAjBhd4-oQaUS969pTkS-gzA==",
"@odata.type": "#microsoft.graph.fileAttachment",
"@odata.mediaContentType": "text/calendar",
"contentType": "text/calendar",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Abidjan.ics",
"size": 573,
"contentBytes": "QkVHSU46VkNBTEVOREFSDQpQUk9ESUQ6LS8vdHp1cmwub3JnLy9OT05TR01MIE9sc29uIDIwMjNkLy9FTg0KVkVSU0lPTjoyLjANCkJFR0lOOlZUSU1FWk9ORQ0KVFpJRDpBZnJpY2EvQWJpZGphbg0KTEFTVC1NT0RJRklFRDoyMDIzMTIyMlQyMzMzNThaDQpUWlVSTDpodHRwczovL3d3dy50enVybC5vcmcvem9uZWluZm8vQWZyaWNhL0FiaWRqYW4NClgtTElDLUxPQ0FUSU9OOkFmcmljYS9BYmlkamFuDQpYLVBST0xFUFRJQy1UWk5BTUU6TE1UDQpCRUdJTjpTVEFOREFSRA0KVFpOQU1FOkdNVA0KVFpPRkZTRVRGUk9NOi0wMDE2MDgNClRaT0ZGU0VUVE86KzAwMDANCkRUU1RBUlQ6MTkxMjAxMDFUMDAwMDAwDQpFTkQ6U1RBTkRBUkQNCkVORDpWVElNRVpPTkUNCkVORDpWQ0FMRU5EQVINCg=="
}
],
"body": {
"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none;\"> P {margin-top:0;margin-bottom:0;} </style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family: Aptos, Aptos_EmbeddedFont, Aptos_MSFontService, Calibri, Helvetica, sans-serif; font-size: 12pt; color: rgb(0, 0, 0);\">I just realized the purpose of my life is to be a test case. Good to know.<br></div></body></html>",
"contentType": "html"
},
"bodyPreview": "I just realized the purpose of my life is to be a test case. Good to know.",
"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAFEnxDqYmbJEm8d2l3qfS6A=",
"conversationIndex": "AQHaWBYiUSfEOpiZskSbx3aXep9LoA==",
"flag": {
"flagStatus": "notFlagged"
},
"from": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"hasAttachments": true,
"importance": "normal",
"internetMessageId": "<SJ0PR04MB7294108E381BCCE5C207B6DEBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
"isDeliveryReceiptRequested": false,
"isDraft": false,
"isRead": true,
"isReadReceiptRequested": false,
"receivedDateTime": "2024-02-05T09:33:12Z",
"sender": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"sentDateTime": "2024-02-05T09:33:11Z",
"subject": "Purpose of life",
"toRecipients": [
{
"emailAddress": {
"address": "PradeepG@10rqc2.onmicrosoft.com",
"name": "Pradeep Gupta"
}
}
],
"webLink": "https://outlook.office365.com/owa/?AttachmentItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAABEgAQAEUyH0VS3HJBgHDlZdWZl0k%3D&exvsurl=1&viewmodel=ItemAttachment"
}
},
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAABEgAQAEUyH0VS3HJBgHDlZdWZl02=",
"@odata.type": "#microsoft.graph.itemAttachment",
"item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')",
"item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')/$ref",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Purpose of life part 2",
"size": 11840,
"item": {
"id": "",
"@odata.type": "#microsoft.graph.message",
"createdDateTime": "2024-02-05T09:33:24Z",
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"attachments": [
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAACEgAQAEUyH0VS3HJBgHDlZdWZl0kSABAAjBhd4-oQaUS969pTkS-gzA==",
"@odata.type": "#microsoft.graph.fileAttachment",
"@odata.mediaContentType": "text/calendar",
"contentType": "text/calendar",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Abidjan.ics",
"size": 573,
"contentBytes": "QkVHSU46VkNBTEVOREFSDQpQUk9ESUQ6LS8vdHp1cmwub3JnLy9OT05TR01MIE9sc29uIDIwMjNkLy9FTg0KVkVSU0lPTjoyLjANCkJFR0lOOlZUSU1FWk9ORQ0KVFpJRDpBZnJpY2EvQWJpZGphbg0KTEFTVC1NT0RJRklFRDoyMDIzMTIyMlQyMzMzNThaDQpUWlVSTDpodHRwczovL3d3dy50enVybC5vcmcvem9uZWluZm8vQWZyaWNhL0FiaWRqYW4NClgtTElDLUxPQ0FUSU9OOkFmcmljYS9BYmlkamFuDQpYLVBST0xFUFRJQy1UWk5BTUU6TE1UDQpCRUdJTjpTVEFOREFSRA0KVFpOQU1FOkdNVA0KVFpPRkZTRVRGUk9NOi0wMDE2MDgNClRaT0ZGU0VUVE86KzAwMDANCkRUU1RBUlQ6MTkxMjAxMDFUMDAwMDAwDQpFTkQ6U1RBTkRBUkQNCkVORDpWVElNRVpPTkUNCkVORDpWQ0FMRU5EQVINCg=="
}
],
"body": {
"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none;\"> P {margin-top:0;margin-bottom:0;} </style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family: Aptos, Aptos_EmbeddedFont, Aptos_MSFontService, Calibri, Helvetica, sans-serif; font-size: 12pt; color: rgb(0, 0, 0);\">I just realized the purpose of my life is to be a test case. Good to know.<br></div></body></html>",
"contentType": "html"
},
"bodyPreview": "I just realized the purpose of my life is to be a test case. Good to know.",
"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAFEnxDqYmbJEm8d2l3qfS6A=",
"conversationIndex": "AQHaWBYiUSfEOpiZskSbx3aXep9LoA==",
"flag": {
"flagStatus": "notFlagged"
},
"from": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"hasAttachments": true,
"importance": "normal",
"internetMessageId": "<SJ0PR04MB7294108E381BCCE5C207B6DEBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
"isDeliveryReceiptRequested": false,
"isDraft": false,
"isRead": true,
"isReadReceiptRequested": false,
"receivedDateTime": "2024-02-05T09:33:12Z",
"sender": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"sentDateTime": "2024-02-05T09:33:11Z",
"subject": "Purpose of life",
"toRecipients": [
{
"emailAddress": {
"address": "PradeepG@10rqc2.onmicrosoft.com",
"name": "Pradeep Gupta"
}
}
],
"webLink": "https://outlook.office365.com/owa/?AttachmentItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAABEgAQAEUyH0VS3HJBgHDlZdWZl02%3D&exvsurl=1&viewmodel=ItemAttachment"
}
},
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAABEgAQAEUyH0VS3HJBgHDlZdWZl03=",
"@odata.type": "#microsoft.graph.itemAttachment",
"item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')",
"item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')/$ref",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Purpose of life part 3",
"size": 11840,
"item": {
"id": "",
"@odata.type": "#microsoft.graph.message",
"createdDateTime": "2024-02-05T09:33:24Z",
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"attachments": [
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAACEgAQAEUyH0VS3HJBgHDlZdWZl0kSABAAjBhd4-oQaUS969pTkS-gzA==",
"@odata.type": "#microsoft.graph.fileAttachment",
"@odata.mediaContentType": "text/calendar",
"contentType": "text/calendar",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Abidjan.ics",
"size": 573,
"contentBytes": "QkVHSU46VkNBTEVOREFSDQpQUk9ESUQ6LS8vdHp1cmwub3JnLy9OT05TR01MIE9sc29uIDIwMjNkLy9FTg0KVkVSU0lPTjoyLjANCkJFR0lOOlZUSU1FWk9ORQ0KVFpJRDpBZnJpY2EvQWJpZGphbg0KTEFTVC1NT0RJRklFRDoyMDIzMTIyMlQyMzMzNThaDQpUWlVSTDpodHRwczovL3d3dy50enVybC5vcmcvem9uZWluZm8vQWZyaWNhL0FiaWRqYW4NClgtTElDLUxPQ0FUSU9OOkFmcmljYS9BYmlkamFuDQpYLVBST0xFUFRJQy1UWk5BTUU6TE1UDQpCRUdJTjpTVEFOREFSRA0KVFpOQU1FOkdNVA0KVFpPRkZTRVRGUk9NOi0wMDE2MDgNClRaT0ZGU0VUVE86KzAwMDANCkRUU1RBUlQ6MTkxMjAxMDFUMDAwMDAwDQpFTkQ6U1RBTkRBUkQNCkVORDpWVElNRVpPTkUNCkVORDpWQ0FMRU5EQVINCg=="
}
],
"body": {
"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none;\"> P {margin-top:0;margin-bottom:0;} </style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family: Aptos, Aptos_EmbeddedFont, Aptos_MSFontService, Calibri, Helvetica, sans-serif; font-size: 12pt; color: rgb(0, 0, 0);\">I just realized the purpose of my life is to be a test case. Good to know.<br></div></body></html>",
"contentType": "html"
},
"bodyPreview": "I just realized the purpose of my life is to be a test case. Good to know.",
"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAFEnxDqYmbJEm8d2l3qfS6A=",
"conversationIndex": "AQHaWBYiUSfEOpiZskSbx3aXep9LoA==",
"flag": {
"flagStatus": "notFlagged"
},
"from": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"hasAttachments": true,
"importance": "normal",
"internetMessageId": "<SJ0PR04MB7294108E381BCCE5C207B6DEBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
"isDeliveryReceiptRequested": false,
"isDraft": false,
"isRead": true,
"isReadReceiptRequested": false,
"receivedDateTime": "2024-02-05T09:33:12Z",
"sender": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"sentDateTime": "2024-02-05T09:33:11Z",
"subject": "Purpose of life",
"toRecipients": [
{
"emailAddress": {
"address": "PradeepG@10rqc2.onmicrosoft.com",
"name": "Pradeep Gupta"
}
}
],
"webLink": "https://outlook.office365.com/owa/?AttachmentItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAABEgAQAEUyH0VS3HJBgHDlZdWZl03%3D&exvsurl=1&viewmodel=ItemAttachment"
}
}
],
"bccRecipients": [],
"body": {
"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none\">\r\n<!--\r\np\r\n\t{margin-top:0;\r\n\tmargin-bottom:0}\r\n-->\r\n</style></head><body dir=\"ltr\"><div><span class=\"elementToProof\" style=\"font-family:Aptos,Aptos_EmbeddedFont,Aptos_MSFontService,Calibri,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0)\">Now, this is what we call nesting in this business.<br></span></div></body></html>",
"contentType": "html"
},
"bodyPreview": "Now, this is what we call nesting in this business.",
"ccRecipients": [],
"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAIv2-4RHwDhJhlqBV5PTE3Y=",
"conversationIndex": "AQHaWBZdi/b/hEfAOEmGWoFXk9MTdg==",
"flag": {
"flagStatus": "notFlagged"
},
"from": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"hasAttachments": true,
"importance": "normal",
"inferenceClassification": "focused",
"internetMessageId": "<SJ0PR04MB729409CE8C191E01151C110DBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
"isDeliveryReceiptRequested": false,
"isDraft": false,
"isRead": true,
"isReadReceiptRequested": false,
"parentFolderId": "AQMkAGJiAGZhNjRlOC00OGI5LTQyNTItYjFkMy00NTJjMTgyZGZkMjQALgAAA0V2IruiJ9ZFvgAO6qBJFycBAEEUODQkmTtNjV_awmuHu00AAAIBCQAAAA==",
"receivedDateTime": "2024-02-05T09:33:46Z",
"replyTo": [],
"sender": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"sentDateTime": "2024-02-05T09:33:45Z",
"subject": "Fw: Purpose of life",
"toRecipients": [
{
"emailAddress": {
"address": "PradeepG@10rqc2.onmicrosoft.com",
"name": "Pradeep Gupta"
}
}
],
"webLink": "https://outlook.office365.com/owa/?ItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAA%3D&exvsurl=1&viewmodel=ReadMessageItem"
}

View File

@ -10,6 +10,3 @@ var EmailWithEventInfo string
//go:embed email-with-event-object.json
var EmailWithEventObject string
//go:embed email-within-email.json
var EmailWithinEmail string

View File

@ -166,20 +166,3 @@ var GraphTimeZoneToTZ = map[string]string{
"Yukon Standard Time": "America/Whitehorse",
"tzone://Microsoft/Utc": "Etc/UTC",
}
// CanonicalTimeZoneMap maps alternative IANA time zone names to the
// canonical time zone name used by our embedded tz database.
// These mappings are currently generated by manually going over the
// values in GraphTimeZoneToTZ that are not available in the tzdb.
var CanonicalTimeZoneMap = map[string]string{
	"Africa/Asmara": "Africa/Asmera",
	"Asia/Calcutta": "Asia/Kolkata",
	"Asia/Rangoon": "Asia/Yangon",
	"Asia/Saigon": "Asia/Ho_Chi_Minh",
	"Europe/Kiev": "Europe/Kyiv",
	// NOTE(review): "Europe/Warszawa" is not an IANA tzdb zone name (tzdb
	// uses "Europe/Warsaw") — confirm this target actually resolves.
	"Europe/Warsaw": "Europe/Warszawa",
	"America/Buenos_Aires": "America/Argentina/Buenos_Aires",
	"America/Godthab": "America/Nuuk",
	// NOTE: "Atlantic/Raykjavik" missing in tzdb but is in MS list
	"Etc/UTC": "UTC", // simplifying the time zone name
}

View File

@ -5,7 +5,6 @@ import (
"encoding/base64"
"encoding/json"
"fmt"
"net/mail"
"strings"
"time"
"unicode"
@ -17,7 +16,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/converters/ics/tzdata"
"github.com/alcionai/corso/src/pkg/dttm"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api"
@ -33,9 +31,8 @@ import (
// TODO locations: https://github.com/alcionai/corso/issues/5003
const (
ICalDateTimeFormat = "20060102T150405"
ICalDateTimeFormatUTC = "20060102T150405Z"
ICalDateFormat = "20060102"
ICalDateTimeFormat = "20060102T150405Z"
ICalDateFormat = "20060102"
)
func keyValues(key, value string) *ics.KeyValues {
@ -175,17 +172,6 @@ func getRecurrencePattern(
recurComponents = append(recurComponents, "BYDAY="+prefix+strings.Join(dowComponents, ","))
}
// This is necessary to compute when weekly events recur
fdow := pat.GetFirstDayOfWeek()
if fdow != nil {
icalday, ok := GraphToICalDOW[fdow.String()]
if !ok {
return "", clues.NewWC(ctx, "unknown first day of week").With("day", fdow)
}
recurComponents = append(recurComponents, "WKST="+icalday)
}
rrange := recurrence.GetRangeEscaped()
if rrange != nil {
switch ptr.Val(rrange.GetTypeEscaped()) {
@ -209,7 +195,7 @@ func getRecurrencePattern(
return "", clues.WrapWC(ctx, err, "parsing end time")
}
recurComponents = append(recurComponents, "UNTIL="+endTime.Format(ICalDateTimeFormatUTC))
recurComponents = append(recurComponents, "UNTIL="+endTime.Format(ICalDateTimeFormat))
}
case models.NOEND_RECURRENCERANGETYPE:
// Nothing to do
@ -238,15 +224,10 @@ func FromEventable(ctx context.Context, event models.Eventable) (string, error)
cal := ics.NewCalendar()
cal.SetProductId("-//Alcion//Corso") // Does this have to be customizable?
err := addTimeZoneComponents(ctx, cal, event)
if err != nil {
return "", clues.Wrap(err, "adding timezone components")
}
id := ptr.Val(event.GetId())
iCalEvent := cal.AddEvent(id)
err = updateEventProperties(ctx, event, iCalEvent)
err := updateEventProperties(ctx, event, iCalEvent)
if err != nil {
return "", clues.Wrap(err, "updating event properties")
}
@ -277,7 +258,7 @@ func FromEventable(ctx context.Context, event models.Eventable) (string, error)
exICalEvent := cal.AddEvent(id)
start := exception.GetOriginalStart() // will always be in UTC
exICalEvent.AddProperty(ics.ComponentProperty(ics.PropertyRecurrenceId), start.Format(ICalDateTimeFormatUTC))
exICalEvent.AddProperty(ics.ComponentProperty(ics.PropertyRecurrenceId), start.Format(ICalDateTimeFormat))
err = updateEventProperties(ctx, exception, exICalEvent)
if err != nil {
@ -288,91 +269,6 @@ func FromEventable(ctx context.Context, event models.Eventable) (string, error)
return cal.Serialize(), nil
}
// getTZDataKeyValues looks up the VTIMEZONE template for timezone in the
// embedded tz database and parses it into property key/value pairs, one
// "KEY:VALUE" entry per line of the template.
func getTZDataKeyValues(ctx context.Context, timezone string) (map[string]string, error) {
	template, ok := tzdata.TZData[timezone]
	if !ok {
		return nil, clues.NewWC(ctx, "timezone not found in tz database").
			With("timezone", timezone)
	}

	kvs := make(map[string]string)

	for _, line := range strings.Split(template, "\n") {
		key, value, found := strings.Cut(line, ":")
		if !found {
			return nil, clues.NewWC(ctx, "invalid tzdata line").
				With("line", line).
				With("timezone", timezone)
		}

		kvs[key] = value
	}

	return kvs, nil
}
// addTimeZoneComponents injects a VTIMEZONE component for the event's
// recurrence timezone into the calendar; UTC needs no component.
//
// Handling of timezone gets a bit tricky when we have to deal with
// relative recurrence. The issue comes up when we set a recurrence to be
// something like "repeat every 3rd Tuesday". Tuesday in UTC and in IST
// will be different and so we cannot just always use UTC.
//
// The way this is solved is by using the timezone in the recurrence for
// start and end timezones, as we have to use UTC for UNTIL (mostly).
// https://www.rfc-editor.org/rfc/rfc5545#section-3.3.10
func addTimeZoneComponents(ctx context.Context, cal *ics.Calendar, event models.Eventable) error {
	timezone, err := getRecurrenceTimezone(ctx, event)
	if err != nil {
		return clues.Stack(err)
	}

	if timezone == time.UTC {
		return nil
	}

	kvs, err := getTZDataKeyValues(ctx, timezone.String())
	if err != nil {
		return clues.Stack(err)
	}

	tz := cal.AddTimezone(timezone.String())
	for key, value := range kvs {
		tz.AddProperty(ics.ComponentProperty(key), value)
	}

	return nil
}
// getRecurrenceTimezone returns the timezone specified by the event's
// recurrence. It normalizes the name via GraphTimeZoneToTZ and then
// CanonicalTimeZoneMap so we always end up with a tzdb spelling. When
// the event has no recurrence, no specific timezone is needed in the
// export and UTC is returned.
func getRecurrenceTimezone(ctx context.Context, event models.Eventable) (*time.Location, error) {
	if event.GetRecurrence() == nil {
		return time.UTC, nil
	}

	tzName := ptr.Val(event.GetRecurrence().GetRangeEscaped().GetRecurrenceTimeZone())

	if mapped, ok := GraphTimeZoneToTZ[tzName]; ok {
		tzName = mapped
	}

	if canonical, ok := CanonicalTimeZoneMap[tzName]; ok {
		tzName = canonical
	}

	loc, err := time.LoadLocation(tzName)
	if err != nil {
		return nil, clues.WrapWC(ctx, err, "unknown timezone").
			With("timezone", tzName)
	}

	return loc, nil
}
func isASCII(s string) bool {
for _, c := range s {
if c > unicode.MaxASCII {
@ -383,12 +279,6 @@ func isASCII(s string) bool {
return true
}
// isEmail reports whether em is a syntactically valid RFC 5322 email
// address (e.g. "user@example.com" or "Name <user@example.com>").
func isEmail(em string) bool {
	if _, err := mail.ParseAddress(em); err != nil {
		return false
	}

	return true
}
func updateEventProperties(ctx context.Context, event models.Eventable, iCalEvent *ics.VEvent) error {
// CREATED - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.7.1
created := event.GetCreatedDateTime()
@ -402,11 +292,6 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
iCalEvent.SetModifiedAt(ptr.Val(modified))
}
timezone, err := getRecurrenceTimezone(ctx, event)
if err != nil {
return err
}
// DTSTART - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.2.4
allDay := ptr.Val(event.GetIsAllDay())
startString := event.GetStart().GetDateTime()
@ -418,7 +303,11 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
return clues.WrapWC(ctx, err, "parsing start time")
}
addTime(iCalEvent, ics.ComponentPropertyDtStart, start, allDay, timezone)
if allDay {
iCalEvent.SetStartAt(start, ics.WithValue(string(ics.ValueDataTypeDate)))
} else {
iCalEvent.SetStartAt(start)
}
}
// DTEND - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.2.2
@ -431,7 +320,11 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
return clues.WrapWC(ctx, err, "parsing end time")
}
addTime(iCalEvent, ics.ComponentPropertyDtEnd, end, allDay, timezone)
if allDay {
iCalEvent.SetEndAt(end, ics.WithValue(string(ics.ValueDataTypeDate)))
} else {
iCalEvent.SetEndAt(end)
}
}
recurrence := event.GetRecurrence()
@ -484,14 +377,7 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
desc := replacer.Replace(description)
iCalEvent.AddProperty("X-ALT-DESC", desc, ics.WithFmtType("text/html"))
} else {
// Disable auto wrap, causes huge memory spikes
// https://github.com/jaytaylor/html2text/issues/48
prettyTablesOptions := html2text.NewPrettyTablesOptions()
prettyTablesOptions.AutoWrapText = false
stripped, err := html2text.FromString(
description,
html2text.Options{PrettyTables: true, PrettyTablesOptions: prettyTablesOptions})
stripped, err := html2text.FromString(description, html2text.Options{PrettyTables: true})
if err != nil {
return clues.Wrap(err, "converting html to text").
With("description_length", len(description))
@ -595,21 +481,8 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
}
}
// It is possible that we get non email items like the below
// one which is an internal representation of the user in the
// Exchange system. While we can technically output this as an
// attendee, it is not useful plus other downstream tools like
// ones to use PST can choke on this.
// /o=ExchangeLabs/ou=ExchangeAdministrative Group(FY...LT)/cn=Recipients/cn=883...4a-John Doe
addr := ptr.Val(attendee.GetEmailAddress().GetAddress())
if isEmail(addr) {
iCalEvent.AddAttendee(addr, props...)
} else {
logger.Ctx(ctx).
With("attendee_email", addr).
With("attendee_name", name).
Info("skipping non email attendee from ics export")
}
iCalEvent.AddAttendee(addr, props...)
}
// LOCATION - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.1.7
@ -737,26 +610,6 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
return nil
}
// addTime writes a date/date-time property onto the event. All-day
// values use the date-only format; non-UTC values are rendered in tzLoc
// and tagged with a TZID parameter, while UTC values use the trailing-Z
// UTC formats with no TZID.
func addTime(iCalEvent *ics.VEvent, prop ics.ComponentProperty, tm time.Time, allDay bool, tzLoc *time.Location) {
	inUTC := tzLoc == time.UTC

	switch {
	case allDay && inUTC:
		iCalEvent.SetProperty(prop, tm.Format(ICalDateFormat), ics.WithValue(string(ics.ValueDataTypeDate)))
	case allDay:
		iCalEvent.SetProperty(
			prop,
			tm.In(tzLoc).Format(ICalDateFormat),
			ics.WithValue(string(ics.ValueDataTypeDate)),
			keyValues("TZID", tzLoc.String()))
	case inUTC:
		iCalEvent.SetProperty(prop, tm.Format(ICalDateTimeFormatUTC))
	default:
		iCalEvent.SetProperty(prop, tm.In(tzLoc).Format(ICalDateTimeFormat), keyValues("TZID", tzLoc.String()))
	}
}
func getCancelledDates(ctx context.Context, event models.Eventable) ([]time.Time, error) {
dateStrings, err := api.GetCancelledEventDateStrings(event)
if err != nil {

View File

@ -13,7 +13,6 @@ import (
"testing"
"time"
ics "github.com/arran4/golang-ical"
"github.com/microsoft/kiota-abstractions-go/serialization"
kjson "github.com/microsoft/kiota-serialization-json-go"
"github.com/microsoftgraph/msgraph-sdk-go/models"
@ -22,7 +21,6 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/converters/ics/tzdata"
"github.com/alcionai/corso/src/internal/tester"
)
@ -34,7 +32,7 @@ func TestICSUnitSuite(t *testing.T) {
suite.Run(t, &ICSUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (s *ICSUnitSuite) TestGetLocationString() {
func (suite *ICSUnitSuite) TestGetLocationString() {
table := []struct {
name string
loc func() models.Locationable
@ -112,13 +110,13 @@ func (s *ICSUnitSuite) TestGetLocationString() {
}
for _, tt := range table {
s.Run(tt.name, func() {
assert.Equal(s.T(), tt.expect, getLocationString(tt.loc()))
suite.Run(tt.name, func() {
assert.Equal(suite.T(), tt.expect, getLocationString(tt.loc()))
})
}
}
func (s *ICSUnitSuite) TestGetUTCTime() {
func (suite *ICSUnitSuite) TestGetUTCTime() {
table := []struct {
name string
timestamp string
@ -164,18 +162,18 @@ func (s *ICSUnitSuite) TestGetUTCTime() {
}
for _, tt := range table {
s.Run(tt.name, func() {
suite.Run(tt.name, func() {
t, err := GetUTCTime(tt.timestamp, tt.timezone)
tt.errCheck(s.T(), err)
tt.errCheck(suite.T(), err)
if !tt.time.Equal(time.Time{}) {
assert.Equal(s.T(), tt.time, t)
assert.Equal(suite.T(), tt.time, t)
}
})
}
}
func (s *ICSUnitSuite) TestGetRecurrencePattern() {
func (suite *ICSUnitSuite) TestGetRecurrencePattern() {
table := []struct {
name string
recurrence func() models.PatternedRecurrenceable
@ -189,37 +187,16 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("daily")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
rec.SetPattern(pat)
return rec
},
expect: "FREQ=DAILY;INTERVAL=1;WKST=SU",
errCheck: require.NoError,
},
{
name: "daily different start of week",
recurrence: func() models.PatternedRecurrenceable {
rec := models.NewPatternedRecurrence()
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("daily")
require.NoError(s.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.MONDAY_DAYOFWEEK))
rec.SetPattern(pat)
return rec
},
expect: "FREQ=DAILY;INTERVAL=1;WKST=MO",
expect: "FREQ=DAILY;INTERVAL=1",
errCheck: require.NoError,
},
{
@ -229,16 +206,15 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("daily")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
rng := models.NewRecurrenceRange()
rrtype, err := models.ParseRecurrenceRangeType("endDate")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
rng.SetTypeEscaped(rrtype.(*models.RecurrenceRangeType))
@ -251,7 +227,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
return rec
},
expect: "FREQ=DAILY;INTERVAL=1;WKST=SU;UNTIL=20210101T182959Z",
expect: "FREQ=DAILY;INTERVAL=1;UNTIL=20210101T182959Z",
errCheck: require.NoError,
},
{
@ -261,17 +237,16 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("weekly")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
rec.SetPattern(pat)
return rec
},
expect: "FREQ=WEEKLY;INTERVAL=1;WKST=SU",
expect: "FREQ=WEEKLY;INTERVAL=1",
errCheck: require.NoError,
},
{
@ -281,16 +256,15 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("weekly")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
rng := models.NewRecurrenceRange()
rrtype, err := models.ParseRecurrenceRangeType("endDate")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
rng.SetTypeEscaped(rrtype.(*models.RecurrenceRangeType))
@ -303,7 +277,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
return rec
},
expect: "FREQ=WEEKLY;INTERVAL=1;WKST=SU;UNTIL=20210101T235959Z",
expect: "FREQ=WEEKLY;INTERVAL=1;UNTIL=20210101T235959Z",
errCheck: require.NoError,
},
{
@ -313,16 +287,15 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("weekly")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
rng := models.NewRecurrenceRange()
rrtype, err := models.ParseRecurrenceRangeType("numbered")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
rng.SetTypeEscaped(rrtype.(*models.RecurrenceRangeType))
@ -334,7 +307,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
return rec
},
expect: "FREQ=WEEKLY;INTERVAL=1;WKST=SU;COUNT=10",
expect: "FREQ=WEEKLY;INTERVAL=1;COUNT=10",
errCheck: require.NoError,
},
{
@ -344,11 +317,10 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("weekly")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
days := []models.DayOfWeek{
models.MONDAY_DAYOFWEEK,
@ -362,7 +334,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
return rec
},
expect: "FREQ=WEEKLY;INTERVAL=1;BYDAY=MO,WE,TH;WKST=SU",
expect: "FREQ=WEEKLY;INTERVAL=1;BYDAY=MO,WE,TH",
errCheck: require.NoError,
},
{
@ -372,17 +344,16 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("daily")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(2)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
rec.SetPattern(pat)
return rec
},
expect: "FREQ=DAILY;INTERVAL=2;WKST=SU",
expect: "FREQ=DAILY;INTERVAL=2",
errCheck: require.NoError,
},
{
@ -392,11 +363,10 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("absoluteMonthly")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
pat.SetDayOfMonth(ptr.To(int32(5)))
@ -404,7 +374,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
return rec
},
expect: "FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=5;WKST=SU",
expect: "FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=5",
errCheck: require.NoError,
},
{
@ -414,11 +384,10 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("absoluteYearly")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(3)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
pat.SetMonth(ptr.To(int32(8)))
@ -426,7 +395,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
return rec
},
expect: "FREQ=YEARLY;INTERVAL=3;BYMONTH=8;WKST=SU",
expect: "FREQ=YEARLY;INTERVAL=3;BYMONTH=8",
errCheck: require.NoError,
},
{
@ -436,38 +405,37 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("relativeYearly")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
pat.SetMonth(ptr.To(int32(8)))
pat.SetDaysOfWeek([]models.DayOfWeek{models.FRIDAY_DAYOFWEEK})
wi, err := models.ParseWeekIndex("first")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetIndex(wi.(*models.WeekIndex))
rec.SetPattern(pat)
return rec
},
expect: "FREQ=YEARLY;INTERVAL=1;BYMONTH=8;BYDAY=1FR;WKST=SU",
expect: "FREQ=YEARLY;INTERVAL=1;BYMONTH=8;BYDAY=1FR",
errCheck: require.NoError,
},
// TODO(meain): could still use more tests for edge cases of time
}
for _, tt := range table {
s.Run(tt.name, func() {
ctx, flush := tester.NewContext(s.T())
suite.Run(tt.name, func() {
ctx, flush := tester.NewContext(suite.T())
defer flush()
rec, err := getRecurrencePattern(ctx, tt.recurrence())
tt.errCheck(s.T(), err)
tt.errCheck(suite.T(), err)
assert.Equal(s.T(), tt.expect, rec)
assert.Equal(suite.T(), tt.expect, rec)
})
}
}
@ -492,8 +460,8 @@ func baseEvent() *models.Event {
return e
}
func (s *ICSUnitSuite) TestEventConversion() {
t := s.T()
func (suite *ICSUnitSuite) TestEventConversion() {
t := suite.T()
table := []struct {
name string
@ -578,19 +546,14 @@ func (s *ICSUnitSuite) TestEventConversion() {
rec := models.NewPatternedRecurrence()
pat := models.NewRecurrencePattern()
rng := models.NewRecurrenceRange()
typ, err := models.ParseRecurrencePatternType("daily")
require.NoError(t, err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
rng.SetRecurrenceTimeZone(ptr.To("UTC"))
rec.SetPattern(pat)
rec.SetRangeEscaped(rng)
e.SetRecurrence(rec)
@ -867,8 +830,8 @@ func (s *ICSUnitSuite) TestEventConversion() {
}
for _, tt := range table {
s.Run(tt.name, func() {
t := s.T()
suite.Run(tt.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
@ -918,8 +881,8 @@ func checkAttendee(t *testing.T, out, check, msg string) {
assert.ElementsMatch(t, as, bs, fmt.Sprintf("fields %s", msg))
}
func (s *ICSUnitSuite) TestAttendees() {
t := s.T()
func (suite *ICSUnitSuite) TestAttendees() {
t := suite.T()
table := []struct {
name string
@ -945,17 +908,6 @@ func (s *ICSUnitSuite) TestAttendees() {
"attendee")
},
},
{
name: "attendee with internal exchange representation for email",
att: [][]string{{
"/o=ExchangeLabs/ou=ExchangeAdministrative Group(FY...LT)/cn=Recipients/cn=883...4a-John Doe",
"required",
"declined",
}},
check: func(out string) {
assert.NotContains(t, out, "ATTENDEE")
},
},
{
name: "multiple attendees",
att: [][]string{
@ -986,8 +938,8 @@ func (s *ICSUnitSuite) TestAttendees() {
}
for _, tt := range table {
s.Run(tt.name, func() {
t := s.T()
suite.Run(tt.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
@ -1108,8 +1060,8 @@ func checkAttachment(t *testing.T, out, check, msg string) {
assert.ElementsMatch(t, as, bs, fmt.Sprintf("fields %s", msg))
}
func (s *ICSUnitSuite) TestAttachments() {
t := s.T()
func (suite *ICSUnitSuite) TestAttachments() {
t := suite.T()
type attachment struct {
cid string // contentid
@ -1165,8 +1117,8 @@ func (s *ICSUnitSuite) TestAttachments() {
}
for _, tt := range table {
s.Run(tt.name, func() {
t := s.T()
suite.Run(tt.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
@ -1209,7 +1161,7 @@ func (s *ICSUnitSuite) TestAttachments() {
}
}
func (s *ICSUnitSuite) TestCancellations() {
func (suite *ICSUnitSuite) TestCancellations() {
table := []struct {
name string
cancelledIds []string
@ -1233,8 +1185,8 @@ func (s *ICSUnitSuite) TestCancellations() {
}
for _, tt := range table {
s.Run(tt.name, func() {
t := s.T()
suite.Run(tt.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
@ -1297,7 +1249,7 @@ func eventToJSON(e *models.Event) ([]byte, error) {
return bts, err
}
func (s *ICSUnitSuite) TestEventExceptions() {
func (suite *ICSUnitSuite) TestEventExceptions() {
table := []struct {
name string
event func() *models.Event
@ -1319,7 +1271,7 @@ func (s *ICSUnitSuite) TestEventExceptions() {
exception.SetEnd(newEnd)
parsed, err := eventToMap(exception)
require.NoError(s.T(), err, "parsing exception")
require.NoError(suite.T(), err, "parsing exception")
// add exception event to additional data
e.SetAdditionalData(map[string]any{
@ -1338,15 +1290,15 @@ func (s *ICSUnitSuite) TestEventExceptions() {
}
}
assert.Equal(s.T(), 2, events, "number of events")
assert.Equal(suite.T(), 2, events, "number of events")
assert.Contains(s.T(), out, "RECURRENCE-ID:20210101T120000Z", "recurrence id")
assert.Contains(suite.T(), out, "RECURRENCE-ID:20210101T120000Z", "recurrence id")
assert.Contains(s.T(), out, "SUMMARY:Subject", "original event")
assert.Contains(s.T(), out, "SUMMARY:Exception", "exception event")
assert.Contains(suite.T(), out, "SUMMARY:Subject", "original event")
assert.Contains(suite.T(), out, "SUMMARY:Exception", "exception event")
assert.Contains(s.T(), out, "DTSTART:20210101T130000Z", "new start time")
assert.Contains(s.T(), out, "DTEND:20210101T140000Z", "new end time")
assert.Contains(suite.T(), out, "DTSTART:20210101T130000Z", "new start time")
assert.Contains(suite.T(), out, "DTEND:20210101T140000Z", "new end time")
},
},
{
@ -1375,10 +1327,10 @@ func (s *ICSUnitSuite) TestEventExceptions() {
exception2.SetEnd(newEnd)
parsed1, err := eventToMap(exception1)
require.NoError(s.T(), err, "parsing exception 1")
require.NoError(suite.T(), err, "parsing exception 1")
parsed2, err := eventToMap(exception2)
require.NoError(s.T(), err, "parsing exception 2")
require.NoError(suite.T(), err, "parsing exception 2")
// add exception event to additional data
e.SetAdditionalData(map[string]any{
@ -1397,230 +1349,36 @@ func (s *ICSUnitSuite) TestEventExceptions() {
}
}
assert.Equal(s.T(), 3, events, "number of events")
assert.Equal(suite.T(), 3, events, "number of events")
assert.Contains(s.T(), out, "RECURRENCE-ID:20210101T120000Z", "recurrence id 1")
assert.Contains(s.T(), out, "RECURRENCE-ID:20210102T120000Z", "recurrence id 2")
assert.Contains(suite.T(), out, "RECURRENCE-ID:20210101T120000Z", "recurrence id 1")
assert.Contains(suite.T(), out, "RECURRENCE-ID:20210102T120000Z", "recurrence id 2")
assert.Contains(s.T(), out, "SUMMARY:Subject", "original event")
assert.Contains(s.T(), out, "SUMMARY:Exception 1", "exception event 1")
assert.Contains(s.T(), out, "SUMMARY:Exception 2", "exception event 2")
assert.Contains(suite.T(), out, "SUMMARY:Subject", "original event")
assert.Contains(suite.T(), out, "SUMMARY:Exception 1", "exception event 1")
assert.Contains(suite.T(), out, "SUMMARY:Exception 2", "exception event 2")
assert.Contains(s.T(), out, "DTSTART:20210101T130000Z", "new start time 1")
assert.Contains(s.T(), out, "DTEND:20210101T140000Z", "new end time 1")
assert.Contains(suite.T(), out, "DTSTART:20210101T130000Z", "new start time 1")
assert.Contains(suite.T(), out, "DTEND:20210101T140000Z", "new end time 1")
assert.Contains(s.T(), out, "DTSTART:20210102T130000Z", "new start time 2")
assert.Contains(s.T(), out, "DTEND:20210102T140000Z", "new end time 2")
assert.Contains(suite.T(), out, "DTSTART:20210102T130000Z", "new start time 2")
assert.Contains(suite.T(), out, "DTEND:20210102T140000Z", "new end time 2")
},
},
}
for _, tt := range table {
s.Run(tt.name, func() {
ctx, flush := tester.NewContext(s.T())
suite.Run(tt.name, func() {
ctx, flush := tester.NewContext(suite.T())
defer flush()
bts, err := eventToJSON(tt.event())
require.NoError(s.T(), err, "getting serialized content")
require.NoError(suite.T(), err, "getting serialized content")
out, err := FromJSON(ctx, bts)
require.NoError(s.T(), err, "converting to ics")
require.NoError(suite.T(), err, "converting to ics")
tt.check(out)
})
}
}
func (s *ICSUnitSuite) TestGetRecurrenceTimezone() {
table := []struct {
name string
intz string
outtz string
}{
{
name: "empty",
intz: "",
outtz: "UTC",
},
{
name: "utc",
intz: "UTC",
outtz: "UTC",
},
{
name: "simple",
intz: "Asia/Kolkata",
outtz: "Asia/Kolkata",
},
{
name: "windows tz",
intz: "India Standard Time",
outtz: "Asia/Kolkata",
},
{
name: "non canonical",
intz: "Asia/Calcutta",
outtz: "Asia/Kolkata",
},
}
for _, tt := range table {
s.Run(tt.name, func() {
ctx, flush := tester.NewContext(s.T())
defer flush()
event := baseEvent()
if len(tt.intz) > 0 {
recur := models.NewPatternedRecurrence()
rp := models.NewRecurrenceRange()
rp.SetRecurrenceTimeZone(ptr.To(tt.intz))
recur.SetRangeEscaped(rp)
event.SetRecurrence(recur)
}
timezone, err := getRecurrenceTimezone(ctx, event)
require.NoError(s.T(), err)
assert.Equal(s.T(), tt.outtz, timezone.String())
})
}
}
func (s *ICSUnitSuite) TestAddTimezoneComponents() {
event := baseEvent()
recur := models.NewPatternedRecurrence()
rp := models.NewRecurrenceRange()
rp.SetRecurrenceTimeZone(ptr.To("Asia/Kolkata"))
recur.SetRangeEscaped(rp)
event.SetRecurrence(recur)
ctx, flush := tester.NewContext(s.T())
defer flush()
cal := ics.NewCalendar()
err := addTimeZoneComponents(ctx, cal, event)
require.NoError(s.T(), err)
text := cal.Serialize()
assert.Contains(s.T(), text, "BEGIN:VTIMEZONE", "beginning of timezone")
assert.Contains(s.T(), text, "TZID:Asia/Kolkata", "timezone id")
assert.Contains(s.T(), text, "END:VTIMEZONE", "end of timezone")
}
func (s *ICSUnitSuite) TestAddTime() {
locak, err := time.LoadLocation("Asia/Kolkata")
require.NoError(s.T(), err)
table := []struct {
name string
prop ics.ComponentProperty
time time.Time
allDay bool
loc *time.Location
exp string
}{
{
name: "utc",
prop: ics.ComponentPropertyDtStart,
time: time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC),
allDay: false,
loc: time.UTC,
exp: "DTSTART:20210102T030405Z",
},
{
name: "local",
prop: ics.ComponentPropertyDtStart,
time: time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC),
allDay: false,
loc: locak,
exp: "DTSTART;TZID=Asia/Kolkata:20210102T083405",
},
{
name: "all day",
prop: ics.ComponentPropertyDtStart,
time: time.Date(2021, 1, 2, 0, 0, 0, 0, time.UTC),
allDay: true,
loc: time.UTC,
exp: "DTSTART;VALUE=DATE:20210102",
},
{
name: "all day local",
prop: ics.ComponentPropertyDtStart,
time: time.Date(2021, 1, 2, 0, 0, 0, 0, time.UTC),
allDay: true,
loc: locak,
exp: "DTSTART;VALUE=DATE;TZID=Asia/Kolkata:20210102",
},
{
name: "end",
prop: ics.ComponentPropertyDtEnd,
time: time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC),
allDay: false,
loc: time.UTC,
exp: "DTEND:20210102T030405Z",
},
{
// This won't happen, but a good test to have to test loc handling
name: "windows tz",
prop: ics.ComponentPropertyDtStart,
time: time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC),
allDay: false,
loc: time.FixedZone("India Standard Time", 5*60*60+30*60),
exp: "DTSTART;TZID=India Standard Time:20210102T083405",
},
}
for _, tt := range table {
s.Run(tt.name, func() {
cal := ics.NewCalendar()
evt := cal.AddEvent("id")
addTime(evt, tt.prop, tt.time, tt.allDay, tt.loc)
expSplits := strings.FieldsFunc(tt.exp, func(c rune) bool {
return c == ':' || c == ';'
})
text := cal.Serialize()
checkLine := ""
for _, l := range strings.Split(text, "\r\n") {
if strings.HasPrefix(l, string(tt.prop)) {
checkLine = l
break
}
}
actSplits := strings.FieldsFunc(checkLine, func(c rune) bool {
return c == ':' || c == ';'
})
assert.Greater(s.T(), len(checkLine), 0, "line not found")
assert.Equal(s.T(), len(expSplits), len(actSplits), "length of fields")
assert.ElementsMatch(s.T(), expSplits, actSplits, "fields")
})
}
}
// This tests and ensures that the generated data is int he format
// that we expect
func (s *ICSUnitSuite) TestGetTZDataKeyValues() {
for key := range tzdata.TZData {
s.Run(key, func() {
ctx, flush := tester.NewContext(s.T())
defer flush()
data, err := getTZDataKeyValues(ctx, key)
require.NoError(s.T(), err)
assert.NotEmpty(s.T(), data, "data")
assert.NotContains(s.T(), data, "BEGIN", "beginning of timezone") // should be stripped
assert.NotContains(s.T(), data, "END", "end of timezone") // should be stripped
assert.NotContains(s.T(), data, "TZID", "timezone id") // should be stripped
assert.Contains(s.T(), data, "DTSTART", "start time")
assert.Contains(s.T(), data, "TZOFFSETFROM", "offset from")
})
}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,35 +0,0 @@
#!/bin/sh
set -eo pipefail
if ! echo "$PWD" | grep -q '/tzdata$'; then
echo "Please run this script from the tzdata dir"
exit 1
fi
# TODO: Generate from https://www.iana.org/time-zones
if [ ! -d /tmp/corso-tzdata ]; then
git clone --depth 1 https://github.com/add2cal/timezones-ical-library.git /tmp/corso-tzdata
else
cd /tmp/corso-tzdata
git pull
cd -
fi
# Generate a huge go file with all the timezones
echo "package tzdata" >data.go
echo "" >>data.go
echo "var TZData = map[string]string{" >>data.go
find /tmp/corso-tzdata/ -name '*.ics' | while read -r f; do
tz=$(echo "$f" | sed 's|/tmp/corso-tzdata/api/||;s|\.ics$||')
echo "Processing $tz"
printf "\t\"%s\": \`" "$tz" >>data.go
cat "$f" | grep -Ev "(BEGIN:|END:|TZID:)" |
sed 's|`|\\`|g;s|\r||;s|TZID:/timezones-ical-library/|TZID:|' |
perl -pe 'chomp if eof' >>data.go
echo "\`," >>data.go
done
echo "}" >>data.go

View File

@ -59,15 +59,6 @@ const (
minEpochDurationUpperBound = 7 * 24 * time.Hour
)
// allValidCompressors is the set of compression algorithms either currently
// being used or that were previously used. Use this during the config verify
// command to avoid spurious errors. We can revisit whether we want to update
// the config in those old repos at a later time.
var allValidCompressors = map[compression.Name]struct{}{
compression.Name(defaultCompressor): {},
compression.Name("s2-default"): {},
}
var (
ErrSettingDefaultConfig = clues.New("setting default repo config values")
ErrorRepoAlreadyExists = clues.New("repo already exists")
@ -777,7 +768,7 @@ func (w *conn) verifyDefaultPolicyConfigOptions(
ctx = clues.Add(ctx, "current_global_policy", globalPol.String())
if _, ok := allValidCompressors[globalPol.CompressionPolicy.CompressorName]; !ok {
if globalPol.CompressionPolicy.CompressorName != defaultCompressor {
errs.AddAlert(ctx, fault.NewAlert(
"unexpected compressor",
corsoWrapperAlertNamespace,

View File

@ -891,20 +891,6 @@ func (suite *ConnRetentionIntegrationSuite) TestVerifyDefaultConfigOptions() {
},
expectAlerts: 1,
},
{
name: "OldValidCompressor",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
pol, err := con.getGlobalPolicyOrEmpty(ctx)
require.NoError(t, err, clues.ToCore(err))
_, err = updateCompressionOnPolicy("s2-default", pol)
require.NoError(t, err, clues.ToCore(err))
err = con.writeGlobalPolicy(ctx, "test", pol)
require.NoError(t, err, clues.ToCore(err))
},
expectAlerts: 0,
},
{
name: "NonDefaultCompression",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {

View File

@ -2,7 +2,6 @@ package m365
import (
"context"
"fmt"
"github.com/alcionai/clues"
@ -110,7 +109,7 @@ func (ctrl *Controller) ProduceBackupCollections(
handler = teamschats.NewBackup()
default:
return nil, nil, false, clues.NewWC(ctx, fmt.Sprintf("service not supported: %s", service.HumanString()))
return nil, nil, false, clues.Wrap(clues.NewWC(ctx, service.String()), "service not supported")
}
colls, excludeItems, canUsePreviousBackup, err = handler.ProduceBackupCollections(
@ -174,8 +173,7 @@ func verifyBackupInputs(sel selectors.Selector, cachedIDs []string) error {
}
if !filters.Contains(ids).Compare(sel.ID()) {
return clues.Wrap(core.ErrNotFound, "verifying existence of resource").
With("selector_protected_resource", sel.ID())
return clues.Stack(core.ErrNotFound).With("selector_protected_resource", sel.ID())
}
return nil

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/suite"
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/data/mock"
"github.com/alcionai/corso/src/internal/m365/service/exchange"
@ -18,7 +19,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/control"
@ -36,7 +36,10 @@ import (
type DataCollectionIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
user string
site string
tenantID string
ac api.Client
}
func TestDataCollectionIntgSuite(t *testing.T) {
@ -48,14 +51,29 @@ func TestDataCollectionIntgSuite(t *testing.T) {
}
func (suite *DataCollectionIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
t := suite.T()
suite.user = tconfig.M365UserID(t)
suite.site = tconfig.M365SiteID(t)
acct := tconfig.NewM365Account(t)
creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.tenantID = creds.AzureTenantID
suite.ac, err = api.NewClient(
creds,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))
}
func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
ctx, flush := tester.NewContext(suite.T())
defer flush()
selUsers := []string{suite.m365.User.ID}
selUsers := []string{suite.user}
ctrl := newController(ctx, suite.T(), path.ExchangeService)
tests := []struct {
@ -67,7 +85,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
getSelector: func(t *testing.T) selectors.Selector {
sel := selectors.NewExchangeBackup(selUsers)
sel.Include(sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
sel.DiscreteOwner = suite.m365.User.ID
sel.DiscreteOwner = suite.user
return sel.Selector
},
},
@ -76,7 +94,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
getSelector: func(t *testing.T) selectors.Selector {
sel := selectors.NewExchangeBackup(selUsers)
sel.Include(sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()))
sel.DiscreteOwner = suite.m365.User.ID
sel.DiscreteOwner = suite.user
return sel.Selector
},
},
@ -124,8 +142,8 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
collections, excludes, canUsePreviousBackup, err := exchange.NewBackup().ProduceBackupCollections(
ctx,
bpc,
suite.m365.AC,
suite.m365.Creds,
suite.ac,
suite.ac.Credentials,
ctrl.UpdateStatus,
count.New(),
fault.New(true))
@ -252,7 +270,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
ctx, flush := tester.NewContext(suite.T())
defer flush()
selSites := []string{suite.m365.Site.ID}
selSites := []string{suite.site}
ctrl := newController(ctx, suite.T(), path.SharePointService)
tests := []struct {
name string
@ -294,7 +312,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
collections, excludes, canUsePreviousBackup, err := sharepoint.NewBackup().ProduceBackupCollections(
ctx,
bpc,
suite.m365.AC,
suite.ac,
ctrl.credentials,
ctrl.UpdateStatus,
count.New(),
@ -333,7 +351,8 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
type SPCollectionIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
connector *Controller
user string
}
func TestSPCollectionIntgSuite(t *testing.T) {
@ -345,7 +364,13 @@ func TestSPCollectionIntgSuite(t *testing.T) {
}
func (suite *SPCollectionIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
ctx, flush := tester.NewContext(suite.T())
defer flush()
suite.connector = newController(ctx, suite.T(), path.SharePointService)
suite.user = tconfig.M365UserID(suite.T())
tester.LogTimeOfTest(suite.T())
}
func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
@ -354,20 +379,25 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
ctx, flush := tester.NewContext(t)
defer flush()
ctrl := newController(ctx, t, path.SharePointService)
var (
siteID = tconfig.M365SiteID(t)
ctrl = newController(ctx, t, path.SharePointService)
siteIDs = []string{siteID}
)
_, err := ctrl.PopulateProtectedResourceIDAndName(ctx, suite.m365.Site.ID, nil)
site, err := ctrl.PopulateProtectedResourceIDAndName(ctx, siteID, nil)
require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewSharePointBackup([]string{suite.m365.Site.ID})
sel := selectors.NewSharePointBackup(siteIDs)
sel.Include(sel.LibraryFolders([]string{"foo"}, selectors.PrefixMatch()))
sel.Include(sel.Library("Documents"))
sel.SetDiscreteOwnerIDName(suite.m365.Site.ID, suite.m365.Site.WebURL)
sel.SetDiscreteOwnerIDName(site.ID(), site.Name())
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.Site.Provider,
ProtectedResource: site,
Selector: sel.Selector,
}
@ -385,15 +415,15 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
)
documentsColl, err := path.BuildPrefix(
suite.m365.TenantID,
suite.m365.Site.ID,
suite.connector.tenant,
siteID,
path.SharePointService,
path.LibrariesCategory)
require.NoError(t, err, clues.ToCore(err))
metadataColl, err := path.BuildMetadata(
suite.m365.TenantID,
suite.m365.Site.ID,
suite.connector.tenant,
siteID,
path.SharePointService,
path.LibrariesCategory,
false)
@ -420,19 +450,24 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {
ctx, flush := tester.NewContext(t)
defer flush()
ctrl := newController(ctx, t, path.SharePointService)
var (
siteID = tconfig.M365SiteID(t)
ctrl = newController(ctx, t, path.SharePointService)
siteIDs = []string{siteID}
)
_, err := ctrl.PopulateProtectedResourceIDAndName(ctx, suite.m365.Site.ID, nil)
site, err := ctrl.PopulateProtectedResourceIDAndName(ctx, siteID, nil)
require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewSharePointBackup([]string{suite.m365.Site.ID})
sel := selectors.NewSharePointBackup(siteIDs)
sel.Include(sel.Lists(selectors.Any()))
sel.SetDiscreteOwnerIDName(suite.m365.Site.ID, suite.m365.Site.WebURL)
sel.SetDiscreteOwnerIDName(site.ID(), site.Name())
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.Site.Provider,
ProtectedResource: site,
Selector: sel.Selector,
}
@ -467,7 +502,9 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {
type GroupsCollectionIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
connector *Controller
tenantID string
user string
}
func TestGroupsCollectionIntgSuite(t *testing.T) {
@ -479,7 +516,21 @@ func TestGroupsCollectionIntgSuite(t *testing.T) {
}
func (suite *GroupsCollectionIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.connector = newController(ctx, t, path.GroupsService)
suite.user = tconfig.M365UserID(t)
acct := tconfig.NewM365Account(t)
creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.tenantID = creds.AzureTenantID
tester.LogTimeOfTest(t)
}
func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint() {
@ -488,19 +539,24 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint()
ctx, flush := tester.NewContext(t)
defer flush()
ctrl := newController(ctx, t, path.GroupsService)
var (
groupID = tconfig.M365TeamID(t)
ctrl = newController(ctx, t, path.GroupsService)
groupIDs = []string{groupID}
)
_, err := ctrl.PopulateProtectedResourceIDAndName(ctx, suite.m365.Group.ID, nil)
group, err := ctrl.PopulateProtectedResourceIDAndName(ctx, groupID, nil)
require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
sel := selectors.NewGroupsBackup(groupIDs)
sel.Include(sel.LibraryFolders([]string{"test"}, selectors.PrefixMatch()))
sel.SetDiscreteOwnerIDName(suite.m365.Group.ID, suite.m365.Group.DisplayName)
sel.SetDiscreteOwnerIDName(group.ID(), group.Name())
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.Group.Provider,
ProtectedResource: group,
Selector: sel.Selector,
}
@ -519,8 +575,8 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint()
assert.Greater(t, len(collections), 1)
p, err := path.BuildMetadata(
suite.m365.TenantID,
suite.m365.Group.ID,
suite.tenantID,
groupID,
path.GroupsService,
path.LibrariesCategory,
false)
@ -558,23 +614,31 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
ctx, flush := tester.NewContext(t)
defer flush()
ctrl := newController(ctx, t, path.GroupsService)
var (
groupID = tconfig.M365TeamID(t)
ctrl = newController(ctx, t, path.GroupsService)
groupIDs = []string{groupID}
)
_, err := ctrl.PopulateProtectedResourceIDAndName(ctx, suite.m365.Group.ID, nil)
group, err := ctrl.PopulateProtectedResourceIDAndName(ctx, groupID, nil)
require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
sel := selectors.NewGroupsBackup(groupIDs)
sel.Include(sel.LibraryFolders([]string{"test"}, selectors.PrefixMatch()))
sel.SetDiscreteOwnerIDName(suite.m365.Group.ID, suite.m365.Group.DisplayName)
sel.SetDiscreteOwnerIDName(group.ID(), group.Name())
site, err := suite.connector.AC.Groups().GetRootSite(ctx, groupID)
require.NoError(t, err, clues.ToCore(err))
pth, err := path.Build(
suite.m365.TenantID,
suite.m365.Group.ID,
suite.tenantID,
groupID,
path.GroupsService,
path.LibrariesCategory,
true,
odConsts.SitesPathDir,
suite.m365.Group.RootSite.ID)
ptr.Val(site.GetId()))
require.NoError(t, err, clues.ToCore(err))
mmc := []data.RestoreCollection{
@ -592,7 +656,7 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.Group.Provider,
ProtectedResource: group,
Selector: sel.Selector,
MetadataCollections: mmc,
}
@ -612,8 +676,8 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
assert.Greater(t, len(collections), 1)
p, err := path.BuildMetadata(
suite.m365.TenantID,
suite.m365.Group.ID,
suite.tenantID,
groupID,
path.GroupsService,
path.LibrariesCategory,
false)
@ -626,13 +690,13 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
foundRootTombstone := false
sp, err := path.BuildPrefix(
suite.m365.TenantID,
suite.m365.Group.ID,
suite.tenantID,
groupID,
path.GroupsService,
path.LibrariesCategory)
require.NoError(t, err, clues.ToCore(err))
sp, err = sp.Append(false, odConsts.SitesPathDir, suite.m365.Group.RootSite.ID)
sp, err = sp.Append(false, odConsts.SitesPathDir, ptr.Val(site.GetId()))
require.NoError(t, err, clues.ToCore(err))
for _, coll := range collections {

View File

@ -366,7 +366,7 @@ func downloadContent(
itemID := ptr.Val(item.GetId())
ctx = clues.Add(ctx, "item_id", itemID)
content, err := downloadItem(ctx, iaag, driveID, item)
content, err := downloadItem(ctx, iaag, item)
if err == nil {
return content, nil
} else if !graph.IsErrUnauthorizedOrBadToken(err) {
@ -395,7 +395,7 @@ func downloadContent(
cdi := custom.ToCustomDriveItem(di)
content, err = downloadItem(ctx, iaag, driveID, cdi)
content, err = downloadItem(ctx, iaag, cdi)
if err != nil {
return nil, clues.Wrap(err, "content download retry")
}
@ -426,7 +426,7 @@ func readItemContents(
return nil, core.ErrNotFound
}
rc, err := downloadFile(ctx, iaag, props.downloadURL, false)
rc, err := downloadFile(ctx, iaag, props.downloadURL)
if graph.IsErrUnauthorizedOrBadToken(err) {
logger.CtxErr(ctx, err).Debug("stale item in cache")
}

View File

@ -22,6 +22,8 @@ import (
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details"
bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/control"
@ -39,6 +41,50 @@ import (
const defaultFileSize int64 = 42
// TODO(ashmrtn): Merge with similar structs in graph and exchange packages.
type oneDriveService struct {
credentials account.M365Config
status support.ControllerOperationStatus
ac api.Client
}
func newOneDriveService(credentials account.M365Config) (*oneDriveService, error) {
ac, err := api.NewClient(
credentials,
control.DefaultOptions(),
count.New())
if err != nil {
return nil, err
}
service := oneDriveService{
ac: ac,
credentials: credentials,
}
return &service, nil
}
func (ods *oneDriveService) updateStatus(status *support.ControllerOperationStatus) {
if status == nil {
return
}
ods.status = support.MergeStatus(ods.status, *status)
}
func loadTestService(t *testing.T) *oneDriveService {
a := tconfig.NewM365Account(t)
creds, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
service, err := newOneDriveService(creds)
require.NoError(t, err, clues.ToCore(err))
return service
}
// ---------------------------------------------------------------------------
// collections
// ---------------------------------------------------------------------------
@ -795,12 +841,7 @@ func (h mockBackupHandler[T]) AugmentItemInfo(
return h.ItemInfo
}
func (h *mockBackupHandler[T]) Get(
context.Context,
string,
map[string]string,
bool,
) (*http.Response, error) {
func (h *mockBackupHandler[T]) Get(context.Context, string, map[string]string) (*http.Response, error) {
c := h.getCall
h.getCall++

View File

@ -21,10 +21,8 @@ import (
)
const (
acceptHeaderKey = "Accept"
acceptHeaderValue = "*/*"
gigabyte = 1024 * 1024 * 1024
largeFileDownloadLimit = 15 * gigabyte
acceptHeaderKey = "Accept"
acceptHeaderValue = "*/*"
)
// downloadUrlKeys is used to find the download URL in a DriveItem response.
@ -35,8 +33,7 @@ var downloadURLKeys = []string{
func downloadItem(
ctx context.Context,
getter api.Getter,
driveID string,
ag api.Getter,
item *custom.DriveItem,
) (io.ReadCloser, error) {
if item == nil {
@ -44,37 +41,36 @@ func downloadItem(
}
var (
// very large file content needs to be downloaded through a different endpoint, or else
// the download could take longer than the lifespan of the download token in the cached
// url, which will cause us to timeout on every download request, even if we refresh the
// download url right before the query.
url = "https://graph.microsoft.com/v1.0/drives/" + driveID + "/items/" + ptr.Val(item.GetId()) + "/content"
reader io.ReadCloser
err error
isLargeFile = ptr.Val(item.GetSize()) > largeFileDownloadLimit
rc io.ReadCloser
isFile = item.GetFile() != nil
err error
)
// if this isn't a file, no content is available for download
if item.GetFile() == nil {
return reader, nil
if isFile {
var (
url string
ad = item.GetAdditionalData()
)
for _, key := range downloadURLKeys {
if v, err := str.AnyValueToString(key, ad); err == nil {
url = v
break
}
}
rc, err = downloadFile(ctx, ag, url)
if err != nil {
return nil, clues.Stack(err)
}
}
// smaller files will maintain our current behavior (prefetching the download url with the
// url cache). That pattern works for us in general, and we only need to deviate for very
// large file sizes.
if !isLargeFile {
url = str.FirstIn(item.GetAdditionalData(), downloadURLKeys...)
}
reader, err = downloadFile(ctx, getter, url, isLargeFile)
return reader, clues.StackWC(ctx, err).OrNil()
return rc, nil
}
type downloadWithRetries struct {
getter api.Getter
requireAuth bool
url string
getter api.Getter
url string
}
func (dg *downloadWithRetries) SupportsRange() bool {
@ -90,7 +86,7 @@ func (dg *downloadWithRetries) Get(
// wouldn't work without it (get 416 responses instead of 206).
headers[acceptHeaderKey] = acceptHeaderValue
resp, err := dg.getter.Get(ctx, dg.url, headers, dg.requireAuth)
resp, err := dg.getter.Get(ctx, dg.url, headers)
if err != nil {
return nil, clues.Wrap(err, "getting file")
}
@ -100,7 +96,7 @@ func (dg *downloadWithRetries) Get(
resp.Body.Close()
}
return nil, clues.NewWC(ctx, "malware detected").Label(graph.LabelsMalware)
return nil, clues.New("malware detected").Label(graph.LabelsMalware)
}
if resp != nil && (resp.StatusCode/100) != 2 {
@ -111,7 +107,7 @@ func (dg *downloadWithRetries) Get(
// upstream error checks can compare the status with
// clues.HasLabel(err, graph.LabelStatus(http.KnownStatusCode))
return nil, clues.
Wrap(clues.NewWC(ctx, resp.Status), "non-2xx http response").
Wrap(clues.New(resp.Status), "non-2xx http response").
Label(graph.LabelStatus(resp.StatusCode))
}
@ -122,7 +118,6 @@ func downloadFile(
ctx context.Context,
ag api.Getter,
url string,
requireAuth bool,
) (io.ReadCloser, error) {
if len(url) == 0 {
return nil, clues.NewWC(ctx, "empty file url")
@ -146,9 +141,8 @@ func downloadFile(
rc, err := readers.NewResetRetryHandler(
ctx,
&downloadWithRetries{
getter: ag,
requireAuth: requireAuth,
url: url,
getter: ag,
url: url,
})
return rc, clues.Stack(err).OrNil()

View File

@ -12,7 +12,6 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
@ -234,18 +233,6 @@ func (suite *OneDriveIntgSuite) SetupSuite() {
require.NoError(t, err, clues.ToCore(err))
}
type stubStatusUpdater struct {
status support.ControllerOperationStatus
}
func (ssu *stubStatusUpdater) updateStatus(status *support.ControllerOperationStatus) {
if status == nil {
return
}
ssu.status = support.MergeStatus(ssu.status, *status)
}
func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() {
creds, err := tconfig.NewM365Account(suite.T()).M365Config()
require.NoError(suite.T(), err, clues.ToCore(err))
@ -271,10 +258,10 @@ func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() {
defer flush()
var (
scope = selectors.
service = loadTestService(t)
scope = selectors.
NewOneDriveBackup([]string{test.user}).
AllData()[0]
statusUpdater = stubStatusUpdater{}
)
colls := NewCollections(
@ -287,7 +274,7 @@ func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() {
},
creds.AzureTenantID,
idname.NewProvider(test.user, test.user),
statusUpdater.updateStatus,
service.updateStatus,
control.Options{
ToggleFeatures: control.Toggles{},
},

View File

@ -17,7 +17,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata"
@ -31,7 +30,9 @@ import (
type ItemIntegrationSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
user string
userDriveID string
service *oneDriveService
}
func TestItemIntegrationSuite(t *testing.T) {
@ -43,7 +44,25 @@ func TestItemIntegrationSuite(t *testing.T) {
}
func (suite *ItemIntegrationSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.service = loadTestService(t)
suite.user = tconfig.SecondaryM365UserID(t)
graph.InitializeConcurrencyLimiter(ctx, true, 4)
pager := suite.service.ac.Drives().NewUserDrivePager(suite.user, nil)
odDrives, err := api.GetAllDrives(ctx, pager)
require.NoError(t, err, clues.ToCore(err))
// Test Requirement 1: Need a drive
require.Greaterf(t, len(odDrives), 0, "user %s does not have a drive", suite.user)
// Pick the first drive
suite.userDriveID = ptr.Val(odDrives[0].GetId())
}
func getOneDriveItem(
@ -84,36 +103,28 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
defer flush()
sc := selectors.
NewOneDriveBackup([]string{suite.m365.User.ID}).
NewOneDriveBackup([]string{suite.user}).
AllData()[0]
driveItem := getOneDriveItem(
ctx,
t,
suite.m365.AC,
suite.m365.User.DriveID)
driveItem := getOneDriveItem(ctx, t, suite.service.ac, suite.userDriveID)
// Test Requirement 2: Need a file
require.NotEmpty(
t,
driveItem,
"no file item found for user %q drive %q",
suite.m365.User.ID,
suite.m365.User.DriveID)
"no file item found for user %s drive %s",
suite.user,
suite.userDriveID)
bh := &userDriveBackupHandler{
baseUserDriveHandler: baseUserDriveHandler{
ac: suite.m365.AC.Drives(),
ac: suite.service.ac.Drives(),
},
userID: suite.m365.User.ID,
userID: suite.user,
scope: sc,
}
// Read data for the file
itemData, err := downloadItem(
ctx,
bh,
suite.m365.User.DriveID,
custom.ToCustomDriveItem(driveItem))
itemData, err := downloadItem(ctx, bh, custom.ToCustomDriveItem(driveItem))
require.NoError(t, err, clues.ToCore(err))
size, err := io.Copy(io.Discard, itemData)
@ -131,13 +142,13 @@ func (suite *ItemIntegrationSuite) TestIsURLExpired() {
ctx, flush := tester.NewContext(t)
defer flush()
driveItem := getOneDriveItem(ctx, t, suite.m365.AC, suite.m365.User.DriveID)
driveItem := getOneDriveItem(ctx, t, suite.service.ac, suite.userDriveID)
require.NotEmpty(
t,
driveItem,
"no file item found for user %q drive %q",
suite.m365.User.ID,
suite.m365.User.DriveID)
"no file item found for user %s drive %s",
suite.user,
suite.userDriveID)
var url string
@ -162,7 +173,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
}{
{
name: "",
driveID: suite.m365.User.DriveID,
driveID: suite.userDriveID,
},
// {
// name: "sharePoint",
@ -172,12 +183,12 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
rh := NewUserDriveRestoreHandler(suite.m365.AC)
rh := NewUserDriveRestoreHandler(suite.service.ac)
ctx, flush := tester.NewContext(t)
defer flush()
root, err := suite.m365.AC.Drives().GetRootFolder(ctx, test.driveID)
root, err := suite.service.ac.Drives().GetRootFolder(ctx, test.driveID)
require.NoError(t, err, clues.ToCore(err))
newFolderName := testdata.DefaultRestoreConfig("folder").Location
@ -206,7 +217,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
// HACK: Leveraging this to test getFolder behavior for a file. `getFolder()` on the
// newly created item should fail because it's a file not a folder
_, err = suite.m365.AC.Drives().GetFolderByName(
_, err = suite.service.ac.Drives().GetFolderByName(
ctx,
test.driveID,
ptr.Val(newFolder.GetId()),
@ -250,7 +261,7 @@ func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
}{
{
name: "oneDrive",
driveID: suite.m365.User.DriveID,
driveID: suite.userDriveID,
},
// {
// name: "sharePoint",
@ -264,11 +275,11 @@ func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
ctx, flush := tester.NewContext(t)
defer flush()
root, err := suite.m365.AC.Drives().GetRootFolder(ctx, test.driveID)
root, err := suite.service.ac.Drives().GetRootFolder(ctx, test.driveID)
require.NoError(t, err, clues.ToCore(err))
// Lookup a folder that doesn't exist
_, err = suite.m365.AC.Drives().GetFolderByName(
_, err = suite.service.ac.Drives().GetFolderByName(
ctx,
test.driveID,
ptr.Val(root.GetId()),
@ -276,7 +287,7 @@ func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
require.ErrorIs(t, err, api.ErrFolderNotFound, clues.ToCore(err))
// Lookup a folder that does exist
_, err = suite.m365.AC.Drives().GetFolderByName(
_, err = suite.service.ac.Drives().GetFolderByName(
ctx,
test.driveID,
ptr.Val(root.GetId()),
@ -296,7 +307,6 @@ func (m mockGetter) Get(
ctx context.Context,
url string,
headers map[string]string,
requireAuth bool,
) (*http.Response, error) {
return m.GetFunc(ctx, url)
}
@ -384,7 +394,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
return nil, clues.New("test error")
},
errorExpected: require.Error,
rcExpected: require.NotNil,
rcExpected: require.Nil,
},
{
name: "download url is empty",
@ -421,7 +431,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
}, nil
},
errorExpected: require.Error,
rcExpected: require.NotNil,
rcExpected: require.Nil,
},
{
name: "non-2xx http response",
@ -440,7 +450,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
}, nil
},
errorExpected: require.Error,
rcExpected: require.NotNil,
rcExpected: require.Nil,
},
}
@ -453,78 +463,9 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
mg := mockGetter{
GetFunc: test.GetFunc,
}
rc, err := downloadItem(
ctx,
mg,
"driveID",
custom.ToCustomDriveItem(test.itemFunc()))
rc, err := downloadItem(ctx, mg, custom.ToCustomDriveItem(test.itemFunc()))
test.errorExpected(t, err, clues.ToCore(err))
test.rcExpected(t, rc, "reader should only be nil if item is nil")
})
}
}
func (suite *ItemUnitTestSuite) TestDownloadItem_urlByFileSize() {
var (
testRc = io.NopCloser(bytes.NewReader([]byte("test")))
url = "https://example.com"
okResp = &http.Response{
StatusCode: http.StatusOK,
Body: testRc,
}
)
table := []struct {
name string
itemFunc func() models.DriveItemable
GetFunc func(ctx context.Context, url string) (*http.Response, error)
errorExpected require.ErrorAssertionFunc
rcExpected require.ValueAssertionFunc
label string
}{
{
name: "big file",
itemFunc: func() models.DriveItemable {
di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{"@microsoft.graph.downloadUrl": url})
di.SetSize(ptr.To[int64](20 * gigabyte))
return di
},
GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
assert.Contains(suite.T(), url, "/content")
return okResp, nil
},
},
{
name: "small file",
itemFunc: func() models.DriveItemable {
di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{"@microsoft.graph.downloadUrl": url})
di.SetSize(ptr.To[int64](2 * gigabyte))
return di
},
GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
assert.NotContains(suite.T(), url, "/content")
return okResp, nil
},
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
_, err := downloadItem(
ctx,
mockGetter{GetFunc: test.GetFunc},
"driveID",
custom.ToCustomDriveItem(test.itemFunc()))
require.NoError(t, err, clues.ToCore(err))
test.rcExpected(t, rc)
})
}
}
@ -581,11 +522,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem_ConnectionResetErrorOnFirstRead
mg := mockGetter{
GetFunc: GetFunc,
}
rc, err := downloadItem(
ctx,
mg,
"driveID",
custom.ToCustomDriveItem(itemFunc()))
rc, err := downloadItem(ctx, mg, custom.ToCustomDriveItem(itemFunc()))
errorExpected(t, err, clues.ToCore(err))
rcExpected(t, rc)

View File

@ -93,9 +93,8 @@ func (h siteBackupHandler) Get(
ctx context.Context,
url string,
headers map[string]string,
requireAuth bool,
) (*http.Response, error) {
return h.ac.Get(ctx, url, headers, requireAuth)
return h.ac.Get(ctx, url, headers)
}
func (h siteBackupHandler) PathPrefix(

View File

@ -18,7 +18,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata"
@ -35,7 +34,9 @@ import (
type URLCacheIntegrationSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
ac api.Client
user string
driveID string
}
func TestURLCacheIntegrationSuite(t *testing.T) {
@ -48,12 +49,29 @@ func TestURLCacheIntegrationSuite(t *testing.T) {
func (suite *URLCacheIntegrationSuite) SetupSuite() {
t := suite.T()
suite.m365 = its.GetM365(t)
ctx, flush := tester.NewContext(t)
defer flush()
graph.InitializeConcurrencyLimiter(ctx, true, 4)
suite.user = tconfig.SecondaryM365UserID(t)
acct := tconfig.NewM365Account(t)
creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.ac, err = api.NewClient(
creds,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))
drive, err := suite.ac.Users().GetDefaultDrive(ctx, suite.user)
require.NoError(t, err, clues.ToCore(err))
suite.driveID = ptr.Val(drive.GetId())
}
// Basic test for urlCache. Create some files in onedrive, then access them via
@ -61,18 +79,22 @@ func (suite *URLCacheIntegrationSuite) SetupSuite() {
func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
var (
t = suite.T()
ac = suite.m365.AC.Drives()
driveID = suite.m365.User.DriveID
ac = suite.ac.Drives()
driveID = suite.driveID
newFolderName = testdata.DefaultRestoreConfig("folder").Location
)
ctx, flush := tester.NewContext(t)
defer flush()
// Create a new test folder
root, err := ac.GetRootFolder(ctx, driveID)
require.NoError(t, err, clues.ToCore(err))
newFolder, err := ac.PostItemInContainer(
ctx,
driveID,
suite.m365.User.DriveRootFolderID,
ptr.Val(root.GetId()),
api.NewDriveItem(newFolderName, true),
control.Copy)
require.NoError(t, err, clues.ToCore(err))
@ -83,7 +105,7 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
// Get the previous delta to feed into url cache
pager := ac.EnumerateDriveItemsDelta(
ctx,
driveID,
suite.driveID,
"",
api.CallConfig{
Select: api.URLCacheDriveItemProps(),
@ -120,10 +142,10 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
// Create a new URL cache with a long TTL
uc, err := newURLCache(
driveID,
suite.driveID,
du.URL,
1*time.Hour,
ac,
suite.ac.Drives(),
count.New(),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
@ -154,8 +176,7 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
http.MethodGet,
props.downloadURL,
nil,
nil,
false)
nil)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, resp)

View File

@ -93,9 +93,8 @@ func (h userDriveBackupHandler) Get(
ctx context.Context,
url string,
headers map[string]string,
requireAuth bool,
) (*http.Response, error) {
return h.ac.Get(ctx, url, headers, requireAuth)
return h.ac.Get(ctx, url, headers)
}
func (h userDriveBackupHandler) PathPrefix(

View File

@ -296,7 +296,6 @@ func populateCollections(
cl),
qp.ProtectedResource.ID(),
bh.itemHandler(),
bh,
addAndRem.Added,
addAndRem.Removed,
// TODO: produce a feature flag that allows selective

View File

@ -24,7 +24,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
@ -88,14 +87,6 @@ func (bh mockBackupHandler) folderGetter() containerGetter { return
func (bh mockBackupHandler) previewIncludeContainers() []string { return bh.previewIncludes }
func (bh mockBackupHandler) previewExcludeContainers() []string { return bh.previewExcludes }
func (bh mockBackupHandler) CanSkipItemFailure(
err error,
resourceID string,
opts control.Options,
) (fault.SkipCause, bool) {
return "", false
}
func (bh mockBackupHandler) NewContainerCache(
userID string,
) (string, graph.ContainerResolver) {
@ -481,7 +472,10 @@ func newStatusUpdater(t *testing.T, wg *sync.WaitGroup) func(status *support.Con
type BackupIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
user string
site string
tenantID string
ac api.Client
}
func TestBackupIntgSuite(t *testing.T) {
@ -494,18 +488,35 @@ func TestBackupIntgSuite(t *testing.T) {
func (suite *BackupIntgSuite) SetupSuite() {
t := suite.T()
suite.m365 = its.GetM365(t)
ctx, flush := tester.NewContext(t)
defer flush()
graph.InitializeConcurrencyLimiter(ctx, true, 4)
suite.user = tconfig.M365UserID(t)
suite.site = tconfig.M365SiteID(t)
acct := tconfig.NewM365Account(t)
creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.ac, err = api.NewClient(
creds,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))
suite.tenantID = creds.AzureTenantID
tester.LogTimeOfTest(t)
}
func (suite *BackupIntgSuite) TestMailFetch() {
var (
users = []string{suite.m365.User.ID}
handlers = BackupHandlers(suite.m365.AC)
userID = tconfig.M365UserID(suite.T())
users = []string{userID}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
@ -549,14 +560,14 @@ func (suite *BackupIntgSuite) TestMailFetch() {
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: ctrlOpts,
ProtectedResource: suite.m365.User.Provider,
ProtectedResource: inMock.NewProvider(userID, userID),
}
collections, err := CreateCollections(
ctx,
bpc,
handlers,
suite.m365.TenantID,
suite.tenantID,
test.scope,
metadata.DeltaPaths{},
func(status *support.ControllerOperationStatus) {},
@ -591,8 +602,9 @@ func (suite *BackupIntgSuite) TestMailFetch() {
func (suite *BackupIntgSuite) TestDelta() {
var (
users = []string{suite.m365.User.ID}
handlers = BackupHandlers(suite.m365.AC)
userID = tconfig.M365UserID(suite.T())
users = []string{userID}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
@ -628,7 +640,7 @@ func (suite *BackupIntgSuite) TestDelta() {
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.User.Provider,
ProtectedResource: inMock.NewProvider(userID, userID),
}
// get collections without providing any delta history (ie: full backup)
@ -636,7 +648,7 @@ func (suite *BackupIntgSuite) TestDelta() {
ctx,
bpc,
handlers,
suite.m365.TenantID,
suite.tenantID,
test.scope,
metadata.DeltaPaths{},
func(status *support.ControllerOperationStatus) {},
@ -669,7 +681,7 @@ func (suite *BackupIntgSuite) TestDelta() {
ctx,
bpc,
handlers,
suite.m365.TenantID,
suite.tenantID,
test.scope,
dps,
func(status *support.ControllerOperationStatus) {},
@ -691,8 +703,8 @@ func (suite *BackupIntgSuite) TestMailSerializationRegression() {
var (
wg sync.WaitGroup
users = []string{suite.m365.User.ID}
handlers = BackupHandlers(suite.m365.AC)
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
)
sel := selectors.NewExchangeBackup(users)
@ -701,7 +713,7 @@ func (suite *BackupIntgSuite) TestMailSerializationRegression() {
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.User.Provider,
ProtectedResource: inMock.NewProvider(suite.user, suite.user),
Selector: sel.Selector,
}
@ -709,7 +721,7 @@ func (suite *BackupIntgSuite) TestMailSerializationRegression() {
ctx,
bpc,
handlers,
suite.m365.TenantID,
suite.tenantID,
sel.Scopes()[0],
metadata.DeltaPaths{},
newStatusUpdater(t, &wg),
@ -761,8 +773,8 @@ func (suite *BackupIntgSuite) TestMailSerializationRegression() {
// a regression test to ensure that downloaded items can be uploaded.
func (suite *BackupIntgSuite) TestContactSerializationRegression() {
var (
users = []string{suite.m365.User.ID}
handlers = BackupHandlers(suite.m365.AC)
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
@ -789,14 +801,14 @@ func (suite *BackupIntgSuite) TestContactSerializationRegression() {
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.User.Provider,
ProtectedResource: inMock.NewProvider(suite.user, suite.user),
}
edcs, err := CreateCollections(
ctx,
bpc,
handlers,
suite.m365.TenantID,
suite.tenantID,
test.scope,
metadata.DeltaPaths{},
newStatusUpdater(t, &wg),
@ -863,8 +875,8 @@ func (suite *BackupIntgSuite) TestContactSerializationRegression() {
// to be able to successfully query, download and restore event objects
func (suite *BackupIntgSuite) TestEventsSerializationRegression() {
var (
users = []string{suite.m365.User.ID}
handlers = BackupHandlers(suite.m365.AC)
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
@ -899,14 +911,14 @@ func (suite *BackupIntgSuite) TestEventsSerializationRegression() {
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.User.Provider,
ProtectedResource: inMock.NewProvider(suite.user, suite.user),
}
collections, err := CreateCollections(
ctx,
bpc,
handlers,
suite.m365.TenantID,
suite.tenantID,
test.scope,
metadata.DeltaPaths{},
newStatusUpdater(t, &wg),

View File

@ -19,7 +19,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/errs/core"
"github.com/alcionai/corso/src/pkg/fault"
@ -69,21 +68,21 @@ func getItemAndInfo(
ctx context.Context,
getter itemGetterSerializer,
userID string,
itemID string,
id string,
useImmutableIDs bool,
parentPath string,
) ([]byte, *details.ExchangeInfo, error) {
item, info, err := getter.GetItem(
ctx,
userID,
itemID,
id,
fault.New(true)) // temporary way to force a failFast error
if err != nil {
return nil, nil, clues.WrapWC(ctx, err, "fetching item").
Label(fault.LabelForceNoBackupCreation)
}
itemData, err := getter.Serialize(ctx, item, userID, itemID)
itemData, err := getter.Serialize(ctx, item, userID, id)
if err != nil {
return nil, nil, clues.WrapWC(ctx, err, "serializing item")
}
@ -109,7 +108,6 @@ func NewCollection(
bc data.BaseCollection,
user string,
items itemGetterSerializer,
canSkipFailChecker canSkipItemFailurer,
origAdded map[string]time.Time,
origRemoved []string,
validModTimes bool,
@ -142,7 +140,6 @@ func NewCollection(
added: added,
removed: removed,
getter: items,
skipChecker: canSkipFailChecker,
statusUpdater: statusUpdater,
}
}
@ -153,7 +150,6 @@ func NewCollection(
added: added,
removed: removed,
getter: items,
skipChecker: canSkipFailChecker,
statusUpdater: statusUpdater,
counter: counter,
}
@ -171,8 +167,7 @@ type prefetchCollection struct {
// removed is a list of item IDs that were deleted from, or moved out, of a container
removed map[string]struct{}
getter itemGetterSerializer
skipChecker canSkipItemFailurer
getter itemGetterSerializer
statusUpdater support.StatusUpdater
}
@ -199,12 +194,11 @@ func (col *prefetchCollection) streamItems(
wg sync.WaitGroup
progressMessage chan<- struct{}
user = col.user
dataCategory = col.Category().String()
)
ctx = clues.Add(
ctx,
"category", dataCategory)
"category", col.Category().String())
defer func() {
close(stream)
@ -233,7 +227,7 @@ func (col *prefetchCollection) streamItems(
defer close(semaphoreCh)
// delete all removed items
for itemID := range col.removed {
for id := range col.removed {
semaphoreCh <- struct{}{}
wg.Add(1)
@ -253,7 +247,7 @@ func (col *prefetchCollection) streamItems(
if progressMessage != nil {
progressMessage <- struct{}{}
}
}(itemID)
}(id)
}
var (
@ -262,7 +256,7 @@ func (col *prefetchCollection) streamItems(
)
// add any new items
for itemID := range col.added {
for id := range col.added {
if el.Failure() != nil {
break
}
@ -283,23 +277,8 @@ func (col *prefetchCollection) streamItems(
col.Opts().ToggleFeatures.ExchangeImmutableIDs,
parentPath)
if err != nil {
// pulled outside the switch due to multiple return values.
cause, canSkip := col.skipChecker.CanSkipItemFailure(
err,
user,
col.Opts())
// Handle known error cases
switch {
case canSkip:
// this is a special case handler that allows the item to be skipped
// instead of producing an error.
errs.AddSkip(ctx, fault.FileSkip(
cause,
dataCategory,
id,
id,
nil))
case errors.Is(err, core.ErrNotFound):
// Don't report errors for deleted items as there's no way for us to
// back up data that is gone. Record it as a "success", since there's
@ -321,19 +300,6 @@ func (col *prefetchCollection) streamItems(
id,
map[string]any{"parentPath": parentPath}))
atomic.AddInt64(&success, 1)
case graph.IsErrCorruptData(err):
// These items cannot be downloaded, graph error indicates that the item
// data is corrupted. Add to skipped list.
logger.
CtxErr(ctx, err).
With("skipped_reason", fault.SkipCorruptData).
Info("inaccessible email")
errs.AddSkip(ctx, fault.EmailSkip(
fault.SkipCorruptData,
user,
id,
map[string]any{"parentPath": parentPath}))
atomic.AddInt64(&success, 1)
default:
col.Counter.Inc(count.StreamItemsErred)
el.AddRecoverable(ctx, clues.Wrap(err, "fetching item").Label(fault.LabelForceNoBackupCreation))
@ -370,7 +336,7 @@ func (col *prefetchCollection) streamItems(
if progressMessage != nil {
progressMessage <- struct{}{}
}
}(itemID)
}(id)
}
wg.Wait()
@ -398,8 +364,7 @@ type lazyFetchCollection struct {
// removed is a list of item IDs that were deleted from, or moved out, of a container
removed map[string]struct{}
getter itemGetterSerializer
skipChecker canSkipItemFailurer
getter itemGetterSerializer
statusUpdater support.StatusUpdater
@ -426,8 +391,8 @@ func (col *lazyFetchCollection) streamItems(
var (
success int64
progressMessage chan<- struct{}
user = col.user
el = errs.Local()
user = col.user
)
defer func() {
@ -439,7 +404,7 @@ func (col *lazyFetchCollection) streamItems(
int(success),
0,
col.FullPath().Folder(false),
el.Failure())
errs.Failure())
}()
if len(col.added)+len(col.removed) > 0 {
@ -465,7 +430,7 @@ func (col *lazyFetchCollection) streamItems(
// add any new items
for id, modTime := range col.added {
if el.Failure() != nil {
if errs.Failure() != nil {
break
}
@ -481,18 +446,15 @@ func (col *lazyFetchCollection) streamItems(
&lazyItemGetter{
userID: user,
itemID: id,
category: col.Category(),
getter: col.getter,
modTime: modTime,
immutableIDs: col.Opts().ToggleFeatures.ExchangeImmutableIDs,
parentPath: parentPath,
skipChecker: col.skipChecker,
opts: col.Opts(),
},
id,
modTime,
col.counter,
el)
errs)
atomic.AddInt64(&success, 1)
@ -506,12 +468,9 @@ type lazyItemGetter struct {
getter itemGetterSerializer
userID string
itemID string
category path.CategoryType
parentPath string
modTime time.Time
immutableIDs bool
skipChecker canSkipItemFailurer
opts control.Options
}
func (lig *lazyItemGetter) GetData(
@ -526,25 +485,6 @@ func (lig *lazyItemGetter) GetData(
lig.immutableIDs,
lig.parentPath)
if err != nil {
if lig.skipChecker != nil {
cause, canSkip := lig.skipChecker.CanSkipItemFailure(
err,
lig.userID,
lig.opts)
if canSkip {
errs.AddSkip(ctx, fault.FileSkip(
cause,
lig.category.String(),
lig.itemID,
lig.itemID,
nil))
return nil, nil, false, clues.
NewWC(ctx, "error marked as skippable by handler").
Label(graph.LabelsSkippable)
}
}
// If an item was deleted then return an empty file so we don't fail
// the backup and return a sentinel error when asked for ItemInfo so
// we don't display the item in the backup.
@ -559,7 +499,7 @@ func (lig *lazyItemGetter) GetData(
err = clues.Stack(err)
errs.AddRecoverable(ctx, err)
return nil, nil, false, clues.Stack(err)
return nil, nil, false, err
}
// Update the mod time to what we already told kopia about. This is required

View File

@ -28,7 +28,6 @@ import (
"github.com/alcionai/corso/src/pkg/errs/core"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
graphTD "github.com/alcionai/corso/src/pkg/services/m365/api/graph/testdata"
)
@ -154,7 +153,6 @@ func (suite *CollectionUnitSuite) TestNewCollection_state() {
count.New()),
"u",
mock.DefaultItemGetSerialize(),
mock.NeverCanSkipFailChecker(),
nil,
nil,
colType.validModTimes,
@ -300,7 +298,6 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items() {
count.New()),
"",
&mock.ItemGetSerialize{},
mock.NeverCanSkipFailChecker(),
test.added,
maps.Keys(test.removed),
false,
@ -336,232 +333,6 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items() {
}
}
func (suite *CollectionUnitSuite) TestPrefetchCollection_Items_skipFailure() {
var (
start = time.Now().Add(-time.Second)
statusUpdater = func(*support.ControllerOperationStatus) {}
)
table := []struct {
name string
category path.CategoryType
handler backupHandler
added map[string]time.Time
removed map[string]struct{}
expectItemCount int
expectSkippedCount int
expectErr assert.ErrorAssertionFunc
}{
{
name: "no items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
expectErr: assert.NoError,
},
{
name: "events only added items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": {},
"flannigan": {},
"fitzbog": {},
},
expectItemCount: 0,
expectSkippedCount: 3,
expectErr: assert.NoError,
},
{
name: "events only removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "events added and removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "contacts only added items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": {},
"flannigan": {},
"fitzbog": {},
},
expectItemCount: 0,
expectSkippedCount: 0,
expectErr: assert.Error,
},
{
name: "contacts only removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "contacts added and removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "mail only added items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": {},
"flannigan": {},
"fitzbog": {},
},
expectItemCount: 0,
expectSkippedCount: 0,
expectErr: assert.Error,
},
{
name: "mail only removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "mail added and removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: assert.NoError,
},
}
for _, test := range table {
suite.Run(test.name, func() {
var (
t = suite.T()
errs = fault.New(true)
itemCount int
)
ctx, flush := tester.NewContext(t)
defer flush()
fullPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
locPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
opts := control.DefaultOptions()
opts.SkipEventsOnInstance503ForResources = map[string]struct{}{}
opts.SkipEventsOnInstance503ForResources["pr"] = struct{}{}
col := NewCollection(
data.NewBaseCollection(
fullPath,
nil,
locPath.ToBuilder(),
opts,
false,
count.New()),
"pr",
&mock.ItemGetSerialize{
SerializeErr: graph.ErrServiceUnavailableEmptyResp,
},
test.handler,
test.added,
maps.Keys(test.removed),
false,
statusUpdater,
count.New())
for item := range col.Items(ctx, errs) {
itemCount++
_, rok := test.removed[item.ID()]
if rok {
dimt, ok := item.(data.ItemModTime)
require.True(t, ok, "item implements data.ItemModTime")
assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
assert.True(t, item.Deleted(), "removals should be marked as deleted")
}
_, aok := test.added[item.ID()]
if !rok && aok {
assert.False(t, item.Deleted(), "additions should not be marked as deleted")
}
assert.True(t, aok || rok, "item must be either added or removed: %q", item.ID())
}
test.expectErr(t, errs.Failure())
assert.Equal(
t,
test.expectItemCount,
itemCount,
"should see all expected items")
assert.Len(t, errs.Skipped(), test.expectSkippedCount)
})
}
}
// This test verifies skipped error cases are handled correctly by collection enumeration
func (suite *CollectionUnitSuite) TestCollection_SkippedErrors() {
var (
@ -593,17 +364,6 @@ func (suite *CollectionUnitSuite) TestCollection_SkippedErrors() {
},
expectedSkipError: fault.EmailSkip(fault.SkipInvalidRecipients, "", "fisher", nil),
},
{
name: "ErrorCorruptData",
added: map[string]time.Time{
"fisher": {},
},
expectItemCount: 0,
itemGetter: &mock.ItemGetSerialize{
GetErr: graphTD.ODataErr(string(graph.ErrorCorruptData)),
},
expectedSkipError: fault.EmailSkip(fault.SkipCorruptData, "", "fisher", nil),
},
}
for _, test := range table {
@ -627,7 +387,6 @@ func (suite *CollectionUnitSuite) TestCollection_SkippedErrors() {
count.New()),
"",
test.itemGetter,
mock.NeverCanSkipFailChecker(),
test.added,
nil,
false,
@ -708,7 +467,6 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
expectItemCount: 3,
expectReads: []string{
"fisher",
"flannigan",
"fitzbog",
},
},
@ -761,7 +519,6 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
count.New()),
"",
mlg,
mock.NeverCanSkipFailChecker(),
test.added,
maps.Keys(test.removed),
true,
@ -773,10 +530,10 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
_, rok := test.removed[item.ID()]
if rok {
assert.True(t, item.Deleted(), "removals should be marked as deleted")
dimt, ok := item.(data.ItemModTime)
require.True(t, ok, "item implements data.ItemModTime")
assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
assert.True(t, item.Deleted(), "removals should be marked as deleted")
}
modTime, aok := test.added[item.ID()]
@ -785,6 +542,7 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
// initializer.
assert.Implements(t, (*data.ItemModTime)(nil), item)
assert.Equal(t, modTime, item.(data.ItemModTime).ModTime(), "item mod time")
assert.False(t, item.Deleted(), "additions should not be marked as deleted")
// Check if the test want's us to read the item's data so the lazy
@ -804,8 +562,6 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
// collection initializer.
assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, modTime, info.Modified(), "ItemInfo mod time")
} else {
assert.Fail(t, "unexpected read on item %s", item.ID())
}
}
@ -822,294 +578,6 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
}
}
func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_skipFailure() {
var (
start = time.Now().Add(-time.Second)
statusUpdater = func(*support.ControllerOperationStatus) {}
expectSkip = func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
assert.ErrorContains(t, err, "skip")
assert.True(t, clues.HasLabel(err, graph.LabelsSkippable), clues.ToCore(err))
}
expectNotSkipped = func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
assert.NotContains(t, err.Error(), "skip")
}
)
table := []struct {
name string
added map[string]time.Time
removed map[string]struct{}
category path.CategoryType
handler backupHandler
expectItemCount int
expectSkippedCount int
expectReads []string
expectErr func(t *testing.T, err error)
expectFailure assert.ErrorAssertionFunc
}{
{
name: "no items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
expectFailure: assert.NoError,
},
{
name: "events only added items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": start.Add(time.Minute),
"flannigan": start.Add(2 * time.Minute),
"fitzbog": start.Add(3 * time.Minute),
},
expectItemCount: 3,
expectSkippedCount: 3,
expectReads: []string{
"fisher",
"flannigan",
"fitzbog",
},
expectErr: expectSkip,
expectFailure: assert.NoError,
},
{
name: "events only removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: expectSkip,
expectFailure: assert.NoError,
},
{
name: "events added and removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: expectSkip,
expectFailure: assert.NoError,
},
{
name: "contacts only added items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": start.Add(time.Minute),
"flannigan": start.Add(2 * time.Minute),
"fitzbog": start.Add(3 * time.Minute),
},
expectItemCount: 3,
expectSkippedCount: 0,
expectReads: []string{
"fisher",
"flannigan",
"fitzbog",
},
expectErr: expectNotSkipped,
expectFailure: assert.Error,
},
{
name: "contacts only removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: expectNotSkipped,
expectFailure: assert.NoError,
},
{
name: "contacts added and removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: expectNotSkipped,
expectFailure: assert.NoError,
},
{
name: "mail only added items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": start.Add(time.Minute),
"flannigan": start.Add(2 * time.Minute),
"fitzbog": start.Add(3 * time.Minute),
},
expectItemCount: 3,
expectSkippedCount: 0,
expectReads: []string{
"fisher",
"flannigan",
"fitzbog",
},
expectErr: expectNotSkipped,
expectFailure: assert.Error,
},
{
name: "mail only removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: expectNotSkipped,
expectFailure: assert.NoError,
},
{
name: "mail added and removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: expectNotSkipped,
expectFailure: assert.NoError,
},
}
for _, test := range table {
suite.Run(test.name, func() {
var (
t = suite.T()
errs = fault.New(false)
itemCount int
)
ctx, flush := tester.NewContext(t)
defer flush()
fullPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
locPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
mlg := &mockLazyItemGetterSerializer{
ItemGetSerialize: &mock.ItemGetSerialize{
SerializeErr: graph.ErrServiceUnavailableEmptyResp,
},
}
defer mlg.check(t, test.expectReads)
opts := control.DefaultOptions()
opts.SkipEventsOnInstance503ForResources = map[string]struct{}{}
opts.SkipEventsOnInstance503ForResources["pr"] = struct{}{}
col := NewCollection(
data.NewBaseCollection(
fullPath,
nil,
locPath.ToBuilder(),
opts,
false,
count.New()),
"pr",
mlg,
test.handler,
test.added,
maps.Keys(test.removed),
true,
statusUpdater,
count.New())
for item := range col.Items(ctx, errs) {
itemCount++
_, rok := test.removed[item.ID()]
if rok {
dimt, ok := item.(data.ItemModTime)
require.True(t, ok, "item implements data.ItemModTime")
assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
assert.True(t, item.Deleted(), "removals should be marked as deleted")
}
modTime, aok := test.added[item.ID()]
if !rok && aok {
// Item's mod time should be what's passed into the collection
// initializer.
assert.Implements(t, (*data.ItemModTime)(nil), item)
assert.Equal(t, modTime, item.(data.ItemModTime).ModTime(), "item mod time")
assert.False(t, item.Deleted(), "additions should not be marked as deleted")
// Check if the test want's us to read the item's data so the lazy
// data fetch is executed.
if slices.Contains(test.expectReads, item.ID()) {
r := item.ToReader()
_, err := io.ReadAll(r)
test.expectErr(t, err)
r.Close()
} else {
assert.Fail(t, "unexpected read on item %s", item.ID())
}
}
assert.True(t, aok || rok, "item must be either added or removed: %q", item.ID())
}
failure := errs.Failure()
if failure == nil && len(errs.Recovered()) > 0 {
failure = errs.Recovered()[0]
}
test.expectFailure(t, failure, clues.ToCore(failure))
assert.Equal(
t,
test.expectItemCount,
itemCount,
"should see all expected items")
assert.Len(t, errs.Skipped(), test.expectSkippedCount)
})
}
}
func (suite *CollectionUnitSuite) TestLazyItem_NoRead_GetInfo_Errors() {
t := suite.T()

View File

@ -1,8 +1,6 @@
package exchange
import (
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)
@ -54,11 +52,3 @@ func (h contactBackupHandler) NewContainerCache(
getter: h.ac,
}
}
func (h contactBackupHandler) CanSkipItemFailure(
err error,
resourceID string,
opts control.Options,
) (fault.SkipCause, bool) {
return "", false
}

View File

@ -1,83 +0,0 @@
package exchange
import (
"testing"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type ContactsBackupHandlerUnitSuite struct {
tester.Suite
}
func TestContactsBackupHandlerUnitSuite(t *testing.T) {
suite.Run(t, &ContactsBackupHandlerUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *ContactsBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() {
resourceID := uuid.NewString()
table := []struct {
name string
err error
opts control.Options
expect assert.BoolAssertionFunc
expectCause fault.SkipCause
}{
{
name: "no config",
err: assert.AnError,
opts: control.Options{},
expect: assert.False,
},
{
name: "false when map is empty",
err: assert.AnError,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{},
},
expect: assert.False,
},
{
name: "false on nil error",
err: nil,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{
resourceID: {},
},
},
expect: assert.False,
},
{
name: "false even if resource matches",
err: assert.AnError,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{
resourceID: {},
},
},
expect: assert.False,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
h := newContactBackupHandler(api.Client{})
cause, result := h.CanSkipItemFailure(
test.err,
resourceID,
test.opts)
test.expect(t, result)
assert.Equal(t, test.expectCause, cause)
})
}
}

View File

@ -12,7 +12,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata"
@ -55,7 +54,7 @@ func (m *contactRestoreMock) DeleteItem(
type ContactsRestoreIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestContactsRestoreIntgSuite(t *testing.T) {
@ -67,17 +66,17 @@ func TestContactsRestoreIntgSuite(t *testing.T) {
}
func (suite *ContactsRestoreIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = newIntegrationTesterSetup(suite.T())
}
// Testing to ensure that cache system works for in multiple different environments
func (suite *ContactsRestoreIntgSuite) TestCreateContainerDestination() {
runCreateDestinationTest(
suite.T(),
newContactRestoreHandler(suite.m365.AC),
newContactRestoreHandler(suite.its.ac),
path.ContactsCategory,
suite.m365.TenantID,
suite.m365.User.ID,
suite.its.creds.AzureTenantID,
suite.its.userID,
testdata.DefaultRestoreConfig("").Location,
[]string{"Hufflepuff"},
[]string{"Ravenclaw"})
@ -208,16 +207,17 @@ func (suite *ContactsRestoreIntgSuite) TestRestoreContact() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctr := count.New()
ctx, flush := tester.NewContext(t)
defer flush()
ctr := count.New()
_, err := restoreContact(
ctx,
test.apiMock,
body,
suite.m365.User.ID,
suite.its.userID,
"destination",
test.collisionMap,
test.onCollision,

View File

@ -3,13 +3,11 @@ package exchange
import (
"context"
"fmt"
"hash/crc32"
stdpath "path"
"testing"
"github.com/alcionai/clues"
"github.com/google/uuid"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
@ -18,8 +16,10 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/errs/core"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
@ -1019,241 +1019,49 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestAddToCache() {
assert.Equal(t, m.expectedLocation, l.String(), "location path")
}
// ---------------------------------------------------------------------------
// EventContainerCache unit tests
// ---------------------------------------------------------------------------
var _ containerGetter = mockEventContainerGetter{}
type mockEventContainerGetter struct {
// containerGetter returns graph.CalendarDisplayable, unlike containersEnumerator
// which returns models.Calendarable.
idToCalendar map[string]graph.CalendarDisplayable
err error
}
func (m mockEventContainerGetter) GetContainerByID(
ctx context.Context,
userID string,
dirID string,
) (graph.Container, error) {
return m.idToCalendar[dirID], m.err
}
var _ containersEnumerator[models.Calendarable] = mockEventContainersEnumerator{}
type mockEventContainersEnumerator struct {
containers []models.Calendarable
err error
}
func (m mockEventContainersEnumerator) EnumerateContainers(
ctx context.Context,
userID string,
baseDirID string,
) ([]models.Calendarable, error) {
return m.containers, m.err
}
type EventsContainerUnitSuite struct {
type ContainerResolverSuite struct {
tester.Suite
credentials account.M365Config
}
func TestEventsContainerUnitSuite(t *testing.T) {
suite.Run(t, &EventsContainerUnitSuite{
Suite: tester.NewUnitSuite(t),
})
}
func makeCalendar(
id, name, ownerEmail string,
isDefault bool,
) *models.Calendar {
c := models.NewCalendar()
c.SetId(ptr.To(id))
c.SetName(ptr.To(name))
c.SetIsDefaultCalendar(ptr.To(isDefault))
if len(ownerEmail) > 0 {
email := models.NewEmailAddress()
email.SetAddress(ptr.To(ownerEmail))
// Set crc as the name for keeping this func simple.
eName := fmt.Sprintf("%d", crc32.ChecksumIEEE([]byte(ownerEmail)))
email.SetName(ptr.To(eName))
c.SetOwner(email)
}
return c
}
// Test if we skip backup of shared calendars. These will be backed up for
// the resource owner that owns the calendar.
func (suite *EventsContainerUnitSuite) TestPopulate_SkipSharedCalendars() {
// map of calendars
calendars := map[string]models.Calendarable{
// Default calendars Dx
"D0": makeCalendar(api.DefaultCalendar, api.DefaultCalendar, "owner@bar.com", true),
// Atypical, but creating another default calendar for testing purposes.
"D1": makeCalendar("D1", "D1", "owner@bar.com", true),
// Shared calendars Sx
"S0": makeCalendar("S0", "S0", "sharer@bar.com", false),
// Owned calendars, not default Ox
"O0": makeCalendar("O0", "O0", "owner@bar.com", false),
// Calendars with missing owner informaton
"M0": makeCalendar("M0", "M0", "", false),
}
// Always return default calendar from the getter.
getContainersByID := func() map[string]graph.CalendarDisplayable {
return map[string]graph.CalendarDisplayable{
api.DefaultCalendar: *graph.CreateCalendarDisplayable(calendars["D0"], "parentID"),
}
}
table := []struct {
name string
enumerateContainers func() []models.Calendarable
expectErr assert.ErrorAssertionFunc
assertFunc func(t *testing.T, ecc *eventContainerCache)
}{
{
name: "one default calendar, one shared",
enumerateContainers: func() []models.Calendarable {
return []models.Calendarable{
calendars["D0"],
calendars["S0"],
}
},
expectErr: assert.NoError,
assertFunc: func(t *testing.T, ecc *eventContainerCache) {
assert.Len(t, ecc.cache, 1, "expected calendar count")
assert.NotNil(t, ecc.cache[api.DefaultCalendar], "missing default calendar")
},
},
{
name: "2 default calendars, 1 shared",
enumerateContainers: func() []models.Calendarable {
return []models.Calendarable{
calendars["D0"],
calendars["D1"],
calendars["S0"],
}
},
expectErr: assert.NoError,
assertFunc: func(t *testing.T, ecc *eventContainerCache) {
assert.Len(t, ecc.cache, 2, "expected calendar count")
assert.NotNil(t, ecc.cache[api.DefaultCalendar], "missing default calendar")
assert.NotNil(t, ecc.cache["D1"], "missing default calendar")
},
},
{
name: "1 default, 1 additional owned, 1 shared",
enumerateContainers: func() []models.Calendarable {
return []models.Calendarable{
calendars["D0"],
calendars["O0"],
calendars["S0"],
}
},
expectErr: assert.NoError,
assertFunc: func(t *testing.T, ecc *eventContainerCache) {
assert.Len(t, ecc.cache, 2, "expected calendar count")
assert.NotNil(t, ecc.cache[api.DefaultCalendar], "missing default calendar")
assert.NotNil(t, ecc.cache["O0"], "missing owned calendar")
},
},
{
name: "1 default, 1 with missing owner information",
enumerateContainers: func() []models.Calendarable {
return []models.Calendarable{
calendars["D0"],
calendars["M0"],
}
},
expectErr: assert.NoError,
assertFunc: func(t *testing.T, ecc *eventContainerCache) {
assert.Len(t, ecc.cache, 2, "expected calendar count")
assert.NotNil(t, ecc.cache[api.DefaultCalendar], "missing default calendar")
assert.NotNil(t, ecc.cache["M0"], "missing calendar with missing owner info")
},
},
{
// Unlikely to happen, but we should back up the calendar if the default owner
// cannot be determined, i.e. default calendar is missing.
name: "default owner info missing",
enumerateContainers: func() []models.Calendarable {
return []models.Calendarable{
calendars["S0"],
}
},
expectErr: assert.NoError,
assertFunc: func(t *testing.T, ecc *eventContainerCache) {
assert.Len(t, ecc.cache, 2, "expected calendar count")
assert.NotNil(t, ecc.cache[api.DefaultCalendar], "missing default calendar")
assert.NotNil(t, ecc.cache["S0"], "missing additional calendar")
},
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
ecc := &eventContainerCache{
userID: "test",
enumer: mockEventContainersEnumerator{containers: test.enumerateContainers()},
getter: mockEventContainerGetter{idToCalendar: getContainersByID()},
}
err := ecc.Populate(ctx, fault.New(true), "root", "root")
test.expectErr(t, err, clues.ToCore(err))
test.assertFunc(t, ecc)
})
}
}
// ---------------------------------------------------------------------------
// container resolver integration suite
// ---------------------------------------------------------------------------
type ContainerResolverIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
}
func TestContainerResolverIntgSuite(t *testing.T) {
suite.Run(t, &ContainerResolverIntgSuite{
func TestContainerResolverIntegrationSuite(t *testing.T) {
suite.Run(t, &ContainerResolverSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs}),
})
}
func (suite *ContainerResolverIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
func (suite *ContainerResolverSuite) SetupSuite() {
t := suite.T()
a := tconfig.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365
}
func (suite *ContainerResolverIntgSuite) TestPopulate() {
func (suite *ContainerResolverSuite) TestPopulate() {
ac, err := api.NewClient(
suite.credentials,
control.DefaultOptions(),
count.New())
require.NoError(suite.T(), err, clues.ToCore(err))
eventFunc := func(t *testing.T) graph.ContainerResolver {
return &eventContainerCache{
userID: tconfig.M365UserID(t),
enumer: suite.m365.AC.Events(),
getter: suite.m365.AC.Events(),
enumer: ac.Events(),
getter: ac.Events(),
}
}
contactFunc := func(t *testing.T) graph.ContainerResolver {
return &contactContainerCache{
userID: tconfig.M365UserID(t),
enumer: suite.m365.AC.Contacts(),
getter: suite.m365.AC.Contacts(),
enumer: ac.Contacts(),
getter: ac.Contacts(),
}
}

View File

@ -1,13 +1,6 @@
package exchange
import (
"errors"
"net/http"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)
@ -59,32 +52,3 @@ func (h eventBackupHandler) NewContainerCache(
getter: h.ac,
}
}
// CanSkipItemFailure reports whether an item-level backup failure for the
// given resource may be downgraded from an error to a recorded skip.
//
// todo: this could be further improved by specifying the call source and matching that
// with the expected error. Might be necessary if we use this for more than one error.
// But since we only call this in a single place at this time, that additional guard isn't
// built into the func.
func (h eventBackupHandler) CanSkipItemFailure(
	err error,
	resourceID string,
	opts control.Options,
) (fault.SkipCause, bool) {
	// Nothing to skip when there is no error.
	if err == nil {
		return "", false
	}

	// this is a bit overly cautious. we do know that we get 503s with empty response bodies
	// due to failures when getting too many instances. We don't know for sure if we get
	// generic, well formed 503s. But since we're working with specific resources and item
	// IDs in the first place, that extra caution will help make sure an unexpected error doesn't
	// slip through the cracks on us.
	if !errors.Is(err, graph.ErrServiceUnavailableEmptyResp) &&
		!clues.HasLabel(err, graph.LabelStatus(http.StatusServiceUnavailable)) {
		return "", false
	}

	// Only skip for resources explicitly opted in via configuration.
	_, ok := opts.SkipEventsOnInstance503ForResources[resourceID]

	// strict equals required here. ids are case sensitive.
	return fault.SkipKnownEventInstance503s, ok
}

View File

@ -1,112 +0,0 @@
package exchange
import (
"net/http"
"testing"
"github.com/alcionai/clues"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)
// EventsBackupHandlerUnitSuite hosts unit tests for the events backup handler.
type EventsBackupHandlerUnitSuite struct {
	tester.Suite
}

// TestEventsBackupHandlerUnitSuite wires the suite into the go test runner.
func TestEventsBackupHandlerUnitSuite(t *testing.T) {
	suite.Run(t, &EventsBackupHandlerUnitSuite{Suite: tester.NewUnitSuite(t)})
}

// TestHandler_CanSkipItemFailure exercises the skip-eligibility matrix: a skip
// requires BOTH a 503-class error and the resource appearing in the
// SkipEventsOnInstance503ForResources config.
func (suite *EventsBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() {
	resourceID := uuid.NewString()

	table := []struct {
		name        string
		err         error
		opts        control.Options
		expect      assert.BoolAssertionFunc
		expectCause fault.SkipCause
	}{
		{
			// Matching error class, but no skip config at all.
			name:        "no config",
			err:         graph.ErrServiceUnavailableEmptyResp,
			opts:        control.Options{},
			expect:      assert.False,
			expectCause: fault.SkipKnownEventInstance503s,
		},
		{
			// Matching error class, skip map present but empty.
			name: "empty skip on 503",
			err:  graph.ErrServiceUnavailableEmptyResp,
			opts: control.Options{
				SkipEventsOnInstance503ForResources: map[string]struct{}{},
			},
			expect:      assert.False,
			expectCause: fault.SkipKnownEventInstance503s,
		},
		{
			// No error: never skippable; expectCause stays the zero value.
			name: "nil error",
			err:  nil,
			opts: control.Options{
				SkipEventsOnInstance503ForResources: map[string]struct{}{
					resourceID: {},
				},
			},
			expect: assert.False,
		},
		{
			// Matching error class, but a different resource is configured.
			name: "non-matching resource",
			err:  graph.ErrServiceUnavailableEmptyResp,
			opts: control.Options{
				SkipEventsOnInstance503ForResources: map[string]struct{}{
					"foo": {},
				},
			},
			expect:      assert.False,
			expectCause: fault.SkipKnownEventInstance503s,
		},
		{
			// Empty-response 503 sentinel + configured resource => skippable.
			name: "match on instance 503 empty resp",
			err:  graph.ErrServiceUnavailableEmptyResp,
			opts: control.Options{
				SkipEventsOnInstance503ForResources: map[string]struct{}{
					resourceID: {},
				},
			},
			expect:      assert.True,
			expectCause: fault.SkipKnownEventInstance503s,
		},
		{
			// Any error labeled with a 503 status + configured resource => skippable.
			name: "match on instance 503",
			err: clues.New("arbitrary error").
				Label(graph.LabelStatus(http.StatusServiceUnavailable)),
			opts: control.Options{
				SkipEventsOnInstance503ForResources: map[string]struct{}{
					resourceID: {},
				},
			},
			expect:      assert.True,
			expectCause: fault.SkipKnownEventInstance503s,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			h := newEventBackupHandler(api.Client{})

			cause, result := h.CanSkipItemFailure(
				test.err,
				resourceID,
				test.opts)
			test.expect(t, result)
			assert.Equal(t, test.expectCause, cause)
		})
	}
}

View File

@ -2,7 +2,6 @@ package exchange
import (
"context"
"strings"
"time"
"github.com/alcionai/clues"
@ -61,16 +60,6 @@ func (ecc *eventContainerCache) populateEventRoot(ctx context.Context) error {
return nil
}
// isSharedCalendar reports whether calendar c belongs to someone other than
// the owner of the default calendar, comparing owner addresses
// case-insensitively. When the default owner is unknown, or the calendar
// carries no owner at all, the calendar is assumed to be user-owned and
// false is returned.
func isSharedCalendar(defaultCalendarOwner string, c models.Calendarable) bool {
	owner := c.GetOwner()

	// Without both owner addresses there is nothing to compare; treat the
	// calendar as owned by the user.
	if owner == nil || len(defaultCalendarOwner) == 0 {
		return false
	}

	return !strings.EqualFold(defaultCalendarOwner, ptr.Val(owner.GetAddress()))
}
// Populate utility function for populating eventCalendarCache.
// Executes 1 additional Graph Query
// @param baseID: ignored. Present to conform to interface
@ -100,39 +89,11 @@ func (ecc *eventContainerCache) Populate(
return clues.WrapWC(ctx, err, "enumerating containers")
}
var defaultCalendarOwner string
// Determine the owner for the default calendar. We'll use this to detect and
// skip shared calendars that are not owned by this user.
for _, c := range containers {
if ptr.Val(c.GetIsDefaultCalendar()) && c.GetOwner() != nil {
defaultCalendarOwner = ptr.Val(c.GetOwner().GetAddress())
ctx = clues.Add(ctx, "default_calendar_owner", defaultCalendarOwner)
break
}
}
for _, c := range containers {
if el.Failure() != nil {
return el.Failure()
}
// Skip shared calendars if we have enough information to determine the owner
if isSharedCalendar(defaultCalendarOwner, c) {
var ownerEmail string
if c.GetOwner() != nil {
ownerEmail = ptr.Val(c.GetOwner().GetAddress())
}
logger.Ctx(ctx).Infow(
"skipping shared calendar",
"name", ptr.Val(c.GetName()),
"owner", ownerEmail)
continue
}
cacheFolder := graph.NewCacheFolder(
api.CalendarDisplayable{Calendarable: c},
path.Builder{}.Append(ptr.Val(c.GetId())),

View File

@ -13,7 +13,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata"
@ -102,7 +101,7 @@ func (m *eventRestoreMock) PatchItem(
type EventsRestoreIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestEventsRestoreIntgSuite(t *testing.T) {
@ -114,17 +113,17 @@ func TestEventsRestoreIntgSuite(t *testing.T) {
}
func (suite *EventsRestoreIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = newIntegrationTesterSetup(suite.T())
}
// Testing to ensure that cache system works for in multiple different environments
func (suite *EventsRestoreIntgSuite) TestCreateContainerDestination() {
runCreateDestinationTest(
suite.T(),
newEventRestoreHandler(suite.m365.AC),
newEventRestoreHandler(suite.its.ac),
path.EventsCategory,
suite.m365.TenantID,
suite.m365.User.ID,
suite.its.creds.AzureTenantID,
suite.its.userID,
testdata.DefaultRestoreConfig("").Location,
[]string{"Durmstrang"},
[]string{"Beauxbatons"})
@ -265,7 +264,7 @@ func (suite *EventsRestoreIntgSuite) TestRestoreEvent() {
ctx,
test.apiMock,
body,
suite.m365.User.ID,
suite.its.userID,
"destination",
test.collisionMap,
test.onCollision,

View File

@ -26,8 +26,6 @@ type backupHandler interface {
previewIncludeContainers() []string
previewExcludeContainers() []string
NewContainerCache(userID string) (string, graph.ContainerResolver)
canSkipItemFailurer
}
type addedAndRemovedItemGetter interface {
@ -59,14 +57,6 @@ func BackupHandlers(ac api.Client) map[path.CategoryType]backupHandler {
}
}
// canSkipItemFailurer is implemented by backup handlers that can classify
// certain item-level failures as skippable (returning the skip cause and
// true) rather than fatal.
type canSkipItemFailurer interface {
	CanSkipItemFailure(
		err error,
		resourceID string,
		opts control.Options,
	) (fault.SkipCause, bool)
}
// ---------------------------------------------------------------------------
// restore
// ---------------------------------------------------------------------------

View File

@ -0,0 +1,44 @@
package exchange
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// intgTesterSetup bundles shared fixtures for exchange integration tests.
type intgTesterSetup struct {
	// ac is an api client built from creds with default options.
	ac api.Client
	// creds holds the M365 account credentials loaded from test config.
	creds account.M365Config
	// userID identifies the M365 user targeted by the tests.
	userID string
}

// newIntegrationTesterSetup loads M365 test credentials, constructs an api
// client with default options, and resolves the target test user ID.
// Failures abort the calling test via require.
func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
	its := intgTesterSetup{}

	ctx, flush := tester.NewContext(t)
	defer flush()

	// Account + credentials come from the test configuration/environment.
	a := tconfig.NewM365Account(t)

	creds, err := a.M365Config()
	require.NoError(t, err, clues.ToCore(err))

	its.creds = creds

	its.ac, err = api.NewClient(
		creds,
		control.DefaultOptions(),
		count.New())
	require.NoError(t, err, clues.ToCore(err))

	its.userID = tconfig.GetM365UserID(ctx)

	return its
}

View File

@ -1,8 +1,6 @@
package exchange
import (
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)
@ -59,11 +57,3 @@ func (h mailBackupHandler) NewContainerCache(
getter: h.ac,
}
}
// CanSkipItemFailure reports whether an item-level failure may be downgraded
// to a skip for mail backups. Mail defines no skippable failure classes, so
// this always returns false with an empty cause.
func (h mailBackupHandler) CanSkipItemFailure(
	err error,
	resourceID string,
	opts control.Options,
) (fault.SkipCause, bool) {
	return "", false
}

View File

@ -1,83 +0,0 @@
package exchange
import (
"testing"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// MailBackupHandlerUnitSuite hosts unit tests for the mail backup handler.
type MailBackupHandlerUnitSuite struct {
	tester.Suite
}

// TestMailBackupHandlerUnitSuite wires the suite into the go test runner.
func TestMailBackupHandlerUnitSuite(t *testing.T) {
	suite.Run(t, &MailBackupHandlerUnitSuite{Suite: tester.NewUnitSuite(t)})
}

// TestHandler_CanSkipItemFailure verifies the mail handler never marks an
// item failure as skippable, regardless of the error or configuration (the
// events-specific 503 skip config is intentionally ignored for mail).
func (suite *MailBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() {
	resourceID := uuid.NewString()

	table := []struct {
		name        string
		err         error
		opts        control.Options
		expect      assert.BoolAssertionFunc
		expectCause fault.SkipCause
	}{
		{
			// Arbitrary error, no skip config.
			name:   "no config",
			err:    assert.AnError,
			opts:   control.Options{},
			expect: assert.False,
		},
		{
			// Skip map present but empty.
			name: "false when map is empty",
			err:  assert.AnError,
			opts: control.Options{
				SkipEventsOnInstance503ForResources: map[string]struct{}{},
			},
			expect: assert.False,
		},
		{
			// No error at all.
			name: "false on nil error",
			err:  nil,
			opts: control.Options{
				SkipEventsOnInstance503ForResources: map[string]struct{}{
					resourceID: {},
				},
			},
			expect: assert.False,
		},
		{
			// Even a configured resource never skips for mail.
			name: "false even if resource matches",
			err:  assert.AnError,
			opts: control.Options{
				SkipEventsOnInstance503ForResources: map[string]struct{}{
					resourceID: {},
				},
			},
			expect: assert.False,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			h := newMailBackupHandler(api.Client{})

			cause, result := h.CanSkipItemFailure(
				test.err,
				resourceID,
				test.opts)
			test.expect(t, result)
			assert.Equal(t, test.expectCause, cause)
		})
	}
}

View File

@ -10,8 +10,10 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
@ -28,24 +30,30 @@ const (
expectedFolderPath = "toplevel/subFolder/subsubfolder"
)
type MailFolderCacheIntgSuite struct {
type MailFolderCacheIntegrationSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
credentials account.M365Config
}
func TestMailFolderCacheIntegrationSuite(t *testing.T) {
suite.Run(t, &MailFolderCacheIntgSuite{
suite.Run(t, &MailFolderCacheIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs}),
})
}
func (suite *MailFolderCacheIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
func (suite *MailFolderCacheIntegrationSuite) SetupSuite() {
t := suite.T()
a := tconfig.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365
}
func (suite *MailFolderCacheIntgSuite) TestDeltaFetch() {
func (suite *MailFolderCacheIntegrationSuite) TestDeltaFetch() {
suite.T().Skipf("Test depends on hardcoded folder names. Skipping till that is fixed")
tests := []struct {
@ -67,6 +75,7 @@ func (suite *MailFolderCacheIntgSuite) TestDeltaFetch() {
path: []string{"some", "leading", "path"},
},
}
userID := tconfig.M365UserID(suite.T())
for _, test := range tests {
suite.Run(test.name, func() {
@ -75,15 +84,21 @@ func (suite *MailFolderCacheIntgSuite) TestDeltaFetch() {
ctx, flush := tester.NewContext(t)
defer flush()
acm := suite.m365.AC.Mail()
ac, err := api.NewClient(
suite.credentials,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))
acm := ac.Mail()
mfc := mailContainerCache{
userID: suite.m365.User.ID,
userID: userID,
enumer: acm,
getter: acm,
}
err := mfc.Populate(ctx, fault.New(true), test.root, test.path...)
err = mfc.Populate(ctx, fault.New(true), test.root, test.path...)
require.NoError(t, err, clues.ToCore(err))
p, l, err := mfc.IDToPath(ctx, testFolderID)

View File

@ -3,7 +3,6 @@ package exchange
import (
"context"
"errors"
"regexp"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
@ -148,8 +147,6 @@ func restoreMail(
msg = setMessageSVEPs(toMessage(msg))
setReplyTos(msg)
attachments := msg.GetAttachments()
// Item.Attachments --> HasAttachments doesn't always have a value populated when deserialized
msg.SetAttachments([]models.Attachmentable{})
@ -232,38 +229,6 @@ func setMessageSVEPs(msg models.Messageable) models.Messageable {
return msg
}
// setReplyTos sanitizes the replyTo recipients on msg in place: entries whose
// address is neither shaped like an SMTP email address (isValidEmail) nor an
// MSFT distinguished name (isValidDN) are dropped, and the surviving entries
// are rebuilt as fresh EmailAddress/Recipient models carrying only the name
// and address. A message with no replyTo entries is left untouched.
func setReplyTos(msg models.Messageable) {
	replyTos := msg.GetReplyTo()
	if len(replyTos) == 0 {
		return
	}

	sanitizedReplyTos := make([]models.Recipientable, 0, len(replyTos))

	for _, replyTo := range replyTos {
		emailAddress := replyTo.GetEmailAddress()
		// Guard against recipients with no email address attached; the prior
		// version dereferenced this unconditionally and would panic on nil.
		if emailAddress == nil {
			continue
		}

		address := ptr.Val(emailAddress.GetAddress())
		name := ptr.Val(emailAddress.GetName())

		if isValidEmail(address) || isValidDN(address) {
			newEmailAddress := models.NewEmailAddress()
			newEmailAddress.SetAddress(ptr.To(address))
			newEmailAddress.SetName(ptr.To(name))

			sanitizedReplyTo := models.NewRecipient()
			sanitizedReplyTo.SetEmailAddress(newEmailAddress)

			sanitizedReplyTos = append(sanitizedReplyTos, sanitizedReplyTo)
		}
	}

	msg.SetReplyTo(sanitizedReplyTos)
}
func (h mailRestoreHandler) GetItemsInContainerByCollisionKey(
ctx context.Context,
userID, containerID string,
@ -275,24 +240,3 @@ func (h mailRestoreHandler) GetItemsInContainerByCollisionKey(
return m, nil
}
// [TODO]relocate to a common place

// emailRegex loosely matches conventional SMTP addresses: a local part of
// letters, digits, and ._%+- characters, an @, then a dotted domain ending in
// a TLD of two or more letters. Compiled once at package scope so that
// isValidEmail does not pay a regexp compilation on every call.
var emailRegex = regexp.MustCompile(`^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$`)

// isValidEmail reports whether email matches the basic address pattern above.
func isValidEmail(email string) bool {
	return emailRegex.MatchString(email)
}
// dnRegex matches strings formatted as an MSFT Distinguished Name:
// strings that start with /o=, followed by any characters except /,
// then /ou=, followed by any characters except /,
// then /cn=, followed by any characters except /,
// then /cn= followed by a 32-character hexadecimal string, a -, and any
// additional alphanumeric or dash characters. Compiled once at package scope
// so isValidDN does not recompile the pattern on every call.
var dnRegex = regexp.MustCompile(`^/o=[^/]+/ou=[^/]+/cn=[^/]+/cn=[a-fA-F0-9]{32}-[a-zA-Z0-9-]+$`)

// isValidDN reports whether dn matches the MSFT Distinguished Name format.
func isValidDN(dn string) bool {
	return dnRegex.MatchString(dn)
}
}

View File

@ -11,10 +11,8 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata"
@ -25,127 +23,6 @@ import (
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// TestDN is a sample MSFT Distinguished Name used to exercise DN validation.
//nolint:lll
const TestDN = "/o=ExchangeLabs/ou=Exchange Administrative Group (FYDIBOHF23SPDLT)/cn=Recipients/cn=4eca0d46a2324036b0b326dc58cfc802-user"

// RestoreMailUnitSuite hosts unit tests for mail restore helpers.
type RestoreMailUnitSuite struct {
	tester.Suite
}

// TestRestoreMailUnitSuite wires the suite into the go test runner.
func TestRestoreMailUnitSuite(t *testing.T) {
	suite.Run(t, &RestoreMailUnitSuite{Suite: tester.NewUnitSuite(t)})
}

// TestIsValidEmail table-tests isValidEmail with accepting and rejecting
// inputs, including the empty string.
func (suite *RestoreMailUnitSuite) TestIsValidEmail() {
	table := []struct {
		name  string
		email string
		check assert.BoolAssertionFunc
	}{
		{
			name:  "valid email",
			email: "foo@bar.com",
			check: assert.True,
		},
		{
			name:  "invalid email, missing domain",
			email: "foo.com",
			check: assert.False,
		},
		{
			name:  "invalid email, random uuid",
			email: "12345678-abcd-90ef-88f8-2d95ef12fb66",
			check: assert.False,
		},
		{
			name:  "empty email",
			email: "",
			check: assert.False,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()
			result := isValidEmail(test.email)
			test.check(t, result)
		})
	}
}
// TestIsValidDN table-tests isValidDN against the sample TestDN and a
// non-DN string.
func (suite *RestoreMailUnitSuite) TestIsValidDN() {
	table := []struct {
		name  string
		dn    string
		check assert.BoolAssertionFunc
	}{
		{
			name:  "valid DN",
			dn:    TestDN,
			check: assert.True,
		},
		{
			name:  "invalid DN",
			dn:    "random string",
			check: assert.False,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()
			result := isValidDN(test.dn)
			test.check(t, result)
		})
	}
}
// TestSetReplyTos builds a message whose replyTo list mixes a valid email, a
// valid DN, a malformed address, and an empty address, then asserts that
// setReplyTos keeps only the valid entries with name and address intact.
func (suite *RestoreMailUnitSuite) TestSetReplyTos() {
	t := suite.T()

	replyTos := make([]models.Recipientable, 0)

	// Input name -> address pairs; only "foo.bar" and "dn" should survive.
	emailAddresses := map[string]string{
		"foo.bar": "foo@bar.com",
		"foo.com": "foo.com",
		"empty":   "",
		"dn":      TestDN,
	}

	// The subset of emailAddresses expected after sanitization.
	validEmailAddresses := map[string]string{
		"foo.bar": "foo@bar.com",
		"dn":      TestDN,
	}

	for k, v := range emailAddresses {
		emailAddress := models.NewEmailAddress()
		emailAddress.SetAddress(ptr.To(v))
		emailAddress.SetName(ptr.To(k))

		replyTo := models.NewRecipient()
		replyTo.SetEmailAddress(emailAddress)

		replyTos = append(replyTos, replyTo)
	}

	mailMessage := models.NewMessage()
	mailMessage.SetReplyTo(replyTos)

	setReplyTos(mailMessage)

	sanitizedReplyTos := mailMessage.GetReplyTo()
	require.Len(t, sanitizedReplyTos, len(validEmailAddresses))

	// Every surviving entry must map back to an expected name/address pair.
	for _, sanitizedReplyTo := range sanitizedReplyTos {
		emailAddress := sanitizedReplyTo.GetEmailAddress()
		assert.Contains(t, validEmailAddresses, ptr.Val(emailAddress.GetName()))
		assert.Equal(t, validEmailAddresses[ptr.Val(emailAddress.GetName())], ptr.Val(emailAddress.GetAddress()))
	}
}
var _ mailRestorer = &mailRestoreMock{}
type mailRestoreMock struct {
@ -195,7 +72,7 @@ func (m *mailRestoreMock) PostLargeAttachment(
type MailRestoreIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestMailRestoreIntgSuite(t *testing.T) {
@ -207,16 +84,16 @@ func TestMailRestoreIntgSuite(t *testing.T) {
}
func (suite *MailRestoreIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = newIntegrationTesterSetup(suite.T())
}
func (suite *MailRestoreIntgSuite) TestCreateContainerDestination() {
runCreateDestinationTest(
suite.T(),
newMailRestoreHandler(suite.m365.AC),
newMailRestoreHandler(suite.its.ac),
path.EmailCategory,
suite.m365.TenantID,
suite.m365.User.ID,
suite.its.creds.AzureTenantID,
suite.its.userID,
testdata.DefaultRestoreConfig("").Location,
[]string{"Griffindor", "Croix"},
[]string{"Griffindor", "Felicius"})
@ -357,7 +234,7 @@ func (suite *MailRestoreIntgSuite) TestRestoreMail() {
ctx,
test.apiMock,
body,
suite.m365.User.ID,
suite.its.userID,
"destination",
test.collisionMap,
test.onCollision,

View File

@ -6,15 +6,10 @@ import (
"github.com/microsoft/kiota-abstractions-go/serialization"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// ---------------------------------------------------------------------------
// get and serialize item mock
// ---------------------------------------------------------------------------
type ItemGetSerialize struct {
GetData serialization.Parsable
GetCount int
@ -49,23 +44,3 @@ func (m *ItemGetSerialize) Serialize(
func DefaultItemGetSerialize() *ItemGetSerialize {
return &ItemGetSerialize{}
}
// ---------------------------------------------------------------------------
// can skip item failure mock
// ---------------------------------------------------------------------------
// canSkipFailChecker is a test double for the canSkipItemFailurer interface
// that returns a canned answer.
type canSkipFailChecker struct {
	// canSkip is the fixed result returned by CanSkipItemFailure.
	canSkip bool
}

// CanSkipItemFailure ignores its inputs and returns the configured canSkip
// value with a fixed "testing" cause.
func (m canSkipFailChecker) CanSkipItemFailure(
	err error,
	resourceID string,
	opts control.Options,
) (fault.SkipCause, bool) {
	return fault.SkipCause("testing"), m.canSkip
}

// NeverCanSkipFailChecker returns a checker whose CanSkipItemFailure always
// reports false (zero-value canSkip).
func NeverCanSkipFailChecker() *canSkipFailChecker {
	return &canSkipFailChecker{}
}

View File

@ -12,8 +12,8 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/count"
@ -24,7 +24,8 @@ import (
type RestoreIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
credentials account.M365Config
ac api.Client
}
func TestRestoreIntgSuite(t *testing.T) {
@ -36,7 +37,18 @@ func TestRestoreIntgSuite(t *testing.T) {
}
func (suite *RestoreIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
t := suite.T()
a := tconfig.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365
suite.ac, err = api.NewClient(
m365,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))
}
// TestRestoreContact ensures contact object can be created, placed into
@ -48,26 +60,26 @@ func (suite *RestoreIntgSuite) TestRestoreContact() {
defer flush()
var (
userID = tconfig.M365UserID(t)
folderName = testdata.DefaultRestoreConfig("contact").Location
handler = newContactRestoreHandler(suite.m365.AC)
handler = newContactRestoreHandler(suite.ac)
)
aFolder, err := handler.ac.CreateContainer(ctx, suite.m365.User.ID, "", folderName)
aFolder, err := handler.ac.CreateContainer(ctx, userID, "", folderName)
require.NoError(t, err, clues.ToCore(err))
folderID := ptr.Val(aFolder.GetId())
defer func() {
// Remove the folder containing contact prior to exiting test
err = suite.m365.AC.Contacts().DeleteContainer(ctx, suite.m365.User.ID, folderID)
err = suite.ac.Contacts().DeleteContainer(ctx, userID, folderID)
assert.NoError(t, err, clues.ToCore(err))
}()
info, err := handler.restore(
ctx,
exchMock.ContactBytes("Corso TestContact"),
suite.m365.User.ID,
folderID,
userID, folderID,
nil,
control.Copy,
fault.New(true),
@ -85,18 +97,19 @@ func (suite *RestoreIntgSuite) TestRestoreEvent() {
defer flush()
var (
userID = tconfig.M365UserID(t)
subject = testdata.DefaultRestoreConfig("event").Location
handler = newEventRestoreHandler(suite.m365.AC)
handler = newEventRestoreHandler(suite.ac)
)
calendar, err := handler.ac.CreateContainer(ctx, suite.m365.User.ID, "", subject)
calendar, err := handler.ac.CreateContainer(ctx, userID, "", subject)
require.NoError(t, err, clues.ToCore(err))
calendarID := ptr.Val(calendar.GetId())
defer func() {
// Removes calendar containing events created during the test
err = suite.m365.AC.Events().DeleteContainer(ctx, suite.m365.User.ID, calendarID)
err = suite.ac.Events().DeleteContainer(ctx, userID, calendarID)
assert.NoError(t, err, clues.ToCore(err))
}()
@ -141,8 +154,7 @@ func (suite *RestoreIntgSuite) TestRestoreEvent() {
info, err := handler.restore(
ctx,
test.bytes,
suite.m365.User.ID,
calendarID,
userID, calendarID,
nil,
control.Copy,
fault.New(true),
@ -156,7 +168,10 @@ func (suite *RestoreIntgSuite) TestRestoreEvent() {
// TestRestoreExchangeObject verifies path.Category usage for restored objects
func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
t := suite.T()
handlers := RestoreHandlers(suite.m365.AC)
handlers := RestoreHandlers(suite.ac)
userID := tconfig.M365UserID(suite.T())
tests := []struct {
name string
@ -171,7 +186,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("mailobj").Location
folder, err := handlers[path.EmailCategory].
CreateContainer(ctx, suite.m365.User.ID, "", folderName)
CreateContainer(ctx, userID, "", folderName)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
@ -184,7 +199,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("mailwattch").Location
folder, err := handlers[path.EmailCategory].
CreateContainer(ctx, suite.m365.User.ID, "", folderName)
CreateContainer(ctx, userID, "", folderName)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
@ -197,7 +212,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("eventwattch").Location
folder, err := handlers[path.EmailCategory].
CreateContainer(ctx, suite.m365.User.ID, "", folderName)
CreateContainer(ctx, userID, "", folderName)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
@ -210,7 +225,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("mailitemattch").Location
folder, err := handlers[path.EmailCategory].
CreateContainer(ctx, suite.m365.User.ID, "", folderName)
CreateContainer(ctx, userID, "", folderName)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
@ -225,7 +240,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("mailbasicattch").Location
folder, err := handlers[path.EmailCategory].
CreateContainer(ctx, suite.m365.User.ID, "", folderName)
CreateContainer(ctx, userID, "", folderName)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
@ -240,7 +255,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("mailnestattch").Location
folder, err := handlers[path.EmailCategory].
CreateContainer(ctx, suite.m365.User.ID, "", folderName)
CreateContainer(ctx, userID, "", folderName)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
@ -255,7 +270,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("mailcontactattch").Location
folder, err := handlers[path.EmailCategory].
CreateContainer(ctx, suite.m365.User.ID, "", folderName)
CreateContainer(ctx, userID, "", folderName)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
@ -268,7 +283,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("nestedattch").Location
folder, err := handlers[path.EmailCategory].
CreateContainer(ctx, suite.m365.User.ID, "", folderName)
CreateContainer(ctx, userID, "", folderName)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
@ -281,7 +296,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("maillargeattch").Location
folder, err := handlers[path.EmailCategory].
CreateContainer(ctx, suite.m365.User.ID, "", folderName)
CreateContainer(ctx, userID, "", folderName)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
@ -294,7 +309,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("mailtwoattch").Location
folder, err := handlers[path.EmailCategory].
CreateContainer(ctx, suite.m365.User.ID, "", folderName)
CreateContainer(ctx, userID, "", folderName)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
@ -307,7 +322,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("mailrefattch").Location
folder, err := handlers[path.EmailCategory].
CreateContainer(ctx, suite.m365.User.ID, "", folderName)
CreateContainer(ctx, userID, "", folderName)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
@ -320,7 +335,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("contact").Location
folder, err := handlers[path.ContactsCategory].
CreateContainer(ctx, suite.m365.User.ID, "", folderName)
CreateContainer(ctx, userID, "", folderName)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(folder.GetId())
@ -333,7 +348,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("event").Location
calendar, err := handlers[path.EventsCategory].
CreateContainer(ctx, suite.m365.User.ID, "", folderName)
CreateContainer(ctx, userID, "", folderName)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(calendar.GetId())
@ -346,7 +361,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
destination: func(t *testing.T, ctx context.Context) string {
folderName := testdata.DefaultRestoreConfig("eventobj").Location
calendar, err := handlers[path.EventsCategory].
CreateContainer(ctx, suite.m365.User.ID, "", folderName)
CreateContainer(ctx, userID, "", folderName)
require.NoError(t, err, clues.ToCore(err))
return ptr.Val(calendar.GetId())
@ -365,8 +380,7 @@ func (suite *RestoreIntgSuite) TestRestoreExchangeObject() {
info, err := handlers[test.category].restore(
ctx,
test.bytes,
suite.m365.User.ID,
destination,
userID, destination,
nil,
control.Copy,
fault.New(true),
@ -386,11 +400,12 @@ func (suite *RestoreIntgSuite) TestRestoreAndBackupEvent_recurringInstancesWithA
defer flush()
var (
userID = tconfig.M365UserID(t)
subject = testdata.DefaultRestoreConfig("event").Location
handler = newEventRestoreHandler(suite.m365.AC)
handler = newEventRestoreHandler(suite.ac)
)
calendar, err := handler.ac.CreateContainer(ctx, suite.m365.User.ID, "", subject)
calendar, err := handler.ac.CreateContainer(ctx, userID, "", subject)
require.NoError(t, err, clues.ToCore(err))
calendarID := ptr.Val(calendar.GetId())
@ -399,8 +414,7 @@ func (suite *RestoreIntgSuite) TestRestoreAndBackupEvent_recurringInstancesWithA
info, err := handler.restore(
ctx,
bytes,
suite.m365.User.ID,
calendarID,
userID, calendarID,
nil,
control.Copy,
fault.New(true),
@ -411,7 +425,7 @@ func (suite *RestoreIntgSuite) TestRestoreAndBackupEvent_recurringInstancesWithA
ec, err := handler.ac.Stable.
Client().
Users().
ByUserId(suite.m365.User.ID).
ByUserId(userID).
Calendars().
ByCalendarId(calendarID).
Events().
@ -421,25 +435,17 @@ func (suite *RestoreIntgSuite) TestRestoreAndBackupEvent_recurringInstancesWithA
evts := ec.GetValue()
assert.Len(t, evts, 1, "count of events")
sp, info, err := suite.m365.AC.Events().GetItem(
ctx,
suite.m365.User.ID,
ptr.Val(evts[0].GetId()),
fault.New(true))
sp, info, err := suite.ac.Events().GetItem(ctx, userID, ptr.Val(evts[0].GetId()), fault.New(true))
require.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, info, "event item info")
body, err := suite.m365.AC.Events().Serialize(
ctx,
sp,
suite.m365.User.ID,
ptr.Val(evts[0].GetId()))
body, err := suite.ac.Events().Serialize(ctx, sp, userID, ptr.Val(evts[0].GetId()))
require.NoError(t, err, clues.ToCore(err))
event, err := api.BytesToEventable(body)
require.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, event.GetRecurrence(), "recurrence")
assert.NotNil(t, event.GetRecurrence(), "recurrence")
eo := event.GetAdditionalData()["exceptionOccurrences"]
assert.NotNil(t, eo, "exceptionOccurrences")

View File

@ -18,7 +18,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
@ -965,7 +964,9 @@ func (suite *BackupUnitSuite) TestPopulateCollections_ConversationsIncremental()
type BackupIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
resource string
tenantID string
ac api.Client
}
func TestBackupIntgSuite(t *testing.T) {
@ -978,19 +979,32 @@ func TestBackupIntgSuite(t *testing.T) {
func (suite *BackupIntgSuite) SetupSuite() {
t := suite.T()
suite.m365 = its.GetM365(t)
ctx, flush := tester.NewContext(t)
defer flush()
graph.InitializeConcurrencyLimiter(ctx, true, 4)
suite.resource = tconfig.M365TeamID(t)
acct := tconfig.NewM365Account(t)
creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.ac, err = api.NewClient(
creds,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))
suite.tenantID = creds.AzureTenantID
}
func (suite *BackupIntgSuite) TestCreateCollections() {
var (
protectedResource = suite.m365.Group.ID
resources = []string{suite.m365.Group.ID}
handler = NewChannelBackupHandler(protectedResource, suite.m365.AC.Channels())
protectedResource = tconfig.M365TeamID(suite.T())
resources = []string{protectedResource}
handler = NewChannelBackupHandler(protectedResource, suite.ac.Channels())
)
tests := []struct {
@ -1016,13 +1030,13 @@ func (suite *BackupIntgSuite) TestCreateCollections() {
ctrlOpts := control.DefaultOptions()
sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
sel := selectors.NewGroupsBackup([]string{protectedResource})
sel.Include(selTD.GroupsBackupChannelScope(sel))
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: ctrlOpts,
ProtectedResource: suite.m365.Group.Provider,
ProtectedResource: inMock.NewProvider(protectedResource, protectedResource),
Selector: sel.Selector,
}
@ -1030,7 +1044,7 @@ func (suite *BackupIntgSuite) TestCreateCollections() {
ctx,
bpc,
handler,
suite.m365.TenantID,
suite.tenantID,
test.scope,
func(status *support.ControllerOperationStatus) {},
false,

View File

@ -18,7 +18,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
@ -329,49 +328,59 @@ func (suite *SharePointBackupUnitSuite) TestPopulateListsCollections_incremental
}
}
type SharePointBackupIntgSuite struct {
type SharePointSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
}
func TestSharePointSuite(t *testing.T) {
suite.Run(t, &SharePointBackupIntgSuite{
suite.Run(t, &SharePointSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs}),
})
}
func (suite *SharePointBackupIntgSuite) SetupSuite() {
t := suite.T()
suite.m365 = its.GetM365(t)
ctx, flush := tester.NewContext(t)
func (suite *SharePointSuite) SetupSuite() {
ctx, flush := tester.NewContext(suite.T())
defer flush()
graph.InitializeConcurrencyLimiter(ctx, false, 4)
}
func (suite *SharePointBackupIntgSuite) TestCollectPages() {
func (suite *SharePointSuite) TestCollectPages() {
t := suite.T()
counter := count.New()
ctx, flush := tester.NewContext(t)
defer flush()
var (
siteID = tconfig.M365SiteID(t)
a = tconfig.NewM365Account(t)
counter = count.New()
)
creds, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
ac, err := api.NewClient(
creds,
control.DefaultOptions(),
counter)
require.NoError(t, err, clues.ToCore(err))
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.Site.Provider,
ProtectedResource: mock.NewProvider(siteID, siteID),
}
sel := selectors.NewSharePointBackup([]string{suite.m365.Site.ID})
sel := selectors.NewSharePointBackup([]string{siteID})
col, err := CollectPages(
ctx,
bpc,
suite.m365.Creds,
suite.m365.AC,
creds,
ac,
sel.Lists(selectors.Any())[0],
(&MockGraphService{}).UpdateStatus,
counter,
@ -380,27 +389,43 @@ func (suite *SharePointBackupIntgSuite) TestCollectPages() {
assert.NotEmpty(t, col)
}
func (suite *SharePointBackupIntgSuite) TestCollectLists() {
func (suite *SharePointSuite) TestCollectLists() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
siteID = tconfig.M365SiteID(t)
a = tconfig.NewM365Account(t)
counter = count.New()
)
creds, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
ac, err := api.NewClient(
creds,
control.DefaultOptions(),
counter)
require.NoError(t, err, clues.ToCore(err))
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.Site.Provider,
ProtectedResource: mock.NewProvider(siteID, siteID),
}
sel := selectors.NewSharePointBackup([]string{suite.m365.Site.ID})
bh := NewListsBackupHandler(suite.m365.Site.ID, suite.m365.AC.Lists())
sel := selectors.NewSharePointBackup([]string{siteID})
bh := NewListsBackupHandler(bpc.ProtectedResource.ID(), ac.Lists())
col, _, err := CollectLists(
ctx,
bh,
bpc,
suite.m365.AC,
suite.m365.Creds.AzureTenantID,
ac,
creds.AzureTenantID,
sel.Lists(selectors.Any())[0],
(&MockGraphService{}).UpdateStatus,
count.New(),
@ -420,7 +445,7 @@ func (suite *SharePointBackupIntgSuite) TestCollectLists() {
assert.True(t, metadataFound)
}
func (suite *SharePointBackupIntgSuite) TestParseListsMetadataCollections() {
func (suite *SharePointSuite) TestParseListsMetadataCollections() {
type fileValues struct {
fileName string
value string
@ -555,7 +580,7 @@ func (f failingColl) FetchItemByName(context.Context, string) (data.Item, error)
return nil, nil
}
func (suite *SharePointBackupIntgSuite) TestParseListsMetadataCollections_ReadFailure() {
func (suite *SharePointSuite) TestParseListsMetadataCollections_ReadFailure() {
t := suite.T()
ctx, flush := tester.NewContext(t)

View File

@ -20,7 +20,6 @@ import (
spMock "github.com/alcionai/corso/src/internal/m365/service/sharepoint/mock"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -114,17 +113,34 @@ func (suite *SharePointCollectionUnitSuite) TestPrefetchCollection_state() {
}
}
type SharePointCollIntgSuite struct {
type SharePointCollectionSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
siteID string
creds account.M365Config
ac api.Client
}
func (suite *SharePointCollIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
func (suite *SharePointCollectionSuite) SetupSuite() {
t := suite.T()
suite.siteID = tconfig.M365SiteID(t)
a := tconfig.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.creds = m365
ac, err := api.NewClient(
m365,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))
suite.ac = ac
}
func TestSharePointCollectionSuite(t *testing.T) {
suite.Run(t, &SharePointCollIntgSuite{
suite.Run(t, &SharePointCollectionSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs}),
@ -133,13 +149,15 @@ func TestSharePointCollectionSuite(t *testing.T) {
// TestListCollection tests basic functionality to create
// SharePoint collection and to use the data stream channel.
func (suite *SharePointCollIntgSuite) TestPrefetchCollection_Items() {
func (suite *SharePointCollectionSuite) TestPrefetchCollection_Items() {
var (
tenant = "some"
user = "user"
prevRoot = "prev"
dirRoot = "directory"
)
sel := selectors.NewSharePointBackup([]string{suite.m365.Site.ID})
sel := selectors.NewSharePointBackup([]string{"site"})
tables := []struct {
name, itemName string
@ -165,8 +183,8 @@ func (suite *SharePointCollIntgSuite) TestPrefetchCollection_Items() {
getter: &mock.ListHandler{},
getDir: func(t *testing.T, root string) path.Path {
dir, err := path.Build(
suite.m365.TenantID,
suite.m365.User.ID,
tenant,
user,
path.SharePointService,
path.ListsCategory,
false,
@ -214,8 +232,8 @@ func (suite *SharePointCollIntgSuite) TestPrefetchCollection_Items() {
getter: nil,
getDir: func(t *testing.T, root string) path.Path {
dir, err := path.Build(
suite.m365.TenantID,
suite.m365.User.ID,
tenant,
user,
path.SharePointService,
path.PagesCategory,
false,
@ -252,7 +270,7 @@ func (suite *SharePointCollIntgSuite) TestPrefetchCollection_Items() {
test.getDir(t, test.curr),
test.getDir(t, test.prev),
test.locPb,
suite.m365.AC,
suite.ac,
test.scope,
nil,
control.DefaultOptions(),
@ -288,7 +306,7 @@ func (suite *SharePointCollIntgSuite) TestPrefetchCollection_Items() {
}
}
func (suite *SharePointCollIntgSuite) TestLazyCollection_Items() {
func (suite *SharePointCollectionSuite) TestLazyCollection_Items() {
var (
t = suite.T()
errs = fault.New(true)
@ -398,7 +416,7 @@ func (suite *SharePointCollIntgSuite) TestLazyCollection_Items() {
}
}
func (suite *SharePointCollIntgSuite) TestLazyItem() {
func (suite *SharePointCollectionSuite) TestLazyItem() {
var (
t = suite.T()
now = time.Now()
@ -442,7 +460,7 @@ func (suite *SharePointCollIntgSuite) TestLazyItem() {
assert.Equal(t, now, info.Modified())
}
func (suite *SharePointCollIntgSuite) TestLazyItem_ReturnsEmptyReaderOnDeletedInFlight() {
func (suite *SharePointCollectionSuite) TestLazyItem_ReturnsEmptyReaderOnDeletedInFlight() {
var (
t = suite.T()
now = time.Now()

View File

@ -21,8 +21,8 @@ import (
siteMock "github.com/alcionai/corso/src/internal/m365/collection/site/mock"
spMock "github.com/alcionai/corso/src/internal/m365/service/sharepoint/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata"
@ -87,17 +87,32 @@ func (suite *SharePointCollectionUnitSuite) TestFormatListsRestoreDestination()
type SharePointRestoreSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
siteID string
creds account.M365Config
ac api.Client
}
func (suite *SharePointRestoreSuite) SetupSuite() {
t := suite.T()
suite.m365 = its.GetM365(t)
ctx, flush := tester.NewContext(t)
defer flush()
graph.InitializeConcurrencyLimiter(ctx, false, 4)
suite.siteID = tconfig.M365SiteID(t)
a := tconfig.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.creds = m365
ac, err := api.NewClient(
m365,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))
suite.ac = ac
}
func TestSharePointRestoreSuite(t *testing.T) {
@ -120,8 +135,8 @@ func (suite *SharePointRestoreSuite) TestListCollection_Restore() {
listTemplate = "genericList"
restoreCfg = testdata.DefaultRestoreConfig("")
destName = restoreCfg.Location
lrh = NewListsRestoreHandler(suite.m365.Site.ID, suite.m365.AC.Lists())
service = createTestService(t, suite.m365.Creds)
lrh = NewListsRestoreHandler(suite.siteID, suite.ac.Lists())
service = createTestService(t, suite.creds)
list = stubList(listTemplate, listName)
mockData = generateListData(t, service, list)
)
@ -132,7 +147,7 @@ func (suite *SharePointRestoreSuite) TestListCollection_Restore() {
ctx,
lrh,
mockData,
suite.m365.Site.ID,
suite.siteID,
restoreCfg,
nil,
count.New(),
@ -141,7 +156,7 @@ func (suite *SharePointRestoreSuite) TestListCollection_Restore() {
assert.Equal(t, fmt.Sprintf("%s_%s", destName, listName), deets.SharePoint.List.Name)
// Clean-Up
deleteList(ctx, t, suite.m365.Site.ID, lrh, deets)
deleteList(ctx, t, suite.siteID, lrh, deets)
}
func (suite *SharePointRestoreSuite) TestListCollection_Restore_invalidListTemplate() {
@ -151,10 +166,10 @@ func (suite *SharePointRestoreSuite) TestListCollection_Restore_invalidListTempl
defer flush()
var (
lrh = NewListsRestoreHandler(suite.m365.Site.ID, suite.m365.AC.Lists())
lrh = NewListsRestoreHandler(suite.siteID, suite.ac.Lists())
listName = "MockListing"
restoreCfg = testdata.DefaultRestoreConfig("")
service = createTestService(t, suite.m365.Creds)
service = createTestService(t, suite.creds)
)
restoreCfg.OnCollision = control.Copy
@ -186,7 +201,7 @@ func (suite *SharePointRestoreSuite) TestListCollection_Restore_invalidListTempl
ctx,
lrh,
listData,
suite.m365.Site.ID,
suite.siteID,
restoreCfg,
nil,
count.New(),
@ -207,8 +222,8 @@ func (suite *SharePointRestoreSuite) TestListCollection_RestoreInPlace_skip() {
listName = "MockListing"
listTemplate = "genericList"
restoreCfg = testdata.DefaultRestoreConfig("")
lrh = NewListsRestoreHandler(suite.m365.Site.ID, suite.m365.AC.Lists())
service = createTestService(t, suite.m365.Creds)
lrh = NewListsRestoreHandler(suite.siteID, suite.ac.Lists())
service = createTestService(t, suite.creds)
list = stubList(listTemplate, listName)
newList = stubList(listTemplate, listName)
cl = count.New()
@ -224,7 +239,7 @@ func (suite *SharePointRestoreSuite) TestListCollection_RestoreInPlace_skip() {
ctx,
lrh,
mockData,
suite.m365.Site.ID,
suite.siteID,
restoreCfg, // OnCollision is skip by default
collisionKeyToItemID,
cl,
@ -246,7 +261,7 @@ func (suite *SharePointRestoreSuite) TestListCollection_RestoreInPlace_copy() {
listTemplate = "genericList"
listID = "some-list-id"
restoreCfg = testdata.DefaultRestoreConfig("")
service = createTestService(t, suite.m365.Creds)
service = createTestService(t, suite.creds)
policyToKey = map[control.CollisionPolicy]count.Key{
control.Replace: count.CollisionReplace,
@ -339,7 +354,7 @@ func (suite *SharePointRestoreSuite) TestListCollection_RestoreInPlace_copy() {
ctx,
test.lrh,
mockData,
suite.m365.Site.ID,
suite.siteID,
restoreCfg,
collisionKeyToItemID,
cl,

View File

@ -17,7 +17,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
@ -256,7 +255,9 @@ func (suite *BackupUnitSuite) TestPopulateCollections() {
type BackupIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
resource string
tenantID string
ac api.Client
}
func TestBackupIntgSuite(t *testing.T) {
@ -269,20 +270,33 @@ func TestBackupIntgSuite(t *testing.T) {
func (suite *BackupIntgSuite) SetupSuite() {
t := suite.T()
suite.m365 = its.GetM365(t)
ctx, flush := tester.NewContext(t)
defer flush()
graph.InitializeConcurrencyLimiter(ctx, true, 4)
suite.resource = tconfig.M365TeamID(t)
acct := tconfig.NewM365Account(t)
creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.ac, err = api.NewClient(
creds,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))
suite.tenantID = creds.AzureTenantID
}
func (suite *BackupIntgSuite) TestCreateCollections() {
var (
tenant = suite.m365.TenantID
protectedResource = suite.m365.Group.ID
tenant = tconfig.M365TenantID(suite.T())
protectedResource = tconfig.M365TeamID(suite.T())
resources = []string{protectedResource}
handler = NewUsersChatsBackupHandler(tenant, protectedResource, suite.m365.AC.Chats())
handler = NewUsersChatsBackupHandler(tenant, protectedResource, suite.ac.Chats())
)
tests := []struct {
@ -308,13 +322,13 @@ func (suite *BackupIntgSuite) TestCreateCollections() {
ctrlOpts := control.DefaultOptions()
sel := selectors.NewTeamsChatsBackup([]string{suite.m365.Group.ID})
sel := selectors.NewTeamsChatsBackup([]string{protectedResource})
sel.Include(selTD.TeamsChatsBackupChatScope(sel))
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: ctrlOpts,
ProtectedResource: suite.m365.Group.Provider,
ProtectedResource: inMock.NewProvider(protectedResource, protectedResource),
Selector: sel.Selector,
}
@ -322,7 +336,7 @@ func (suite *BackupIntgSuite) TestCreateCollections() {
ctx,
bpc,
handler,
suite.m365.TenantID,
suite.tenantID,
test.scope,
func(status *support.ControllerOperationStatus) {},
false,

View File

@ -23,7 +23,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -414,8 +413,9 @@ func (suite *ControllerUnitSuite) TestController_CacheItemInfo() {
type ControllerIntegrationSuite struct {
tester.Suite
ctrl *Controller
m365 its.M365IntgTestSetup
ctrl *Controller
user string
secondaryUser string
}
func TestControllerIntegrationSuite(t *testing.T) {
@ -428,12 +428,15 @@ func TestControllerIntegrationSuite(t *testing.T) {
func (suite *ControllerIntegrationSuite) SetupSuite() {
t := suite.T()
suite.m365 = its.GetM365(t)
ctx, flush := tester.NewContext(t)
defer flush()
suite.ctrl = newController(ctx, t, path.ExchangeService)
suite.user = tconfig.M365UserID(t)
suite.secondaryUser = tconfig.SecondaryM365UserID(t)
tester.LogTimeOfTest(t)
}
func (suite *ControllerIntegrationSuite) TestEmptyCollections() {
@ -1061,7 +1064,7 @@ func (suite *ControllerIntegrationSuite) TestRestoreAndBackup_core() {
suite.Run(test.name, func() {
cfg := stub.ConfigInfo{
Tenant: suite.ctrl.tenant,
ResourceOwners: []string{suite.m365.User.ID},
ResourceOwners: []string{suite.user},
Service: test.service,
Opts: control.DefaultOptions(),
RestoreCfg: control.DefaultRestoreConfig(dttm.SafeForTesting),
@ -1140,7 +1143,7 @@ func (suite *ControllerIntegrationSuite) TestMultiFolderBackupDifferentNames() {
ctx, flush := tester.NewContext(t)
defer flush()
restoreSel := getSelectorWith(t, test.service, []string{suite.m365.User.ID}, true)
restoreSel := getSelectorWith(t, test.service, []string{suite.user}, true)
expectedDests := make([]destAndCats, 0, len(test.collections))
allItems := 0
allExpectedData := map[string]map[string][]byte{}
@ -1151,7 +1154,7 @@ func (suite *ControllerIntegrationSuite) TestMultiFolderBackupDifferentNames() {
restoreCfg.IncludePermissions = true
expectedDests = append(expectedDests, destAndCats{
resourceOwner: suite.m365.User.ID,
resourceOwner: suite.user,
dest: restoreCfg.Location,
cats: map[path.CategoryType]struct{}{
collection.Category: {},
@ -1161,7 +1164,7 @@ func (suite *ControllerIntegrationSuite) TestMultiFolderBackupDifferentNames() {
totalItems, _, collections, expectedData, err := stub.CollectionsForInfo(
test.service,
suite.ctrl.tenant,
suite.m365.User.ID,
suite.user,
restoreCfg,
[]stub.ColInfo{collection},
version.Backup)
@ -1286,7 +1289,7 @@ func (suite *ControllerIntegrationSuite) TestRestoreAndBackup_largeMailAttachmen
cfg := stub.ConfigInfo{
Tenant: suite.ctrl.tenant,
ResourceOwners: []string{suite.m365.User.ID},
ResourceOwners: []string{suite.user},
Service: test.service,
Opts: control.DefaultOptions(),
RestoreCfg: restoreCfg,
@ -1307,7 +1310,7 @@ func (suite *ControllerIntegrationSuite) TestProduceBackupCollections_createsPre
name: "Exchange",
resourceCat: resource.Users,
selectorFunc: func(t *testing.T) selectors.Selector {
sel := selectors.NewExchangeBackup([]string{suite.m365.User.ID})
sel := selectors.NewExchangeBackup([]string{suite.user})
sel.Include(
sel.ContactFolders(selectors.None()),
sel.EventCalendars(selectors.None()),
@ -1326,7 +1329,7 @@ func (suite *ControllerIntegrationSuite) TestProduceBackupCollections_createsPre
name: "OneDrive",
resourceCat: resource.Users,
selectorFunc: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup([]string{suite.m365.User.ID})
sel := selectors.NewOneDriveBackup([]string{suite.user})
sel.Include(sel.Folders(selectors.None()))
return sel.Selector

View File

@ -1,26 +1,30 @@
package m365
import (
"context"
"fmt"
"strings"
"testing"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
m365Stub "github.com/alcionai/corso/src/internal/m365/stub"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/dttm"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
var (
@ -40,6 +44,119 @@ var (
readPerm = []string{"read"}
)
func mustGetDefaultDriveID(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
ac api.Client,
service path.ServiceType,
resourceOwner string,
) string {
var (
err error
d models.Driveable
)
switch service {
case path.OneDriveService:
d, err = ac.Users().GetDefaultDrive(ctx, resourceOwner)
case path.SharePointService:
d, err = ac.Sites().GetDefaultDrive(ctx, resourceOwner)
default:
assert.FailNowf(t, "unknown service type %s", service.String())
}
if err != nil {
err = clues.Wrap(err, "retrieving drive")
}
require.NoError(t, err, clues.ToCore(err))
id := ptr.Val(d.GetId())
require.NotEmpty(t, id)
return id
}
type suiteInfo interface {
APIClient() api.Client
Tenant() string
// Returns (username, user ID) for the user. These values are used for
// permissions.
PrimaryUser() (string, string)
SecondaryUser() (string, string)
TertiaryUser() (string, string)
// ResourceOwner returns the resource owner to run the backup/restore
// with. This can be different from the values used for permissions and it can
// also be a site.
ResourceOwner() string
Service() path.ServiceType
}
type oneDriveSuite interface {
tester.Suite
suiteInfo
}
type suiteInfoImpl struct {
ac api.Client
controller *Controller
resourceOwner string
secondaryUser string
secondaryUserID string
service path.ServiceType
tertiaryUser string
tertiaryUserID string
user string
userID string
}
func NewSuiteInfoImpl(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
resourceOwner string,
service path.ServiceType,
) suiteInfoImpl {
ctrl := newController(ctx, t, path.OneDriveService)
return suiteInfoImpl{
ac: ctrl.AC,
controller: ctrl,
resourceOwner: resourceOwner,
secondaryUser: tconfig.SecondaryM365UserID(t),
service: service,
tertiaryUser: tconfig.TertiaryM365UserID(t),
user: tconfig.M365UserID(t),
}
}
func (si suiteInfoImpl) APIClient() api.Client {
return si.ac
}
func (si suiteInfoImpl) Tenant() string {
return si.controller.tenant
}
func (si suiteInfoImpl) PrimaryUser() (string, string) {
return si.user, si.userID
}
func (si suiteInfoImpl) SecondaryUser() (string, string) {
return si.secondaryUser, si.secondaryUserID
}
func (si suiteInfoImpl) TertiaryUser() (string, string) {
return si.tertiaryUser, si.tertiaryUserID
}
func (si suiteInfoImpl) ResourceOwner() string {
return si.resourceOwner
}
func (si suiteInfoImpl) Service() path.ServiceType {
return si.service
}
// ---------------------------------------------------------------------------
// SharePoint Libraries
// ---------------------------------------------------------------------------
@ -49,8 +166,7 @@ var (
type SharePointIntegrationSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
resourceAndSvc its.ResourceServicer
suiteInfo
}
func TestSharePointIntegrationSuite(t *testing.T) {
@ -62,38 +178,57 @@ func TestSharePointIntegrationSuite(t *testing.T) {
}
func (suite *SharePointIntegrationSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.resourceAndSvc = its.NewResourceService(suite.m365.Site, path.SharePointService)
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
si := NewSuiteInfoImpl(suite.T(), ctx, tconfig.M365SiteID(suite.T()), path.SharePointService)
// users needed for permissions
user, err := si.controller.AC.Users().GetByID(ctx, si.user, api.CallConfig{})
require.NoError(t, err, "fetching user", si.user, clues.ToCore(err))
si.userID = ptr.Val(user.GetId())
secondaryUser, err := si.controller.AC.Users().GetByID(ctx, si.secondaryUser, api.CallConfig{})
require.NoError(t, err, "fetching user", si.secondaryUser, clues.ToCore(err))
si.secondaryUserID = ptr.Val(secondaryUser.GetId())
tertiaryUser, err := si.controller.AC.Users().GetByID(ctx, si.tertiaryUser, api.CallConfig{})
require.NoError(t, err, "fetching user", si.tertiaryUser, clues.ToCore(err))
si.tertiaryUserID = ptr.Val(tertiaryUser.GetId())
suite.suiteInfo = si
}
func (suite *SharePointIntegrationSuite) TestRestoreAndBackup_MultipleFilesAndFolders_NoPermissions() {
testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(suite, suite.m365, suite.resourceAndSvc, version.Backup)
testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(suite, version.Backup)
}
// TODO: Re-enable these tests (disabled as it currently acting up CI)
func (suite *SharePointIntegrationSuite) TestPermissionsRestoreAndBackup() {
suite.T().Skip("Temporarily disabled due to CI issues")
testPermissionsRestoreAndBackup(suite, suite.m365, suite.resourceAndSvc, version.Backup)
testPermissionsRestoreAndBackup(suite, version.Backup)
}
func (suite *SharePointIntegrationSuite) TestRestoreNoPermissionsAndBackup() {
suite.T().Skip("Temporarily disabled due to CI issues")
testRestoreNoPermissionsAndBackup(suite, suite.m365, suite.resourceAndSvc, version.Backup)
testRestoreNoPermissionsAndBackup(suite, version.Backup)
}
func (suite *SharePointIntegrationSuite) TestPermissionsInheritanceRestoreAndBackup() {
suite.T().Skip("Temporarily disabled due to CI issues")
testPermissionsInheritanceRestoreAndBackup(suite, suite.m365, suite.resourceAndSvc, version.Backup)
testPermissionsInheritanceRestoreAndBackup(suite, version.Backup)
}
func (suite *SharePointIntegrationSuite) TestLinkSharesInheritanceRestoreAndBackup() {
suite.T().Skip("Temporarily disabled due to CI issues")
testLinkSharesInheritanceRestoreAndBackup(suite, suite.m365, suite.resourceAndSvc, version.Backup)
testLinkSharesInheritanceRestoreAndBackup(suite, version.Backup)
}
func (suite *SharePointIntegrationSuite) TestRestoreFolderNamedFolderRegression() {
// No reason why it couldn't work with previous versions, but this is when it got introduced.
testRestoreFolderNamedFolderRegression(suite, suite.m365, suite.resourceAndSvc, version.All8MigrateUserPNToID)
testRestoreFolderNamedFolderRegression(suite, version.All8MigrateUserPNToID)
}
// ---------------------------------------------------------------------------
@ -101,8 +236,7 @@ func (suite *SharePointIntegrationSuite) TestRestoreFolderNamedFolderRegression(
// ---------------------------------------------------------------------------
type OneDriveIntegrationSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
resourceAndSvc its.ResourceServicer
suiteInfo
}
func TestOneDriveIntegrationSuite(t *testing.T) {
@ -114,33 +248,51 @@ func TestOneDriveIntegrationSuite(t *testing.T) {
}
func (suite *OneDriveIntegrationSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.resourceAndSvc = its.NewResourceService(suite.m365.User, path.OneDriveService)
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
si := NewSuiteInfoImpl(t, ctx, tconfig.M365UserID(t), path.OneDriveService)
user, err := si.controller.AC.Users().GetByID(ctx, si.user, api.CallConfig{})
require.NoError(t, err, "fetching user", si.user, clues.ToCore(err))
si.userID = ptr.Val(user.GetId())
secondaryUser, err := si.controller.AC.Users().GetByID(ctx, si.secondaryUser, api.CallConfig{})
require.NoError(t, err, "fetching user", si.secondaryUser, clues.ToCore(err))
si.secondaryUserID = ptr.Val(secondaryUser.GetId())
tertiaryUser, err := si.controller.AC.Users().GetByID(ctx, si.tertiaryUser, api.CallConfig{})
require.NoError(t, err, "fetching user", si.tertiaryUser, clues.ToCore(err))
si.tertiaryUserID = ptr.Val(tertiaryUser.GetId())
suite.suiteInfo = si
}
func (suite *OneDriveIntegrationSuite) TestRestoreAndBackup_MultipleFilesAndFolders_NoPermissions() {
testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(suite, suite.m365, suite.resourceAndSvc, version.Backup)
testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(suite, version.Backup)
}
func (suite *OneDriveIntegrationSuite) TestPermissionsRestoreAndBackup() {
testPermissionsRestoreAndBackup(suite, suite.m365, suite.resourceAndSvc, version.Backup)
testPermissionsRestoreAndBackup(suite, version.Backup)
}
func (suite *OneDriveIntegrationSuite) TestRestoreNoPermissionsAndBackup() {
testRestoreNoPermissionsAndBackup(suite, suite.m365, suite.resourceAndSvc, version.Backup)
testRestoreNoPermissionsAndBackup(suite, version.Backup)
}
func (suite *OneDriveIntegrationSuite) TestPermissionsInheritanceRestoreAndBackup() {
testPermissionsInheritanceRestoreAndBackup(suite, suite.m365, suite.resourceAndSvc, version.Backup)
testPermissionsInheritanceRestoreAndBackup(suite, version.Backup)
}
func (suite *OneDriveIntegrationSuite) TestLinkSharesInheritanceRestoreAndBackup() {
testLinkSharesInheritanceRestoreAndBackup(suite, suite.m365, suite.resourceAndSvc, version.Backup)
testLinkSharesInheritanceRestoreAndBackup(suite, version.Backup)
}
func (suite *OneDriveIntegrationSuite) TestRestoreFolderNamedFolderRegression() {
// No reason why it couldn't work with previous versions, but this is when it got introduced.
testRestoreFolderNamedFolderRegression(suite, suite.m365, suite.resourceAndSvc, version.All8MigrateUserPNToID)
testRestoreFolderNamedFolderRegression(suite, version.All8MigrateUserPNToID)
}
// ---------------------------------------------------------------------------
@ -148,8 +300,7 @@ func (suite *OneDriveIntegrationSuite) TestRestoreFolderNamedFolderRegression()
// ---------------------------------------------------------------------------
type OneDriveNightlySuite struct {
tester.Suite
m365 its.M365IntgTestSetup
resourceAndSvc its.ResourceServicer
suiteInfo
}
func TestOneDriveNightlySuite(t *testing.T) {
@ -161,48 +312,70 @@ func TestOneDriveNightlySuite(t *testing.T) {
}
func (suite *OneDriveNightlySuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.resourceAndSvc = its.NewResourceService(suite.m365.User, path.OneDriveService)
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
si := NewSuiteInfoImpl(t, ctx, tconfig.M365UserID(t), path.OneDriveService)
user, err := si.controller.AC.Users().GetByID(ctx, si.user, api.CallConfig{})
require.NoError(t, err, "fetching user", si.user, clues.ToCore(err))
si.userID = ptr.Val(user.GetId())
secondaryUser, err := si.controller.AC.Users().GetByID(ctx, si.secondaryUser, api.CallConfig{})
require.NoError(t, err, "fetching user", si.secondaryUser, clues.ToCore(err))
si.secondaryUserID = ptr.Val(secondaryUser.GetId())
tertiaryUser, err := si.controller.AC.Users().GetByID(ctx, si.tertiaryUser, api.CallConfig{})
require.NoError(t, err, "fetching user", si.tertiaryUser, clues.ToCore(err))
si.tertiaryUserID = ptr.Val(tertiaryUser.GetId())
suite.suiteInfo = si
}
func (suite *OneDriveNightlySuite) TestRestoreAndBackup_MultipleFilesAndFolders_NoPermissions() {
testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(suite, suite.m365, suite.resourceAndSvc, 0)
testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(suite, 0)
}
func (suite *OneDriveNightlySuite) TestPermissionsRestoreAndBackup() {
testPermissionsRestoreAndBackup(suite, suite.m365, suite.resourceAndSvc, version.OneDrive1DataAndMetaFiles)
testPermissionsRestoreAndBackup(suite, version.OneDrive1DataAndMetaFiles)
}
func (suite *OneDriveNightlySuite) TestRestoreNoPermissionsAndBackup() {
testRestoreNoPermissionsAndBackup(suite, suite.m365, suite.resourceAndSvc, version.OneDrive1DataAndMetaFiles)
testRestoreNoPermissionsAndBackup(suite, version.OneDrive1DataAndMetaFiles)
}
func (suite *OneDriveNightlySuite) TestPermissionsInheritanceRestoreAndBackup() {
// No reason why it couldn't work with previous versions, but this is when it got introduced.
testPermissionsInheritanceRestoreAndBackup(
suite,
suite.m365,
suite.resourceAndSvc,
version.OneDrive4DirIncludesPermissions)
testPermissionsInheritanceRestoreAndBackup(suite, version.OneDrive4DirIncludesPermissions)
}
func (suite *OneDriveNightlySuite) TestLinkSharesInheritanceRestoreAndBackup() {
testLinkSharesInheritanceRestoreAndBackup(suite, suite.m365, suite.resourceAndSvc, version.Backup)
testLinkSharesInheritanceRestoreAndBackup(suite, version.Backup)
}
func (suite *OneDriveNightlySuite) TestRestoreFolderNamedFolderRegression() {
// No reason why it couldn't work with previous versions, but this is when it got introduced.
testRestoreFolderNamedFolderRegression(suite, suite.m365, suite.resourceAndSvc, version.All8MigrateUserPNToID)
testRestoreFolderNamedFolderRegression(suite, version.All8MigrateUserPNToID)
}
func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(
suite tester.Suite,
m365 its.M365IntgTestSetup,
resourceAndSvc its.ResourceServicer,
suite oneDriveSuite,
startVersion int,
) {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
// Get the default drive ID for the test user.
driveID := resourceAndSvc.Resource().DriveID
driveID := mustGetDefaultDriveID(
t,
ctx,
suite.APIClient(),
suite.Service(),
suite.ResourceOwner())
rootPath := []string{
odConsts.DrivesPathDir,
@ -315,17 +488,17 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(
},
}
expected, err := stub.DataForInfo(resourceAndSvc.Service(), cols, version.Backup)
expected, err := stub.DataForInfo(suite.Service(), cols, version.Backup)
require.NoError(suite.T(), err)
for vn := startVersion; vn <= version.Backup; vn++ {
suite.Run(fmt.Sprintf("Version%d", vn), func() {
t := suite.T()
input, err := stub.DataForInfo(resourceAndSvc.Service(), cols, vn)
input, err := stub.DataForInfo(suite.Service(), cols, vn)
require.NoError(suite.T(), err)
testData := restoreBackupInfoMultiVersion{
service: resourceAndSvc.Service(),
service: suite.Service(),
backupVersion: vn,
collectionsPrevious: input,
collectionsLatest: expected,
@ -338,8 +511,8 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(
opts := control.DefaultOptions()
cfg := m365Stub.ConfigInfo{
Tenant: m365.TenantID,
ResourceOwners: []string{resourceAndSvc.Resource().ID},
Tenant: suite.Tenant(),
ResourceOwners: []string{suite.ResourceOwner()},
Service: testData.service,
Opts: opts,
RestoreCfg: restoreCfg,
@ -350,14 +523,21 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(
}
}
func testPermissionsRestoreAndBackup(
suite tester.Suite,
m365 its.M365IntgTestSetup,
resourceAndSvc its.ResourceServicer,
startVersion int,
) {
func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
secondaryUserName, secondaryUserID := suite.SecondaryUser()
// Get the default drive ID for the test user.
driveID := resourceAndSvc.Resource().DriveID
driveID := mustGetDefaultDriveID(
t,
ctx,
suite.APIClient(),
suite.Service(),
suite.ResourceOwner())
fileName2 := "test-file2.txt"
folderCName := "folder-c"
@ -407,8 +587,8 @@ func testPermissionsRestoreAndBackup(
Data: fileAData,
Meta: stub.MetaData{
Perms: stub.PermData{
User: m365.SecondaryUser.Email,
EntityID: m365.SecondaryUser.ID,
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: writePerm,
},
},
@ -434,8 +614,8 @@ func testPermissionsRestoreAndBackup(
Name: folderAName,
Meta: stub.MetaData{
Perms: stub.PermData{
User: m365.SecondaryUser.Email,
EntityID: m365.SecondaryUser.ID,
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: readPerm,
},
},
@ -444,8 +624,8 @@ func testPermissionsRestoreAndBackup(
Name: folderCName,
Meta: stub.MetaData{
Perms: stub.PermData{
User: m365.SecondaryUser.Email,
EntityID: m365.SecondaryUser.ID,
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: readPerm,
},
},
@ -465,8 +645,8 @@ func testPermissionsRestoreAndBackup(
Data: fileBData,
Meta: stub.MetaData{
Perms: stub.PermData{
User: m365.SecondaryUser.Email,
EntityID: m365.SecondaryUser.ID,
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: writePerm,
},
},
@ -477,8 +657,8 @@ func testPermissionsRestoreAndBackup(
Name: folderAName,
Meta: stub.MetaData{
Perms: stub.PermData{
User: m365.SecondaryUser.Email,
EntityID: m365.SecondaryUser.ID,
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: readPerm,
},
},
@ -496,15 +676,15 @@ func testPermissionsRestoreAndBackup(
// name: fileName,
// data: fileDData,
// perms: stub.PermData{
// user: m365.SecondaryUser.Email,
// entityID: m365.SecondaryUser.ID,
// user: secondaryUserName,
// entityID: secondaryUserID,
// roles: readPerm,
// },
// },
// },
// Perms: stub.PermData{
// User: m365.SecondaryUser.Email,
// EntityID: m365.SecondaryUser.ID,
// User: secondaryUserName,
// EntityID: secondaryUserID,
// Roles: readPerm,
// },
// },
@ -518,8 +698,8 @@ func testPermissionsRestoreAndBackup(
Data: fileEData,
Meta: stub.MetaData{
Perms: stub.PermData{
User: m365.SecondaryUser.Email,
EntityID: m365.SecondaryUser.ID,
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: writePerm,
},
},
@ -527,8 +707,8 @@ func testPermissionsRestoreAndBackup(
},
Meta: stub.MetaData{
Perms: stub.PermData{
User: m365.SecondaryUser.Email,
EntityID: m365.SecondaryUser.ID,
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: readPerm,
},
},
@ -548,18 +728,17 @@ func testPermissionsRestoreAndBackup(
},
Meta: stub.MetaData{
Perms: stub.PermData{
User: m365.SecondaryUser.Email,
EntityID: m365.SecondaryUser.ID,
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: readPerm,
},
},
},
}
expected, err := stub.DataForInfo(resourceAndSvc.Service(), cols, version.Backup)
expected, err := stub.DataForInfo(suite.Service(), cols, version.Backup)
require.NoError(suite.T(), err)
bss := resourceAndSvc.Service().String()
bss := suite.Service().String()
for vn := startVersion; vn <= version.Backup; vn++ {
suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
@ -567,11 +746,11 @@ func testPermissionsRestoreAndBackup(
// Ideally this can always be true or false and still
// work, but limiting older versions to use emails so as
// to validate that flow as well.
input, err := stub.DataForInfo(resourceAndSvc.Service(), cols, vn)
input, err := stub.DataForInfo(suite.Service(), cols, vn)
require.NoError(suite.T(), err)
testData := restoreBackupInfoMultiVersion{
service: resourceAndSvc.Service(),
service: suite.Service(),
backupVersion: vn,
collectionsPrevious: input,
collectionsLatest: expected,
@ -584,8 +763,8 @@ func testPermissionsRestoreAndBackup(
opts := control.DefaultOptions()
cfg := m365Stub.ConfigInfo{
Tenant: m365.TenantID,
ResourceOwners: []string{resourceAndSvc.Resource().ID},
Tenant: suite.Tenant(),
ResourceOwners: []string{suite.ResourceOwner()},
Service: testData.service,
Opts: opts,
RestoreCfg: restoreCfg,
@ -596,14 +775,21 @@ func testPermissionsRestoreAndBackup(
}
}
func testRestoreNoPermissionsAndBackup(
suite tester.Suite,
m365 its.M365IntgTestSetup,
resourceAndSvc its.ResourceServicer,
startVersion int,
) {
func testRestoreNoPermissionsAndBackup(suite oneDriveSuite, startVersion int) {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
secondaryUserName, secondaryUserID := suite.SecondaryUser()
// Get the default drive ID for the test user.
driveID := resourceAndSvc.Resource().DriveID
driveID := mustGetDefaultDriveID(
t,
ctx,
suite.APIClient(),
suite.Service(),
suite.ResourceOwner())
inputCols := []stub.ColInfo{
{
@ -618,8 +804,8 @@ func testRestoreNoPermissionsAndBackup(
Data: fileAData,
Meta: stub.MetaData{
Perms: stub.PermData{
User: m365.SecondaryUser.Email,
EntityID: m365.SecondaryUser.ID,
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: writePerm,
},
SharingMode: metadata.SharingModeCustom,
@ -646,20 +832,18 @@ func testRestoreNoPermissionsAndBackup(
},
}
expected, err := stub.DataForInfo(resourceAndSvc.Service(), expectedCols, version.Backup)
require.NoError(suite.T(), err, clues.ToCore(err))
bss := resourceAndSvc.Service().String()
expected, err := stub.DataForInfo(suite.Service(), expectedCols, version.Backup)
require.NoError(suite.T(), err)
bss := suite.Service().String()
for vn := startVersion; vn <= version.Backup; vn++ {
suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
t := suite.T()
input, err := stub.DataForInfo(resourceAndSvc.Service(), inputCols, vn)
require.NoError(t, err, clues.ToCore(err))
input, err := stub.DataForInfo(suite.Service(), inputCols, vn)
require.NoError(suite.T(), err)
testData := restoreBackupInfoMultiVersion{
service: resourceAndSvc.Service(),
service: suite.Service(),
backupVersion: vn,
collectionsPrevious: input,
collectionsLatest: expected,
@ -672,8 +856,8 @@ func testRestoreNoPermissionsAndBackup(
opts := control.DefaultOptions()
cfg := m365Stub.ConfigInfo{
Tenant: m365.TenantID,
ResourceOwners: []string{resourceAndSvc.Resource().ID},
Tenant: suite.Tenant(),
ResourceOwners: []string{suite.ResourceOwner()},
Service: testData.service,
Opts: opts,
RestoreCfg: restoreCfg,
@ -686,14 +870,22 @@ func testRestoreNoPermissionsAndBackup(
// This is similar to TestPermissionsRestoreAndBackup but tests purely
// for inheritance and that too only with newer versions
func testPermissionsInheritanceRestoreAndBackup(
suite tester.Suite,
m365 its.M365IntgTestSetup,
resourceAndSvc its.ResourceServicer,
startVersion int,
) {
func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersion int) {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
secondaryUserName, secondaryUserID := suite.SecondaryUser()
tertiaryUserName, tertiaryUserID := suite.TertiaryUser()
// Get the default drive ID for the test user.
driveID := resourceAndSvc.Resource().DriveID
driveID := mustGetDefaultDriveID(
t,
ctx,
suite.APIClient(),
suite.Service(),
suite.ResourceOwner())
folderAName := "custom"
folderBName := "inherited"
@ -737,8 +929,8 @@ func testPermissionsInheritanceRestoreAndBackup(
Data: fileAData,
Meta: stub.MetaData{
Perms: stub.PermData{
User: m365.SecondaryUser.Email,
EntityID: m365.SecondaryUser.ID,
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: writePerm,
},
SharingMode: metadata.SharingModeCustom,
@ -810,8 +1002,8 @@ func testPermissionsInheritanceRestoreAndBackup(
},
Meta: stub.MetaData{
Perms: stub.PermData{
User: m365.TertiaryUser.Email,
EntityID: m365.TertiaryUser.ID,
User: tertiaryUserName,
EntityID: tertiaryUserID,
Roles: readPerm,
},
SharingMode: metadata.SharingModeCustom,
@ -822,8 +1014,8 @@ func testPermissionsInheritanceRestoreAndBackup(
Files: fileSet,
Meta: stub.MetaData{
Perms: stub.PermData{
User: m365.TertiaryUser.Email,
EntityID: m365.TertiaryUser.ID,
User: tertiaryUserName,
EntityID: tertiaryUserID,
Roles: writePerm,
},
SharingMode: metadata.SharingModeCustom,
@ -845,10 +1037,9 @@ func testPermissionsInheritanceRestoreAndBackup(
},
}
expected, err := stub.DataForInfo(resourceAndSvc.Service(), cols, version.Backup)
expected, err := stub.DataForInfo(suite.Service(), cols, version.Backup)
require.NoError(suite.T(), err)
bss := resourceAndSvc.Service().String()
bss := suite.Service().String()
for vn := startVersion; vn <= version.Backup; vn++ {
suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
@ -856,11 +1047,11 @@ func testPermissionsInheritanceRestoreAndBackup(
// Ideally this can always be true or false and still
// work, but limiting older versions to use emails so as
// to validate that flow as well.
input, err := stub.DataForInfo(resourceAndSvc.Service(), cols, vn)
input, err := stub.DataForInfo(suite.Service(), cols, vn)
require.NoError(suite.T(), err)
testData := restoreBackupInfoMultiVersion{
service: resourceAndSvc.Service(),
service: suite.Service(),
backupVersion: vn,
collectionsPrevious: input,
collectionsLatest: expected,
@ -873,8 +1064,8 @@ func testPermissionsInheritanceRestoreAndBackup(
opts := control.DefaultOptions()
cfg := m365Stub.ConfigInfo{
Tenant: m365.TenantID,
ResourceOwners: []string{resourceAndSvc.Resource().ID},
Tenant: suite.Tenant(),
ResourceOwners: []string{suite.ResourceOwner()},
Service: testData.service,
Opts: opts,
RestoreCfg: restoreCfg,
@ -885,26 +1076,33 @@ func testPermissionsInheritanceRestoreAndBackup(
}
}
func testLinkSharesInheritanceRestoreAndBackup(
suite tester.Suite,
m365 its.M365IntgTestSetup,
resourceAndSvc its.ResourceServicer,
startVersion int,
) {
func testLinkSharesInheritanceRestoreAndBackup(suite oneDriveSuite, startVersion int) {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
secondaryUserName, secondaryUserID := suite.SecondaryUser()
secondaryUser := metadata.Entity{
ID: m365.SecondaryUser.ID,
Email: m365.SecondaryUser.Email,
ID: secondaryUserID,
Email: secondaryUserName,
EntityType: metadata.GV2User,
}
tertiaryUserName, tertiaryUserID := suite.TertiaryUser()
tertiaryUser := metadata.Entity{
ID: m365.TertiaryUser.ID,
Email: m365.TertiaryUser.Email,
ID: tertiaryUserID,
Email: tertiaryUserName,
EntityType: metadata.GV2User,
}
// Get the default drive ID for the test user.
driveID := resourceAndSvc.Resource().DriveID
driveID := mustGetDefaultDriveID(
t,
ctx,
suite.APIClient(),
suite.Service(),
suite.ResourceOwner())
folderAName := "custom"
folderBName := "inherited"
@ -1048,10 +1246,9 @@ func testLinkSharesInheritanceRestoreAndBackup(
},
}
expected, err := stub.DataForInfo(resourceAndSvc.Service(), cols, version.Backup)
expected, err := stub.DataForInfo(suite.Service(), cols, version.Backup)
require.NoError(suite.T(), err)
bss := resourceAndSvc.Service().String()
bss := suite.Service().String()
for vn := startVersion; vn <= version.Backup; vn++ {
suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
@ -1059,11 +1256,11 @@ func testLinkSharesInheritanceRestoreAndBackup(
// Ideally this can always be true or false and still
// work, but limiting older versions to use emails so as
// to validate that flow as well.
input, err := stub.DataForInfo(resourceAndSvc.Service(), cols, vn)
input, err := stub.DataForInfo(suite.Service(), cols, vn)
require.NoError(suite.T(), err)
testData := restoreBackupInfoMultiVersion{
service: resourceAndSvc.Service(),
service: suite.Service(),
backupVersion: vn,
collectionsPrevious: input,
collectionsLatest: expected,
@ -1076,8 +1273,8 @@ func testLinkSharesInheritanceRestoreAndBackup(
opts := control.DefaultOptions()
cfg := m365Stub.ConfigInfo{
Tenant: m365.TenantID,
ResourceOwners: []string{resourceAndSvc.Resource().ID},
Tenant: suite.Tenant(),
ResourceOwners: []string{suite.ResourceOwner()},
Service: testData.service,
Opts: opts,
RestoreCfg: restoreCfg,
@ -1089,13 +1286,21 @@ func testLinkSharesInheritanceRestoreAndBackup(
}
func testRestoreFolderNamedFolderRegression(
suite tester.Suite,
m365 its.M365IntgTestSetup,
resourceAndSvc its.ResourceServicer,
suite oneDriveSuite,
startVersion int,
) {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
// Get the default drive ID for the test user.
driveID := resourceAndSvc.Resource().DriveID
driveID := mustGetDefaultDriveID(
suite.T(),
ctx,
suite.APIClient(),
suite.Service(),
suite.ResourceOwner())
rootPath := []string{
odConsts.DrivesPathDir,
@ -1164,19 +1369,18 @@ func testRestoreFolderNamedFolderRegression(
},
}
expected, err := stub.DataForInfo(resourceAndSvc.Service(), cols, version.Backup)
expected, err := stub.DataForInfo(suite.Service(), cols, version.Backup)
require.NoError(suite.T(), err)
bss := resourceAndSvc.Service().String()
bss := suite.Service().String()
for vn := startVersion; vn <= version.Backup; vn++ {
suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
t := suite.T()
input, err := stub.DataForInfo(resourceAndSvc.Service(), cols, vn)
input, err := stub.DataForInfo(suite.Service(), cols, vn)
require.NoError(suite.T(), err)
testData := restoreBackupInfoMultiVersion{
service: resourceAndSvc.Service(),
service: suite.Service(),
backupVersion: vn,
collectionsPrevious: input,
collectionsLatest: expected,
@ -1188,8 +1392,8 @@ func testRestoreFolderNamedFolderRegression(
opts := control.DefaultOptions()
cfg := m365Stub.ConfigInfo{
Tenant: m365.TenantID,
ResourceOwners: []string{resourceAndSvc.Resource().ID},
Tenant: suite.Tenant(),
ResourceOwners: []string{suite.ResourceOwner()},
Service: testData.service,
Opts: opts,
RestoreCfg: restoreCfg,

View File

@ -9,12 +9,14 @@ import (
"github.com/stretchr/testify/suite"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/data/mock"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
@ -62,7 +64,9 @@ func (suite *GroupsUnitSuite) TestConsumeRestoreCollections_noErrorOnGroups() {
type groupsIntegrationSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
resource string
tenantID string
ac api.Client
}
func TestGroupsIntegrationSuite(t *testing.T) {
@ -75,12 +79,25 @@ func TestGroupsIntegrationSuite(t *testing.T) {
func (suite *groupsIntegrationSuite) SetupSuite() {
t := suite.T()
suite.m365 = its.GetM365(t)
ctx, flush := tester.NewContext(t)
defer flush()
graph.InitializeConcurrencyLimiter(ctx, true, 4)
suite.resource = tconfig.M365TeamID(t)
acct := tconfig.NewM365Account(t)
creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.ac, err = api.NewClient(
creds,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))
suite.tenantID = creds.AzureTenantID
}
// test for getSiteName
@ -90,9 +107,12 @@ func (suite *groupsIntegrationSuite) TestGetSiteName() {
ctx, flush := tester.NewContext(t)
defer flush()
rootSite, err := suite.ac.Groups().GetRootSite(ctx, suite.resource)
require.NoError(t, err, clues.ToCore(err))
// Generate a fake site ID that appears valid to graph API but doesn't actually exist.
// This "could" be flaky, but highly unlikely
unavailableSiteID := []rune(suite.m365.Group.RootSite.ID)
unavailableSiteID := []rune(ptr.Val(rootSite.GetId()))
firstIDChar := slices.Index(unavailableSiteID, ',') + 1
if unavailableSiteID[firstIDChar] != '2' {
@ -111,9 +131,9 @@ func (suite *groupsIntegrationSuite) TestGetSiteName() {
}{
{
name: "valid",
siteID: suite.m365.Group.RootSite.ID,
webURL: suite.m365.Group.RootSite.WebURL,
siteName: suite.m365.Group.RootSite.DisplayName,
siteID: ptr.Val(rootSite.GetId()),
webURL: ptr.Val(rootSite.GetWebUrl()),
siteName: *rootSite.GetDisplayName(),
webURLToSiteNames: map[string]string{},
expectErr: assert.NoError,
},
@ -143,7 +163,7 @@ func (suite *groupsIntegrationSuite) TestGetSiteName() {
ctx,
test.siteID,
test.webURL,
suite.m365.AC.Sites(),
suite.ac.Sites(),
test.webURLToSiteNames)
require.NoError(t, err, clues.ToCore(err))

View File

@ -197,12 +197,7 @@ func (h BackupHandler[T]) AugmentItemInfo(
return h.ItemInfo
}
func (h *BackupHandler[T]) Get(
context.Context,
string,
map[string]string,
bool,
) (*http.Response, error) {
func (h *BackupHandler[T]) Get(context.Context, string, map[string]string) (*http.Response, error) {
c := h.getCall
h.getCall++

View File

@ -32,7 +32,6 @@ import (
"github.com/alcionai/corso/src/internal/streamstore"
ssmock "github.com/alcionai/corso/src/internal/streamstore/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup"
@ -48,6 +47,7 @@ import (
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
"github.com/alcionai/corso/src/pkg/store"
@ -420,9 +420,6 @@ func (suite *BackupOpUnitSuite) TestNewBackupOperation_configuredOptionsMatchInp
MaxPages: 46,
Enabled: true,
},
SkipEventsOnInstance503ForResources: map[string]struct{}{
"resource": {},
},
}
t := suite.T()
@ -1509,7 +1506,8 @@ func withoutModified(de details.Entry) details.Entry {
type BackupOpIntegrationSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
user, site string
ac api.Client
}
func TestBackupOpIntegrationSuite(t *testing.T) {
@ -1522,12 +1520,25 @@ func TestBackupOpIntegrationSuite(t *testing.T) {
func (suite *BackupOpIntegrationSuite) SetupSuite() {
t := suite.T()
suite.m365 = its.GetM365(t)
ctx, flush := tester.NewContext(t)
defer flush()
graph.InitializeConcurrencyLimiter(ctx, true, 4)
suite.user = tconfig.M365UserID(t)
suite.site = tconfig.M365SiteID(t)
a := tconfig.NewM365Account(t)
creds, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.ac, err = api.NewClient(
creds,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))
}
func (suite *BackupOpIntegrationSuite) TestNewBackupOperation() {
@ -1535,8 +1546,8 @@ func (suite *BackupOpIntegrationSuite) TestNewBackupOperation() {
kw = &kopia.Wrapper{}
sw = store.NewWrapper(&kopia.ModelStore{})
ctrl = &mock.Controller{}
acct = tconfig.NewM365Account(suite.T())
opts = control.DefaultOptions()
acct = suite.m365.Acct
)
table := []struct {
@ -1734,7 +1745,7 @@ func makeMockItem(
func (suite *AssistBackupIntegrationSuite) TestBackupTypesForFailureModes() {
var (
acct = tconfig.NewM365Account(suite.T())
tenantID = acct.ID()
tenantID = acct.Config[account.AzureTenantIDKey]
osel = selectors.NewOneDriveBackup([]string{userID})
)
@ -2063,7 +2074,7 @@ func selectFilesFromDeets(d details.Details) map[string]details.Entry {
func (suite *AssistBackupIntegrationSuite) TestExtensionsIncrementals() {
var (
acct = tconfig.NewM365Account(suite.T())
tenantID = acct.ID()
tenantID = acct.Config[account.AzureTenantIDKey]
opts = control.DefaultOptions()
osel = selectors.NewOneDriveBackup([]string{userID})
// Default policy used by SDK clients

View File

@ -24,7 +24,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -305,10 +304,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead int
itemsWritten int
nonMetaItemsWritten int
// TODO: Temporary mechanism to skip permissions
// related tests. Remove once we figure out the issue.
skipChecks bool
}{
{
name: "clean incremental, no changes",
@ -357,7 +352,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead: 1, // .data file for newitem
itemsWritten: 3, // .meta for newitem, .dirmeta for parent (.data is not written as it is not updated)
nonMetaItemsWritten: 0, // none because the file is considered cached instead of written.
skipChecks: true,
},
{
name: "remove permission from new file",
@ -377,7 +371,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead: 1, // .data file for newitem
itemsWritten: 3, // .meta for newitem, .dirmeta for parent (.data is not written as it is not updated)
nonMetaItemsWritten: 0, // none because the file is considered cached instead of written.
skipChecks: true,
},
{
name: "add permission to container",
@ -398,7 +391,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead: 0,
itemsWritten: 2, // .dirmeta for collection
nonMetaItemsWritten: 0, // no files updated as update on container
skipChecks: true,
},
{
name: "remove permission from container",
@ -419,7 +411,6 @@ func RunIncrementalDriveishBackupTest(
itemsRead: 0,
itemsWritten: 2, // .dirmeta for collection
nonMetaItemsWritten: 0, // no files updated
skipChecks: true,
},
{
name: "update contents of a file",
@ -749,11 +740,9 @@ func RunIncrementalDriveishBackupTest(
assertReadWrite = assert.LessOrEqual
}
if !test.skipChecks {
assertReadWrite(t, expectWrites, incBO.Results.ItemsWritten, "incremental items written")
assertReadWrite(t, expectNonMetaWrites, incBO.Results.NonMetaItemsWritten, "incremental non-meta items written")
assertReadWrite(t, expectReads, incBO.Results.ItemsRead, "incremental items read")
}
assertReadWrite(t, expectWrites, incBO.Results.ItemsWritten, "incremental items written")
assertReadWrite(t, expectNonMetaWrites, incBO.Results.NonMetaItemsWritten, "incremental non-meta items written")
assertReadWrite(t, expectReads, incBO.Results.ItemsRead, "incremental items read")
assert.NoError(t, incBO.Errors.Failure(), "incremental non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
assert.Empty(t, incBO.Errors.Recovered(), "incremental recoverable/iteration errors")
@ -999,7 +988,7 @@ func RunDriveRestoreToAlternateProtectedResource(
suite tester.Suite,
ac api.Client,
sel selectors.Selector, // owner should match 'from', both Restore and Backup types work.
driveFrom, driveTo its.IDs,
driveFrom, driveTo IDs,
toResource string,
) {
ctx, flush := tester.NewContext(t)

View File

@ -21,7 +21,6 @@ import (
exchTD "github.com/alcionai/corso/src/internal/m365/service/exchange/testdata"
. "github.com/alcionai/corso/src/internal/operations/test/m365"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -42,7 +41,7 @@ import (
type ExchangeBackupIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its IntgTesterSetup
}
func TestExchangeBackupIntgSuite(t *testing.T) {
@ -54,7 +53,7 @@ func TestExchangeBackupIntgSuite(t *testing.T) {
}
func (suite *ExchangeBackupIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
// MetadataFileNames produces the category-specific set of filenames used to
@ -81,9 +80,9 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
{
name: "Mail",
selector: func() *selectors.ExchangeBackup {
sel := selectors.NewExchangeBackup([]string{suite.m365.User.ID})
sel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
sel.Include(sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
sel.DiscreteOwner = suite.m365.User.ID
sel.DiscreteOwner = suite.its.User.ID
return sel
},
@ -93,7 +92,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
{
name: "Contacts",
selector: func() *selectors.ExchangeBackup {
sel := selectors.NewExchangeBackup([]string{suite.m365.User.ID})
sel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
sel.Include(sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()))
return sel
},
@ -103,7 +102,7 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
{
name: "Calendar Events",
selector: func() *selectors.ExchangeBackup {
sel := selectors.NewExchangeBackup([]string{suite.m365.User.ID})
sel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
sel.Include(sel.EventCalendars([]string{api.DefaultCalendar}, selectors.PrefixMatch()))
return sel
},
@ -271,7 +270,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
// later on during the tests. Putting their identifiers into the selector
// at this point is harmless.
containers = []string{container1, container2, container3, containerRename}
sel = selectors.NewExchangeBackup([]string{suite.m365.User.ID})
sel = selectors.NewExchangeBackup([]string{suite.its.User.ID})
whatSet = deeTD.CategoryFromRepoRef
opts = control.DefaultOptions()
)
@ -311,7 +310,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
mailDBF := func(id, timeStamp, subject, body string) []byte {
return exchMock.MessageWith(
suite.m365.User.ID, suite.m365.User.ID, suite.m365.User.ID,
suite.its.User.ID, suite.its.User.ID, suite.its.User.ID,
subject, body, body,
now, now, now, now)
}
@ -328,7 +327,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
eventDBF := func(id, timeStamp, subject, body string) []byte {
return exchMock.EventWith(
suite.m365.User.ID, subject, body, body,
suite.its.User.ID, subject, body, body,
exchMock.NoOriginalStartDate, now, now,
exchMock.NoRecurrence, exchMock.NoAttendees,
exchMock.NoAttachments, exchMock.NoCancelledOccurrences,
@ -597,7 +596,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
service,
category,
selectors.NewExchangeRestore([]string{uidn.ID()}).Selector,
creds.AzureTenantID, suite.m365.User.ID, "", "", container3,
creds.AzureTenantID, suite.its.User.ID, "", "", container3,
2,
version.Backup,
gen.dbf)
@ -890,7 +889,7 @@ func testExchangeContinuousBackups(suite *ExchangeBackupIntgSuite, toggles contr
type ExchangeBackupNightlyIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its IntgTesterSetup
}
func TestExchangeBackupNightlyIntgSuite(t *testing.T) {
@ -902,11 +901,11 @@ func TestExchangeBackupNightlyIntgSuite(t *testing.T) {
}
func (suite *ExchangeBackupNightlyIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *ExchangeBackupNightlyIntgSuite) TestBackup_Run_exchangeVersion9MergeBase() {
sel := selectors.NewExchangeBackup([]string{suite.m365.User.ID})
sel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
sel.Include(
sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()),
// sel.EventCalendars([]string{api.DefaultCalendar}, selectors.PrefixMatch()),
@ -917,7 +916,7 @@ func (suite *ExchangeBackupNightlyIntgSuite) TestBackup_Run_exchangeVersion9Merg
type ExchangeRestoreNightlyIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its IntgTesterSetup
}
func TestExchangeRestoreIntgSuite(t *testing.T) {
@ -929,7 +928,7 @@ func TestExchangeRestoreIntgSuite(t *testing.T) {
}
func (suite *ExchangeRestoreNightlyIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
type clientItemPager interface {
@ -960,7 +959,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
// a backup is required to run restores
baseSel := selectors.NewExchangeBackup([]string{suite.m365.User.ID})
baseSel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
baseSel.Include(
// events cannot be run, for the same reason as incremental backups: the user needs
// to have their account recycled.
@ -968,7 +967,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
baseSel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()),
baseSel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
baseSel.DiscreteOwner = suite.m365.User.ID
baseSel.DiscreteOwner = suite.its.User.ID
var (
mb = evmock.NewBus()
@ -1003,8 +1002,8 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeWithAdvanc
}
testCategories = map[path.CategoryType]clientItemPager{
path.ContactsCategory: suite.m365.AC.Contacts(),
path.EmailCategory: suite.m365.AC.Mail(),
path.ContactsCategory: suite.its.AC.Contacts(),
path.EmailCategory: suite.its.AC.Mail(),
// path.EventsCategory: suite.its.ac.Events(),
}
)
@ -1277,7 +1276,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
// a backup is required to run restores
baseSel := selectors.NewExchangeBackup([]string{suite.m365.User.ID})
baseSel := selectors.NewExchangeBackup([]string{suite.its.User.ID})
baseSel.Include(
// events cannot be run, for the same reason as incremental backups: the user needs
// to have their account recycled.
@ -1285,7 +1284,7 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
baseSel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()),
baseSel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
baseSel.DiscreteOwner = suite.m365.User.ID
baseSel.DiscreteOwner = suite.its.User.ID
var (
mb = evmock.NewBus()
@ -1304,11 +1303,11 @@ func (suite *ExchangeRestoreNightlyIntgSuite) TestRestore_Run_exchangeAlternateP
var (
restoreCfg = ctrlTD.DefaultRestoreConfig("exchange_restore_to_user")
sel = rsel.Selector
userID = suite.m365.User.ID
secondaryUserID = suite.m365.SecondaryUser.ID
userID = suite.its.User.ID
secondaryUserID = suite.its.SecondaryUser.ID
uid = userID
acCont = suite.m365.AC.Contacts()
acMail = suite.m365.AC.Mail()
acCont = suite.its.AC.Contacts()
acMail = suite.its.AC.Mail()
// acEvts = suite.its.ac.Events()
firstCtr = count.New()
)

View File

@ -14,7 +14,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/collection/drive"
. "github.com/alcionai/corso/src/internal/operations/test/m365"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
deeTD "github.com/alcionai/corso/src/pkg/backup/details/testdata"
@ -29,7 +28,7 @@ import (
type GroupsBackupIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its IntgTesterSetup
}
func TestGroupsBackupIntgSuite(t *testing.T) {
@ -41,12 +40,12 @@ func TestGroupsBackupIntgSuite(t *testing.T) {
}
func (suite *GroupsBackupIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *GroupsBackupIntgSuite) TestBackup_Run_groups() {
var (
resourceID = suite.m365.Group.ID
resourceID = suite.its.Group.ID
sel = selectors.NewGroupsBackup([]string{resourceID})
)
@ -60,12 +59,12 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groups() {
}
func (suite *GroupsBackupIntgSuite) TestBackup_Run_incrementalGroups() {
runGroupsIncrementalBackupTests(suite, suite.m365, control.DefaultOptions())
runGroupsIncrementalBackupTests(suite, suite.its, control.DefaultOptions())
}
func (suite *GroupsBackupIntgSuite) TestBackup_Run_extensionsGroups() {
var (
resourceID = suite.m365.Group.ID
resourceID = suite.its.Group.ID
sel = selectors.NewGroupsBackup([]string{resourceID})
)
@ -85,7 +84,7 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_extensionsGroups() {
type GroupsBackupTreeIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its IntgTesterSetup
}
func TestGroupsBackupTreeIntgSuite(t *testing.T) {
@ -97,12 +96,12 @@ func TestGroupsBackupTreeIntgSuite(t *testing.T) {
}
func (suite *GroupsBackupTreeIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *GroupsBackupTreeIntgSuite) TestBackup_Run_treeGroups() {
var (
resourceID = suite.m365.Group.ID
resourceID = suite.its.Group.ID
sel = selectors.NewGroupsBackup([]string{resourceID})
opts = control.DefaultOptions()
)
@ -118,12 +117,12 @@ func (suite *GroupsBackupTreeIntgSuite) TestBackup_Run_treeGroups() {
func (suite *GroupsBackupTreeIntgSuite) TestBackup_Run_treeIncrementalGroups() {
opts := control.DefaultOptions()
runGroupsIncrementalBackupTests(suite, suite.m365, opts)
runGroupsIncrementalBackupTests(suite, suite.its, opts)
}
func (suite *GroupsBackupTreeIntgSuite) TestBackup_Run_treeExtensionsGroups() {
var (
resourceID = suite.m365.Group.ID
resourceID = suite.its.Group.ID
sel = selectors.NewGroupsBackup([]string{resourceID})
opts = control.DefaultOptions()
)
@ -143,10 +142,10 @@ func (suite *GroupsBackupTreeIntgSuite) TestBackup_Run_treeExtensionsGroups() {
func runGroupsIncrementalBackupTests(
suite tester.Suite,
m365 its.M365IntgTestSetup,
its IntgTesterSetup,
opts control.Options,
) {
sel := selectors.NewGroupsRestore([]string{m365.Group.ID})
sel := selectors.NewGroupsRestore([]string{its.Group.ID})
ic := func(cs []string) selectors.Selector {
sel.Include(sel.LibraryFolders(cs, selectors.PrefixMatch()))
@ -157,14 +156,14 @@ func runGroupsIncrementalBackupTests(
t *testing.T,
ctx context.Context,
) string {
return m365.Group.RootSite.DriveID
return its.Group.RootSite.DriveID
}
gtsi := func(
t *testing.T,
ctx context.Context,
) string {
return m365.Group.RootSite.ID
return its.Group.RootSite.ID
}
grh := func(ac api.Client) drive.RestoreHandler {
@ -174,8 +173,8 @@ func runGroupsIncrementalBackupTests(
RunIncrementalDriveishBackupTest(
suite,
opts,
m365.Group.ID,
m365.SecondaryGroup.ID, // more reliable than user
its.Group.ID,
its.User.ID,
path.GroupsService,
path.LibrariesCategory,
ic,
@ -194,19 +193,15 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
var (
mb = evmock.NewBus()
counter = count.New()
sel = selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
sel = selectors.NewGroupsBackup([]string{suite.its.Group.ID})
opts = control.DefaultOptions()
whatSet = deeTD.CategoryFromRepoRef
)
sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel))
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
// we go fix the group mailbox.
// selTD.GroupsBackupConversationScope(sel))
selTD.GroupsBackupChannelScope(sel),
selTD.GroupsBackupConversationScope(sel))
bo, bod := PrepNewTestBackupOp(t, ctx, mb, sel.Selector, opts, version.Backup, counter)
defer bod.Close(t, ctx)
@ -311,7 +306,7 @@ func (suite *GroupsBackupIntgSuite) TestBackup_Run_groupsBasic() {
type GroupsBackupNightlyIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its IntgTesterSetup
}
func TestGroupsBackupNightlyIntgSuite(t *testing.T) {
@ -323,40 +318,32 @@ func TestGroupsBackupNightlyIntgSuite(t *testing.T) {
}
func (suite *GroupsBackupNightlyIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9MergeBase() {
sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
sel := selectors.NewGroupsBackup([]string{suite.its.Group.ID})
sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel))
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conv backups while
// we go fix the group mailbox.
// selTD.GroupsBackupConversationScope(sel))
selTD.GroupsBackupChannelScope(sel),
selTD.GroupsBackupConversationScope(sel))
RunMergeBaseGroupsUpdate(suite, sel.Selector, false)
}
func (suite *GroupsBackupNightlyIntgSuite) TestBackup_Run_groupsVersion9AssistBases() {
sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
sel := selectors.NewGroupsBackup([]string{suite.its.Group.ID})
sel.Include(
selTD.GroupsBackupLibraryFolderScope(sel),
selTD.GroupsBackupChannelScope(sel))
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conv backups while
// we go fix the group mailbox.
// selTD.GroupsBackupConversationScope(sel))
selTD.GroupsBackupChannelScope(sel),
selTD.GroupsBackupConversationScope(sel))
RunDriveAssistBaseGroupsUpdate(suite, sel.Selector, false)
}
type GroupsRestoreNightlyIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its IntgTesterSetup
}
func TestGroupsRestoreIntgSuite(t *testing.T) {
@ -368,20 +355,20 @@ func TestGroupsRestoreIntgSuite(t *testing.T) {
}
func (suite *GroupsRestoreNightlyIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *GroupsRestoreNightlyIntgSuite) TestRestore_Run_groupsWithAdvancedOptions() {
sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
sel := selectors.NewGroupsBackup([]string{suite.its.Group.ID})
sel.Include(selTD.GroupsBackupLibraryFolderScope(sel))
sel.Filter(sel.Library("documents"))
sel.DiscreteOwner = suite.m365.Group.ID
sel.DiscreteOwner = suite.its.Group.ID
RunDriveRestoreWithAdvancedOptions(
suite.T(),
suite,
suite.m365.AC,
suite.its.AC,
sel.Selector,
suite.m365.Group.RootSite.DriveID,
suite.m365.Group.RootSite.DriveRootFolderID)
suite.its.Group.RootSite.DriveID,
suite.its.Group.RootSite.DriveRootFolderID)
}

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/m365"
@ -18,6 +19,8 @@ import (
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -354,6 +357,125 @@ func ControllerWithSelector(
// Suite Setup
// ---------------------------------------------------------------------------
type IDs struct {
ID string
DriveID string
DriveRootFolderID string
}
type GIDs struct {
ID string
RootSite IDs
}
type IntgTesterSetup struct {
AC api.Client
GockAC api.Client
User IDs
SecondaryUser IDs
Site IDs
SecondarySite IDs
Group GIDs
SecondaryGroup GIDs
}
func NewIntegrationTesterSetup(t *testing.T) IntgTesterSetup {
its := IntgTesterSetup{}
ctx, flush := tester.NewContext(t)
defer flush()
graph.InitializeConcurrencyLimiter(ctx, true, 4)
a := tconfig.NewM365Account(t)
creds, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
counter := count.New()
its.AC, err = api.NewClient(
creds,
control.DefaultOptions(),
counter)
require.NoError(t, err, clues.ToCore(err))
its.GockAC, err = GockClient(creds, counter)
require.NoError(t, err, clues.ToCore(err))
its.User = userIDs(t, tconfig.M365UserID(t), its.AC)
its.SecondaryUser = userIDs(t, tconfig.SecondaryM365UserID(t), its.AC)
its.Site = siteIDs(t, tconfig.M365SiteID(t), its.AC)
its.SecondarySite = siteIDs(t, tconfig.SecondaryM365SiteID(t), its.AC)
// teamID is used here intentionally. We want the group
// to have access to teams data
its.Group = groupIDs(t, tconfig.M365TeamID(t), its.AC)
its.SecondaryGroup = groupIDs(t, tconfig.SecondaryM365TeamID(t), its.AC)
return its
}
func userIDs(t *testing.T, id string, ac api.Client) IDs {
ctx, flush := tester.NewContext(t)
defer flush()
r := IDs{ID: id}
drive, err := ac.Users().GetDefaultDrive(ctx, id)
require.NoError(t, err, clues.ToCore(err))
r.DriveID = ptr.Val(drive.GetId())
driveRootFolder, err := ac.Drives().GetRootFolder(ctx, r.DriveID)
require.NoError(t, err, clues.ToCore(err))
r.DriveRootFolderID = ptr.Val(driveRootFolder.GetId())
return r
}
func siteIDs(t *testing.T, id string, ac api.Client) IDs {
ctx, flush := tester.NewContext(t)
defer flush()
r := IDs{ID: id}
drive, err := ac.Sites().GetDefaultDrive(ctx, id)
require.NoError(t, err, clues.ToCore(err))
r.DriveID = ptr.Val(drive.GetId())
driveRootFolder, err := ac.Drives().GetRootFolder(ctx, r.DriveID)
require.NoError(t, err, clues.ToCore(err))
r.DriveRootFolderID = ptr.Val(driveRootFolder.GetId())
return r
}
func groupIDs(t *testing.T, id string, ac api.Client) GIDs {
ctx, flush := tester.NewContext(t)
defer flush()
r := GIDs{ID: id}
site, err := ac.Groups().GetRootSite(ctx, id)
require.NoError(t, err, clues.ToCore(err))
r.RootSite.ID = ptr.Val(site.GetId())
drive, err := ac.Sites().GetDefaultDrive(ctx, r.RootSite.ID)
require.NoError(t, err, clues.ToCore(err))
r.RootSite.DriveID = ptr.Val(drive.GetId())
driveRootFolder, err := ac.Drives().GetRootFolder(ctx, r.RootSite.DriveID)
require.NoError(t, err, clues.ToCore(err))
r.RootSite.DriveRootFolderID = ptr.Val(driveRootFolder.GetId())
return r
}
func GetTestExtensionFactories() []extensions.CreateItemExtensioner {
return []extensions.CreateItemExtensioner{
&extensions.MockItemExtensionFactory{},

View File

@ -13,13 +13,12 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/events"
evmock "github.com/alcionai/corso/src/internal/events/mock"
m365Ctrl "github.com/alcionai/corso/src/internal/m365"
"github.com/alcionai/corso/src/internal/m365"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/model"
. "github.com/alcionai/corso/src/internal/operations/test/m365"
"github.com/alcionai/corso/src/internal/streamstore"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/backup"
@ -37,7 +36,7 @@ import (
type OneDriveBackupIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its IntgTesterSetup
}
func TestOneDriveBackupIntgSuite(t *testing.T) {
@ -49,12 +48,12 @@ func TestOneDriveBackupIntgSuite(t *testing.T) {
}
func (suite *OneDriveBackupIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDrive() {
var (
resourceID = suite.m365.SecondaryUser.ID
resourceID = suite.its.SecondaryUser.ID
sel = selectors.NewOneDriveBackup([]string{resourceID})
)
@ -68,12 +67,12 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDrive() {
}
func (suite *OneDriveBackupIntgSuite) TestBackup_Run_incrementalOneDrive() {
runOneDriveIncrementalBackupTests(suite, suite.m365, control.DefaultOptions())
runOneDriveIncrementalBackupTests(suite, suite.its, control.DefaultOptions())
}
func (suite *OneDriveBackupIntgSuite) TestBackup_Run_extensionsOneDrive() {
var (
resourceID = suite.m365.SecondaryUser.ID
resourceID = suite.its.SecondaryUser.ID
sel = selectors.NewOneDriveBackup([]string{resourceID})
)
@ -92,7 +91,7 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_extensionsOneDrive() {
type OneDriveBackupTreeIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its IntgTesterSetup
}
func TestOneDriveBackupTreeIntgSuite(t *testing.T) {
@ -104,12 +103,12 @@ func TestOneDriveBackupTreeIntgSuite(t *testing.T) {
}
func (suite *OneDriveBackupTreeIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *OneDriveBackupTreeIntgSuite) TestBackup_Run_treeOneDrive() {
var (
resourceID = suite.m365.SecondaryUser.ID
resourceID = suite.its.SecondaryUser.ID
sel = selectors.NewOneDriveBackup([]string{resourceID})
opts = control.DefaultOptions()
)
@ -125,12 +124,12 @@ func (suite *OneDriveBackupTreeIntgSuite) TestBackup_Run_treeOneDrive() {
func (suite *OneDriveBackupTreeIntgSuite) TestBackup_Run_treeIncrementalOneDrive() {
opts := control.DefaultOptions()
runOneDriveIncrementalBackupTests(suite, suite.m365, opts)
runOneDriveIncrementalBackupTests(suite, suite.its, opts)
}
func (suite *OneDriveBackupTreeIntgSuite) TestBackup_Run_treeExtensionsOneDrive() {
var (
resourceID = suite.m365.SecondaryUser.ID
resourceID = suite.its.SecondaryUser.ID
sel = selectors.NewOneDriveBackup([]string{resourceID})
opts = control.DefaultOptions()
)
@ -150,10 +149,10 @@ func (suite *OneDriveBackupTreeIntgSuite) TestBackup_Run_treeExtensionsOneDrive(
func runOneDriveIncrementalBackupTests(
suite tester.Suite,
m365 its.M365IntgTestSetup,
its IntgTesterSetup,
opts control.Options,
) {
sel := selectors.NewOneDriveRestore([]string{m365.User.ID})
sel := selectors.NewOneDriveRestore([]string{its.User.ID})
ic := func(cs []string) selectors.Selector {
sel.Include(sel.Folders(cs, selectors.PrefixMatch()))
@ -164,10 +163,10 @@ func runOneDriveIncrementalBackupTests(
t *testing.T,
ctx context.Context,
) string {
d, err := m365.AC.Users().GetDefaultDrive(ctx, m365.User.ID)
d, err := its.AC.Users().GetDefaultDrive(ctx, its.User.ID)
if err != nil {
err = clues.Wrap(err, "retrieving default user drive").
With("user", m365.User.ID)
With("user", its.User.ID)
}
require.NoError(t, err, clues.ToCore(err))
@ -185,8 +184,8 @@ func runOneDriveIncrementalBackupTests(
RunIncrementalDriveishBackupTest(
suite,
opts,
m365.User.ID,
m365.User.ID,
its.User.ID,
its.User.ID,
path.OneDriveService,
path.FilesCategory,
ic,
@ -220,7 +219,7 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDriveOwnerMigration() {
creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
ctrl, err := m365Ctrl.NewController(
ctrl, err := m365.NewController(
ctx,
acct,
path.OneDriveService,
@ -230,7 +229,7 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDriveOwnerMigration() {
userable, err := ctrl.AC.Users().GetByID(
ctx,
suite.m365.User.ID,
suite.its.User.ID,
api.CallConfig{})
require.NoError(t, err, clues.ToCore(err))
@ -334,7 +333,7 @@ func (suite *OneDriveBackupIntgSuite) TestBackup_Run_oneDriveOwnerMigration() {
type OneDriveBackupNightlyIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its IntgTesterSetup
}
func TestOneDriveBackupNightlyIntgSuite(t *testing.T) {
@ -346,11 +345,11 @@ func TestOneDriveBackupNightlyIntgSuite(t *testing.T) {
}
func (suite *OneDriveBackupNightlyIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *OneDriveBackupNightlyIntgSuite) TestBackup_Run_oneDriveVersion9MergeBase() {
sel := selectors.NewOneDriveBackup([]string{suite.m365.User.ID})
sel := selectors.NewOneDriveBackup([]string{suite.its.User.ID})
sel.Include(selTD.OneDriveBackupFolderScope(sel))
RunMergeBaseGroupsUpdate(suite, sel.Selector, true)
@ -365,7 +364,7 @@ func (suite *OneDriveBackupNightlyIntgSuite) TestBackup_Run_oneDriveVersion9Merg
type OneDriveRestoreNightlyIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its IntgTesterSetup
}
func TestOneDriveRestoreIntgSuite(t *testing.T) {
@ -377,34 +376,34 @@ func TestOneDriveRestoreIntgSuite(t *testing.T) {
}
func (suite *OneDriveRestoreNightlyIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *OneDriveRestoreNightlyIntgSuite) TestRestore_Run_onedriveWithAdvancedOptions() {
sel := selectors.NewOneDriveBackup([]string{suite.m365.User.ID})
sel := selectors.NewOneDriveBackup([]string{suite.its.User.ID})
sel.Include(selTD.OneDriveBackupFolderScope(sel))
sel.DiscreteOwner = suite.m365.User.ID
sel.DiscreteOwner = suite.its.User.ID
RunDriveRestoreWithAdvancedOptions(
suite.T(),
suite,
suite.m365.AC,
suite.its.AC,
sel.Selector,
suite.m365.User.DriveID,
suite.m365.User.DriveRootFolderID)
suite.its.User.DriveID,
suite.its.User.DriveRootFolderID)
}
func (suite *OneDriveRestoreNightlyIntgSuite) TestRestore_Run_onedriveAlternateProtectedResource() {
sel := selectors.NewOneDriveBackup([]string{suite.m365.User.ID})
sel := selectors.NewOneDriveBackup([]string{suite.its.User.ID})
sel.Include(selTD.OneDriveBackupFolderScope(sel))
sel.DiscreteOwner = suite.m365.User.ID
sel.DiscreteOwner = suite.its.User.ID
RunDriveRestoreToAlternateProtectedResource(
suite.T(),
suite,
suite.m365.AC,
suite.its.AC,
sel.Selector,
suite.m365.User,
suite.m365.SecondaryUser,
suite.m365.SecondaryUser.ID)
suite.its.User,
suite.its.SecondaryUser,
suite.its.SecondaryUser.ID)
}

View File

@ -16,7 +16,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/collection/drive"
. "github.com/alcionai/corso/src/internal/operations/test/m365"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
deeTD "github.com/alcionai/corso/src/pkg/backup/details/testdata"
@ -32,7 +31,7 @@ import (
type SharePointBackupIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its IntgTesterSetup
}
func TestSharePointBackupIntgSuite(t *testing.T) {
@ -44,12 +43,12 @@ func TestSharePointBackupIntgSuite(t *testing.T) {
}
func (suite *SharePointBackupIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePoint() {
var (
resourceID = suite.m365.Site.ID
resourceID = suite.its.Site.ID
sel = selectors.NewSharePointBackup([]string{resourceID})
)
@ -69,7 +68,7 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePointList() {
defer flush()
var (
resourceID = suite.m365.Site.ID
resourceID = suite.its.Site.ID
sel = selectors.NewSharePointBackup([]string{resourceID})
tenID = tconfig.M365TenantID(t)
mb = evmock.NewBus()
@ -124,12 +123,12 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_sharePointList() {
}
func (suite *SharePointBackupIntgSuite) TestBackup_Run_incrementalSharePoint() {
runSharePointIncrementalBackupTests(suite, suite.m365, control.DefaultOptions())
runSharePointIncrementalBackupTests(suite, suite.its, control.DefaultOptions())
}
func (suite *SharePointBackupIntgSuite) TestBackup_Run_extensionsSharePoint() {
var (
resourceID = suite.m365.Site.ID
resourceID = suite.its.Site.ID
sel = selectors.NewSharePointBackup([]string{resourceID})
)
@ -148,7 +147,7 @@ func (suite *SharePointBackupIntgSuite) TestBackup_Run_extensionsSharePoint() {
type SharePointBackupTreeIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its IntgTesterSetup
}
func TestSharePointBackupTreeIntgSuite(t *testing.T) {
@ -160,12 +159,12 @@ func TestSharePointBackupTreeIntgSuite(t *testing.T) {
}
func (suite *SharePointBackupTreeIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *SharePointBackupTreeIntgSuite) TestBackup_Run_treeSharePoint() {
var (
resourceID = suite.m365.Site.ID
resourceID = suite.its.Site.ID
sel = selectors.NewSharePointBackup([]string{resourceID})
opts = control.DefaultOptions()
)
@ -181,12 +180,12 @@ func (suite *SharePointBackupTreeIntgSuite) TestBackup_Run_treeSharePoint() {
func (suite *SharePointBackupTreeIntgSuite) TestBackup_Run_treeIncrementalSharePoint() {
opts := control.DefaultOptions()
runSharePointIncrementalBackupTests(suite, suite.m365, opts)
runSharePointIncrementalBackupTests(suite, suite.its, opts)
}
func (suite *SharePointBackupTreeIntgSuite) TestBackup_Run_treeExtensionsSharePoint() {
var (
resourceID = suite.m365.Site.ID
resourceID = suite.its.Site.ID
sel = selectors.NewSharePointBackup([]string{resourceID})
opts = control.DefaultOptions()
)
@ -206,10 +205,10 @@ func (suite *SharePointBackupTreeIntgSuite) TestBackup_Run_treeExtensionsSharePo
func runSharePointIncrementalBackupTests(
suite tester.Suite,
m365 its.M365IntgTestSetup,
its IntgTesterSetup,
opts control.Options,
) {
sel := selectors.NewSharePointRestore([]string{m365.Site.ID})
sel := selectors.NewSharePointRestore([]string{its.Site.ID})
ic := func(cs []string) selectors.Selector {
sel.Include(sel.LibraryFolders(cs, selectors.PrefixMatch()))
@ -220,10 +219,10 @@ func runSharePointIncrementalBackupTests(
t *testing.T,
ctx context.Context,
) string {
d, err := m365.AC.Sites().GetDefaultDrive(ctx, m365.Site.ID)
d, err := its.AC.Sites().GetDefaultDrive(ctx, its.Site.ID)
if err != nil {
err = clues.Wrap(err, "retrieving default site drive").
With("site", m365.Site.ID)
With("site", its.Site.ID)
}
require.NoError(t, err, clues.ToCore(err))
@ -241,8 +240,8 @@ func runSharePointIncrementalBackupTests(
RunIncrementalDriveishBackupTest(
suite,
opts,
m365.Site.ID,
m365.User.ID,
its.Site.ID,
its.User.ID,
path.SharePointService,
path.LibrariesCategory,
ic,
@ -254,7 +253,7 @@ func runSharePointIncrementalBackupTests(
type SharePointBackupNightlyIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its IntgTesterSetup
}
func TestSharePointBackupNightlyIntgSuite(t *testing.T) {
@ -266,18 +265,18 @@ func TestSharePointBackupNightlyIntgSuite(t *testing.T) {
}
func (suite *SharePointBackupNightlyIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *SharePointBackupNightlyIntgSuite) TestBackup_Run_sharePointVersion9MergeBase() {
sel := selectors.NewSharePointBackup([]string{suite.m365.Site.ID})
sel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
sel.Include(selTD.SharePointBackupFolderScope(sel))
RunMergeBaseGroupsUpdate(suite, sel.Selector, true)
}
func (suite *SharePointBackupNightlyIntgSuite) TestBackup_Run_sharePointVersion9AssistBases() {
sel := selectors.NewSharePointBackup([]string{suite.m365.Site.ID})
sel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
sel.Include(selTD.SharePointBackupFolderScope(sel))
RunDriveAssistBaseGroupsUpdate(suite, sel.Selector, true)
@ -285,7 +284,7 @@ func (suite *SharePointBackupNightlyIntgSuite) TestBackup_Run_sharePointVersion9
type SharePointRestoreNightlyIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
its IntgTesterSetup
}
func TestSharePointRestoreIntgSuite(t *testing.T) {
@ -297,38 +296,38 @@ func TestSharePointRestoreIntgSuite(t *testing.T) {
}
func (suite *SharePointRestoreNightlyIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
suite.its = NewIntegrationTesterSetup(suite.T())
}
func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointWithAdvancedOptions() {
sel := selectors.NewSharePointBackup([]string{suite.m365.Site.ID})
sel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
sel.Include(selTD.SharePointBackupFolderScope(sel))
sel.Filter(sel.Library("documents"))
sel.DiscreteOwner = suite.m365.Site.ID
sel.DiscreteOwner = suite.its.Site.ID
RunDriveRestoreWithAdvancedOptions(
suite.T(),
suite,
suite.m365.AC,
suite.its.AC,
sel.Selector,
suite.m365.Site.DriveID,
suite.m365.Site.DriveRootFolderID)
suite.its.Site.DriveID,
suite.its.Site.DriveRootFolderID)
}
func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointAlternateProtectedResource() {
sel := selectors.NewSharePointBackup([]string{suite.m365.Site.ID})
sel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
sel.Include(selTD.SharePointBackupFolderScope(sel))
sel.Filter(sel.Library("documents"))
sel.DiscreteOwner = suite.m365.Site.ID
sel.DiscreteOwner = suite.its.Site.ID
RunDriveRestoreToAlternateProtectedResource(
suite.T(),
suite,
suite.m365.AC,
suite.its.AC,
sel.Selector,
suite.m365.Site,
suite.m365.SecondarySite,
suite.m365.SecondarySite.ID)
suite.its.Site,
suite.its.SecondarySite,
suite.its.SecondarySite.ID)
}
func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDeletedDrives() {
@ -345,13 +344,13 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDelete
rc.OnCollision = control.Copy
// create a new drive
md, err := suite.m365.AC.Lists().PostDrive(ctx, suite.m365.Site.ID, rc.Location)
md, err := suite.its.AC.Lists().PostDrive(ctx, suite.its.Site.ID, rc.Location)
require.NoError(t, err, clues.ToCore(err))
driveID := ptr.Val(md.GetId())
// get the root folder
mdi, err := suite.m365.AC.Drives().GetRootFolder(ctx, driveID)
mdi, err := suite.its.AC.Drives().GetRootFolder(ctx, driveID)
require.NoError(t, err, clues.ToCore(err))
rootFolderID := ptr.Val(mdi.GetId())
@ -365,7 +364,7 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDelete
file := models.NewFile()
item.SetFile(file)
_, err = suite.m365.AC.Drives().PostItemInContainer(
_, err = suite.its.AC.Drives().PostItemInContainer(
ctx,
driveID,
rootFolderID,
@ -378,13 +377,13 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDelete
mb = evmock.NewBus()
counter = count.New()
opts = control.DefaultOptions()
graphClient = suite.m365.AC.Stable.Client()
graphClient = suite.its.AC.Stable.Client()
)
bsel := selectors.NewSharePointBackup([]string{suite.m365.Site.ID})
bsel := selectors.NewSharePointBackup([]string{suite.its.Site.ID})
bsel.Include(selTD.SharePointBackupFolderScope(bsel))
bsel.Filter(bsel.Library(rc.Location))
bsel.DiscreteOwner = suite.m365.Site.ID
bsel.DiscreteOwner = suite.its.Site.ID
bo, bod := PrepNewTestBackupOp(t, ctx, mb, bsel.Selector, opts, version.Backup, counter)
defer bod.Close(t, ctx)
@ -482,9 +481,9 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDelete
RunAndCheckRestore(t, ctx, &ro, mb, false)
assert.Equal(t, 1, ctr.Get(count.NewItemCreated), "restored an item")
pgr := suite.m365.AC.
pgr := suite.its.AC.
Drives().
NewSiteDrivePager(suite.m365.Site.ID, []string{"id", "name"})
NewSiteDrivePager(suite.its.Site.ID, []string{"id", "name"})
drives, err := api.GetAllDrives(ctx, pgr)
require.NoError(t, err, clues.ToCore(err))
@ -503,7 +502,7 @@ func (suite *SharePointRestoreNightlyIntgSuite) TestRestore_Run_sharepointDelete
md = created
driveID = ptr.Val(md.GetId())
mdi, err := suite.m365.AC.Drives().GetRootFolder(ctx, driveID)
mdi, err := suite.its.AC.Drives().GetRootFolder(ctx, driveID)
require.NoError(t, err, clues.ToCore(err))
rootFolderID = ptr.Val(mdi.GetId())

View File

@ -1,258 +0,0 @@
package its
import (
"strings"
"testing"
"github.com/alcionai/clues"
"github.com/h2non/gock"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)
// ---------------------------------------------------------------------------
// Gockable client
// ---------------------------------------------------------------------------
// gockClient produces a new exchange api client whose HTTP transport
// can be intercepted and mocked using gock. Two services are built
// from the same credentials: a standard one and a no-timeout variant
// for large-item transfers.
func gockClient(creds account.M365Config, counter *count.Bus) (api.Client, error) {
	stable, err := graph.NewGockService(creds, counter)
	if err != nil {
		return api.Client{}, err
	}

	largeItem, err := graph.NewGockService(creds, counter, graph.NoTimeout())
	if err != nil {
		return api.Client{}, err
	}

	return api.Client{
		Credentials: creds,
		Stable:      stable,
		LargeItem:   largeItem,
	}, nil
}
// ---------------------------------------------------------------------------
// Intercepting calls with Gock
// ---------------------------------------------------------------------------

// graphAPIHostURL is the scheme+host that every intercepted graph
// request targets; URL paths are built separately by V1APIURLPath.
const graphAPIHostURL = "https://graph.microsoft.com"
// V1APIURLPath joins the given path segments beneath the graph v1.0
// API root, producing a slash-delimited URL path such as
// "/v1.0/users/{id}/messages". With no segments it returns "/v1.0".
func V1APIURLPath(parts ...string) string {
	segments := make([]string, 0, len(parts)+1)
	segments = append(segments, "/v1.0")
	segments = append(segments, parts...)

	return strings.Join(segments, "/")
}
// InterceptV1Path registers a gock interceptor matching a GET request
// against the graph v1.0 endpoint described by the given path parts.
func InterceptV1Path(pathParts ...string) *gock.Request {
	urlPath := V1APIURLPath(pathParts...)

	return gock.New(graphAPIHostURL).Get(urlPath)
}
// ---------------------------------------------------------------------------
// Suite Setup
// ---------------------------------------------------------------------------
// IDs collects the commonly referenced Graph identifiers for a single
// test resource (user, site, or group), pre-resolved at suite setup so
// individual tests don't need to re-query them.
type IDs struct {
	// Provider pairs the resource ID with a human-readable name
	// (for users this is the principal name/email — see fillUser).
	Provider idname.Provider

	ID          string
	Email       string
	DisplayName string

	// DriveID and DriveRootFolderID identify the resource's default
	// drive and that drive's root folder.
	DriveID           string
	DriveRootFolderID string
	WebURL            string

	// TestContainerID is used to store the ID of the primary container under
	// test. Normally this will be empty, but in certain services or data
	// categories, especially those where we don't have canonical containers
	// to isolate for testing, we may specify a TestContainer here instead.
	TestContainerID string

	// a "RootSite" is used by resources that own one or more sites.
	// ex: groups and teams. Each of those resources should designate
	// a "root" site (aka: the "default" site). That site gets embedded
	// here because we probably interface with it as its own resource
	// within the drive processors.
	RootSite struct {
		Provider idname.Provider

		ID          string
		DisplayName string

		DriveID           string
		DriveRootFolderID string
		WebURL            string
	}
}
// M365IntgTestSetup provides all the common references used in an m365
// integration test suite: the account and credentials under test, api
// clients (standard and gock-mockable), and pre-resolved IDs for the
// configured users, sites, and groups. Call `its.GetM365()` to obtain
// one for your test suite.
// NOTE(review): earlier docs called this a "singleton", but GetM365
// builds a fresh value on every call — no caching is visible here.
// If you're looking for unit testing setup, use `uts.GetM365()` instead.
type M365IntgTestSetup struct {
	Acct  account.Account
	Creds account.M365Config

	TenantID string

	// AC is the live api client; GockAC routes through a gock-mockable
	// transport for interception tests.
	AC     api.Client
	GockAC api.Client

	Site          IDs
	SecondarySite IDs

	Group          IDs
	SecondaryGroup IDs

	User          IDs
	SecondaryUser IDs
	TertiaryUser  IDs
}
// GetM365 populates and returns the M365IntgTestSetup used by m365
// integration test suites. Any setup failure fails the test immediately.
func GetM365(t *testing.T) M365IntgTestSetup {
	ctx, flush := tester.NewContext(t)
	defer flush()

	graph.InitializeConcurrencyLimiter(ctx, true, 4)

	var (
		m365 = M365IntgTestSetup{}
		err  error
	)

	m365.Acct = tconfig.NewM365Account(t)

	m365.Creds, err = m365.Acct.M365Config()
	require.NoError(t, err, clues.ToCore(err))

	m365.TenantID = m365.Creds.AzureTenantID

	m365.AC, err = api.NewClient(m365.Creds, control.DefaultOptions(), count.New())
	require.NoError(t, err, clues.ToCore(err))

	m365.GockAC, err = gockClient(m365.Creds, count.New())
	require.NoError(t, err, clues.ToCore(err))

	// protected resources: users
	fillUser(t, m365.AC, tconfig.M365UserID(t), &m365.User)
	fillUser(t, m365.AC, tconfig.SecondaryM365UserID(t), &m365.SecondaryUser)
	fillUser(t, m365.AC, tconfig.TertiaryM365UserID(t), &m365.TertiaryUser)

	// protected resources: sites
	fillSite(t, m365.AC, tconfig.M365SiteID(t), &m365.Site)
	fillSite(t, m365.AC, tconfig.SecondaryM365SiteID(t), &m365.SecondarySite)

	// protected resources: teams
	fillTeam(t, m365.AC, tconfig.M365TeamID(t), &m365.Group)
	fillTeam(t, m365.AC, tconfig.SecondaryM365TeamID(t), &m365.SecondaryGroup)

	return m365
}
// fillUser resolves the user identified by uid and populates ids with the
// user's canonical ID, principal name, display name, and default drive info.
// Any lookup failure fails the test immediately.
func fillUser(
	t *testing.T,
	ac api.Client,
	uid string,
	ids *IDs,
) {
	ctx, flush := tester.NewContext(t)
	defer flush()

	// FIX: removed the dead assignment `ids.ID = tconfig.M365UserID(t)`,
	// which ignored the uid parameter and was immediately overwritten by
	// the canonical ID below.
	user, err := ac.Users().GetByID(ctx, uid, api.CallConfig{})
	require.NoError(t, err, clues.ToCore(err))

	ids.ID = ptr.Val(user.GetId())
	ids.Email = ptr.Val(user.GetUserPrincipalName())
	ids.Provider = idname.NewProvider(ids.ID, ids.Email)
	ids.DisplayName = ptr.Val(user.GetDisplayName())

	drive, err := ac.Users().GetDefaultDrive(ctx, ids.ID)
	require.NoError(t, err, clues.ToCore(err))

	ids.DriveID = ptr.Val(drive.GetId())

	rootFolder, err := ac.Drives().GetRootFolder(ctx, ids.DriveID)
	require.NoError(t, err, clues.ToCore(err))

	ids.DriveRootFolderID = ptr.Val(rootFolder.GetId())
}
// fillSite resolves the site identified by sid and populates ids with the
// site's ID, web url, display name, and default drive details. Any lookup
// failure fails the test immediately.
func fillSite(
	t *testing.T,
	ac api.Client,
	sid string,
	ids *IDs,
) {
	ctx, flush := tester.NewContext(t)
	defer flush()

	s, err := ac.Sites().GetByID(ctx, sid, api.CallConfig{})
	require.NoError(t, err, clues.ToCore(err))

	ids.ID = ptr.Val(s.GetId())
	ids.WebURL = ptr.Val(s.GetWebUrl())
	ids.Provider = idname.NewProvider(ids.ID, ids.WebURL)
	ids.DisplayName = ptr.Val(s.GetDisplayName())

	defaultDrive, err := ac.Sites().GetDefaultDrive(ctx, ids.ID)
	require.NoError(t, err, clues.ToCore(err))

	ids.DriveID = ptr.Val(defaultDrive.GetId())

	root, err := ac.Drives().GetRootFolder(ctx, ids.DriveID)
	require.NoError(t, err, clues.ToCore(err))

	ids.DriveRootFolderID = ptr.Val(root.GetId())
}
// fillTeam resolves the team identified by gid and populates ids with the
// group's identifiers, its "Test" channel (used as the primary test
// container), and its root site plus that site's default drive details.
// Any lookup failure fails the test immediately.
func fillTeam(
	t *testing.T,
	ac api.Client,
	gid string,
	ids *IDs,
) {
	ctx, flush := tester.NewContext(t)
	defer flush()

	group, err := ac.Groups().GetByID(ctx, gid, api.CallConfig{})
	require.NoError(t, err, clues.ToCore(err))

	ids.ID = ptr.Val(group.GetId())
	ids.Email = ptr.Val(group.GetMail())
	ids.Provider = idname.NewProvider(ids.ID, ids.Email)
	ids.DisplayName = ptr.Val(group.GetDisplayName())

	// the channel named "Test" serves as the primary container under test.
	ch, err := ac.Channels().
		GetChannelByName(
			ctx,
			ids.ID,
			"Test")
	require.NoError(t, err, clues.ToCore(err))
	require.Equal(t, "Test", ptr.Val(ch.GetDisplayName()))

	ids.TestContainerID = ptr.Val(ch.GetId())

	// each team designates a root (default) site; capture it and its
	// default drive so tests can address it as its own resource.
	rootSite, err := ac.Groups().GetRootSite(ctx, gid)
	require.NoError(t, err, clues.ToCore(err))

	ids.RootSite.ID = ptr.Val(rootSite.GetId())
	ids.RootSite.WebURL = ptr.Val(rootSite.GetWebUrl())
	ids.RootSite.DisplayName = ptr.Val(rootSite.GetDisplayName())
	ids.RootSite.Provider = idname.NewProvider(ids.RootSite.ID, ids.RootSite.WebURL)

	drv, err := ac.Sites().GetDefaultDrive(ctx, ids.RootSite.ID)
	require.NoError(t, err, clues.ToCore(err))

	ids.RootSite.DriveID = ptr.Val(drv.GetId())

	root, err := ac.Drives().GetRootFolder(ctx, ids.RootSite.DriveID)
	require.NoError(t, err, clues.ToCore(err))

	ids.RootSite.DriveRootFolderID = ptr.Val(root.GetId())
}

View File

@ -1,154 +0,0 @@
package its
import (
"slices"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)
// M365IntgSuite hosts integration tests for the its package's own
// test-setup helpers.
type M365IntgSuite struct {
	tester.Suite
}
func TestM365IntgSuite(t *testing.T) {
suite.Run(t, &M365IntgSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs, storeTD.AWSStorageCredEnvs}),
})
}
// TestGetM365 verifies that GetM365 fully populates the setup struct and
// that each resource's IDs contain exactly the expected fields.
func (suite *M365IntgSuite) TestGetM365() {
	t := suite.T()

	setup := GetM365(t)

	assert.NotEmpty(t, setup.Acct)
	assert.NotEmpty(t, setup.Creds)
	assert.NotEmpty(t, setup.AC)
	assert.NotEmpty(t, setup.GockAC)
	assert.NotEmpty(t, setup.TenantID)

	var (
		none       = []string{}
		siteKeys   = []string{id, weburl, provider, displayname, driveid, driverootfolderid}
		rootKeys   = []string{id, weburl, provider, displayname, driveid, driverootfolderid}
		groupKeys  = []string{id, email, provider, displayname, testcontainerid}
		userKeys   = []string{id, email, provider, displayname, driveid, driverootfolderid}
	)

	assertIDs(t, setup.Site, siteKeys, none)
	assertIDs(t, setup.SecondarySite, siteKeys, none)
	assertIDs(t, setup.Group, groupKeys, rootKeys)
	assertIDs(t, setup.SecondaryGroup, groupKeys, rootKeys)
	assertIDs(t, setup.User, userKeys, none)
	assertIDs(t, setup.SecondaryUser, userKeys, none)
	assertIDs(t, setup.TertiaryUser, userKeys, none)
}
// keys used by assertIDs callers to declare which IDs fields are expected
// to be populated for a given resource.
const (
	provider          = "provider"
	id                = "id"
	email             = "email"
	displayname       = "displayname"
	driveid           = "driveid"
	driverootfolderid = "driverootfolderid"
	testcontainerid   = "testcontainerid"
	weburl            = "weburl"
)
// assertIDs validates that exactly the fields named in expect are populated
// on ids, and that exactly the fields named in expectRootSite are populated
// on ids.RootSite; fields not named must be empty.
func assertIDs(
	t *testing.T,
	ids IDs,
	expect []string,
	expectRootSite []string,
) {
	assert.NotEmpty(t, ids)

	assertProviderPopulation(t, ids.Provider, slices.Contains(expect, provider))

	assertFieldPopulation(t, ids.ID, expect, id)
	assertFieldPopulation(t, ids.Email, expect, email)
	// FIX: the original never checked ids.DisplayName, even though callers
	// include displayname in their top-level expectations.
	assertFieldPopulation(t, ids.DisplayName, expect, displayname)
	assertFieldPopulation(t, ids.DriveID, expect, driveid)
	assertFieldPopulation(t, ids.DriveRootFolderID, expect, driverootfolderid)
	assertFieldPopulation(t, ids.TestContainerID, expect, testcontainerid)
	assertFieldPopulation(t, ids.WebURL, expect, weburl)

	assertProviderPopulation(t, ids.RootSite.Provider, slices.Contains(expectRootSite, provider))

	assertFieldPopulation(t, ids.RootSite.ID, expectRootSite, id)
	assertFieldPopulation(t, ids.RootSite.DriveID, expectRootSite, driveid)
	assertFieldPopulation(t, ids.RootSite.DisplayName, expectRootSite, displayname)
	assertFieldPopulation(t, ids.RootSite.DriveRootFolderID, expectRootSite, driverootfolderid)
	assertFieldPopulation(t, ids.RootSite.WebURL, expectRootSite, weburl)
}

// assertFieldPopulation expects val to be populated when key appears in
// expect, and empty otherwise. The key is included in the failure message.
func assertFieldPopulation(t *testing.T, val string, expect []string, key string) {
	if slices.Contains(expect, key) {
		assert.NotEmpty(t, val, key)
	} else {
		assert.Empty(t, val, key)
	}
}

// assertProviderPopulation expects p to be a fully populated provider when
// want is true, and nil otherwise. The nil guard prevents a panic when the
// non-nil assertion fails but execution continues.
func assertProviderPopulation(t *testing.T, p idname.Provider, want bool) {
	if !want {
		assert.Nil(t, p)
		return
	}

	assert.NotNil(t, p)

	if p != nil {
		assert.NotEmpty(t, p.ID())
		assert.NotEmpty(t, p.Name())
	}
}

View File

@ -1,30 +0,0 @@
package its
import "github.com/alcionai/corso/src/pkg/path"
// ResourceServicer pairs a protected resource's IDs with the service type
// under which tests operate on that resource.
type ResourceServicer interface {
	// Resource returns the protected resource's identifiers.
	Resource() IDs
	// Service returns the path service type associated with the resource.
	Service() path.ServiceType
}
// interface compliance check
var _ ResourceServicer = resourceAndService{}

// resourceAndService is the canonical ResourceServicer implementation,
// binding a single protected resource to a single service type.
type resourceAndService struct {
	protectedResource IDs
	serviceType       path.ServiceType
}
// Resource returns the protected resource's identifiers.
func (ras resourceAndService) Resource() IDs {
	return ras.protectedResource
}
// Service returns the path service type associated with the resource.
func (ras resourceAndService) Service() path.ServiceType {
	return ras.serviceType
}
// NewResourceService constructs a ResourceServicer from the given
// protected resource and service type.
func NewResourceService(r IDs, s path.ServiceType) ResourceServicer {
	ras := resourceAndService{
		protectedResource: r,
		serviceType:       s,
	}

	return &ras
}

View File

@ -37,10 +37,10 @@ func NewFakeM365Account(t *testing.T) account.Account {
account.ProviderM365,
account.M365Config{
M365: credentials.M365{
AzureClientID: "client-12345",
AzureClientSecret: "secret-abcde",
AzureClientID: "12345",
AzureClientSecret: "abcde",
},
AzureTenantID: "tenant-09876",
AzureTenantID: "09876",
})
require.NoError(t, err, "initializing mock account", clues.ToCore(err))

View File

@ -16,7 +16,6 @@ import (
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control/repository"
@ -400,7 +399,6 @@ func (suite *ConfigSuite) TestReadFromFlags() {
type ConfigIntegrationSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
}
func TestConfigIntegrationSuite(t *testing.T) {
@ -409,10 +407,6 @@ func TestConfigIntegrationSuite(t *testing.T) {
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
func (suite *ConfigIntegrationSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
}
func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
t := suite.T()
vpr := viper.New()
@ -424,6 +418,7 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
bkt = "get-storage-and-account-bucket"
end = "https://get-storage-and-account.com"
pfx = "get-storage-and-account-prefix/"
tid = "3a2faa4e-a882-445c-9d27-f552ef189381"
)
// Configure viper to read test config file
@ -439,10 +434,9 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
DoNotVerifyTLS: true,
DoNotUseTLS: true,
}
m365 := account.M365Config{AzureTenantID: tid}
creds := suite.m365.Creds
err = writeRepoConfigWithViper(vpr, s3Cfg, creds, repository.Options{}, "repoid")
err = writeRepoConfigWithViper(vpr, s3Cfg, m365, repository.Options{}, "repoid")
require.NoError(t, err, "writing repo config", clues.ToCore(err))
require.Equal(
@ -490,12 +484,13 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount_noFileOnlyOverride
bkt = "get-storage-and-account-no-file-bucket"
end = "https://get-storage-and-account.com/no-file"
pfx = "get-storage-and-account-no-file-prefix/"
tid = "88f8522b-18e4-4d0f-b514-2d7b34d4c5a1"
)
creds := suite.m365.Creds
m365 := account.M365Config{AzureTenantID: tid}
overrides := map[string]string{
account.AzureTenantID: suite.m365.TenantID,
account.AzureTenantID: tid,
account.AccountProviderTypeKey: account.ProviderM365.String(),
storage.Bucket: bkt,
storage.Endpoint: end,
@ -524,7 +519,7 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount_noFileOnlyOverride
readM365, err := cfg.Account.M365Config()
require.NoError(t, err, "reading m365 config from account", clues.ToCore(err))
assert.Equal(t, readM365.AzureTenantID, creds.AzureTenantID)
assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID)
assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID))
assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret))
}

View File

@ -27,11 +27,6 @@ type Options struct {
// backup data until the set limits without paying attention to what the other
// had already backed up.
PreviewLimits PreviewItemLimits `json:"previewItemLimits"`
// specifying a resource tuple in this map allows that resource to produce
// a Skip instead of a recoverable error in case of a failure due to 503 when
// retrieving calendar event item data.
SkipEventsOnInstance503ForResources map[string]struct{}
}
// RateLimiter is the set of options applied to any external service facing rate

View File

@ -66,12 +66,6 @@ var (
// about what it sounds like: we tried to look for a backup by ID, but the
// storage layer couldn't find anything for that ID.
ErrBackupNotFound = &Err{msg: "backup not found"}
// basically "internal server error". But not internal issues. We only return this
// when a downstream service (ex: graph api) responds with a 5xx style error.
// Note: producers may not funnel all 5xx errors in this umbrella, because
// different cases (ex: `StatusHTTPVersionNotSupported`) may need more specific
// attention and handling than standard gateway outages or service issues.
ErrDownstreamServerError = &Err{msg: "server error in downstream service"}
// a catch-all for downstream api auth issues. doesn't matter which api.
ErrInsufficientAuthorization = &Err{msg: "insufficient authorization"}
// happens when we look up something using an identifier other than a canonical ID
@ -122,27 +116,3 @@ func As(err error) (*Err, bool) {
return ce, ok
}
// ---------------------------------------------------------------------------
// Labels
//
// In some cases we like to attach well known labels to errors for additional
// metadata or metrics or other associations. Labeling differs from error
// typing or identification because a label won't explain the cause or clearly
// contextualize an error. Labels are tags for their own sake.
//
// Therefore, labels are expressly not for error identification. IE: if you
// see the check `if clues.HasLabel(err, labelFoo)` in place of
// `if errors.Is(err, errFoo)`, that's a red flag.
// ---------------------------------------------------------------------------
const (
// add this label when we might need to further investigate the cause of the
// error. For example, in the graph api layer we try to categorize errors
// by their specific identity, such as "the resource was locked out". If
// we're unsuccessful, we can still fall back to the more generic error code,
// "403 forbidden". But it tradeoff, we may end up catching (and gracefully
// handling) 403s, but not identifying an underlying root issue. This label
// is here to say, "maybe you should look for the reason why this happened".
LabelRootCauseUnknown = "root-cause-unknown"
)

View File

@ -10,7 +10,6 @@ import (
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
)
func ConsumeExportCollections(
@ -20,10 +19,6 @@ func ConsumeExportCollections(
errs *fault.Bus,
) error {
el := errs.Local()
counted := 0
log := logger.Ctx(ctx).
With("export_location", exportLocation,
"collection_count", len(expColl))
for _, col := range expColl {
if el.Failure() != nil {
@ -34,13 +29,6 @@ func ConsumeExportCollections(
ictx := clues.Add(ctx, "dir_name", folder)
for item := range col.Items(ictx) {
counted++
// Log every 1000 items that are processed
if counted%1000 == 0 {
log.Infow("progress writing export items", "count_items", counted)
}
if item.Error != nil {
el.AddRecoverable(ictx, clues.Wrap(item.Error, "getting item"))
continue
@ -54,8 +42,6 @@ func ConsumeExportCollections(
}
}
log.Infow("completed writing export items", "count_items", counted)
return el.Failure()
}

View File

@ -12,39 +12,30 @@ type AddSkipper interface {
AddSkip(ctx context.Context, s *Skipped)
}
// SkipCause identifies the well-known conditions to Skip an item. It is
// skipCause identifies the well-known conditions to Skip an item. It is
// important that skip cause enumerations do not overlap with general error
// handling. Skips must be well known, well documented, and consistent.
// Transient failures, undocumented or unknown conditions, and arbitrary
// handling should never produce a skipped item. Those cases should get
// handled as normal errors.
type SkipCause string
type skipCause string
const (
// SkipMalware identifies a malware detection case. Files that graph
// api identifies as malware cannot be downloaded or uploaded, and will
// permanently fail any attempts to backup or restore.
SkipMalware SkipCause = "malware_detected"
SkipMalware skipCause = "malware_detected"
// SkipOneNote identifies that a file was skipped because it
// was a OneNote file that remains inaccessible (503 server response)
// regardless of the number of retries.
//nolint:lll
// https://support.microsoft.com/en-us/office/restrictions-and-limitations-in-onedrive-and-sharepoint-64883a5d-228e-48f5-b3d2-eb39e07630fa#onenotenotebooks
SkipOneNote SkipCause = "inaccessible_one_note_file"
SkipOneNote skipCause = "inaccessible_one_note_file"
// SkipInvalidRecipients identifies that an email was skipped because Exchange
// believes it is not valid and fails any attempt to read it.
SkipInvalidRecipients SkipCause = "invalid_recipients_email"
// SkipCorruptData identifies that an email was skipped because graph reported
// that the email data was corrupt and failed all attempts to read it.
SkipCorruptData SkipCause = "corrupt_data"
// SkipKnownEventInstance503s identifies cases where we have a pre-configured list
// of event IDs where the events are known to fail with a 503 due to there being
// too many instances to retrieve from graph api.
SkipKnownEventInstance503s SkipCause = "known_event_instance_503"
SkipInvalidRecipients skipCause = "invalid_recipients_email"
)
var _ print.Printable = &Skipped{}
@ -75,7 +66,7 @@ func (s *Skipped) String() string {
}
// HasCause compares the underlying cause against the parameter.
func (s *Skipped) HasCause(c SkipCause) bool {
func (s *Skipped) HasCause(c skipCause) bool {
if s == nil {
return false
}
@ -110,27 +101,27 @@ func (s Skipped) Values(bool) []string {
}
// ContainerSkip produces a Container-kind Item for tracking skipped items.
func ContainerSkip(cause SkipCause, namespace, id, name string, addtl map[string]any) *Skipped {
func ContainerSkip(cause skipCause, namespace, id, name string, addtl map[string]any) *Skipped {
return itemSkip(ContainerType, cause, namespace, id, name, addtl)
}
// EmailSkip produces a Email-kind Item for tracking skipped items.
func EmailSkip(cause SkipCause, user, id string, addtl map[string]any) *Skipped {
func EmailSkip(cause skipCause, user, id string, addtl map[string]any) *Skipped {
return itemSkip(EmailType, cause, user, id, "", addtl)
}
// FileSkip produces a File-kind Item for tracking skipped items.
func FileSkip(cause SkipCause, namespace, id, name string, addtl map[string]any) *Skipped {
func FileSkip(cause skipCause, namespace, id, name string, addtl map[string]any) *Skipped {
return itemSkip(FileType, cause, namespace, id, name, addtl)
}
// OnwerSkip produces a ResourceOwner-kind Item for tracking skipped items.
func OwnerSkip(cause SkipCause, namespace, id, name string, addtl map[string]any) *Skipped {
func OwnerSkip(cause skipCause, namespace, id, name string, addtl map[string]any) *Skipped {
return itemSkip(ResourceOwnerType, cause, namespace, id, name, addtl)
}
// itemSkip produces a Item of the provided type for tracking skipped items.
func itemSkip(t ItemType, cause SkipCause, namespace, id, name string, addtl map[string]any) *Skipped {
func itemSkip(t ItemType, cause skipCause, namespace, id, name string, addtl map[string]any) *Skipped {
return &Skipped{
Item: Item{
Namespace: namespace,

Some files were not shown because too many files have changed in this diff Show More