Compare commits: `ci-cache-e` ... `main` (101 commits)
| SHA1 |
|---|
| 880cb899b5 |
| 779bb70301 |
| 2487072d95 |
| ad927afbc1 |
| b086f8c3ff |
| d9bf48be7e |
| fe261b22c5 |
| 18e3661289 |
| d87e24d839 |
| b3775e2feb |
| 4fc5b5b146 |
| 0fe2588e78 |
| d9d993d267 |
| 1b842a1c60 |
| 64de1d9e17 |
| 48c0ab5175 |
| eb3ab3aebc |
| df423d5e18 |
| 23de1d53dd |
| 963dd4a11d |
| e96f74e634 |
| b180dee597 |
| 44d4821a8d |
| 6bbb46b29a |
| f197d7cf7b |
| 6c9de9bef3 |
| 686867bd96 |
| cd41d2fbce |
| 2b79c1b797 |
| 2ab6d34538 |
| e0884c734c |
| f3fdb4a885 |
| f4dbaf60b0 |
| b9b5650506 |
| f28e79c098 |
| 42af271526 |
| d87435fdc2 |
| 8bdf86bbad |
| bf52fdbe6a |
| 90d6db486b |
| f10730cf98 |
| bb2bd6df3f |
| 5e8407a970 |
| 4b56754546 |
| 28aba60cc5 |
| 03048a6ca8 |
| 97535e2afc |
| cd7450395e |
| 411ef24024 |
| b3b52c0dfc |
| 8502e1fee6 |
| f0b8041c3f |
| f92f811559 |
| 71a9087e4d |
| 45886e2ad9 |
| 7262d3b284 |
| 2ab3c890b4 |
| f00dd0f88a |
| a2d40b4d38 |
| c1ec1585a2 |
| a680f13f84 |
| 9c8ac96aed |
| e6dd387811 |
| fb64a2f52b |
| 0cde1a4778 |
| e86592f51e |
| 53a0525bfd |
| de22131b23 |
| 4cf4c22259 |
| b5ac65c3d0 |
| b4b8088a97 |
| 29f6582bc7 |
| d983488154 |
| 7e2b9dab62 |
| 1537db59c4 |
| 80d7d5c63d |
| ca3ca60ba4 |
| 50ba30539a |
| f1406a3334 |
| f615198c78 |
| 820d6aba33 |
| 576c9f6b53 |
| 08d4803ebe |
| 734fd7239e |
| d2f1bbb5c7 |
| 45b021d58e |
| 8e6a47b103 |
| 8ac7e6caa2 |
| 6ef2c2d494 |
| 8a7a61f05d |
| e1cb5b6313 |
| 85aaa448c5 |
| 8437724254 |
| 08e1b1d1e6 |
| 41f2808bd9 |
| 79194c44df |
| 5f036a0cc1 |
| d426250931 |
| c3f4dd6bcf |
| f7a9ca836f |
| 8133da3087 |
```diff
@@ -1,4 +1,5 @@
 name: Backup Restore Test
 description: Run various backup/restore/export tests for a service.

 inputs:
   service:
```
**.github/actions/go-setup-cache/action.yml** (1 change, vendored)

```diff
@@ -1,4 +1,5 @@
 name: Setup and Cache Golang
 description: Build golang binaries for later use in CI.

 # clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml
 #
```
**.github/actions/publish-binary/action.yml** (1 change, vendored)

```diff
@@ -1,4 +1,5 @@
 name: Publish Binary
 description: Publish binary artifacts.

 inputs:
   version:
```
**.github/actions/publish-website/action.yml** (1 change, vendored)

```diff
@@ -1,4 +1,5 @@
 name: Publish Website
 description: Publish website artifacts.

 inputs:
   aws-iam-role:
```
**.github/actions/purge-m365-data/action.yml** (44 changes, vendored)

```diff
@@ -1,4 +1,5 @@
 name: Purge M365 User Data
 description: Deletes M365 data generated during CI tests.

 # Hard deletion of an m365 user's data. Our CI processes create a lot
 # of data churn (creation and immediate deletion) of files, the likes
@@ -30,12 +31,19 @@ inputs:
     description: Secret value of for AZURE_CLIENT_ID
   azure-client-secret:
     description: Secret value of for AZURE_CLIENT_SECRET
+  azure-pnp-client-id:
+    description: Secret value of AZURE_PNP_CLIENT_ID
+  azure-pnp-client-cert:
+    description: Base64 encoded private certificate for the azure-pnp-client-id (Secret value of AZURE_PNP_CLIENT_CERT)
   azure-tenant-id:
-    description: Secret value of for AZURE_TENANT_ID
+    description: Secret value of AZURE_TENANT_ID
   m365-admin-user:
     description: Secret value of for M365_TENANT_ADMIN_USER
   m365-admin-password:
     description: Secret value of for M365_TENANT_ADMIN_PASSWORD
+  tenant-domain:
+    description: The domain of the tenant (ex. 10rqc2.onmicrosft.com)
+    required: true

 runs:
   using: composite
@@ -53,7 +61,13 @@ runs:
        AZURE_CLIENT_ID: ${{ inputs.azure-client-id }}
        AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
        AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
-      run: ./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
+      run: |
+        for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
+        {
+          if (./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
+            break
+          }
+        }

    # TODO(ashmrtn): Re-enable when we figure out errors we're seeing with Get-Mailbox call.
    #- name: Reset retention for all mailboxes to 0
@@ -74,10 +88,16 @@ runs:
      shell: pwsh
      working-directory: ./src/cmd/purge/scripts
      env:
        M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
        M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
+        AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
+        AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
+        TENANT_DOMAIN: ${{ inputs.tenant-domain }}
      run: |
-        ./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
+        for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
+        {
+          if (./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
+            break
+          }
+        }

 ################################################################################################################
 # Sharepoint
@@ -88,6 +108,14 @@ runs:
      shell: pwsh
      working-directory: ./src/cmd/purge/scripts
      env:
        M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
        M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
-      run: ./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}
+        AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
+        AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
+        TENANT_DOMAIN: ${{ inputs.tenant-domain }}
+      run: |
+        for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
+        {
+          if (./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
+            break
+          }
+        }
```
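Each purge invocation above now runs inside a bounded retry: up to three attempts, stopping at the first success. A minimal sketch of the same bounded-retry shape in Go, with `purge()` as a hypothetical stand-in for one purge-script invocation:

```go
package main

import (
	"errors"
	"log"
)

// purge is a hypothetical stand-in for a single purge-script run; it
// reports failure through its error, much as the PowerShell calls above
// signal failure through their return value.
func purge() error { return errors.New("transient failure") }

func main() {
	// Up to three attempts, stopping on the first success; mirrors the
	// `for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; ...)` loops above.
	for attempt := 1; attempt <= 3; attempt++ {
		err := purge()
		if err == nil {
			break
		}

		log.Printf("attempt %d failed: %v", attempt, err)
	}
}
```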
**.github/actions/teams-message/action.yml** (1 change, vendored)

```diff
@@ -1,4 +1,5 @@
 name: Send a message to Teams
 description: Send messages to communication apps.

 inputs:
   msg:
```
**.github/actions/website-linting/action.yml** (1 change, vendored)

```diff
@@ -1,4 +1,5 @@
 name: Lint Website
 description: Lint website content.

 inputs:
   version:
```
**.github/workflows/_filechange_checker.yml** (2 changes, vendored)

```diff
@@ -28,7 +28,7 @@ jobs:

       # only run CI tests if the src folder or workflow actions have changed
       - name: Check for file changes in src/ or .github/workflows/
-        uses: dorny/paths-filter@v2
+        uses: dorny/paths-filter@v3
         id: dornycheck
         with:
           list-files: json
```
**.github/workflows/binary-publish.yml** (2 changes, vendored)

```diff
@@ -40,5 +40,5 @@ jobs:
         if: failure()
         uses: ./.github/actions/teams-message
         with:
-          msg: "[FAILED] Publishing Binary"
+          msg: "[CORSO FAILED] Publishing Binary"
           teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
```
**.github/workflows/ci.yml** (16 changes, vendored)

```diff
@@ -463,7 +463,7 @@ jobs:
           go-version-file: src/go.mod

       - name: Go Lint
-        uses: golangci/golangci-lint-action@v3
+        uses: golangci/golangci-lint-action@v4
         with:
           # Keep pinned to a verson as sometimes updates will add new lint
           # failures in unchanged code.
@@ -518,6 +518,20 @@ jobs:
             echo "Make sure to propagate errors with clues"
             exit 1
           fi
+      - name: Check if clues without context are used when context is passed in
+        run: |
+          # Using `grep .` as the exit codes are always true for correct grammar
+          if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#match? @clue "WC$"))' | grep .; then
+            echo "Do not use clues.*WC when context is passed in"
+            exit 1
+          fi
+      - name: Check clues with context is used when context is not passed in
+        run: |
+          # Using `grep .` as the exit codes are always true for correct grammar
+          if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#not-eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#not-match? @clue "WC$"))' | grep .; then
+            echo "Use clues.*WC when context is not passed in"
+            exit 1
+          fi

       # ----------------------------------------------------------------------------------------------------
       # --- GitHub Actions Linting -------------------------------------------------------------------------
```
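The two new checks encode an error-wrapping convention: when a function hands its `ctx` to the call that produced the error, the callee can annotate the error with context values itself, so the caller should use the plain `clues` wrappers; when the callee never sees `ctx`, the caller should attach those values itself with the `WC` ("with context") variants. A minimal sketch of the convention, assuming the `clues.Stack`/`clues.StackWC` pair from github.com/alcionai/clues and hypothetical callees `doFetch` and `parse`:

```go
package example

import (
	"context"

	"github.com/alcionai/clues"
)

// doFetch and parse are hypothetical callees used only for illustration.
func doFetch(ctx context.Context) error { return nil }
func parse(raw string) error            { return nil }

func fetch(ctx context.Context, raw string) error {
	// ctx is passed in: the callee can attach ctx values to its own
	// error, so the plain (non-WC) wrapper is enough here.
	if err := doFetch(ctx); err != nil {
		return clues.Stack(err)
	}

	// ctx is not passed in: attach its values at the call site with
	// the WC variant.
	if err := parse(raw); err != nil {
		return clues.StackWC(ctx, err)
	}

	return nil
}
```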
**.github/workflows/ci_test_cleanup.yml** (14 changes, vendored)

```diff
@@ -12,7 +12,7 @@ jobs:
     continue-on-error: true
     strategy:
       matrix:
-        user: [ CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, '' ]
+        user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, ""]

     steps:
       - uses: actions/checkout@v4
@@ -33,12 +33,15 @@ jobs:
           azure-tenant-id: ${{ secrets.TENANT_ID }}
           m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
           m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
+          azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
+          azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
+          tenant-domain: ${{ vars.TENANT_DOMAIN }}

       - name: Notify failure in teams
         if: failure()
         uses: ./.github/actions/teams-message
         with:
-          msg: "[FAILED] ${{ vars[matrix.user] }} CI Cleanup"
+          msg: "[CORSO FAILED] ${{ vars[matrix.user] }} CI Cleanup"
           teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

   Test-Site-Data-Cleanup:
@@ -47,7 +50,7 @@ jobs:
     continue-on-error: true
     strategy:
       matrix:
-        site: [ CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL ]
+        site: [CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL]

     steps:
       - uses: actions/checkout@v4
@@ -70,10 +73,13 @@ jobs:
           azure-tenant-id: ${{ secrets.TENANT_ID }}
           m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
           m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
+          azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
+          azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
+          tenant-domain: ${{ vars.TENANT_DOMAIN }}

       - name: Notify failure in teams
         if: failure()
         uses: ./.github/actions/teams-message
         with:
-          msg: "[FAILED] ${{ vars[matrix.site] }} CI Cleanup"
+          msg: "[CORSO FAILED] ${{ vars[matrix.site] }} CI Cleanup"
           teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
```
**.github/workflows/load_test.yml** (3 changes, vendored)

```diff
@@ -155,3 +155,6 @@ jobs:
           azure-tenant-id: ${{ secrets.TENANT_ID }}
           m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
           m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
+          azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
+          azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
+          tenant-domain: ${{ vars.TENANT_DOMAIN }}
```
**.github/workflows/longevity_test.yml** (9 changes, vendored)

```diff
@@ -6,7 +6,7 @@ on:
   workflow_dispatch:
     inputs:
       user:
-        description: 'User to run longevity test on'
+        description: "User to run longevity test on"

 permissions:
   # required to retrieve AWS credentials
@@ -23,7 +23,7 @@ jobs:
     uses: alcionai/corso/.github/workflows/accSelector.yaml@main

   Longevity-Tests:
-    needs: [ SetM365App ]
+    needs: [SetM365App]
     environment: Testing
     runs-on: ubuntu-latest
     env:
@@ -37,7 +37,7 @@ jobs:
       CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-longevity.log
       RESTORE_DEST_PFX: Corso_Test_Longevity_
       TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
-      PREFIX: 'longevity'
+      PREFIX: "longevity"

       # Options for retention.
       RETENTION_MODE: GOVERNANCE
@@ -113,7 +113,6 @@ jobs:
             --extend-retention \
             --prefix ${{ env.PREFIX }} \
             --bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
-            --succeed-if-exists \
             2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log

           if grep -q 'Failed to' ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
@@ -393,5 +392,5 @@ jobs:
         if: failure()
         uses: ./.github/actions/teams-message
         with:
-          msg: "[FAILED] Longevity Test"
+          msg: "[CORSO FAILED] Longevity Test"
           teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
```
**.github/workflows/nightly_test.yml** (8 changes, vendored)

```diff
@@ -48,7 +48,7 @@ jobs:
   # ----------------------------------------------------------------------------------------------------

   Test-Suite-Trusted:
-    needs: [ Checkout, SetM365App]
+    needs: [Checkout, SetM365App]
     environment: Testing
     runs-on: ubuntu-latest
     defaults:
@@ -100,9 +100,9 @@ jobs:
           -timeout 2h \
           ./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests

     ##########################################################################################################################################

     # Logging & Notifications

       # Upload the original go test output as an artifact for later review.
       - name: Upload test log
@@ -118,5 +118,5 @@ jobs:
         if: failure()
         uses: ./.github/actions/teams-message
         with:
-          msg: "[FAILED] Nightly Checks"
+          msg: "[COROS FAILED] Nightly Checks"
           teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
```
**.github/workflows/ok-to-test.yml** (2 changes, vendored)

```diff
@@ -19,7 +19,7 @@ jobs:
           private_key: ${{ secrets.PRIVATE_KEY }}

       - name: Slash Command Dispatch
-        uses: peter-evans/slash-command-dispatch@v3
+        uses: peter-evans/slash-command-dispatch@v4
         env:
           TOKEN: ${{ steps.generate_token.outputs.token }}
         with:
```
**.github/workflows/sanity-test.yaml** (94 changes, vendored)

```diff
@@ -6,7 +6,7 @@ on:
   workflow_dispatch:
     inputs:
       user:
-        description: 'User to run sanity test on'
+        description: "User to run sanity test on"

 permissions:
   # required to retrieve AWS credentials
@@ -23,7 +23,7 @@ jobs:
     uses: alcionai/corso/.github/workflows/accSelector.yaml@main

   Sanity-Tests:
-    needs: [ SetM365App ]
+    needs: [SetM365App]
     environment: Testing
     runs-on: ubuntu-latest
     env:
@@ -44,11 +44,10 @@ jobs:
       run:
         working-directory: src

     ##########################################################################################################################################

     # setup
     steps:

       - uses: actions/checkout@v4

       - name: Setup Golang with cache
@@ -64,9 +63,9 @@ jobs:

       - run: mkdir ${CORSO_LOG_DIR}

     ##########################################################################################################################################

     # Pre-Run cleanup

       # unlike CI tests, sanity tests are not expected to run concurrently.
       # however, the sanity yaml concurrency is set to a maximum of 1 run, preferring
@@ -91,6 +90,9 @@ jobs:
           azure-tenant-id: ${{ secrets.TENANT_ID }}
           m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
           m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
+          azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
+          azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
+          tenant-domain: ${{ vars.TENANT_DOMAIN }}

       - name: Purge CI-Produced Folders for Sites
         timeout-minutes: 30
@@ -106,10 +108,13 @@ jobs:
           azure-tenant-id: ${{ secrets.TENANT_ID }}
           m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
           m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
+          azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
+          azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
+          tenant-domain: ${{ vars.TENANT_DOMAIN }}

     ##########################################################################################################################################

     # Repository commands

       - name: Version Test
         timeout-minutes: 10
@@ -169,9 +174,9 @@ jobs:
             --mode complete \
             2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-maintenance.log

     ##########################################################################################################################################

     # Exchange

       # generate new entries to roll into the next load test
       # only runs if the test was successful
@@ -193,8 +198,8 @@ jobs:
           service: exchange
           kind: first-backup
           backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
-          restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
-          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+          restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
+          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
           log-dir: ${{ env.CORSO_LOG_DIR }}
           with-export: true

@@ -206,8 +211,8 @@ jobs:
           service: exchange
           kind: incremental
           backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
-          restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
-          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+          restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
+          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
           backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
           log-dir: ${{ env.CORSO_LOG_DIR }}
           with-export: true
@@ -220,8 +225,8 @@ jobs:
           service: exchange
           kind: non-delta
           backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta'
-          restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
-          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+          restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
+          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
           backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
           log-dir: ${{ env.CORSO_LOG_DIR }}
           with-export: true
@@ -234,16 +239,15 @@ jobs:
           service: exchange
           kind: non-delta-incremental
           backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
-          restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
-          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+          restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
+          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
           backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
           log-dir: ${{ env.CORSO_LOG_DIR }}
           with-export: true

     ##########################################################################################################################################

     # Onedrive

       # generate new entries for test
       - name: OneDrive - Create new data
@@ -270,8 +274,8 @@ jobs:
           service: onedrive
           kind: first-backup
           backup-args: '--user "${{ env.TEST_USER }}"'
-          restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
-          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
+          restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
+          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
           log-dir: ${{ env.CORSO_LOG_DIR }}
           with-export: true

@@ -295,14 +299,14 @@ jobs:
           service: onedrive
           kind: incremental
           backup-args: '--user "${{ env.TEST_USER }}"'
-          restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
-          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
+          restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
+          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
           log-dir: ${{ env.CORSO_LOG_DIR }}
           with-export: true

     ##########################################################################################################################################

     # Sharepoint Library

       # generate new entries for test
       - name: SharePoint - Create new data
@@ -330,8 +334,8 @@ jobs:
           service: sharepoint
           kind: first-backup
           backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
-          restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
-          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
+          restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
+          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
           log-dir: ${{ env.CORSO_LOG_DIR }}
           with-export: true
           category: libraries
@@ -357,15 +361,15 @@ jobs:
           service: sharepoint
           kind: incremental
           backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
-          restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
-          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
+          restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
+          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
           log-dir: ${{ env.CORSO_LOG_DIR }}
           with-export: true
           category: libraries

     ##########################################################################################################################################

     # Sharepoint Lists

       # generate new entries for test
       # The `awk | tr | sed` command chain is used to get a comma separated list of SharePoint list names.
@@ -403,7 +407,7 @@ jobs:
           service: sharepoint
           kind: first-backup-lists
           backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
-          restore-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S') --allow-lists-restore"
+          restore-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
           export-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
           restore-container: "${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
           log-dir: ${{ env.CORSO_LOG_DIR }}
@@ -446,7 +450,7 @@ jobs:
           service: sharepoint
           kind: incremental-lists
           backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
-          restore-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S') --allow-lists-restore"
+          restore-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
           export-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
           restore-container: "${{ steps.sharepoint-lists-store-restore-container-inc.outputs.result }},${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
           log-dir: ${{ env.CORSO_LOG_DIR }}
@@ -454,9 +458,9 @@ jobs:
           category: lists
           on-collision: copy

     ##########################################################################################################################################

     # Groups and Teams

       # generate new entries for test
       - name: Groups - Create new data
@@ -483,8 +487,8 @@ jobs:
         with:
           service: groups
           kind: first-backup
-          backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
-          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
+          backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
+          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
           log-dir: ${{ env.CORSO_LOG_DIR }}
           with-export: true

@@ -508,15 +512,15 @@ jobs:
         with:
           service: groups
           kind: incremental
-          backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
+          backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
           restore-args: '--site "${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }}" --folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
-          restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
+          restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
           log-dir: ${{ env.CORSO_LOG_DIR }}
           with-export: true

     ##########################################################################################################################################

     # Logging & Notifications

       # Upload the original go test output as an artifact for later review.
       - name: Upload test log
@@ -532,5 +536,5 @@ jobs:
         if: failure()
         uses: ./.github/actions/teams-message
         with:
-          msg: "[FAILED] Sanity Tests"
+          msg: "[CORSO FAILED] Sanity Tests"
           teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
```
**CHANGELOG.md** (19 changes)

```diff
@@ -6,12 +6,22 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

 ## [Unreleased] (beta)
+### Fixed
+- Handle the case where an email or event cannot be retrieved from Exchange due to an `ErrorCorruptData` error. Corso will skip over the item but report it in the backup summary.
+- Emails attached within other emails are now correctly exported
+- Gracefully handle email and post attachments without name when exporting to eml
+- Use correct timezone for event start and end times in Exchange exports (helps fix issues in relative recurrence patterns)
+- Fixed an issue causing exports dealing with calendar data to have high memory usage
+
+## [v0.19.0] (beta) - 2024-02-06

 ### Added
 - Events can now be exported from Exchange backups as .ics files.
 - Update repo init configuration to reduce the total number of GET requests sent
   to the object store when using corso. This affects repos that have many
   backups created in them per day the most.
+- Feature Preview: Corso now supports backup, export & restore of SharePoint lists. Lists backup can be initiated using `corso backup create sharepoint --site <site-url> --data lists`.
+- Group mailbox(aka conversations) backup and export support is now officially available. Group mailbox posts can be exported as `.eml` files.

 ### Fixed
 - Retry transient 400 "invalidRequest" errors during onedrive & sharepoint backup.
@@ -19,10 +29,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Groups and Teams backups no longer fail when a resource has no display name.
 - Contacts in-place restore failed if the restore destination was empty.
 - Link shares with external users are now backed up and restored as expected
+- Ensure persistent repo config is populated on repo init if repo init failed partway through during the previous init attempt.

 ### Changed
 - When running `backup details` on an empty backup returns a more helpful error message.
 - Backup List additionally shows the data category for each backup.
+- Remove hidden `--succeed-if-exists` flag for repo init. Repo init will now succeed without error if run on an existing repo with the same passphrase.

 ### Known issues
 - Backing up a group mailbox item may fail if it has a very large number of attachments (500+).
@@ -30,6 +42,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Exchange in-place restore may restore items in well-known folders to different folders if the user has well-known folder names change based on locale and has updated the locale since the backup was created.
 - In-place Exchange contacts restore will merge items in folders named "Contacts" or "contacts" into the default folder.
 - External users with access through shared links will not receive these links as they are not sent via email during restore.
+- Graph API has limited support for certain column types such as `location`, `hyperlink/picture`, and `metadata`. Restoring SharePoint list items containing these columns will result in differences compared to the original items.
+- SharePoint list item attachments are not available due to graph API limitations.
+- Group mailbox restore is not supported due to limited Graph API support for creating mailbox items.
+- Due to Graph API limitations, any group mailbox items present in subfolders other than Inbox aren't backed up.

 ## [v0.18.0] (beta) - 2024-01-02

@@ -486,7 +502,8 @@ this case, Corso will skip over the item but report this in the backup summary.
 - Miscellaneous
   - Optional usage statistics reporting ([RM-35](https://github.com/alcionai/corso-roadmap/issues/35))

-[Unreleased]: https://github.com/alcionai/corso/compare/v0.18.0...HEAD
+[Unreleased]: https://github.com/alcionai/corso/compare/v0.19.0...HEAD
+[v0.19.0]: https://github.com/alcionai/corso/compare/v0.18.0...v0.19.0
 [v0.18.0]: https://github.com/alcionai/corso/compare/v0.17.0...v0.18.0
 [v0.17.0]: https://github.com/alcionai/corso/compare/v0.16.0...v0.17.0
 [v0.16.0]: https://github.com/alcionai/corso/compare/v0.15.0...v0.16.0
```
```diff
@@ -1,3 +1,6 @@
+> [!NOTE]
+> **The Corso project is no longer actively maintained and has been archived**.
+
 <p align="center">
   <img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
 </p>
```
```diff
@@ -45,6 +45,7 @@ var serviceCommands = []func(cmd *cobra.Command) *cobra.Command{
 	addOneDriveCommands,
 	addSharePointCommands,
 	addGroupsCommands,
+	addTeamsChatsCommands,
 }

 // AddCommands attaches all `corso backup * *` commands to the parent.
```
```diff
@@ -18,6 +18,7 @@ import (
 	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/operations"
 	"github.com/alcionai/corso/src/internal/tester"
+	"github.com/alcionai/corso/src/internal/tester/its"
 	"github.com/alcionai/corso/src/internal/tester/tconfig"
 	"github.com/alcionai/corso/src/pkg/config"
 	"github.com/alcionai/corso/src/pkg/path"
@@ -39,7 +40,7 @@
 type NoBackupExchangeE2ESuite struct {
 	tester.Suite
 	dpnd dependencies
-	its  intgTesterSetup
+	m365 its.M365IntgTestSetup
 }

 func TestNoBackupExchangeE2ESuite(t *testing.T) {
@@ -54,7 +55,7 @@ func (suite *NoBackupExchangeE2ESuite) SetupSuite() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	suite.its = newIntegrationTesterSetup(t)
+	suite.m365 = its.GetM365(t)
 	suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
 }

@@ -93,7 +94,7 @@ func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_noBackups() {
 type BackupExchangeE2ESuite struct {
 	tester.Suite
 	dpnd dependencies
-	its  intgTesterSetup
+	m365 its.M365IntgTestSetup
 }

 func TestBackupExchangeE2ESuite(t *testing.T) {
@@ -108,7 +109,7 @@ func (suite *BackupExchangeE2ESuite) SetupSuite() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	suite.its = newIntegrationTesterSetup(t)
+	suite.m365 = its.GetM365(t)
 	suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
 }

@@ -138,7 +139,7 @@ func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.
 	cmd, ctx := buildExchangeBackupCmd(
 		ctx,
 		suite.dpnd.configFilePath,
-		suite.its.user.ID,
+		suite.m365.User.ID,
 		category.String(),
 		&recorder)

@@ -149,8 +150,11 @@
 	result := recorder.String()
 	t.Log("backup results", result)

-	// as an offhand check: the result should contain the m365 user id
-	assert.Contains(t, result, suite.its.user.ID)
+	// As an offhand check: the result should contain the m365 user's email.
+	assert.Contains(
+		t,
+		strings.ToLower(result),
+		strings.ToLower(suite.m365.User.Provider.Name()))
 }

 func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_ServiceNotEnabled_email() {
@@ -173,7 +177,7 @@ func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, categ
 	cmd, ctx := buildExchangeBackupCmd(
 		ctx,
 		suite.dpnd.configFilePath,
-		fmt.Sprintf("%s,%s", tconfig.UnlicensedM365UserID(suite.T()), suite.its.user.ID),
+		fmt.Sprintf("%s,%s", tconfig.UnlicensedM365UserID(suite.T()), suite.m365.User.ID),
 		category.String(),
 		&recorder)
 	err := cmd.ExecuteContext(ctx)
@@ -182,8 +186,11 @@
 	result := recorder.String()
 	t.Log("backup results", result)

-	// as an offhand check: the result should contain the m365 user id
-	assert.Contains(t, result, suite.its.user.ID)
+	// As an offhand check: the result should contain the m365 user's email.
+	assert.Contains(
+		t,
+		strings.ToLower(result),
+		strings.ToLower(suite.m365.User.Provider.Name()))
 }

 func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_email() {
@@ -242,7 +249,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAzureClientIDFl

 	cmd := cliTD.StubRootCmd(
 		"backup", "create", "exchange",
-		"--user", suite.its.user.ID,
+		"--user", suite.m365.User.ID,
 		"--azure-client-id", "invalid-value")
 	cli.BuildCommandTree(cmd)

@@ -266,7 +273,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_fromConfigFile() {

 	cmd := cliTD.StubRootCmd(
 		"backup", "create", "exchange",
-		"--user", suite.its.user.ID,
+		"--user", suite.m365.User.ID,
 		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
 	cli.BuildCommandTree(cmd)

@@ -281,8 +288,11 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_fromConfigFile() {
 	result := suite.dpnd.recorder.String()
 	t.Log("backup results", result)

-	// as an offhand check: the result should contain the m365 user id
-	assert.Contains(t, result, suite.its.user.ID)
+	// As an offhand check: the result should contain the m365 user's email.
+	assert.Contains(
+		t,
+		strings.ToLower(result),
+		strings.ToLower(suite.m365.User.Provider.Name()))
 }

 // AWS flags
@@ -296,7 +306,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAWSFlags() {

 	cmd := cliTD.StubRootCmd(
 		"backup", "create", "exchange",
-		"--user", suite.its.user.ID,
+		"--user", suite.m365.User.ID,
 		"--aws-access-key", "invalid-value",
 		"--aws-secret-access-key", "some-invalid-value")
 	cli.BuildCommandTree(cmd)
@@ -319,7 +329,7 @@ type PreparedBackupExchangeE2ESuite struct {
 	tester.Suite
 	dpnd      dependencies
 	backupOps map[path.CategoryType]string
-	its       intgTesterSetup
+	m365      its.M365IntgTestSetup
 }

 func TestPreparedBackupExchangeE2ESuite(t *testing.T) {
@@ -336,13 +346,13 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	suite.its = newIntegrationTesterSetup(t)
+	suite.m365 = its.GetM365(t)
 	suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
 	suite.backupOps = make(map[path.CategoryType]string)

 	var (
-		users = []string{suite.its.user.ID}
-		ins   = idname.NewCache(map[string]string{suite.its.user.ID: suite.its.user.ID})
+		users = []string{suite.m365.User.ID}
+		ins   = idname.NewCache(map[string]string{suite.m365.User.ID: suite.m365.User.ID})
 	)

 	for _, set := range []path.CategoryType{email, contacts, events} {
```
```diff
@@ -35,9 +35,12 @@ const (
 	groupsServiceCommandCreateExamples = `# Backup all Groups and Teams data for the Marketing group
 corso backup create groups --group Marketing

-# Backup only Teams conversations messages
+# Backup only Teams channel messages
 corso backup create groups --group Marketing --data messages

+# Backup only group mailbox posts
+corso backup create groups --group Marketing --data conversations
+
 # Backup all Groups and Teams data for all groups
 corso backup create groups --group '*'`

@@ -50,7 +53,10 @@ corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd

 # Explore Marketing messages posted after the start of 2022
 corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd \
-  --last-message-reply-after 2022-01-01T00:00:00`
+  --last-message-reply-after 2022-01-01T00:00:00
+
+# Explore group mailbox posts with conversation subject "hello world"
+corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world"`
 )

 // called by backup.go to map subcommands to provider-specific handling.
@@ -310,7 +316,7 @@ func groupsBackupCreateSelectors(
 	group, cats []string,
 ) *selectors.GroupsBackup {
 	if filters.PathContains(group).Compare(flags.Wildcard) {
-		return includeAllGroupWithCategories(ins, cats)
+		return includeAllGroupsWithCategories(ins, cats)
 	}

 	sel := selectors.NewGroupsBackup(slices.Clone(group))
@@ -318,6 +324,6 @@
 	return utils.AddGroupsCategories(sel, cats)
 }

-func includeAllGroupWithCategories(ins idname.Cacher, categories []string) *selectors.GroupsBackup {
+func includeAllGroupsWithCategories(ins idname.Cacher, categories []string) *selectors.GroupsBackup {
 	return utils.AddGroupsCategories(selectors.NewGroupsBackup(ins.IDs()), categories)
 }
```
```diff
@@ -20,6 +20,7 @@ import (
 	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/operations"
 	"github.com/alcionai/corso/src/internal/tester"
+	"github.com/alcionai/corso/src/internal/tester/its"
 	"github.com/alcionai/corso/src/internal/tester/tconfig"
 	"github.com/alcionai/corso/src/pkg/config"
 	"github.com/alcionai/corso/src/pkg/path"
@@ -35,7 +36,7 @@
 type NoBackupGroupsE2ESuite struct {
 	tester.Suite
 	dpnd dependencies
-	its  intgTesterSetup
+	m365 its.M365IntgTestSetup
 }

 func TestNoBackupGroupsE2ESuite(t *testing.T) {
@@ -50,7 +51,7 @@ func (suite *NoBackupGroupsE2ESuite) SetupSuite() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	suite.its = newIntegrationTesterSetup(t)
+	suite.m365 = its.GetM365(t)
 	suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
 }

@@ -89,7 +90,7 @@ func (suite *NoBackupGroupsE2ESuite) TestGroupsBackupListCmd_noBackups() {
 type BackupGroupsE2ESuite struct {
 	tester.Suite
 	dpnd dependencies
-	its  intgTesterSetup
+	m365 its.M365IntgTestSetup
 }

 func TestBackupGroupsE2ESuite(t *testing.T) {
@@ -104,7 +105,7 @@ func (suite *BackupGroupsE2ESuite) SetupSuite() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	suite.its = newIntegrationTesterSetup(t)
+	suite.m365 = its.GetM365(t)
 	suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
 }

@@ -113,6 +114,8 @@ func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() {
 }

 func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_conversations() {
+	// skip
+	suite.T().Skip("CorsoCITeam group mailbox backup is broken")
 	runGroupsBackupCategoryTest(suite, flags.DataConversations)
 }

@@ -134,7 +137,7 @@ func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category string) {
 	cmd, ctx := buildGroupsBackupCmd(
 		ctx,
 		suite.dpnd.configFilePath,
-		suite.its.group.ID,
+		suite.m365.Group.ID,
 		category,
 		&recorder)

@@ -202,7 +205,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()

 	cmd := cliTD.StubRootCmd(
 		"backup", "create", "groups",
-		"--group", suite.its.group.ID,
+		"--group", suite.m365.Group.ID,
 		"--azure-client-id", "invalid-value")
 	cli.BuildCommandTree(cmd)

@@ -216,6 +219,9 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()
 }

 func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
+	// Skip
+	suite.T().Skip("CorsoCITeam group mailbox backup is broken")
+
 	t := suite.T()
 	ctx, flush := tester.NewContext(t)
 	ctx = config.SetViper(ctx, suite.dpnd.vpr)
@@ -226,7 +232,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {

 	cmd := cliTD.StubRootCmd(
 		"backup", "create", "groups",
-		"--group", suite.its.group.ID,
+		"--group", suite.m365.Group.ID,
 		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
 	cli.BuildCommandTree(cmd)

@@ -250,7 +256,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAWSFlags() {

 	cmd := cliTD.StubRootCmd(
 		"backup", "create", "groups",
-		"--group", suite.its.group.ID,
+		"--group", suite.m365.Group.ID,
 		"--aws-access-key", "invalid-value",
 		"--aws-secret-access-key", "some-invalid-value")
 	cli.BuildCommandTree(cmd)
@@ -273,7 +279,7 @@ type PreparedBackupGroupsE2ESuite struct {
 	tester.Suite
 	dpnd      dependencies
 	backupOps map[path.CategoryType]string
-	its       intgTesterSetup
+	m365      its.M365IntgTestSetup
 }

 func TestPreparedBackupGroupsE2ESuite(t *testing.T) {
@@ -290,16 +296,19 @@ func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	suite.its = newIntegrationTesterSetup(t)
+	suite.m365 = its.GetM365(t)
 	suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
 	suite.backupOps = make(map[path.CategoryType]string)

 	var (
-		groups = []string{suite.its.group.ID}
-		ins    = idname.NewCache(map[string]string{suite.its.group.ID: suite.its.group.ID})
+		groups = []string{suite.m365.Group.ID}
+		ins    = idname.NewCache(map[string]string{suite.m365.Group.ID: suite.m365.Group.ID})
 		cats   = []path.CategoryType{
 			path.ChannelMessagesCategory,
-			path.ConversationPostsCategory,
+			// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
+			// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
+			// we go fix the group mailbox.
+			// path.ConversationPostsCategory,
 			path.LibrariesCategory,
 		}
 	)
@@ -453,6 +462,8 @@ func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages(
 }

 func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_conversations() {
+	// skip
+	suite.T().Skip("CorsoCITeam group mailbox backup is broken")
 	runGroupsDetailsCmdTest(suite, path.ConversationPostsCategory)
 }
```
```diff
@@ -14,141 +14,16 @@ import (
 	"github.com/alcionai/corso/src/cli/flags"
 	"github.com/alcionai/corso/src/cli/print"
 	cliTD "github.com/alcionai/corso/src/cli/testdata"
 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/internal/tester/tconfig"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/config"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/count"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/repository"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 	"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
 	"github.com/alcionai/corso/src/pkg/storage"
 	"github.com/alcionai/corso/src/pkg/storage/testdata"
 )

-// ---------------------------------------------------------------------------
-// Gockable client
-// ---------------------------------------------------------------------------
-
-// GockClient produces a new exchange api client that can be
-// mocked using gock.
-func gockClient(creds account.M365Config, counter *count.Bus) (api.Client, error) {
-	s, err := graph.NewGockService(creds, counter)
-	if err != nil {
-		return api.Client{}, err
-	}
-
-	li, err := graph.NewGockService(creds, counter, graph.NoTimeout())
-	if err != nil {
-		return api.Client{}, err
-	}
-
-	return api.Client{
-		Credentials: creds,
-		Stable:      s,
-		LargeItem:   li,
-	}, nil
-}
-
-// ---------------------------------------------------------------------------
-// Suite Setup
-// ---------------------------------------------------------------------------
-
-type ids struct {
-	ID                string
-	DriveID           string
-	DriveRootFolderID string
-}
-
-type intgTesterSetup struct {
-	acct   account.Account
-	ac     api.Client
-	gockAC api.Client
-	user   ids
-	site   ids
-	group  ids
-	team   ids
-}
-
-func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
-	its := intgTesterSetup{}
-
-	ctx, flush := tester.NewContext(t)
-	defer flush()
-
-	graph.InitializeConcurrencyLimiter(ctx, true, 4)
-
-	its.acct = tconfig.NewM365Account(t)
-	creds, err := its.acct.M365Config()
-	require.NoError(t, err, clues.ToCore(err))
-
-	its.ac, err = api.NewClient(
-		creds,
-		control.DefaultOptions(),
-		count.New())
-	require.NoError(t, err, clues.ToCore(err))
-
-	its.gockAC, err = gockClient(creds, count.New())
-	require.NoError(t, err, clues.ToCore(err))
-
-	// user drive
-
-	uids := ids{}
-
-	uids.ID = tconfig.M365UserID(t)
-
-	userDrive, err := its.ac.Users().GetDefaultDrive(ctx, uids.ID)
-	require.NoError(t, err, clues.ToCore(err))
-
-	uids.DriveID = ptr.Val(userDrive.GetId())
-
-	userDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, uids.DriveID)
-	require.NoError(t, err, clues.ToCore(err))
-
-	uids.DriveRootFolderID = ptr.Val(userDriveRootFolder.GetId())
-
-	its.user = uids
-
-	// site
-
-	sids := ids{}
-
-	sids.ID = tconfig.M365SiteID(t)
-
-	siteDrive, err := its.ac.Sites().GetDefaultDrive(ctx, sids.ID)
-	require.NoError(t, err, clues.ToCore(err))
-
-	sids.DriveID = ptr.Val(siteDrive.GetId())
-
-	siteDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, sids.DriveID)
-	require.NoError(t, err, clues.ToCore(err))
-
-	sids.DriveRootFolderID = ptr.Val(siteDriveRootFolder.GetId())
-
-	its.site = sids
-
-	// group
-
-	gids := ids{}
-
-	// use of the TeamID is intentional here, so that we are assured
-	// the group has full usage of the teams api.
-	gids.ID = tconfig.M365TeamID(t)
-
-	its.group = gids
-
-	// team
-
-	tids := ids{}
-	tids.ID = tconfig.M365TeamID(t)
-	its.team = tids
-
-	return its
-}
-
 type dependencies struct {
 	st   storage.Storage
 	repo repository.Repositoryer
```
@ -37,7 +37,11 @@ corso backup create sharepoint --site https://example.com/hr
|
||||
corso backup create sharepoint --site https://example.com/hr,https://example.com/team
|
||||
|
||||
# Backup all SharePoint data for all Sites
|
||||
corso backup create sharepoint --site '*'`
|
||||
corso backup create sharepoint --site '*'
|
||||
|
||||
# Backup all SharePoint list data for a Site
|
||||
corso backup create sharepoint --site https://example.com/hr --data lists
|
||||
`
|
||||
|
||||
sharePointServiceCommandDeleteExamples = `# Delete SharePoint backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
and 1234abcd-12ab-cd34-56de-1234abce
|
||||
@ -57,7 +61,26 @@ corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
# Explore all files within the document library "Work Documents"
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--library "Work Documents"
`

# Explore lists by their name(s)
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list "list-name-1,list-name-2"

# Explore lists created after a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-after 2024-01-01T12:23:34

# Explore lists created before a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-before 2024-01-01T12:23:34

# Explore lists modified before a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-before 2024-01-01T12:23:34

# Explore lists modified after a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-after 2024-01-01T12:23:34`
)
// called by backup.go to map subcommands to provider-specific handling.
@ -73,6 +96,8 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {

        flags.AddSiteFlag(c, true)
        flags.AddSiteIDFlag(c, true)
        // [TODO](hitesh) to add lists flag to invoke backup for lists
        // when explicit invoke is not required anymore
        flags.AddDataFlag(c, []string{flags.DataLibraries}, true)
        flags.AddGenericBackupFlags(c)

@ -20,6 +20,7 @@ import (
    "github.com/alcionai/corso/src/internal/common/idname"
    "github.com/alcionai/corso/src/internal/operations"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/internal/tester/its"
    "github.com/alcionai/corso/src/internal/tester/tconfig"
    "github.com/alcionai/corso/src/pkg/backup/details"
    "github.com/alcionai/corso/src/pkg/config"
@ -89,7 +90,7 @@ func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() {
type BackupSharepointE2ESuite struct {
    tester.Suite
    dpnd dependencies
    its  intgTesterSetup
    m365 its.M365IntgTestSetup
}

func TestBackupSharepointE2ESuite(t *testing.T) {
@ -104,7 +105,7 @@ func (suite *BackupSharepointE2ESuite) SetupSuite() {
    ctx, flush := tester.NewContext(t)
    defer flush()

    suite.its = newIntegrationTesterSetup(t)
    suite.m365 = its.GetM365(t)
    suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
}

@ -128,7 +129,7 @@ func runSharepointBackupCategoryTest(suite *BackupSharepointE2ESuite, category s
    cmd, ctx := buildSharepointBackupCmd(
        ctx,
        suite.dpnd.configFilePath,
        suite.its.site.ID,
        suite.m365.Site.ID,
        category,
        &recorder)

@ -187,7 +188,7 @@ type PreparedBackupSharepointE2ESuite struct {
    tester.Suite
    dpnd      dependencies
    backupOps map[path.CategoryType]string
    its       intgTesterSetup
    m365      its.M365IntgTestSetup
}

func TestPreparedBackupSharepointE2ESuite(t *testing.T) {
@ -204,13 +205,13 @@ func (suite *PreparedBackupSharepointE2ESuite) SetupSuite() {
    ctx, flush := tester.NewContext(t)
    defer flush()

    suite.its = newIntegrationTesterSetup(t)
    suite.m365 = its.GetM365(t)
    suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
    suite.backupOps = make(map[path.CategoryType]string)

    var (
        sites = []string{suite.its.site.ID}
        ins   = idname.NewCache(map[string]string{suite.its.site.ID: suite.its.site.ID})
        sites = []string{suite.m365.Site.ID}
        ins   = idname.NewCache(map[string]string{suite.m365.Site.ID: suite.m365.Site.ID})
        cats  = []path.CategoryType{
            path.ListsCategory,
        }
305
src/cli/backup/teamschats.go
Normal file
@ -0,0 +1,305 @@
package backup

import (
    "context"
    "fmt"

    "github.com/alcionai/clues"
    "github.com/spf13/cobra"
    "golang.org/x/exp/slices"

    "github.com/alcionai/corso/src/cli/flags"
    . "github.com/alcionai/corso/src/cli/print"
    "github.com/alcionai/corso/src/cli/utils"
    "github.com/alcionai/corso/src/internal/common/idname"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/filters"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/selectors"
    "github.com/alcionai/corso/src/pkg/services/m365"
)

// ------------------------------------------------------------------------------------------------
// setup and globals
// ------------------------------------------------------------------------------------------------

const (
    teamschatsServiceCommand                 = "chats"
    teamschatsServiceCommandCreateUseSuffix  = "--user <userEmail> | '" + flags.Wildcard + "'"
    teamschatsServiceCommandDeleteUseSuffix  = "--backups <backupId>"
    teamschatsServiceCommandDetailsUseSuffix = "--backup <backupId>"
)

const (
    teamschatsServiceCommandCreateExamples = `# Backup all chats with bob@company.hr
corso backup create chats --user bob@company.hr

# Backup all chats for all users
corso backup create chats --user '*'`

    teamschatsServiceCommandDeleteExamples = `# Delete chats backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
and 1234abcd-12ab-cd34-56de-1234abce
corso backup delete chats --backups 1234abcd-12ab-cd34-56de-1234abcd,1234abcd-12ab-cd34-56de-1234abce`

    teamschatsServiceCommandDetailsExamples = `# Explore chats in Bob's latest backup (1234abcd...)
corso backup details chats --backup 1234abcd-12ab-cd34-56de-1234abcd`
)

// called by backup.go to map subcommands to provider-specific handling.
func addTeamsChatsCommands(cmd *cobra.Command) *cobra.Command {
    var c *cobra.Command

    switch cmd.Use {
    case createCommand:
        c, _ = utils.AddCommand(cmd, teamschatsCreateCmd(), utils.MarkPreReleaseCommand())

        c.Use = c.Use + " " + teamschatsServiceCommandCreateUseSuffix
        c.Example = teamschatsServiceCommandCreateExamples

        // Flags addition ordering should follow the order we want them to appear in help and docs:
        flags.AddUserFlag(c)
        flags.AddDataFlag(c, []string{flags.DataChats}, false)
        flags.AddGenericBackupFlags(c)

    case listCommand:
        c, _ = utils.AddCommand(cmd, teamschatsListCmd(), utils.MarkPreReleaseCommand())

        flags.AddBackupIDFlag(c, false)
        flags.AddAllBackupListFlags(c)

    case detailsCommand:
        c, _ = utils.AddCommand(cmd, teamschatsDetailsCmd(), utils.MarkPreReleaseCommand())

        c.Use = c.Use + " " + teamschatsServiceCommandDetailsUseSuffix
        c.Example = teamschatsServiceCommandDetailsExamples

        flags.AddSkipReduceFlag(c)

        // Flags addition ordering should follow the order we want them to appear in help and docs:
        // More generic (ex: --user) and more frequently used flags take precedence.
        flags.AddBackupIDFlag(c, true)
        flags.AddTeamsChatsDetailsAndRestoreFlags(c)

    case deleteCommand:
        c, _ = utils.AddCommand(cmd, teamschatsDeleteCmd(), utils.MarkPreReleaseCommand())

        c.Use = c.Use + " " + teamschatsServiceCommandDeleteUseSuffix
        c.Example = teamschatsServiceCommandDeleteExamples

        flags.AddMultipleBackupIDsFlag(c, false)
        flags.AddBackupIDFlag(c, false)
    }

    return c
}

// ------------------------------------------------------------------------------------------------
// backup create
// ------------------------------------------------------------------------------------------------

// `corso backup create chats [<flag>...]`
func teamschatsCreateCmd() *cobra.Command {
    return &cobra.Command{
        Use:     teamschatsServiceCommand,
        Aliases: []string{teamsServiceCommand},
        Short:   "Backup M365 Chats data",
        RunE:    createTeamsChatsCmd,
        Args:    cobra.NoArgs,
    }
}

// processes a teamschats backup.
func createTeamsChatsCmd(cmd *cobra.Command, args []string) error {
    ctx := cmd.Context()

    if utils.HasNoFlagsAndShownHelp(cmd) {
        return nil
    }

    if flags.RunModeFV == flags.RunModeFlagTest {
        return nil
    }

    if err := validateTeamsChatsBackupCreateFlags(flags.UserFV, flags.CategoryDataFV); err != nil {
        return err
    }

    r, acct, err := utils.AccountConnectAndWriteRepoConfig(
        ctx,
        cmd,
        path.TeamsChatsService)
    if err != nil {
        return Only(ctx, err)
    }

    defer utils.CloseRepo(ctx, r)

    // TODO: log/print recoverable errors
    errs := fault.New(false)

    svcCli, err := m365.NewM365Client(ctx, *acct)
    if err != nil {
        return Only(ctx, clues.Stack(err))
    }

    ins, err := svcCli.AC.Users().GetAllIDsAndNames(ctx, errs)
    if err != nil {
        return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 teamschats"))
    }

    sel := teamschatsBackupCreateSelectors(ctx, ins, flags.UserFV, flags.CategoryDataFV)
    selectorSet := []selectors.Selector{}

    for _, discSel := range sel.SplitByResourceOwner(ins.IDs()) {
        selectorSet = append(selectorSet, discSel.Selector)
    }

    return genericCreateCommand(
        ctx,
        r,
        "Chats",
        selectorSet,
        ins)
}

// ------------------------------------------------------------------------------------------------
// backup list
// ------------------------------------------------------------------------------------------------

// `corso backup list teamschats [<flag>...]`
func teamschatsListCmd() *cobra.Command {
    return &cobra.Command{
        Use:   teamschatsServiceCommand,
        Short: "List the history of M365 Chats backups",
        RunE:  listTeamsChatsCmd,
        Args:  cobra.NoArgs,
    }
}

// lists the history of backup operations
func listTeamsChatsCmd(cmd *cobra.Command, args []string) error {
    return genericListCommand(cmd, flags.BackupIDFV, path.TeamsChatsService, args)
}

// ------------------------------------------------------------------------------------------------
// backup details
// ------------------------------------------------------------------------------------------------

// `corso backup details teamschats [<flag>...]`
func teamschatsDetailsCmd() *cobra.Command {
    return &cobra.Command{
        Use:   teamschatsServiceCommand,
        Short: "Shows the details of an M365 Chats backup",
        RunE:  detailsTeamsChatsCmd,
        Args:  cobra.NoArgs,
    }
}

// displays the details of a teamschats backup.
func detailsTeamsChatsCmd(cmd *cobra.Command, args []string) error {
    if utils.HasNoFlagsAndShownHelp(cmd) {
        return nil
    }

    if flags.RunModeFV == flags.RunModeFlagTest {
        return nil
    }

    return runDetailsTeamsChatsCmd(cmd)
}

func runDetailsTeamsChatsCmd(cmd *cobra.Command) error {
    ctx := cmd.Context()
    opts := utils.MakeTeamsChatsOpts(cmd)

    sel := utils.IncludeTeamsChatsRestoreDataSelectors(ctx, opts)
    sel.Configure(selectors.Config{OnlyMatchItemNames: true})
    utils.FilterTeamsChatsRestoreInfoSelectors(sel, opts)

    ds, err := genericDetailsCommand(cmd, flags.BackupIDFV, sel.Selector)
    if err != nil {
        return Only(ctx, err)
    }

    if len(ds.Entries) > 0 {
        ds.PrintEntries(ctx)
    } else {
        Info(ctx, selectors.ErrorNoMatchingItems)
    }

    return nil
}

// ------------------------------------------------------------------------------------------------
// backup delete
// ------------------------------------------------------------------------------------------------

// `corso backup delete teamschats [<flag>...]`
func teamschatsDeleteCmd() *cobra.Command {
    return &cobra.Command{
        Use:   teamschatsServiceCommand,
        Short: "Delete backed-up M365 Chats data",
        RunE:  deleteTeamsChatsCmd,
        Args:  cobra.NoArgs,
    }
}

// deletes a teamschats backup.
func deleteTeamsChatsCmd(cmd *cobra.Command, args []string) error {
    backupIDValue := []string{}

    if len(flags.BackupIDsFV) > 0 {
        backupIDValue = flags.BackupIDsFV
    } else if len(flags.BackupIDFV) > 0 {
        backupIDValue = append(backupIDValue, flags.BackupIDFV)
    } else {
        return clues.New("either --backup or --backups flag is required")
    }

    return genericDeleteCommand(cmd, path.TeamsChatsService, "TeamsChats", backupIDValue, args)
}

// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------

func validateTeamsChatsBackupCreateFlags(teamschats, cats []string) error {
    if len(teamschats) == 0 {
        return clues.New(
            "requires one or more --" +
                flags.UserFN + " ids, or the wildcard --" +
                flags.UserFN + " *")
    }

    msg := fmt.Sprintf(
        " is an unrecognized data type; only %s is supported",
        flags.DataChats)

    allowedCats := utils.TeamsChatsAllowedCategories()

    for _, d := range cats {
        if _, ok := allowedCats[d]; !ok {
            return clues.New(d + msg)
        }
    }

    return nil
}

func teamschatsBackupCreateSelectors(
    ctx context.Context,
    ins idname.Cacher,
    users, cats []string,
) *selectors.TeamsChatsBackup {
    if filters.PathContains(users).Compare(flags.Wildcard) {
        return includeAllTeamsChatsWithCategories(ins, cats)
    }

    sel := selectors.NewTeamsChatsBackup(slices.Clone(users))

    return utils.AddTeamsChatsCategories(sel, cats)
}

func includeAllTeamsChatsWithCategories(ins idname.Cacher, categories []string) *selectors.TeamsChatsBackup {
    return utils.AddTeamsChatsCategories(selectors.NewTeamsChatsBackup(ins.IDs()), categories)
}
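The two selector helpers above split on a single decision: a wildcard --user expands to every ID in the idname cache, while explicit values are cloned into a targeted selector. A minimal sketch of that call flow, where the cache contents and user IDs are illustrative assumptions, not values from this changeset:

// sketch: wildcard vs. explicit selector construction (hypothetical IDs).
ins := idname.NewCache(map[string]string{
    "user-id-1": "user-id-1", // hypothetical
    "user-id-2": "user-id-2", // hypothetical
})

// wildcard input: every cached user is included, with the chats category applied.
all := teamschatsBackupCreateSelectors(ctx, ins, []string{flags.Wildcard}, []string{flags.DataChats})

// explicit input: only the named user is included.
one := teamschatsBackupCreateSelectors(ctx, ins, []string{"user-id-1"}, []string{flags.DataChats})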
636
src/cli/backup/teamschats_e2e_test.go
Normal file
@ -0,0 +1,636 @@
package backup_test

import (
    "context"
    "fmt"
    "strings"
    "testing"

    "github.com/alcionai/clues"
    "github.com/google/uuid"
    "github.com/spf13/cobra"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
    "github.com/stretchr/testify/suite"

    "github.com/alcionai/corso/src/cli"
    "github.com/alcionai/corso/src/cli/flags"
    "github.com/alcionai/corso/src/cli/print"
    cliTD "github.com/alcionai/corso/src/cli/testdata"
    "github.com/alcionai/corso/src/internal/common/idname"
    "github.com/alcionai/corso/src/internal/operations"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/internal/tester/its"
    "github.com/alcionai/corso/src/internal/tester/tconfig"
    "github.com/alcionai/corso/src/pkg/config"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/selectors"
    selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
    storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)

// ---------------------------------------------------------------------------
// tests that require no existing backups
// ---------------------------------------------------------------------------

type NoBackupTeamsChatsE2ESuite struct {
    tester.Suite
    dpnd dependencies
    m365 its.M365IntgTestSetup
}

func TestNoBackupTeamsChatsE2ESuite(t *testing.T) {
    suite.Run(t, &NoBackupTeamsChatsE2ESuite{Suite: tester.NewE2ESuite(
        t,
        [][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}

func (suite *NoBackupTeamsChatsE2ESuite) SetupSuite() {
    t := suite.T()
    t.Skip("not fully implemented")

    ctx, flush := tester.NewContext(t)
    defer flush()

    suite.m365 = its.GetM365(t)
    suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
}

func (suite *NoBackupTeamsChatsE2ESuite) TestTeamsChatsBackupListCmd_noBackups() {
    t := suite.T()
    ctx, flush := tester.NewContext(t)
    ctx = config.SetViper(ctx, suite.dpnd.vpr)

    defer flush()

    suite.dpnd.recorder.Reset()

    cmd := cliTD.StubRootCmd(
        "backup", "list", "chats",
        "--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
    cli.BuildCommandTree(cmd)

    cmd.SetErr(&suite.dpnd.recorder)

    ctx = print.SetRootCmd(ctx, cmd)

    // run the command
    err := cmd.ExecuteContext(ctx)
    require.NoError(t, err, clues.ToCore(err))

    result := suite.dpnd.recorder.String()

    // as an offhand check: the result should report that no backups exist
    assert.True(t, strings.HasSuffix(result, "No backups available\n"))
}

// ---------------------------------------------------------------------------
// tests with no prior backup
// ---------------------------------------------------------------------------

type BackupTeamsChatsE2ESuite struct {
    tester.Suite
    dpnd dependencies
    m365 its.M365IntgTestSetup
}

func TestBackupTeamsChatsE2ESuite(t *testing.T) {
    suite.Run(t, &BackupTeamsChatsE2ESuite{Suite: tester.NewE2ESuite(
        t,
        [][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}

func (suite *BackupTeamsChatsE2ESuite) SetupSuite() {
    t := suite.T()
    t.Skip("not fully implemented")

    ctx, flush := tester.NewContext(t)
    defer flush()

    suite.m365 = its.GetM365(t)
    suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
}

func (suite *BackupTeamsChatsE2ESuite) TestTeamsChatsBackupCmd_chats() {
    runTeamsChatsBackupCategoryTest(suite, flags.DataChats)
}

func runTeamsChatsBackupCategoryTest(suite *BackupTeamsChatsE2ESuite, category string) {
    recorder := strings.Builder{}
    recorder.Reset()

    t := suite.T()

    ctx, flush := tester.NewContext(t)
    ctx = config.SetViper(ctx, suite.dpnd.vpr)

    defer flush()

    cmd, ctx := buildTeamsChatsBackupCmd(
        ctx,
        suite.dpnd.configFilePath,
        suite.m365.User.ID,
        category,
        &recorder)

    // run the command
    err := cmd.ExecuteContext(ctx)
    require.NoError(t, err, clues.ToCore(err))

    result := recorder.String()
    t.Log("backup results", result)
}

func (suite *BackupTeamsChatsE2ESuite) TestTeamsChatsBackupCmd_teamschatNotFound_chats() {
    runTeamsChatsBackupTeamsChatNotFoundTest(suite, flags.DataChats)
}

func runTeamsChatsBackupTeamsChatNotFoundTest(suite *BackupTeamsChatsE2ESuite, category string) {
    recorder := strings.Builder{}
    recorder.Reset()

    t := suite.T()

    ctx, flush := tester.NewContext(t)
    ctx = config.SetViper(ctx, suite.dpnd.vpr)

    defer flush()

    cmd, ctx := buildTeamsChatsBackupCmd(
        ctx,
        suite.dpnd.configFilePath,
        "foo@not-there.com",
        category,
        &recorder)

    // run the command
    err := cmd.ExecuteContext(ctx)
    require.Error(t, err, clues.ToCore(err))
    assert.Contains(
        t,
        err.Error(),
        "not found",
        "error missing user not found")
    assert.NotContains(t, err.Error(), "runtime error", "panic happened")

    t.Logf("backup error message: %s", err.Error())

    result := recorder.String()
    t.Log("backup results", result)
}

func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_badAzureClientIDFlag() {
    t := suite.T()
    ctx, flush := tester.NewContext(t)

    defer flush()

    suite.dpnd.recorder.Reset()

    cmd := cliTD.StubRootCmd(
        "backup", "create", "chats",
        "--teamschat", suite.m365.User.ID,
        "--azure-client-id", "invalid-value")
    cli.BuildCommandTree(cmd)

    cmd.SetErr(&suite.dpnd.recorder)

    ctx = print.SetRootCmd(ctx, cmd)

    // run the command
    err := cmd.ExecuteContext(ctx)
    require.Error(t, err, clues.ToCore(err))
}

func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_fromConfigFile() {
    t := suite.T()
    ctx, flush := tester.NewContext(t)
    ctx = config.SetViper(ctx, suite.dpnd.vpr)

    defer flush()

    suite.dpnd.recorder.Reset()

    cmd := cliTD.StubRootCmd(
        "backup", "create", "chats",
        "--teamschat", suite.m365.User.ID,
        "--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
    cli.BuildCommandTree(cmd)

    cmd.SetOut(&suite.dpnd.recorder)

    ctx = print.SetRootCmd(ctx, cmd)

    // run the command
    err := cmd.ExecuteContext(ctx)
    require.NoError(t, err, clues.ToCore(err))
}

// AWS flags
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_badAWSFlags() {
    t := suite.T()
    ctx, flush := tester.NewContext(t)

    defer flush()

    suite.dpnd.recorder.Reset()

    cmd := cliTD.StubRootCmd(
        "backup", "create", "chats",
        "--teamschat", suite.m365.User.ID,
        "--aws-access-key", "invalid-value",
        "--aws-secret-access-key", "some-invalid-value")
    cli.BuildCommandTree(cmd)

    cmd.SetOut(&suite.dpnd.recorder)

    ctx = print.SetRootCmd(ctx, cmd)

    // run the command
    err := cmd.ExecuteContext(ctx)
    // since invalid aws creds are explicitly set, should see a failure
    require.Error(t, err, clues.ToCore(err))
}

// ---------------------------------------------------------------------------
// tests prepared with a previous backup
// ---------------------------------------------------------------------------

type PreparedBackupTeamsChatsE2ESuite struct {
    tester.Suite
    dpnd      dependencies
    backupOps map[path.CategoryType]string
    m365      its.M365IntgTestSetup
}

func TestPreparedBackupTeamsChatsE2ESuite(t *testing.T) {
    suite.Run(t, &PreparedBackupTeamsChatsE2ESuite{
        Suite: tester.NewE2ESuite(
            t,
            [][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
    })
}

func (suite *PreparedBackupTeamsChatsE2ESuite) SetupSuite() {
    t := suite.T()
    t.Skip("not fully implemented")

    ctx, flush := tester.NewContext(t)
    defer flush()

    suite.m365 = its.GetM365(t)
    suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
    suite.backupOps = make(map[path.CategoryType]string)

    var (
        teamschats = []string{suite.m365.User.ID}
        ins        = idname.NewCache(map[string]string{suite.m365.User.ID: suite.m365.User.ID})
        cats       = []path.CategoryType{
            path.ChatsCategory,
        }
    )

    for _, set := range cats {
        var (
            sel    = selectors.NewTeamsChatsBackup(teamschats)
            scopes []selectors.TeamsChatsScope
        )

        switch set {
        case path.ChatsCategory:
            scopes = selTD.TeamsChatsBackupChatScope(sel)
        }

        sel.Include(scopes)

        bop, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
        require.NoError(t, err, clues.ToCore(err))

        err = bop.Run(ctx)
        require.NoError(t, err, clues.ToCore(err))

        bIDs := string(bop.Results.BackupID)

        // sanity check, ensure we can find the backup and its details immediately
        b, err := suite.dpnd.repo.Backup(ctx, string(bop.Results.BackupID))
        require.NoError(t, err, "retrieving recent backup by ID")
        require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")

        _, b, errs := suite.dpnd.repo.GetBackupDetails(ctx, bIDs)
        require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
        require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
        require.Equal(t, bIDs, string(b.ID), "repo details matches results id")

        suite.backupOps[set] = string(b.ID)
    }
}

func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_chats() {
    runTeamsChatsListCmdTest(suite, path.ChatsCategory)
}

func runTeamsChatsListCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
    suite.dpnd.recorder.Reset()

    t := suite.T()

    ctx, flush := tester.NewContext(t)
    ctx = config.SetViper(ctx, suite.dpnd.vpr)

    defer flush()

    cmd := cliTD.StubRootCmd(
        "backup", "list", "chats",
        "--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
    cli.BuildCommandTree(cmd)
    cmd.SetOut(&suite.dpnd.recorder)

    ctx = print.SetRootCmd(ctx, cmd)

    // run the command
    err := cmd.ExecuteContext(ctx)
    require.NoError(t, err, clues.ToCore(err))

    // compare the output
    result := suite.dpnd.recorder.String()
    assert.Contains(t, result, suite.backupOps[category])
}

func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_singleID_chats() {
    runTeamsChatsListSingleCmdTest(suite, path.ChatsCategory)
}

func runTeamsChatsListSingleCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
    suite.dpnd.recorder.Reset()

    t := suite.T()

    ctx, flush := tester.NewContext(t)
    ctx = config.SetViper(ctx, suite.dpnd.vpr)

    defer flush()

    bID := suite.backupOps[category]

    cmd := cliTD.StubRootCmd(
        "backup", "list", "chats",
        "--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
        "--backup", string(bID))
    cli.BuildCommandTree(cmd)

    cmd.SetOut(&suite.dpnd.recorder)

    ctx = print.SetRootCmd(ctx, cmd)

    // run the command
    err := cmd.ExecuteContext(ctx)
    require.NoError(t, err, clues.ToCore(err))

    // compare the output
    result := suite.dpnd.recorder.String()
    assert.Contains(t, result, bID)
}

func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_badID() {
    t := suite.T()

    ctx, flush := tester.NewContext(t)
    ctx = config.SetViper(ctx, suite.dpnd.vpr)

    defer flush()

    cmd := cliTD.StubRootCmd(
        "backup", "list", "chats",
        "--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
        "--backup", "smarfs")
    cli.BuildCommandTree(cmd)

    ctx = print.SetRootCmd(ctx, cmd)

    // run the command
    err := cmd.ExecuteContext(ctx)
    require.Error(t, err, clues.ToCore(err))
}

func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsDetailsCmd_chats() {
    runTeamsChatsDetailsCmdTest(suite, path.ChatsCategory)
}

func runTeamsChatsDetailsCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
    suite.dpnd.recorder.Reset()

    t := suite.T()

    ctx, flush := tester.NewContext(t)
    ctx = config.SetViper(ctx, suite.dpnd.vpr)

    defer flush()

    bID := suite.backupOps[category]

    // fetch the details from the repo first
    deets, _, errs := suite.dpnd.repo.GetBackupDetails(ctx, string(bID))
    require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
    require.Empty(t, errs.Recovered())

    cmd := cliTD.StubRootCmd(
        "backup", "details", "chats",
        "--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
        "--"+flags.BackupFN, string(bID))
    cli.BuildCommandTree(cmd)
    cmd.SetOut(&suite.dpnd.recorder)

    ctx = print.SetRootCmd(ctx, cmd)

    // run the command
    err := cmd.ExecuteContext(ctx)
    require.NoError(t, err, clues.ToCore(err))

    // compare the output
    result := suite.dpnd.recorder.String()

    i := 0
    foundFolders := 0

    for _, ent := range deets.Entries {
        // Skip folders as they don't mean anything to the end user.
        if ent.Folder != nil {
            foundFolders++
            continue
        }

        suite.Run(fmt.Sprintf("detail %d", i), func() {
            assert.Contains(suite.T(), result, ent.ShortRef)
        })

        i++
    }

    // We only backup the default folder for each category so there should be at
    // least that folder (we don't make details entries for prefix folders).
    assert.GreaterOrEqual(t, foundFolders, 1)
}

// ---------------------------------------------------------------------------
// tests for deleting backups
// ---------------------------------------------------------------------------

type BackupDeleteTeamsChatsE2ESuite struct {
    tester.Suite
    dpnd      dependencies
    backupOps [3]operations.BackupOperation
}

func TestBackupDeleteTeamsChatsE2ESuite(t *testing.T) {
    suite.Run(t, &BackupDeleteTeamsChatsE2ESuite{
        Suite: tester.NewE2ESuite(
            t,
            [][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
    })
}

func (suite *BackupDeleteTeamsChatsE2ESuite) SetupSuite() {
    t := suite.T()
    t.Skip("not fully implemented")

    ctx, flush := tester.NewContext(t)
    defer flush()

    suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)

    m365TeamsChatID := tconfig.M365TeamID(t)
    teamschats := []string{m365TeamsChatID}

    // some tests require an existing backup
    sel := selectors.NewTeamsChatsBackup(teamschats)
    sel.Include(selTD.TeamsChatsBackupChatScope(sel))

    for i := 0; i < cap(suite.backupOps); i++ {
        backupOp, err := suite.dpnd.repo.NewBackup(ctx, sel.Selector)
        require.NoError(t, err, clues.ToCore(err))

        suite.backupOps[i] = backupOp

        err = suite.backupOps[i].Run(ctx)
        require.NoError(t, err, clues.ToCore(err))
    }
}

func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd() {
    t := suite.T()

    ctx, flush := tester.NewContext(t)
    ctx = config.SetViper(ctx, suite.dpnd.vpr)

    defer flush()

    cmd := cliTD.StubRootCmd(
        "backup", "delete", "chats",
        "--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
        "--"+flags.BackupIDsFN,
        fmt.Sprintf("%s,%s",
            string(suite.backupOps[0].Results.BackupID),
            string(suite.backupOps[1].Results.BackupID)))
    cli.BuildCommandTree(cmd)

    // run the command
    err := cmd.ExecuteContext(ctx)
    require.NoError(t, err, clues.ToCore(err))

    // a follow-up details call should fail, due to the backup ID being deleted
    cmd = cliTD.StubRootCmd(
        "backup", "details", "chats",
        "--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
        "--backups", string(suite.backupOps[0].Results.BackupID))
    cli.BuildCommandTree(cmd)

    err = cmd.ExecuteContext(ctx)
    require.Error(t, err, clues.ToCore(err))
}

func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_SingleID() {
    t := suite.T()

    ctx, flush := tester.NewContext(t)
    ctx = config.SetViper(ctx, suite.dpnd.vpr)

    defer flush()

    cmd := cliTD.StubRootCmd(
        "backup", "delete", "chats",
        "--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
        "--"+flags.BackupFN,
        string(suite.backupOps[2].Results.BackupID))
    cli.BuildCommandTree(cmd)

    // run the command
    err := cmd.ExecuteContext(ctx)
    require.NoError(t, err, clues.ToCore(err))

    // a follow-up details call should fail, due to the backup ID being deleted
    cmd = cliTD.StubRootCmd(
        "backup", "details", "chats",
        "--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
        "--backup", string(suite.backupOps[2].Results.BackupID))
    cli.BuildCommandTree(cmd)

    err = cmd.ExecuteContext(ctx)
    require.Error(t, err, clues.ToCore(err))
}

func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_UnknownID() {
    t := suite.T()

    ctx, flush := tester.NewContext(t)
    ctx = config.SetViper(ctx, suite.dpnd.vpr)

    defer flush()

    cmd := cliTD.StubRootCmd(
        "backup", "delete", "chats",
        "--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
        "--"+flags.BackupIDsFN, uuid.NewString())
    cli.BuildCommandTree(cmd)

    // unknown backupIDs should error since the modelStore can't find the backup
    err := cmd.ExecuteContext(ctx)
    require.Error(t, err, clues.ToCore(err))
}

func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_NoBackupID() {
    t := suite.T()

    ctx, flush := tester.NewContext(t)
    ctx = config.SetViper(ctx, suite.dpnd.vpr)

    defer flush()

    cmd := cliTD.StubRootCmd(
        "backup", "delete", "chats",
        "--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
    cli.BuildCommandTree(cmd)

    // empty backupIDs should error since no data provided
    err := cmd.ExecuteContext(ctx)
    require.Error(t, err, clues.ToCore(err))
}

// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------

func buildTeamsChatsBackupCmd(
    ctx context.Context,
    configFile, resource, category string,
    recorder *strings.Builder,
) (*cobra.Command, context.Context) {
    cmd := cliTD.StubRootCmd(
        "backup", "create", "chats",
        "--"+flags.ConfigFileFN, configFile,
        "--"+flags.UserFN, resource,
        "--"+flags.CategoryDataFN, category)
    cli.BuildCommandTree(cmd)
    cmd.SetOut(recorder)

    return cmd, print.SetRootCmd(ctx, cmd)
}
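Every suite above reuses one capture pattern: stub a root command, redirect its writer into a strings.Builder, execute with a context, then assert on the recorded text. Condensed into a single hedged sketch (helper names as used in this file; t, ctx, and configFilePath assumed in scope):

recorder := strings.Builder{}

cmd := cliTD.StubRootCmd(
    "backup", "list", "chats",
    "--"+flags.ConfigFileFN, configFilePath) // configFilePath: a test fixture
cli.BuildCommandTree(cmd)
cmd.SetOut(&recorder) // capture stdout instead of printing to the terminal

ctx = print.SetRootCmd(ctx, cmd)

err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))

result := recorder.String() // assertions run against this string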
248
src/cli/backup/teamschats_test.go
Normal file
@ -0,0 +1,248 @@
package backup

import (
    "testing"

    "github.com/alcionai/clues"
    "github.com/spf13/cobra"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
    "github.com/stretchr/testify/suite"

    "github.com/alcionai/corso/src/cli/flags"
    flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
    cliTD "github.com/alcionai/corso/src/cli/testdata"
    "github.com/alcionai/corso/src/cli/utils"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/pkg/control"
)

type TeamsChatsUnitSuite struct {
    tester.Suite
}

func TestTeamsChatsUnitSuite(t *testing.T) {
    suite.Run(t, &TeamsChatsUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *TeamsChatsUnitSuite) TestAddTeamsChatsCommands() {
    expectUse := teamschatsServiceCommand

    table := []struct {
        name        string
        use         string
        expectUse   string
        expectShort string
        expectRunE  func(*cobra.Command, []string) error
    }{
        {
            name:        "create teamschats",
            use:         createCommand,
            expectUse:   expectUse + " " + teamschatsServiceCommandCreateUseSuffix,
            expectShort: teamschatsCreateCmd().Short,
            expectRunE:  createTeamsChatsCmd,
        },
        {
            name:        "list teamschats",
            use:         listCommand,
            expectUse:   expectUse,
            expectShort: teamschatsListCmd().Short,
            expectRunE:  listTeamsChatsCmd,
        },
        {
            name:        "details teamschats",
            use:         detailsCommand,
            expectUse:   expectUse + " " + teamschatsServiceCommandDetailsUseSuffix,
            expectShort: teamschatsDetailsCmd().Short,
            expectRunE:  detailsTeamsChatsCmd,
        },
        {
            name:        "delete teamschats",
            use:         deleteCommand,
            expectUse:   expectUse + " " + teamschatsServiceCommandDeleteUseSuffix,
            expectShort: teamschatsDeleteCmd().Short,
            expectRunE:  deleteTeamsChatsCmd,
        },
    }
    for _, test := range table {
        suite.Run(test.name, func() {
            t := suite.T()

            cmd := &cobra.Command{Use: test.use}

            c := addTeamsChatsCommands(cmd)
            require.NotNil(t, c)

            cmds := cmd.Commands()
            require.Len(t, cmds, 1)

            child := cmds[0]
            assert.Equal(t, test.expectUse, child.Use)
            assert.Equal(t, test.expectShort, child.Short)
            tester.AreSameFunc(t, test.expectRunE, child.RunE)
        })
    }
}

func (suite *TeamsChatsUnitSuite) TestValidateTeamsChatsBackupCreateFlags() {
    table := []struct {
        name   string
        cats   []string
        expect assert.ErrorAssertionFunc
    }{
        {
            name:   "none",
            cats:   []string{},
            expect: assert.NoError,
        },
        {
            name:   "chats",
            cats:   []string{flags.DataChats},
            expect: assert.NoError,
        },
        {
            name: "all allowed",
            cats: []string{
                flags.DataChats,
            },
            expect: assert.NoError,
        },
        {
            name:   "bad inputs",
            cats:   []string{"foo"},
            expect: assert.Error,
        },
    }
    for _, test := range table {
        suite.Run(test.name, func() {
            err := validateTeamsChatsBackupCreateFlags([]string{"*"}, test.cats)
            test.expect(suite.T(), err, clues.ToCore(err))
        })
    }
}

func (suite *TeamsChatsUnitSuite) TestBackupCreateFlags() {
    t := suite.T()

    cmd := cliTD.SetUpCmdHasFlags(
        t,
        &cobra.Command{Use: createCommand},
        addTeamsChatsCommands,
        []cliTD.UseCobraCommandFn{
            flags.AddAllProviderFlags,
            flags.AddAllStorageFlags,
        },
        flagsTD.WithFlags(
            teamschatsServiceCommand,
            []string{
                "--" + flags.RunModeFN, flags.RunModeFlagTest,
                "--" + flags.UserFN, flagsTD.FlgInputs(flagsTD.UsersInput),
                "--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.TeamsChatsCategoryDataInput),
            },
            flagsTD.PreparedGenericBackupFlags(),
            flagsTD.PreparedProviderFlags(),
            flagsTD.PreparedStorageFlags()))

    opts := utils.MakeTeamsChatsOpts(cmd)
    co := utils.Control()
    backupOpts := utils.ParseBackupOptions()

    // TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
    // restore flags are switched over too and we no longer parse flags beyond
    // connection info into control.Options.
    assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
    assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
    assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)

    assert.Equal(t, control.FailFast, co.FailureHandling)
    assert.True(t, co.ToggleFeatures.DisableIncrementals)
    assert.True(t, co.ToggleFeatures.ForceItemDataDownload)

    assert.ElementsMatch(t, flagsTD.UsersInput, opts.Users)
    flagsTD.AssertGenericBackupFlags(t, cmd)
    flagsTD.AssertProviderFlags(t, cmd)
    flagsTD.AssertStorageFlags(t, cmd)
}

func (suite *TeamsChatsUnitSuite) TestBackupListFlags() {
    t := suite.T()

    cmd := cliTD.SetUpCmdHasFlags(
        t,
        &cobra.Command{Use: listCommand},
        addTeamsChatsCommands,
        []cliTD.UseCobraCommandFn{
            flags.AddAllProviderFlags,
            flags.AddAllStorageFlags,
        },
        flagsTD.WithFlags(
            teamschatsServiceCommand,
            []string{
                "--" + flags.RunModeFN, flags.RunModeFlagTest,
                "--" + flags.BackupFN, flagsTD.BackupInput,
            },
            flagsTD.PreparedBackupListFlags(),
            flagsTD.PreparedProviderFlags(),
            flagsTD.PreparedStorageFlags()))

    assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
    flagsTD.AssertBackupListFlags(t, cmd)
    flagsTD.AssertProviderFlags(t, cmd)
    flagsTD.AssertStorageFlags(t, cmd)
}

func (suite *TeamsChatsUnitSuite) TestBackupDetailsFlags() {
    t := suite.T()

    cmd := cliTD.SetUpCmdHasFlags(
        t,
        &cobra.Command{Use: detailsCommand},
        addTeamsChatsCommands,
        []cliTD.UseCobraCommandFn{
            flags.AddAllProviderFlags,
            flags.AddAllStorageFlags,
        },
        flagsTD.WithFlags(
            teamschatsServiceCommand,
            []string{
                "--" + flags.RunModeFN, flags.RunModeFlagTest,
                "--" + flags.BackupFN, flagsTD.BackupInput,
                "--" + flags.SkipReduceFN,
            },
            flagsTD.PreparedTeamsChatsFlags(),
            flagsTD.PreparedProviderFlags(),
            flagsTD.PreparedStorageFlags()))

    co := utils.Control()

    assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
    assert.True(t, co.SkipReduce)
    flagsTD.AssertProviderFlags(t, cmd)
    flagsTD.AssertStorageFlags(t, cmd)
    flagsTD.AssertTeamsChatsFlags(t, cmd)
}

func (suite *TeamsChatsUnitSuite) TestBackupDeleteFlags() {
    t := suite.T()

    cmd := cliTD.SetUpCmdHasFlags(
        t,
        &cobra.Command{Use: deleteCommand},
        addTeamsChatsCommands,
        []cliTD.UseCobraCommandFn{
            flags.AddAllProviderFlags,
            flags.AddAllStorageFlags,
        },
        flagsTD.WithFlags(
            teamschatsServiceCommand,
            []string{
                "--" + flags.RunModeFN, flags.RunModeFlagTest,
                "--" + flags.BackupFN, flagsTD.BackupInput,
            },
            flagsTD.PreparedProviderFlags(),
            flagsTD.PreparedStorageFlags()))

    assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
    flagsTD.AssertProviderFlags(t, cmd)
    flagsTD.AssertStorageFlags(t, cmd)
}
@ -7,7 +7,6 @@ import (
    "github.com/alcionai/corso/src/cli/flags"
    "github.com/alcionai/corso/src/cli/utils"
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/selectors"
)

// called by export.go to map subcommands to provider-specific handling.
@ -51,7 +50,13 @@ corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd

# Export all files and folders in folder "Documents/Finance Reports" that were created before 2020 to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd \
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00

# Export all posts from a conversation with topic "hello world" from group mailbox's last backup to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world"

# Export post with ID 98765abcdef from a conversation from group mailbox's last backup to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world" --post 98765abcdef`
)

// `corso export groups [<flag>...] <destination>`
@ -93,10 +98,6 @@ func exportGroupsCmd(cmd *cobra.Command, args []string) error {
    sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)
    utils.FilterGroupsRestoreInfoSelectors(sel, opts)

    // TODO(pandeyabs): Exclude conversations from export since they are not
    // supported yet. https://github.com/alcionai/corso/issues/4822
    sel.Exclude(sel.Conversation(selectors.Any()))

    acceptedGroupsFormatTypes := []string{
        string(control.DefaultFormat),
        string(control.JSONFormat),
@ -45,7 +45,27 @@ corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \

# Export all files in the "Documents" library to the current directory.
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--library Documents --folder "Display Templates/Style Sheets" .`
--library Documents --folder "Display Templates/Style Sheets" .

# Export lists by their name(s)
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list "list-name-1,list-name-2" .

# Export lists created after a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-after 2024-01-01T12:23:34 .

# Export lists created before a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-before 2024-01-01T12:23:34 .

# Export lists modified before a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-before 2024-01-01T12:23:34 .

# Export lists modified after a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-after 2024-01-01T12:23:34 .`
)

// `corso export sharepoint [<flag>...] <destination>`
@ -28,13 +28,6 @@ func AddFilesystemFlags(cmd *cobra.Command) {
        "",
        "path to local or network storage")
    cobra.CheckErr(cmd.MarkFlagRequired(FilesystemPathFN))

    fs.BoolVar(
        &SucceedIfExistsFV,
        SucceedIfExistsFN,
        false,
        "Exit with success if the repo has already been initialized.")
    cobra.CheckErr(fs.MarkHidden("succeed-if-exists"))
}

func FilesystemFlagOverrides(cmd *cobra.Command) map[string]string {
@ -12,9 +12,8 @@ const (
    AWSSessionTokenFN = "aws-session-token"

    // Corso Flags
    PassphraseFN      = "passphrase"
    NewPassphraseFN   = "new-passphrase"
    SucceedIfExistsFN = "succeed-if-exists"
    PassphraseFN    = "passphrase"
    NewPassphraseFN = "new-passphrase"
)

var (
@ -25,7 +24,6 @@ var (
    AWSSessionTokenFV string
    PassphraseFV      string
    NewPhasephraseFV  string
    SucceedIfExistsFV bool
)

// AddMultipleBackupIDsFlag adds the --backups flag.
@ -38,11 +38,6 @@ func AddS3BucketFlags(cmd *cobra.Command) {
    fs.StringVar(&EndpointFV, EndpointFN, "", "S3 service endpoint.")
    fs.BoolVar(&DoNotUseTLSFV, DoNotUseTLSFN, false, "Disable TLS (HTTPS)")
    fs.BoolVar(&DoNotVerifyTLSFV, DoNotVerifyTLSFN, false, "Disable TLS (HTTPS) certificate verification.")

    // In general, we don't want to expose this flag to users and have them mistake it
    // for a broad-scale idempotency solution. We can un-hide it later if the need arises.
    fs.BoolVar(&SucceedIfExistsFV, SucceedIfExistsFN, false, "Exit with success if the repo has already been initialized.")
    cobra.CheckErr(fs.MarkHidden("succeed-if-exists"))
}

func S3FlagOverrides(cmd *cobra.Command) map[string]string {
@ -18,7 +18,6 @@ const (
    ListModifiedBeforeFN = "list-modified-before"
    ListCreatedAfterFN   = "list-created-after"
    ListCreatedBeforeFN  = "list-created-before"
    AllowListsRestoreFN  = "allow-lists-restore"

    PageFolderFN = "page-folder"
    PageFN       = "page"
@ -35,7 +34,6 @@ var (
    ListModifiedBeforeFV string
    ListCreatedAfterFV   string
    ListCreatedBeforeFV  string
    AllowListsRestoreFV  bool

    PageFolderFV []string
    PageFV       []string
@ -101,11 +99,6 @@ func AddSharePointDetailsAndRestoreFlags(cmd *cobra.Command) {
        &ListCreatedBeforeFV,
        ListCreatedBeforeFN, "",
        "Select lists created before this datetime.")
    fs.BoolVar(
        &AllowListsRestoreFV,
        AllowListsRestoreFN, false,
        "enables lists restore if provided")
    cobra.CheckErr(fs.MarkHidden(AllowListsRestoreFN))

    // pages
13
src/cli/flags/teamschats.go
Normal file
@ -0,0 +1,13 @@
package flags

import (
    "github.com/spf13/cobra"
)

const (
    DataChats = "chats"
)

func AddTeamsChatsDetailsAndRestoreFlags(cmd *cobra.Command) {
    // TODO: add details flags
}
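AddTeamsChatsDetailsAndRestoreFlags is deliberately a stub; the commented-out testdata later in this changeset hints at the filter flags to come. A speculative sketch of how one such flag could slot in, with every identifier below hypothetical until the filters actually land:

// hypothetical future shape; none of these names exist in this changeset.
const ChatCreatedAfterFN = "chat-created-after"

var ChatCreatedAfterFV string

func AddTeamsChatsDetailsAndRestoreFlags(cmd *cobra.Command) {
    fs := cmd.Flags()
    fs.StringVar(
        &ChatCreatedAfterFV,
        ChatCreatedAfterFN, "",
        "Select chats created after this datetime.")
}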
1
src/cli/flags/testdata/flags.go
vendored
@ -21,6 +21,7 @@ var (
    ExchangeCategoryDataInput   = []string{"email", "events", "contacts"}
    SharepointCategoryDataInput = []string{"files", "lists", "pages"}
    GroupsCategoryDataInput     = []string{"files", "lists", "pages", "messages"}
    TeamsChatsCategoryDataInput = []string{"chats"}

    ChannelInput = []string{"channel1", "channel2"}
    MessageInput = []string{"message1", "message2"}
25
src/cli/flags/testdata/teamschats.go
vendored
Normal file
@ -0,0 +1,25 @@
package testdata

import (
    "testing"

    "github.com/spf13/cobra"
)

func PreparedTeamsChatsFlags() []string {
    return []string{
        // FIXME: populate when adding filters
        // "--" + flags.ChatCreatedAfterFN, ChatCreatedAfterInput,
        // "--" + flags.ChatCreatedBeforeFN, ChatCreatedBeforeInput,
        // "--" + flags.ChatLastMessageAfterFN, ChatLastMessageAfterInput,
        // "--" + flags.ChatLastMessageBeforeFN, ChatLastMessageBeforeInput,
    }
}

func AssertTeamsChatsFlags(t *testing.T, cmd *cobra.Command) {
    // FIXME: populate when adding filters
    // assert.Equal(t, ChatCreatedAfterInput, flags.ChatCreatedAfterFV)
    // assert.Equal(t, ChatCreatedBeforeInput, flags.ChatCreatedBeforeFV)
    // assert.Equal(t, ChatLastMessageAfterInput, flags.ChatLastMessageAfterFV)
    // assert.Equal(t, ChatLastMessageBeforeInput, flags.ChatLastMessageBeforeFV)
}
@ -133,7 +133,7 @@ func Pretty(ctx context.Context, a any) {
        return
    }

    printPrettyJSON(getRootCmd(ctx).ErrOrStderr(), a)
    printPrettyJSON(ctx, getRootCmd(ctx).ErrOrStderr(), a)
}

// PrettyJSON prettifies and prints the value.
@ -143,7 +143,7 @@ func PrettyJSON(ctx context.Context, p minimumPrintabler) {
        return
    }

    outputJSON(getRootCmd(ctx).ErrOrStderr(), p, outputAsJSONDebug)
    outputJSON(ctx, getRootCmd(ctx).ErrOrStderr(), p, outputAsJSONDebug)
}

// out is the testable core of exported print funcs
@ -193,56 +193,56 @@ type minimumPrintabler interface {

// Item prints the printable, according to the caller's requested format.
func Item(ctx context.Context, p Printable) {
    printItem(getRootCmd(ctx).OutOrStdout(), p)
    printItem(ctx, getRootCmd(ctx).OutOrStdout(), p)
}

// print prints the printable items,
// according to the caller's requested format.
func printItem(w io.Writer, p Printable) {
func printItem(ctx context.Context, w io.Writer, p Printable) {
    if outputAsJSON || outputAsJSONDebug {
        outputJSON(w, p, outputAsJSONDebug)
        outputJSON(ctx, w, p, outputAsJSONDebug)
        return
    }

    outputTable(w, []Printable{p})
    outputTable(ctx, w, []Printable{p})
}

// ItemProperties prints the printable either as in a single line or a json
// The difference between this and Item is that this one does not print the ID
func ItemProperties(ctx context.Context, p Printable) {
    printItemProperties(getRootCmd(ctx).OutOrStdout(), p)
    printItemProperties(ctx, getRootCmd(ctx).OutOrStdout(), p)
}

// print prints the printable items,
// according to the caller's requested format.
func printItemProperties(w io.Writer, p Printable) {
func printItemProperties(ctx context.Context, w io.Writer, p Printable) {
    if outputAsJSON || outputAsJSONDebug {
        outputJSON(w, p, outputAsJSONDebug)
        outputJSON(ctx, w, p, outputAsJSONDebug)
        return
    }

    outputOneLine(w, []Printable{p})
    outputOneLine(ctx, w, []Printable{p})
}

// All prints the slice of printable items,
// according to the caller's requested format.
func All(ctx context.Context, ps ...Printable) {
    printAll(getRootCmd(ctx).OutOrStdout(), ps)
    printAll(ctx, getRootCmd(ctx).OutOrStdout(), ps)
}

// printAll prints the slice of printable items,
// according to the caller's requested format.
func printAll(w io.Writer, ps []Printable) {
func printAll(ctx context.Context, w io.Writer, ps []Printable) {
    if len(ps) == 0 {
        return
    }

    if outputAsJSON || outputAsJSONDebug {
        outputJSONArr(w, ps, outputAsJSONDebug)
        outputJSONArr(ctx, w, ps, outputAsJSONDebug)
        return
    }

    outputTable(w, ps)
    outputTable(ctx, w, ps)
}

// ------------------------------------------------------------------------------------------
@ -252,11 +252,11 @@ func printAll(w io.Writer, ps []Printable) {

// Table writes the printables in a tabular format. Takes headers from
// the 0th printable only.
func Table(ctx context.Context, ps []Printable) {
    outputTable(getRootCmd(ctx).OutOrStdout(), ps)
    outputTable(ctx, getRootCmd(ctx).OutOrStdout(), ps)
}

// output to stdout the list of printable structs in a table
func outputTable(w io.Writer, ps []Printable) {
func outputTable(ctx context.Context, w io.Writer, ps []Printable) {
    t := table.Table{
        Headers: ps[0].Headers(false),
        Rows:    [][]string{},
@ -266,6 +266,9 @@ func outputTable(w io.Writer, ps []Printable) {
|
||||
t.Rows = append(t.Rows, p.Values(false))
|
||||
}
|
||||
|
||||
// observe bars needs to be flushed before printing
|
||||
observe.Flush(ctx)
|
||||
|
||||
_ = t.WriteTable(
|
||||
w,
|
||||
&table.Config{
|
||||
@ -279,20 +282,20 @@ func outputTable(w io.Writer, ps []Printable) {
|
||||
// JSON
|
||||
// ------------------------------------------------------------------------------------------
|
||||
|
||||
func outputJSON(w io.Writer, p minimumPrintabler, debug bool) {
|
||||
func outputJSON(ctx context.Context, w io.Writer, p minimumPrintabler, debug bool) {
|
||||
if debug {
|
||||
printJSON(w, p)
|
||||
printJSON(ctx, w, p)
|
||||
return
|
||||
}
|
||||
|
||||
if debug {
|
||||
printJSON(w, p)
|
||||
printJSON(ctx, w, p)
|
||||
} else {
|
||||
printJSON(w, p.MinimumPrintable())
|
||||
printJSON(ctx, w, p.MinimumPrintable())
|
||||
}
|
||||
}
|
||||
|
||||
func outputJSONArr(w io.Writer, ps []Printable, debug bool) {
|
||||
func outputJSONArr(ctx context.Context, w io.Writer, ps []Printable, debug bool) {
|
||||
sl := make([]any, 0, len(ps))
|
||||
|
||||
for _, p := range ps {
|
||||
@ -303,11 +306,14 @@ func outputJSONArr(w io.Writer, ps []Printable, debug bool) {
|
||||
}
|
||||
}
|
||||
|
||||
printJSON(w, sl)
|
||||
printJSON(ctx, w, sl)
|
||||
}
|
||||
|
||||
// output to stdout the list of printable structs as json.
|
||||
func printJSON(w io.Writer, a any) {
|
||||
func printJSON(ctx context.Context, w io.Writer, a any) {
|
||||
// observe bars needs to be flushed before printing
|
||||
observe.Flush(ctx)
|
||||
|
||||
bs, err := json.Marshal(a)
|
||||
if err != nil {
|
||||
fmt.Fprintf(w, "error formatting results to json: %v\n", err)
|
||||
@ -318,7 +324,10 @@ func printJSON(w io.Writer, a any) {
|
||||
}
|
||||
|
||||
// output to stdout the list of printable structs as prettified json.
|
||||
func printPrettyJSON(w io.Writer, a any) {
|
||||
func printPrettyJSON(ctx context.Context, w io.Writer, a any) {
|
||||
// observe bars needs to be flushed before printing
|
||||
observe.Flush(ctx)
|
||||
|
||||
bs, err := json.MarshalIndent(a, "", " ")
|
||||
if err != nil {
|
||||
fmt.Fprintf(w, "error formatting results to json: %v\n", err)
|
||||
@ -334,7 +343,10 @@ func printPrettyJSON(w io.Writer, a any) {
|
||||
|
||||
// Output in the following format:
|
||||
// Bytes Uploaded: 401 kB | Items Uploaded: 59 | Items Skipped: 0 | Errors: 0
|
||||
func outputOneLine(w io.Writer, ps []Printable) {
|
||||
func outputOneLine(ctx context.Context, w io.Writer, ps []Printable) {
|
||||
// observe bars needs to be flushed before printing
|
||||
observe.Flush(ctx)
|
||||
|
||||
headers := ps[0].Headers(true)
|
||||
rows := [][]string{}
|
||||
|
||||
|
||||
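The change above threads a context.Context into every CLI output helper for one reason: the progress-bar observer renders to the same terminal, so its bars must be flushed before any table, JSON, or one-line output is written. A minimal sketch of the pattern, with illustrative names (`flush` stands in for the package's observe.Flush; this is not the repository's actual code):

    package main

    import (
        "context"
        "fmt"
        "io"
        "os"
    )

    // flush stands in for observe.Flush: block until all progress bars
    // have finished rendering so they don't interleave with output.
    func flush(ctx context.Context) { _ = ctx /* wait for bars to complete */ }

    // printRows mirrors the pattern above: flush first, then write.
    func printRows(ctx context.Context, w io.Writer, rows []string) {
        flush(ctx)

        for _, r := range rows {
            fmt.Fprintln(w, r)
        }
    }

    func main() {
        printRows(context.Background(), os.Stdout, []string{"a", "b"})
    }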
@@ -2,7 +2,6 @@ package repo

import (
    "github.com/alcionai/clues"
-   "github.com/pkg/errors"
    "github.com/spf13/cobra"

    "github.com/alcionai/corso/src/cli/flags"
@@ -110,10 +109,6 @@ func initFilesystemCmd(cmd *cobra.Command, args []string) error {
    ric := repository.InitConfig{RetentionOpts: retentionOpts}

    if err = r.Initialize(ctx, ric); err != nil {
-       if flags.SucceedIfExistsFV && errors.Is(err, repository.ErrorRepoAlreadyExists) {
-           return nil
-       }
-
        return Only(ctx, clues.Stack(ErrInitializingRepo, err))
    }
@@ -5,7 +5,6 @@ import (
    "testing"

    "github.com/alcionai/clues"
-   "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
    "github.com/stretchr/testify/suite"

@@ -82,9 +81,9 @@ func (suite *FilesystemE2ESuite) TestInitFilesystemCmd() {
            err = cmd.ExecuteContext(ctx)
            require.NoError(t, err, clues.ToCore(err))

-           // a second initialization should result in an error
+           // noop
            err = cmd.ExecuteContext(ctx)
-           assert.ErrorIs(t, err, repository.ErrorRepoAlreadyExists, clues.ToCore(err))
+           require.NoError(t, err, clues.ToCore(err))
        })
    }
}
@@ -4,7 +4,6 @@ import (
    "strings"

    "github.com/alcionai/clues"
-   "github.com/pkg/errors"
    "github.com/spf13/cobra"

    "github.com/alcionai/corso/src/cli/flags"
@@ -132,10 +131,6 @@ func initS3Cmd(cmd *cobra.Command, args []string) error {
    ric := repository.InitConfig{RetentionOpts: retentionOpts}

    if err = r.Initialize(ctx, ric); err != nil {
-       if flags.SucceedIfExistsFV && errors.Is(err, repository.ErrorRepoAlreadyExists) {
-           return nil
-       }
-
        return Only(ctx, clues.Stack(ErrInitializingRepo, err))
    }
@@ -89,9 +89,9 @@ func (suite *S3E2ESuite) TestInitS3Cmd() {
            err = cmd.ExecuteContext(ctx)
            require.NoError(t, err, clues.ToCore(err))

-           // a second initialization should result in an error
+           // noop
            err = cmd.ExecuteContext(ctx)
-           assert.ErrorIs(t, err, repository.ErrorRepoAlreadyExists, clues.ToCore(err))
+           require.NoError(t, err, clues.ToCore(err))
        })
    }
}
@@ -116,8 +116,7 @@ func (suite *S3E2ESuite) TestInitMultipleTimes() {
        "repo", "init", "s3",
        "--"+flags.ConfigFileFN, configFP,
        "--bucket", cfg.Bucket,
-       "--prefix", cfg.Prefix,
-       "--succeed-if-exists")
+       "--prefix", cfg.Prefix)
    cli.BuildCommandTree(cmd)

    // run the command
@@ -6,7 +6,6 @@ import (
    "github.com/alcionai/corso/src/cli/flags"
    "github.com/alcionai/corso/src/cli/utils"
    "github.com/alcionai/corso/src/pkg/dttm"
-   "github.com/alcionai/corso/src/pkg/selectors"
)

// called by restore.go to map subcommands to provider-specific handling.
@@ -51,7 +50,27 @@ corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \

# Restore all files in the "Documents" library.
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
-    --library Documents --folder "Display Templates/Style Sheets" `
+    --library Documents --folder "Display Templates/Style Sheets"
+
+# Restore lists by their name(s)
+corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
+    --list "list-name-1,list-name-2"
+
+# Restore lists created after a given time
+corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
+    --list-created-after 2024-01-01T12:23:34
+
+# Restore lists created before a given time
+corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
+    --list-created-before 2024-01-01T12:23:34
+
+# Restore lists modified before a given time
+corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
+    --list-modified-before 2024-01-01T12:23:34
+
+# Restore lists modified after a given time
+corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
+    --list-modified-after 2024-01-01T12:23:34`
)

// `corso restore sharepoint [<flag>...]`
@@ -87,11 +106,6 @@ func restoreSharePointCmd(cmd *cobra.Command, args []string) error {
    sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
    utils.FilterSharePointRestoreInfoSelectors(sel, opts)

-   if !opts.AllowListsRestore {
-       // Exclude lists from restore since they are not supported yet.
-       sel.Exclude(sel.Lists(selectors.Any()))
-   }
-
    return runRestore(
        ctx,
        cmd,
@@ -103,7 +103,6 @@ func (suite *FlagUnitSuite) TestAddS3BucketFlags() {
            assert.Equal(t, "prefix1", flags.PrefixFV, flags.PrefixFN)
            assert.True(t, flags.DoNotUseTLSFV, flags.DoNotUseTLSFN)
            assert.True(t, flags.DoNotVerifyTLSFV, flags.DoNotVerifyTLSFN)
-           assert.True(t, flags.SucceedIfExistsFV, flags.SucceedIfExistsFN)
        },
    }

@@ -116,7 +115,6 @@ func (suite *FlagUnitSuite) TestAddS3BucketFlags() {
        "--" + flags.PrefixFN, "prefix1",
        "--" + flags.DoNotUseTLSFN,
        "--" + flags.DoNotVerifyTLSFN,
-       "--" + flags.SucceedIfExistsFN,
    })

    err := cmd.Execute()
@@ -130,7 +128,6 @@ func (suite *FlagUnitSuite) TestFilesystemFlags() {
        Use: "test",
        Run: func(cmd *cobra.Command, args []string) {
            assert.Equal(t, "/tmp/test", flags.FilesystemPathFV, flags.FilesystemPathFN)
-           assert.True(t, flags.SucceedIfExistsFV, flags.SucceedIfExistsFN)
            assert.Equal(t, "tenantID", flags.AzureClientTenantFV, flags.AzureClientTenantFN)
            assert.Equal(t, "clientID", flags.AzureClientIDFV, flags.AzureClientIDFN)
            assert.Equal(t, "secret", flags.AzureClientSecretFV, flags.AzureClientSecretFN)
@@ -143,7 +140,6 @@ func (suite *FlagUnitSuite) TestFilesystemFlags() {
    cmd.SetArgs([]string{
        "test",
        "--" + flags.FilesystemPathFN, "/tmp/test",
-       "--" + flags.SucceedIfExistsFN,
        "--" + flags.AzureClientIDFN, "clientID",
        "--" + flags.AzureClientTenantFN, "tenantID",
        "--" + flags.AzureClientSecretFN, "secret",
@@ -266,9 +266,14 @@ func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *se
        opts.Conversations = selectors.Any()
    }

+   // if no post is specified, select all posts in the conversation
+   if convPosts == 0 {
+       opts.Posts = selectors.Any()
+   }
+
    // if no post is specified, only select conversations;
-   // otherwise, look for channel/message pairs
-   if chanMsgs == 0 {
+   // otherwise, look for conv/post pairs
+   if convs == 0 {
        sel.Include(sel.Conversation(opts.Conversations))
    } else {
        sel.Include(sel.ConversationPosts(opts.Conversations, opts.Posts))
@@ -30,7 +30,6 @@ type SharePointOpts struct {
    ListModifiedBefore string
    ListCreatedBefore  string
    ListCreatedAfter   string
-   AllowListsRestore  bool

    PageFolder []string
    Page       []string
@@ -82,7 +81,6 @@ func MakeSharePointOpts(cmd *cobra.Command) SharePointOpts {
        ListModifiedBefore: flags.ListModifiedBeforeFV,
        ListCreatedAfter:   flags.ListCreatedAfterFV,
        ListCreatedBefore:  flags.ListCreatedBeforeFV,
-       AllowListsRestore:  flags.AllowListsRestoreFV,

        Page:       flags.PageFV,
        PageFolder: flags.PageFolderFV,
@@ -106,7 +104,9 @@ func SharePointAllowedCategories() map[string]struct{} {

func AddCategories(sel *selectors.SharePointBackup, cats []string) *selectors.SharePointBackup {
    if len(cats) == 0 {
-       sel.Include(sel.LibraryFolders(selectors.Any()), sel.Lists(selectors.Any()))
+       // [TODO](hitesh) to enable lists without being invoked explicitly via --data flag
+       // sel.Include(sel.LibraryFolders(selectors.Any()), sel.Lists(selectors.Any()))
+       sel.Include(sel.LibraryFolders(selectors.Any()))
    }

    for _, d := range cats {

@@ -420,7 +420,7 @@ func (suite *SharePointUtilsSuite) TestAddSharepointCategories() {
        {
            name:           "none",
            cats:           []string{},
-           expectScopeLen: 2,
+           expectScopeLen: 1,
        },
        {
            name: "libraries",
101 src/cli/utils/teamschats.go (new file)
@@ -0,0 +1,101 @@
package utils

import (
    "context"

    "github.com/alcionai/clues"
    "github.com/spf13/cobra"

    "github.com/alcionai/corso/src/cli/flags"
    "github.com/alcionai/corso/src/pkg/selectors"
)

type TeamsChatsOpts struct {
    Users []string

    ExportCfg ExportCfgOpts

    Populated flags.PopulatedFlags
}

func TeamsChatsAllowedCategories() map[string]struct{} {
    return map[string]struct{}{
        flags.DataChats: {},
    }
}

func AddTeamsChatsCategories(sel *selectors.TeamsChatsBackup, cats []string) *selectors.TeamsChatsBackup {
    if len(cats) == 0 {
        sel.Include(sel.AllData())
    }

    for _, d := range cats {
        switch d {
        case flags.DataChats:
            sel.Include(sel.Chats(selectors.Any()))
        }
    }

    return sel
}

func MakeTeamsChatsOpts(cmd *cobra.Command) TeamsChatsOpts {
    return TeamsChatsOpts{
        Users: flags.UserFV,

        ExportCfg: makeExportCfgOpts(cmd),

        // populated contains the list of flags that appear in the
        // command, according to pflags. Use this to differentiate
        // between an "empty" and a "missing" value.
        Populated: flags.GetPopulatedFlags(cmd),
    }
}

// ValidateTeamsChatsRestoreFlags checks common flags for correctness and interdependencies
func ValidateTeamsChatsRestoreFlags(backupID string, opts TeamsChatsOpts, isRestore bool) error {
    if len(backupID) == 0 {
        return clues.New("a backup ID is required")
    }

    // restore isn't currently supported
    if isRestore {
        return clues.New("restore not supported")
    }

    return nil
}

// AddTeamsChatsFilter adds the scope of the provided values to the selector's
// filter set
func AddTeamsChatsFilter(
    sel *selectors.TeamsChatsRestore,
    v string,
    f func(string) []selectors.TeamsChatsScope,
) {
    if len(v) == 0 {
        return
    }

    sel.Filter(f(v))
}

// IncludeTeamsChatsRestoreDataSelectors builds the common data-selector
// inclusions for teamschats commands.
func IncludeTeamsChatsRestoreDataSelectors(ctx context.Context, opts TeamsChatsOpts) *selectors.TeamsChatsRestore {
    users := opts.Users

    if len(opts.Users) == 0 {
        users = selectors.Any()
    }

    return selectors.NewTeamsChatsRestore(users)
}

// FilterTeamsChatsRestoreInfoSelectors builds the common info-selector filters.
func FilterTeamsChatsRestoreInfoSelectors(
    sel *selectors.TeamsChatsRestore,
    opts TeamsChatsOpts,
) {
    // TODO: populate when adding filters
}
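A sketch of how a teamschats subcommand might wire these helpers together, modeled on how the other services' command handlers consume their utils counterparts; the handler name, its backupID parameter, and the hand-off to an export runner are illustrative, not part of this file:

    package cli

    import (
        "github.com/spf13/cobra"

        "github.com/alcionai/corso/src/cli/utils"
    )

    // runTeamsChatsExport is a hypothetical handler sketch.
    func runTeamsChatsExport(cmd *cobra.Command, backupID string) error {
        ctx := cmd.Context()
        opts := utils.MakeTeamsChatsOpts(cmd)

        // isRestore=false: export is permitted, restore currently is not.
        if err := utils.ValidateTeamsChatsRestoreFlags(backupID, opts, false); err != nil {
            return err
        }

        sel := utils.IncludeTeamsChatsRestoreDataSelectors(ctx, opts)
        utils.FilterTeamsChatsRestoreInfoSelectors(sel, opts)

        // sel now scopes work to the requested users' chats;
        // an export runner would take over from here.
        _ = sel

        return nil
    }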
@@ -6,12 +6,6 @@ Param (
    [Parameter(Mandatory = $False, HelpMessage = "Site for which to delete folders in SharePoint")]
    [String]$Site,

-   [Parameter(Mandatory = $False, HelpMessage = "Exchange Admin email")]
-   [String]$AdminUser = $ENV:M365_TENANT_ADMIN_USER,
-
-   [Parameter(Mandatory = $False, HelpMessage = "Exchange Admin password")]
-   [String]$AdminPwd = $ENV:M365_TENANT_ADMIN_PASSWORD,
-
    [Parameter(Mandatory = $False, HelpMessage = "Document library root. Can add multiple comma-separated values")]
    [String[]]$LibraryNameList = @(),

@@ -22,7 +16,16 @@ Param (
    [String[]]$FolderPrefixPurgeList,

    [Parameter(Mandatory = $False, HelpMessage = "Delete document libraries with this prefix")]
-   [String[]]$LibraryPrefixDeleteList = @()
+   [String[]]$LibraryPrefixDeleteList = @(),
+
+   [Parameter(Mandatory = $False, HelpMessage = "Tenant domain")]
+   [String]$TenantDomain = $ENV:TENANT_DOMAIN,
+
+   [Parameter(Mandatory = $False, HelpMessage = "Azure ClientId")]
+   [String]$ClientId = $ENV:AZURE_CLIENT_ID,
+
+   [Parameter(Mandatory = $False, HelpMessage = "Azure AppCert")]
+   [String]$AppCert = $ENV:AZURE_APP_CERT
)

Set-StrictMode -Version 2.0
@@ -108,6 +111,7 @@ function Purge-Library {
    $foldersToPurge = @()
    $folders = Get-PnPFolderItem -FolderSiteRelativeUrl $LibraryName -ItemType Folder

+   Write-Host "`nFolders: $folders"
    foreach ($f in $folders) {
        $folderName = $f.Name
        $createTime = Get-TimestampFromFolderName -Folder $f
@@ -209,8 +213,8 @@ if (-not (Get-Module -ListAvailable -Name PnP.PowerShell)) {
}


-if ([string]::IsNullOrEmpty($AdminUser) -or [string]::IsNullOrEmpty($AdminPwd)) {
-   Write-Host "Admin user name and password required as arguments or environment variables."
+if ([string]::IsNullOrEmpty($ClientId) -or [string]::IsNullOrEmpty($AppCert)) {
+   Write-Host "ClientId and AppCert required as arguments or environment variables."
    Exit
}

@@ -251,12 +255,8 @@ else {
    Exit
}


-$password = convertto-securestring -String $AdminPwd -AsPlainText -Force
-$cred = New-Object -TypeName System.Management.Automation.PSCredential -ArgumentList $AdminUser, $password
-
Write-Host "`nAuthenticating and connecting to $SiteUrl"
-Connect-PnPOnline -Url $siteUrl -Credential $cred
+Connect-PnPOnline -Url $siteUrl -ClientId $ClientId -CertificateBase64Encoded $AppCert -Tenant $TenantDomain
Write-Host "Connected to $siteUrl`n"

# ensure that there are no unexpanded entries in the list of parameters
@@ -5,6 +5,7 @@ import (

    "github.com/alcionai/clues"
    "github.com/microsoftgraph/msgraph-sdk-go/models"
+   "golang.org/x/exp/slices"

    "github.com/alcionai/corso/src/cmd/sanity_test/common"
    "github.com/alcionai/corso/src/internal/common/ptr"
@@ -20,19 +21,20 @@ const (
// this increases the chance that we'll run into a race collision with
// the cleanup script. Sometimes that's okay (deleting old data that
// isn't scrutinized in the test), other times it's not. We mark whether
-// that's okay to do or not by specifying the folder that's being
-// scrutinized for the test. Any errors within that folder should cause
-// a fatal exit. Errors outside of that folder get ignored.
+// that's okay to do or not by specifying the folders being
+// scrutinized for the test. Any errors within those folders should cause
+// a fatal exit. Errors outside of those folders get ignored.
//
-// since we're using folder names, requireNoErrorsWithinFolderName will
+// since we're using folder names, mustPopulateFolders will
// work best (ie: have the fewest collisions/side-effects) if the folder
-// name is very specific. Standard sanity tests should include timestamps,
+// names are very specific. Standard sanity tests should include timestamps,
// which should help ensure that. Be warned if you try to use it with
// a more generic name: unintended effects could occur.
func populateSanitree(
    ctx context.Context,
    ac api.Client,
-   driveID, requireNoErrorsWithinFolderName string,
+   driveID string,
+   mustPopulateFolders []string,
) *common.Sanitree[models.DriveItemable, models.DriveItemable] {
    common.Infof(ctx, "building sanitree for drive: %s", driveID)

@@ -56,8 +58,8 @@ func populateSanitree(
        ac,
        driveID,
        stree.Name+"/",
-       requireNoErrorsWithinFolderName,
-       rootName == requireNoErrorsWithinFolderName,
+       mustPopulateFolders,
+       slices.Contains(mustPopulateFolders, rootName),
        stree)

    return stree
@@ -66,7 +68,9 @@ func populateSanitree(
func recursivelyBuildTree(
    ctx context.Context,
    ac api.Client,
-   driveID, location, requireNoErrorsWithinFolderName string,
+   driveID string,
+   location string,
+   mustPopulateFolders []string,
    isChildOfFolderRequiringNoErrors bool,
    stree *common.Sanitree[models.DriveItemable, models.DriveItemable],
) {
@@ -80,9 +84,9 @@ func recursivelyBuildTree(

        common.Infof(
            ctx,
-           "ignoring error getting children in directory %q because it is not within directory %q\nerror: %s\n%+v",
+           "ignoring error getting children in directory %q because it is not within directory set %v\nerror: %s\n%+v",
            location,
-           requireNoErrorsWithinFolderName,
+           mustPopulateFolders,
            err.Error(),
            clues.ToCore(err))

@@ -99,11 +103,12 @@ func recursivelyBuildTree(
        // currently we don't restore blank folders.
        // skip permission check for empty folders
        if ptr.Val(driveItem.GetFolder().GetChildCount()) == 0 {
-           common.Infof(ctx, "skipped empty folder: %s/%s", location, itemName)
+           common.Infof(ctx, "skipped empty folder: %s%s", location, itemName)
            continue
        }

-       cannotAllowErrors := isChildOfFolderRequiringNoErrors || itemName == requireNoErrorsWithinFolderName
+       cannotAllowErrors := isChildOfFolderRequiringNoErrors ||
+           slices.Contains(mustPopulateFolders, itemName)

        branch := &common.Sanitree[models.DriveItemable, models.DriveItemable]{
            Parent: stree,
@@ -124,7 +129,7 @@ func recursivelyBuildTree(
            ac,
            driveID,
            location+branch.Name+"/",
-           requireNoErrorsWithinFolderName,
+           mustPopulateFolders,
            cannotAllowErrors,
            branch)
    }

@@ -32,7 +32,7 @@ func CheckExport(
        ctx,
        ac,
        driveID,
-       envs.RestoreContainer)
+       []string{envs.SourceContainer})

    sourceTree, ok := root.Children[envs.SourceContainer]
    common.Assert(

@@ -45,7 +45,14 @@ func CheckRestoration(
        "drive_id", driveID,
        "drive_name", driveName)

-   root := populateSanitree(ctx, ac, driveID, envs.RestoreContainer)
+   root := populateSanitree(
+       ctx,
+       ac,
+       driveID,
+       []string{
+           envs.SourceContainer,
+           envs.RestoreContainer,
+       })

    sourceTree, ok := root.Children[envs.SourceContainer]
    common.Assert(
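The rule the renamed parameter implements can be stated compactly: an error is fatal if the current folder is in the watched set, or if any ancestor was. A minimal sketch of that gating with illustrative names (not the test's actual API):

    package main

    import (
        "fmt"

        "golang.org/x/exp/slices"
    )

    // errorsAreFatal mirrors the cannotAllowErrors computation above:
    // fatal inside any watched folder, and inside all of its descendants.
    func errorsAreFatal(folderName string, watched []string, parentWasFatal bool) bool {
        return parentWasFatal || slices.Contains(watched, folderName)
    }

    func main() {
        watched := []string{"source-folder", "restore-folder"}
        fmt.Println(errorsAreFatal("source-folder", watched, false)) // true
        fmt.Println(errorsAreFatal("unrelated", watched, false))     // false
        fmt.Println(errorsAreFatal("unrelated", watched, true))      // true: an ancestor was watched
    }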
@@ -3,7 +3,7 @@ module github.com/alcionai/corso/src
go 1.21

replace (
-   github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe
+   github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4

    // Alcion fork removes the validation of email addresses as we might get incomplete email addresses
    github.com/xhit/go-simple-mail/v2 v2.16.0 => github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a
@@ -11,7 +11,7 @@ replace (

require (
    github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1
-   github.com/alcionai/clues v0.0.0-20231222002615-24ee69e6ecc2
+   github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c
    github.com/armon/go-metrics v0.4.1
    github.com/aws/aws-xray-sdk-go v1.8.3
    github.com/cenkalti/backoff/v4 v4.2.1
@@ -51,7 +51,7 @@ require (
)

require (
-   github.com/arran4/golang-ical v0.2.3
+   github.com/arran4/golang-ical v0.2.4
    github.com/emersion/go-vcard v0.0.0-20230815062825-8fda7d206ec9
    jaytaylor.com/html2text v0.0.0-20230321000545-74c2419ad056
)
@@ -121,7 +121,7 @@ require (
    github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
    github.com/microsoft/kiota-serialization-text-go v1.0.0
    github.com/minio/md5-simd v1.1.2 // indirect
-   github.com/minio/minio-go/v7 v7.0.66
+   github.com/minio/minio-go/v7 v7.0.67
    github.com/minio/sha256-simd v1.0.1 // indirect
    github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
    github.com/modern-go/reflect2 v1.0.2 // indirect
16 src/go.sum
@@ -19,12 +19,12 @@ github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1o
github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo=
-github.com/alcionai/clues v0.0.0-20231222002615-24ee69e6ecc2 h1:Oiz7puLziTpDUsEoiZMNor3j6um8RSvPOSIf4heGgTk=
-github.com/alcionai/clues v0.0.0-20231222002615-24ee69e6ecc2/go.mod h1:1YJwJy3W6GGsC2UiDAEWABUjgvT8OZHjKs8OoaXeKbw=
+github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c h1:QtARFaqYKtGjmEejr07KFf2iyfCAdTxYGRAAFveLjFA=
+github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c/go.mod h1:1YJwJy3W6GGsC2UiDAEWABUjgvT8OZHjKs8OoaXeKbw=
github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a h1:4nhM0NM1qtUT1s55rQ+D0Xw1Re5mIU9/crjEl6KdE+k=
github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a/go.mod h1:b7P5ygho6SYE+VIqpxA6QkYfv4teeyG4MKqB3utRu98=
-github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe h1:nLS5pxhm04Jz4+qeipNlxdyPGxqNWpBu8UGkRYpWoIw=
-github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe/go.mod h1:QFRSOUQzZfKE3hKVBHP7hxOn5WyrEmdBtfN5wkib/eA=
+github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4 h1:3YZ70H3mkUgwiHLiNvukrqh2awRgfl1RAkbV0IoUqqk=
+github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4/go.mod h1:QFRSOUQzZfKE3hKVBHP7hxOn5WyrEmdBtfN5wkib/eA=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@@ -35,8 +35,8 @@ github.com/andybalholm/brotli v1.0.6 h1:Yf9fFpf49Zrxb9NlQaluyE92/+X7UVHlhMNJN2sx
github.com/andybalholm/brotli v1.0.6/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
-github.com/arran4/golang-ical v0.2.3 h1:C4Vj7+BjJBIrAJhHgi6Ku+XUkQVugRq4re5Cqj5QVdE=
-github.com/arran4/golang-ical v0.2.3/go.mod h1:RqMuPGmwRRwjkb07hmm+JBqcWa1vF1LvVmPtSZN2OhQ=
+github.com/arran4/golang-ical v0.2.4 h1:0/rTXn2qqEekLKec3SzRRy+z7pCLtniMb0KD/dPogUo=
+github.com/arran4/golang-ical v0.2.4/go.mod h1:RqMuPGmwRRwjkb07hmm+JBqcWa1vF1LvVmPtSZN2OhQ=
github.com/aws/aws-sdk-go v1.48.6 h1:hnL/TE3eRigirDLrdRE9AWE1ALZSVLAsC4wK8TGsMqk=
github.com/aws/aws-sdk-go v1.48.6/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk=
github.com/aws/aws-xray-sdk-go v1.8.3 h1:S8GdgVncBRhzbNnNUgTPwhEqhwt2alES/9rLASyhxjU=
@@ -219,8 +219,8 @@ github.com/microsoftgraph/msgraph-sdk-go-core v1.0.1 h1:uq4qZD8VXLiNZY0t4NoRpLDo
github.com/microsoftgraph/msgraph-sdk-go-core v1.0.1/go.mod h1:HUITyuFN556+0QZ/IVfH5K4FyJM7kllV6ExKi2ImKhE=
github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
-github.com/minio/minio-go/v7 v7.0.66 h1:bnTOXOHjOqv/gcMuiVbN9o2ngRItvqE774dG9nq0Dzw=
-github.com/minio/minio-go/v7 v7.0.66/go.mod h1:DHAgmyQEGdW3Cif0UooKOyrT3Vxs82zNdV6tkKhRtbs=
+github.com/minio/minio-go/v7 v7.0.67 h1:BeBvZWAS+kRJm1vGTMJYVjKUNoo0FoEt/wUWdUtfmh8=
+github.com/minio/minio-go/v7 v7.0.67/go.mod h1:+UXocnUeZ3wHvVh5s95gcrA4YjMIbccT6ubB+1m054A=
github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM=
github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
@@ -10,6 +10,7 @@ import (

    "github.com/alcionai/corso/src/pkg/dttm"
    "github.com/alcionai/corso/src/pkg/export"
+   "github.com/alcionai/corso/src/pkg/logger"
)

const (
@@ -56,12 +57,22 @@ func ZipExportCollection(
    defer wr.Close()

    buf := make([]byte, ZipCopyBufferSize)
+   counted := 0
+   log := logger.Ctx(ctx).
+       With("collection_count", len(expCollections))

    for _, ec := range expCollections {
        folder := ec.BasePath()
        items := ec.Items(ctx)

        for item := range items {
+           counted++
+
+           // Log every 1000 items that are processed
+           if counted%1000 == 0 {
+               log.Infow("progress zipping export items", "count_items", counted)
+           }
+
            err := item.Error
            if err != nil {
                writer.CloseWithError(clues.Wrap(err, "getting export item").With("id", item.ID))
@@ -88,8 +99,12 @@ func ZipExportCollection(
                writer.CloseWithError(clues.Wrap(err, "writing zip entry").With("name", name).With("id", item.ID))
                return
            }

+           item.Body.Close()
        }
    }

+   log.Infow("completed zipping export items", "count_items", counted)
}()

return zipCollection{reader}, nil
@@ -1,10 +1,13 @@
package jwt

import (
+   "context"
    "time"

    "github.com/alcionai/clues"
    jwt "github.com/golang-jwt/jwt/v5"
+
+   "github.com/alcionai/corso/src/pkg/logger"
)

// IsJWTExpired checks if the JWT token is past expiry by analyzing the
@@ -37,3 +40,51 @@ func IsJWTExpired(

    return expired, nil
}
+
+// GetJWTLifetime returns the issued at(iat) and expiration time(exp) claims
+// present in the JWT token. These are optional claims and may not be present
+// in the token. Absence is not reported as an error.
+//
+// An error is returned if the supplied token is malformed. Times are returned
+// in UTC to have parity with graph responses.
+func GetJWTLifetime(
+   ctx context.Context,
+   rawToken string,
+) (time.Time, time.Time, error) {
+   var (
+       issuedAt  time.Time
+       expiresAt time.Time
+   )
+
+   p := jwt.NewParser()
+
+   token, _, err := p.ParseUnverified(rawToken, &jwt.RegisteredClaims{})
+   if err != nil {
+       logger.CtxErr(ctx, err).Debug("parsing jwt token")
+       return time.Time{}, time.Time{}, clues.Wrap(err, "invalid jwt")
+   }
+
+   exp, err := token.Claims.GetExpirationTime()
+   if err != nil {
+       logger.CtxErr(ctx, err).Debug("extracting exp claim")
+       return time.Time{}, time.Time{}, clues.Wrap(err, "getting token expiry time")
+   }
+
+   iat, err := token.Claims.GetIssuedAt()
+   if err != nil {
+       logger.CtxErr(ctx, err).Debug("extracting iat claim")
+       return time.Time{}, time.Time{}, clues.Wrap(err, "getting token issued at time")
+   }
+
+   // Absence of iat or exp claims is not reported as an error by jwt library as these
+   // are optional as per spec.
+   if iat != nil {
+       issuedAt = iat.UTC()
+   }
+
+   if exp != nil {
+       expiresAt = exp.UTC()
+   }
+
+   return issuedAt, expiresAt, nil
+}
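A sketch of how a caller might use GetJWTLifetime to decide whether a cached token is worth reusing. The five-minute margin is an arbitrary example, and the import path for the jwt package is assumed here; zero-valued times signal an absent claim:

    package auth

    import (
        "context"
        "time"

        // path assumed for illustration
        "github.com/alcionai/corso/src/pkg/jwt"
    )

    // tokenNeedsRefresh treats a token as stale when the exp claim is
    // present and falls within the next five minutes.
    func tokenNeedsRefresh(ctx context.Context, rawToken string) (bool, error) {
        _, exp, err := jwt.GetJWTLifetime(ctx, rawToken)
        if err != nil {
            // malformed token: the caller should fetch a fresh one
            return true, err
        }

        // a zero exp means the claim was absent; assume the token is usable
        if exp.IsZero() {
            return false, nil
        }

        return time.Until(exp) < 5*time.Minute, nil
    }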
@@ -113,3 +113,134 @@ func (suite *JWTUnitSuite) TestIsJWTExpired() {
        })
    }
}
+
+func (suite *JWTUnitSuite) TestGetJWTLifetime() {
+   // Set of time values to be used in the tests.
+   // Truncate to seconds for comparisons since jwt tokens have second
+   // level precision.
+   idToTime := map[string]time.Time{
+       "T0": time.Now().UTC().Add(-time.Hour).Truncate(time.Second),
+       "T1": time.Now().UTC().Truncate(time.Second),
+       "T2": time.Now().UTC().Add(time.Hour).Truncate(time.Second),
+   }
+
+   table := []struct {
+       name       string
+       getToken   func() (string, error)
+       expectFunc func(t *testing.T, iat time.Time, exp time.Time)
+       expectErr  assert.ErrorAssertionFunc
+   }{
+       {
+           name: "alive token",
+           getToken: func() (string, error) {
+               return createJWTToken(
+                   jwt.RegisteredClaims{
+                       IssuedAt:  jwt.NewNumericDate(idToTime["T0"]),
+                       ExpiresAt: jwt.NewNumericDate(idToTime["T1"]),
+                   })
+           },
+           expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
+               assert.Equal(t, idToTime["T0"], iat)
+               assert.Equal(t, idToTime["T1"], exp)
+           },
+           expectErr: assert.NoError,
+       },
+       // Test with a token which is not generated using the go-jwt lib.
+       // This is a long lived token which is valid for 100 years.
+       {
+           name: "alive raw token with iat and exp claims",
+           getToken: func() (string, error) {
+               return rawToken, nil
+           },
+           expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
+               assert.Less(t, iat, time.Now(), "iat should be in the past")
+               assert.Greater(t, exp, time.Now(), "exp should be in the future")
+           },
+           expectErr: assert.NoError,
+       },
+       // Regardless of whether the token is expired or not, we should be able to
+       // extract the iat and exp claims from it without error.
+       {
+           name: "expired token",
+           getToken: func() (string, error) {
+               return createJWTToken(
+                   jwt.RegisteredClaims{
+                       IssuedAt:  jwt.NewNumericDate(idToTime["T1"]),
+                       ExpiresAt: jwt.NewNumericDate(idToTime["T0"]),
+                   })
+           },
+           expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
+               assert.Equal(t, idToTime["T1"], iat)
+               assert.Equal(t, idToTime["T0"], exp)
+           },
+           expectErr: assert.NoError,
+       },
+       {
+           name: "missing iat claim",
+           getToken: func() (string, error) {
+               return createJWTToken(
+                   jwt.RegisteredClaims{
+                       ExpiresAt: jwt.NewNumericDate(idToTime["T2"]),
+                   })
+           },
+           expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
+               assert.Equal(t, time.Time{}, iat)
+               assert.Equal(t, idToTime["T2"], exp)
+           },
+           expectErr: assert.NoError,
+       },
+       {
+           name: "missing exp claim",
+           getToken: func() (string, error) {
+               return createJWTToken(
+                   jwt.RegisteredClaims{
+                       IssuedAt: jwt.NewNumericDate(idToTime["T0"]),
+                   })
+           },
+           expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
+               assert.Equal(t, idToTime["T0"], iat)
+               assert.Equal(t, time.Time{}, exp)
+           },
+           expectErr: assert.NoError,
+       },
+       {
+           name: "both claims missing",
+           getToken: func() (string, error) {
+               return createJWTToken(jwt.RegisteredClaims{})
+           },
+           expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
+               assert.Equal(t, time.Time{}, iat)
+               assert.Equal(t, time.Time{}, exp)
+           },
+           expectErr: assert.NoError,
+       },
+       {
+           name: "malformed token",
+           getToken: func() (string, error) {
+               return "header.claims.signature", nil
+           },
+           expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
+               assert.Equal(t, time.Time{}, iat)
+               assert.Equal(t, time.Time{}, exp)
+           },
+           expectErr: assert.Error,
+       },
+   }
+
+   for _, test := range table {
+       suite.Run(test.name, func() {
+           t := suite.T()
+
+           ctx, flush := tester.NewContext(t)
+           defer flush()
+
+           token, err := test.getToken()
+           require.NoError(t, err)
+
+           iat, exp, err := GetJWTLifetime(ctx, token)
+           test.expectErr(t, err)
+
+           test.expectFunc(t, iat, exp)
+       })
+   }
+}
@@ -59,6 +59,19 @@ func First(vs ...string) string {
    return ""
}

+// FirstIn returns the first entry in the map with a non-zero value
+// when iterating the provided list of keys.
+func FirstIn(m map[string]any, keys ...string) string {
+   for _, key := range keys {
+       v, err := AnyValueToString(key, m)
+       if err == nil && len(v) > 0 {
+           return v
+       }
+   }
+
+   return ""
+}
+
// Preview reduces the string to the specified size.
// If the string is longer than the size, the last three
// characters are replaced with an ellipsis. Size < 4
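FirstIn is essentially a priority lookup over loosely-typed maps: keys are tried in order and the first value that stringifies to something non-empty wins, which is useful for JSON-like payloads with several candidate fields. Illustrative usage (map contents invented for the example):

    package main

    import (
        "fmt"

        "github.com/alcionai/corso/src/internal/common/str"
    )

    func main() {
        m := map[string]any{
            "display_name":  "",  // present but empty: skipped
            "mail_nickname": nil, // present but nil: skipped
            "name":          "smarf",
        }

        // keys are tried in order; the first non-empty string value wins
        fmt.Println(str.FirstIn(m, "display_name", "mail_nickname", "name")) // smarf
    }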
@@ -118,3 +118,96 @@ func TestGenerateHash(t *testing.T) {
        })
    }
}
+
+func TestFirstIn(t *testing.T) {
+   table := []struct {
+       name   string
+       m      map[string]any
+       keys   []string
+       expect string
+   }{
+       {
+           name:   "nil map",
+           keys:   []string{"foo", "bar"},
+           expect: "",
+       },
+       {
+           name:   "empty map",
+           m:      map[string]any{},
+           keys:   []string{"foo", "bar"},
+           expect: "",
+       },
+       {
+           name: "no match",
+           m: map[string]any{
+               "baz": "baz",
+           },
+           keys:   []string{"foo", "bar"},
+           expect: "",
+       },
+       {
+           name: "no keys",
+           m: map[string]any{
+               "baz": "baz",
+           },
+           keys:   []string{},
+           expect: "",
+       },
+       {
+           name: "nil match",
+           m: map[string]any{
+               "foo": nil,
+           },
+           keys:   []string{"foo", "bar"},
+           expect: "",
+       },
+       {
+           name: "empty match",
+           m: map[string]any{
+               "foo": "",
+           },
+           keys:   []string{"foo", "bar"},
+           expect: "",
+       },
+       {
+           name: "matches first key",
+           m: map[string]any{
+               "foo": "fnords",
+           },
+           keys:   []string{"foo", "bar"},
+           expect: "fnords",
+       },
+       {
+           name: "matches second key",
+           m: map[string]any{
+               "bar": "smarf",
+           },
+           keys:   []string{"foo", "bar"},
+           expect: "smarf",
+       },
+       {
+           name: "matches second key with nil first match",
+           m: map[string]any{
+               "foo": nil,
+               "bar": "smarf",
+           },
+           keys:   []string{"foo", "bar"},
+           expect: "smarf",
+       },
+       {
+           name: "matches second key with empty first match",
+           m: map[string]any{
+               "foo": "",
+               "bar": "smarf",
+           },
+           keys:   []string{"foo", "bar"},
+           expect: "smarf",
+       },
+   }
+   for _, test := range table {
+       t.Run(test.name, func(t *testing.T) {
+           result := FirstIn(test.m, test.keys...)
+           assert.Equal(t, test.expect, result)
+       })
+   }
+}
@@ -23,6 +23,7 @@ import (
    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/common/str"
    "github.com/alcionai/corso/src/internal/converters/ics"
+   "github.com/alcionai/corso/src/internal/m365/collection/groups/metadata"
    "github.com/alcionai/corso/src/pkg/logger"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)
@@ -142,6 +143,121 @@ func getICalData(ctx context.Context, data models.Messageable) (string, error) {
    return ics.FromEventable(ctx, event)
}

+func getFileAttachment(ctx context.Context, attachment models.Attachmentable) (*mail.File, error) {
+   kind := ptr.Val(attachment.GetContentType())
+
+   bytes, err := attachment.GetBackingStore().Get("contentBytes")
+   if err != nil {
+       return nil, clues.WrapWC(ctx, err, "failed to get attachment bytes").
+           With("kind", kind)
+   }
+
+   if bytes == nil {
+       // TODO(meain): Handle non file attachments
+       // https://github.com/alcionai/corso/issues/4772
+       logger.Ctx(ctx).
+           With("attachment_id", ptr.Val(attachment.GetId()),
+               "attachment_type", ptr.Val(attachment.GetOdataType())).
+           Info("no contentBytes for attachment")
+
+       return nil, nil
+   }
+
+   bts, ok := bytes.([]byte)
+   if !ok {
+       return nil, clues.WrapWC(ctx, err, "invalid content bytes").
+           With("kind", kind).
+           With("interface_type", fmt.Sprintf("%T", bytes))
+   }
+
+   name := ptr.Val(attachment.GetName())
+   if len(name) == 0 {
+       // Graph as of now does not let us create any attachments
+       // without a name, but we have run into instances where we have
+       // see attachments without a name, possibly from old
+       // data. This is for those cases.
+       name = "Unnamed"
+   }
+
+   contentID, err := attachment.GetBackingStore().Get("contentId")
+   if err != nil {
+       return nil, clues.WrapWC(ctx, err, "getting content id for attachment").
+           With("kind", kind)
+   }
+
+   if contentID != nil {
+       cids, _ := str.AnyToString(contentID)
+       if len(cids) > 0 {
+           name = cids
+       }
+   }
+
+   return &mail.File{
+       // cannot use filename as inline attachment will not get mapped properly
+       Name:     name,
+       MimeType: kind,
+       Data:     bts,
+       Inline:   ptr.Val(attachment.GetIsInline()),
+   }, nil
+}
+
+func getItemAttachment(ctx context.Context, attachment models.Attachmentable) (*mail.File, error) {
+   it, err := attachment.GetBackingStore().Get("item")
+   if err != nil {
+       return nil, clues.WrapWC(ctx, err, "getting item for attachment").
+           With("attachment_id", ptr.Val(attachment.GetId()))
+   }
+
+   name := ptr.Val(attachment.GetName())
+   if len(name) == 0 {
+       // Graph as of now does not let us create any attachments
+       // without a name, but we have run into instances where we have
+       // see attachments without a name, possibly from old
+       // data. This is for those cases.
+       name = "Unnamed"
+   }
+
+   switch it := it.(type) {
+   case *models.Message:
+       cb, err := FromMessageable(ctx, it)
+       if err != nil {
+           return nil, clues.WrapWC(ctx, err, "converting item attachment to eml").
+               With("attachment_id", ptr.Val(attachment.GetId()))
+       }
+
+       return &mail.File{
+           Name:     name,
+           MimeType: "message/rfc822",
+           Data:     []byte(cb),
+       }, nil
+   default:
+       logger.Ctx(ctx).
+           With("attachment_id", ptr.Val(attachment.GetId()),
+               "attachment_type", ptr.Val(attachment.GetOdataType())).
+           Info("unknown item attachment type")
+   }
+
+   return nil, nil
+}
+
+func getMailAttachment(ctx context.Context, att models.Attachmentable) (*mail.File, error) {
+   otyp := ptr.Val(att.GetOdataType())
+
+   switch otyp {
+   case "#microsoft.graph.fileAttachment":
+       return getFileAttachment(ctx, att)
+   case "#microsoft.graph.itemAttachment":
+       return getItemAttachment(ctx, att)
+   default:
+       logger.Ctx(ctx).
+           With("attachment_id", ptr.Val(att.GetId()),
+               "attachment_type", otyp).
+           Info("unknown attachment type")
+
+       return nil, nil
+   }
+}
+
// FromJSON converts a Messageable (as json) to .eml format
func FromJSON(ctx context.Context, body []byte) (string, error) {
    ctx = clues.Add(ctx, "body_len", len(body))
@@ -151,6 +267,11 @@ func FromJSON(ctx context.Context, body []byte) (string, error) {
        return "", clues.WrapWC(ctx, err, "converting to messageble")
    }

+   return FromMessageable(ctx, data)
+}
+
+// Converts a Messageable to .eml format
+func FromMessageable(ctx context.Context, data models.Messageable) (string, error) {
    ctx = clues.Add(ctx, "item_id", ptr.Val(data.GetId()))

    email := mail.NewMSG()
@@ -226,6 +347,115 @@ func FromJSON(ctx context.Context, body []byte) (string, error) {
    }
}

+   if data.GetAttachments() != nil {
+       for _, attachment := range data.GetAttachments() {
+           att, err := getMailAttachment(ctx, attachment)
+           if err != nil {
+               return "", clues.WrapWC(ctx, err, "getting mail attachment")
+           }
+
+           // There are known cases where we just wanna log and
+           // ignore instead of erroring out
+           if att != nil {
+               email.Attach(att)
+           }
+       }
+   }
+
+   switch data.(type) {
+   case *models.EventMessageResponse, *models.EventMessage:
+       // We can't handle this as of now, not enough information
+       // TODO: Fetch event object from graph when fetching email
+   case *models.CalendarSharingMessage:
+       // TODO: Parse out calendar sharing message
+       // https://github.com/alcionai/corso/issues/5041
+   case *models.EventMessageRequest:
+       cal, err := getICalData(ctx, data)
+       if err != nil {
+           return "", clues.Wrap(err, "getting ical attachment")
+       }
+
+       if len(cal) > 0 {
+           email.AddAlternative(mail.TextCalendar, cal)
+       }
+   }
+
+   if err := email.GetError(); err != nil {
+       return "", clues.WrapWC(ctx, err, "converting to eml")
+   }
+
+   return email.GetMessage(), nil
+}
+
+//-------------------------------------------------------------
+// Postable -> EML
+//-------------------------------------------------------------
+
+// FromJSONPostToEML converts a postable (as json) to .eml format.
+// TODO(pandeyabs): This is a stripped down copy of messageable to
+// eml conversion, it can be folded into one function by having a post
+// to messageable converter.
+func FromJSONPostToEML(
+   ctx context.Context,
+   body []byte,
+   postMetadata metadata.ConversationPostMetadata,
+) (string, error) {
+   ctx = clues.Add(ctx, "body_len", len(body))
+
+   data, err := api.BytesToPostable(body)
+   if err != nil {
+       return "", clues.WrapWC(ctx, err, "converting to postable")
+   }
+
+   ctx = clues.Add(ctx, "item_id", ptr.Val(data.GetId()))
+
+   email := mail.NewMSG()
+   email.Encoding = mail.EncodingBase64 // Doing it to be safe for when we have eventMessage (newline issues)
+   email.AllowDuplicateAddress = true   // More "correct" conversion
+   email.AddBccToHeader = true          // Don't ignore Bcc
+   email.AllowEmptyAttachments = true   // Don't error on empty attachments
+   email.UseProvidedAddress = true      // Don't try to parse the email address
+
+   if data.GetFrom() != nil {
+       email.SetFrom(formatAddress(data.GetFrom().GetEmailAddress()))
+   }
+
+   // We don't have the To, Cc, Bcc recipient information for posts due to a graph
+   // limitation. All posts carry the group email address as the only recipient
+   // for now.
+   email.AddTo(postMetadata.Recipients...)
+   email.SetSubject(postMetadata.Topic)
+
+   // Reply-To email address is not available for posts. Note that this is different
+   // from inReplyTo field.
+
+   if data.GetCreatedDateTime() != nil {
+       email.SetDate(ptr.Val(data.GetCreatedDateTime()).Format(dateFormat))
+   }
+
+   if data.GetBody() != nil {
+       if data.GetBody().GetContentType() != nil {
+           var contentType mail.ContentType
+
+           switch data.GetBody().GetContentType().String() {
+           case "html":
+               contentType = mail.TextHTML
+           case "text":
+               contentType = mail.TextPlain
+           default:
+               // https://learn.microsoft.com/en-us/graph/api/resources/itembody?view=graph-rest-1.0#properties
+               // This should not be possible according to the documentation
+               logger.Ctx(ctx).
+                   With("body_type", data.GetBody().GetContentType().String()).
+                   Info("unknown body content type")
+
+               contentType = mail.TextPlain
+           }
+
+           email.SetBody(contentType, ptr.Val(data.GetBody().GetContent()))
+       }
+   }
+
    if data.GetAttachments() != nil {
        for _, attachment := range data.GetAttachments() {
            kind := ptr.Val(attachment.GetContentType())
@@ -239,6 +469,9 @@ func FromJSON(ctx context.Context, body []byte) (string, error) {
            if bytes == nil {
                // TODO(meain): Handle non file attachments
                // https://github.com/alcionai/corso/issues/4772
+               //
+               // TODO(pandeyabs): Above issue is for messages.
+               // This is not a problem for posts but leaving it here for safety.
                logger.Ctx(ctx).
                    With("attachment_id", ptr.Val(attachment.GetId()),
                        "attachment_type", ptr.Val(attachment.GetOdataType())).
@@ -255,6 +488,9 @@ func FromJSON(ctx context.Context, body []byte) (string, error) {
            }

            name := ptr.Val(attachment.GetName())
+           if len(name) == 0 {
+               name = "Unnamed"
+           }

            contentID, err := attachment.GetBackingStore().Get("contentId")
            if err != nil {
@@ -279,24 +515,8 @@ func FromJSON(ctx context.Context, body []byte) (string, error) {
        }
    }

-   switch data.(type) {
-   case *models.EventMessageResponse, *models.EventMessage:
-       // We can't handle this as of now, not enough information
-       // TODO: Fetch event object from graph when fetching email
-   case *models.CalendarSharingMessage:
-       // TODO: Parse out calendar sharing message
-       // https://github.com/alcionai/corso/issues/5041
-   case *models.EventMessageRequest:
-       cal, err := getICalData(ctx, data)
-       if err != nil {
-           return "", clues.Wrap(err, "getting ical attachment")
-       }
-
-       if len(cal) > 0 {
-           email.AddAlternative(mail.TextCalendar, cal)
-       }
-   }
-
+   // Note: Posts cannot be of type EventMessageResponse, EventMessage or
+   // CalendarSharingMessage. So we don't need to handle those cases here.
    if err = email.GetError(); err != nil {
        return "", clues.WrapWC(ctx, err, "converting to eml")
    }
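Both converters take the raw JSON of a backed-up item and return the .eml text, so a caller only needs to route by item type. A minimal sketch under those assumptions (the metadata values and output file names are illustrative):

    package export

    import (
        "context"
        "os"

        "github.com/alcionai/corso/src/internal/converters/eml"
        "github.com/alcionai/corso/src/internal/m365/collection/groups/metadata"
    )

    // writeEML converts one backed-up message and one post to .eml files.
    func writeEML(ctx context.Context, msgJSON, postJSON []byte) error {
        msg, err := eml.FromJSON(ctx, msgJSON)
        if err != nil {
            return err
        }

        // posts need the recipient/topic metadata that graph cannot supply
        post, err := eml.FromJSONPostToEML(ctx, postJSON, metadata.ConversationPostMetadata{
            Recipients: []string{"group@example.com"},
            Topic:      "test subject",
        })
        if err != nil {
            return err
        }

        if err := os.WriteFile("message.eml", []byte(msg), 0o644); err != nil {
            return err
        }

        return os.WriteFile("post.eml", []byte(post), 0o644)
    }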
@ -18,6 +18,8 @@ import (
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
"github.com/alcionai/corso/src/internal/converters/eml/testdata"
|
||||
"github.com/alcionai/corso/src/internal/converters/ics"
|
||||
"github.com/alcionai/corso/src/internal/m365/collection/groups/metadata"
|
||||
stub "github.com/alcionai/corso/src/internal/m365/service/groups/mock"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
)
|
||||
@ -135,6 +137,11 @@ func (suite *EMLUnitSuite) TestConvert_messageble_to_eml() {
|
||||
}
|
||||
|
||||
func (suite *EMLUnitSuite) TestConvert_edge_cases() {
|
||||
bodies := []string{
|
||||
testdata.EmailWithAttachments,
|
||||
testdata.EmailWithinEmail,
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
transform func(models.Messageable)
|
||||
@ -160,35 +167,75 @@ func (suite *EMLUnitSuite) TestConvert_edge_cases() {
|
||||
require.NoError(suite.T(), err, "setting attachment content")
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "attachment without name",
|
||||
transform: func(msg models.Messageable) {
|
||||
attachments := msg.GetAttachments()
|
||||
attachments[1].SetName(ptr.To(""))
|
||||
|
||||
// This test has to be run on a non inline attachment
|
||||
// as inline attachments use contentID instead of name
|
||||
// even when there is a name.
|
||||
assert.False(suite.T(), ptr.Val(attachments[1].GetIsInline()))
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "attachment with nil name",
|
||||
transform: func(msg models.Messageable) {
|
||||
attachments := msg.GetAttachments()
|
||||
attachments[1].SetName(nil)
|
||||
|
||||
// This test has to be run on a non inline attachment
|
||||
// as inline attachments use contentID instead of name
|
||||
// even when there is a name.
|
||||
assert.False(suite.T(), ptr.Val(attachments[1].GetIsInline()))
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "multiple attachments without name",
|
||||
transform: func(msg models.Messageable) {
|
||||
attachments := msg.GetAttachments()
|
||||
attachments[1].SetName(ptr.To(""))
|
||||
attachments[2].SetName(ptr.To(""))
|
||||
|
||||
// This test has to be run on a non inline attachment
|
||||
// as inline attachments use contentID instead of name
|
||||
// even when there is a name.
|
||||
assert.False(suite.T(), ptr.Val(attachments[1].GetIsInline()))
|
||||
assert.False(suite.T(), ptr.Val(attachments[2].GetIsInline()))
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
for _, b := range bodies {
|
||||
for _, test := range tests {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
body := []byte(testdata.EmailWithAttachments)
|
||||
body := []byte(b)
|
||||
|
||||
msg, err := api.BytesToMessageable(body)
|
||||
require.NoError(t, err, "creating message")
|
||||
msg, err := api.BytesToMessageable(body)
|
||||
require.NoError(t, err, "creating message")
|
||||
|
||||
test.transform(msg)
|
||||
test.transform(msg)
|
||||
|
||||
writer := kjson.NewJsonSerializationWriter()
|
||||
writer := kjson.NewJsonSerializationWriter()
|
||||
|
||||
defer writer.Close()
|
||||
defer writer.Close()
|
||||
|
||||
err = writer.WriteObjectValue("", msg)
|
||||
require.NoError(t, err, "serializing message")
|
||||
err = writer.WriteObjectValue("", msg)
|
||||
require.NoError(t, err, "serializing message")
|
||||
|
||||
nbody, err := writer.GetSerializedContent()
|
||||
require.NoError(t, err, "getting serialized content")
|
||||
nbody, err := writer.GetSerializedContent()
|
||||
require.NoError(t, err, "getting serialized content")
|
||||
|
||||
_, err = FromJSON(ctx, nbody)
|
||||
assert.NoError(t, err, "converting to eml")
|
||||
})
|
||||
_, err = FromJSON(ctx, nbody)
|
||||
assert.NoError(t, err, "converting to eml")
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -226,11 +273,11 @@ func (suite *EMLUnitSuite) TestConvert_eml_ics() {
|
||||
|
||||
assert.Equal(
|
||||
t,
|
||||
msg.GetCreatedDateTime().Format(ics.ICalDateTimeFormat),
|
||||
msg.GetCreatedDateTime().Format(ics.ICalDateTimeFormatUTC),
|
||||
event.GetProperty(ical.ComponentPropertyCreated).Value)
|
||||
assert.Equal(
|
||||
t,
|
||||
msg.GetLastModifiedDateTime().Format(ics.ICalDateTimeFormat),
|
||||
msg.GetLastModifiedDateTime().Format(ics.ICalDateTimeFormatUTC),
|
||||
event.GetProperty(ical.ComponentPropertyLastModified).Value)
|
||||
|
||||
st, err := ics.GetUTCTime(
|
||||
@ -245,11 +292,11 @@ func (suite *EMLUnitSuite) TestConvert_eml_ics() {
|
||||
|
||||
assert.Equal(
|
||||
t,
|
||||
st.Format(ics.ICalDateTimeFormat),
|
||||
st.Format(ics.ICalDateTimeFormatUTC),
|
||||
event.GetProperty(ical.ComponentPropertyDtStart).Value)
|
||||
assert.Equal(
|
||||
t,
|
||||
et.Format(ics.ICalDateTimeFormat),
|
||||
et.Format(ics.ICalDateTimeFormatUTC),
|
||||
event.GetProperty(ical.ComponentPropertyDtEnd).Value)
|
||||
|
||||
tos := msg.GetToRecipients()
|
||||
@ -325,3 +372,119 @@ func (suite *EMLUnitSuite) TestConvert_eml_ics_from_event_obj() {
|
||||
assert.NotEqual(t, ptr.Val(msg.GetSubject()), event.GetProperty(ical.ComponentPropertySummary).Value)
|
||||
assert.Equal(t, ptr.Val(evt.GetSubject()), event.GetProperty(ical.ComponentPropertySummary).Value)
|
||||
}
|
||||
|
||||
//-------------------------------------------------------------
|
||||
// Postable -> EML tests
|
||||
//-------------------------------------------------------------
|
||||
|
||||
func (suite *EMLUnitSuite) TestConvert_postable_to_eml() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
body := []byte(stub.PostWithAttachments)
|
||||
|
||||
postMetadata := metadata.ConversationPostMetadata{
|
||||
Recipients: []string{"group@example.com"},
|
||||
Topic: "test subject",
|
||||
}
|
||||
|
||||
out, err := FromJSONPostToEML(ctx, body, postMetadata)
|
||||
assert.NoError(t, err, "converting to eml")
|
||||
|
||||
post, err := api.BytesToPostable(body)
|
||||
require.NoError(t, err, "creating post")
|
||||
|
||||
eml, err := enmime.ReadEnvelope(strings.NewReader(out))
|
||||
require.NoError(t, err, "reading created eml")
|
||||
|
||||
assert.Equal(t, postMetadata.Topic, eml.GetHeader("Subject"))
|
||||
assert.Equal(t, post.GetCreatedDateTime().Format(time.RFC1123Z), eml.GetHeader("Date"))
|
||||
|
||||
assert.Equal(t, formatAddress(post.GetFrom().GetEmailAddress()), eml.GetHeader("From"))
|
||||
|
||||
// Test recipients. The post metadata should contain the group email address.
|
||||
|
||||
tos := strings.Split(eml.GetHeader("To"), ", ")
|
||||
for _, sourceTo := range postMetadata.Recipients {
|
||||
assert.Contains(t, tos, sourceTo)
|
||||
}
|
||||
|
||||
// Assert cc, bcc to be empty since they are not supported for posts right now.
|
||||
assert.Equal(t, "", eml.GetHeader("Cc"))
|
||||
assert.Equal(t, "", eml.GetHeader("Bcc"))
|
||||
|
||||
// Test attachments using PostWithAttachments data as a reference.
|
||||
// This data has 1 direct attachment and 1 inline attachment.
|
||||
assert.Equal(t, 1, len(eml.Attachments), "direct attachment count")
|
||||
assert.Equal(t, 1, len(eml.Inlines), "inline attachment count")
|
||||
|
||||
for _, sourceAttachment := range post.GetAttachments() {
|
||||
targetContent := eml.Attachments[0].Content
|
||||
if ptr.Val(sourceAttachment.GetIsInline()) {
|
||||
targetContent = eml.Inlines[0].Content
|
||||
}
|
||||
|
||||
sourceContent, err := sourceAttachment.GetBackingStore().Get("contentBytes")
|
||||
assert.NoError(t, err, "getting source attachment content")
|
||||
|
||||
assert.Equal(t, sourceContent, targetContent)
|
||||
}
|
||||
|
||||
// Test body
|
||||
source := strings.ReplaceAll(eml.HTML, "\n", "")
|
||||
target := strings.ReplaceAll(ptr.Val(post.GetBody().GetContent()), "\n", "")
|
||||
|
||||
// replace the cid with a constant value to make the comparison
|
||||
re := regexp.MustCompile(`(?:src|originalSrc)="cid:[^"]*"`)
|
||||
source = re.ReplaceAllString(source, `src="cid:replaced"`)
|
||||
target = re.ReplaceAllString(target, `src="cid:replaced"`)
|
||||
|
||||
assert.Equal(t, source, target)
|
||||
}

// Tests an ics within an eml within another eml
func (suite *EMLUnitSuite) TestConvert_message_in_messageable_to_eml() {
    t := suite.T()

    ctx, flush := tester.NewContext(t)
    defer flush()

    body := []byte(testdata.EmailWithinEmail)

    out, err := FromJSON(ctx, body)
    assert.NoError(t, err, "converting to eml")

    msg, err := api.BytesToMessageable(body)
    require.NoError(t, err, "creating message")

    eml, err := enmime.ReadEnvelope(strings.NewReader(out))
    require.NoError(t, err, "reading created eml")

    assert.Equal(t, ptr.Val(msg.GetSubject()), eml.GetHeader("Subject"))
    assert.Equal(t, msg.GetSentDateTime().Format(time.RFC1123Z), eml.GetHeader("Date"))

    assert.Equal(t, formatAddress(msg.GetFrom().GetEmailAddress()), eml.GetHeader("From"))

    attachments := eml.Attachments
    assert.Equal(t, 3, len(attachments), "attachment count in parent email")

    ieml, err := enmime.ReadEnvelope(strings.NewReader(string(attachments[0].Content)))
    require.NoError(t, err, "reading inner eml")

    itm, err := msg.GetAttachments()[0].GetBackingStore().Get("item")
    require.NoError(t, err, "getting item from message")

    imsg := itm.(*models.Message)
    assert.Equal(t, ptr.Val(imsg.GetSubject()), ieml.GetHeader("Subject"))
    assert.Equal(t, imsg.GetSentDateTime().Format(time.RFC1123Z), ieml.GetHeader("Date"))

    assert.Equal(t, formatAddress(imsg.GetFrom().GetEmailAddress()), ieml.GetHeader("From"))

    iattachments := ieml.Attachments
    assert.Equal(t, 1, len(iattachments), "attachment count in child email")

    // Known from testdata
    assert.Contains(t, string(iattachments[0].Content), "X-LIC-LOCATION:Africa/Abidjan")
}

@@ -104,6 +104,19 @@
      "contentId": null,
      "contentLocation": null,
      "contentBytes": "W1BhdGhzXQpQcmVmaXggPSAuLgo="
    },
    {
      "@odata.type": "#microsoft.graph.fileAttachment",
      "@odata.mediaContentType": "application/octet-stream",
      "id": "ZZMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAEwbDEWAAABEgAQAD3rU0iyzCdHgz0xmOrWc9g=",
      "lastModifiedDateTime": "2023-11-16T05:42:47Z",
      "name": "qt2.conf",
      "contentType": "application/octet-stream",
      "size": 156,
      "isInline": false,
      "contentId": null,
      "contentLocation": null,
      "contentBytes": "Z1BhdGhzXQpQcmVmaXggPSAuLgo="
    }
  ]
}

268 src/internal/converters/eml/testdata/email-within-email.json vendored Normal file
@@ -0,0 +1,268 @@
{
  "id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAA=",
  "@odata.type": "#microsoft.graph.message",
  "@odata.context": "https://graph.microsoft.com/v1.0/$metadata#users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages/$entity",
  "@odata.etag": "W/\"CQAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFnDeBl\"",
  "categories": [],
  "changeKey": "CQAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFnDeBl",
  "createdDateTime": "2024-02-05T09:33:23Z",
  "lastModifiedDateTime": "2024-02-05T09:33:48Z",
  "attachments": [
    {
      "id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAABEgAQAEUyH0VS3HJBgHDlZdWZl0k=",
      "@odata.type": "#microsoft.graph.itemAttachment",
      "item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')",
      "item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')/$ref",
      "isInline": false,
      "lastModifiedDateTime": "2024-02-05T09:33:46Z",
      "name": "Purpose of life",
      "size": 11840,
      "item": {
        "id": "",
        "@odata.type": "#microsoft.graph.message",
        "createdDateTime": "2024-02-05T09:33:24Z",
        "lastModifiedDateTime": "2024-02-05T09:33:46Z",
        "attachments": [
          {
            "id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAACEgAQAEUyH0VS3HJBgHDlZdWZl0kSABAAjBhd4-oQaUS969pTkS-gzA==",
            "@odata.type": "#microsoft.graph.fileAttachment",
            "@odata.mediaContentType": "text/calendar",
            "contentType": "text/calendar",
            "isInline": false,
            "lastModifiedDateTime": "2024-02-05T09:33:46Z",
            "name": "Abidjan.ics",
            "size": 573,
            "contentBytes": "QkVHSU46VkNBTEVOREFSDQpQUk9ESUQ6LS8vdHp1cmwub3JnLy9OT05TR01MIE9sc29uIDIwMjNkLy9FTg0KVkVSU0lPTjoyLjANCkJFR0lOOlZUSU1FWk9ORQ0KVFpJRDpBZnJpY2EvQWJpZGphbg0KTEFTVC1NT0RJRklFRDoyMDIzMTIyMlQyMzMzNThaDQpUWlVSTDpodHRwczovL3d3dy50enVybC5vcmcvem9uZWluZm8vQWZyaWNhL0FiaWRqYW4NClgtTElDLUxPQ0FUSU9OOkFmcmljYS9BYmlkamFuDQpYLVBST0xFUFRJQy1UWk5BTUU6TE1UDQpCRUdJTjpTVEFOREFSRA0KVFpOQU1FOkdNVA0KVFpPRkZTRVRGUk9NOi0wMDE2MDgNClRaT0ZGU0VUVE86KzAwMDANCkRUU1RBUlQ6MTkxMjAxMDFUMDAwMDAwDQpFTkQ6U1RBTkRBUkQNCkVORDpWVElNRVpPTkUNCkVORDpWQ0FMRU5EQVINCg=="
          }
        ],
        "body": {
          "content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none;\"> P {margin-top:0;margin-bottom:0;} </style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family: Aptos, Aptos_EmbeddedFont, Aptos_MSFontService, Calibri, Helvetica, sans-serif; font-size: 12pt; color: rgb(0, 0, 0);\">I just realized the purpose of my life is to be a test case. Good to know.<br></div></body></html>",
          "contentType": "html"
        },
        "bodyPreview": "I just realized the purpose of my life is to be a test case. Good to know.",
        "conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAFEnxDqYmbJEm8d2l3qfS6A=",
        "conversationIndex": "AQHaWBYiUSfEOpiZskSbx3aXep9LoA==",
        "flag": {
          "flagStatus": "notFlagged"
        },
        "from": {
          "emailAddress": {
            "address": "JohannaL@10rqc2.onmicrosoft.com",
            "name": "Johanna Lorenz"
          }
        },
        "hasAttachments": true,
        "importance": "normal",
        "internetMessageId": "<SJ0PR04MB7294108E381BCCE5C207B6DEBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
        "isDeliveryReceiptRequested": false,
        "isDraft": false,
        "isRead": true,
        "isReadReceiptRequested": false,
        "receivedDateTime": "2024-02-05T09:33:12Z",
        "sender": {
          "emailAddress": {
            "address": "JohannaL@10rqc2.onmicrosoft.com",
            "name": "Johanna Lorenz"
          }
        },
        "sentDateTime": "2024-02-05T09:33:11Z",
        "subject": "Purpose of life",
        "toRecipients": [
          {
            "emailAddress": {
              "address": "PradeepG@10rqc2.onmicrosoft.com",
              "name": "Pradeep Gupta"
            }
          }
        ],
        "webLink": "https://outlook.office365.com/owa/?AttachmentItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAABEgAQAEUyH0VS3HJBgHDlZdWZl0k%3D&exvsurl=1&viewmodel=ItemAttachment"
      }
    },
    {
      "id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAABEgAQAEUyH0VS3HJBgHDlZdWZl02=",
      "@odata.type": "#microsoft.graph.itemAttachment",
      "item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')",
      "item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')/$ref",
      "isInline": false,
      "lastModifiedDateTime": "2024-02-05T09:33:46Z",
      "name": "Purpose of life part 2",
      "size": 11840,
      "item": {
        "id": "",
        "@odata.type": "#microsoft.graph.message",
        "createdDateTime": "2024-02-05T09:33:24Z",
        "lastModifiedDateTime": "2024-02-05T09:33:46Z",
        "attachments": [
          {
            "id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAACEgAQAEUyH0VS3HJBgHDlZdWZl0kSABAAjBhd4-oQaUS969pTkS-gzA==",
            "@odata.type": "#microsoft.graph.fileAttachment",
            "@odata.mediaContentType": "text/calendar",
            "contentType": "text/calendar",
            "isInline": false,
            "lastModifiedDateTime": "2024-02-05T09:33:46Z",
            "name": "Abidjan.ics",
            "size": 573,
            "contentBytes": "QkVHSU46VkNBTEVOREFSDQpQUk9ESUQ6LS8vdHp1cmwub3JnLy9OT05TR01MIE9sc29uIDIwMjNkLy9FTg0KVkVSU0lPTjoyLjANCkJFR0lOOlZUSU1FWk9ORQ0KVFpJRDpBZnJpY2EvQWJpZGphbg0KTEFTVC1NT0RJRklFRDoyMDIzMTIyMlQyMzMzNThaDQpUWlVSTDpodHRwczovL3d3dy50enVybC5vcmcvem9uZWluZm8vQWZyaWNhL0FiaWRqYW4NClgtTElDLUxPQ0FUSU9OOkFmcmljYS9BYmlkamFuDQpYLVBST0xFUFRJQy1UWk5BTUU6TE1UDQpCRUdJTjpTVEFOREFSRA0KVFpOQU1FOkdNVA0KVFpPRkZTRVRGUk9NOi0wMDE2MDgNClRaT0ZGU0VUVE86KzAwMDANCkRUU1RBUlQ6MTkxMjAxMDFUMDAwMDAwDQpFTkQ6U1RBTkRBUkQNCkVORDpWVElNRVpPTkUNCkVORDpWQ0FMRU5EQVINCg=="
          }
        ],
        "body": {
          "content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none;\"> P {margin-top:0;margin-bottom:0;} </style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family: Aptos, Aptos_EmbeddedFont, Aptos_MSFontService, Calibri, Helvetica, sans-serif; font-size: 12pt; color: rgb(0, 0, 0);\">I just realized the purpose of my life is to be a test case. Good to know.<br></div></body></html>",
          "contentType": "html"
        },
        "bodyPreview": "I just realized the purpose of my life is to be a test case. Good to know.",
        "conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAFEnxDqYmbJEm8d2l3qfS6A=",
        "conversationIndex": "AQHaWBYiUSfEOpiZskSbx3aXep9LoA==",
        "flag": {
          "flagStatus": "notFlagged"
        },
        "from": {
          "emailAddress": {
            "address": "JohannaL@10rqc2.onmicrosoft.com",
            "name": "Johanna Lorenz"
          }
        },
        "hasAttachments": true,
        "importance": "normal",
        "internetMessageId": "<SJ0PR04MB7294108E381BCCE5C207B6DEBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
        "isDeliveryReceiptRequested": false,
        "isDraft": false,
        "isRead": true,
        "isReadReceiptRequested": false,
        "receivedDateTime": "2024-02-05T09:33:12Z",
        "sender": {
          "emailAddress": {
            "address": "JohannaL@10rqc2.onmicrosoft.com",
            "name": "Johanna Lorenz"
          }
        },
        "sentDateTime": "2024-02-05T09:33:11Z",
        "subject": "Purpose of life",
        "toRecipients": [
          {
            "emailAddress": {
              "address": "PradeepG@10rqc2.onmicrosoft.com",
              "name": "Pradeep Gupta"
            }
          }
        ],
        "webLink": "https://outlook.office365.com/owa/?AttachmentItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAABEgAQAEUyH0VS3HJBgHDlZdWZl02%3D&exvsurl=1&viewmodel=ItemAttachment"
      }
    },
    {
      "id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAABEgAQAEUyH0VS3HJBgHDlZdWZl03=",
      "@odata.type": "#microsoft.graph.itemAttachment",
      "item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')",
      "item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')/$ref",
      "isInline": false,
      "lastModifiedDateTime": "2024-02-05T09:33:46Z",
      "name": "Purpose of life part 3",
      "size": 11840,
      "item": {
        "id": "",
        "@odata.type": "#microsoft.graph.message",
        "createdDateTime": "2024-02-05T09:33:24Z",
        "lastModifiedDateTime": "2024-02-05T09:33:46Z",
        "attachments": [
          {
            "id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAACEgAQAEUyH0VS3HJBgHDlZdWZl0kSABAAjBhd4-oQaUS969pTkS-gzA==",
            "@odata.type": "#microsoft.graph.fileAttachment",
            "@odata.mediaContentType": "text/calendar",
            "contentType": "text/calendar",
            "isInline": false,
            "lastModifiedDateTime": "2024-02-05T09:33:46Z",
            "name": "Abidjan.ics",
            "size": 573,
            "contentBytes": "QkVHSU46VkNBTEVOREFSDQpQUk9ESUQ6LS8vdHp1cmwub3JnLy9OT05TR01MIE9sc29uIDIwMjNkLy9FTg0KVkVSU0lPTjoyLjANCkJFR0lOOlZUSU1FWk9ORQ0KVFpJRDpBZnJpY2EvQWJpZGphbg0KTEFTVC1NT0RJRklFRDoyMDIzMTIyMlQyMzMzNThaDQpUWlVSTDpodHRwczovL3d3dy50enVybC5vcmcvem9uZWluZm8vQWZyaWNhL0FiaWRqYW4NClgtTElDLUxPQ0FUSU9OOkFmcmljYS9BYmlkamFuDQpYLVBST0xFUFRJQy1UWk5BTUU6TE1UDQpCRUdJTjpTVEFOREFSRA0KVFpOQU1FOkdNVA0KVFpPRkZTRVRGUk9NOi0wMDE2MDgNClRaT0ZGU0VUVE86KzAwMDANCkRUU1RBUlQ6MTkxMjAxMDFUMDAwMDAwDQpFTkQ6U1RBTkRBUkQNCkVORDpWVElNRVpPTkUNCkVORDpWQ0FMRU5EQVINCg=="
          }
        ],
        "body": {
          "content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none;\"> P {margin-top:0;margin-bottom:0;} </style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family: Aptos, Aptos_EmbeddedFont, Aptos_MSFontService, Calibri, Helvetica, sans-serif; font-size: 12pt; color: rgb(0, 0, 0);\">I just realized the purpose of my life is to be a test case. Good to know.<br></div></body></html>",
          "contentType": "html"
        },
        "bodyPreview": "I just realized the purpose of my life is to be a test case. Good to know.",
        "conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAFEnxDqYmbJEm8d2l3qfS6A=",
        "conversationIndex": "AQHaWBYiUSfEOpiZskSbx3aXep9LoA==",
        "flag": {
          "flagStatus": "notFlagged"
        },
        "from": {
          "emailAddress": {
            "address": "JohannaL@10rqc2.onmicrosoft.com",
            "name": "Johanna Lorenz"
          }
        },
        "hasAttachments": true,
        "importance": "normal",
        "internetMessageId": "<SJ0PR04MB7294108E381BCCE5C207B6DEBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
        "isDeliveryReceiptRequested": false,
        "isDraft": false,
        "isRead": true,
        "isReadReceiptRequested": false,
        "receivedDateTime": "2024-02-05T09:33:12Z",
        "sender": {
          "emailAddress": {
            "address": "JohannaL@10rqc2.onmicrosoft.com",
            "name": "Johanna Lorenz"
          }
        },
        "sentDateTime": "2024-02-05T09:33:11Z",
        "subject": "Purpose of life",
        "toRecipients": [
          {
            "emailAddress": {
              "address": "PradeepG@10rqc2.onmicrosoft.com",
              "name": "Pradeep Gupta"
            }
          }
        ],
        "webLink": "https://outlook.office365.com/owa/?AttachmentItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAABEgAQAEUyH0VS3HJBgHDlZdWZl03%3D&exvsurl=1&viewmodel=ItemAttachment"
      }
    }
  ],
  "bccRecipients": [],
  "body": {
    "content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none\">\r\n<!--\r\np\r\n\t{margin-top:0;\r\n\tmargin-bottom:0}\r\n-->\r\n</style></head><body dir=\"ltr\"><div><span class=\"elementToProof\" style=\"font-family:Aptos,Aptos_EmbeddedFont,Aptos_MSFontService,Calibri,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0)\">Now, this is what we call nesting in this business.<br></span></div></body></html>",
    "contentType": "html"
  },
  "bodyPreview": "Now, this is what we call nesting in this business.",
  "ccRecipients": [],
  "conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAIv2-4RHwDhJhlqBV5PTE3Y=",
  "conversationIndex": "AQHaWBZdi/b/hEfAOEmGWoFXk9MTdg==",
  "flag": {
    "flagStatus": "notFlagged"
  },
  "from": {
    "emailAddress": {
      "address": "JohannaL@10rqc2.onmicrosoft.com",
      "name": "Johanna Lorenz"
    }
  },
  "hasAttachments": true,
  "importance": "normal",
  "inferenceClassification": "focused",
  "internetMessageId": "<SJ0PR04MB729409CE8C191E01151C110DBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
  "isDeliveryReceiptRequested": false,
  "isDraft": false,
  "isRead": true,
  "isReadReceiptRequested": false,
  "parentFolderId": "AQMkAGJiAGZhNjRlOC00OGI5LTQyNTItYjFkMy00NTJjMTgyZGZkMjQALgAAA0V2IruiJ9ZFvgAO6qBJFycBAEEUODQkmTtNjV_awmuHu00AAAIBCQAAAA==",
  "receivedDateTime": "2024-02-05T09:33:46Z",
  "replyTo": [],
  "sender": {
    "emailAddress": {
      "address": "JohannaL@10rqc2.onmicrosoft.com",
      "name": "Johanna Lorenz"
    }
  },
  "sentDateTime": "2024-02-05T09:33:45Z",
  "subject": "Fw: Purpose of life",
  "toRecipients": [
    {
      "emailAddress": {
        "address": "PradeepG@10rqc2.onmicrosoft.com",
        "name": "Pradeep Gupta"
      }
    }
  ],
  "webLink": "https://outlook.office365.com/owa/?ItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAA%3D&exvsurl=1&viewmodel=ReadMessageItem"
}

@@ -10,3 +10,6 @@ var EmailWithEventInfo string

//go:embed email-with-event-object.json
var EmailWithEventObject string

//go:embed email-within-email.json
var EmailWithinEmail string

@@ -166,3 +166,20 @@ var GraphTimeZoneToTZ = map[string]string{
    "Yukon Standard Time":   "America/Whitehorse",
    "tzone://Microsoft/Utc": "Etc/UTC",
}

// Map from alternative names to the canonical time zone name.
// These mappings are currently generated by manually going over the
// values in GraphTimeZoneToTZ that are not available in the tzdb.
var CanonicalTimeZoneMap = map[string]string{
    "Africa/Asmara":        "Africa/Asmera",
    "Asia/Calcutta":        "Asia/Kolkata",
    "Asia/Rangoon":         "Asia/Yangon",
    "Asia/Saigon":          "Asia/Ho_Chi_Minh",
    "Europe/Kiev":          "Europe/Kyiv",
    "Europe/Warsaw":        "Europe/Warszawa",
    "America/Buenos_Aires": "America/Argentina/Buenos_Aires",
    "America/Godthab":      "America/Nuuk",
    // NOTE: "Atlantic/Raykjavik" missing in tzdb but is in MS list

    "Etc/UTC": "UTC", // simplifying the time zone name
}
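
A hedged, self-contained sketch of the two-step normalization these maps enable (the trimmed local tables below are stand-ins for the real GraphTimeZoneToTZ and CanonicalTimeZoneMap; the flow mirrors getRecurrenceTimezone further down):

package main

import (
    "fmt"
    "time"
)

// Trimmed local copies of the two lookup tables, for illustration only.
var graphToTZ = map[string]string{"India Standard Time": "Asia/Calcutta"}
var canonical = map[string]string{"Asia/Calcutta": "Asia/Kolkata"}

// normalize resolves a Graph/Windows zone name to a loadable tzdb location:
// first Windows name -> IANA name, then alternative -> canonical name.
func normalize(name string) (*time.Location, error) {
    if tz, ok := graphToTZ[name]; ok {
        name = tz
    }

    if c, ok := canonical[name]; ok {
        name = c
    }

    return time.LoadLocation(name)
}

func main() {
    loc, err := normalize("India Standard Time")
    fmt.Println(loc, err) // Asia/Kolkata <nil>
}
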
@@ -5,6 +5,7 @@ import (
    "encoding/base64"
    "encoding/json"
    "fmt"
    "net/mail"
    "strings"
    "time"
    "unicode"
@@ -16,6 +17,7 @@ import (

    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/common/str"
    "github.com/alcionai/corso/src/internal/converters/ics/tzdata"
    "github.com/alcionai/corso/src/pkg/dttm"
    "github.com/alcionai/corso/src/pkg/logger"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
@@ -31,8 +33,9 @@ import (
// TODO locations: https://github.com/alcionai/corso/issues/5003

const (
    ICalDateTimeFormat = "20060102T150405Z"
    ICalDateFormat     = "20060102"
    ICalDateTimeFormat    = "20060102T150405"
    ICalDateTimeFormatUTC = "20060102T150405Z"
    ICalDateFormat        = "20060102"
)
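
The point of this change is the split between a floating local form and a Z-suffixed UTC form. A minimal hedged sketch (with local copies of the constants) of how the two layouts render the same instant:

package main

import (
    "fmt"
    "time"
)

const (
    icalDateTimeFormat    = "20060102T150405"  // floating, paired with a TZID parameter
    icalDateTimeFormatUTC = "20060102T150405Z" // absolute form; callers pass UTC times
)

func main() {
    ist, _ := time.LoadLocation("Asia/Kolkata")
    t := time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC)

    fmt.Println(t.Format(icalDateTimeFormatUTC))      // 20210102T030405Z
    fmt.Println(t.In(ist).Format(icalDateTimeFormat)) // 20210102T083405
}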

func keyValues(key, value string) *ics.KeyValues {
@@ -172,6 +175,17 @@ func getRecurrencePattern(
        recurComponents = append(recurComponents, "BYDAY="+prefix+strings.Join(dowComponents, ","))
    }

    // This is necessary to compute when weekly events recur
    fdow := pat.GetFirstDayOfWeek()
    if fdow != nil {
        icalday, ok := GraphToICalDOW[fdow.String()]
        if !ok {
            return "", clues.NewWC(ctx, "unknown first day of week").With("day", fdow)
        }

        recurComponents = append(recurComponents, "WKST="+icalday)
    }

    rrange := recurrence.GetRangeEscaped()
    if rrange != nil {
        switch ptr.Val(rrange.GetTypeEscaped()) {
@@ -195,7 +209,7 @@ func getRecurrencePattern(
                return "", clues.WrapWC(ctx, err, "parsing end time")
            }

            recurComponents = append(recurComponents, "UNTIL="+endTime.Format(ICalDateTimeFormat))
            recurComponents = append(recurComponents, "UNTIL="+endTime.Format(ICalDateTimeFormatUTC))
        }
    case models.NOEND_RECURRENCERANGETYPE:
        // Nothing to do
@@ -224,10 +238,15 @@ func FromEventable(ctx context.Context, event models.Eventable) (string, error)
    cal := ics.NewCalendar()
    cal.SetProductId("-//Alcion//Corso") // Does this have to be customizable?

    err := addTimeZoneComponents(ctx, cal, event)
    if err != nil {
        return "", clues.Wrap(err, "adding timezone components")
    }

    id := ptr.Val(event.GetId())
    iCalEvent := cal.AddEvent(id)

    err := updateEventProperties(ctx, event, iCalEvent)
    err = updateEventProperties(ctx, event, iCalEvent)
    if err != nil {
        return "", clues.Wrap(err, "updating event properties")
    }
@@ -258,7 +277,7 @@ func FromEventable(ctx context.Context, event models.Eventable) (string, error)
    exICalEvent := cal.AddEvent(id)
    start := exception.GetOriginalStart() // will always be in UTC

    exICalEvent.AddProperty(ics.ComponentProperty(ics.PropertyRecurrenceId), start.Format(ICalDateTimeFormat))
    exICalEvent.AddProperty(ics.ComponentProperty(ics.PropertyRecurrenceId), start.Format(ICalDateTimeFormatUTC))

    err = updateEventProperties(ctx, exception, exICalEvent)
    if err != nil {
@@ -269,6 +288,91 @@ func FromEventable(ctx context.Context, event models.Eventable) (string, error)
    return cal.Serialize(), nil
}

func getTZDataKeyValues(ctx context.Context, timezone string) (map[string]string, error) {
    template, ok := tzdata.TZData[timezone]
    if !ok {
        return nil, clues.NewWC(ctx, "timezone not found in tz database").
            With("timezone", timezone)
    }

    keyValues := map[string]string{}

    for _, line := range strings.Split(template, "\n") {
        splits := strings.SplitN(line, ":", 2)
        if len(splits) != 2 {
            return nil, clues.NewWC(ctx, "invalid tzdata line").
                With("line", line).
                With("timezone", timezone)
        }

        keyValues[splits[0]] = splits[1]
    }

    return keyValues, nil
}

func addTimeZoneComponents(ctx context.Context, cal *ics.Calendar, event models.Eventable) error {
    // Handling of timezones gets a bit tricky when we have to deal with
    // relative recurrence. The issue comes up when we set a recurrence
    // to be something like "repeat every 3rd Tuesday". Tuesday in UTC
    // and in IST can be different days, so we cannot just always use UTC.
    //
    // The way this is solved is by using the timezone in the
    // recurrence for the start and end timezones, as we have to use UTC
    // for UNTIL (mostly).
    // https://www.rfc-editor.org/rfc/rfc5545#section-3.3.10
    timezone, err := getRecurrenceTimezone(ctx, event)
    if err != nil {
        return clues.Stack(err)
    }

    if timezone != time.UTC {
        kvs, err := getTZDataKeyValues(ctx, timezone.String())
        if err != nil {
            return clues.Stack(err)
        }

        tz := cal.AddTimezone(timezone.String())

        for k, v := range kvs {
            tz.AddProperty(ics.ComponentProperty(k), v)
        }
    }

    return nil
}
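
The comment above carries the key design reasoning. A small hedged sketch makes the weekday shift concrete:

package main

import (
    "fmt"
    "time"
)

// A recurrence anchored to a weekday in IST can land on a different
// weekday once expressed in UTC. This is why the recurrence timezone is
// carried into the exported VTIMEZONE instead of flattening everything
// to UTC.
func main() {
    ist, _ := time.LoadLocation("Asia/Kolkata")

    // 01:00 on a Wednesday in IST ...
    t := time.Date(2024, 1, 3, 1, 0, 0, 0, ist)
    fmt.Println(t.Weekday())       // Wednesday
    fmt.Println(t.UTC().Weekday()) // Tuesday (19:30 the previous day)
}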

// getRecurrenceTimezone gets the timezone specified by the recurrence
// in the calendar. It does a normalization pass where we always convert
// the timezone to the value in the tzdb. If we don't have a recurrence
// timezone, we don't need a specific timezone in the export, and it is
// safe to return UTC from this method.
func getRecurrenceTimezone(ctx context.Context, event models.Eventable) (*time.Location, error) {
    if event.GetRecurrence() != nil {
        timezone := ptr.Val(event.GetRecurrence().GetRangeEscaped().GetRecurrenceTimeZone())

        ctz, ok := GraphTimeZoneToTZ[timezone]
        if ok {
            timezone = ctz
        }

        canon, ok := CanonicalTimeZoneMap[timezone]
        if ok {
            timezone = canon
        }

        loc, err := time.LoadLocation(timezone)
        if err != nil {
            return nil, clues.WrapWC(ctx, err, "unknown timezone").
                With("timezone", timezone)
        }

        return loc, nil
    }

    return time.UTC, nil
}

func isASCII(s string) bool {
    for _, c := range s {
        if c > unicode.MaxASCII {
@@ -279,6 +383,12 @@ func isASCII(s string) bool {
    return true
}

// Checks if a given string is a valid email address
func isEmail(em string) bool {
    _, err := mail.ParseAddress(em)
    return err == nil
}
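
A hedged sketch of what this check accepts and rejects; the second input mirrors the Exchange legacy DN discussed in the attendee-handling hunk below (both strings are illustrative):

package main

import (
    "fmt"
    "net/mail"
)

func main() {
    for _, in := range []string{
        "pradeepg@example.com", // parses: kept as an ATTENDEE
        "/o=ExchangeLabs/ou=Exchange Administrative Group/cn=Recipients/cn=jdoe", // no parse: skipped
    } {
        _, err := mail.ParseAddress(in)
        fmt.Println(in, "valid:", err == nil)
    }
}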

func updateEventProperties(ctx context.Context, event models.Eventable, iCalEvent *ics.VEvent) error {
    // CREATED - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.7.1
    created := event.GetCreatedDateTime()
@@ -292,6 +402,11 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
        iCalEvent.SetModifiedAt(ptr.Val(modified))
    }

    timezone, err := getRecurrenceTimezone(ctx, event)
    if err != nil {
        return err
    }

    // DTSTART - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.2.4
    allDay := ptr.Val(event.GetIsAllDay())
    startString := event.GetStart().GetDateTime()
@@ -303,11 +418,7 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
            return clues.WrapWC(ctx, err, "parsing start time")
        }

        if allDay {
            iCalEvent.SetStartAt(start, ics.WithValue(string(ics.ValueDataTypeDate)))
        } else {
            iCalEvent.SetStartAt(start)
        }
        addTime(iCalEvent, ics.ComponentPropertyDtStart, start, allDay, timezone)
    }

    // DTEND - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.2.2
@@ -320,11 +431,7 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
            return clues.WrapWC(ctx, err, "parsing end time")
        }

        if allDay {
            iCalEvent.SetEndAt(end, ics.WithValue(string(ics.ValueDataTypeDate)))
        } else {
            iCalEvent.SetEndAt(end)
        }
        addTime(iCalEvent, ics.ComponentPropertyDtEnd, end, allDay, timezone)
    }

    recurrence := event.GetRecurrence()
@@ -339,7 +446,7 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven

    // STATUS - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.1.11
    cancelled := event.GetIsCancelled()
    if cancelled != nil {
    if cancelled != nil && ptr.Val(cancelled) {
        iCalEvent.SetStatus(ics.ObjectStatusCancelled)
    }

@@ -377,7 +484,14 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
        desc := replacer.Replace(description)
        iCalEvent.AddProperty("X-ALT-DESC", desc, ics.WithFmtType("text/html"))
    } else {
        stripped, err := html2text.FromString(description, html2text.Options{PrettyTables: true})
        // Disable auto wrap, causes huge memory spikes
        // https://github.com/jaytaylor/html2text/issues/48
        prettyTablesOptions := html2text.NewPrettyTablesOptions()
        prettyTablesOptions.AutoWrapText = false

        stripped, err := html2text.FromString(
            description,
            html2text.Options{PrettyTables: true, PrettyTablesOptions: prettyTablesOptions})
        if err != nil {
            return clues.Wrap(err, "converting html to text").
                With("description_length", len(description))
@@ -481,8 +595,21 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
        }
    }

        // It is possible that we get non-email items like the one below,
        // which is an internal representation of the user in the
        // Exchange system. While we can technically output this as an
        // attendee, it is not useful, and downstream tools (such as
        // ones used to produce PSTs) can choke on it.
        // /o=ExchangeLabs/ou=ExchangeAdministrative Group(FY...LT)/cn=Recipients/cn=883...4a-John Doe
        addr := ptr.Val(attendee.GetEmailAddress().GetAddress())
        iCalEvent.AddAttendee(addr, props...)
        if isEmail(addr) {
            iCalEvent.AddAttendee(addr, props...)
        } else {
            logger.Ctx(ctx).
                With("attendee_email", addr).
                With("attendee_name", name).
                Info("skipping non email attendee from ics export")
        }
    }

    // LOCATION - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.1.7
@@ -610,6 +737,26 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
    return nil
}

func addTime(iCalEvent *ics.VEvent, prop ics.ComponentProperty, tm time.Time, allDay bool, tzLoc *time.Location) {
    if allDay {
        if tzLoc == time.UTC {
            iCalEvent.SetProperty(prop, tm.Format(ICalDateFormat), ics.WithValue(string(ics.ValueDataTypeDate)))
        } else {
            iCalEvent.SetProperty(
                prop,
                tm.In(tzLoc).Format(ICalDateFormat),
                ics.WithValue(string(ics.ValueDataTypeDate)),
                keyValues("TZID", tzLoc.String()))
        }
    } else {
        if tzLoc == time.UTC {
            iCalEvent.SetProperty(prop, tm.Format(ICalDateTimeFormatUTC))
        } else {
            iCalEvent.SetProperty(prop, tm.In(tzLoc).Format(ICalDateTimeFormat), keyValues("TZID", tzLoc.String()))
        }
    }
}

func getCancelledDates(ctx context.Context, event models.Eventable) ([]time.Time, error) {
    dateStrings, err := api.GetCancelledEventDateStrings(event)
    if err != nil {

@@ -13,6 +13,7 @@ import (
    "testing"
    "time"

    ics "github.com/arran4/golang-ical"
    "github.com/microsoft/kiota-abstractions-go/serialization"
    kjson "github.com/microsoft/kiota-serialization-json-go"
    "github.com/microsoftgraph/msgraph-sdk-go/models"
@@ -21,6 +22,7 @@ import (
    "github.com/stretchr/testify/suite"

    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/converters/ics/tzdata"
    "github.com/alcionai/corso/src/internal/tester"
)

@@ -32,7 +34,7 @@ func TestICSUnitSuite(t *testing.T) {
    suite.Run(t, &ICSUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *ICSUnitSuite) TestGetLocationString() {
func (s *ICSUnitSuite) TestGetLocationString() {
    table := []struct {
        name string
        loc  func() models.Locationable
@@ -110,13 +112,13 @@ func (suite *ICSUnitSuite) TestGetLocationString() {
    }

    for _, tt := range table {
        suite.Run(tt.name, func() {
            assert.Equal(suite.T(), tt.expect, getLocationString(tt.loc()))
        s.Run(tt.name, func() {
            assert.Equal(s.T(), tt.expect, getLocationString(tt.loc()))
        })
    }
}

func (suite *ICSUnitSuite) TestGetUTCTime() {
func (s *ICSUnitSuite) TestGetUTCTime() {
    table := []struct {
        name      string
        timestamp string
@@ -162,18 +164,18 @@ func (suite *ICSUnitSuite) TestGetUTCTime() {
    }

    for _, tt := range table {
        suite.Run(tt.name, func() {
        s.Run(tt.name, func() {
            t, err := GetUTCTime(tt.timestamp, tt.timezone)
            tt.errCheck(suite.T(), err)
            tt.errCheck(s.T(), err)

            if !tt.time.Equal(time.Time{}) {
                assert.Equal(suite.T(), tt.time, t)
                assert.Equal(s.T(), tt.time, t)
            }
        })
    }
}

func (suite *ICSUnitSuite) TestGetRecurrencePattern() {
func (s *ICSUnitSuite) TestGetRecurrencePattern() {
    table := []struct {
        name       string
        recurrence func() models.PatternedRecurrenceable
@@ -187,16 +189,37 @@ func (suite *ICSUnitSuite) TestGetRecurrencePattern() {
                pat := models.NewRecurrencePattern()

                typ, err := models.ParseRecurrencePatternType("daily")
                require.NoError(suite.T(), err)
                require.NoError(s.T(), err)

                pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
                pat.SetInterval(ptr.To(int32(1)))
                pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))

                rec.SetPattern(pat)

                return rec
            },
            expect: "FREQ=DAILY;INTERVAL=1",
            expect: "FREQ=DAILY;INTERVAL=1;WKST=SU",
            errCheck: require.NoError,
        },
        {
            name: "daily different start of week",
            recurrence: func() models.PatternedRecurrenceable {
                rec := models.NewPatternedRecurrence()
                pat := models.NewRecurrencePattern()

                typ, err := models.ParseRecurrencePatternType("daily")
                require.NoError(s.T(), err)

                pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
                pat.SetInterval(ptr.To(int32(1)))
                pat.SetFirstDayOfWeek(ptr.To(models.MONDAY_DAYOFWEEK))

                rec.SetPattern(pat)

                return rec
            },
            expect: "FREQ=DAILY;INTERVAL=1;WKST=MO",
            errCheck: require.NoError,
        },
        {
@@ -206,15 +229,16 @@ func (suite *ICSUnitSuite) TestGetRecurrencePattern() {
                pat := models.NewRecurrencePattern()

                typ, err := models.ParseRecurrencePatternType("daily")
                require.NoError(suite.T(), err)
                require.NoError(s.T(), err)

                pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
                pat.SetInterval(ptr.To(int32(1)))
                pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))

                rng := models.NewRecurrenceRange()

                rrtype, err := models.ParseRecurrenceRangeType("endDate")
                require.NoError(suite.T(), err)
                require.NoError(s.T(), err)

                rng.SetTypeEscaped(rrtype.(*models.RecurrenceRangeType))

@@ -227,7 +251,7 @@ func (suite *ICSUnitSuite) TestGetRecurrencePattern() {

                return rec
            },
            expect: "FREQ=DAILY;INTERVAL=1;UNTIL=20210101T182959Z",
            expect: "FREQ=DAILY;INTERVAL=1;WKST=SU;UNTIL=20210101T182959Z",
            errCheck: require.NoError,
        },
        {
@@ -237,16 +261,17 @@ func (suite *ICSUnitSuite) TestGetRecurrencePattern() {
                pat := models.NewRecurrencePattern()

                typ, err := models.ParseRecurrencePatternType("weekly")
                require.NoError(suite.T(), err)
                require.NoError(s.T(), err)

                pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
                pat.SetInterval(ptr.To(int32(1)))
                pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))

                rec.SetPattern(pat)

                return rec
            },
            expect: "FREQ=WEEKLY;INTERVAL=1",
            expect: "FREQ=WEEKLY;INTERVAL=1;WKST=SU",
            errCheck: require.NoError,
        },
        {
@@ -256,15 +281,16 @@ func (suite *ICSUnitSuite) TestGetRecurrencePattern() {
                pat := models.NewRecurrencePattern()

                typ, err := models.ParseRecurrencePatternType("weekly")
                require.NoError(suite.T(), err)
                require.NoError(s.T(), err)

                pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
                pat.SetInterval(ptr.To(int32(1)))
                pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))

                rng := models.NewRecurrenceRange()

                rrtype, err := models.ParseRecurrenceRangeType("endDate")
                require.NoError(suite.T(), err)
                require.NoError(s.T(), err)

                rng.SetTypeEscaped(rrtype.(*models.RecurrenceRangeType))

@@ -277,7 +303,7 @@ func (suite *ICSUnitSuite) TestGetRecurrencePattern() {

                return rec
            },
            expect: "FREQ=WEEKLY;INTERVAL=1;UNTIL=20210101T235959Z",
            expect: "FREQ=WEEKLY;INTERVAL=1;WKST=SU;UNTIL=20210101T235959Z",
            errCheck: require.NoError,
        },
        {
@@ -287,15 +313,16 @@ func (suite *ICSUnitSuite) TestGetRecurrencePattern() {
                pat := models.NewRecurrencePattern()

                typ, err := models.ParseRecurrencePatternType("weekly")
                require.NoError(suite.T(), err)
                require.NoError(s.T(), err)

                pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
                pat.SetInterval(ptr.To(int32(1)))
                pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))

                rng := models.NewRecurrenceRange()

                rrtype, err := models.ParseRecurrenceRangeType("numbered")
                require.NoError(suite.T(), err)
                require.NoError(s.T(), err)

                rng.SetTypeEscaped(rrtype.(*models.RecurrenceRangeType))

@@ -307,7 +334,7 @@ func (suite *ICSUnitSuite) TestGetRecurrencePattern() {

                return rec
            },
            expect: "FREQ=WEEKLY;INTERVAL=1;COUNT=10",
            expect: "FREQ=WEEKLY;INTERVAL=1;WKST=SU;COUNT=10",
            errCheck: require.NoError,
        },
        {
@@ -317,10 +344,11 @@ func (suite *ICSUnitSuite) TestGetRecurrencePattern() {
                pat := models.NewRecurrencePattern()

                typ, err := models.ParseRecurrencePatternType("weekly")
                require.NoError(suite.T(), err)
                require.NoError(s.T(), err)

                pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
                pat.SetInterval(ptr.To(int32(1)))
                pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))

                days := []models.DayOfWeek{
                    models.MONDAY_DAYOFWEEK,
@@ -334,7 +362,7 @@ func (suite *ICSUnitSuite) TestGetRecurrencePattern() {

                return rec
            },
            expect: "FREQ=WEEKLY;INTERVAL=1;BYDAY=MO,WE,TH",
            expect: "FREQ=WEEKLY;INTERVAL=1;BYDAY=MO,WE,TH;WKST=SU",
            errCheck: require.NoError,
        },
        {
@@ -344,16 +372,17 @@ func (suite *ICSUnitSuite) TestGetRecurrencePattern() {
                pat := models.NewRecurrencePattern()

                typ, err := models.ParseRecurrencePatternType("daily")
                require.NoError(suite.T(), err)
                require.NoError(s.T(), err)

                pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
                pat.SetInterval(ptr.To(int32(2)))
                pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))

                rec.SetPattern(pat)

                return rec
            },
            expect: "FREQ=DAILY;INTERVAL=2",
            expect: "FREQ=DAILY;INTERVAL=2;WKST=SU",
            errCheck: require.NoError,
        },
        {
@@ -363,10 +392,11 @@ func (suite *ICSUnitSuite) TestGetRecurrencePattern() {
                pat := models.NewRecurrencePattern()

                typ, err := models.ParseRecurrencePatternType("absoluteMonthly")
                require.NoError(suite.T(), err)
                require.NoError(s.T(), err)

                pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
                pat.SetInterval(ptr.To(int32(1)))
                pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))

                pat.SetDayOfMonth(ptr.To(int32(5)))

@@ -374,7 +404,7 @@ func (suite *ICSUnitSuite) TestGetRecurrencePattern() {

                return rec
            },
            expect: "FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=5",
            expect: "FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=5;WKST=SU",
            errCheck: require.NoError,
        },
        {
@@ -384,10 +414,11 @@ func (suite *ICSUnitSuite) TestGetRecurrencePattern() {
                pat := models.NewRecurrencePattern()

                typ, err := models.ParseRecurrencePatternType("absoluteYearly")
                require.NoError(suite.T(), err)
                require.NoError(s.T(), err)

                pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
                pat.SetInterval(ptr.To(int32(3)))
                pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))

                pat.SetMonth(ptr.To(int32(8)))

@@ -395,7 +426,7 @@ func (suite *ICSUnitSuite) TestGetRecurrencePattern() {

                return rec
            },
            expect: "FREQ=YEARLY;INTERVAL=3;BYMONTH=8",
            expect: "FREQ=YEARLY;INTERVAL=3;BYMONTH=8;WKST=SU",
            errCheck: require.NoError,
        },
        {
@@ -405,37 +436,38 @@ func (suite *ICSUnitSuite) TestGetRecurrencePattern() {
                pat := models.NewRecurrencePattern()

                typ, err := models.ParseRecurrencePatternType("relativeYearly")
                require.NoError(suite.T(), err)
                require.NoError(s.T(), err)

                pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
                pat.SetInterval(ptr.To(int32(1)))
                pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))

                pat.SetMonth(ptr.To(int32(8)))
                pat.SetDaysOfWeek([]models.DayOfWeek{models.FRIDAY_DAYOFWEEK})

                wi, err := models.ParseWeekIndex("first")
                require.NoError(suite.T(), err)
                require.NoError(s.T(), err)
                pat.SetIndex(wi.(*models.WeekIndex))

                rec.SetPattern(pat)

                return rec
            },
            expect: "FREQ=YEARLY;INTERVAL=1;BYMONTH=8;BYDAY=1FR",
            expect: "FREQ=YEARLY;INTERVAL=1;BYMONTH=8;BYDAY=1FR;WKST=SU",
            errCheck: require.NoError,
        },
        // TODO(meain): could still use more tests for edge cases of time
    }

    for _, tt := range table {
        suite.Run(tt.name, func() {
            ctx, flush := tester.NewContext(suite.T())
        s.Run(tt.name, func() {
            ctx, flush := tester.NewContext(s.T())
            defer flush()

            rec, err := getRecurrencePattern(ctx, tt.recurrence())
            tt.errCheck(suite.T(), err)
            tt.errCheck(s.T(), err)

            assert.Equal(suite.T(), tt.expect, rec)
            assert.Equal(s.T(), tt.expect, rec)
        })
    }
}

@@ -460,8 +492,8 @@ func baseEvent() *models.Event {
    return e
}

func (suite *ICSUnitSuite) TestEventConversion() {
    t := suite.T()
func (s *ICSUnitSuite) TestEventConversion() {
    t := s.T()

    table := []struct {
        name string
@@ -546,14 +578,19 @@ func (suite *ICSUnitSuite) TestEventConversion() {

                rec := models.NewPatternedRecurrence()
                pat := models.NewRecurrencePattern()
                rng := models.NewRecurrenceRange()

                typ, err := models.ParseRecurrencePatternType("daily")
                require.NoError(t, err)

                pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
                pat.SetInterval(ptr.To(int32(1)))
                pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))

                rng.SetRecurrenceTimeZone(ptr.To("UTC"))

                rec.SetPattern(pat)
                rec.SetRangeEscaped(rng)

                e.SetRecurrence(rec)

@@ -576,6 +613,19 @@ func (suite *ICSUnitSuite) TestEventConversion() {
                assert.Contains(t, out, "STATUS:CANCELLED", "cancelled status")
            },
        },
        {
            name: "not cancelled event",
            event: func() *models.Event {
                e := baseEvent()

                e.SetIsCancelled(ptr.To(false))

                return e
            },
            check: func(out string) {
                assert.NotContains(t, out, "STATUS:CANCELLED", "cancelled status")
            },
        },
        {
            name: "text body",
            event: func() *models.Event {
@@ -817,8 +867,8 @@ func (suite *ICSUnitSuite) TestEventConversion() {
    }

    for _, tt := range table {
        suite.Run(tt.name, func() {
            t := suite.T()
        s.Run(tt.name, func() {
            t := s.T()

            ctx, flush := tester.NewContext(t)
            defer flush()
@@ -868,8 +918,8 @@ func checkAttendee(t *testing.T, out, check, msg string) {
    assert.ElementsMatch(t, as, bs, fmt.Sprintf("fields %s", msg))
}

func (suite *ICSUnitSuite) TestAttendees() {
    t := suite.T()
func (s *ICSUnitSuite) TestAttendees() {
    t := s.T()

    table := []struct {
        name string
@@ -895,6 +945,17 @@ func (suite *ICSUnitSuite) TestAttendees() {
                "attendee")
        },
    },
    {
        name: "attendee with internal exchange representation for email",
        att: [][]string{{
            "/o=ExchangeLabs/ou=ExchangeAdministrative Group(FY...LT)/cn=Recipients/cn=883...4a-John Doe",
            "required",
            "declined",
        }},
        check: func(out string) {
            assert.NotContains(t, out, "ATTENDEE")
        },
    },
    {
        name: "multiple attendees",
        att: [][]string{
@@ -925,8 +986,8 @@ func (suite *ICSUnitSuite) TestAttendees() {
    }

    for _, tt := range table {
        suite.Run(tt.name, func() {
            t := suite.T()
        s.Run(tt.name, func() {
            t := s.T()

            ctx, flush := tester.NewContext(t)
            defer flush()
@@ -1047,8 +1108,8 @@ func checkAttachment(t *testing.T, out, check, msg string) {
    assert.ElementsMatch(t, as, bs, fmt.Sprintf("fields %s", msg))
}

func (suite *ICSUnitSuite) TestAttachments() {
    t := suite.T()
func (s *ICSUnitSuite) TestAttachments() {
    t := s.T()

    type attachment struct {
        cid string // contentid
@@ -1104,8 +1165,8 @@ func (suite *ICSUnitSuite) TestAttachments() {
    }

    for _, tt := range table {
        suite.Run(tt.name, func() {
            t := suite.T()
        s.Run(tt.name, func() {
            t := s.T()

            ctx, flush := tester.NewContext(t)
            defer flush()
@@ -1148,7 +1209,7 @@ func (suite *ICSUnitSuite) TestAttachments() {
        }
    }

func (suite *ICSUnitSuite) TestCancellations() {
func (s *ICSUnitSuite) TestCancellations() {
    table := []struct {
        name         string
        cancelledIds []string
@@ -1172,8 +1233,8 @@ func (suite *ICSUnitSuite) TestCancellations() {
    }

    for _, tt := range table {
        suite.Run(tt.name, func() {
            t := suite.T()
        s.Run(tt.name, func() {
            t := s.T()

            ctx, flush := tester.NewContext(t)
            defer flush()
@@ -1236,7 +1297,7 @@ func eventToJSON(e *models.Event) ([]byte, error) {
    return bts, err
}

func (suite *ICSUnitSuite) TestEventExceptions() {
func (s *ICSUnitSuite) TestEventExceptions() {
    table := []struct {
        name  string
        event func() *models.Event
@@ -1258,7 +1319,7 @@ func (suite *ICSUnitSuite) TestEventExceptions() {
                exception.SetEnd(newEnd)

                parsed, err := eventToMap(exception)
                require.NoError(suite.T(), err, "parsing exception")
                require.NoError(s.T(), err, "parsing exception")

                // add exception event to additional data
                e.SetAdditionalData(map[string]any{
@@ -1277,15 +1338,15 @@ func (suite *ICSUnitSuite) TestEventExceptions() {
                    }
                }

                assert.Equal(suite.T(), 2, events, "number of events")
                assert.Equal(s.T(), 2, events, "number of events")

                assert.Contains(suite.T(), out, "RECURRENCE-ID:20210101T120000Z", "recurrence id")
                assert.Contains(s.T(), out, "RECURRENCE-ID:20210101T120000Z", "recurrence id")

                assert.Contains(suite.T(), out, "SUMMARY:Subject", "original event")
                assert.Contains(suite.T(), out, "SUMMARY:Exception", "exception event")
                assert.Contains(s.T(), out, "SUMMARY:Subject", "original event")
                assert.Contains(s.T(), out, "SUMMARY:Exception", "exception event")

                assert.Contains(suite.T(), out, "DTSTART:20210101T130000Z", "new start time")
                assert.Contains(suite.T(), out, "DTEND:20210101T140000Z", "new end time")
                assert.Contains(s.T(), out, "DTSTART:20210101T130000Z", "new start time")
                assert.Contains(s.T(), out, "DTEND:20210101T140000Z", "new end time")
            },
        },
        {
@@ -1314,10 +1375,10 @@ func (suite *ICSUnitSuite) TestEventExceptions() {
                exception2.SetEnd(newEnd)

                parsed1, err := eventToMap(exception1)
                require.NoError(suite.T(), err, "parsing exception 1")
                require.NoError(s.T(), err, "parsing exception 1")

                parsed2, err := eventToMap(exception2)
                require.NoError(suite.T(), err, "parsing exception 2")
                require.NoError(s.T(), err, "parsing exception 2")

                // add exception event to additional data
                e.SetAdditionalData(map[string]any{
@@ -1336,36 +1397,230 @@ func (suite *ICSUnitSuite) TestEventExceptions() {
                    }
                }

                assert.Equal(suite.T(), 3, events, "number of events")
                assert.Equal(s.T(), 3, events, "number of events")

                assert.Contains(suite.T(), out, "RECURRENCE-ID:20210101T120000Z", "recurrence id 1")
                assert.Contains(suite.T(), out, "RECURRENCE-ID:20210102T120000Z", "recurrence id 2")
                assert.Contains(s.T(), out, "RECURRENCE-ID:20210101T120000Z", "recurrence id 1")
                assert.Contains(s.T(), out, "RECURRENCE-ID:20210102T120000Z", "recurrence id 2")

                assert.Contains(suite.T(), out, "SUMMARY:Subject", "original event")
                assert.Contains(suite.T(), out, "SUMMARY:Exception 1", "exception event 1")
                assert.Contains(suite.T(), out, "SUMMARY:Exception 2", "exception event 2")
                assert.Contains(s.T(), out, "SUMMARY:Subject", "original event")
                assert.Contains(s.T(), out, "SUMMARY:Exception 1", "exception event 1")
                assert.Contains(s.T(), out, "SUMMARY:Exception 2", "exception event 2")

                assert.Contains(suite.T(), out, "DTSTART:20210101T130000Z", "new start time 1")
                assert.Contains(suite.T(), out, "DTEND:20210101T140000Z", "new end time 1")
                assert.Contains(s.T(), out, "DTSTART:20210101T130000Z", "new start time 1")
                assert.Contains(s.T(), out, "DTEND:20210101T140000Z", "new end time 1")

                assert.Contains(suite.T(), out, "DTSTART:20210102T130000Z", "new start time 2")
                assert.Contains(suite.T(), out, "DTEND:20210102T140000Z", "new end time 2")
                assert.Contains(s.T(), out, "DTSTART:20210102T130000Z", "new start time 2")
                assert.Contains(s.T(), out, "DTEND:20210102T140000Z", "new end time 2")
            },
        },
    }

    for _, tt := range table {
        suite.Run(tt.name, func() {
            ctx, flush := tester.NewContext(suite.T())
        s.Run(tt.name, func() {
            ctx, flush := tester.NewContext(s.T())
            defer flush()

            bts, err := eventToJSON(tt.event())
            require.NoError(suite.T(), err, "getting serialized content")
            require.NoError(s.T(), err, "getting serialized content")

            out, err := FromJSON(ctx, bts)
            require.NoError(suite.T(), err, "converting to ics")
            require.NoError(s.T(), err, "converting to ics")

            tt.check(out)
        })
    }
}

func (s *ICSUnitSuite) TestGetRecurrenceTimezone() {
    table := []struct {
        name  string
        intz  string
        outtz string
    }{
        {
            name:  "empty",
            intz:  "",
            outtz: "UTC",
        },
        {
            name:  "utc",
            intz:  "UTC",
            outtz: "UTC",
        },
        {
            name:  "simple",
            intz:  "Asia/Kolkata",
            outtz: "Asia/Kolkata",
        },
        {
            name:  "windows tz",
            intz:  "India Standard Time",
            outtz: "Asia/Kolkata",
        },
        {
            name:  "non canonical",
            intz:  "Asia/Calcutta",
            outtz: "Asia/Kolkata",
        },
    }

    for _, tt := range table {
        s.Run(tt.name, func() {
            ctx, flush := tester.NewContext(s.T())
            defer flush()

            event := baseEvent()
            if len(tt.intz) > 0 {
                recur := models.NewPatternedRecurrence()
                rp := models.NewRecurrenceRange()
                rp.SetRecurrenceTimeZone(ptr.To(tt.intz))

                recur.SetRangeEscaped(rp)
                event.SetRecurrence(recur)
            }

            timezone, err := getRecurrenceTimezone(ctx, event)
            require.NoError(s.T(), err)
            assert.Equal(s.T(), tt.outtz, timezone.String())
        })
    }
}

func (s *ICSUnitSuite) TestAddTimezoneComponents() {
    event := baseEvent()
    recur := models.NewPatternedRecurrence()
    rp := models.NewRecurrenceRange()
    rp.SetRecurrenceTimeZone(ptr.To("Asia/Kolkata"))

    recur.SetRangeEscaped(rp)
    event.SetRecurrence(recur)

    ctx, flush := tester.NewContext(s.T())
    defer flush()

    cal := ics.NewCalendar()

    err := addTimeZoneComponents(ctx, cal, event)
    require.NoError(s.T(), err)

    text := cal.Serialize()
    assert.Contains(s.T(), text, "BEGIN:VTIMEZONE", "beginning of timezone")
    assert.Contains(s.T(), text, "TZID:Asia/Kolkata", "timezone id")
    assert.Contains(s.T(), text, "END:VTIMEZONE", "end of timezone")
}

func (s *ICSUnitSuite) TestAddTime() {
    local, err := time.LoadLocation("Asia/Kolkata")
    require.NoError(s.T(), err)

    table := []struct {
        name   string
        prop   ics.ComponentProperty
        time   time.Time
        allDay bool
        loc    *time.Location
        exp    string
    }{
        {
            name:   "utc",
            prop:   ics.ComponentPropertyDtStart,
            time:   time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC),
            allDay: false,
            loc:    time.UTC,
            exp:    "DTSTART:20210102T030405Z",
        },
        {
            name:   "local",
            prop:   ics.ComponentPropertyDtStart,
            time:   time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC),
            allDay: false,
            loc:    local,
            exp:    "DTSTART;TZID=Asia/Kolkata:20210102T083405",
        },
        {
            name:   "all day",
            prop:   ics.ComponentPropertyDtStart,
            time:   time.Date(2021, 1, 2, 0, 0, 0, 0, time.UTC),
            allDay: true,
            loc:    time.UTC,
            exp:    "DTSTART;VALUE=DATE:20210102",
        },
        {
            name:   "all day local",
            prop:   ics.ComponentPropertyDtStart,
            time:   time.Date(2021, 1, 2, 0, 0, 0, 0, time.UTC),
            allDay: true,
            loc:    local,
            exp:    "DTSTART;VALUE=DATE;TZID=Asia/Kolkata:20210102",
        },
        {
            name:   "end",
            prop:   ics.ComponentPropertyDtEnd,
            time:   time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC),
            allDay: false,
            loc:    time.UTC,
            exp:    "DTEND:20210102T030405Z",
        },
        {
            // This won't happen in practice, but it is a good test for loc handling
            name:   "windows tz",
            prop:   ics.ComponentPropertyDtStart,
            time:   time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC),
            allDay: false,
            loc:    time.FixedZone("India Standard Time", 5*60*60+30*60),
            exp:    "DTSTART;TZID=India Standard Time:20210102T083405",
        },
    }

    for _, tt := range table {
        s.Run(tt.name, func() {
            cal := ics.NewCalendar()
            evt := cal.AddEvent("id")

            addTime(evt, tt.prop, tt.time, tt.allDay, tt.loc)

            expSplits := strings.FieldsFunc(tt.exp, func(c rune) bool {
                return c == ':' || c == ';'
            })

            text := cal.Serialize()
            checkLine := ""

            for _, l := range strings.Split(text, "\r\n") {
                if strings.HasPrefix(l, string(tt.prop)) {
                    checkLine = l
                    break
                }
            }

            actSplits := strings.FieldsFunc(checkLine, func(c rune) bool {
                return c == ':' || c == ';'
            })

            assert.Greater(s.T(), len(checkLine), 0, "line not found")
            assert.Equal(s.T(), len(expSplits), len(actSplits), "length of fields")
            assert.ElementsMatch(s.T(), expSplits, actSplits, "fields")
        })
    }
}

// This tests and ensures that the generated data is in the format
// that we expect.
func (s *ICSUnitSuite) TestGetTZDataKeyValues() {
    for key := range tzdata.TZData {
        s.Run(key, func() {
            ctx, flush := tester.NewContext(s.T())
            defer flush()

            data, err := getTZDataKeyValues(ctx, key)
            require.NoError(s.T(), err)

            assert.NotEmpty(s.T(), data, "data")
            assert.NotContains(s.T(), data, "BEGIN", "beginning of timezone") // should be stripped
            assert.NotContains(s.T(), data, "END", "end of timezone")         // should be stripped
            assert.NotContains(s.T(), data, "TZID", "timezone id")            // should be stripped
            assert.Contains(s.T(), data, "DTSTART", "start time")
            assert.Contains(s.T(), data, "TZOFFSETFROM", "offset from")
        })
    }
}

2796 src/internal/converters/ics/tzdata/data.go (new file)
File diff suppressed because it is too large
35 src/internal/converters/ics/tzdata/fetch.sh (new executable file)
@ -0,0 +1,35 @@
#!/bin/bash

set -eo pipefail

if ! echo "$PWD" | grep -q '/tzdata$'; then
  echo "Please run this script from the tzdata dir"
  exit 1
fi

# TODO: Generate from https://www.iana.org/time-zones
if [ ! -d /tmp/corso-tzdata ]; then
  git clone --depth 1 https://github.com/add2cal/timezones-ical-library.git /tmp/corso-tzdata
else
  cd /tmp/corso-tzdata
  git pull
  cd -
fi

# Generate a huge go file with all the timezones
echo "package tzdata" >data.go
echo "" >>data.go

echo "var TZData = map[string]string{" >>data.go

find /tmp/corso-tzdata/ -name '*.ics' | while read -r f; do
  tz=$(echo "$f" | sed 's|/tmp/corso-tzdata/api/||;s|\.ics$||')
  echo "Processing $tz"
  printf "\t\"%s\": \`" "$tz" >>data.go
  grep -Ev "(BEGIN:|END:|TZID:)" "$f" |
    sed 's|`|\\`|g;s|\r||;s|TZID:/timezones-ical-library/|TZID:|' |
    perl -pe 'chomp if eof' >>data.go
  echo "\`," >>data.go
done

echo "}" >>data.go
@ -86,7 +86,7 @@ func FromJSON(ctx context.Context, body []byte) (string, error) {

    data, err := api.BytesToContactable(body)
    if err != nil {
        return "", clues.Wrap(err, "converting to contactable").
        return "", clues.WrapWC(ctx, err, "converting to contactable").
            With("body_length", len(body))
    }

@ -4,6 +4,7 @@ import (
    "context"
    "fmt"
    "path/filepath"
    "reflect"
    "sync"
    "time"

@ -24,11 +25,14 @@ import (
    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/kopia/retention"
    "github.com/alcionai/corso/src/pkg/control/repository"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/logger"
    "github.com/alcionai/corso/src/pkg/storage"
)

const (
    corsoWrapperAlertNamespace = "corso-kopia-wrapper"

    defaultKopiaConfigDir   = "/tmp/"
    kopiaConfigFileTemplate = "repository-%s.config"
    defaultCompressor       = "zstd-better-compression"
@ -55,6 +59,15 @@ const (
    minEpochDurationUpperBound = 7 * 24 * time.Hour
)

// allValidCompressors is the set of compression algorithms either currently
// being used or that were previously used. Use this during the config verify
// command to avoid spurious errors. We can revisit whether we want to update
// the config in those old repos at a later time.
var allValidCompressors = map[compression.Name]struct{}{
    compression.Name(defaultCompressor): {},
    compression.Name("s2-default"):      {},
}

var (
    ErrSettingDefaultConfig = clues.New("setting default repo config values")
    ErrorRepoAlreadyExists  = clues.New("repo already exists")
@ -145,12 +158,16 @@ func (w *conn) Initialize(
        RetentionPeriod: blobCfg.RetentionPeriod,
    }

    var initErr error

    if err = repo.Initialize(ctx, bst, &kopiaOpts, cfg.CorsoPassphrase); err != nil {
        if errors.Is(err, repo.ErrAlreadyInitialized) {
            return clues.StackWC(ctx, ErrorRepoAlreadyExists, err)
        if !errors.Is(err, repo.ErrAlreadyInitialized) {
            return clues.WrapWC(ctx, err, "initializing repo")
        }

        return clues.WrapWC(ctx, err, "initializing repo")
        logger.Ctx(ctx).Info("repo already exists, verifying repo config")

        initErr = clues.StackWC(ctx, ErrorRepoAlreadyExists, err)
    }

    err = w.commonConnect(
@ -162,7 +179,10 @@ func (w *conn) Initialize(
        cfg.CorsoPassphrase,
        defaultCompressor)
    if err != nil {
        return err
        // If the repo already exists then give some indication of that to help the
        // user debug. For example, they could have called init again on a repo that
        // already exists but accidentally used a different passphrase.
        return clues.Stack(err, initErr)
    }
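
A hedged sketch of caller-side handling, mirroring the tests further down: a repeat init against an existing repo can still connect cleanly, while a mismatched passphrase surfaces both the connect failure and the ErrorRepoAlreadyExists sentinel.

// Hypothetical caller: distinguish "repo already exists" from other failures.
err := k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
if errors.Is(err, ErrorRepoAlreadyExists) {
    // The repo was already initialized; since connecting also failed, suspect
    // a config mismatch such as a different passphrase.
    return clues.Wrap(err, "repo exists; verify passphrase and config")
}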

    if err := w.setDefaultConfigValues(ctx); err != nil {
@ -736,3 +756,115 @@ func (w *conn) updatePersistentConfig(
        "persisting updated config").
        OrNil()
}

func (w *conn) verifyDefaultPolicyConfigOptions(
    ctx context.Context,
    errs *fault.Bus,
) {
    const alertName = "kopia-global-policy"

    globalPol, err := w.getGlobalPolicyOrEmpty(ctx)
    if err != nil {
        errs.AddAlert(ctx, fault.NewAlert(
            err.Error(),
            corsoWrapperAlertNamespace,
            "fetch-policy",
            alertName,
            nil))

        return
    }

    ctx = clues.Add(ctx, "current_global_policy", globalPol.String())

    if _, ok := allValidCompressors[globalPol.CompressionPolicy.CompressorName]; !ok {
        errs.AddAlert(ctx, fault.NewAlert(
            "unexpected compressor",
            corsoWrapperAlertNamespace,
            "compressor",
            alertName,
            nil))
    }

    // Need to use deep equals because the values are pointers to optional types.
    // That makes regular equality checks fail even if the data contained in each
    // policy is the same.
    if !reflect.DeepEqual(globalPol.RetentionPolicy, defaultRetention) {
        errs.AddAlert(ctx, fault.NewAlert(
            "unexpected retention policy",
            corsoWrapperAlertNamespace,
            "retention-policy",
            alertName,
            nil))
    }

    if globalPol.SchedulingPolicy.Interval() != defaultSchedulingInterval {
        errs.AddAlert(ctx, fault.NewAlert(
            "unexpected scheduling interval",
            corsoWrapperAlertNamespace,
            "scheduling-interval",
            alertName,
            nil))
    }
}
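
The deep-equals comment above is the crux: the policy fields are pointers to optional values, so two policies with identical contents compare unequal under ==. A toy illustration (the types are stand-ins, not kopia's):

package main

import (
    "fmt"
    "reflect"
)

type optionalInt int

type retentionPolicy struct{ KeepLatest *optionalInt }

func main() {
    a, b := optionalInt(0), optionalInt(0)
    p1 := retentionPolicy{KeepLatest: &a}
    p2 := retentionPolicy{KeepLatest: &b}

    fmt.Println(p1 == p2)                  // false: compares the pointers
    fmt.Println(reflect.DeepEqual(p1, p2)) // true: compares the pointed-to values
}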

func (w *conn) verifyRetentionConfig(
    ctx context.Context,
    errs *fault.Bus,
) {
    const alertName = "kopia-object-locking"

    directRepo, ok := w.Repository.(repo.DirectRepository)
    if !ok {
        errs.AddAlert(ctx, fault.NewAlert(
            "",
            corsoWrapperAlertNamespace,
            "fetch-direct-repo",
            alertName,
            nil))

        return
    }

    blobConfig, maintenanceParams, err := getRetentionConfigs(ctx, directRepo)
    if err != nil {
        errs.AddAlert(ctx, fault.NewAlert(
            err.Error(),
            corsoWrapperAlertNamespace,
            "fetch-config",
            alertName,
            nil))

        return
    }

    err = retention.OptsFromConfigs(*blobConfig, *maintenanceParams).
        Verify(ctx)
    if err != nil {
        errs.AddAlert(ctx, fault.NewAlert(
            err.Error(),
            corsoWrapperAlertNamespace,
            "config-values",
            alertName,
            nil))
    }
}

// verifyDefaultConfigOptions checks the following configurations:
// kopia global policy:
//   - kopia snapshot retention is disabled
//   - kopia compression matches the default compression for corso
//   - kopia scheduling is disabled
//
// object locking:
//   - maintenance and blob config blob parameters are consistent (i.e. all
//     enabled or all disabled)
func (w *conn) verifyDefaultConfigOptions(
    ctx context.Context,
    errs *fault.Bus,
) {
    logger.Ctx(ctx).Info("verifying config parameters")

    w.verifyDefaultPolicyConfigOptions(ctx, errs)
    w.verifyRetentionConfig(ctx, errs)
}

@ -3,6 +3,7 @@ package kopia
import (
    "context"
    "math"
    "strings"
    "testing"
    "time"

@ -15,11 +16,13 @@ import (
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
    "github.com/stretchr/testify/suite"
    "golang.org/x/exp/maps"

    "github.com/alcionai/corso/src/internal/common/ptr"
    strTD "github.com/alcionai/corso/src/internal/common/str/testdata"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/pkg/control/repository"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/storage"
    storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)
@ -93,7 +96,7 @@ func TestWrapperIntegrationSuite(t *testing.T) {
    })
}

func (suite *WrapperIntegrationSuite) TestRepoExistsError() {
func (suite *WrapperIntegrationSuite) TestInitialize_SamePassphrase() {
    t := suite.T()
    repoNameHash := strTD.NewHashForRepoConfigName()

@ -109,6 +112,46 @@ func (suite *WrapperIntegrationSuite) TestRepoExistsError() {
    err = k.Close(ctx)
    require.NoError(t, err, clues.ToCore(err))

    err = k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
    assert.NoError(t, err, clues.ToCore(err))
}

func (suite *WrapperIntegrationSuite) TestInitialize_IncorrectPassphrase() {
    t := suite.T()
    repoNameHash := strTD.NewHashForRepoConfigName()

    ctx, flush := tester.NewContext(t)
    defer flush()

    st1 := storeTD.NewFilesystemStorage(t)
    k := NewConn(st1)

    err := k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
    require.NoError(t, err, clues.ToCore(err))

    err = k.Close(ctx)
    require.NoError(t, err, clues.ToCore(err))

    // Hacky way to edit the existing passphrase for the repo so we can check that
    // we get a sensible error back.
    st2 := st1
    st2.Config = maps.Clone(st1.Config)

    var found bool

    for k, v := range st2.Config {
        if strings.Contains(strings.ToLower(k), "passphrase") {
            st2.Config[k] = v + "1"
            found = true

            break
        }
    }

    require.True(t, found, "unable to update passphrase for test")

    k = NewConn(st2)

    err = k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
    assert.Error(t, err, clues.ToCore(err))
    assert.ErrorIs(t, err, ErrorRepoAlreadyExists)
@ -779,3 +822,281 @@ func (suite *ConnRetentionIntegrationSuite) TestInitWithAndWithoutRetention() {
    // Some checks to make sure retention was fully initialized as expected.
    checkRetentionParams(t, ctx, k2, blob.Governance, time.Hour*48, assert.True)
}

// TestVerifyDefaultConfigOptions checks that if the repo has misconfigured
// values an error is returned. This is easiest to do in a test suite that
// allows object locking because some of the configured values that are checked
// relate to object locking.
func (suite *ConnRetentionIntegrationSuite) TestVerifyDefaultConfigOptions() {
    nonzeroOpt := policy.OptionalInt(42)

    table := []struct {
        name         string
        setupRepo    func(context.Context, *testing.T, *conn)
        expectAlerts int
    }{
        {
            name:      "ValidConfigs NoRetention",
            setupRepo: func(context.Context, *testing.T, *conn) {},
        },
        {
            name: "ValidConfigs Retention",
            setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
                err := con.setRetentionParameters(
                    ctx,
                    repository.Retention{
                        Mode:     ptr.To(repository.GovernanceRetention),
                        Duration: ptr.To(48 * time.Hour),
                        Extend:   ptr.To(true),
                    })
                require.NoError(t, err, clues.ToCore(err))
            },
        },
        {
            name: "ValidRetentionButNotExtending",
            setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
                err := con.setRetentionParameters(
                    ctx,
                    repository.Retention{
                        Mode:     ptr.To(repository.GovernanceRetention),
                        Duration: ptr.To(48 * time.Hour),
                        Extend:   ptr.To(false),
                    })
                require.NoError(t, err, clues.ToCore(err))
            },
            expectAlerts: 1,
        },
        {
            name: "ExtendingRetentionButNotConfigured",
            setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
                err := con.setRetentionParameters(
                    ctx,
                    repository.Retention{
                        Extend: ptr.To(true),
                    })
                require.NoError(t, err, clues.ToCore(err))
            },
            expectAlerts: 1,
        },
        {
            name: "NonZeroScheduleInterval",
            setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
                pol, err := con.getGlobalPolicyOrEmpty(ctx)
                require.NoError(t, err, clues.ToCore(err))

                updateSchedulingOnPolicy(time.Hour, pol)

                err = con.writeGlobalPolicy(ctx, "test", pol)
                require.NoError(t, err, clues.ToCore(err))
            },
            expectAlerts: 1,
        },
        {
            name: "OldValidCompressor",
            setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
                pol, err := con.getGlobalPolicyOrEmpty(ctx)
                require.NoError(t, err, clues.ToCore(err))

                _, err = updateCompressionOnPolicy("s2-default", pol)
                require.NoError(t, err, clues.ToCore(err))

                err = con.writeGlobalPolicy(ctx, "test", pol)
                require.NoError(t, err, clues.ToCore(err))
            },
            expectAlerts: 0,
        },
        {
            name: "NonDefaultCompression",
            setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
                pol, err := con.getGlobalPolicyOrEmpty(ctx)
                require.NoError(t, err, clues.ToCore(err))

                _, err = updateCompressionOnPolicy("pgzip-best-speed", pol)
                require.NoError(t, err, clues.ToCore(err))

                err = con.writeGlobalPolicy(ctx, "test", pol)
                require.NoError(t, err, clues.ToCore(err))
            },
            expectAlerts: 1,
        },
        {
            name: "NonZeroSnapshotRetentionLatest",
            setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
                retention := policy.RetentionPolicy{
                    KeepLatest:  &nonzeroOpt,
                    KeepHourly:  &zeroOpt,
                    KeepWeekly:  &zeroOpt,
                    KeepDaily:   &zeroOpt,
                    KeepMonthly: &zeroOpt,
                    KeepAnnual:  &zeroOpt,
                }
                pol, err := con.getGlobalPolicyOrEmpty(ctx)
                require.NoError(t, err, clues.ToCore(err))

                updateRetentionOnPolicy(retention, pol)

                err = con.writeGlobalPolicy(ctx, "test", pol)
                require.NoError(t, err, clues.ToCore(err))
            },
            expectAlerts: 1,
        },
        {
            name: "NonZeroSnapshotRetentionHourly",
            setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
                retention := policy.RetentionPolicy{
                    KeepLatest:  &zeroOpt,
                    KeepHourly:  &nonzeroOpt,
                    KeepWeekly:  &zeroOpt,
                    KeepDaily:   &zeroOpt,
                    KeepMonthly: &zeroOpt,
                    KeepAnnual:  &zeroOpt,
                }
                pol, err := con.getGlobalPolicyOrEmpty(ctx)
                require.NoError(t, err, clues.ToCore(err))

                updateRetentionOnPolicy(retention, pol)

                err = con.writeGlobalPolicy(ctx, "test", pol)
                require.NoError(t, err, clues.ToCore(err))
            },
            expectAlerts: 1,
        },
        {
            name: "NonZeroSnapshotRetentionWeekly",
            setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
                retention := policy.RetentionPolicy{
                    KeepLatest:  &zeroOpt,
                    KeepHourly:  &zeroOpt,
                    KeepWeekly:  &nonzeroOpt,
                    KeepDaily:   &zeroOpt,
                    KeepMonthly: &zeroOpt,
                    KeepAnnual:  &zeroOpt,
                }
                pol, err := con.getGlobalPolicyOrEmpty(ctx)
                require.NoError(t, err, clues.ToCore(err))

                updateRetentionOnPolicy(retention, pol)

                err = con.writeGlobalPolicy(ctx, "test", pol)
                require.NoError(t, err, clues.ToCore(err))
            },
            expectAlerts: 1,
        },
        {
            name: "NonZeroSnapshotRetentionDaily",
            setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
                retention := policy.RetentionPolicy{
                    KeepLatest:  &zeroOpt,
                    KeepHourly:  &zeroOpt,
                    KeepWeekly:  &zeroOpt,
                    KeepDaily:   &nonzeroOpt,
                    KeepMonthly: &zeroOpt,
                    KeepAnnual:  &zeroOpt,
                }
                pol, err := con.getGlobalPolicyOrEmpty(ctx)
                require.NoError(t, err, clues.ToCore(err))

                updateRetentionOnPolicy(retention, pol)

                err = con.writeGlobalPolicy(ctx, "test", pol)
                require.NoError(t, err, clues.ToCore(err))
            },
            expectAlerts: 1,
        },
        {
            name: "NonZeroSnapshotRetentionMonthly",
            setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
                retention := policy.RetentionPolicy{
                    KeepLatest:  &zeroOpt,
                    KeepHourly:  &zeroOpt,
                    KeepWeekly:  &zeroOpt,
                    KeepDaily:   &zeroOpt,
                    KeepMonthly: &nonzeroOpt,
                    KeepAnnual:  &zeroOpt,
                }
                pol, err := con.getGlobalPolicyOrEmpty(ctx)
                require.NoError(t, err, clues.ToCore(err))

                updateRetentionOnPolicy(retention, pol)

                err = con.writeGlobalPolicy(ctx, "test", pol)
                require.NoError(t, err, clues.ToCore(err))
            },
            expectAlerts: 1,
        },
        {
            name: "NonZeroSnapshotRetentionAnnual",
            setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
                retention := policy.RetentionPolicy{
                    KeepLatest:  &zeroOpt,
                    KeepHourly:  &zeroOpt,
                    KeepWeekly:  &zeroOpt,
                    KeepDaily:   &zeroOpt,
                    KeepMonthly: &zeroOpt,
                    KeepAnnual:  &nonzeroOpt,
                }
                pol, err := con.getGlobalPolicyOrEmpty(ctx)
                require.NoError(t, err, clues.ToCore(err))

                updateRetentionOnPolicy(retention, pol)

                err = con.writeGlobalPolicy(ctx, "test", pol)
                require.NoError(t, err, clues.ToCore(err))
            },
            expectAlerts: 1,
        },
        {
            name: "MultipleAlerts",
            setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
                err := con.setRetentionParameters(
                    ctx,
                    repository.Retention{
                        Mode:     ptr.To(repository.GovernanceRetention),
                        Duration: ptr.To(48 * time.Hour),
                        Extend:   ptr.To(false),
                    })
                require.NoError(t, err, clues.ToCore(err))

                pol, err := con.getGlobalPolicyOrEmpty(ctx)
                require.NoError(t, err, clues.ToCore(err))

                updateSchedulingOnPolicy(time.Hour, pol)

                _, err = updateCompressionOnPolicy("pgzip-best-speed", pol)
                require.NoError(t, err, clues.ToCore(err))

                err = con.writeGlobalPolicy(ctx, "test", pol)
                require.NoError(t, err, clues.ToCore(err))
            },
            expectAlerts: 3,
        },
    }

    for _, test := range table {
        suite.Run(test.name, func() {
            t := suite.T()

            ctx, flush := tester.NewContext(t)
            t.Cleanup(flush)

            repoNameHash := strTD.NewHashForRepoConfigName()
            st1 := storeTD.NewPrefixedS3Storage(t)

            con := NewConn(st1)
            err := con.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
            require.NoError(t, err, clues.ToCore(err))

            t.Cleanup(func() { con.Close(ctx) })

            test.setupRepo(ctx, t, con)

            errs := fault.New(true)
            con.verifyDefaultConfigOptions(ctx, errs)

            // There shouldn't be any reported failures because this call only
            // checks the config; it doesn't change anything.
            assert.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
            assert.Len(t, errs.Alerts(), test.expectAlerts)
        })
    }
}

@ -665,7 +665,12 @@ func (w Wrapper) RepoMaintenance(
    ctx context.Context,
    storer store.Storer,
    opts repository.Maintenance,
    errs *fault.Bus,
) error {
    // Check the existing config parameters first so that even if we fail for some
    // reason below we know we checked the config.
    w.c.verifyDefaultConfigOptions(ctx, errs)

    kopiaSafety, err := translateSafety(opts.Safety)
    if err != nil {
        return clues.WrapWC(ctx, err, "identifying safety level")
@ -696,8 +701,9 @@ func (w Wrapper) RepoMaintenance(
    // Even if we fail this we don't want to fail the overall maintenance
    // operation since there's other useful work we can still do.
    if err := cleanupOrphanedData(ctx, storer, w.c, buffer, time.Now); err != nil {
        logger.CtxErr(ctx, err).Info(
            "cleaning up failed backups, some space may not be freed")
        errs.AddRecoverable(ctx, clues.Wrap(
            err,
            "cleaning up failed backups, some space may not be freed"))
    }
}
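
Since RepoMaintenance now takes a fault bus, alerts from the pre-maintenance config verification ride along with the maintenance result rather than failing the run. A hedged caller sketch, using only calls shown elsewhere in this diff:

errs := fault.New(true)

if err := w.RepoMaintenance(ctx, storer, opts, errs); err != nil {
    return clues.Wrap(err, "running repo maintenance")
}

if len(errs.Alerts()) > 0 {
    // Non-fatal config drift was detected during the verification pass.
    logger.Ctx(ctx).Info("maintenance completed with config alerts")
}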

@ -27,7 +27,6 @@ import (
    strTD "github.com/alcionai/corso/src/internal/common/str/testdata"
    "github.com/alcionai/corso/src/internal/data"
    dataMock "github.com/alcionai/corso/src/internal/data/mock"
    "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
    exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
    istats "github.com/alcionai/corso/src/internal/stats"
    "github.com/alcionai/corso/src/internal/tester"
@ -38,6 +37,7 @@ import (
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/logger"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
    storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)

@ -198,7 +198,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_FirstRun_NoChanges() {
    Type: repository.MetadataMaintenance,
}

err = w.RepoMaintenance(ctx, nil, opts)
err = w.RepoMaintenance(ctx, nil, opts, fault.New(true))
require.NoError(t, err, clues.ToCore(err))
}

@ -220,7 +220,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_NoForce_Fails
}

// This will set the user.
err = w.RepoMaintenance(ctx, nil, mOpts)
err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
require.NoError(t, err, clues.ToCore(err))

err = k.Close(ctx)
@ -236,7 +236,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_NoForce_Fails

var notOwnedErr maintenance.NotOwnedError

err = w.RepoMaintenance(ctx, nil, mOpts)
err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
assert.ErrorAs(t, err, &notOwnedErr, clues.ToCore(err))
}

@ -258,7 +258,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_Force_Succeed
}

// This will set the user.
err = w.RepoMaintenance(ctx, nil, mOpts)
err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
require.NoError(t, err, clues.ToCore(err))

err = k.Close(ctx)
@ -275,13 +275,13 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_Force_Succeed
mOpts.Force = true

// This will set the user.
err = w.RepoMaintenance(ctx, nil, mOpts)
err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
require.NoError(t, err, clues.ToCore(err))

mOpts.Force = false

// Running without force should succeed now.
err = w.RepoMaintenance(ctx, nil, mOpts)
err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
require.NoError(t, err, clues.ToCore(err))
}

@ -733,7 +733,7 @@ func (suite *RetentionIntegrationSuite) TestSetRetentionParameters_And_Maintenan
// This will set common maintenance config parameters. There's some interplay
// between the maintenance schedule and retention period that we want to check
// below.
err = w.RepoMaintenance(ctx, nil, mOpts)
err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
require.NoError(t, err, clues.ToCore(err))

// Enable retention.
@ -838,7 +838,7 @@ func (suite *RetentionIntegrationSuite) TestSetAndUpdateRetentionParameters_RunM
// This will set common maintenance config parameters. There's some interplay
// between the maintenance schedule and retention period that we want to check
// below.
err = w.RepoMaintenance(ctx, ms, mOpts)
err = w.RepoMaintenance(ctx, ms, mOpts, fault.New(true))
require.NoError(t, err, clues.ToCore(err))

// Enable retention.
@ -882,7 +882,7 @@ func (suite *RetentionIntegrationSuite) TestSetAndUpdateRetentionParameters_RunM

// Run full maintenance again. This should extend object locks for things if
// they exist.
err = w.RepoMaintenance(ctx, ms, mOpts)
err = w.RepoMaintenance(ctx, ms, mOpts, fault.New(true))
require.NoError(t, err, clues.ToCore(err))
})
}

@ -2,6 +2,7 @@ package m365

import (
    "context"
    "fmt"

    "github.com/alcionai/clues"

@ -13,7 +14,10 @@ import (
    "github.com/alcionai/corso/src/internal/m365/service/groups"
    "github.com/alcionai/corso/src/internal/m365/service/onedrive"
    "github.com/alcionai/corso/src/internal/m365/service/sharepoint"
    "github.com/alcionai/corso/src/internal/m365/service/teamschats"
    "github.com/alcionai/corso/src/internal/m365/support"
    "github.com/alcionai/corso/src/internal/operations/inject"
    "github.com/alcionai/corso/src/pkg/account"
    bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/count"
@ -22,9 +26,33 @@ import (
    "github.com/alcionai/corso/src/pkg/filters"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/selectors"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)

type backupHandler interface {
    produceBackupCollectionser
}

type produceBackupCollectionser interface {
    ProduceBackupCollections(
        ctx context.Context,
        bpc inject.BackupProducerConfig,
        ac api.Client,
        creds account.M365Config,
        su support.StatusUpdater,
        counter *count.Bus,
        errs *fault.Bus,
    ) (
        collections []data.BackupCollection,
        excludeItems *prefixmatcher.StringSetMatcher,
        // canUsePreviousBackup can always be returned as true for implementations
        // that always return a tombstone collection when the metadata read fails
        canUsePreviousBackup bool,
        err error,
    )
}
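
Any service package exposing a NewBackup() constructor that satisfies this interface can be wired into the switch below. A hypothetical skeleton for a future service (names are illustrative, not part of this diff):

package newservice

import (
    "context"

    "github.com/alcionai/clues"

    "github.com/alcionai/corso/src/internal/common/prefixmatcher"
    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/internal/m365/support"
    "github.com/alcionai/corso/src/internal/operations/inject"
    "github.com/alcionai/corso/src/pkg/account"
    "github.com/alcionai/corso/src/pkg/count"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)

// backup is a hypothetical handler; NewBackup() mirrors the constructors the
// switch below already uses for exchange, onedrive, sharepoint, and groups.
type backup struct{}

func NewBackup() *backup { return &backup{} }

func (backup) ProduceBackupCollections(
    ctx context.Context,
    bpc inject.BackupProducerConfig,
    ac api.Client,
    creds account.M365Config,
    su support.StatusUpdater,
    counter *count.Bus,
    errs *fault.Bus,
) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, bool, error) {
    // Return canUsePreviousBackup=true only if a tombstone collection is
    // produced whenever the metadata read fails.
    return nil, nil, true, clues.New("not implemented")
}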

// ---------------------------------------------------------------------------
// Data Collections
// ---------------------------------------------------------------------------
@ -63,65 +91,38 @@ func (ctrl *Controller) ProduceBackupCollections(
    canUsePreviousBackup bool
)

var handler backupHandler

switch service {
case path.ExchangeService:
    colls, excludeItems, canUsePreviousBackup, err = exchange.ProduceBackupCollections(
        ctx,
        bpc,
        ctrl.AC,
        ctrl.credentials,
        ctrl.UpdateStatus,
        counter,
        errs)
    if err != nil {
        return nil, nil, false, err
    }
    handler = exchange.NewBackup()

case path.OneDriveService:
    colls, excludeItems, canUsePreviousBackup, err = onedrive.ProduceBackupCollections(
        ctx,
        bpc,
        ctrl.AC,
        ctrl.credentials,
        ctrl.UpdateStatus,
        counter,
        errs)
    if err != nil {
        return nil, nil, false, err
    }
    handler = onedrive.NewBackup()

case path.SharePointService:
    colls, excludeItems, canUsePreviousBackup, err = sharepoint.ProduceBackupCollections(
        ctx,
        bpc,
        ctrl.AC,
        ctrl.credentials,
        ctrl.UpdateStatus,
        counter,
        errs)
    if err != nil {
        return nil, nil, false, err
    }
    handler = sharepoint.NewBackup()

case path.GroupsService:
    colls, excludeItems, err = groups.ProduceBackupCollections(
        ctx,
        bpc,
        ctrl.AC,
        ctrl.credentials,
        ctrl.UpdateStatus,
        counter,
        errs)
    if err != nil {
        return nil, nil, false, err
    }
    handler = groups.NewBackup()

    // canUsePreviousBackup can always be returned as true for groups as we
    // return a tombstone collection in case the metadata read fails
    canUsePreviousBackup = true
case path.TeamsChatsService:
    handler = teamschats.NewBackup()

default:
    return nil, nil, false, clues.Wrap(clues.NewWC(ctx, service.String()), "service not supported")
    return nil, nil, false, clues.NewWC(ctx, fmt.Sprintf("service not supported: %s", service.HumanString()))
}

colls, excludeItems, canUsePreviousBackup, err = handler.ProduceBackupCollections(
    ctx,
    bpc,
    ctrl.AC,
    ctrl.credentials,
    ctrl.UpdateStatus,
    counter,
    errs)
if err != nil {
    return nil, nil, false, err
}

for _, c := range colls {
@ -153,25 +154,28 @@ func (ctrl *Controller) IsServiceEnabled(
    return sharepoint.IsServiceEnabled(ctx, ctrl.AC.Sites(), resourceOwner)
case path.GroupsService:
    return groups.IsServiceEnabled(ctx, ctrl.AC.Groups(), resourceOwner)
case path.TeamsChatsService:
    return teamschats.IsServiceEnabled(ctx, ctrl.AC.Users(), resourceOwner)
}

return false, clues.Wrap(clues.NewWC(ctx, service.String()), "service not supported")
}

func verifyBackupInputs(sels selectors.Selector, cachedIDs []string) error {
func verifyBackupInputs(sel selectors.Selector, cachedIDs []string) error {
    var ids []string

    switch sels.Service {
    switch sel.Service {
    case selectors.ServiceExchange, selectors.ServiceOneDrive:
        // Exchange and OneDrive user existence now checked in checkServiceEnabled.
        return nil

    case selectors.ServiceSharePoint, selectors.ServiceGroups:
    case selectors.ServiceSharePoint, selectors.ServiceGroups, selectors.ServiceTeamsChats:
        ids = cachedIDs
    }

    if !filters.Contains(ids).Compare(sels.ID()) {
        return clues.Stack(core.ErrNotFound).With("selector_protected_resource", sels.DiscreteOwner)
    if !filters.Contains(ids).Compare(sel.ID()) {
        return clues.Wrap(core.ErrNotFound, "verifying existence of resource").
            With("selector_protected_resource", sel.ID())
    }

    return nil

@ -11,7 +11,6 @@ import (
    "github.com/stretchr/testify/suite"

    inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/internal/data/mock"
    "github.com/alcionai/corso/src/internal/m365/service/exchange"
@ -19,6 +18,7 @@ import (
    "github.com/alcionai/corso/src/internal/m365/service/sharepoint"
    "github.com/alcionai/corso/src/internal/operations/inject"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/internal/tester/its"
    "github.com/alcionai/corso/src/internal/tester/tconfig"
    "github.com/alcionai/corso/src/internal/version"
    "github.com/alcionai/corso/src/pkg/control"
@ -36,10 +36,7 @@ import (

type DataCollectionIntgSuite struct {
    tester.Suite
    user string
    site string
    tenantID string
    ac api.Client
    m365 its.M365IntgTestSetup
}

func TestDataCollectionIntgSuite(t *testing.T) {
@ -51,29 +48,14 @@ func TestDataCollectionIntgSuite(t *testing.T) {
}

func (suite *DataCollectionIntgSuite) SetupSuite() {
    t := suite.T()

    suite.user = tconfig.M365UserID(t)
    suite.site = tconfig.M365SiteID(t)

    acct := tconfig.NewM365Account(t)
    creds, err := acct.M365Config()
    require.NoError(t, err, clues.ToCore(err))

    suite.tenantID = creds.AzureTenantID

    suite.ac, err = api.NewClient(
        creds,
        control.DefaultOptions(),
        count.New())
    require.NoError(t, err, clues.ToCore(err))
    suite.m365 = its.GetM365(suite.T())
}

func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
    ctx, flush := tester.NewContext(suite.T())
    defer flush()

    selUsers := []string{suite.user}
    selUsers := []string{suite.m365.User.ID}

    ctrl := newController(ctx, suite.T(), path.ExchangeService)
    tests := []struct {
@ -85,7 +67,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
    getSelector: func(t *testing.T) selectors.Selector {
        sel := selectors.NewExchangeBackup(selUsers)
        sel.Include(sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
        sel.DiscreteOwner = suite.user
        sel.DiscreteOwner = suite.m365.User.ID
        return sel.Selector
    },
},
@ -94,7 +76,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
    getSelector: func(t *testing.T) selectors.Selector {
        sel := selectors.NewExchangeBackup(selUsers)
        sel.Include(sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()))
        sel.DiscreteOwner = suite.user
        sel.DiscreteOwner = suite.m365.User.ID
        return sel.Selector
    },
},
@ -139,11 +121,11 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
    Selector: sel,
}

collections, excludes, canUsePreviousBackup, err := exchange.ProduceBackupCollections(
collections, excludes, canUsePreviousBackup, err := exchange.NewBackup().ProduceBackupCollections(
    ctx,
    bpc,
    suite.ac,
    suite.ac.Credentials,
    suite.m365.AC,
    suite.m365.Creds,
    ctrl.UpdateStatus,
    count.New(),
    fault.New(true))
@ -270,7 +252,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
    ctx, flush := tester.NewContext(suite.T())
    defer flush()

    selSites := []string{suite.site}
    selSites := []string{suite.m365.Site.ID}
    ctrl := newController(ctx, suite.T(), path.SharePointService)
    tests := []struct {
        name string
@ -309,10 +291,10 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
    Selector: sel,
}

collections, excludes, canUsePreviousBackup, err := sharepoint.ProduceBackupCollections(
collections, excludes, canUsePreviousBackup, err := sharepoint.NewBackup().ProduceBackupCollections(
    ctx,
    bpc,
    suite.ac,
    suite.m365.AC,
    ctrl.credentials,
    ctrl.UpdateStatus,
    count.New(),
@ -351,8 +333,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {

type SPCollectionIntgSuite struct {
    tester.Suite
    connector *Controller
    user string
    m365 its.M365IntgTestSetup
}

func TestSPCollectionIntgSuite(t *testing.T) {
@ -364,13 +345,7 @@ func TestSPCollectionIntgSuite(t *testing.T) {
}

func (suite *SPCollectionIntgSuite) SetupSuite() {
    ctx, flush := tester.NewContext(suite.T())
    defer flush()

    suite.connector = newController(ctx, suite.T(), path.SharePointService)
    suite.user = tconfig.M365UserID(suite.T())

    tester.LogTimeOfTest(suite.T())
    suite.m365 = its.GetM365(suite.T())
}

func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
@ -379,25 +354,20 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
    ctx, flush := tester.NewContext(t)
    defer flush()

    var (
        siteID = tconfig.M365SiteID(t)
        ctrl = newController(ctx, t, path.SharePointService)
        siteIDs = []string{siteID}
    )
    ctrl := newController(ctx, t, path.SharePointService)

    site, err := ctrl.PopulateProtectedResourceIDAndName(ctx, siteID, nil)
    _, err := ctrl.PopulateProtectedResourceIDAndName(ctx, suite.m365.Site.ID, nil)
    require.NoError(t, err, clues.ToCore(err))

    sel := selectors.NewSharePointBackup(siteIDs)
    sel := selectors.NewSharePointBackup([]string{suite.m365.Site.ID})
    sel.Include(sel.LibraryFolders([]string{"foo"}, selectors.PrefixMatch()))
    sel.Include(sel.Library("Documents"))

    sel.SetDiscreteOwnerIDName(site.ID(), site.Name())
    sel.SetDiscreteOwnerIDName(suite.m365.Site.ID, suite.m365.Site.WebURL)

    bpc := inject.BackupProducerConfig{
        LastBackupVersion: version.NoBackup,
        Options: control.DefaultOptions(),
        ProtectedResource: site,
        ProtectedResource: suite.m365.Site.Provider,
        Selector: sel.Selector,
    }

@ -415,15 +385,15 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
)

documentsColl, err := path.BuildPrefix(
    suite.connector.tenant,
    siteID,
    suite.m365.TenantID,
    suite.m365.Site.ID,
    path.SharePointService,
    path.LibrariesCategory)
require.NoError(t, err, clues.ToCore(err))

metadataColl, err := path.BuildMetadata(
    suite.connector.tenant,
    siteID,
    suite.m365.TenantID,
    suite.m365.Site.ID,
    path.SharePointService,
    path.LibrariesCategory,
    false)
@ -450,24 +420,19 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {
    ctx, flush := tester.NewContext(t)
    defer flush()

    var (
        siteID = tconfig.M365SiteID(t)
        ctrl = newController(ctx, t, path.SharePointService)
        siteIDs = []string{siteID}
    )
    ctrl := newController(ctx, t, path.SharePointService)

    site, err := ctrl.PopulateProtectedResourceIDAndName(ctx, siteID, nil)
    _, err := ctrl.PopulateProtectedResourceIDAndName(ctx, suite.m365.Site.ID, nil)
    require.NoError(t, err, clues.ToCore(err))

    sel := selectors.NewSharePointBackup(siteIDs)
    sel := selectors.NewSharePointBackup([]string{suite.m365.Site.ID})
    sel.Include(sel.Lists(selectors.Any()))

    sel.SetDiscreteOwnerIDName(site.ID(), site.Name())
    sel.SetDiscreteOwnerIDName(suite.m365.Site.ID, suite.m365.Site.WebURL)

    bpc := inject.BackupProducerConfig{
        LastBackupVersion: version.NoBackup,
        Options: control.DefaultOptions(),
        ProtectedResource: site,
        ProtectedResource: suite.m365.Site.Provider,
        Selector: sel.Selector,
    }

@ -502,9 +467,7 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {

type GroupsCollectionIntgSuite struct {
    tester.Suite
    connector *Controller
    tenantID string
    user string
    m365 its.M365IntgTestSetup
}

func TestGroupsCollectionIntgSuite(t *testing.T) {
@ -516,21 +479,7 @@ func TestGroupsCollectionIntgSuite(t *testing.T) {
}

func (suite *GroupsCollectionIntgSuite) SetupSuite() {
    t := suite.T()

    ctx, flush := tester.NewContext(t)
    defer flush()

    suite.connector = newController(ctx, t, path.GroupsService)
    suite.user = tconfig.M365UserID(t)

    acct := tconfig.NewM365Account(t)
    creds, err := acct.M365Config()
    require.NoError(t, err, clues.ToCore(err))

    suite.tenantID = creds.AzureTenantID

    tester.LogTimeOfTest(t)
    suite.m365 = its.GetM365(suite.T())
}

func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint() {
@ -539,24 +488,19 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint()
    ctx, flush := tester.NewContext(t)
    defer flush()

    var (
        groupID = tconfig.M365TeamID(t)
        ctrl = newController(ctx, t, path.GroupsService)
        groupIDs = []string{groupID}
    )
    ctrl := newController(ctx, t, path.GroupsService)

    group, err := ctrl.PopulateProtectedResourceIDAndName(ctx, groupID, nil)
    _, err := ctrl.PopulateProtectedResourceIDAndName(ctx, suite.m365.Group.ID, nil)
    require.NoError(t, err, clues.ToCore(err))

    sel := selectors.NewGroupsBackup(groupIDs)
    sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
    sel.Include(sel.LibraryFolders([]string{"test"}, selectors.PrefixMatch()))

    sel.SetDiscreteOwnerIDName(group.ID(), group.Name())
    sel.SetDiscreteOwnerIDName(suite.m365.Group.ID, suite.m365.Group.DisplayName)

    bpc := inject.BackupProducerConfig{
        LastBackupVersion: version.NoBackup,
        Options: control.DefaultOptions(),
        ProtectedResource: group,
        ProtectedResource: suite.m365.Group.Provider,
        Selector: sel.Selector,
    }

@ -575,8 +519,8 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint()
assert.Greater(t, len(collections), 1)

p, err := path.BuildMetadata(
    suite.tenantID,
    groupID,
    suite.m365.TenantID,
    suite.m365.Group.ID,
    path.GroupsService,
    path.LibrariesCategory,
    false)
@ -614,31 +558,23 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
    ctx, flush := tester.NewContext(t)
    defer flush()

    var (
        groupID = tconfig.M365TeamID(t)
        ctrl = newController(ctx, t, path.GroupsService)
        groupIDs = []string{groupID}
    )
    ctrl := newController(ctx, t, path.GroupsService)

    group, err := ctrl.PopulateProtectedResourceIDAndName(ctx, groupID, nil)
    _, err := ctrl.PopulateProtectedResourceIDAndName(ctx, suite.m365.Group.ID, nil)
    require.NoError(t, err, clues.ToCore(err))

    sel := selectors.NewGroupsBackup(groupIDs)
    sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
    sel.Include(sel.LibraryFolders([]string{"test"}, selectors.PrefixMatch()))

    sel.SetDiscreteOwnerIDName(group.ID(), group.Name())

    site, err := suite.connector.AC.Groups().GetRootSite(ctx, groupID)
    require.NoError(t, err, clues.ToCore(err))
    sel.SetDiscreteOwnerIDName(suite.m365.Group.ID, suite.m365.Group.DisplayName)

    pth, err := path.Build(
        suite.tenantID,
        groupID,
        suite.m365.TenantID,
        suite.m365.Group.ID,
        path.GroupsService,
        path.LibrariesCategory,
        true,
        odConsts.SitesPathDir,
        ptr.Val(site.GetId()))
        suite.m365.Group.RootSite.ID)
    require.NoError(t, err, clues.ToCore(err))

    mmc := []data.RestoreCollection{
@ -656,7 +592,7 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
    bpc := inject.BackupProducerConfig{
        LastBackupVersion: version.NoBackup,
        Options: control.DefaultOptions(),
        ProtectedResource: group,
        ProtectedResource: suite.m365.Group.Provider,
        Selector: sel.Selector,
        MetadataCollections: mmc,
    }
@ -676,8 +612,8 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
assert.Greater(t, len(collections), 1)

p, err := path.BuildMetadata(
    suite.tenantID,
    groupID,
    suite.m365.TenantID,
    suite.m365.Group.ID,
    path.GroupsService,
    path.LibrariesCategory,
    false)
@ -690,13 +626,13 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
foundRootTombstone := false

sp, err := path.BuildPrefix(
    suite.tenantID,
    groupID,
    suite.m365.TenantID,
    suite.m365.Group.ID,
    path.GroupsService,
    path.LibrariesCategory)
require.NoError(t, err, clues.ToCore(err))

sp, err = sp.Append(false, odConsts.SitesPathDir, ptr.Val(site.GetId()))
sp, err = sp.Append(false, odConsts.SitesPathDir, suite.m365.Group.RootSite.ID)
require.NoError(t, err, clues.ToCore(err))

for _, coll := range collections {

@ -16,7 +16,6 @@ import (
    "github.com/alcionai/corso/src/internal/common/idname"
    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
    "github.com/alcionai/corso/src/internal/m365/support"
    "github.com/alcionai/corso/src/internal/observe"
    "github.com/alcionai/corso/src/pkg/backup/details"
@ -29,6 +28,7 @@ import (
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
    "github.com/alcionai/corso/src/pkg/services/m365/custom"
)

@ -366,7 +366,7 @@ func downloadContent(
itemID := ptr.Val(item.GetId())
ctx = clues.Add(ctx, "item_id", itemID)

content, err := downloadItem(ctx, iaag, item)
content, err := downloadItem(ctx, iaag, driveID, item)
if err == nil {
    return content, nil
} else if !graph.IsErrUnauthorizedOrBadToken(err) {
@ -395,7 +395,7 @@ func downloadContent(

cdi := custom.ToCustomDriveItem(di)

content, err = downloadItem(ctx, iaag, cdi)
content, err = downloadItem(ctx, iaag, driveID, cdi)
if err != nil {
    return nil, clues.Wrap(err, "content download retry")
}
@ -426,7 +426,7 @@ func readItemContents(
    return nil, core.ErrNotFound
}

rc, err := downloadFile(ctx, iaag, props.downloadURL)
rc, err := downloadFile(ctx, iaag, props.downloadURL, false)
if graph.IsErrUnauthorizedOrBadToken(err) {
    logger.CtxErr(ctx, err).Debug("stale item in cache")
}

@ -21,7 +21,7 @@ import (
    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/common/readers"
    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
    odmetadata "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
    metaTD "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata/testdata"
    odTD "github.com/alcionai/corso/src/internal/m365/service/onedrive/testdata"
    "github.com/alcionai/corso/src/internal/m365/support"
@ -34,6 +34,7 @@ import (
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
    "github.com/alcionai/corso/src/pkg/services/m365/custom"
)

@ -73,13 +74,13 @@ func (suite *CollectionUnitSuite) TestCollection() {
stubMetaID = "testMetaID"
stubMetaEntityID = "email@provider.com"
stubMetaRoles = []string{"read", "write"}
stubMeta = metadata.Metadata{
stubMeta = odmetadata.Metadata{
    FileName: stubItemName,
    Permissions: []metadata.Permission{
    Permissions: []odmetadata.Permission{
        {
            ID: stubMetaID,
            EntityID: stubMetaEntityID,
            EntityType: metadata.GV2User,
            EntityType: odmetadata.GV2User,
            Roles: stubMetaRoles,
            Expiration: &now,
        },
@ -208,7 +209,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
mbh.GetErrs = []error{test.getErr}
mbh.GI = getsItem{Err: assert.AnError}

pcr := metaTD.NewStubPermissionResponse(metadata.GV2User, stubMetaID, stubMetaEntityID, stubMetaRoles)
pcr := metaTD.NewStubPermissionResponse(odmetadata.GV2User, stubMetaID, stubMetaEntityID, stubMetaRoles)
mbh.GIP = getsItemPermission{Perm: pcr}

coll, err := NewCollection(
@ -294,7 +295,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
assert.Equal(t, readers.DefaultSerializationVersion, rr.Format().Version)
assert.False(t, rr.Format().DelInFlight)

readMeta := metadata.Metadata{}
readMeta := odmetadata.Metadata{}
err = json.NewDecoder(rr).Decode(&readMeta)
require.NoError(t, err, clues.ToCore(err))

@ -14,7 +14,6 @@ import (
    "github.com/alcionai/corso/src/internal/common/prefixmatcher"
    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
    odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
    "github.com/alcionai/corso/src/internal/m365/support"
    bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
@ -26,6 +25,7 @@ import (
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
    "github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
    "github.com/alcionai/corso/src/pkg/services/m365/custom"
)

@ -9,9 +9,9 @@ import (
    "golang.org/x/exp/maps"

    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
    "github.com/alcionai/corso/src/pkg/logger"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
    "github.com/alcionai/corso/src/pkg/services/m365/custom"
)

@ -7,13 +7,13 @@ import (
    "github.com/alcionai/clues"

    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
    "github.com/alcionai/corso/src/internal/version"
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
)

func NewExportCollection(

@ -12,9 +12,9 @@ import (

    "github.com/alcionai/corso/src/internal/data"
    dataMock "github.com/alcionai/corso/src/internal/data/mock"
    "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/internal/version"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
)

type ExportUnitSuite struct {

@ -19,12 +19,9 @@ import (
    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/data"
    dataMock "github.com/alcionai/corso/src/internal/data/mock"
    "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
    odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
    "github.com/alcionai/corso/src/internal/m365/support"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/internal/tester/tconfig"
    "github.com/alcionai/corso/src/pkg/account"
    "github.com/alcionai/corso/src/pkg/backup/details"
    bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
    "github.com/alcionai/corso/src/pkg/control"
@ -34,6 +31,7 @@ import (
    "github.com/alcionai/corso/src/pkg/selectors"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph"
    "github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
    apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
    "github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
    "github.com/alcionai/corso/src/pkg/services/m365/custom"
@ -41,50 +39,6 @@ import (

const defaultFileSize int64 = 42

// TODO(ashmrtn): Merge with similar structs in graph and exchange packages.
type oneDriveService struct {
    credentials account.M365Config
    status support.ControllerOperationStatus
    ac api.Client
}

func newOneDriveService(credentials account.M365Config) (*oneDriveService, error) {
    ac, err := api.NewClient(
        credentials,
        control.DefaultOptions(),
        count.New())
    if err != nil {
        return nil, err
    }

    service := oneDriveService{
        ac: ac,
        credentials: credentials,
    }

    return &service, nil
}

func (ods *oneDriveService) updateStatus(status *support.ControllerOperationStatus) {
    if status == nil {
        return
    }

    ods.status = support.MergeStatus(ods.status, *status)
}

func loadTestService(t *testing.T) *oneDriveService {
    a := tconfig.NewM365Account(t)

    creds, err := a.M365Config()
    require.NoError(t, err, clues.ToCore(err))

    service, err := newOneDriveService(creds)
    require.NoError(t, err, clues.ToCore(err))

    return service
}

// ---------------------------------------------------------------------------
// collections
// ---------------------------------------------------------------------------
@ -841,7 +795,12 @@ func (h mockBackupHandler[T]) AugmentItemInfo(
|
||||
return h.ItemInfo
|
||||
}
|
||||
|
||||
func (h *mockBackupHandler[T]) Get(context.Context, string, map[string]string) (*http.Response, error) {
|
||||
func (h *mockBackupHandler[T]) Get(
|
||||
context.Context,
|
||||
string,
|
||||
map[string]string,
|
||||
bool,
|
||||
) (*http.Response, error) {
|
||||
c := h.getCall
|
||||
h.getCall++
|
||||
|
||||
|
||||
@ -21,8 +21,10 @@ import (
|
||||
)
|
||||
|
||||
const (
|
||||
acceptHeaderKey = "Accept"
|
||||
acceptHeaderValue = "*/*"
|
||||
acceptHeaderKey = "Accept"
|
||||
acceptHeaderValue = "*/*"
|
||||
gigabyte = 1024 * 1024 * 1024
|
||||
largeFileDownloadLimit = 15 * gigabyte
|
||||
)
|
||||
|
||||
// downloadUrlKeys is used to find the download URL in a DriveItem response.
|
||||
@ -33,7 +35,8 @@ var downloadURLKeys = []string{
|
||||
|
||||
func downloadItem(
|
||||
ctx context.Context,
|
||||
ag api.Getter,
|
||||
getter api.Getter,
|
||||
driveID string,
|
||||
item *custom.DriveItem,
|
||||
) (io.ReadCloser, error) {
|
||||
if item == nil {
|
||||
@ -41,36 +44,37 @@ func downloadItem(
|
||||
}
|
||||
|
||||
var (
|
||||
rc io.ReadCloser
|
||||
isFile = item.GetFile() != nil
|
||||
err error
|
||||
// very large file content needs to be downloaded through a different endpoint, or else
|
||||
// the download could take longer than the lifespan of the download token in the cached
|
||||
// url, which will cause us to timeout on every download request, even if we refresh the
|
||||
// download url right before the query.
|
||||
url = "https://graph.microsoft.com/v1.0/drives/" + driveID + "/items/" + ptr.Val(item.GetId()) + "/content"
|
||||
reader io.ReadCloser
|
||||
err error
|
||||
isLargeFile = ptr.Val(item.GetSize()) > largeFileDownloadLimit
|
||||
)
|
||||
|
||||
if isFile {
|
||||
var (
|
||||
url string
|
||||
ad = item.GetAdditionalData()
|
||||
)
|
||||
|
||||
for _, key := range downloadURLKeys {
|
||||
if v, err := str.AnyValueToString(key, ad); err == nil {
|
||||
url = v
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
rc, err = downloadFile(ctx, ag, url)
|
||||
if err != nil {
|
||||
return nil, clues.Stack(err)
|
||||
}
|
||||
// if this isn't a file, no content is available for download
|
||||
if item.GetFile() == nil {
|
||||
return reader, nil
|
||||
}
|
||||
|
||||
return rc, nil
|
||||
// smaller files will maintain our current behavior (prefetching the download url with the
|
||||
// url cache). That pattern works for us in general, and we only need to deviate for very
|
||||
// large file sizes.
|
||||
if !isLargeFile {
|
||||
url = str.FirstIn(item.GetAdditionalData(), downloadURLKeys...)
|
||||
}
|
||||
|
||||
reader, err = downloadFile(ctx, getter, url, isLargeFile)
|
||||
|
||||
return reader, clues.StackWC(ctx, err).OrNil()
|
||||
}
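The hunk above replaces the per-key scan of additionalData with a size gate: files over largeFileDownloadLimit skip the cached pre-signed download URL, whose embedded token can expire before a multi-gigabyte transfer completes, and stream from the drive's /content endpoint, which authorizes each request instead. A minimal sketch of that selection follows, for illustration only; pickDownloadURL is a hypothetical helper, not part of the repository.

// pickDownloadURL is a hypothetical restatement of the size gate above.
// The threshold mirrors the largeFileDownloadLimit constant in the diff.
func pickDownloadURL(driveID, itemID, cachedURL string, size int64) string {
	const gigabyte = 1024 * 1024 * 1024
	const largeFileDownloadLimit = 15 * gigabyte

	if size > largeFileDownloadLimit {
		// the cached URL's token may expire mid-transfer on huge files;
		// the /content endpoint avoids that by authorizing per request.
		return "https://graph.microsoft.com/v1.0/drives/" + driveID +
			"/items/" + itemID + "/content"
	}

	return cachedURL
}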
type downloadWithRetries struct {
getter api.Getter
url string
getter api.Getter
requireAuth bool
url string
}

func (dg *downloadWithRetries) SupportsRange() bool {
@@ -86,7 +90,7 @@ func (dg *downloadWithRetries) Get(
// wouldn't work without it (get 416 responses instead of 206).
headers[acceptHeaderKey] = acceptHeaderValue

resp, err := dg.getter.Get(ctx, dg.url, headers)
resp, err := dg.getter.Get(ctx, dg.url, headers, dg.requireAuth)
if err != nil {
return nil, clues.Wrap(err, "getting file")
}
@@ -96,7 +100,7 @@ func (dg *downloadWithRetries) Get(
resp.Body.Close()
}

return nil, clues.New("malware detected").Label(graph.LabelsMalware)
return nil, clues.NewWC(ctx, "malware detected").Label(graph.LabelsMalware)
}

if resp != nil && (resp.StatusCode/100) != 2 {
@@ -107,7 +111,7 @@ func (dg *downloadWithRetries) Get(
// upstream error checks can compare the status with
// clues.HasLabel(err, graph.LabelStatus(http.KnownStatusCode))
return nil, clues.
Wrap(clues.New(resp.Status), "non-2xx http response").
Wrap(clues.NewWC(ctx, resp.Status), "non-2xx http response").
Label(graph.LabelStatus(resp.StatusCode))
}
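For context on the 416-versus-206 note in the hunk above: the retry reader resumes interrupted downloads with a Range header, and some download hosts reject the ranged request with 416 Range Not Satisfiable unless an Accept header accompanies it. A self-contained sketch of such a request using only the standard library; the URL and offset are placeholders.

import (
	"context"
	"fmt"
	"net/http"
)

// rangedGet resumes a download from offset; servers that honor the range
// reply 206 Partial Content. The Accept header mirrors the constants above.
func rangedGet(ctx context.Context, c *http.Client, url string, offset int64) (*http.Response, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
	if err != nil {
		return nil, err
	}

	req.Header.Set("Range", fmt.Sprintf("bytes=%d-", offset))
	req.Header.Set("Accept", "*/*") // omitted, some hosts answer 416 instead of 206

	return c.Do(req)
}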
@@ -118,6 +122,7 @@ func downloadFile(
ctx context.Context,
ag api.Getter,
url string,
requireAuth bool,
) (io.ReadCloser, error) {
if len(url) == 0 {
return nil, clues.NewWC(ctx, "empty file url")
@@ -141,8 +146,9 @@ func downloadFile(
rc, err := readers.NewResetRetryHandler(
ctx,
&downloadWithRetries{
getter: ag,
url: url,
getter: ag,
requireAuth: requireAuth,
url: url,
})

return rc, clues.Stack(err).OrNil()

@@ -12,6 +12,7 @@ import (

"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
@@ -153,7 +154,8 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
{
Values: nil,
NextLink: nil,
Err: graph.Stack(ctx, mySiteURLNotFound),
// needs graph.Stack, not clues.Stack
Err: graph.Stack(ctx, mySiteURLNotFound),
},
},
expectedErr: assert.NoError,
@@ -165,7 +167,8 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
{
Values: nil,
NextLink: nil,
Err: graph.Stack(ctx, mySiteNotFound),
// needs graph.Stack, not clues.Stack
Err: graph.Stack(ctx, mySiteNotFound),
},
},
expectedErr: assert.NoError,
@@ -231,6 +234,18 @@ func (suite *OneDriveIntgSuite) SetupSuite() {
require.NoError(t, err, clues.ToCore(err))
}

type stubStatusUpdater struct {
status support.ControllerOperationStatus
}

func (ssu *stubStatusUpdater) updateStatus(status *support.ControllerOperationStatus) {
if status == nil {
return
}

ssu.status = support.MergeStatus(ssu.status, *status)
}
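The stub above exists so the integration test can observe controller status without the oneDriveService wrapper that this change deletes. A hedged usage sketch; the function name and assertions are illustrative, not a call site from the repository.

// exampleStatusCapture shows the intended wiring: any producer that accepts
// a func(*support.ControllerOperationStatus) can report into the stub.
func exampleStatusCapture() support.ControllerOperationStatus {
	updater := &stubStatusUpdater{}
	report := updater.updateStatus

	report(&support.ControllerOperationStatus{})
	report(nil) // ignored by the nil guard above

	// the merged result is now available for test assertions
	return updater.status
}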
func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() {
creds, err := tconfig.NewM365Account(suite.T()).M365Config()
require.NoError(suite.T(), err, clues.ToCore(err))
@@ -256,10 +271,10 @@ func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() {
defer flush()

var (
service = loadTestService(t)
scope = selectors.
scope = selectors.
NewOneDriveBackup([]string{test.user}).
AllData()[0]
statusUpdater = stubStatusUpdater{}
)

colls := NewCollections(
@@ -272,7 +287,7 @@ func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() {
},
creds.AzureTenantID,
idname.NewProvider(test.user, test.user),
service.updateStatus,
statusUpdater.updateStatus,
control.Options{
ToggleFeatures: control.Toggles{},
},

@@ -17,6 +17,7 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata"
@@ -30,9 +31,7 @@ import (

type ItemIntegrationSuite struct {
tester.Suite
user string
userDriveID string
service *oneDriveService
m365 its.M365IntgTestSetup
}

func TestItemIntegrationSuite(t *testing.T) {
@@ -44,25 +43,7 @@ func TestItemIntegrationSuite(t *testing.T) {
}

func (suite *ItemIntegrationSuite) SetupSuite() {
t := suite.T()

ctx, flush := tester.NewContext(t)
defer flush()

suite.service = loadTestService(t)
suite.user = tconfig.SecondaryM365UserID(t)

graph.InitializeConcurrencyLimiter(ctx, true, 4)

pager := suite.service.ac.Drives().NewUserDrivePager(suite.user, nil)

odDrives, err := api.GetAllDrives(ctx, pager)
require.NoError(t, err, clues.ToCore(err))
// Test Requirement 1: Need a drive
require.Greaterf(t, len(odDrives), 0, "user %s does not have a drive", suite.user)

// Pick the first drive
suite.userDriveID = ptr.Val(odDrives[0].GetId())
suite.m365 = its.GetM365(suite.T())
}

func getOneDriveItem(
@@ -103,28 +84,36 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
defer flush()

sc := selectors.
NewOneDriveBackup([]string{suite.user}).
NewOneDriveBackup([]string{suite.m365.User.ID}).
AllData()[0]

driveItem := getOneDriveItem(ctx, t, suite.service.ac, suite.userDriveID)
driveItem := getOneDriveItem(
ctx,
t,
suite.m365.AC,
suite.m365.User.DriveID)
// Test Requirement 2: Need a file
require.NotEmpty(
t,
driveItem,
"no file item found for user %s drive %s",
suite.user,
suite.userDriveID)
"no file item found for user %q drive %q",
suite.m365.User.ID,
suite.m365.User.DriveID)

bh := &userDriveBackupHandler{
baseUserDriveHandler: baseUserDriveHandler{
ac: suite.service.ac.Drives(),
ac: suite.m365.AC.Drives(),
},
userID: suite.user,
userID: suite.m365.User.ID,
scope: sc,
}

// Read data for the file
itemData, err := downloadItem(ctx, bh, custom.ToCustomDriveItem(driveItem))
itemData, err := downloadItem(
ctx,
bh,
suite.m365.User.DriveID,
custom.ToCustomDriveItem(driveItem))
require.NoError(t, err, clues.ToCore(err))

size, err := io.Copy(io.Discard, itemData)
@@ -142,13 +131,13 @@ func (suite *ItemIntegrationSuite) TestIsURLExpired() {
ctx, flush := tester.NewContext(t)
defer flush()

driveItem := getOneDriveItem(ctx, t, suite.service.ac, suite.userDriveID)
driveItem := getOneDriveItem(ctx, t, suite.m365.AC, suite.m365.User.DriveID)
require.NotEmpty(
t,
driveItem,
"no file item found for user %s drive %s",
suite.user,
suite.userDriveID)
"no file item found for user %q drive %q",
suite.m365.User.ID,
suite.m365.User.DriveID)

var url string

@@ -173,7 +162,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
}{
{
name: "",
driveID: suite.userDriveID,
driveID: suite.m365.User.DriveID,
},
// {
// name: "sharePoint",
@@ -183,12 +172,12 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
rh := NewUserDriveRestoreHandler(suite.service.ac)
rh := NewUserDriveRestoreHandler(suite.m365.AC)

ctx, flush := tester.NewContext(t)
defer flush()

root, err := suite.service.ac.Drives().GetRootFolder(ctx, test.driveID)
root, err := suite.m365.AC.Drives().GetRootFolder(ctx, test.driveID)
require.NoError(t, err, clues.ToCore(err))

newFolderName := testdata.DefaultRestoreConfig("folder").Location
@@ -217,7 +206,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {

// HACK: Leveraging this to test getFolder behavior for a file. `getFolder()` on the
// newly created item should fail because it's a file not a folder
_, err = suite.service.ac.Drives().GetFolderByName(
_, err = suite.m365.AC.Drives().GetFolderByName(
ctx,
test.driveID,
ptr.Val(newFolder.GetId()),
@@ -261,7 +250,7 @@ func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
}{
{
name: "oneDrive",
driveID: suite.userDriveID,
driveID: suite.m365.User.DriveID,
},
// {
// name: "sharePoint",
@@ -275,11 +264,11 @@ func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
ctx, flush := tester.NewContext(t)
defer flush()

root, err := suite.service.ac.Drives().GetRootFolder(ctx, test.driveID)
root, err := suite.m365.AC.Drives().GetRootFolder(ctx, test.driveID)
require.NoError(t, err, clues.ToCore(err))

// Lookup a folder that doesn't exist
_, err = suite.service.ac.Drives().GetFolderByName(
_, err = suite.m365.AC.Drives().GetFolderByName(
ctx,
test.driveID,
ptr.Val(root.GetId()),
@@ -287,7 +276,7 @@ func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
require.ErrorIs(t, err, api.ErrFolderNotFound, clues.ToCore(err))

// Lookup a folder that does exist
_, err = suite.service.ac.Drives().GetFolderByName(
_, err = suite.m365.AC.Drives().GetFolderByName(
ctx,
test.driveID,
ptr.Val(root.GetId()),
@@ -307,6 +296,7 @@ func (m mockGetter) Get(
ctx context.Context,
url string,
headers map[string]string,
requireAuth bool,
) (*http.Response, error) {
return m.GetFunc(ctx, url)
}
@@ -394,7 +384,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
return nil, clues.New("test error")
},
errorExpected: require.Error,
rcExpected: require.Nil,
rcExpected: require.NotNil,
},
{
name: "download url is empty",
@@ -431,7 +421,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
}, nil
},
errorExpected: require.Error,
rcExpected: require.Nil,
rcExpected: require.NotNil,
},
{
name: "non-2xx http response",
@@ -450,7 +440,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
}, nil
},
errorExpected: require.Error,
rcExpected: require.Nil,
rcExpected: require.NotNil,
},
}

@@ -463,9 +453,78 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
mg := mockGetter{
GetFunc: test.GetFunc,
}
rc, err := downloadItem(ctx, mg, custom.ToCustomDriveItem(test.itemFunc()))
rc, err := downloadItem(
ctx,
mg,
"driveID",
custom.ToCustomDriveItem(test.itemFunc()))
test.errorExpected(t, err, clues.ToCore(err))
test.rcExpected(t, rc)
test.rcExpected(t, rc, "reader should only be nil if item is nil")
})
}
}

func (suite *ItemUnitTestSuite) TestDownloadItem_urlByFileSize() {
var (
testRc = io.NopCloser(bytes.NewReader([]byte("test")))
url = "https://example.com"
okResp = &http.Response{
StatusCode: http.StatusOK,
Body: testRc,
}
)

table := []struct {
name string
itemFunc func() models.DriveItemable
GetFunc func(ctx context.Context, url string) (*http.Response, error)
errorExpected require.ErrorAssertionFunc
rcExpected require.ValueAssertionFunc
label string
}{
{
name: "big file",
itemFunc: func() models.DriveItemable {
di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{"@microsoft.graph.downloadUrl": url})
di.SetSize(ptr.To[int64](20 * gigabyte))

return di
},
GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
assert.Contains(suite.T(), url, "/content")
return okResp, nil
},
},
{
name: "small file",
itemFunc: func() models.DriveItemable {
di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{"@microsoft.graph.downloadUrl": url})
di.SetSize(ptr.To[int64](2 * gigabyte))

return di
},
GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
assert.NotContains(suite.T(), url, "/content")
return okResp, nil
},
},
}

for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()

ctx, flush := tester.NewContext(t)
defer flush()

_, err := downloadItem(
ctx,
mockGetter{GetFunc: test.GetFunc},
"driveID",
custom.ToCustomDriveItem(test.itemFunc()))
require.NoError(t, err, clues.ToCore(err))
})
}
}
@@ -522,7 +581,11 @@ func (suite *ItemUnitTestSuite) TestDownloadItem_ConnectionResetErrorOnFirstRead
mg := mockGetter{
GetFunc: GetFunc,
}
rc, err := downloadItem(ctx, mg, custom.ToCustomDriveItem(itemFunc()))
rc, err := downloadItem(
ctx,
mg,
"driveID",
custom.ToCustomDriveItem(itemFunc()))
errorExpected(t, err, clues.ToCore(err))
rcExpected(t, rc)
@@ -10,12 +10,13 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/syncd"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
odmetadata "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
)

// empty string is used to indicate that a permission cannot be restored
@@ -23,20 +24,20 @@ const nonRestorablePermission = ""

func getParentMetadata(
parentPath path.Path,
parentDirToMeta syncd.MapTo[metadata.Metadata],
) (metadata.Metadata, error) {
parentDirToMeta syncd.MapTo[odmetadata.Metadata],
) (odmetadata.Metadata, error) {
parentMeta, ok := parentDirToMeta.Load(parentPath.String())
if !ok {
drivePath, err := path.ToDrivePath(parentPath)
if err != nil {
return metadata.Metadata{}, clues.Wrap(err, "invalid restore path")
return odmetadata.Metadata{}, clues.Wrap(err, "invalid restore path")
}

if len(drivePath.Folders) != 0 {
return metadata.Metadata{}, clues.Wrap(err, "computing item permissions")
return odmetadata.Metadata{}, clues.Wrap(err, "computing item permissions")
}

parentMeta = metadata.Metadata{}
parentMeta = odmetadata.Metadata{}
}

return parentMeta, nil
@@ -49,9 +50,9 @@ func getCollectionMetadata(
caches *restoreCaches,
backupVersion int,
restorePerms bool,
) (metadata.Metadata, error) {
) (odmetadata.Metadata, error) {
if !restorePerms || backupVersion < version.OneDrive1DataAndMetaFiles {
return metadata.Metadata{}, nil
return odmetadata.Metadata{}, nil
}

var (
@@ -61,13 +62,13 @@ func getCollectionMetadata(

if len(drivePath.Folders) == 0 {
// No permissions for root folder
return metadata.Metadata{}, nil
return odmetadata.Metadata{}, nil
}

if backupVersion < version.OneDrive4DirIncludesPermissions {
colMeta, err := getParentMetadata(fullPath, caches.ParentDirToMeta)
if err != nil {
return metadata.Metadata{}, clues.Wrap(err, "collection metadata")
return odmetadata.Metadata{}, clues.Wrap(err, "collection metadata")
}

return colMeta, nil
@@ -82,7 +83,7 @@ func getCollectionMetadata(

meta, err := FetchAndReadMetadata(ctx, dc, metaName)
if err != nil {
return metadata.Metadata{}, clues.Wrap(err, "collection metadata")
return odmetadata.Metadata{}, clues.Wrap(err, "collection metadata")
}

return meta, nil
@@ -93,9 +94,9 @@ func getCollectionMetadata(
func computePreviousLinkShares(
ctx context.Context,
originDir path.Path,
parentMetas syncd.MapTo[metadata.Metadata],
) ([]metadata.LinkShare, error) {
linkShares := []metadata.LinkShare{}
parentMetas syncd.MapTo[odmetadata.Metadata],
) ([]odmetadata.LinkShare, error) {
linkShares := []odmetadata.LinkShare{}
ctx = clues.Add(ctx, "origin_dir", originDir)

parent, err := originDir.Dir()
@@ -122,7 +123,7 @@ func computePreviousLinkShares(

// Any change in permissions would change it to custom
// permission set and so we can filter on that.
if meta.SharingMode == metadata.SharingModeCustom {
if meta.SharingMode == odmetadata.SharingModeCustom {
linkShares = append(linkShares, meta.LinkShares...)
}

@@ -143,11 +144,11 @@ func computePreviousMetadata(
ctx context.Context,
originDir path.Path,
// map parent dir -> parent's metadata
parentMetas syncd.MapTo[metadata.Metadata],
) (metadata.Metadata, error) {
parentMetas syncd.MapTo[odmetadata.Metadata],
) (odmetadata.Metadata, error) {
var (
parent path.Path
meta metadata.Metadata
meta odmetadata.Metadata

err error
ok bool
@@ -158,26 +159,26 @@ func computePreviousMetadata(
for {
parent, err = parent.Dir()
if err != nil {
return metadata.Metadata{}, clues.WrapWC(ctx, err, "getting parent")
return odmetadata.Metadata{}, clues.WrapWC(ctx, err, "getting parent")
}

ictx := clues.Add(ctx, "parent_dir", parent)

drivePath, err := path.ToDrivePath(parent)
if err != nil {
return metadata.Metadata{}, clues.WrapWC(ictx, err, "transforming dir to drivePath")
return odmetadata.Metadata{}, clues.WrapWC(ictx, err, "transforming dir to drivePath")
}

if len(drivePath.Folders) == 0 {
return metadata.Metadata{}, nil
return odmetadata.Metadata{}, nil
}

meta, ok = parentMetas.Load(parent.String())
if !ok {
return metadata.Metadata{}, clues.NewWC(ictx, "no metadata found for parent folder: "+parent.String())
return odmetadata.Metadata{}, clues.NewWC(ictx, "no metadata found for parent folder: "+parent.String())
}

if meta.SharingMode == metadata.SharingModeCustom {
if meta.SharingMode == odmetadata.SharingModeCustom {
return meta, nil
}
}
@@ -195,7 +196,7 @@ func UpdatePermissions(
udip updateDeleteItemPermissioner,
driveID string,
itemID string,
permAdded, permRemoved []metadata.Permission,
permAdded, permRemoved []odmetadata.Permission,
oldPermIDToNewID syncd.MapTo[string],
errs *fault.Bus,
) error {
@@ -260,7 +261,7 @@ func UpdatePermissions(

// TODO: sitegroup support. Currently errors with "One or more users could not be resolved",
// likely due to the site group entityID consisting of a single integer (ex: 4)
if len(roles) == 0 || p.EntityType == metadata.GV2SiteGroup {
if len(roles) == 0 || p.EntityType == odmetadata.GV2SiteGroup {
continue
}

@@ -315,7 +316,7 @@ func UpdateLinkShares(
upils updateDeleteItemLinkSharer,
driveID string,
itemID string,
lsAdded, lsRemoved []metadata.LinkShare,
lsAdded, lsRemoved []odmetadata.LinkShare,
oldLinkShareIDToNewID syncd.MapTo[string],
errs *fault.Bus,
) (bool, error) {
@@ -347,7 +348,7 @@ func UpdateLinkShares(
for _, iden := range ls.Entities {
// TODO: sitegroup support. Currently errors with "One or more users could not be resolved",
// likely due to the site group entityID consisting of a single integer (ex: 4)
if iden.EntityType == metadata.GV2SiteGroup {
if iden.EntityType == odmetadata.GV2SiteGroup {
continue
}

@@ -457,11 +458,11 @@ func UpdateLinkShares(

func filterUnavailableEntitiesInLinkShare(
ctx context.Context,
linkShares []metadata.LinkShare,
linkShares []odmetadata.LinkShare,
availableEntities ResourceIDNames,
oldLinkShareIDToNewID syncd.MapTo[string],
) []metadata.LinkShare {
filtered := []metadata.LinkShare{}
) []odmetadata.LinkShare {
filtered := []odmetadata.LinkShare{}

if availableEntities.Users == nil || availableEntities.Groups == nil {
// This should not happen unless we failed to fill in the caches
@@ -470,20 +471,20 @@ func filterUnavailableEntitiesInLinkShare(
}

for _, p := range linkShares {
entities := []metadata.Entity{}
entities := []odmetadata.Entity{}

for _, e := range p.Entities {
available := false

switch e.EntityType {
case metadata.GV2User:
case odmetadata.GV2User:
// Link shares with external users won't have IDs
if len(e.ID) == 0 && len(e.Email) > 0 {
available = true
} else {
_, available = availableEntities.Users.NameOf(e.ID)
}
case metadata.GV2Group:
case odmetadata.GV2Group:
_, available = availableEntities.Groups.NameOf(e.ID)
default:
// We only know about users and groups
@@ -513,26 +514,26 @@ func filterUnavailableEntitiesInLinkShare(

func filterUnavailableEntitiesInPermissions(
ctx context.Context,
perms []metadata.Permission,
perms []odmetadata.Permission,
availableEntities ResourceIDNames,
oldPermIDToNewID syncd.MapTo[string],
) []metadata.Permission {
) []odmetadata.Permission {
if availableEntities.Users == nil || availableEntities.Groups == nil {
// This should not happen unless we failed to fill in the caches
logger.Ctx(ctx).Info("no available entities, not filtering link shares")
return perms
}

filtered := []metadata.Permission{}
filtered := []odmetadata.Permission{}

for _, p := range perms {
available := false

switch p.EntityType {
case metadata.GV2User:
case odmetadata.GV2User:
_, ok := availableEntities.Users.NameOf(p.EntityID)
available = available || ok
case metadata.GV2Group:
case odmetadata.GV2Group:
_, ok := availableEntities.Groups.NameOf(p.EntityID)
available = available || ok
default:
@@ -564,11 +565,11 @@ func RestorePermissions(
driveID string,
itemID string,
itemPath path.Path,
current metadata.Metadata,
current odmetadata.Metadata,
caches *restoreCaches,
errs *fault.Bus,
) {
if current.SharingMode == metadata.SharingModeInherited {
if current.SharingMode == odmetadata.SharingModeInherited {
return
}

@@ -582,7 +583,7 @@ func RestorePermissions(

}

if previousLinkShares != nil {
lsAdded, lsRemoved := metadata.DiffLinkShares(previousLinkShares, current.LinkShares)
lsAdded, lsRemoved := odmetadata.DiffLinkShares(previousLinkShares, current.LinkShares)
lsAdded = filterUnavailableEntitiesInLinkShare(ctx, lsAdded, caches.AvailableEntities, caches.OldLinkShareIDToNewID)

// Link shares have to be updated before permissions as we have to
@@ -608,7 +609,7 @@ func RestorePermissions(
return
}

permAdded, permRemoved := metadata.DiffPermissions(previous.Permissions, current.Permissions)
permAdded, permRemoved := odmetadata.DiffPermissions(previous.Permissions, current.Permissions)
permAdded = filterUnavailableEntitiesInPermissions(ctx, permAdded, caches.AvailableEntities, caches.OldPermIDToNewID)

if didReset {
@@ -617,7 +618,7 @@ func RestorePermissions(
// that an item has as they too will be removed.
logger.Ctx(ctx).Debug("link share creation reset all inherited permissions")

permRemoved = []metadata.Permission{}
permRemoved = []odmetadata.Permission{}
permAdded = current.Permissions
}
@@ -17,7 +17,7 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
odmetadata "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/operations/inject"
@@ -31,6 +31,7 @@ import (
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
"github.com/alcionai/corso/src/pkg/services/m365/custom"
)

@@ -552,7 +553,7 @@ func CreateRestoreFolders(
drivePath *path.DrivePath,
restoreDir *path.Builder,
folderPath path.Path,
folderMetadata metadata.Metadata,
folderMetadata odmetadata.Metadata,
caches *restoreCaches,
restorePerms bool,
errs *fault.Bus,
@@ -876,12 +877,12 @@ func FetchAndReadMetadata(
ctx context.Context,
fibn data.FetchItemByNamer,
metaName string,
) (metadata.Metadata, error) {
) (odmetadata.Metadata, error) {
ctx = clues.Add(ctx, "meta_file_name", metaName)

metaFile, err := fibn.FetchItemByName(ctx, metaName)
if err != nil {
return metadata.Metadata{}, clues.Wrap(err, "getting item metadata")
return odmetadata.Metadata{}, clues.Wrap(err, "getting item metadata")
}

metaReader := metaFile.ToReader()
@@ -889,25 +890,25 @@ func FetchAndReadMetadata(

meta, err := getMetadata(metaReader)
if err != nil {
return metadata.Metadata{}, clues.Wrap(err, "deserializing item metadata")
return odmetadata.Metadata{}, clues.Wrap(err, "deserializing item metadata")
}

return meta, nil
}

// getMetadata read and parses the metadata info for an item
func getMetadata(metar io.ReadCloser) (metadata.Metadata, error) {
var meta metadata.Metadata
func getMetadata(metar io.ReadCloser) (odmetadata.Metadata, error) {
var meta odmetadata.Metadata
// `metar` will be nil for the top level container folder
if metar != nil {
metaraw, err := io.ReadAll(metar)
if err != nil {
return metadata.Metadata{}, err
return odmetadata.Metadata{}, err
}

err = json.Unmarshal(metaraw, &meta)
if err != nil {
return metadata.Metadata{}, err
return odmetadata.Metadata{}, err
}
}

@@ -93,8 +93,9 @@ func (h siteBackupHandler) Get(
ctx context.Context,
url string,
headers map[string]string,
requireAuth bool,
) (*http.Response, error) {
return h.ac.Get(ctx, url, headers)
return h.ac.Get(ctx, url, headers, requireAuth)
}

func (h siteBackupHandler) PathPrefix(

@@ -18,6 +18,7 @@ import (

"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata"
@@ -34,9 +35,7 @@ import (

type URLCacheIntegrationSuite struct {
tester.Suite
ac api.Client
user string
driveID string
m365 its.M365IntgTestSetup
}

func TestURLCacheIntegrationSuite(t *testing.T) {
@@ -49,29 +48,12 @@ func TestURLCacheIntegrationSuite(t *testing.T) {

func (suite *URLCacheIntegrationSuite) SetupSuite() {
t := suite.T()
suite.m365 = its.GetM365(t)

ctx, flush := tester.NewContext(t)
defer flush()

graph.InitializeConcurrencyLimiter(ctx, true, 4)

suite.user = tconfig.SecondaryM365UserID(t)

acct := tconfig.NewM365Account(t)

creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))

suite.ac, err = api.NewClient(
creds,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))

drive, err := suite.ac.Users().GetDefaultDrive(ctx, suite.user)
require.NoError(t, err, clues.ToCore(err))

suite.driveID = ptr.Val(drive.GetId())
}

// Basic test for urlCache. Create some files in onedrive, then access them via
@@ -79,22 +61,18 @@ func (suite *URLCacheIntegrationSuite) SetupSuite() {
func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
var (
t = suite.T()
ac = suite.ac.Drives()
driveID = suite.driveID
ac = suite.m365.AC.Drives()
driveID = suite.m365.User.DriveID
newFolderName = testdata.DefaultRestoreConfig("folder").Location
)

ctx, flush := tester.NewContext(t)
defer flush()

// Create a new test folder
root, err := ac.GetRootFolder(ctx, driveID)
require.NoError(t, err, clues.ToCore(err))

newFolder, err := ac.PostItemInContainer(
ctx,
driveID,
ptr.Val(root.GetId()),
suite.m365.User.DriveRootFolderID,
api.NewDriveItem(newFolderName, true),
control.Copy)
require.NoError(t, err, clues.ToCore(err))
@@ -105,7 +83,7 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
// Get the previous delta to feed into url cache
pager := ac.EnumerateDriveItemsDelta(
ctx,
suite.driveID,
driveID,
"",
api.CallConfig{
Select: api.URLCacheDriveItemProps(),
@@ -142,10 +120,10 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {

// Create a new URL cache with a long TTL
uc, err := newURLCache(
suite.driveID,
driveID,
du.URL,
1*time.Hour,
suite.ac.Drives(),
ac,
count.New(),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
@@ -176,7 +154,8 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
http.MethodGet,
props.downloadURL,
nil,
nil)
nil,
false)
require.NoError(t, err, clues.ToCore(err))

require.NotNil(t, resp)

@@ -93,8 +93,9 @@ func (h userDriveBackupHandler) Get(
ctx context.Context,
url string,
headers map[string]string,
requireAuth bool,
) (*http.Response, error) {
return h.ac.Get(ctx, url, headers)
return h.ac.Get(ctx, url, headers, requireAuth)
}

func (h userDriveBackupHandler) PathPrefix(

@@ -296,6 +296,7 @@ func populateCollections(
cl),
qp.ProtectedResource.ID(),
bh.itemHandler(),
bh,
addAndRem.Added,
addAndRem.Removed,
// TODO: produce a feature flag that allows selective

@@ -24,6 +24,7 @@ import (
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
@@ -87,6 +88,14 @@ func (bh mockBackupHandler) folderGetter() containerGetter { return
func (bh mockBackupHandler) previewIncludeContainers() []string { return bh.previewIncludes }
func (bh mockBackupHandler) previewExcludeContainers() []string { return bh.previewExcludes }

func (bh mockBackupHandler) CanSkipItemFailure(
err error,
resourceID string,
opts control.Options,
) (fault.SkipCause, bool) {
return "", false
}
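The mock above always answers "do not skip" (it returns ("", false)), so existing tests keep exercising the error path. For orientation, a hedged sketch of how a collection consumes the (cause, ok) pair; it condenses the streamItems change further down, and resourceID, category, and itemID are stand-in variables rather than a verbatim call site.

// cause names why the item may be skipped; ok gates skip-vs-error handling.
cause, ok := bh.CanSkipItemFailure(err, resourceID, opts)
if ok {
	// record a skip entry instead of failing the backup
	errs.AddSkip(ctx, fault.FileSkip(cause, category, itemID, itemID, nil))
} else {
	// fall through to the normal recoverable-error path
	errs.AddRecoverable(ctx, clues.Wrap(err, "fetching item"))
}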
func (bh mockBackupHandler) NewContainerCache(
userID string,
) (string, graph.ContainerResolver) {
@@ -472,10 +481,7 @@ func newStatusUpdater(t *testing.T, wg *sync.WaitGroup) func(status *support.Con

type BackupIntgSuite struct {
tester.Suite
user string
site string
tenantID string
ac api.Client
m365 its.M365IntgTestSetup
}

func TestBackupIntgSuite(t *testing.T) {
@@ -488,35 +494,18 @@ func TestBackupIntgSuite(t *testing.T) {

func (suite *BackupIntgSuite) SetupSuite() {
t := suite.T()
suite.m365 = its.GetM365(t)

ctx, flush := tester.NewContext(t)
defer flush()

graph.InitializeConcurrencyLimiter(ctx, true, 4)

suite.user = tconfig.M365UserID(t)
suite.site = tconfig.M365SiteID(t)

acct := tconfig.NewM365Account(t)
creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))

suite.ac, err = api.NewClient(
creds,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))

suite.tenantID = creds.AzureTenantID

tester.LogTimeOfTest(t)
}

func (suite *BackupIntgSuite) TestMailFetch() {
var (
userID = tconfig.M365UserID(suite.T())
users = []string{userID}
handlers = BackupHandlers(suite.ac)
users = []string{suite.m365.User.ID}
handlers = BackupHandlers(suite.m365.AC)
)

tests := []struct {
@@ -560,14 +549,14 @@ func (suite *BackupIntgSuite) TestMailFetch() {
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: ctrlOpts,
ProtectedResource: inMock.NewProvider(userID, userID),
ProtectedResource: suite.m365.User.Provider,
}

collections, err := CreateCollections(
ctx,
bpc,
handlers,
suite.tenantID,
suite.m365.TenantID,
test.scope,
metadata.DeltaPaths{},
func(status *support.ControllerOperationStatus) {},
@@ -602,9 +591,8 @@ func (suite *BackupIntgSuite) TestMailFetch() {

func (suite *BackupIntgSuite) TestDelta() {
var (
userID = tconfig.M365UserID(suite.T())
users = []string{userID}
handlers = BackupHandlers(suite.ac)
users = []string{suite.m365.User.ID}
handlers = BackupHandlers(suite.m365.AC)
)

tests := []struct {
@@ -640,7 +628,7 @@ func (suite *BackupIntgSuite) TestDelta() {
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: inMock.NewProvider(userID, userID),
ProtectedResource: suite.m365.User.Provider,
}

// get collections without providing any delta history (ie: full backup)
@@ -648,7 +636,7 @@ func (suite *BackupIntgSuite) TestDelta() {
ctx,
bpc,
handlers,
suite.tenantID,
suite.m365.TenantID,
test.scope,
metadata.DeltaPaths{},
func(status *support.ControllerOperationStatus) {},
@@ -681,7 +669,7 @@ func (suite *BackupIntgSuite) TestDelta() {
ctx,
bpc,
handlers,
suite.tenantID,
suite.m365.TenantID,
test.scope,
dps,
func(status *support.ControllerOperationStatus) {},
@@ -703,8 +691,8 @@ func (suite *BackupIntgSuite) TestMailSerializationRegression() {

var (
wg sync.WaitGroup
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
users = []string{suite.m365.User.ID}
handlers = BackupHandlers(suite.m365.AC)
)

sel := selectors.NewExchangeBackup(users)
@@ -713,7 +701,7 @@ func (suite *BackupIntgSuite) TestMailSerializationRegression() {
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: inMock.NewProvider(suite.user, suite.user),
ProtectedResource: suite.m365.User.Provider,
Selector: sel.Selector,
}

@@ -721,7 +709,7 @@ func (suite *BackupIntgSuite) TestMailSerializationRegression() {
ctx,
bpc,
handlers,
suite.tenantID,
suite.m365.TenantID,
sel.Scopes()[0],
metadata.DeltaPaths{},
newStatusUpdater(t, &wg),
@@ -773,8 +761,8 @@ func (suite *BackupIntgSuite) TestMailSerializationRegression() {
// a regression test to ensure that downloaded items can be uploaded.
func (suite *BackupIntgSuite) TestContactSerializationRegression() {
var (
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
users = []string{suite.m365.User.ID}
handlers = BackupHandlers(suite.m365.AC)
)

tests := []struct {
@@ -801,14 +789,14 @@ func (suite *BackupIntgSuite) TestContactSerializationRegression() {
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: inMock.NewProvider(suite.user, suite.user),
ProtectedResource: suite.m365.User.Provider,
}

edcs, err := CreateCollections(
ctx,
bpc,
handlers,
suite.tenantID,
suite.m365.TenantID,
test.scope,
metadata.DeltaPaths{},
newStatusUpdater(t, &wg),
@@ -875,8 +863,8 @@ func (suite *BackupIntgSuite) TestContactSerializationRegression() {
// to be able to successfully query, download and restore event objects
func (suite *BackupIntgSuite) TestEventsSerializationRegression() {
var (
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
users = []string{suite.m365.User.ID}
handlers = BackupHandlers(suite.m365.AC)
)

tests := []struct {
@@ -911,14 +899,14 @@ func (suite *BackupIntgSuite) TestEventsSerializationRegression() {
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: inMock.NewProvider(suite.user, suite.user),
ProtectedResource: suite.m365.User.Provider,
}

collections, err := CreateCollections(
ctx,
bpc,
handlers,
suite.tenantID,
suite.m365.TenantID,
test.scope,
metadata.DeltaPaths{},
newStatusUpdater(t, &wg),
@@ -19,6 +19,7 @@ import (
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/errs/core"
"github.com/alcionai/corso/src/pkg/fault"
@@ -68,21 +69,21 @@ func getItemAndInfo(
ctx context.Context,
getter itemGetterSerializer,
userID string,
id string,
itemID string,
useImmutableIDs bool,
parentPath string,
) ([]byte, *details.ExchangeInfo, error) {
item, info, err := getter.GetItem(
ctx,
userID,
id,
itemID,
fault.New(true)) // temporary way to force a failFast error
if err != nil {
return nil, nil, clues.WrapWC(ctx, err, "fetching item").
Label(fault.LabelForceNoBackupCreation)
}

itemData, err := getter.Serialize(ctx, item, userID, id)
itemData, err := getter.Serialize(ctx, item, userID, itemID)
if err != nil {
return nil, nil, clues.WrapWC(ctx, err, "serializing item")
}
@@ -108,6 +109,7 @@ func NewCollection(
bc data.BaseCollection,
user string,
items itemGetterSerializer,
canSkipFailChecker canSkipItemFailurer,
origAdded map[string]time.Time,
origRemoved []string,
validModTimes bool,
@@ -140,6 +142,7 @@ func NewCollection(
added: added,
removed: removed,
getter: items,
skipChecker: canSkipFailChecker,
statusUpdater: statusUpdater,
}
}
@@ -150,6 +153,7 @@ func NewCollection(
added: added,
removed: removed,
getter: items,
skipChecker: canSkipFailChecker,
statusUpdater: statusUpdater,
counter: counter,
}
@@ -167,7 +171,8 @@ type prefetchCollection struct {
// removed is a list of item IDs that were deleted from, or moved out, of a container
removed map[string]struct{}

getter itemGetterSerializer
getter itemGetterSerializer
skipChecker canSkipItemFailurer

statusUpdater support.StatusUpdater
}
@@ -194,11 +199,12 @@ func (col *prefetchCollection) streamItems(
wg sync.WaitGroup
progressMessage chan<- struct{}
user = col.user
dataCategory = col.Category().String()
)

ctx = clues.Add(
ctx,
"category", col.Category().String())
"category", dataCategory)

defer func() {
close(stream)
@@ -227,7 +233,7 @@ func (col *prefetchCollection) streamItems(
defer close(semaphoreCh)

// delete all removed items
for id := range col.removed {
for itemID := range col.removed {
semaphoreCh <- struct{}{}

wg.Add(1)
@@ -247,7 +253,7 @@ func (col *prefetchCollection) streamItems(
if progressMessage != nil {
progressMessage <- struct{}{}
}
}(id)
}(itemID)
}

var (
@@ -256,7 +262,7 @@ func (col *prefetchCollection) streamItems(
)

// add any new items
for id := range col.added {
for itemID := range col.added {
if el.Failure() != nil {
break
}
@@ -277,8 +283,23 @@ func (col *prefetchCollection) streamItems(
col.Opts().ToggleFeatures.ExchangeImmutableIDs,
parentPath)
if err != nil {
// pulled outside the switch due to multiple return values.
cause, canSkip := col.skipChecker.CanSkipItemFailure(
err,
user,
col.Opts())

// Handle known error cases
switch {
case canSkip:
// this is a special case handler that allows the item to be skipped
// instead of producing an error.
errs.AddSkip(ctx, fault.FileSkip(
cause,
dataCategory,
id,
id,
nil))
case errors.Is(err, core.ErrNotFound):
// Don't report errors for deleted items as there's no way for us to
// back up data that is gone. Record it as a "success", since there's
@@ -300,6 +321,19 @@ func (col *prefetchCollection) streamItems(
id,
map[string]any{"parentPath": parentPath}))
atomic.AddInt64(&success, 1)
case graph.IsErrCorruptData(err):
// These items cannot be downloaded, graph error indicates that the item
// data is corrupted. Add to skipped list.
logger.
CtxErr(ctx, err).
With("skipped_reason", fault.SkipCorruptData).
Info("inaccessible email")
errs.AddSkip(ctx, fault.EmailSkip(
fault.SkipCorruptData,
user,
id,
map[string]any{"parentPath": parentPath}))
atomic.AddInt64(&success, 1)
default:
col.Counter.Inc(count.StreamItemsErred)
el.AddRecoverable(ctx, clues.Wrap(err, "fetching item").Label(fault.LabelForceNoBackupCreation))
@@ -336,7 +370,7 @@ func (col *prefetchCollection) streamItems(
if progressMessage != nil {
progressMessage <- struct{}{}
}
}(id)
}(itemID)
}

wg.Wait()
@@ -364,7 +398,8 @@ type lazyFetchCollection struct {
// removed is a list of item IDs that were deleted from, or moved out, of a container
removed map[string]struct{}

getter itemGetterSerializer
getter itemGetterSerializer
skipChecker canSkipItemFailurer

statusUpdater support.StatusUpdater

@@ -391,8 +426,8 @@ func (col *lazyFetchCollection) streamItems(
var (
success int64
progressMessage chan<- struct{}

user = col.user
user = col.user
el = errs.Local()
)

defer func() {
@@ -404,7 +439,7 @@ func (col *lazyFetchCollection) streamItems(
int(success),
0,
col.FullPath().Folder(false),
errs.Failure())
el.Failure())
}()

if len(col.added)+len(col.removed) > 0 {
@@ -430,7 +465,7 @@ func (col *lazyFetchCollection) streamItems(

// add any new items
for id, modTime := range col.added {
if errs.Failure() != nil {
if el.Failure() != nil {
break
}

@@ -446,15 +481,18 @@ func (col *lazyFetchCollection) streamItems(
&lazyItemGetter{
userID: user,
itemID: id,
category: col.Category(),
getter: col.getter,
modTime: modTime,
immutableIDs: col.Opts().ToggleFeatures.ExchangeImmutableIDs,
parentPath: parentPath,
skipChecker: col.skipChecker,
opts: col.Opts(),
},
id,
modTime,
col.counter,
errs)
el)

atomic.AddInt64(&success, 1)

@@ -468,9 +506,12 @@ type lazyItemGetter struct {
getter itemGetterSerializer
userID string
itemID string
category path.CategoryType
parentPath string
modTime time.Time
immutableIDs bool
skipChecker canSkipItemFailurer
opts control.Options
}

func (lig *lazyItemGetter) GetData(
@@ -485,6 +526,25 @@ func (lig *lazyItemGetter) GetData(
lig.immutableIDs,
lig.parentPath)
if err != nil {
if lig.skipChecker != nil {
cause, canSkip := lig.skipChecker.CanSkipItemFailure(
err,
lig.userID,
lig.opts)
if canSkip {
errs.AddSkip(ctx, fault.FileSkip(
cause,
lig.category.String(),
lig.itemID,
lig.itemID,
nil))

return nil, nil, false, clues.
NewWC(ctx, "error marked as skippable by handler").
Label(graph.LabelsSkippable)
}
}

// If an item was deleted then return an empty file so we don't fail
// the backup and return a sentinel error when asked for ItemInfo so
// we don't display the item in the backup.
@@ -499,7 +559,7 @@ func (lig *lazyItemGetter) GetData(
err = clues.Stack(err)
errs.AddRecoverable(ctx, err)

return nil, nil, false, err
return nil, nil, false, clues.Stack(err)
}

// Update the mod time to what we already told kopia about. This is required
@@ -28,6 +28,7 @@ import (
"github.com/alcionai/corso/src/pkg/errs/core"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
graphTD "github.com/alcionai/corso/src/pkg/services/m365/api/graph/testdata"
)
@@ -153,6 +154,7 @@ func (suite *CollectionUnitSuite) TestNewCollection_state() {
count.New()),
"u",
mock.DefaultItemGetSerialize(),
mock.NeverCanSkipFailChecker(),
nil,
nil,
colType.validModTimes,
@@ -298,6 +300,7 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items() {
count.New()),
"",
&mock.ItemGetSerialize{},
mock.NeverCanSkipFailChecker(),
test.added,
maps.Keys(test.removed),
false,
@@ -333,6 +336,232 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items() {
}
}

func (suite *CollectionUnitSuite) TestPrefetchCollection_Items_skipFailure() {
var (
start = time.Now().Add(-time.Second)
statusUpdater = func(*support.ControllerOperationStatus) {}
)

table := []struct {
name string
category path.CategoryType
handler backupHandler
added map[string]time.Time
removed map[string]struct{}
expectItemCount int
expectSkippedCount int
expectErr assert.ErrorAssertionFunc
}{
{
name: "no items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
expectErr: assert.NoError,
},
{
name: "events only added items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": {},
"flannigan": {},
"fitzbog": {},
},
expectItemCount: 0,
expectSkippedCount: 3,
expectErr: assert.NoError,
},
{
name: "events only removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "events added and removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "contacts only added items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": {},
"flannigan": {},
"fitzbog": {},
},
expectItemCount: 0,
expectSkippedCount: 0,
expectErr: assert.Error,
},
{
name: "contacts only removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "contacts added and removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "mail only added items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": {},
"flannigan": {},
"fitzbog": {},
},
expectItemCount: 0,
expectSkippedCount: 0,
expectErr: assert.Error,
},
{
name: "mail only removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "mail added and removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: assert.NoError,
},
}

for _, test := range table {
suite.Run(test.name, func() {
var (
t = suite.T()
errs = fault.New(true)
itemCount int
)

ctx, flush := tester.NewContext(t)
defer flush()

fullPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))

locPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))

opts := control.DefaultOptions()
opts.SkipEventsOnInstance503ForResources = map[string]struct{}{}
opts.SkipEventsOnInstance503ForResources["pr"] = struct{}{}

col := NewCollection(
data.NewBaseCollection(
fullPath,
nil,
locPath.ToBuilder(),
opts,
false,
count.New()),
"pr",
&mock.ItemGetSerialize{
SerializeErr: graph.ErrServiceUnavailableEmptyResp,
},
test.handler,
test.added,
maps.Keys(test.removed),
false,
statusUpdater,
count.New())

for item := range col.Items(ctx, errs) {
itemCount++

_, rok := test.removed[item.ID()]
if rok {
dimt, ok := item.(data.ItemModTime)
require.True(t, ok, "item implements data.ItemModTime")
assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
assert.True(t, item.Deleted(), "removals should be marked as deleted")
}

_, aok := test.added[item.ID()]
if !rok && aok {
assert.False(t, item.Deleted(), "additions should not be marked as deleted")
}

assert.True(t, aok || rok, "item must be either added or removed: %q", item.ID())
}

test.expectErr(t, errs.Failure())
assert.Equal(
t,
test.expectItemCount,
itemCount,
"should see all expected items")
assert.Len(t, errs.Skipped(), test.expectSkippedCount)
})
}
}
|
||||
|
||||
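
The "not 1, because general is removed from the added map" comments above describe how the collection reconciles its delta inputs before enumeration. A minimal sketch of that reconciliation, assuming plain map types; the helper name is illustrative, not the collection's actual implementation:

	// reconcile drops any ID from added that also appears in removed:
	// an item that was added and then removed within the same delta
	// window is treated purely as a removal.
	func reconcile(added map[string]time.Time, removed map[string]struct{}) map[string]time.Time {
		for id := range removed {
			delete(added, id)
		}

		return added
	}

With added = {general} and removed = {general, goose, grumbles}, enumeration emits the three removed items, which is why expectItemCount is 3 rather than 1.
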
// This test verifies skipped error cases are handled correctly by collection enumeration
func (suite *CollectionUnitSuite) TestCollection_SkippedErrors() {
	var (
@ -364,6 +593,17 @@ func (suite *CollectionUnitSuite) TestCollection_SkippedErrors() {
			},
			expectedSkipError: fault.EmailSkip(fault.SkipInvalidRecipients, "", "fisher", nil),
		},
		{
			name: "ErrorCorruptData",
			added: map[string]time.Time{
				"fisher": {},
			},
			expectItemCount: 0,
			itemGetter: &mock.ItemGetSerialize{
				GetErr: graphTD.ODataErr(string(graph.ErrorCorruptData)),
			},
			expectedSkipError: fault.EmailSkip(fault.SkipCorruptData, "", "fisher", nil),
		},
	}

	for _, test := range table {
@ -387,6 +627,7 @@ func (suite *CollectionUnitSuite) TestCollection_SkippedErrors() {
					count.New()),
				"",
				test.itemGetter,
				mock.NeverCanSkipFailChecker(),
				test.added,
				nil,
				false,
@ -467,6 +708,7 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
			expectItemCount: 3,
			expectReads: []string{
				"fisher",
				"flannigan",
				"fitzbog",
			},
		},
@ -519,6 +761,7 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
					count.New()),
				"",
				mlg,
				mock.NeverCanSkipFailChecker(),
				test.added,
				maps.Keys(test.removed),
				true,
@ -530,10 +773,10 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {

				_, rok := test.removed[item.ID()]
				if rok {
					assert.True(t, item.Deleted(), "removals should be marked as deleted")
					dimt, ok := item.(data.ItemModTime)
					require.True(t, ok, "item implements data.ItemModTime")
					assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
					assert.True(t, item.Deleted(), "removals should be marked as deleted")
				}

				modTime, aok := test.added[item.ID()]
@ -542,7 +785,6 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
					// initializer.
					assert.Implements(t, (*data.ItemModTime)(nil), item)
					assert.Equal(t, modTime, item.(data.ItemModTime).ModTime(), "item mod time")

					assert.False(t, item.Deleted(), "additions should not be marked as deleted")

					// Check if the test wants us to read the item's data so the lazy
@ -562,6 +804,8 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
						// collection initializer.
						assert.NoError(t, err, clues.ToCore(err))
						assert.Equal(t, modTime, info.Modified(), "ItemInfo mod time")
					} else {
						assert.Fail(t, "unexpected read on item %s", item.ID())
					}
				}

@ -578,6 +822,294 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
	}
}
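
The hunks above extend TestLazyFetchCollection_Items_LazyFetch, whose core contract is that no fetch happens until an item's reader is consumed. A minimal sketch of the consumer side, using only calls visible in the tests:

	r := item.ToReader()

	// The lazy fetch (and any fetch or serialization error) surfaces
	// here, not when the item was enumerated.
	_, err := io.ReadAll(r)
	r.Close()

The mock getter's deferred check (mlg.check in the next test) then verifies that exactly the expected item IDs were fetched.
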
func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_skipFailure() {
	var (
		start         = time.Now().Add(-time.Second)
		statusUpdater = func(*support.ControllerOperationStatus) {}
		expectSkip    = func(t *testing.T, err error) {
			assert.Error(t, err, clues.ToCore(err))
			assert.ErrorContains(t, err, "skip")
			assert.True(t, clues.HasLabel(err, graph.LabelsSkippable), clues.ToCore(err))
		}
		expectNotSkipped = func(t *testing.T, err error) {
			assert.Error(t, err, clues.ToCore(err))
			assert.NotContains(t, err.Error(), "skip")
		}
	)

	table := []struct {
		name               string
		added              map[string]time.Time
		removed            map[string]struct{}
		category           path.CategoryType
		handler            backupHandler
		expectItemCount    int
		expectSkippedCount int
		expectReads        []string
		expectErr          func(t *testing.T, err error)
		expectFailure      assert.ErrorAssertionFunc
	}{
		{
			name:          "no items",
			category:      path.EventsCategory,
			handler:       newEventBackupHandler(api.Client{}),
			expectFailure: assert.NoError,
		},
		{
			name:     "events only added items",
			category: path.EventsCategory,
			handler:  newEventBackupHandler(api.Client{}),
			added: map[string]time.Time{
				"fisher":    start.Add(time.Minute),
				"flannigan": start.Add(2 * time.Minute),
				"fitzbog":   start.Add(3 * time.Minute),
			},
			expectItemCount:    3,
			expectSkippedCount: 3,
			expectReads: []string{
				"fisher",
				"flannigan",
				"fitzbog",
			},
			expectErr:     expectSkip,
			expectFailure: assert.NoError,
		},
		{
			name:     "events only removed items",
			category: path.EventsCategory,
			handler:  newEventBackupHandler(api.Client{}),
			removed: map[string]struct{}{
				"princess": {},
				"poppy":    {},
				"petunia":  {},
			},
			expectItemCount:    3,
			expectSkippedCount: 0,
			expectErr:          expectSkip,
			expectFailure:      assert.NoError,
		},
		{
			name:     "events added and removed items",
			category: path.EventsCategory,
			handler:  newEventBackupHandler(api.Client{}),
			added: map[string]time.Time{
				"general": {},
			},
			removed: map[string]struct{}{
				"general":  {},
				"goose":    {},
				"grumbles": {},
			},
			expectItemCount: 3,
			// not 1, because general is removed from the added
			// map due to being in the removed map
			expectSkippedCount: 0,
			expectErr:          expectSkip,
			expectFailure:      assert.NoError,
		},
		{
			name:     "contacts only added items",
			category: path.ContactsCategory,
			handler:  newContactBackupHandler(api.Client{}),
			added: map[string]time.Time{
				"fisher":    start.Add(time.Minute),
				"flannigan": start.Add(2 * time.Minute),
				"fitzbog":   start.Add(3 * time.Minute),
			},
			expectItemCount:    3,
			expectSkippedCount: 0,
			expectReads: []string{
				"fisher",
				"flannigan",
				"fitzbog",
			},
			expectErr:     expectNotSkipped,
			expectFailure: assert.Error,
		},
		{
			name:     "contacts only removed items",
			category: path.ContactsCategory,
			handler:  newContactBackupHandler(api.Client{}),
			removed: map[string]struct{}{
				"princess": {},
				"poppy":    {},
				"petunia":  {},
			},
			expectItemCount:    3,
			expectSkippedCount: 0,
			expectErr:          expectNotSkipped,
			expectFailure:      assert.NoError,
		},
		{
			name:     "contacts added and removed items",
			category: path.ContactsCategory,
			handler:  newContactBackupHandler(api.Client{}),
			added: map[string]time.Time{
				"general": {},
			},
			removed: map[string]struct{}{
				"general":  {},
				"goose":    {},
				"grumbles": {},
			},
			expectItemCount: 3,
			// not 1, because general is removed from the added
			// map due to being in the removed map
			expectSkippedCount: 0,
			expectErr:          expectNotSkipped,
			expectFailure:      assert.NoError,
		},
		{
			name:     "mail only added items",
			category: path.EmailCategory,
			handler:  newMailBackupHandler(api.Client{}),
			added: map[string]time.Time{
				"fisher":    start.Add(time.Minute),
				"flannigan": start.Add(2 * time.Minute),
				"fitzbog":   start.Add(3 * time.Minute),
			},
			expectItemCount:    3,
			expectSkippedCount: 0,
			expectReads: []string{
				"fisher",
				"flannigan",
				"fitzbog",
			},
			expectErr:     expectNotSkipped,
			expectFailure: assert.Error,
		},
		{
			name:     "mail only removed items",
			category: path.EmailCategory,
			handler:  newMailBackupHandler(api.Client{}),
			removed: map[string]struct{}{
				"princess": {},
				"poppy":    {},
				"petunia":  {},
			},
			expectItemCount:    3,
			expectSkippedCount: 0,
			expectErr:          expectNotSkipped,
			expectFailure:      assert.NoError,
		},
		{
			name:     "mail added and removed items",
			category: path.EmailCategory,
			handler:  newMailBackupHandler(api.Client{}),
			added: map[string]time.Time{
				"general": {},
			},
			removed: map[string]struct{}{
				"general":  {},
				"goose":    {},
				"grumbles": {},
			},
			expectItemCount: 3,
			// not 1, because general is removed from the added
			// map due to being in the removed map
			expectSkippedCount: 0,
			expectErr:          expectNotSkipped,
			expectFailure:      assert.NoError,
		},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			var (
				t         = suite.T()
				errs      = fault.New(false)
				itemCount int
			)

			ctx, flush := tester.NewContext(t)
			defer flush()

			fullPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
			require.NoError(t, err, clues.ToCore(err))

			locPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
			require.NoError(t, err, clues.ToCore(err))

			mlg := &mockLazyItemGetterSerializer{
				ItemGetSerialize: &mock.ItemGetSerialize{
					SerializeErr: graph.ErrServiceUnavailableEmptyResp,
				},
			}
			defer mlg.check(t, test.expectReads)

			opts := control.DefaultOptions()
			opts.SkipEventsOnInstance503ForResources = map[string]struct{}{}
			opts.SkipEventsOnInstance503ForResources["pr"] = struct{}{}

			col := NewCollection(
				data.NewBaseCollection(
					fullPath,
					nil,
					locPath.ToBuilder(),
					opts,
					false,
					count.New()),
				"pr",
				mlg,
				test.handler,
				test.added,
				maps.Keys(test.removed),
				true,
				statusUpdater,
				count.New())

			for item := range col.Items(ctx, errs) {
				itemCount++

				_, rok := test.removed[item.ID()]
				if rok {
					dimt, ok := item.(data.ItemModTime)
					require.True(t, ok, "item implements data.ItemModTime")
					assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
					assert.True(t, item.Deleted(), "removals should be marked as deleted")
				}

				modTime, aok := test.added[item.ID()]
				if !rok && aok {
					// Item's mod time should be what's passed into the collection
					// initializer.
					assert.Implements(t, (*data.ItemModTime)(nil), item)
					assert.Equal(t, modTime, item.(data.ItemModTime).ModTime(), "item mod time")
					assert.False(t, item.Deleted(), "additions should not be marked as deleted")

					// Check if the test wants us to read the item's data so the lazy
					// data fetch is executed.
					if slices.Contains(test.expectReads, item.ID()) {
						r := item.ToReader()

						_, err := io.ReadAll(r)
						test.expectErr(t, err)

						r.Close()
					} else {
						assert.Fail(t, "unexpected read on item %s", item.ID())
					}
				}

				assert.True(t, aok || rok, "item must be either added or removed: %q", item.ID())
			}

			failure := errs.Failure()
			if failure == nil && len(errs.Recovered()) > 0 {
				failure = errs.Recovered()[0]
			}

			test.expectFailure(t, failure, clues.ToCore(failure))
			assert.Equal(
				t,
				test.expectItemCount,
				itemCount,
				"should see all expected items")
			assert.Len(t, errs.Skipped(), test.expectSkippedCount)
		})
	}
}
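
The expectSkip and expectNotSkipped helpers above distinguish skippable failures from hard failures purely by the label attached to the error. A minimal sketch of that round trip, using only the clues calls already visible in this diff; the message text is an assumption for illustration:

	// Producing a skippable error: label it at the point of failure.
	err := clues.New("service unavailable for instance").Label(graph.LabelsSkippable)

	// Consuming it: callers branch on the label rather than the error type.
	if clues.HasLabel(err, graph.LabelsSkippable) {
		// record the item in errs.Skipped() instead of failing the backup
	}

Because the collection is built with fault.New(false), a non-skippable item error lands in errs.Recovered() rather than errs.Failure(), which is why the test falls back to Recovered()[0] before asserting.
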
func (suite *CollectionUnitSuite) TestLazyItem_NoRead_GetInfo_Errors() {
	t := suite.T()

@ -1,6 +1,8 @@
package exchange

import (
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/services/m365/api"
	"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)
@ -52,3 +54,11 @@ func (h contactBackupHandler) NewContainerCache(
		getter: h.ac,
	}
}

func (h contactBackupHandler) CanSkipItemFailure(
	err error,
	resourceID string,
	opts control.Options,
) (fault.SkipCause, bool) {
	return "", false
}
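
CanSkipItemFailure gives each category handler a veto over skip-on-failure behavior; the contact handler above always declines. A sketch of how a caller might consult it, with the surrounding loop and variable names assumed for illustration rather than taken from the production call site:

	// hypothetical call site inside item enumeration
	cause, ok := handler.CanSkipItemFailure(err, resourceID, opts)
	if ok {
		// convert the failure into a skip entry, e.g.
		// errs.AddSkip(ctx, fault.EmailSkip(cause, "", itemID, nil))
	} else {
		errs.AddRecoverable(ctx, err)
	}

The unit tests in the next file pin down the always-false contract.
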
@ -0,0 +1,83 @@
package exchange

import (
	"testing"

	"github.com/google/uuid"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/suite"

	"github.com/alcionai/corso/src/internal/tester"
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/services/m365/api"
)

type ContactsBackupHandlerUnitSuite struct {
	tester.Suite
}

func TestContactsBackupHandlerUnitSuite(t *testing.T) {
	suite.Run(t, &ContactsBackupHandlerUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *ContactsBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() {
	resourceID := uuid.NewString()

	table := []struct {
		name        string
		err         error
		opts        control.Options
		expect      assert.BoolAssertionFunc
		expectCause fault.SkipCause
	}{
		{
			name:   "no config",
			err:    assert.AnError,
			opts:   control.Options{},
			expect: assert.False,
		},
		{
			name: "false when map is empty",
			err:  assert.AnError,
			opts: control.Options{
				SkipEventsOnInstance503ForResources: map[string]struct{}{},
			},
			expect: assert.False,
		},
		{
			name: "false on nil error",
			err:  nil,
			opts: control.Options{
				SkipEventsOnInstance503ForResources: map[string]struct{}{
					resourceID: {},
				},
			},
			expect: assert.False,
		},
		{
			name: "false even if resource matches",
			err:  assert.AnError,
			opts: control.Options{
				SkipEventsOnInstance503ForResources: map[string]struct{}{
					resourceID: {},
				},
			},
			expect: assert.False,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			h := newContactBackupHandler(api.Client{})
			cause, result := h.CanSkipItemFailure(
				test.err,
				resourceID,
				test.opts)

			test.expect(t, result)
			assert.Equal(t, test.expectCause, cause)
		})
	}
}

@ -126,7 +126,7 @@ func (cfc *contactContainerCache) Populate(
	if err != nil {
		errs.AddRecoverable(
			ctx,
			graph.Stack(ctx, err).Label(fault.LabelForceNoBackupCreation))
			clues.StackWC(ctx, err).Label(fault.LabelForceNoBackupCreation))
	}
}

@ -120,7 +120,7 @@ func restoreContact(
) (*details.ExchangeInfo, error) {
	contact, err := api.BytesToContactable(body)
	if err != nil {
		return nil, graph.Wrap(ctx, err, "creating contact from bytes")
		return nil, clues.WrapWC(ctx, err, "creating contact from bytes")
	}

	ctx = clues.Add(ctx, "item_id", ptr.Val(contact.GetId()))
@ -148,7 +148,7 @@ func restoreContact(

	item, err := cr.PostItem(ctx, userID, destinationID, contact)
	if err != nil {
		return nil, graph.Wrap(ctx, err, "restoring contact")
		return nil, clues.Wrap(err, "restoring contact")
	}

	// contacts have no PUT request, and PATCH could retain data that's not
@ -159,7 +159,7 @@ func restoreContact(
	if shouldDeleteOriginal {
		err := cr.DeleteItem(ctx, userID, collisionID)
		if err != nil && !errors.Is(err, core.ErrNotFound) {
			return nil, graph.Wrap(ctx, err, "deleting colliding contact")
			return nil, clues.Wrap(err, "deleting colliding contact")
		}
	}
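
The hunks above are part of a broader swap from the graph package's error helpers to direct clues wrapping: graph.Stack and graph.Wrap become clues.StackWC, clues.WrapWC, or plain clues.Wrap. A short sketch of the distinction as this diff uses it; treat the WC reading ("with context") as an assumption:

	// plain wrap: message only
	return nil, clues.Wrap(err, "restoring contact")

	// context-aware wrap: also attaches the values previously added
	// via clues.Add(ctx, ...) onto the returned error
	return nil, clues.WrapWC(ctx, err, "creating contact from bytes")

Note that not every call site keeps the context: the PostItem and DeleteItem failures switch to plain clues.Wrap, presumably because errors returned from the api layer already carry those annotations.
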
@ -12,6 +12,7 @@ import (

	"github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
	"github.com/alcionai/corso/src/internal/tester"
	"github.com/alcionai/corso/src/internal/tester/its"
	"github.com/alcionai/corso/src/internal/tester/tconfig"
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/control/testdata"
@ -54,7 +55,7 @@ func (m *contactRestoreMock) DeleteItem(

type ContactsRestoreIntgSuite struct {
	tester.Suite
	its  intgTesterSetup
	m365 its.M365IntgTestSetup
}

func TestContactsRestoreIntgSuite(t *testing.T) {
@ -66,17 +67,17 @@ func TestContactsRestoreIntgSuite(t *testing.T) {
}

func (suite *ContactsRestoreIntgSuite) SetupSuite() {
	suite.its = newIntegrationTesterSetup(suite.T())
	suite.m365 = its.GetM365(suite.T())
}

// Testing to ensure that the cache system works in multiple different environments
func (suite *ContactsRestoreIntgSuite) TestCreateContainerDestination() {
	runCreateDestinationTest(
		suite.T(),
		newContactRestoreHandler(suite.its.ac),
		newContactRestoreHandler(suite.m365.AC),
		path.ContactsCategory,
		suite.its.creds.AzureTenantID,
		suite.its.userID,
		suite.m365.TenantID,
		suite.m365.User.ID,
		testdata.DefaultRestoreConfig("").Location,
		[]string{"Hufflepuff"},
		[]string{"Ravenclaw"})
@ -207,17 +208,16 @@ func (suite *ContactsRestoreIntgSuite) TestRestoreContact() {
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()
			ctr := count.New()

			ctx, flush := tester.NewContext(t)
			defer flush()

			ctr := count.New()

			_, err := restoreContact(
				ctx,
				test.apiMock,
				body,
				suite.its.userID,
				suite.m365.User.ID,
				"destination",
				test.collisionMap,
				test.onCollision,

Some files were not shown because too many files have changed in this diff.