Compare commits


152 Commits

Author SHA1 Message Date
HiteshRepo 053323719e select creds 2024-02-05 20:50:35 +05:30
HiteshRepo 79a0248049 re trigger CI 2024-02-05 20:46:00 +05:30
HiteshRepo d0b4b2745d get concated M365 creds 2024-02-05 20:39:56 +05:30
HiteshRepo 8286bb1ffe pass all m365 acc for trusted tests 2024-02-04 12:59:30 +05:30
HiteshRepo 81ce531472 change slot 2024-02-02 21:12:21 +05:30
HiteshRepo 73dd557a91 run another test 2024-02-02 21:06:45 +05:30
HiteshRepo e4026a7756 re run 2024-02-02 21:03:20 +05:30
HiteshRepo 81276b9ac1 change slot 2024-02-02 21:00:46 +05:30
HiteshRepo 2b68c8157e re run 2024-02-02 20:47:26 +05:30
HiteshRepo f6b2958cc9 do not read aws creds 2024-02-02 20:44:49 +05:30
HiteshRepo 61d6a59ef1 re run 2024-02-02 20:40:32 +05:30
HiteshRepo a4fe50c4d8 comment almost whole test 2024-02-02 20:38:17 +05:30
HiteshRepo b37715a445 re run 2024-02-02 20:33:36 +05:30
HiteshRepo 1592070a14 comment some more 2024-02-02 20:30:30 +05:30
HiteshRepo a14dab97ab re run 2024-02-02 20:26:26 +05:30
HiteshRepo e421676b44 comment some code 2024-02-02 20:24:58 +05:30
HiteshRepo a791587361 log env var that were read 2024-02-02 20:06:51 +05:30
HiteshRepo dd48678bc4 re run 2024-02-02 19:46:28 +05:30
HiteshRepo 9de1a1802e run diff test 2024-02-02 19:44:15 +05:30
HiteshRepo ec31a68371 re run 2024-02-02 19:19:44 +05:30
HiteshRepo 65b169a134 slot constant-ed 2024-02-02 19:16:59 +05:30
HiteshRepo bc0990ad4c use accSelector.yaml@ci-cache-enable 2024-02-02 19:12:07 +05:30
HiteshRepo 08a3a1da4e fix slot 2024-02-02 19:04:44 +05:30
HiteshRepo 6d20069928 make azure creds constant - 2 2024-02-02 18:55:59 +05:30
HiteshRepo 39c70bcafc make azure creds constant 2024-02-02 18:53:23 +05:30
HiteshRepo d028764fe7 re run 2024-01-31 16:14:32 +05:30
HiteshRepo 9a6edab353 check if w/o file creation test gets cached 2024-01-31 15:56:44 +05:30
HiteshRepo 0144eebb55 re run 2024-01-30 17:54:04 +05:30
HiteshRepo a296b64f16 use temp dir 2024-01-30 17:51:08 +05:30
HiteshRepo 7636882a53 run one more time 2024-01-30 17:46:07 +05:30
HiteshRepo ae5678389d re run 2024-01-30 17:33:04 +05:30
HiteshRepo cb3c7da53b returns constant repo hash name for test 2024-01-30 17:30:10 +05:30
HiteshRepo 1a584fb95a re run 2024-01-30 17:12:50 +05:30
HiteshRepo 38f9ad03b4 peek 2024-01-30 17:09:04 +05:30
HiteshRepo dbc1da92dc removes v flag 2024-01-30 17:06:36 +05:30
HiteshRepo c18c2eb6d6 re run 2024-01-30 17:03:13 +05:30
HiteshRepo 1a1c888fb4 pass test dir 2024-01-30 16:59:48 +05:30
HiteshRepo 464dc95627 uploading whole test log 2024-01-30 16:26:55 +05:30
HiteshRepo ee21611018 fix 2024-01-30 16:19:46 +05:30
HiteshRepo e13eb26905 adds verbose flag 2024-01-30 16:13:36 +05:30
HiteshRepo 9c44541787 log kopia config dir 2024-01-30 16:09:02 +05:30
HiteshRepo 7a9678b4a2 re run 2024-01-30 15:52:41 +05:30
HiteshRepo e509bf1015 runs only TestBackup_Run_exchange 2024-01-30 15:49:26 +05:30
HiteshRepo 0f83579a78 run all unit and retention tests 2024-01-30 15:45:59 +05:30
HiteshRepo 089020836d re run 2024-01-29 20:06:31 +05:30
HiteshRepo f6fbec30dc skip 2 retention tests 2024-01-29 19:40:12 +05:30
HiteshRepo ac5244f9bc re run 2024-01-29 19:35:47 +05:30
HiteshRepo f7ce1208f8 tests TestRetentionIntegrationSuite 2024-01-29 19:33:23 +05:30
HiteshRepo f440698c2d re run 2024-01-29 19:29:35 +05:30
HiteshRepo 4f0653c6c3 run only one suite kopia pk 2024-01-29 19:25:31 +05:30
HiteshRepo c5ee023e7d peek 2 2024-01-29 18:42:46 +05:30
HiteshRepo cc53e77c77 peek src 2024-01-29 18:39:08 +05:30
HiteshRepo 9da2e6536d re run 2024-01-29 18:27:33 +05:30
HiteshRepo 8595ff1088 comments some more 2024-01-29 18:26:45 +05:30
HiteshRepo 3305f1ffd4 re run 2024-01-29 18:23:20 +05:30
HiteshRepo 4505933aaa comment some more 2024-01-29 18:21:50 +05:30
HiteshRepo 619d13483b re run 2024-01-29 18:13:19 +05:30
HiteshRepo bdaeaec3cd comment some test code 2024-01-29 18:12:25 +05:30
HiteshRepo 18098c9825 re run 2024-01-29 18:10:50 +05:30
HiteshRepo 5cb141b6cd test logger pkg 2024-01-29 18:07:37 +05:30
HiteshRepo 196f4013c3 re run 2024-01-29 18:01:48 +05:30
HiteshRepo 4d865e793a uncomment 2024-01-29 17:59:21 +05:30
HiteshRepo 7354262804 re run 2024-01-29 17:57:02 +05:30
HiteshRepo 1438f2f408 comment some more tests 2024-01-29 17:54:42 +05:30
HiteshRepo 0094c7a2fe comment test involving file write in pkg int test 2024-01-29 17:49:11 +05:30
HiteshRepo 76ba41313e re run 2024-01-29 17:38:53 +05:30
HiteshRepo dab3dd4d66 check input IDs 2024-01-29 17:30:04 +05:30
HiteshRepo d91116b122 re run 2024-01-29 17:24:03 +05:30
HiteshRepo 27f828759a correct the package run 2024-01-29 17:21:23 +05:30
HiteshRepo 46770fb12b run test for only config pkg 2024-01-29 17:18:43 +05:30
HiteshRepo 8956d2642c revert 2024-01-29 17:09:23 +05:30
HiteshRepo 722b0c7157 comment few tests 2024-01-29 17:04:59 +05:30
HiteshRepo a2f9d70035 run 3 2024-01-29 16:45:06 +05:30
HiteshRepo 7c49832b54 re re run 2024-01-29 16:36:09 +05:30
HiteshRepo ecc6e5eb8b re run 2024-01-29 16:29:40 +05:30
HiteshRepo 7f3549f2af run only one suite for trusted 2024-01-29 16:24:14 +05:30
HiteshRepo 6c7c57e92a run only one test for trusted tests 2024-01-29 15:18:21 +05:30
HiteshRepo ffcd42badc re run 2024-01-29 13:27:39 +05:30
HiteshRepo e1e6055351 same changes for trusted tests 2024-01-29 13:17:59 +05:30
HiteshRepo f0f2639a40 re run 2024-01-29 13:04:12 +05:30
HiteshRepo 9bd4184097 update dir for retention test log 2024-01-29 13:01:10 +05:30
HiteshRepo 9d7e13812d re run again 2024-01-29 12:57:09 +05:30
HiteshRepo ff70aae98b re run 2024-01-29 11:23:30 +05:30
HiteshRepo 1083ef096d remove json and v flags temporarily 2024-01-29 11:17:43 +05:30
HiteshRepo 46c75e1b4f re run 2024-01-29 11:12:57 +05:30
HiteshRepo fb2977a335 update cache key for retention 2024-01-29 11:02:07 +05:30
HiteshRepo 9c0d8fc46c make same changes to retention tests 2024-01-29 10:34:46 +05:30
HiteshRepo df2753428b peek some data 2024-01-29 10:26:12 +05:30
HiteshRepo 19ff21cb02 re-run 2024-01-29 10:17:36 +05:30
HiteshRepo 68eb8e77d1 remove cache debug 2024-01-29 10:13:25 +05:30
HiteshRepo 97871c6071 re run 2024-01-28 21:39:50 +05:30
HiteshRepo de25ca69cb all tests 2024-01-28 21:33:06 +05:30
HiteshRepo 9939182054 run again 2024-01-28 21:29:14 +05:30
HiteshRepo a2c6cf1085 fix test log dir 2024-01-28 21:25:59 +05:30
HiteshRepo b04cd5aa4d some changes 2024-01-28 21:23:08 +05:30
HiteshRepo 7441d957e1 re-test those packages 2024-01-28 20:54:42 +05:30
HiteshRepo 3b377e12f5 run each test in the suites 2024-01-28 20:51:11 +05:30
HiteshRepo 87b6d79217 runs only 2 suites of site coll package - 2nd 2024-01-28 20:45:32 +05:30
HiteshRepo 2b5a14e6ae runs only 2 suites of site coll package 2024-01-28 20:42:15 +05:30
HiteshRepo ce42add8ce re-check cache for site collection pkg 2024-01-28 20:37:46 +05:30
HiteshRepo cb547c7b06 re-adds test to trigger ci 2024-01-28 20:33:54 +05:30
HiteshRepo 16978c3a43 test all suites individually 2024-01-28 20:31:05 +05:30
HiteshRepo f1c4a1fce3 removes test 2024-01-28 20:26:18 +05:30
HiteshRepo 7f458b047e run all tests in the package site 2024-01-28 20:23:13 +05:30
HiteshRepo fbb8bc7fe7 removes test 2024-01-28 20:18:17 +05:30
HiteshRepo 2623698f9f run all tests in suite TestSharePointSuite 2024-01-28 20:16:09 +05:30
HiteshRepo 0bc29426dd removes unwanted test 2024-01-28 18:24:28 +05:30
HiteshRepo 97860ba336 tests only TestSharePointSuite/TestCollectLists 2024-01-28 18:21:55 +05:30
HiteshRepo b098f977e2 remove additional test 2024-01-28 17:55:46 +05:30
HiteshRepo 49666b8a6d all tests - 2nd run 2024-01-28 14:52:17 +05:30
HiteshRepo b9d12bfd7d run all tests 2024-01-28 14:46:27 +05:30
HiteshRepo 0e07dacdee run one test 2024-01-28 14:42:27 +05:30
HiteshRepo 4a352ae097 check effect of timeout flag - 2nd run 2024-01-28 14:38:26 +05:30
HiteshRepo d9568f8d12 check effect of timeout flag 2024-01-28 14:35:55 +05:30
HiteshRepo aaea75590c check effect of parallel flag 2024-01-28 14:33:16 +05:30
HiteshRepo 9665a2401c check effect of failfast flag - 2nd run 2024-01-28 14:29:51 +05:30
HiteshRepo e0711aa1b8 check effect of failfast flag 2024-01-28 14:27:13 +05:30
HiteshRepo fcf327c5e4 check effect of verbosity flag 2024-01-28 14:24:25 +05:30
HiteshRepo 5f7468aa02 checks effect of tags - 2nd run 2024-01-28 14:21:41 +05:30
HiteshRepo 1c780744d2 checks effect of tags 2024-01-28 14:18:46 +05:30
HiteshRepo f4b8fa190f checks effect of env var 2024-01-28 14:15:09 +05:30
HiteshRepo 94b1eaceee run both test in 1 2024-01-28 14:12:38 +05:30
HiteshRepo a5e2617853 change to trigger ci 2024-01-28 14:07:48 +05:30
HiteshRepo 4881345415 some file for both tests 2024-01-28 14:05:16 +05:30
HiteshRepo 9f7aba555d some change 2024-01-28 14:02:54 +05:30
HiteshRepo 649138c9c0 more test to trigger ci 2024-01-28 13:59:34 +05:30
HiteshRepo d90238e828 adds one more test in ci 2024-01-28 13:56:32 +05:30
HiteshRepo 9909335b76 triggers ci 2024-01-28 13:51:34 +05:30
HiteshRepo b872d57f0c renames test 2024-01-28 13:39:44 +05:30
HiteshRepo 10aa0a93e5 adds new test 2024-01-28 13:03:49 +05:30
HiteshRepo c5e4d99d70 revert to single test 2024-01-28 13:01:39 +05:30
HiteshRepo fe95d210dc removes test case 2024-01-28 12:55:28 +05:30
HiteshRepo b93608e53d removes json flag for tests 2024-01-28 12:49:48 +05:30
HiteshRepo 0739b3525b removes test 2024-01-28 12:44:26 +05:30
HiteshRepo 8fd8ae9094 removes filter 2024-01-28 12:28:00 +05:30
HiteshRepo 9924c742e8 modify the grep command 2024-01-28 12:19:12 +05:30
HiteshRepo b776a3b209 run all unit tests 2024-01-28 12:13:47 +05:30
HiteshRepo 274e32d331 adds grep filter 2024-01-28 12:04:42 +05:30
HiteshRepo b187990a06 removes losg filter 2024-01-28 11:56:27 +05:30
HiteshRepo ac1bb9a866 reduces logs clutter 2024-01-28 11:52:33 +05:30
HiteshRepo bf60df48e5 push test logs to file 2024-01-28 11:44:42 +05:30
HiteshRepo 5c0ceaf781 upload always 2024-01-28 11:41:17 +05:30
HiteshRepo a337785542 adds new test to trigger 2024-01-28 11:38:43 +05:30
HiteshRepo 4de11255c8 runs only on test for convinience 2024-01-28 11:34:26 +05:30
HiteshRepo 2ef01e38bd re-adds test case to trigger ci 2024-01-28 11:15:34 +05:30
HiteshRepo 84c4894bf7 change cache key temporarily 2024-01-28 10:56:52 +05:30
HiteshRepo 08ed18b15d removes a test case to trigger a commit 2024-01-28 10:54:27 +05:30
HiteshRepo 35022dc961 removes gotestfmt filter 2024-01-28 10:42:31 +05:30
HiteshRepo 324b23c46b make unit test use cache explicit 2024-01-27 22:51:42 +05:30
HiteshRepo ede0c6ee83 change dummy test 2024-01-27 22:24:18 +05:30
HiteshRepo 82dafe15c3 unit testing | replace magnetikonline with actions 2024-01-27 22:11:41 +05:30
HiteshRepo 1bae17b48d adds dummy test 2024-01-27 21:36:42 +05:30
290 changed files with 3655 additions and 20014 deletions

View File

@ -1,5 +1,4 @@
name: Backup Restore Test
description: Run various backup/restore/export tests for a service.
inputs:
service:

View File

@ -1,5 +1,4 @@
name: Setup and Cache Golang
description: Build golang binaries for later use in CI.
# clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml
#

View File

@ -1,5 +1,4 @@
name: Publish Binary
description: Publish binary artifacts.
inputs:
version:

View File

@ -1,5 +1,4 @@
name: Publish Website
description: Publish website artifacts.
inputs:
aws-iam-role:

View File

@ -1,5 +1,4 @@
name: Purge M365 User Data
description: Deletes M365 data generated during CI tests.
# Hard deletion of an m365 user's data. Our CI processes create a lot
# of data churn (creation and immediate deletion) of files, the likes
@ -31,19 +30,12 @@ inputs:
description: Secret value of for AZURE_CLIENT_ID
azure-client-secret:
description: Secret value of for AZURE_CLIENT_SECRET
azure-pnp-client-id:
description: Secret value of AZURE_PNP_CLIENT_ID
azure-pnp-client-cert:
description: Base64 encoded private certificate for the azure-pnp-client-id (Secret value of AZURE_PNP_CLIENT_CERT)
azure-tenant-id:
description: Secret value of AZURE_TENANT_ID
description: Secret value of for AZURE_TENANT_ID
m365-admin-user:
description: Secret value of for M365_TENANT_ADMIN_USER
m365-admin-password:
description: Secret value of for M365_TENANT_ADMIN_PASSWORD
tenant-domain:
description: The domain of the tenant (ex. 10rqc2.onmicrosft.com)
required: true
runs:
using: composite
@ -61,13 +53,7 @@ runs:
AZURE_CLIENT_ID: ${{ inputs.azure-client-id }}
AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
run: ./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
# TODO(ashmrtn): Re-enable when we figure out errors we're seeing with Get-Mailbox call.
#- name: Reset retention for all mailboxes to 0
@ -88,16 +74,10 @@ runs:
shell: pwsh
working-directory: ./src/cmd/purge/scripts
env:
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
################################################################################################################
# Sharepoint
@ -108,14 +88,6 @@ runs:
shell: pwsh
working-directory: ./src/cmd/purge/scripts
env:
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
run: ./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}
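Note on the hunks above: on the main side, each purge script invocation is wrapped in a three-attempt PowerShell retry loop (`for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; ...)`), while this branch predates that and calls the scripts directly. A minimal sketch of the same bounded-retry idea in Go, with `runPurge` as a hypothetical stand-in for shelling out to the purge script:

```go
package main

import "fmt"

// retry mirrors the removed PowerShell loop: attempt the purge up to
// three times and stop at the first success.
func retry(runPurge func() error) error {
	const maxAttempts = 3

	var err error
	for attempt := 1; attempt <= maxAttempts; attempt++ {
		if err = runPurge(); err == nil {
			return nil
		}
	}

	return fmt.Errorf("purge failed after %d attempts: %w", maxAttempts, err)
}

func main() {
	calls := 0
	err := retry(func() error {
		calls++
		if calls < 2 {
			return fmt.Errorf("transient failure")
		}
		return nil
	})
	fmt.Println(calls, err) // 2 <nil>
}
```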

View File

@ -1,5 +1,4 @@
name: Send a message to Teams
description: Send messages to communication apps.
inputs:
msg:

View File

@ -1,5 +1,4 @@
name: Lint Website
description: Lint website content.
inputs:
version:

View File

@ -28,7 +28,7 @@ jobs:
# only run CI tests if the src folder or workflow actions have changed
- name: Check for file changes in src/ or .github/workflows/
uses: dorny/paths-filter@v3
uses: dorny/paths-filter@v2
id: dornycheck
with:
list-files: json

View File

@ -22,7 +22,8 @@ jobs:
- name: Figure out which client id to use
id: roundrobin
run: |
slot=$((GITHUB_RUN_NUMBER % 4))
# slot=$((GITHUB_RUN_NUMBER % 4))
slot=2
echo "CLIENT_APP_SLOT=$slot" >> $GITHUB_OUTPUT
case $slot in
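The hunk above is the debugging change behind the "change slot" / "fix slot" commits: instead of rotating across four registered client apps by run number, the slot is pinned to 2. A sketch of the original round-robin computation (a Go stand-in for the shell arithmetic; the slot-to-credential mapping in the `case` statement is elided here just as it is in the hunk):

```go
package main

import (
	"fmt"
	"os"
	"strconv"
)

// slotForRun mirrors slot=$((GITHUB_RUN_NUMBER % 4)): successive CI
// runs cycle through the registered client-app slots. Pinning a slot,
// as this branch does, replaces the modulo with a constant.
func slotForRun(runNumber, numSlots int) int {
	return runNumber % numSlots
}

func main() {
	run, err := strconv.Atoi(os.Getenv("GITHUB_RUN_NUMBER"))
	if err != nil {
		fmt.Fprintln(os.Stderr, "GITHUB_RUN_NUMBER is unset or not a number")
		os.Exit(1)
	}

	fmt.Printf("CLIENT_APP_SLOT=%d\n", slotForRun(run, 4))
}
```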

View File

@ -40,5 +40,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Publishing Binary"
msg: "[FAILED] Publishing Binary"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -52,7 +52,18 @@ jobs:
# SetM365App will decide which M365 app to use for this CI run
SetM365App:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
environment: Testing
runs-on: ubuntu-latest
outputs:
client_app_slot: ${{ steps.roundrobin.outputs.CLIENT_APP_SLOT }}
client_id_env: ${{ steps.roundrobin.outputs.CLIENT_ID_ENV }}
client_secret_env: ${{ steps.roundrobin.outputs.CLIENT_SECRET_ENV }}
steps:
- name: sets all client id and secrets
id: roundrobin
run: |
echo "CLIENT_ID_ENV=CLIENT_ID,CLIENT_ID_2,CLIENT_ID_3,CLIENT_ID_4" >> $GITHUB_OUTPUT
echo "CLIENT_SECRET_ENV=CLIENT_SECRET,CLIENT_SECRET_2,CLIENT_SECRET_3,CLIENT_SECRET_4" >> $GITHUB_OUTPUT
SetEnv:
environment: Testing
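In the hunk above, this branch inlines the SetM365App job rather than reusing the accSelector.yaml workflow, and emits the names of all four client-ID/secret pairs as comma-joined lists. Per the "get concated M365 creds" and "select creds" commits, the consumer is expected to split the joined list and index it by slot; a hypothetical consumer-side helper (names assumed from the workflow outputs):

```go
package main

import (
	"fmt"
	"strings"
)

// pickBySlot splits a comma-joined list, such as the CLIENT_ID_ENV
// output above, and selects the entry for the given round-robin slot.
func pickBySlot(joined string, slot int) (string, error) {
	parts := strings.Split(joined, ",")
	if slot < 0 || slot >= len(parts) {
		return "", fmt.Errorf("slot %d out of range for %d entries", slot, len(parts))
	}

	return strings.TrimSpace(parts[slot]), nil
}

func main() {
	id, err := pickBySlot("CLIENT_ID,CLIENT_ID_2,CLIENT_ID_3,CLIENT_ID_4", 2)
	if err != nil {
		panic(err)
	}

	fmt.Println(id) // CLIENT_ID_3
}
```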
@ -101,7 +112,6 @@ jobs:
echo "website-cfid=ESFTEIYTIP7Y3" | tee -a $GITHUB_OUTPUT
fi
# ----------------------------------------------------------------------------------------------------
# --- Website Linting -----------------------------------------------------------------------------------
# ----------------------------------------------------------------------------------------------------
@ -136,24 +146,31 @@ jobs:
working-directory: src
env:
# Resolve the name of the secret that contains the Azure client ID/secret
AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }}
AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
AZURE_CLIENT_ID_NAME: ${{ secrets[CLIENT_ID] }},${{ secrets[CLIENT_ID_2] }},${{ secrets[CLIENT_ID_3] }},${{ secrets[CLIENT_ID_4] }}
AZURE_CLIENT_SECRET_NAME: ${{ secrets[CLIENT_SECRET] }},${{ secrets[CLIENT_SECRET_2] }},${{ secrets[CLIENT_SECRET_3] }},${{ secrets[CLIENT_SECRET_4] }}
CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-ci.log
CORSO_LOG_FILE: /tmp/corso-trusted-testlog/run-ci.log
LOG_GRAPH_REQUESTS: true
steps:
- uses: actions/checkout@v4
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4
- name: Setup Golang
uses: actions/setup-go@v5
with:
go-version-file: src/go.mod
go-version: 1.21
cache: true
- run: mkdir testlog
- name: Setup Golang caches
uses: actions/cache@v4
with:
path: |
~/.cache/go-build
~/go/pkg/mod
key: ${{ runner.os }}-golang-trusted-${{ github.sha }}
restore-keys: |
${{ runner.os }}-golang-trusted-
# Install gotestfmt
- name: Set up gotestfmt
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
- run: mkdir -p /tmp/corso-trusted-testlog
# AWS creds
- name: Configure AWS credentials from Test account
@ -166,7 +183,7 @@ jobs:
# run the tests
- name: Integration Tests
env:
AZURE_CLIENT_ID: ${{ secrets[env.AZURE_CLIENT_ID_NAME] }}
AZURE_CLIENT_ID: ${{ secrets[CLIENT_ID] }},${{ secrets[CLIENT_ID_2] }},${{ secrets[CLIENT_ID_3] }},${{ secrets[CLIENT_ID_4] }}
AZURE_CLIENT_SECRET: ${{ secrets[env.AZURE_CLIENT_SECRET_NAME] }}
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
CORSO_CI_TESTS: true
@ -178,21 +195,44 @@ jobs:
set -euo pipefail
go test \
-tags testing \
-json \
-v \
-failfast \
-p 1 \
-v \
-timeout 20m \
./... \
2>&1 | tee ./testlog/gotest-ci.log | gotestfmt -hide successful-tests
-run '^TestDataCollectionIntgSuite/TestExchangeDataCollection$' ./internal/m365 \
2>&1 | tee /tmp/corso-trusted-testlog/gotest-ci.log
- name: peek
run: ls /tmp/corso-trusted-testlog
# # run the tests
# - name: Integration Tests
# env:
# AZURE_CLIENT_ID: ${{ secrets[env.AZURE_CLIENT_ID_NAME] }}
# AZURE_CLIENT_SECRET: ${{ secrets[env.AZURE_CLIENT_SECRET_NAME] }}
# AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
# CORSO_CI_TESTS: true
# CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
# CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
# CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
# S3_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }}
# run: |
# set -euo pipefail
# go test \
# -tags testing \
# -failfast \
# -p 1 \
# -timeout 20m \
# ./... \
# 2>&1 | tee /tmp/corso-trusted-testlog/gotest-ci.log
# Upload the original go test output as an artifact for later review.
- name: Upload test log
if: failure()
if: always()
uses: actions/upload-artifact@v4
with:
name: ci-test-log
path: src/testlog/*
path: /tmp/corso-trusted-testlog/*
if-no-files-found: error
retention-days: 14
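The hunk above narrows the trusted CI run from `./...` to a single testify method via `-run '^TestDataCollectionIntgSuite/TestExchangeDataCollection$'`. Per `go help testflag`, the pattern is split on `/`: the first segment matches the `Test*` entry point, the second matches the suite method that testify registers as a subtest. A minimal sketch (suite and method names taken from the pattern; the real suite lives under `./internal/m365`):

```go
package m365_test

import (
	"testing"

	"github.com/stretchr/testify/suite"
)

type DataCollectionIntgSuite struct {
	suite.Suite
}

// The entry point matches the first segment of the -run pattern.
func TestDataCollectionIntgSuite(t *testing.T) {
	suite.Run(t, new(DataCollectionIntgSuite))
}

// testify runs each Test* method as a subtest named after the method,
// which is what the second segment of the -run pattern matches.
func (s *DataCollectionIntgSuite) TestExchangeDataCollection() {
	s.T().Log("selected by -run '^TestDataCollectionIntgSuite/TestExchangeDataCollection$'")
}
```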
@ -210,21 +250,28 @@ jobs:
AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }}
AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-ci-retention.log
CORSO_LOG_FILE: /tmp/corso-retention-testlog/run-ci-retention.log
LOG_GRAPH_REQUESTS: true
steps:
- uses: actions/checkout@v4
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4
- name: Setup Golang
uses: actions/setup-go@v5
with:
go-version-file: src/go.mod
go-version: 1.21
cache: true
- run: mkdir testlog
- name: Setup Golang caches
uses: actions/cache@v4
with:
path: |
~/.cache/go-build
~/go/pkg/mod
key: ${{ runner.os }}-golang-retention-${{ github.sha }}
restore-keys: |
${{ runner.os }}-golang-retention-
# Install gotestfmt
- name: Set up gotestfmt
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
- run: mkdir -p /tmp/corso-retention-testlog
# AWS creds
- name: Configure AWS credentials from Test account
@ -249,24 +296,25 @@ jobs:
set -euo pipefail
go test \
-tags testing \
-json \
-v \
-failfast \
-p 1 \
-timeout 10m \
./... \
2>&1 | tee ./testlog/gotest-ci.log | gotestfmt -hide successful-tests
2>&1 | tee /tmp/corso-retention-testlog/gotest-ci.log
# Upload the original go test output as an artifact for later review.
- name: Upload test log
if: failure()
if: always()
uses: actions/upload-artifact@v4
with:
name: ci-retention-test-log
path: src/testlog/*
path: /tmp/corso-retention-testlog/*
if-no-files-found: error
retention-days: 14
- name: peek
run: ls
Unit-Test-Suite:
needs: [Precheck, Checkout]
environment: Testing
@ -277,48 +325,54 @@ jobs:
run:
working-directory: src
env:
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-unit.log
CORSO_LOG_FILE: /tmp/corso-testlog/run-unit.log
LOG_GRAPH_REQUESTS: true
steps:
- uses: actions/checkout@v4
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4
- name: Setup Golang
uses: actions/setup-go@v5
with:
go-version-file: src/go.mod
go-version: 1.21
cache: true
- run: mkdir testlog
- name: Setup Golang caches
uses: actions/cache@v4
with:
path: |
~/.cache/go-build
~/go/pkg/mod
key: ${{ runner.os }}-golang-unit-${{ github.sha }}
restore-keys: |
${{ runner.os }}-golang-unit-
# Install gotestfmt
- name: Set up gotestfmt
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
- run: mkdir -p /tmp/corso-testlog
# run the tests
- name: Unit Tests
env:
# Set these to a bad value so we don't accidentally fall back to
# something elsewhere.
CORSO_M365_TEST_USER_ID: 'foo'
CORSO_SECONDARY_M365_TEST_USER_ID: 'foo'
CORSO_M365_TEST_USER_ID: "foo"
CORSO_SECONDARY_M365_TEST_USER_ID: "foo"
run: |
set -euo pipefail
go test \
-tags testing \
-json \
-v \
-failfast \
-p 1 \
-timeout 20m \
./... \
2>&1 | tee ./testlog/gotest-unit.log | gotestfmt -hide successful-tests
# Run the first test
go test \
-tags testing \
-failfast \
-p 1 \
-timeout 20m \
./... \
2>&1 | tee -a /tmp/corso-testlog/gotest-unit.log
# Upload the original go test output as an artifact for later review.
- name: Upload test log
if: failure()
if: always()
uses: actions/upload-artifact@v4
with:
name: unit-test-log
path: src/testlog/*
path: /tmp/corso-testlog/*
if-no-files-found: error
retention-days: 14
@ -463,7 +517,7 @@ jobs:
go-version-file: src/go.mod
- name: Go Lint
uses: golangci/golangci-lint-action@v4
uses: golangci/golangci-lint-action@v3
with:
# Keep pinned to a verson as sometimes updates will add new lint
# failures in unchanged code.
@ -495,7 +549,7 @@ jobs:
- uses: cachix/install-nix-action@v25
- uses: cachix/cachix-action@v14
with:
name: tree-grepper
name: tree-grepper
- run: nix-env -if https://github.com/BrianHicks/tree-grepper/archive/refs/heads/main.tar.gz
- name: Run trailing comma lint rule
run: |
@ -518,20 +572,6 @@ jobs:
echo "Make sure to propagate errors with clues"
exit 1
fi
- name: Check if clues without context are used when context is passed in
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#match? @clue "WC$"))' | grep .; then
echo "Do not use clues.*WC when context is passed in"
exit 1
fi
- name: Check clues with context is used when context is not passed in
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#not-eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#not-match? @clue "WC$"))' | grep .; then
echo "Use clues.*WC when context is not passed in"
exit 1
fi
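The two steps removed above enforce a convention on main around the clues error-wrapping library: when the failing call already received the `ctx`, return a plain `clues.*` wrap; when it did not, attach the context at the wrap site with a `WC`-suffixed helper. A sketch of both cases, assuming the `WC` variants take the context directly as the rule text implies (`step` and `parse` are hypothetical helpers):

```go
package example

import (
	"context"
	"errors"

	"github.com/alcionai/clues"
)

// step receives the context, so its error is assumed to carry
// ctx-derived clues already.
func step(ctx context.Context) error { return errors.New("step failed") }

// parse never sees the context.
func parse(raw string) (string, error) { return "", errors.New("bad input") }

func run(ctx context.Context, raw string) error {
	err := step(ctx)
	if err != nil {
		// First rule: the callee already had ctx, so no WC variant here.
		return clues.Stack(err)
	}

	_, err = parse(raw)
	if err != nil {
		// Second rule: the callee never saw ctx, so attach it now.
		return clues.WrapWC(ctx, err, "parsing input")
	}

	return nil
}
```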
# ----------------------------------------------------------------------------------------------------
# --- GitHub Actions Linting -------------------------------------------------------------------------
@ -737,7 +777,14 @@ jobs:
cfid: ${{ needs.SetEnv.outputs.website-cfid }}
Publish-Website-Prod:
needs: [SetEnv, Validate-Linux-Artifacts, Validate-MacOS-Artifacts, Validate-Docker-Artifacts, Validate-Windows-Artifacts]
needs:
[
SetEnv,
Validate-Linux-Artifacts,
Validate-MacOS-Artifacts,
Validate-Docker-Artifacts,
Validate-Windows-Artifacts,
]
environment: ${{ needs.SetEnv.outputs.environment }}
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/')

View File

@ -12,7 +12,7 @@ jobs:
continue-on-error: true
strategy:
matrix:
user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, ""]
user: [ CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, '' ]
steps:
- uses: actions/checkout@v4
@ -33,15 +33,12 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] ${{ vars[matrix.user] }} CI Cleanup"
msg: "[FAILED] ${{ vars[matrix.user] }} CI Cleanup"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
Test-Site-Data-Cleanup:
@ -50,7 +47,7 @@ jobs:
continue-on-error: true
strategy:
matrix:
site: [CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL]
site: [ CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL ]
steps:
- uses: actions/checkout@v4
@ -73,13 +70,10 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] ${{ vars[matrix.site] }} CI Cleanup"
msg: "[FAILED] ${{ vars[matrix.site] }} CI Cleanup"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -155,6 +155,3 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}

View File

@ -6,7 +6,7 @@ on:
workflow_dispatch:
inputs:
user:
description: "User to run longevity test on"
description: 'User to run longevity test on'
permissions:
# required to retrieve AWS credentials
@ -23,7 +23,7 @@ jobs:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
Longevity-Tests:
needs: [SetM365App]
needs: [ SetM365App ]
environment: Testing
runs-on: ubuntu-latest
env:
@ -37,7 +37,7 @@ jobs:
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-longevity.log
RESTORE_DEST_PFX: Corso_Test_Longevity_
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
PREFIX: "longevity"
PREFIX: 'longevity'
# Options for retention.
RETENTION_MODE: GOVERNANCE
@ -46,7 +46,7 @@ jobs:
defaults:
run:
working-directory: src
############################################################################
# setup
steps:
@ -78,7 +78,7 @@ jobs:
- run: go build -o corso
timeout-minutes: 10
- run: mkdir ${CORSO_LOG_DIR}
# Use shorter-lived credentials obtained from assume-role since these
@ -113,6 +113,7 @@ jobs:
--extend-retention \
--prefix ${{ env.PREFIX }} \
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
--succeed-if-exists \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
if grep -q 'Failed to' ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
@ -163,7 +164,7 @@ jobs:
data=$( echo $resultjson | jq -r '.[0] | .id' )
echo result=$data >> $GITHUB_OUTPUT
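The `jq -r '.[0] | .id'` line above pulls the first backup ID out of the JSON that the backup list command prints. The same extraction in Go, assuming the output is an array of objects with an `id` field:

```go
package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

// backupEntry models the assumed shape of one element of the backup
// list output; only the field we need is declared.
type backupEntry struct {
	ID string `json:"id"`
}

// firstBackupID is the Go equivalent of jq's '.[0] | .id'.
func firstBackupID(out []byte) (string, error) {
	var entries []backupEntry
	if err := json.Unmarshal(out, &entries); err != nil {
		return "", err
	}
	if len(entries) == 0 {
		return "", errors.New("no backups in list output")
	}

	return entries[0].ID, nil
}

func main() {
	id, _ := firstBackupID([]byte(`[{"id":"abcd-1234"},{"id":"efgh-5678"}]`))
	fmt.Println(id) // abcd-1234
}
```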
##########################################################################
# Onedrive
@ -328,7 +329,7 @@ jobs:
--hide-progress \
--force \
--json \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt
- name: Maintenance test Weekly
id: maintenance-test-weekly
@ -392,5 +393,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Longevity Test"
msg: "[FAILED] Longevity Test"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -48,7 +48,7 @@ jobs:
# ----------------------------------------------------------------------------------------------------
Test-Suite-Trusted:
needs: [Checkout, SetM365App]
needs: [ Checkout, SetM365App]
environment: Testing
runs-on: ubuntu-latest
defaults:
@ -100,9 +100,9 @@ jobs:
-timeout 2h \
./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests
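gotestfmt, kept in this nightly pipeline but dropped from the branch's CI runs, consumes the test2json event stream that `go test -json` emits; that is presumably why the commits that removed the `-json` flag also had to drop the gotestfmt stage. For reference, a sketch of one event in that stream (field set per `go doc test2json`):

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// testEvent mirrors the events emitted by `go test -json`, which
// formatters like gotestfmt consume.
type testEvent struct {
	Time    time.Time `json:"Time"`
	Action  string    `json:"Action"`
	Package string    `json:"Package"`
	Test    string    `json:"Test,omitempty"`
	Elapsed float64   `json:"Elapsed,omitempty"`
	Output  string    `json:"Output,omitempty"`
}

func main() {
	line := `{"Action":"pass","Package":"example.com/pkg","Test":"TestFoo","Elapsed":0.01}`

	var ev testEvent
	if err := json.Unmarshal([]byte(line), &ev); err != nil {
		panic(err)
	}

	fmt.Printf("%s %s in %s\n", ev.Test, ev.Action, ev.Package)
}
```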
##########################################################################################################################################
##########################################################################################################################################
# Logging & Notifications
# Logging & Notifications
# Upload the original go test output as an artifact for later review.
- name: Upload test log
@ -118,5 +118,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[COROS FAILED] Nightly Checks"
msg: "[FAILED] Nightly Checks"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -19,7 +19,7 @@ jobs:
private_key: ${{ secrets.PRIVATE_KEY }}
- name: Slash Command Dispatch
uses: peter-evans/slash-command-dispatch@v4
uses: peter-evans/slash-command-dispatch@v3
env:
TOKEN: ${{ steps.generate_token.outputs.token }}
with:

View File

@ -6,7 +6,7 @@ on:
workflow_dispatch:
inputs:
user:
description: "User to run sanity test on"
description: 'User to run sanity test on'
permissions:
# required to retrieve AWS credentials
@ -23,7 +23,7 @@ jobs:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
Sanity-Tests:
needs: [SetM365App]
needs: [ SetM365App ]
environment: Testing
runs-on: ubuntu-latest
env:
@ -43,11 +43,12 @@ jobs:
defaults:
run:
working-directory: src
##########################################################################################################################################
##########################################################################################################################################
# setup
# setup
steps:
- uses: actions/checkout@v4
- name: Setup Golang with cache
@ -63,9 +64,9 @@ jobs:
- run: mkdir ${CORSO_LOG_DIR}
##########################################################################################################################################
##########################################################################################################################################
# Pre-Run cleanup
# Pre-Run cleanup
# unlike CI tests, sanity tests are not expected to run concurrently.
# however, the sanity yaml concurrency is set to a maximum of 1 run, preferring
@ -90,9 +91,6 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Purge CI-Produced Folders for Sites
timeout-minutes: 30
@ -101,20 +99,17 @@ jobs:
with:
site: ${{ vars.CORSO_M365_TEST_SITE_URL }}
folder-prefix: ${{ env.RESTORE_DEST_PFX }}
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
older-than: ${{ env.NOW }}
azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
##########################################################################################################################################
##########################################################################################################################################
# Repository commands
# Repository commands
- name: Version Test
timeout-minutes: 10
@ -174,9 +169,9 @@ jobs:
--mode complete \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-maintenance.log
##########################################################################################################################################
##########################################################################################################################################
# Exchange
# Exchange
# generate new entries to roll into the next load test
# only runs if the test was successful
@ -198,8 +193,8 @@ jobs:
service: exchange
kind: first-backup
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@ -211,8 +206,8 @@ jobs:
service: exchange
kind: incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@ -225,8 +220,8 @@ jobs:
service: exchange
kind: non-delta
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@ -239,15 +234,16 @@ jobs:
service: exchange
kind: non-delta-incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
# Onedrive
##########################################################################################################################################
# Onedrive
# generate new entries for test
- name: OneDrive - Create new data
@ -274,8 +270,8 @@ jobs:
service: onedrive
kind: first-backup
backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@ -299,14 +295,14 @@ jobs:
service: onedrive
kind: incremental
backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
##########################################################################################################################################
# Sharepoint Library
# Sharepoint Library
# generate new entries for test
- name: SharePoint - Create new data
@ -334,8 +330,8 @@ jobs:
service: sharepoint
kind: first-backup
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: libraries
@ -361,15 +357,15 @@ jobs:
service: sharepoint
kind: incremental
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: libraries
##########################################################################################################################################
##########################################################################################################################################
# Sharepoint Lists
# Sharepoint Lists
# generate new entries for test
# The `awk | tr | sed` command chain is used to get a comma separated list of SharePoint list names.
@ -407,7 +403,7 @@ jobs:
service: sharepoint
kind: first-backup-lists
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
restore-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
restore-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S') --allow-lists-restore"
export-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
restore-container: "${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
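The restore destinations above embed a shell timestamp, `$(date +'%Y%m%d_%H%M%S')`, to keep each sanity-run container unique. The same stamp in Go uses the reference-time layout:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Equivalent of date +'%Y%m%d_%H%M%S', written against Go's
	// reference time 2006-01-02 15:04:05.
	stamp := time.Now().Format("20060102_150405")
	fmt.Println("Corso_Test_Sanity_Restore_" + stamp)
}
```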
@ -422,7 +418,7 @@ jobs:
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint lists \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
@ -450,7 +446,7 @@ jobs:
service: sharepoint
kind: incremental-lists
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
restore-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
restore-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S') --allow-lists-restore"
export-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
restore-container: "${{ steps.sharepoint-lists-store-restore-container-inc.outputs.result }},${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
@ -458,9 +454,9 @@ jobs:
category: lists
on-collision: copy
##########################################################################################################################################
##########################################################################################################################################
# Groups and Teams
# Groups and Teams
# generate new entries for test
- name: Groups - Create new data
@ -487,8 +483,8 @@ jobs:
with:
service: groups
kind: first-backup
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@ -512,15 +508,15 @@ jobs:
with:
service: groups
kind: incremental
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
restore-args: '--site "${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }}" --folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
##########################################################################################################################################
# Logging & Notifications
# Logging & Notifications
# Upload the original go test output as an artifact for later review.
- name: Upload test log
@ -536,5 +532,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Sanity Tests"
msg: "[FAILED] Sanity Tests"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -6,22 +6,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased] (beta)
### Fixed
- Handle the case where an email or event cannot be retrieved from Exchange due to an `ErrorCorruptData` error. Corso will skip over the item but report it in the backup summary.
- Emails attached within other emails are now correctly exported
- Gracefully handle email and post attachments without name when exporting to eml
- Use correct timezone for event start and end times in Exchange exports (helps fix issues in relative recurrence patterns)
- Fixed an issue causing exports dealing with calendar data to have high memory usage
## [v0.19.0] (beta) - 2024-02-06
### Added
- Events can now be exported from Exchange backups as .ics files.
- Update repo init configuration to reduce the total number of GET requests sent
to the object store when using corso. This affects repos that have many
backups created in them per day the most.
- Feature Preview: Corso now supports backup, export & restore of SharePoint lists. Lists backup can be initiated using `corso backup create sharepoint --site <site-url> --data lists`.
- Group mailbox(aka conversations) backup and export support is now officially available. Group mailbox posts can be exported as `.eml` files.
### Fixed
- Retry transient 400 "invalidRequest" errors during onedrive & sharepoint backup.
@ -29,12 +19,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Groups and Teams backups no longer fail when a resource has no display name.
- Contacts in-place restore failed if the restore destination was empty.
- Link shares with external users are now backed up and restored as expected
- Ensure persistent repo config is populated on repo init if repo init failed partway through during the previous init attempt.
### Changed
- When running `backup details` on an empty backup returns a more helpful error message.
- Backup List additionally shows the data category for each backup.
- Remove hidden `--succeed-if-exists` flag for repo init. Repo init will now succeed without error if run on an existing repo with the same passphrase.
### Known issues
- Backing up a group mailbox item may fail if it has a very large number of attachments (500+).
@ -42,10 +30,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Exchange in-place restore may restore items in well-known folders to different folders if the user has well-known folder names change based on locale and has updated the locale since the backup was created.
- In-place Exchange contacts restore will merge items in folders named "Contacts" or "contacts" into the default folder.
- External users with access through shared links will not receive these links as they are not sent via email during restore.
- Graph API has limited support for certain column types such as `location`, `hyperlink/picture`, and `metadata`. Restoring SharePoint list items containing these columns will result in differences compared to the original items.
- SharePoint list item attachments are not available due to graph API limitations.
- Group mailbox restore is not supported due to limited Graph API support for creating mailbox items.
- Due to Graph API limitations, any group mailbox items present in subfolders other than Inbox aren't backed up.
## [v0.18.0] (beta) - 2024-01-02
@ -502,8 +486,7 @@ this case, Corso will skip over the item but report this in the backup summary.
- Miscellaneous
- Optional usage statistics reporting ([RM-35](https://github.com/alcionai/corso-roadmap/issues/35))
[Unreleased]: https://github.com/alcionai/corso/compare/v0.19.0...HEAD
[v0.19.0]: https://github.com/alcionai/corso/compare/v0.18.0...v0.19.0
[Unreleased]: https://github.com/alcionai/corso/compare/v0.18.0...HEAD
[v0.18.0]: https://github.com/alcionai/corso/compare/v0.17.0...v0.18.0
[v0.17.0]: https://github.com/alcionai/corso/compare/v0.16.0...v0.17.0
[v0.16.0]: https://github.com/alcionai/corso/compare/v0.15.0...v0.16.0

View File

@ -1,6 +1,3 @@
> [!NOTE]
> **The Corso project is no longer actively maintained and has been archived**.
<p align="center">
<img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
</p>

View File

@ -45,7 +45,6 @@ var serviceCommands = []func(cmd *cobra.Command) *cobra.Command{
addOneDriveCommands,
addSharePointCommands,
addGroupsCommands,
addTeamsChatsCommands,
}
// AddCommands attaches all `corso backup * *` commands to the parent.

View File

@ -18,7 +18,6 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path"
@ -40,7 +39,7 @@ var (
type NoBackupExchangeE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestNoBackupExchangeE2ESuite(t *testing.T) {
@ -55,7 +54,7 @@ func (suite *NoBackupExchangeE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
}
@ -94,7 +93,7 @@ func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_noBackups() {
type BackupExchangeE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestBackupExchangeE2ESuite(t *testing.T) {
@ -109,7 +108,7 @@ func (suite *BackupExchangeE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
}
@ -139,7 +138,7 @@ func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.
cmd, ctx := buildExchangeBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.m365.User.ID,
suite.its.user.ID,
category.String(),
&recorder)
@ -150,11 +149,8 @@ func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.
result := recorder.String()
t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email.
assert.Contains(
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
// as an offhand check: the result should contain the m365 user id
assert.Contains(t, result, suite.its.user.ID)
}
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_ServiceNotEnabled_email() {
@ -177,7 +173,7 @@ func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, categ
cmd, ctx := buildExchangeBackupCmd(
ctx,
suite.dpnd.configFilePath,
fmt.Sprintf("%s,%s", tconfig.UnlicensedM365UserID(suite.T()), suite.m365.User.ID),
fmt.Sprintf("%s,%s", tconfig.UnlicensedM365UserID(suite.T()), suite.its.user.ID),
category.String(),
&recorder)
err := cmd.ExecuteContext(ctx)
@ -186,11 +182,8 @@ func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, categ
result := recorder.String()
t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email.
assert.Contains(
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
// as an offhand check: the result should contain the m365 user id
assert.Contains(t, result, suite.its.user.ID)
}
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_email() {
@ -249,7 +242,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAzureClientIDFl
cmd := cliTD.StubRootCmd(
"backup", "create", "exchange",
"--user", suite.m365.User.ID,
"--user", suite.its.user.ID,
"--azure-client-id", "invalid-value")
cli.BuildCommandTree(cmd)
@ -273,7 +266,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_fromConfigFile() {
cmd := cliTD.StubRootCmd(
"backup", "create", "exchange",
"--user", suite.m365.User.ID,
"--user", suite.its.user.ID,
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
@ -288,11 +281,8 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_fromConfigFile() {
result := suite.dpnd.recorder.String()
t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email.
assert.Contains(
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
// as an offhand check: the result should contain the m365 user id
assert.Contains(t, result, suite.its.user.ID)
}
// AWS flags
@ -306,7 +296,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAWSFlags() {
cmd := cliTD.StubRootCmd(
"backup", "create", "exchange",
"--user", suite.m365.User.ID,
"--user", suite.its.user.ID,
"--aws-access-key", "invalid-value",
"--aws-secret-access-key", "some-invalid-value")
cli.BuildCommandTree(cmd)
@ -329,7 +319,7 @@ type PreparedBackupExchangeE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestPreparedBackupExchangeE2ESuite(t *testing.T) {
@ -346,13 +336,13 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
suite.backupOps = make(map[path.CategoryType]string)
var (
users = []string{suite.m365.User.ID}
ins = idname.NewCache(map[string]string{suite.m365.User.ID: suite.m365.User.ID})
users = []string{suite.its.user.ID}
ins = idname.NewCache(map[string]string{suite.its.user.ID: suite.its.user.ID})
)
for _, set := range []path.CategoryType{email, contacts, events} {
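Alongside reverting from the shared `its.M365IntgTestSetup` helper to the older `intgTesterSetup`, the hunks above swap main's case-insensitive check on the user's email for a plain substring check on the user ID. Both assertion styles side by side, with `result`, `userEmail`, and `userID` as stand-ins for the recorded CLI output and the test user's identifiers:

```go
package example

import (
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
)

func assertOwnerMentioned(t *testing.T, result, userEmail, userID string) {
	// main's style: case-insensitive match on the owner's email.
	assert.Contains(t, strings.ToLower(result), strings.ToLower(userEmail))

	// this branch's style: exact substring match on the user ID.
	assert.Contains(t, result, userID)
}
```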

View File

@ -35,12 +35,9 @@ const (
groupsServiceCommandCreateExamples = `# Backup all Groups and Teams data for the Marketing group
corso backup create groups --group Marketing
# Backup only Teams channel messages
# Backup only Teams conversations messages
corso backup create groups --group Marketing --data messages
# Backup only group mailbox posts
corso backup create groups --group Marketing --data conversations
# Backup all Groups and Teams data for all groups
corso backup create groups --group '*'`
@ -53,10 +50,7 @@ corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd
# Explore Marketing messages posted after the start of 2022
corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd \
--last-message-reply-after 2022-01-01T00:00:00
# Explore group mailbox posts with conversation subject "hello world"
corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world"`
--last-message-reply-after 2022-01-01T00:00:00`
)
// called by backup.go to map subcommands to provider-specific handling.
@ -316,7 +310,7 @@ func groupsBackupCreateSelectors(
group, cats []string,
) *selectors.GroupsBackup {
if filters.PathContains(group).Compare(flags.Wildcard) {
return includeAllGroupsWithCategories(ins, cats)
return includeAllGroupWithCategories(ins, cats)
}
sel := selectors.NewGroupsBackup(slices.Clone(group))
@ -324,6 +318,6 @@ func groupsBackupCreateSelectors(
return utils.AddGroupsCategories(sel, cats)
}
func includeAllGroupsWithCategories(ins idname.Cacher, categories []string) *selectors.GroupsBackup {
func includeAllGroupWithCategories(ins idname.Cacher, categories []string) *selectors.GroupsBackup {
return utils.AddGroupsCategories(selectors.NewGroupsBackup(ins.IDs()), categories)
}
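The rename above (`includeAllGroupWithCategories` becomes `includeAllGroupsWithCategories` on main) sits behind the wildcard branch: `--group '*'` expands to every group ID known to the idname cache, while explicit groups pass through unchanged. A reduced sketch of that dispatch with stand-in names:

```go
package example

// expandGroups is a stand-in for the selector dispatch above: the
// wildcard expands to all cached group IDs; anything else is used
// verbatim.
func expandGroups(requested, allCachedIDs []string) []string {
	for _, g := range requested {
		if g == "*" {
			return allCachedIDs
		}
	}

	return requested
}
```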

View File

@ -20,7 +20,6 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path"
@ -36,7 +35,7 @@ import (
type NoBackupGroupsE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestNoBackupGroupsE2ESuite(t *testing.T) {
@ -51,7 +50,7 @@ func (suite *NoBackupGroupsE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
}
@ -90,7 +89,7 @@ func (suite *NoBackupGroupsE2ESuite) TestGroupsBackupListCmd_noBackups() {
type BackupGroupsE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestBackupGroupsE2ESuite(t *testing.T) {
@ -105,7 +104,7 @@ func (suite *BackupGroupsE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
}
@ -114,8 +113,6 @@ func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() {
}
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_conversations() {
// skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
runGroupsBackupCategoryTest(suite, flags.DataConversations)
}
@ -137,7 +134,7 @@ func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category string) {
cmd, ctx := buildGroupsBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.m365.Group.ID,
suite.its.group.ID,
category,
&recorder)
@ -205,7 +202,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()
cmd := cliTD.StubRootCmd(
"backup", "create", "groups",
"--group", suite.m365.Group.ID,
"--group", suite.its.group.ID,
"--azure-client-id", "invalid-value")
cli.BuildCommandTree(cmd)
@ -219,9 +216,6 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()
}
func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
// Skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
@ -232,7 +226,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
cmd := cliTD.StubRootCmd(
"backup", "create", "groups",
"--group", suite.m365.Group.ID,
"--group", suite.its.group.ID,
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
@ -256,7 +250,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAWSFlags() {
cmd := cliTD.StubRootCmd(
"backup", "create", "groups",
"--group", suite.m365.Group.ID,
"--group", suite.its.group.ID,
"--aws-access-key", "invalid-value",
"--aws-secret-access-key", "some-invalid-value")
cli.BuildCommandTree(cmd)
@ -279,7 +273,7 @@ type PreparedBackupGroupsE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestPreparedBackupGroupsE2ESuite(t *testing.T) {
@ -296,19 +290,16 @@ func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
suite.backupOps = make(map[path.CategoryType]string)
var (
groups = []string{suite.m365.Group.ID}
ins = idname.NewCache(map[string]string{suite.m365.Group.ID: suite.m365.Group.ID})
groups = []string{suite.its.group.ID}
ins = idname.NewCache(map[string]string{suite.its.group.ID: suite.its.group.ID})
cats = []path.CategoryType{
path.ChannelMessagesCategory,
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
// we go fix the group mailbox.
// path.ConversationPostsCategory,
path.ConversationPostsCategory,
path.LibrariesCategory,
}
)
@ -462,8 +453,6 @@ func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages(
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_conversations() {
// skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
runGroupsDetailsCmdTest(suite, path.ConversationPostsCategory)
}

View File

@ -14,16 +14,141 @@ import (
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/storage"
"github.com/alcionai/corso/src/pkg/storage/testdata"
)
// ---------------------------------------------------------------------------
// Gockable client
// ---------------------------------------------------------------------------
// gockClient produces a new m365 api client that can be
// mocked using gock.
func gockClient(creds account.M365Config, counter *count.Bus) (api.Client, error) {
s, err := graph.NewGockService(creds, counter)
if err != nil {
return api.Client{}, err
}
li, err := graph.NewGockService(creds, counter, graph.NoTimeout())
if err != nil {
return api.Client{}, err
}
return api.Client{
Credentials: creds,
Stable: s,
LargeItem: li,
}, nil
}
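Downstream suites exercise the gockAC client by registering interceptors before issuing calls. A minimal sketch of that flow, assuming the stock github.com/h2non/gock API and a hypothetical Graph endpoint; nothing below is part of this diff:

import (
    "testing"

    "github.com/h2non/gock"
    "github.com/stretchr/testify/assert"
)

func TestStubbedGraphCall(t *testing.T) {
    // remove all registered interceptors when the test finishes
    defer gock.Off()

    // serve a canned payload for the next matching Graph request
    gock.New("https://graph.microsoft.com").
        Get("/v1.0/users/user-id/drive").
        Reply(200).
        JSON(map[string]any{"id": "drive-id"})

    // ... exercise code that calls the suite's gockAC client here ...

    assert.True(t, gock.IsDone(), "all registered interceptors were consumed")
}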
// ---------------------------------------------------------------------------
// Suite Setup
// ---------------------------------------------------------------------------
type ids struct {
ID string
DriveID string
DriveRootFolderID string
}
type intgTesterSetup struct {
acct account.Account
ac api.Client
gockAC api.Client
user ids
site ids
group ids
team ids
}
func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
its := intgTesterSetup{}
ctx, flush := tester.NewContext(t)
defer flush()
graph.InitializeConcurrencyLimiter(ctx, true, 4)
its.acct = tconfig.NewM365Account(t)
creds, err := its.acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
its.ac, err = api.NewClient(
creds,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))
its.gockAC, err = gockClient(creds, count.New())
require.NoError(t, err, clues.ToCore(err))
// user drive
uids := ids{}
uids.ID = tconfig.M365UserID(t)
userDrive, err := its.ac.Users().GetDefaultDrive(ctx, uids.ID)
require.NoError(t, err, clues.ToCore(err))
uids.DriveID = ptr.Val(userDrive.GetId())
userDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, uids.DriveID)
require.NoError(t, err, clues.ToCore(err))
uids.DriveRootFolderID = ptr.Val(userDriveRootFolder.GetId())
its.user = uids
// site
sids := ids{}
sids.ID = tconfig.M365SiteID(t)
siteDrive, err := its.ac.Sites().GetDefaultDrive(ctx, sids.ID)
require.NoError(t, err, clues.ToCore(err))
sids.DriveID = ptr.Val(siteDrive.GetId())
siteDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, sids.DriveID)
require.NoError(t, err, clues.ToCore(err))
sids.DriveRootFolderID = ptr.Val(siteDriveRootFolder.GetId())
its.site = sids
// group
gids := ids{}
// use of the TeamID is intentional here, so that we are assured
// the group has full usage of the teams api.
gids.ID = tconfig.M365TeamID(t)
its.group = gids
// team
tids := ids{}
tids.ID = tconfig.M365TeamID(t)
its.team = tids
return its
}
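A consuming suite only needs to embed the setup struct and call the constructor once in SetupSuite, mirroring the E2E suites elsewhere in this diff. A hypothetical minimal example:

type ExampleIntgSuite struct {
    tester.Suite
    its intgTesterSetup
}

func (suite *ExampleIntgSuite) SetupSuite() {
    // resolves the account, api clients, and drive IDs exactly once
    suite.its = newIntegrationTesterSetup(suite.T())
}

func (suite *ExampleIntgSuite) TestUserDriveResolved() {
    // drive IDs were pre-resolved by the setup, so tests read them directly
    assert.NotEmpty(suite.T(), suite.its.user.DriveID)
    assert.NotEmpty(suite.T(), suite.its.user.DriveRootFolderID)
}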
type dependencies struct {
st storage.Storage
repo repository.Repositoryer

View File

@ -37,11 +37,7 @@ corso backup create sharepoint --site https://example.com/hr
corso backup create sharepoint --site https://example.com/hr,https://example.com/team
# Backup all SharePoint data for all Sites
corso backup create sharepoint --site '*'
# Backup all SharePoint list data for a Site
corso backup create sharepoint --site https://example.com/hr --data lists
`
corso backup create sharepoint --site '*'`
sharePointServiceCommandDeleteExamples = `# Delete SharePoint backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
and 1234abcd-12ab-cd34-56de-1234abce
@ -61,26 +57,7 @@ corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
# Explore all files within the document library "Work Documents"
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--library "Work Documents"
# Explore lists by their name(s)
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list "list-name-1,list-name-2"
# Explore lists created after a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-after 2024-01-01T12:23:34
# Explore lists created before a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-before 2024-01-01T12:23:34
# Explore lists modified before a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-before 2024-01-01T12:23:34
# Explore lists modified after a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-after 2024-01-01T12:23:34`
`
)
// called by backup.go to map subcommands to provider-specific handling.
@ -96,8 +73,6 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
flags.AddSiteFlag(c, true)
flags.AddSiteIDFlag(c, true)
// [TODO](hitesh) to add lists flag to invoke backup for lists
// when explicit invoke is not required anymore
flags.AddDataFlag(c, []string{flags.DataLibraries}, true)
flags.AddGenericBackupFlags(c)

View File

@ -20,7 +20,6 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/config"
@ -90,7 +89,7 @@ func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() {
type BackupSharepointE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestBackupSharepointE2ESuite(t *testing.T) {
@ -105,7 +104,7 @@ func (suite *BackupSharepointE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
}
@ -129,7 +128,7 @@ func runSharepointBackupCategoryTest(suite *BackupSharepointE2ESuite, category s
cmd, ctx := buildSharepointBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.m365.Site.ID,
suite.its.site.ID,
category,
&recorder)
@ -188,7 +187,7 @@ type PreparedBackupSharepointE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestPreparedBackupSharepointE2ESuite(t *testing.T) {
@ -205,13 +204,13 @@ func (suite *PreparedBackupSharepointE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
suite.backupOps = make(map[path.CategoryType]string)
var (
sites = []string{suite.m365.Site.ID}
ins = idname.NewCache(map[string]string{suite.m365.Site.ID: suite.m365.Site.ID})
sites = []string{suite.its.site.ID}
ins = idname.NewCache(map[string]string{suite.its.site.ID: suite.its.site.ID})
cats = []path.CategoryType{
path.ListsCategory,
}

View File

@ -1,305 +0,0 @@
package backup
import (
"context"
"fmt"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365"
)
// ------------------------------------------------------------------------------------------------
// setup and globals
// ------------------------------------------------------------------------------------------------
const (
teamschatsServiceCommand = "chats"
teamschatsServiceCommandCreateUseSuffix = "--user <userEmail> | '" + flags.Wildcard + "'"
teamschatsServiceCommandDeleteUseSuffix = "--backups <backupId>"
teamschatsServiceCommandDetailsUseSuffix = "--backup <backupId>"
)
const (
teamschatsServiceCommandCreateExamples = `# Backup all chats with bob@company.hr
corso backup create chats --user bob@company.hr
# Backup all chats for all users
corso backup create chats --user '*'`
teamschatsServiceCommandDeleteExamples = `# Delete chats backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
and 1234abcd-12ab-cd34-56de-1234abce
corso backup delete chats --backups 1234abcd-12ab-cd34-56de-1234abcd,1234abcd-12ab-cd34-56de-1234abce`
teamschatsServiceCommandDetailsExamples = `# Explore chats in Bob's latest backup (1234abcd...)
corso backup details chats --backup 1234abcd-12ab-cd34-56de-1234abcd`
)
// called by backup.go to map subcommands to provider-specific handling.
func addTeamsChatsCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
switch cmd.Use {
case createCommand:
c, _ = utils.AddCommand(cmd, teamschatsCreateCmd(), utils.MarkPreReleaseCommand())
c.Use = c.Use + " " + teamschatsServiceCommandCreateUseSuffix
c.Example = teamschatsServiceCommandCreateExamples
// Flags addition ordering should follow the order we want them to appear in help and docs:
flags.AddUserFlag(c)
flags.AddDataFlag(c, []string{flags.DataChats}, false)
flags.AddGenericBackupFlags(c)
case listCommand:
c, _ = utils.AddCommand(cmd, teamschatsListCmd(), utils.MarkPreReleaseCommand())
flags.AddBackupIDFlag(c, false)
flags.AddAllBackupListFlags(c)
case detailsCommand:
c, _ = utils.AddCommand(cmd, teamschatsDetailsCmd(), utils.MarkPreReleaseCommand())
c.Use = c.Use + " " + teamschatsServiceCommandDetailsUseSuffix
c.Example = teamschatsServiceCommandDetailsExamples
flags.AddSkipReduceFlag(c)
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
flags.AddBackupIDFlag(c, true)
flags.AddTeamsChatsDetailsAndRestoreFlags(c)
case deleteCommand:
c, _ = utils.AddCommand(cmd, teamschatsDeleteCmd(), utils.MarkPreReleaseCommand())
c.Use = c.Use + " " + teamschatsServiceCommandDeleteUseSuffix
c.Example = teamschatsServiceCommandDeleteExamples
flags.AddMultipleBackupIDsFlag(c, false)
flags.AddBackupIDFlag(c, false)
}
return c
}
// ------------------------------------------------------------------------------------------------
// backup create
// ------------------------------------------------------------------------------------------------
// `corso backup create chats [<flag>...]`
func teamschatsCreateCmd() *cobra.Command {
return &cobra.Command{
Use: teamschatsServiceCommand,
Aliases: []string{teamsServiceCommand},
Short: "Backup M365 Chats data",
RunE: createTeamsChatsCmd,
Args: cobra.NoArgs,
}
}
// processes a teamschats backup.
func createTeamsChatsCmd(cmd *cobra.Command, args []string) error {
ctx := cmd.Context()
if utils.HasNoFlagsAndShownHelp(cmd) {
return nil
}
if flags.RunModeFV == flags.RunModeFlagTest {
return nil
}
if err := validateTeamsChatsBackupCreateFlags(flags.UserFV, flags.CategoryDataFV); err != nil {
return err
}
r, acct, err := utils.AccountConnectAndWriteRepoConfig(
ctx,
cmd,
path.TeamsChatsService)
if err != nil {
return Only(ctx, err)
}
defer utils.CloseRepo(ctx, r)
// TODO: log/print recoverable errors
errs := fault.New(false)
svcCli, err := m365.NewM365Client(ctx, *acct)
if err != nil {
return Only(ctx, clues.Stack(err))
}
ins, err := svcCli.AC.Users().GetAllIDsAndNames(ctx, errs)
if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 teamschats"))
}
sel := teamschatsBackupCreateSelectors(ctx, ins, flags.UserFV, flags.CategoryDataFV)
selectorSet := []selectors.Selector{}
for _, discSel := range sel.SplitByResourceOwner(ins.IDs()) {
selectorSet = append(selectorSet, discSel.Selector)
}
return genericCreateCommand(
ctx,
r,
"Chats",
selectorSet,
ins)
}
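The SplitByResourceOwner step above fans one multi-owner selector out into a discrete selector per user before the generic create runs. A sketch of that fan-out with hypothetical IDs:

ins := idname.NewCache(map[string]string{
    "user-1": "alice@company.hr",
    "user-2": "bob@company.hr",
})

// the wildcard resolves to every cached user
sel := teamschatsBackupCreateSelectors(ctx, ins, []string{flags.Wildcard}, nil)

// each owner gets its own selector, and therefore its own backup operation
selectorSet := []selectors.Selector{}
for _, discSel := range sel.SplitByResourceOwner(ins.IDs()) {
    selectorSet = append(selectorSet, discSel.Selector)
}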
// ------------------------------------------------------------------------------------------------
// backup list
// ------------------------------------------------------------------------------------------------
// `corso backup list teamschats [<flag>...]`
func teamschatsListCmd() *cobra.Command {
return &cobra.Command{
Use: teamschatsServiceCommand,
Short: "List the history of M365 Chats backups",
RunE: listTeamsChatsCmd,
Args: cobra.NoArgs,
}
}
// lists the history of backup operations
func listTeamsChatsCmd(cmd *cobra.Command, args []string) error {
return genericListCommand(cmd, flags.BackupIDFV, path.TeamsChatsService, args)
}
// ------------------------------------------------------------------------------------------------
// backup details
// ------------------------------------------------------------------------------------------------
// `corso backup details teamschats [<flag>...]`
func teamschatsDetailsCmd() *cobra.Command {
return &cobra.Command{
Use: teamschatsServiceCommand,
Short: "Shows the details of a M365 Chats backup",
RunE: detailsTeamsChatsCmd,
Args: cobra.NoArgs,
}
}
// processes a teamschats backup.
func detailsTeamsChatsCmd(cmd *cobra.Command, args []string) error {
if utils.HasNoFlagsAndShownHelp(cmd) {
return nil
}
if flags.RunModeFV == flags.RunModeFlagTest {
return nil
}
return runDetailsTeamsChatsCmd(cmd)
}
func runDetailsTeamsChatsCmd(cmd *cobra.Command) error {
ctx := cmd.Context()
opts := utils.MakeTeamsChatsOpts(cmd)
sel := utils.IncludeTeamsChatsRestoreDataSelectors(ctx, opts)
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
utils.FilterTeamsChatsRestoreInfoSelectors(sel, opts)
ds, err := genericDetailsCommand(cmd, flags.BackupIDFV, sel.Selector)
if err != nil {
return Only(ctx, err)
}
if len(ds.Entries) > 0 {
ds.PrintEntries(ctx)
} else {
Info(ctx, selectors.ErrorNoMatchingItems)
}
return nil
}
// ------------------------------------------------------------------------------------------------
// backup delete
// ------------------------------------------------------------------------------------------------
// `corso backup delete teamschats [<flag>...]`
func teamschatsDeleteCmd() *cobra.Command {
return &cobra.Command{
Use: teamschatsServiceCommand,
Short: "Delete backed-up M365 Chats data",
RunE: deleteTeamsChatsCmd,
Args: cobra.NoArgs,
}
}
// deletes a teamschats backup.
func deleteTeamsChatsCmd(cmd *cobra.Command, args []string) error {
backupIDValue := []string{}
if len(flags.BackupIDsFV) > 0 {
backupIDValue = flags.BackupIDsFV
} else if len(flags.BackupIDFV) > 0 {
backupIDValue = append(backupIDValue, flags.BackupIDFV)
} else {
return clues.New("either --backup or --backups flag is required")
}
return genericDeleteCommand(cmd, path.TeamsChatsService, "TeamsChats", backupIDValue, args)
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
func validateTeamsChatsBackupCreateFlags(teamschats, cats []string) error {
if len(teamschats) == 0 {
return clues.New(
"requires one or more --" +
flags.UserFN + " ids, or the wildcard --" +
flags.UserFN + " *")
}
msg := fmt.Sprintf(
" is an unrecognized data type; only %s is supported",
flags.DataChats)
allowedCats := utils.TeamsChatsAllowedCategories()
for _, d := range cats {
if _, ok := allowedCats[d]; !ok {
return clues.New(d + msg)
}
}
return nil
}
func teamschatsBackupCreateSelectors(
ctx context.Context,
ins idname.Cacher,
users, cats []string,
) *selectors.TeamsChatsBackup {
if filters.PathContains(users).Compare(flags.Wildcard) {
return includeAllTeamsChatsWithCategories(ins, cats)
}
sel := selectors.NewTeamsChatsBackup(slices.Clone(users))
return utils.AddTeamsChatsCategories(sel, cats)
}
func includeAllTeamsChatsWithCategories(ins idname.Cacher, categories []string) *selectors.TeamsChatsBackup {
return utils.AddTeamsChatsCategories(selectors.NewTeamsChatsBackup(ins.IDs()), categories)
}

View File

@ -1,636 +0,0 @@
package backup_test
import (
"context"
"fmt"
"strings"
"testing"
"github.com/alcionai/clues"
"github.com/google/uuid"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)
// ---------------------------------------------------------------------------
// tests that require no existing backups
// ---------------------------------------------------------------------------
type NoBackupTeamsChatsE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
}
func TestNoBackupTeamsChatsE2ESuite(t *testing.T) {
suite.Run(t, &BackupTeamsChatsE2ESuite{Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
func (suite *NoBackupTeamsChatsE2ESuite) SetupSuite() {
t := suite.T()
t.Skip("not fully implemented")
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
}
func (suite *NoBackupTeamsChatsE2ESuite) TestTeamsChatsBackupListCmd_noBackups() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "list", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.dpnd.recorder.String()
// as an offhand check: the result should report that no backups are available
assert.True(t, strings.HasSuffix(result, "No backups available\n"))
}
// ---------------------------------------------------------------------------
// tests with no prior backup
// ---------------------------------------------------------------------------
type BackupTeamsChatsE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
}
func TestBackupTeamsChatsE2ESuite(t *testing.T) {
suite.Run(t, &BackupTeamsChatsE2ESuite{Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
func (suite *BackupTeamsChatsE2ESuite) SetupSuite() {
t := suite.T()
t.Skip("not fully implemented")
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
}
func (suite *BackupTeamsChatsE2ESuite) TestTeamsChatsBackupCmd_chats() {
runTeamsChatsBackupCategoryTest(suite, flags.DataChats)
}
func runTeamsChatsBackupCategoryTest(suite *BackupTeamsChatsE2ESuite, category string) {
recorder := strings.Builder{}
recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd, ctx := buildTeamsChatsBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.m365.User.ID,
category,
&recorder)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := recorder.String()
t.Log("backup results", result)
}
func (suite *BackupTeamsChatsE2ESuite) TestTeamsChatsBackupCmd_teamschatNotFound_chats() {
runTeamsChatsBackupTeamsChatNotFoundTest(suite, flags.DataChats)
}
func runTeamsChatsBackupTeamsChatNotFoundTest(suite *BackupTeamsChatsE2ESuite, category string) {
recorder := strings.Builder{}
recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd, ctx := buildTeamsChatsBackupCmd(
ctx,
suite.dpnd.configFilePath,
"foo@not-there.com",
category,
&recorder)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
assert.Contains(
t,
err.Error(),
"not found",
"error missing user not found")
assert.NotContains(t, err.Error(), "runtime error", "panic happened")
t.Logf("backup error message: %s", err.Error())
result := recorder.String()
t.Log("backup results", result)
}
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_badAzureClientIDFlag() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "chats",
"--teamschat", suite.m365.User.ID,
"--azure-client-id", "invalid-value")
cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_fromConfigFile() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "chats",
"--teamschat", suite.m365.User.ID,
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
}
// AWS flags
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_badAWSFlags() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "chats",
"--teamschat", suite.m365.User.ID,
"--aws-access-key", "invalid-value",
"--aws-secret-access-key", "some-invalid-value")
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
// since invalid aws creds are explicitly set, should see a failure
require.Error(t, err, clues.ToCore(err))
}
// ---------------------------------------------------------------------------
// tests prepared with a previous backup
// ---------------------------------------------------------------------------
type PreparedBackupTeamsChatsE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup
}
func TestPreparedBackupTeamsChatsE2ESuite(t *testing.T) {
suite.Run(t, &PreparedBackupTeamsChatsE2ESuite{
Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
func (suite *PreparedBackupTeamsChatsE2ESuite) SetupSuite() {
t := suite.T()
t.Skip("not fully implemented")
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
suite.backupOps = make(map[path.CategoryType]string)
var (
teamschats = []string{suite.m365.User.ID}
ins = idname.NewCache(map[string]string{suite.m365.User.ID: suite.m365.User.ID})
cats = []path.CategoryType{
path.ChatsCategory,
}
)
for _, set := range cats {
var (
sel = selectors.NewTeamsChatsBackup(teamschats)
scopes []selectors.TeamsChatsScope
)
switch set {
case path.ChatsCategory:
scopes = selTD.TeamsChatsBackupChatScope(sel)
}
sel.Include(scopes)
bop, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
require.NoError(t, err, clues.ToCore(err))
err = bop.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
bIDs := string(bop.Results.BackupID)
// sanity check, ensure we can find the backup and its details immediately
b, err := suite.dpnd.repo.Backup(ctx, string(bop.Results.BackupID))
require.NoError(t, err, "retrieving recent backup by ID")
require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")
_, b, errs := suite.dpnd.repo.GetBackupDetails(ctx, bIDs)
require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
require.Equal(t, bIDs, string(b.ID), "repo details matches results id")
suite.backupOps[set] = string(b.ID)
}
}
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_chats() {
runTeamsChatsListCmdTest(suite, path.ChatsCategory)
}
func runTeamsChatsListCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "list", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
assert.Contains(t, result, suite.backupOps[category])
}
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_singleID_chats() {
runTeamsChatsListSingleCmdTest(suite, path.ChatsCategory)
}
func runTeamsChatsListSingleCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
bID := suite.backupOps[category]
cmd := cliTD.StubRootCmd(
"backup", "list", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--backup", string(bID))
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
assert.Contains(t, result, bID)
}
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_badID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "list", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--backup", "smarfs")
cli.BuildCommandTree(cmd)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsDetailsCmd_chats() {
runTeamsChatsDetailsCmdTest(suite, path.ChatsCategory)
}
func runTeamsChatsDetailsCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
bID := suite.backupOps[category]
// fetch the details from the repo first
deets, _, errs := suite.dpnd.repo.GetBackupDetails(ctx, string(bID))
require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
require.Empty(t, errs.Recovered())
cmd := cliTD.StubRootCmd(
"backup", "details", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--"+flags.BackupFN, string(bID))
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
i := 0
foundFolders := 0
for _, ent := range deets.Entries {
// Skip folders as they don't mean anything to the end user.
if ent.Folder != nil {
foundFolders++
continue
}
suite.Run(fmt.Sprintf("detail %d", i), func() {
assert.Contains(suite.T(), result, ent.ShortRef)
})
i++
}
// We only back up the default folder for each category, so there should be at
// least that folder (we don't make details entries for prefix folders).
assert.GreaterOrEqual(t, foundFolders, 1)
}
// ---------------------------------------------------------------------------
// tests for deleting backups
// ---------------------------------------------------------------------------
type BackupDeleteTeamsChatsE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps [3]operations.BackupOperation
}
func TestBackupDeleteTeamsChatsE2ESuite(t *testing.T) {
suite.Run(t, &BackupDeleteTeamsChatsE2ESuite{
Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
func (suite *BackupDeleteTeamsChatsE2ESuite) SetupSuite() {
t := suite.T()
t.Skip("not fully implemented")
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
m365TeamsChatID := tconfig.M365TeamID(t)
teamschats := []string{m365TeamsChatID}
// some tests require an existing backup
sel := selectors.NewTeamsChatsBackup(teamschats)
sel.Include(selTD.TeamsChatsBackupChatScope(sel))
for i := 0; i < cap(suite.backupOps); i++ {
backupOp, err := suite.dpnd.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, err, clues.ToCore(err))
suite.backupOps[i] = backupOp
err = suite.backupOps[i].Run(ctx)
require.NoError(t, err, clues.ToCore(err))
}
}
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--"+flags.BackupIDsFN,
fmt.Sprintf("%s,%s",
string(suite.backupOps[0].Results.BackupID),
string(suite.backupOps[1].Results.BackupID)))
cli.BuildCommandTree(cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd(
"backup", "details", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--backups", string(suite.backupOps[0].Results.BackupID))
cli.BuildCommandTree(cmd)
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_SingleID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--"+flags.BackupFN,
string(suite.backupOps[2].Results.BackupID))
cli.BuildCommandTree(cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd(
"backup", "details", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--backup", string(suite.backupOps[2].Results.BackupID))
cli.BuildCommandTree(cmd)
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_UnknownID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--"+flags.BackupIDsFN, uuid.NewString())
cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_NoBackupID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
// an empty backup ID set should error since no IDs were provided
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
func buildTeamsChatsBackupCmd(
ctx context.Context,
configFile, resource, category string,
recorder *strings.Builder,
) (*cobra.Command, context.Context) {
cmd := cliTD.StubRootCmd(
"backup", "create", "chats",
"--"+flags.ConfigFileFN, configFile,
"--"+flags.UserFN, resource,
"--"+flags.CategoryDataFN, category)
cli.BuildCommandTree(cmd)
cmd.SetOut(recorder)
return cmd, print.SetRootCmd(ctx, cmd)
}

View File

@ -1,248 +0,0 @@
package backup
import (
"testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control"
)
type TeamsChatsUnitSuite struct {
tester.Suite
}
func TestTeamsChatsUnitSuite(t *testing.T) {
suite.Run(t, &TeamsChatsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *TeamsChatsUnitSuite) TestAddTeamsChatsCommands() {
expectUse := teamschatsServiceCommand
table := []struct {
name string
use string
expectUse string
expectShort string
expectRunE func(*cobra.Command, []string) error
}{
{
name: "create teamschats",
use: createCommand,
expectUse: expectUse + " " + teamschatsServiceCommandCreateUseSuffix,
expectShort: teamschatsCreateCmd().Short,
expectRunE: createTeamsChatsCmd,
},
{
name: "list teamschats",
use: listCommand,
expectUse: expectUse,
expectShort: teamschatsListCmd().Short,
expectRunE: listTeamsChatsCmd,
},
{
name: "details teamschats",
use: detailsCommand,
expectUse: expectUse + " " + teamschatsServiceCommandDetailsUseSuffix,
expectShort: teamschatsDetailsCmd().Short,
expectRunE: detailsTeamsChatsCmd,
},
{
name: "delete teamschats",
use: deleteCommand,
expectUse: expectUse + " " + teamschatsServiceCommandDeleteUseSuffix,
expectShort: teamschatsDeleteCmd().Short,
expectRunE: deleteTeamsChatsCmd,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
cmd := &cobra.Command{Use: test.use}
c := addTeamsChatsCommands(cmd)
require.NotNil(t, c)
cmds := cmd.Commands()
require.Len(t, cmds, 1)
child := cmds[0]
assert.Equal(t, test.expectUse, child.Use)
assert.Equal(t, test.expectShort, child.Short)
tester.AreSameFunc(t, test.expectRunE, child.RunE)
})
}
}
func (suite *TeamsChatsUnitSuite) TestValidateTeamsChatsBackupCreateFlags() {
table := []struct {
name string
cats []string
expect assert.ErrorAssertionFunc
}{
{
name: "none",
cats: []string{},
expect: assert.NoError,
},
{
name: "chats",
cats: []string{flags.DataChats},
expect: assert.NoError,
},
{
name: "all allowed",
cats: []string{
flags.DataChats,
},
expect: assert.NoError,
},
{
name: "bad inputs",
cats: []string{"foo"},
expect: assert.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
err := validateTeamsChatsBackupCreateFlags([]string{"*"}, test.cats)
test.expect(suite.T(), err, clues.ToCore(err))
})
}
}
func (suite *TeamsChatsUnitSuite) TestBackupCreateFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: createCommand},
addTeamsChatsCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
teamschatsServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.UserFN, flagsTD.FlgInputs(flagsTD.UsersInput),
"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.TeamsChatsCategoryDataInput),
},
flagsTD.PreparedGenericBackupFlags(),
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
opts := utils.MakeTeamsChatsOpts(cmd)
co := utils.Control()
backupOpts := utils.ParseBackupOptions()
// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
// restore flags are switched over too and we no longer parse flags beyond
// connection info into control.Options.
assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)
assert.Equal(t, control.FailFast, co.FailureHandling)
assert.True(t, co.ToggleFeatures.DisableIncrementals)
assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
assert.ElementsMatch(t, flagsTD.UsersInput, opts.Users)
flagsTD.AssertGenericBackupFlags(t, cmd)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}
func (suite *TeamsChatsUnitSuite) TestBackupListFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: listCommand},
addTeamsChatsCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
teamschatsServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
},
flagsTD.PreparedBackupListFlags(),
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
flagsTD.AssertBackupListFlags(t, cmd)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}
func (suite *TeamsChatsUnitSuite) TestBackupDetailsFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: detailsCommand},
addTeamsChatsCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
teamschatsServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.SkipReduceFN,
},
flagsTD.PreparedTeamsChatsFlags(),
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
co := utils.Control()
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
assert.True(t, co.SkipReduce)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
flagsTD.AssertTeamsChatsFlags(t, cmd)
}
func (suite *TeamsChatsUnitSuite) TestBackupDeleteFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: deleteCommand},
addTeamsChatsCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
teamschatsServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
},
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}

View File

@ -7,6 +7,7 @@ import (
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/selectors"
)
// called by export.go to map subcommands to provider-specific handling.
@ -50,13 +51,7 @@ corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd
# Export all files and folders in folder "Documents/Finance Reports" that were created before 2020 to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd \
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00
# Export all posts from a conversation with topic "hello world" from group mailbox's last backup to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world"
# Export post with ID 98765abcdef from a conversation from group mailbox's last backup to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world" --post 98765abcdef`
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
)
// `corso export groups [<flag>...] <destination>`
@ -98,6 +93,10 @@ func exportGroupsCmd(cmd *cobra.Command, args []string) error {
sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)
utils.FilterGroupsRestoreInfoSelectors(sel, opts)
// TODO(pandeyabs): Exclude conversations from export since they are not
// supported yet. https://github.com/alcionai/corso/issues/4822
sel.Exclude(sel.Conversation(selectors.Any()))
acceptedGroupsFormatTypes := []string{
string(control.DefaultFormat),
string(control.JSONFormat),

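The guard above relies on excludes taking precedence over includes in corso selectors: conversation items are carved back out even when the include spans the whole group. Restated in isolation (ctx and opts as in the surrounding function):

sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)

// excludes trump includes, so conversation posts never reach the export
sel.Exclude(sel.Conversation(selectors.Any()))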
View File

@ -45,27 +45,7 @@ corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
# Export all files in the "Documents" library to the current directory.
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--library Documents --folder "Display Templates/Style Sheets" .
# Export lists by their name(s)
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list "list-name-1,list-name-2" .
# Export lists created after a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-after 2024-01-01T12:23:34 .
# Export lists created before a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-before 2024-01-01T12:23:34 .
# Export lists modified before a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-before 2024-01-01T12:23:34 .
# Export lists modified after a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-after 2024-01-01T12:23:34 .`
--library Documents --folder "Display Templates/Style Sheets" .`
)
// `corso export sharepoint [<flag>...] <destination>`

View File

@ -28,6 +28,13 @@ func AddFilesystemFlags(cmd *cobra.Command) {
"",
"path to local or network storage")
cobra.CheckErr(cmd.MarkFlagRequired(FilesystemPathFN))
fs.BoolVar(
&SucceedIfExistsFV,
SucceedIfExistsFN,
false,
"Exit with success if the repo has already been initialized.")
cobra.CheckErr(fs.MarkHidden("succeed-if-exists"))
}
func FilesystemFlagOverrides(cmd *cobra.Command) map[string]string {

View File

@ -12,8 +12,9 @@ const (
AWSSessionTokenFN = "aws-session-token"
// Corso Flags
PassphraseFN = "passphrase"
NewPassphraseFN = "new-passphrase"
PassphraseFN = "passphrase"
NewPassphraseFN = "new-passphrase"
SucceedIfExistsFN = "succeed-if-exists"
)
var (
@ -24,6 +25,7 @@ var (
AWSSessionTokenFV string
PassphraseFV string
NewPhasephraseFV string
SucceedIfExistsFV bool
)
// AddMultipleBackupIDsFlag adds the --backups flag.

View File

@ -38,6 +38,11 @@ func AddS3BucketFlags(cmd *cobra.Command) {
fs.StringVar(&EndpointFV, EndpointFN, "", "S3 service endpoint.")
fs.BoolVar(&DoNotUseTLSFV, DoNotUseTLSFN, false, "Disable TLS (HTTPS)")
fs.BoolVar(&DoNotVerifyTLSFV, DoNotVerifyTLSFN, false, "Disable TLS (HTTPS) certificate verification.")
// In general, we don't want to expose this flag to users and have them mistake it
// for a broad-scale idempotency solution. We can un-hide it later if the need arises.
fs.BoolVar(&SucceedIfExistsFV, SucceedIfExistsFN, false, "Exit with success if the repo has already been initialized.")
cobra.CheckErr(fs.MarkHidden("succeed-if-exists"))
}
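Marking a flag hidden only removes it from help output; it still parses normally. A standalone cobra sketch of that behavior (hypothetical command, not part of this change):

cmd := &cobra.Command{Use: "demo", Run: func(*cobra.Command, []string) {}}

var succeedIfExists bool
cmd.Flags().BoolVar(&succeedIfExists, "succeed-if-exists", false, "")
cobra.CheckErr(cmd.Flags().MarkHidden("succeed-if-exists"))

// `demo --help` omits the flag, but the CLI still accepts it
cmd.SetArgs([]string{"--succeed-if-exists"})
_ = cmd.Execute() // succeedIfExists is now true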
func S3FlagOverrides(cmd *cobra.Command) map[string]string {

View File

@ -18,6 +18,7 @@ const (
ListModifiedBeforeFN = "list-modified-before"
ListCreatedAfterFN = "list-created-after"
ListCreatedBeforeFN = "list-created-before"
AllowListsRestoreFN = "allow-lists-restore"
PageFolderFN = "page-folder"
PageFN = "page"
@ -34,6 +35,7 @@ var (
ListModifiedBeforeFV string
ListCreatedAfterFV string
ListCreatedBeforeFV string
AllowListsRestoreFV bool
PageFolderFV []string
PageFV []string
@ -99,6 +101,11 @@ func AddSharePointDetailsAndRestoreFlags(cmd *cobra.Command) {
&ListCreatedBeforeFV,
ListCreatedBeforeFN, "",
"Select lists created before this datetime.")
fs.BoolVar(
&AllowListsRestoreFV,
AllowListsRestoreFN, false,
"enables lists restore if provided")
cobra.CheckErr(fs.MarkHidden(AllowListsRestoreFN))
// pages

View File

@ -1,13 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
)
const (
DataChats = "chats"
)
func AddTeamsChatsDetailsAndRestoreFlags(cmd *cobra.Command) {
// TODO: add details flags
}

View File

@ -21,7 +21,6 @@ var (
ExchangeCategoryDataInput = []string{"email", "events", "contacts"}
SharepointCategoryDataInput = []string{"files", "lists", "pages"}
GroupsCategoryDataInput = []string{"files", "lists", "pages", "messages"}
TeamsChatsCategoryDataInput = []string{"chats"}
ChannelInput = []string{"channel1", "channel2"}
MessageInput = []string{"message1", "message2"}

View File

@ -1,25 +0,0 @@
package testdata
import (
"testing"
"github.com/spf13/cobra"
)
func PreparedTeamsChatsFlags() []string {
return []string{
// FIXME: populate when adding filters
// "--" + flags.ChatCreatedAfterFN, ChatCreatedAfterInput,
// "--" + flags.ChatCreatedBeforeFN, ChatCreatedBeforeInput,
// "--" + flags.ChatLastMessageAfterFN, ChatLastMessageAfterInput,
// "--" + flags.ChatLastMessageBeforeFN, ChatLastMessageBeforeInput,
}
}
func AssertTeamsChatsFlags(t *testing.T, cmd *cobra.Command) {
// FIXME: populate when adding filters
// assert.Equal(t, ChatCreatedAfterInput, flags.ChatCreatedAfterFV)
// assert.Equal(t, ChatCreatedBeforeInput, flags.ChatCreatedBeforeFV)
// assert.Equal(t, ChatLastMessageAfterInput, flags.ChatLastMessageAfterFV)
// assert.Equal(t, ChatLastMessageBeforeInput, flags.ChatLastMessageBeforeFV)
}

View File

@ -133,7 +133,7 @@ func Pretty(ctx context.Context, a any) {
return
}
printPrettyJSON(ctx, getRootCmd(ctx).ErrOrStderr(), a)
printPrettyJSON(getRootCmd(ctx).ErrOrStderr(), a)
}
// PrettyJSON prettifies and prints the value.
@ -143,7 +143,7 @@ func PrettyJSON(ctx context.Context, p minimumPrintabler) {
return
}
outputJSON(ctx, getRootCmd(ctx).ErrOrStderr(), p, outputAsJSONDebug)
outputJSON(getRootCmd(ctx).ErrOrStderr(), p, outputAsJSONDebug)
}
// out is the testable core of exported print funcs
@ -193,56 +193,56 @@ type minimumPrintabler interface {
// Item prints the printable, according to the caller's requested format.
func Item(ctx context.Context, p Printable) {
printItem(ctx, getRootCmd(ctx).OutOrStdout(), p)
printItem(getRootCmd(ctx).OutOrStdout(), p)
}
// printItem prints a single printable item,
// according to the caller's requested format.
func printItem(ctx context.Context, w io.Writer, p Printable) {
func printItem(w io.Writer, p Printable) {
if outputAsJSON || outputAsJSONDebug {
outputJSON(ctx, w, p, outputAsJSONDebug)
outputJSON(w, p, outputAsJSONDebug)
return
}
outputTable(ctx, w, []Printable{p})
outputTable(w, []Printable{p})
}
// ItemProperties prints the printable either as a single line or as json.
// The difference between this and Item is that this one does not print the ID.
func ItemProperties(ctx context.Context, p Printable) {
printItemProperties(ctx, getRootCmd(ctx).OutOrStdout(), p)
printItemProperties(getRootCmd(ctx).OutOrStdout(), p)
}
// printItemProperties prints a single printable item's properties,
// according to the caller's requested format.
func printItemProperties(ctx context.Context, w io.Writer, p Printable) {
func printItemProperties(w io.Writer, p Printable) {
if outputAsJSON || outputAsJSONDebug {
outputJSON(ctx, w, p, outputAsJSONDebug)
outputJSON(w, p, outputAsJSONDebug)
return
}
outputOneLine(ctx, w, []Printable{p})
outputOneLine(w, []Printable{p})
}
// All prints the slice of printable items,
// according to the caller's requested format.
func All(ctx context.Context, ps ...Printable) {
printAll(ctx, getRootCmd(ctx).OutOrStdout(), ps)
printAll(getRootCmd(ctx).OutOrStdout(), ps)
}
// printAll prints the slice of printable items,
// according to the caller's requested format.
func printAll(ctx context.Context, w io.Writer, ps []Printable) {
func printAll(w io.Writer, ps []Printable) {
if len(ps) == 0 {
return
}
if outputAsJSON || outputAsJSONDebug {
outputJSONArr(ctx, w, ps, outputAsJSONDebug)
outputJSONArr(w, ps, outputAsJSONDebug)
return
}
outputTable(ctx, w, ps)
outputTable(w, ps)
}
// ------------------------------------------------------------------------------------------
@ -252,11 +252,11 @@ func printAll(ctx context.Context, w io.Writer, ps []Printable) {
// Table writes the printables in a tabular format. Takes headers from
// the 0th printable only.
func Table(ctx context.Context, ps []Printable) {
outputTable(ctx, getRootCmd(ctx).OutOrStdout(), ps)
outputTable(getRootCmd(ctx).OutOrStdout(), ps)
}
// writes the list of printable structs to w as a table
func outputTable(ctx context.Context, w io.Writer, ps []Printable) {
func outputTable(w io.Writer, ps []Printable) {
t := table.Table{
Headers: ps[0].Headers(false),
Rows: [][]string{},
@ -266,9 +266,6 @@ func outputTable(ctx context.Context, w io.Writer, ps []Printable) {
t.Rows = append(t.Rows, p.Values(false))
}
// observe bars needs to be flushed before printing
observe.Flush(ctx)
_ = t.WriteTable(
w,
&table.Config{
@ -282,20 +279,20 @@ func outputTable(ctx context.Context, w io.Writer, ps []Printable) {
// JSON
// ------------------------------------------------------------------------------------------
func outputJSON(ctx context.Context, w io.Writer, p minimumPrintabler, debug bool) {
func outputJSON(w io.Writer, p minimumPrintabler, debug bool) {
if debug {
printJSON(ctx, w, p)
printJSON(w, p)
return
}
if debug {
printJSON(ctx, w, p)
printJSON(w, p)
} else {
printJSON(ctx, w, p.MinimumPrintable())
printJSON(w, p.MinimumPrintable())
}
}
func outputJSONArr(ctx context.Context, w io.Writer, ps []Printable, debug bool) {
func outputJSONArr(w io.Writer, ps []Printable, debug bool) {
sl := make([]any, 0, len(ps))
for _, p := range ps {
@ -306,14 +303,11 @@ func outputJSONArr(ctx context.Context, w io.Writer, ps []Printable, debug bool)
}
}
printJSON(ctx, w, sl)
printJSON(w, sl)
}
// writes the list of printable structs to w as json.
func printJSON(ctx context.Context, w io.Writer, a any) {
// observe bars needs to be flushed before printing
observe.Flush(ctx)
func printJSON(w io.Writer, a any) {
bs, err := json.Marshal(a)
if err != nil {
fmt.Fprintf(w, "error formatting results to json: %v\n", err)
@ -324,10 +318,7 @@ func printJSON(ctx context.Context, w io.Writer, a any) {
}
// writes the list of printable structs to w as prettified json.
func printPrettyJSON(ctx context.Context, w io.Writer, a any) {
// observe bars needs to be flushed before printing
observe.Flush(ctx)
func printPrettyJSON(w io.Writer, a any) {
bs, err := json.MarshalIndent(a, "", " ")
if err != nil {
fmt.Fprintf(w, "error formatting results to json: %v\n", err)
@ -343,10 +334,7 @@ func printPrettyJSON(ctx context.Context, w io.Writer, a any) {
// Output in the following format:
// Bytes Uploaded: 401 kB | Items Uploaded: 59 | Items Skipped: 0 | Errors: 0
func outputOneLine(ctx context.Context, w io.Writer, ps []Printable) {
// observe bars needs to be flushed before printing
observe.Flush(ctx)
func outputOneLine(w io.Writer, ps []Printable) {
headers := ps[0].Headers(true)
rows := [][]string{}

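With the context parameter removed, these print helpers no longer flush progress bars themselves; presumably any caller that renders observe bars now flushes before printing. A hypothetical call site under that assumption:

// flush in-flight progress bars so table output doesn't interleave
observe.Flush(ctx)
print.Item(ctx, p)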
View File

@ -2,6 +2,7 @@ package repo
import (
"github.com/alcionai/clues"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
@ -109,6 +110,10 @@ func initFilesystemCmd(cmd *cobra.Command, args []string) error {
ric := repository.InitConfig{RetentionOpts: retentionOpts}
if err = r.Initialize(ctx, ric); err != nil {
if flags.SucceedIfExistsFV && errors.Is(err, repository.ErrorRepoAlreadyExists) {
return nil
}
return Only(ctx, clues.Stack(ErrInitializingRepo, err))
}
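Combined with the hidden flag, this makes repo init idempotent on request. A hypothetical invocation (the filesystem path flag name is assumed):

# a second init of the same repo exits 0 instead of erroring
corso repo init filesystem --path /backups --succeed-if-exists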

View File

@ -5,6 +5,7 @@ import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
@ -81,9 +82,9 @@ func (suite *FilesystemE2ESuite) TestInitFilesystemCmd() {
err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// noop
// a second initialization should result in an error
err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
assert.ErrorIs(t, err, repository.ErrorRepoAlreadyExists, clues.ToCore(err))
})
}
}

View File

@ -4,6 +4,7 @@ import (
"strings"
"github.com/alcionai/clues"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
@ -131,6 +132,10 @@ func initS3Cmd(cmd *cobra.Command, args []string) error {
ric := repository.InitConfig{RetentionOpts: retentionOpts}
if err = r.Initialize(ctx, ric); err != nil {
if flags.SucceedIfExistsFV && errors.Is(err, repository.ErrorRepoAlreadyExists) {
return nil
}
return Only(ctx, clues.Stack(ErrInitializingRepo, err))
}

View File

@ -89,9 +89,9 @@ func (suite *S3E2ESuite) TestInitS3Cmd() {
err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// noop
// a second initialization should result in an error
err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
assert.ErrorIs(t, err, repository.ErrorRepoAlreadyExists, clues.ToCore(err))
})
}
}
@ -116,7 +116,8 @@ func (suite *S3E2ESuite) TestInitMultipleTimes() {
"repo", "init", "s3",
"--"+flags.ConfigFileFN, configFP,
"--bucket", cfg.Bucket,
"--prefix", cfg.Prefix)
"--prefix", cfg.Prefix,
"--succeed-if-exists")
cli.BuildCommandTree(cmd)
// run the command

View File

@ -6,6 +6,7 @@ import (
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/dttm"
"github.com/alcionai/corso/src/pkg/selectors"
)
// called by restore.go to map subcommands to provider-specific handling.
@ -50,27 +51,7 @@ corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
# Restore all files in the "Documents" library.
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--library Documents --folder "Display Templates/Style Sheets"
# Restore lists by their name(s)
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list "list-name-1,list-name-2"
# Restore lists created after a given time
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-after 2024-01-01T12:23:34
# Restore lists created before a given time
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-before 2024-01-01T12:23:34
# Restore lists modified before a given time
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-before 2024-01-01T12:23:34
# Restore lists modified after a given time
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-after 2024-01-01T12:23:34`
--library Documents --folder "Display Templates/Style Sheets" `
)
// `corso restore sharepoint [<flag>...]`
@ -106,6 +87,11 @@ func restoreSharePointCmd(cmd *cobra.Command, args []string) error {
sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
utils.FilterSharePointRestoreInfoSelectors(sel, opts)
if !opts.AllowListsRestore {
// Exclude lists from restore since they are not supported yet.
sel.Exclude(sel.Lists(selectors.Any()))
}
return runRestore(
ctx,
cmd,

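The AllowListsRestore gate carves lists out of the restore by adding an exclusion scope. A minimal sketch of the pattern, assuming (as the selector usage elsewhere in this diff suggests) that Exclude scopes take precedence over Include scopes; the constructor call and siteID are illustrative:

sel := selectors.NewSharePointRestore([]string{siteID}) // constructor assumed
sel.Include(sel.LibraryFolders(selectors.Any()))        // broad include
if !opts.AllowListsRestore {
	// lists aren't restorable yet, so carve them out even when included above
	sel.Exclude(sel.Lists(selectors.Any()))
}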
View File

@ -103,6 +103,7 @@ func (suite *FlagUnitSuite) TestAddS3BucketFlags() {
assert.Equal(t, "prefix1", flags.PrefixFV, flags.PrefixFN)
assert.True(t, flags.DoNotUseTLSFV, flags.DoNotUseTLSFN)
assert.True(t, flags.DoNotVerifyTLSFV, flags.DoNotVerifyTLSFN)
assert.True(t, flags.SucceedIfExistsFV, flags.SucceedIfExistsFN)
},
}
@ -115,6 +116,7 @@ func (suite *FlagUnitSuite) TestAddS3BucketFlags() {
"--" + flags.PrefixFN, "prefix1",
"--" + flags.DoNotUseTLSFN,
"--" + flags.DoNotVerifyTLSFN,
"--" + flags.SucceedIfExistsFN,
})
err := cmd.Execute()
@ -128,6 +130,7 @@ func (suite *FlagUnitSuite) TestFilesystemFlags() {
Use: "test",
Run: func(cmd *cobra.Command, args []string) {
assert.Equal(t, "/tmp/test", flags.FilesystemPathFV, flags.FilesystemPathFN)
assert.True(t, flags.SucceedIfExistsFV, flags.SucceedIfExistsFN)
assert.Equal(t, "tenantID", flags.AzureClientTenantFV, flags.AzureClientTenantFN)
assert.Equal(t, "clientID", flags.AzureClientIDFV, flags.AzureClientIDFN)
assert.Equal(t, "secret", flags.AzureClientSecretFV, flags.AzureClientSecretFN)
@ -140,6 +143,7 @@ func (suite *FlagUnitSuite) TestFilesystemFlags() {
cmd.SetArgs([]string{
"test",
"--" + flags.FilesystemPathFN, "/tmp/test",
"--" + flags.SucceedIfExistsFN,
"--" + flags.AzureClientIDFN, "clientID",
"--" + flags.AzureClientTenantFN, "tenantID",
"--" + flags.AzureClientSecretFN, "secret",

View File

@ -266,14 +266,9 @@ func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *se
opts.Conversations = selectors.Any()
}
// if no post is specified, select all posts in the conversation
if convPosts == 0 {
opts.Posts = selectors.Any()
}
// if no post is specified, only select conversations;
// otherwise, look for conv/post pairs
if convs == 0 {
// otherwise, look for channel/message pairs
if chanMsgs == 0 {
sel.Include(sel.Conversation(opts.Conversations))
} else {
sel.Include(sel.ConversationPosts(opts.Conversations, opts.Posts))

View File

@ -30,6 +30,7 @@ type SharePointOpts struct {
ListModifiedBefore string
ListCreatedBefore string
ListCreatedAfter string
AllowListsRestore bool
PageFolder []string
Page []string
@ -81,6 +82,7 @@ func MakeSharePointOpts(cmd *cobra.Command) SharePointOpts {
ListModifiedBefore: flags.ListModifiedBeforeFV,
ListCreatedAfter: flags.ListCreatedAfterFV,
ListCreatedBefore: flags.ListCreatedBeforeFV,
AllowListsRestore: flags.AllowListsRestoreFV,
Page: flags.PageFV,
PageFolder: flags.PageFolderFV,
@ -104,9 +106,7 @@ func SharePointAllowedCategories() map[string]struct{} {
func AddCategories(sel *selectors.SharePointBackup, cats []string) *selectors.SharePointBackup {
if len(cats) == 0 {
// [TODO](hitesh): enable lists without requiring explicit invocation via the --data flag
// sel.Include(sel.LibraryFolders(selectors.Any()), sel.Lists(selectors.Any()))
sel.Include(sel.LibraryFolders(selectors.Any()))
sel.Include(sel.LibraryFolders(selectors.Any()), sel.Lists(selectors.Any()))
}
for _, d := range cats {

View File

@ -420,7 +420,7 @@ func (suite *SharePointUtilsSuite) TestAddSharepointCategories() {
{
name: "none",
cats: []string{},
expectScopeLen: 1,
expectScopeLen: 2,
},
{
name: "libraries",

View File

@ -1,101 +0,0 @@
package utils
import (
"context"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/pkg/selectors"
)
type TeamsChatsOpts struct {
Users []string
ExportCfg ExportCfgOpts
Populated flags.PopulatedFlags
}
func TeamsChatsAllowedCategories() map[string]struct{} {
return map[string]struct{}{
flags.DataChats: {},
}
}
func AddTeamsChatsCategories(sel *selectors.TeamsChatsBackup, cats []string) *selectors.TeamsChatsBackup {
if len(cats) == 0 {
sel.Include(sel.AllData())
}
for _, d := range cats {
switch d {
case flags.DataChats:
sel.Include(sel.Chats(selectors.Any()))
}
}
return sel
}
func MakeTeamsChatsOpts(cmd *cobra.Command) TeamsChatsOpts {
return TeamsChatsOpts{
Users: flags.UserFV,
ExportCfg: makeExportCfgOpts(cmd),
// populated contains the list of flags that appear in the
// command, according to pflags. Use this to differentiate
// between an "empty" and a "missing" value.
Populated: flags.GetPopulatedFlags(cmd),
}
}
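The Populated field above exists to tell an explicitly supplied empty value apart from an omitted flag. pflag records that distinction as the flag's Changed bit; a minimal sketch of reading it (the flag name is illustrative):

func userFlagState(cmd *cobra.Command) (value string, present bool) {
	f := cmd.Flags().Lookup("user") // flag name is illustrative
	if f == nil {
		return "", false
	}
	// Changed is true only when the flag appeared on the command line,
	// so ("", true) means the user passed --user "" explicitly.
	return f.Value.String(), f.Changed
}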
// ValidateTeamsChatsRestoreFlags checks common flags for correctness and interdependencies
func ValidateTeamsChatsRestoreFlags(backupID string, opts TeamsChatsOpts, isRestore bool) error {
if len(backupID) == 0 {
return clues.New("a backup ID is required")
}
// restore isn't currently supported
if isRestore {
return clues.New("restore not supported")
}
return nil
}
// AddTeamsChatsFilter adds the scope of the provided values to the selector's
// filter set
func AddTeamsChatsFilter(
sel *selectors.TeamsChatsRestore,
v string,
f func(string) []selectors.TeamsChatsScope,
) {
if len(v) == 0 {
return
}
sel.Filter(f(v))
}
// IncludeTeamsChatsRestoreDataSelectors builds the common data-selector
// inclusions for teamschats commands.
func IncludeTeamsChatsRestoreDataSelectors(ctx context.Context, opts TeamsChatsOpts) *selectors.TeamsChatsRestore {
users := opts.Users
if len(opts.Users) == 0 {
users = selectors.Any()
}
return selectors.NewTeamsChatsRestore(users)
}
// FilterTeamsChatsRestoreInfoSelectors builds the common info-selector filters.
func FilterTeamsChatsRestoreInfoSelectors(
sel *selectors.TeamsChatsRestore,
opts TeamsChatsOpts,
) {
// TODO: populate when adding filters
}

View File

@ -6,6 +6,12 @@ Param (
[Parameter(Mandatory = $False, HelpMessage = "Site for which to delete folders in SharePoint")]
[String]$Site,
[Parameter(Mandatory = $False, HelpMessage = "Exchange Admin email")]
[String]$AdminUser = $ENV:M365_TENANT_ADMIN_USER,
[Parameter(Mandatory = $False, HelpMessage = "Exchange Admin password")]
[String]$AdminPwd = $ENV:M365_TENANT_ADMIN_PASSWORD,
[Parameter(Mandatory = $False, HelpMessage = "Document library root. Can add multiple comma-separated values")]
[String[]]$LibraryNameList = @(),
@ -16,16 +22,7 @@ Param (
[String[]]$FolderPrefixPurgeList,
[Parameter(Mandatory = $False, HelpMessage = "Delete document libraries with this prefix")]
[String[]]$LibraryPrefixDeleteList = @(),
[Parameter(Mandatory = $False, HelpMessage = "Tenant domain")]
[String]$TenantDomain = $ENV:TENANT_DOMAIN,
[Parameter(Mandatory = $False, HelpMessage = "Azure ClientId")]
[String]$ClientId = $ENV:AZURE_CLIENT_ID,
[Parameter(Mandatory = $False, HelpMessage = "Azure AppCert")]
[String]$AppCert = $ENV:AZURE_APP_CERT
[String[]]$LibraryPrefixDeleteList = @()
)
Set-StrictMode -Version 2.0
@ -40,7 +37,7 @@ function Get-TimestampFromFolderName {
$name = $folder.Name
#fallback on folder create time
[datetime]$timestamp = $folder.TimeCreated
try {
@ -69,7 +66,7 @@ function Get-TimestampFromListName {
$name = $list.Title
#fallback on list create time
[datetime]$timestamp = $list.LastItemUserModifiedDate
try {
@ -109,9 +106,8 @@ function Purge-Library {
Write-Host "`nPurging library: $LibraryName"
$foldersToPurge = @()
$folders = Get-PnPFolderItem -FolderSiteRelativeUrl $LibraryName -ItemType Folder
Write-Host "`nFolders: $folders"
foreach ($f in $folders) {
$folderName = $f.Name
$createTime = Get-TimestampFromFolderName -Folder $f
@ -163,7 +159,7 @@ function Delete-LibraryByPrefix {
Write-Host "`nDeleting library: $LibraryNamePrefix"
$listsToDelete = @()
$lists = Get-PnPList
foreach ($l in $lists) {
$listName = $l.Title
@ -187,7 +183,7 @@ function Delete-LibraryByPrefix {
Write-Host "Deleting list: "$l.Title
try {
$listInfo = Get-PnPList -Identity $l.Id | Select-Object -Property Hidden
# Check if the 'hidden' property is true
if ($listInfo.Hidden) {
Write-Host "List: $($l.Title) is hidden. Skipping..."
@ -213,8 +209,8 @@ if (-not (Get-Module -ListAvailable -Name PnP.PowerShell)) {
}
if ([string]::IsNullOrEmpty($ClientId) -or [string]::IsNullOrEmpty($AppCert)) {
Write-Host "ClientId and AppCert required as arguments or environment variables."
if ([string]::IsNullOrEmpty($AdminUser) -or [string]::IsNullOrEmpty($AdminPwd)) {
Write-Host "Admin user name and password required as arguments or environment variables."
Exit
}
@ -255,8 +251,12 @@ else {
Exit
}
$password = convertto-securestring -String $AdminPwd -AsPlainText -Force
$cred = New-Object -TypeName System.Management.Automation.PSCredential -ArgumentList $AdminUser, $password
Write-Host "`nAuthenticating and connecting to $SiteUrl"
Connect-PnPOnline -Url $siteUrl -ClientId $ClientId -CertificateBase64Encoded $AppCert -Tenant $TenantDomain
Connect-PnPOnline -Url $siteUrl -Credential $cred
Write-Host "Connected to $siteUrl`n"
# ensure that there are no unexpanded entries in the list of parameters

View File

@ -5,7 +5,6 @@ import (
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cmd/sanity_test/common"
"github.com/alcionai/corso/src/internal/common/ptr"
@ -21,20 +20,19 @@ const (
// this increases the chance that we'll run into a race collision with
// the cleanup script. Sometimes that's okay (deleting old data that
// isn't scrutinized in the test), other times it's not. We mark whether
// that's okay to do or not by specifying the folders being
// scrutinized for the test. Any errors within those folders should cause
// a fatal exit. Errors outside of those folders get ignored.
// that's okay to do or not by specifying the folder that's being
// scrutinized for the test. Any errors within that folder should cause
// a fatal exit. Errors outside of that folder get ignored.
//
// since we're using folder names, mustPopulateFolders will
// since we're using folder names, requireNoErrorsWithinFolderName will
// work best (ie: have the fewest collisions/side-effects) if the folder
// names are very specific. Standard sanity tests should include timestamps,
// name is very specific. Standard sanity tests should include timestamps,
// which should help ensure that. Be warned if you try to use it with
// a more generic name: unintended effects could occur.
func populateSanitree(
ctx context.Context,
ac api.Client,
driveID string,
mustPopulateFolders []string,
driveID, requireNoErrorsWithinFolderName string,
) *common.Sanitree[models.DriveItemable, models.DriveItemable] {
common.Infof(ctx, "building sanitree for drive: %s", driveID)
@ -58,8 +56,8 @@ func populateSanitree(
ac,
driveID,
stree.Name+"/",
mustPopulateFolders,
slices.Contains(mustPopulateFolders, rootName),
requireNoErrorsWithinFolderName,
rootName == requireNoErrorsWithinFolderName,
stree)
return stree
@ -68,9 +66,7 @@ func populateSanitree(
func recursivelyBuildTree(
ctx context.Context,
ac api.Client,
driveID string,
location string,
mustPopulateFolders []string,
driveID, location, requireNoErrorsWithinFolderName string,
isChildOfFolderRequiringNoErrors bool,
stree *common.Sanitree[models.DriveItemable, models.DriveItemable],
) {
@ -84,9 +80,9 @@ func recursivelyBuildTree(
common.Infof(
ctx,
"ignoring error getting children in directory %q because it is not within directory set %v\nerror: %s\n%+v",
"ignoring error getting children in directory %q because it is not within directory %q\nerror: %s\n%+v",
location,
mustPopulateFolders,
requireNoErrorsWithinFolderName,
err.Error(),
clues.ToCore(err))
@ -103,12 +99,11 @@ func recursivelyBuildTree(
// currently we don't restore blank folders.
// skip permission check for empty folders
if ptr.Val(driveItem.GetFolder().GetChildCount()) == 0 {
common.Infof(ctx, "skipped empty folder: %s%s", location, itemName)
common.Infof(ctx, "skipped empty folder: %s/%s", location, itemName)
continue
}
cannotAllowErrors := isChildOfFolderRequiringNoErrors ||
slices.Contains(mustPopulateFolders, itemName)
cannotAllowErrors := isChildOfFolderRequiringNoErrors || itemName == requireNoErrorsWithinFolderName
branch := &common.Sanitree[models.DriveItemable, models.DriveItemable]{
Parent: stree,
@ -129,7 +124,7 @@ func recursivelyBuildTree(
ac,
driveID,
location+branch.Name+"/",
mustPopulateFolders,
requireNoErrorsWithinFolderName,
cannotAllowErrors,
branch)
}

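The error-tolerance rule in this change propagates downward through the tree: a node forbids errors when it is the scrutinized folder itself, or when any ancestor was. Condensed to just that rule (parameter names shortened for the sketch):

// a child inherits strictness from its parent, and becomes strict
// itself when it matches the folder under scrutiny
func forbidsErrors(parentForbids bool, itemName, scrutinizedFolder string) bool {
	return parentForbids || itemName == scrutinizedFolder
}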
View File

@ -32,7 +32,7 @@ func CheckExport(
ctx,
ac,
driveID,
[]string{envs.SourceContainer})
envs.RestoreContainer)
sourceTree, ok := root.Children[envs.SourceContainer]
common.Assert(

View File

@ -45,14 +45,7 @@ func CheckRestoration(
"drive_id", driveID,
"drive_name", driveName)
root := populateSanitree(
ctx,
ac,
driveID,
[]string{
envs.SourceContainer,
envs.RestoreContainer,
})
root := populateSanitree(ctx, ac, driveID, envs.RestoreContainer)
sourceTree, ok := root.Children[envs.SourceContainer]
common.Assert(

View File

@ -3,7 +3,7 @@ module github.com/alcionai/corso/src
go 1.21
replace (
github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4
github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe
// Alcion fork removes the validation of email addresses as we might get incomplete email addresses
github.com/xhit/go-simple-mail/v2 v2.16.0 => github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a
@ -11,7 +11,7 @@ replace (
require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1
github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c
github.com/alcionai/clues v0.0.0-20231222002615-24ee69e6ecc2
github.com/armon/go-metrics v0.4.1
github.com/aws/aws-xray-sdk-go v1.8.3
github.com/cenkalti/backoff/v4 v4.2.1
@ -51,7 +51,7 @@ require (
)
require (
github.com/arran4/golang-ical v0.2.4
github.com/arran4/golang-ical v0.2.3
github.com/emersion/go-vcard v0.0.0-20230815062825-8fda7d206ec9
jaytaylor.com/html2text v0.0.0-20230321000545-74c2419ad056
)
@ -121,7 +121,7 @@ require (
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
github.com/microsoft/kiota-serialization-text-go v1.0.0
github.com/minio/md5-simd v1.1.2 // indirect
github.com/minio/minio-go/v7 v7.0.67
github.com/minio/minio-go/v7 v7.0.66
github.com/minio/sha256-simd v1.0.1 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect

View File

@ -19,12 +19,12 @@ github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1o
github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo=
github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c h1:QtARFaqYKtGjmEejr07KFf2iyfCAdTxYGRAAFveLjFA=
github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c/go.mod h1:1YJwJy3W6GGsC2UiDAEWABUjgvT8OZHjKs8OoaXeKbw=
github.com/alcionai/clues v0.0.0-20231222002615-24ee69e6ecc2 h1:Oiz7puLziTpDUsEoiZMNor3j6um8RSvPOSIf4heGgTk=
github.com/alcionai/clues v0.0.0-20231222002615-24ee69e6ecc2/go.mod h1:1YJwJy3W6GGsC2UiDAEWABUjgvT8OZHjKs8OoaXeKbw=
github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a h1:4nhM0NM1qtUT1s55rQ+D0Xw1Re5mIU9/crjEl6KdE+k=
github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a/go.mod h1:b7P5ygho6SYE+VIqpxA6QkYfv4teeyG4MKqB3utRu98=
github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4 h1:3YZ70H3mkUgwiHLiNvukrqh2awRgfl1RAkbV0IoUqqk=
github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4/go.mod h1:QFRSOUQzZfKE3hKVBHP7hxOn5WyrEmdBtfN5wkib/eA=
github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe h1:nLS5pxhm04Jz4+qeipNlxdyPGxqNWpBu8UGkRYpWoIw=
github.com/alcionai/kopia v0.12.2-0.20240116215733-ec3d100029fe/go.mod h1:QFRSOUQzZfKE3hKVBHP7hxOn5WyrEmdBtfN5wkib/eA=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@ -35,8 +35,8 @@ github.com/andybalholm/brotli v1.0.6 h1:Yf9fFpf49Zrxb9NlQaluyE92/+X7UVHlhMNJN2sx
github.com/andybalholm/brotli v1.0.6/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
github.com/arran4/golang-ical v0.2.4 h1:0/rTXn2qqEekLKec3SzRRy+z7pCLtniMb0KD/dPogUo=
github.com/arran4/golang-ical v0.2.4/go.mod h1:RqMuPGmwRRwjkb07hmm+JBqcWa1vF1LvVmPtSZN2OhQ=
github.com/arran4/golang-ical v0.2.3 h1:C4Vj7+BjJBIrAJhHgi6Ku+XUkQVugRq4re5Cqj5QVdE=
github.com/arran4/golang-ical v0.2.3/go.mod h1:RqMuPGmwRRwjkb07hmm+JBqcWa1vF1LvVmPtSZN2OhQ=
github.com/aws/aws-sdk-go v1.48.6 h1:hnL/TE3eRigirDLrdRE9AWE1ALZSVLAsC4wK8TGsMqk=
github.com/aws/aws-sdk-go v1.48.6/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk=
github.com/aws/aws-xray-sdk-go v1.8.3 h1:S8GdgVncBRhzbNnNUgTPwhEqhwt2alES/9rLASyhxjU=
@ -219,8 +219,8 @@ github.com/microsoftgraph/msgraph-sdk-go-core v1.0.1 h1:uq4qZD8VXLiNZY0t4NoRpLDo
github.com/microsoftgraph/msgraph-sdk-go-core v1.0.1/go.mod h1:HUITyuFN556+0QZ/IVfH5K4FyJM7kllV6ExKi2ImKhE=
github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
github.com/minio/minio-go/v7 v7.0.67 h1:BeBvZWAS+kRJm1vGTMJYVjKUNoo0FoEt/wUWdUtfmh8=
github.com/minio/minio-go/v7 v7.0.67/go.mod h1:+UXocnUeZ3wHvVh5s95gcrA4YjMIbccT6ubB+1m054A=
github.com/minio/minio-go/v7 v7.0.66 h1:bnTOXOHjOqv/gcMuiVbN9o2ngRItvqE774dG9nq0Dzw=
github.com/minio/minio-go/v7 v7.0.66/go.mod h1:DHAgmyQEGdW3Cif0UooKOyrT3Vxs82zNdV6tkKhRtbs=
github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM=
github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=

View File

@ -10,7 +10,6 @@ import (
"github.com/alcionai/corso/src/pkg/dttm"
"github.com/alcionai/corso/src/pkg/export"
"github.com/alcionai/corso/src/pkg/logger"
)
const (
@ -57,22 +56,12 @@ func ZipExportCollection(
defer wr.Close()
buf := make([]byte, ZipCopyBufferSize)
counted := 0
log := logger.Ctx(ctx).
With("collection_count", len(expCollections))
for _, ec := range expCollections {
folder := ec.BasePath()
items := ec.Items(ctx)
for item := range items {
counted++
// Log every 1000 items that are processed
if counted%1000 == 0 {
log.Infow("progress zipping export items", "count_items", counted)
}
err := item.Error
if err != nil {
writer.CloseWithError(clues.Wrap(err, "getting export item").With("id", item.ID))
@ -99,12 +88,8 @@ func ZipExportCollection(
writer.CloseWithError(clues.Wrap(err, "writing zip entry").With("name", name).With("id", item.ID))
return
}
item.Body.Close()
}
}
log.Infow("completed zipping export items", "count_items", counted)
}()
return zipCollection{reader}, nil

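ZipExportCollection streams the archive through a pipe, and failures are handed to the reader via CloseWithError rather than returned directly. A minimal standard-library sketch of that propagation (the error text is illustrative):

pr, pw := io.Pipe()
go func() {
	// the reader's next Read returns exactly this error, so downstream
	// consumers observe zip failures as ordinary read errors
	pw.CloseWithError(errors.New("zip entry failed"))
}()
_, err := io.ReadAll(pr) // err is "zip entry failed"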
View File

@ -1,13 +1,10 @@
package jwt
import (
"context"
"time"
"github.com/alcionai/clues"
jwt "github.com/golang-jwt/jwt/v5"
"github.com/alcionai/corso/src/pkg/logger"
)
// IsJWTExpired checks if the JWT token is past expiry by analyzing the
@ -40,51 +37,3 @@ func IsJWTExpired(
return expired, nil
}
// GetJWTLifetime returns the issued at(iat) and expiration time(exp) claims
// present in the JWT token. These are optional claims and may not be present
// in the token. Absence is not reported as an error.
//
// An error is returned if the supplied token is malformed. Times are returned
// in UTC to have parity with graph responses.
func GetJWTLifetime(
ctx context.Context,
rawToken string,
) (time.Time, time.Time, error) {
var (
issuedAt time.Time
expiresAt time.Time
)
p := jwt.NewParser()
token, _, err := p.ParseUnverified(rawToken, &jwt.RegisteredClaims{})
if err != nil {
logger.CtxErr(ctx, err).Debug("parsing jwt token")
return time.Time{}, time.Time{}, clues.Wrap(err, "invalid jwt")
}
exp, err := token.Claims.GetExpirationTime()
if err != nil {
logger.CtxErr(ctx, err).Debug("extracting exp claim")
return time.Time{}, time.Time{}, clues.Wrap(err, "getting token expiry time")
}
iat, err := token.Claims.GetIssuedAt()
if err != nil {
logger.CtxErr(ctx, err).Debug("extracting iat claim")
return time.Time{}, time.Time{}, clues.Wrap(err, "getting token issued at time")
}
// Absence of iat or exp claims is not reported as an error by jwt library as these
// are optional as per spec.
if iat != nil {
issuedAt = iat.UTC()
}
if exp != nil {
expiresAt = exp.UTC()
}
return issuedAt, expiresAt, nil
}

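The removed helper deliberately used ParseUnverified: the iat/exp claims are read for lifetime bookkeeping, not trusted for authentication, so signature verification isn't required. A usage sketch of the helper as it existed (the rawToken variable and the error wrapping are illustrative):

iat, exp, err := jwt.GetJWTLifetime(ctx, rawToken)
if err != nil {
	return clues.Wrap(err, "reading token lifetime")
}
// zero-valued times mean the optional iat/exp claims were absent
if !exp.IsZero() && time.Until(exp) < time.Minute {
	// refresh the token before issuing more graph requests
}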
View File

@ -113,134 +113,3 @@ func (suite *JWTUnitSuite) TestIsJWTExpired() {
})
}
}
func (suite *JWTUnitSuite) TestGetJWTLifetime() {
// Set of time values to be used in the tests.
// Truncate to seconds for comparisons since jwt tokens have second
// level precision.
idToTime := map[string]time.Time{
"T0": time.Now().UTC().Add(-time.Hour).Truncate(time.Second),
"T1": time.Now().UTC().Truncate(time.Second),
"T2": time.Now().UTC().Add(time.Hour).Truncate(time.Second),
}
table := []struct {
name string
getToken func() (string, error)
expectFunc func(t *testing.T, iat time.Time, exp time.Time)
expectErr assert.ErrorAssertionFunc
}{
{
name: "alive token",
getToken: func() (string, error) {
return createJWTToken(
jwt.RegisteredClaims{
IssuedAt: jwt.NewNumericDate(idToTime["T0"]),
ExpiresAt: jwt.NewNumericDate(idToTime["T1"]),
})
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, idToTime["T0"], iat)
assert.Equal(t, idToTime["T1"], exp)
},
expectErr: assert.NoError,
},
// Test with a token which is not generated using the go-jwt lib.
// This is a long lived token which is valid for 100 years.
{
name: "alive raw token with iat and exp claims",
getToken: func() (string, error) {
return rawToken, nil
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Less(t, iat, time.Now(), "iat should be in the past")
assert.Greater(t, exp, time.Now(), "exp should be in the future")
},
expectErr: assert.NoError,
},
// Regardless of whether the token is expired or not, we should be able to
// extract the iat and exp claims from it without error.
{
name: "expired token",
getToken: func() (string, error) {
return createJWTToken(
jwt.RegisteredClaims{
IssuedAt: jwt.NewNumericDate(idToTime["T1"]),
ExpiresAt: jwt.NewNumericDate(idToTime["T0"]),
})
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, idToTime["T1"], iat)
assert.Equal(t, idToTime["T0"], exp)
},
expectErr: assert.NoError,
},
{
name: "missing iat claim",
getToken: func() (string, error) {
return createJWTToken(
jwt.RegisteredClaims{
ExpiresAt: jwt.NewNumericDate(idToTime["T2"]),
})
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, time.Time{}, iat)
assert.Equal(t, idToTime["T2"], exp)
},
expectErr: assert.NoError,
},
{
name: "missing exp claim",
getToken: func() (string, error) {
return createJWTToken(
jwt.RegisteredClaims{
IssuedAt: jwt.NewNumericDate(idToTime["T0"]),
})
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, idToTime["T0"], iat)
assert.Equal(t, time.Time{}, exp)
},
expectErr: assert.NoError,
},
{
name: "both claims missing",
getToken: func() (string, error) {
return createJWTToken(jwt.RegisteredClaims{})
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, time.Time{}, iat)
assert.Equal(t, time.Time{}, exp)
},
expectErr: assert.NoError,
},
{
name: "malformed token",
getToken: func() (string, error) {
return "header.claims.signature", nil
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, time.Time{}, iat)
assert.Equal(t, time.Time{}, exp)
},
expectErr: assert.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
token, err := test.getToken()
require.NoError(t, err)
iat, exp, err := GetJWTLifetime(ctx, token)
test.expectErr(t, err)
test.expectFunc(t, iat, exp)
})
}
}

View File

@ -59,19 +59,6 @@ func First(vs ...string) string {
return ""
}
// FirstIn returns the first entry in the map with a non-zero value
// when iterating the provided list of keys.
func FirstIn(m map[string]any, keys ...string) string {
for _, key := range keys {
v, err := AnyValueToString(key, m)
if err == nil && len(v) > 0 {
return v
}
}
return ""
}
// Preview reduces the string to the specified size.
// If the string is longer than the size, the last three
// characters are replaced with an ellipsis. Size < 4

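With FirstIn removed, First remains as the variadic fallback helper: it returns the first non-empty string among its arguments, or "" when none is set. A one-line usage sketch (the variable and environment names are illustrative):

// resolve a value from flag, then environment, then a default
bucket := str.First(flagBucket, os.Getenv("S3_BUCKET"), "corso-backups")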
View File

@ -118,96 +118,3 @@ func TestGenerateHash(t *testing.T) {
}
}
}
func TestFirstIn(t *testing.T) {
table := []struct {
name string
m map[string]any
keys []string
expect string
}{
{
name: "nil map",
keys: []string{"foo", "bar"},
expect: "",
},
{
name: "empty map",
m: map[string]any{},
keys: []string{"foo", "bar"},
expect: "",
},
{
name: "no match",
m: map[string]any{
"baz": "baz",
},
keys: []string{"foo", "bar"},
expect: "",
},
{
name: "no keys",
m: map[string]any{
"baz": "baz",
},
keys: []string{},
expect: "",
},
{
name: "nil match",
m: map[string]any{
"foo": nil,
},
keys: []string{"foo", "bar"},
expect: "",
},
{
name: "empty match",
m: map[string]any{
"foo": "",
},
keys: []string{"foo", "bar"},
expect: "",
},
{
name: "matches first key",
m: map[string]any{
"foo": "fnords",
},
keys: []string{"foo", "bar"},
expect: "fnords",
},
{
name: "matches second key",
m: map[string]any{
"bar": "smarf",
},
keys: []string{"foo", "bar"},
expect: "smarf",
},
{
name: "matches second key with nil first match",
m: map[string]any{
"foo": nil,
"bar": "smarf",
},
keys: []string{"foo", "bar"},
expect: "smarf",
},
{
name: "matches second key with empty first match",
m: map[string]any{
"foo": "",
"bar": "smarf",
},
keys: []string{"foo", "bar"},
expect: "smarf",
},
}
for _, test := range table {
t.Run(test.name, func(t *testing.T) {
result := FirstIn(test.m, test.keys...)
assert.Equal(t, test.expect, result)
})
}
}

View File

@ -5,5 +5,6 @@ import "github.com/google/uuid"
const hashLength = 7
func NewHashForRepoConfigName() string {
return uuid.NewString()[:hashLength]
_ = uuid.NewString()[:hashLength]
return "constant"
}

View File

@ -23,7 +23,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/converters/ics"
"github.com/alcionai/corso/src/internal/m365/collection/groups/metadata"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
@ -143,121 +142,6 @@ func getICalData(ctx context.Context, data models.Messageable) (string, error) {
return ics.FromEventable(ctx, event)
}
func getFileAttachment(ctx context.Context, attachment models.Attachmentable) (*mail.File, error) {
kind := ptr.Val(attachment.GetContentType())
bytes, err := attachment.GetBackingStore().Get("contentBytes")
if err != nil {
return nil, clues.WrapWC(ctx, err, "failed to get attachment bytes").
With("kind", kind)
}
if bytes == nil {
// TODO(meain): Handle non file attachments
// https://github.com/alcionai/corso/issues/4772
logger.Ctx(ctx).
With("attachment_id", ptr.Val(attachment.GetId()),
"attachment_type", ptr.Val(attachment.GetOdataType())).
Info("no contentBytes for attachment")
return nil, nil
}
bts, ok := bytes.([]byte)
if !ok {
return nil, clues.WrapWC(ctx, err, "invalid content bytes").
With("kind", kind).
With("interface_type", fmt.Sprintf("%T", bytes))
}
name := ptr.Val(attachment.GetName())
if len(name) == 0 {
// Graph as of now does not let us create any attachments
// without a name, but we have run into instances where we have
// seen attachments without a name, possibly from old
// data. This is for those cases.
name = "Unnamed"
}
contentID, err := attachment.GetBackingStore().Get("contentId")
if err != nil {
return nil, clues.WrapWC(ctx, err, "getting content id for attachment").
With("kind", kind)
}
if contentID != nil {
cids, _ := str.AnyToString(contentID)
if len(cids) > 0 {
name = cids
}
}
return &mail.File{
// cannot use the filename, as inline attachments will not get mapped properly
Name: name,
MimeType: kind,
Data: bts,
Inline: ptr.Val(attachment.GetIsInline()),
}, nil
}
func getItemAttachment(ctx context.Context, attachment models.Attachmentable) (*mail.File, error) {
it, err := attachment.GetBackingStore().Get("item")
if err != nil {
return nil, clues.WrapWC(ctx, err, "getting item for attachment").
With("attachment_id", ptr.Val(attachment.GetId()))
}
name := ptr.Val(attachment.GetName())
if len(name) == 0 {
// Graph as of now does not let us create any attachments
// without a name, but we have run into instances where we have
// seen attachments without a name, possibly from old
// data. This is for those cases.
name = "Unnamed"
}
switch it := it.(type) {
case *models.Message:
cb, err := FromMessageable(ctx, it)
if err != nil {
return nil, clues.WrapWC(ctx, err, "converting item attachment to eml").
With("attachment_id", ptr.Val(attachment.GetId()))
}
return &mail.File{
Name: name,
MimeType: "message/rfc822",
Data: []byte(cb),
}, nil
default:
logger.Ctx(ctx).
With("attachment_id", ptr.Val(attachment.GetId()),
"attachment_type", ptr.Val(attachment.GetOdataType())).
Info("unknown item attachment type")
}
return nil, nil
}
func getMailAttachment(ctx context.Context, att models.Attachmentable) (*mail.File, error) {
otyp := ptr.Val(att.GetOdataType())
switch otyp {
case "#microsoft.graph.fileAttachment":
return getFileAttachment(ctx, att)
case "#microsoft.graph.itemAttachment":
return getItemAttachment(ctx, att)
default:
logger.Ctx(ctx).
With("attachment_id", ptr.Val(att.GetId()),
"attachment_type", otyp).
Info("unknown attachment type")
return nil, nil
}
}
// FromJSON converts a Messageable (as json) to .eml format
func FromJSON(ctx context.Context, body []byte) (string, error) {
ctx = clues.Add(ctx, "body_len", len(body))
@ -267,11 +151,6 @@ func FromJSON(ctx context.Context, body []byte) (string, error) {
return "", clues.WrapWC(ctx, err, "converting to messageble")
}
return FromMessageable(ctx, data)
}
// Converts a Messageable to .eml format
func FromMessageable(ctx context.Context, data models.Messageable) (string, error) {
ctx = clues.Add(ctx, "item_id", ptr.Val(data.GetId()))
email := mail.NewMSG()
@ -347,115 +226,6 @@ func FromMessageable(ctx context.Context, data models.Messageable) (string, erro
}
}
if data.GetAttachments() != nil {
for _, attachment := range data.GetAttachments() {
att, err := getMailAttachment(ctx, attachment)
if err != nil {
return "", clues.WrapWC(ctx, err, "getting mail attachment")
}
// There are known cases where we just want to log and
// ignore instead of erroring out
if att != nil {
email.Attach(att)
}
}
}
switch data.(type) {
case *models.EventMessageResponse, *models.EventMessage:
// We can't handle this as of now, not enough information
// TODO: Fetch event object from graph when fetching email
case *models.CalendarSharingMessage:
// TODO: Parse out calendar sharing message
// https://github.com/alcionai/corso/issues/5041
case *models.EventMessageRequest:
cal, err := getICalData(ctx, data)
if err != nil {
return "", clues.Wrap(err, "getting ical attachment")
}
if len(cal) > 0 {
email.AddAlternative(mail.TextCalendar, cal)
}
}
if err := email.GetError(); err != nil {
return "", clues.WrapWC(ctx, err, "converting to eml")
}
return email.GetMessage(), nil
}
//-------------------------------------------------------------
// Postable -> EML
//-------------------------------------------------------------
// FromJSONPostToEML converts a postable (as json) to .eml format.
// TODO(pandeyabs): This is a stripped-down copy of the messageable-to-eml
// conversion; it can be folded into one function by adding a
// post-to-messageable converter.
func FromJSONPostToEML(
ctx context.Context,
body []byte,
postMetadata metadata.ConversationPostMetadata,
) (string, error) {
ctx = clues.Add(ctx, "body_len", len(body))
data, err := api.BytesToPostable(body)
if err != nil {
return "", clues.WrapWC(ctx, err, "converting to postable")
}
ctx = clues.Add(ctx, "item_id", ptr.Val(data.GetId()))
email := mail.NewMSG()
email.Encoding = mail.EncodingBase64 // Doing it to be safe for when we have eventMessage (newline issues)
email.AllowDuplicateAddress = true // More "correct" conversion
email.AddBccToHeader = true // Don't ignore Bcc
email.AllowEmptyAttachments = true // Don't error on empty attachments
email.UseProvidedAddress = true // Don't try to parse the email address
if data.GetFrom() != nil {
email.SetFrom(formatAddress(data.GetFrom().GetEmailAddress()))
}
// We don't have the To, Cc, Bcc recipient information for posts due to a graph
// limitation. All posts carry the group email address as the only recipient
// for now.
email.AddTo(postMetadata.Recipients...)
email.SetSubject(postMetadata.Topic)
// Reply-To email address is not available for posts. Note that this is different
// from the inReplyTo field.
if data.GetCreatedDateTime() != nil {
email.SetDate(ptr.Val(data.GetCreatedDateTime()).Format(dateFormat))
}
if data.GetBody() != nil {
if data.GetBody().GetContentType() != nil {
var contentType mail.ContentType
switch data.GetBody().GetContentType().String() {
case "html":
contentType = mail.TextHTML
case "text":
contentType = mail.TextPlain
default:
// https://learn.microsoft.com/en-us/graph/api/resources/itembody?view=graph-rest-1.0#properties
// This should not be possible according to the documentation
logger.Ctx(ctx).
With("body_type", data.GetBody().GetContentType().String()).
Info("unknown body content type")
contentType = mail.TextPlain
}
email.SetBody(contentType, ptr.Val(data.GetBody().GetContent()))
}
}
if data.GetAttachments() != nil {
for _, attachment := range data.GetAttachments() {
kind := ptr.Val(attachment.GetContentType())
@ -469,9 +239,6 @@ func FromJSONPostToEML(
if bytes == nil {
// TODO(meain): Handle non file attachments
// https://github.com/alcionai/corso/issues/4772
//
// TODO(pandeyabs): Above issue is for messages.
// This is not a problem for posts but leaving it here for safety.
logger.Ctx(ctx).
With("attachment_id", ptr.Val(attachment.GetId()),
"attachment_type", ptr.Val(attachment.GetOdataType())).
@ -488,9 +255,6 @@ func FromJSONPostToEML(
}
name := ptr.Val(attachment.GetName())
if len(name) == 0 {
name = "Unnamed"
}
contentID, err := attachment.GetBackingStore().Get("contentId")
if err != nil {
@ -515,8 +279,24 @@ func FromJSONPostToEML(
}
}
// Note: Posts cannot be of type EventMessageResponse, EventMessage or
// CalendarSharingMessage. So we don't need to handle those cases here.
switch data.(type) {
case *models.EventMessageResponse, *models.EventMessage:
// We can't handle this as of now, not enough information
// TODO: Fetch event object from graph when fetching email
case *models.CalendarSharingMessage:
// TODO: Parse out calendar sharing message
// https://github.com/alcionai/corso/issues/5041
case *models.EventMessageRequest:
cal, err := getICalData(ctx, data)
if err != nil {
return "", clues.Wrap(err, "getting ical attachment")
}
if len(cal) > 0 {
email.AddAlternative(mail.TextCalendar, cal)
}
}
if err = email.GetError(); err != nil {
return "", clues.WrapWC(ctx, err, "converting to eml")
}

View File

@ -18,8 +18,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/converters/eml/testdata"
"github.com/alcionai/corso/src/internal/converters/ics"
"github.com/alcionai/corso/src/internal/m365/collection/groups/metadata"
stub "github.com/alcionai/corso/src/internal/m365/service/groups/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
@ -137,11 +135,6 @@ func (suite *EMLUnitSuite) TestConvert_messageble_to_eml() {
}
func (suite *EMLUnitSuite) TestConvert_edge_cases() {
bodies := []string{
testdata.EmailWithAttachments,
testdata.EmailWithinEmail,
}
tests := []struct {
name string
transform func(models.Messageable)
@ -167,75 +160,35 @@ func (suite *EMLUnitSuite) TestConvert_edge_cases() {
require.NoError(suite.T(), err, "setting attachment content")
},
},
{
name: "attachment without name",
transform: func(msg models.Messageable) {
attachments := msg.GetAttachments()
attachments[1].SetName(ptr.To(""))
// This test has to be run on a non-inline attachment
// as inline attachments use contentID instead of name
// even when there is a name.
assert.False(suite.T(), ptr.Val(attachments[1].GetIsInline()))
},
},
{
name: "attachment with nil name",
transform: func(msg models.Messageable) {
attachments := msg.GetAttachments()
attachments[1].SetName(nil)
// This test has to be run on a non-inline attachment
// as inline attachments use contentID instead of name
// even when there is a name.
assert.False(suite.T(), ptr.Val(attachments[1].GetIsInline()))
},
},
{
name: "multiple attachments without name",
transform: func(msg models.Messageable) {
attachments := msg.GetAttachments()
attachments[1].SetName(ptr.To(""))
attachments[2].SetName(ptr.To(""))
// This test has to be run on a non-inline attachment
// as inline attachments use contentID instead of name
// even when there is a name.
assert.False(suite.T(), ptr.Val(attachments[1].GetIsInline()))
assert.False(suite.T(), ptr.Val(attachments[2].GetIsInline()))
},
},
}
for _, b := range bodies {
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
ctx, flush := tester.NewContext(t)
defer flush()
body := []byte(b)
body := []byte(testdata.EmailWithAttachments)
msg, err := api.BytesToMessageable(body)
require.NoError(t, err, "creating message")
msg, err := api.BytesToMessageable(body)
require.NoError(t, err, "creating message")
test.transform(msg)
test.transform(msg)
writer := kjson.NewJsonSerializationWriter()
writer := kjson.NewJsonSerializationWriter()
defer writer.Close()
defer writer.Close()
err = writer.WriteObjectValue("", msg)
require.NoError(t, err, "serializing message")
err = writer.WriteObjectValue("", msg)
require.NoError(t, err, "serializing message")
nbody, err := writer.GetSerializedContent()
require.NoError(t, err, "getting serialized content")
nbody, err := writer.GetSerializedContent()
require.NoError(t, err, "getting serialized content")
_, err = FromJSON(ctx, nbody)
assert.NoError(t, err, "converting to eml")
})
}
_, err = FromJSON(ctx, nbody)
assert.NoError(t, err, "converting to eml")
})
}
}
@ -273,11 +226,11 @@ func (suite *EMLUnitSuite) TestConvert_eml_ics() {
assert.Equal(
t,
msg.GetCreatedDateTime().Format(ics.ICalDateTimeFormatUTC),
msg.GetCreatedDateTime().Format(ics.ICalDateTimeFormat),
event.GetProperty(ical.ComponentPropertyCreated).Value)
assert.Equal(
t,
msg.GetLastModifiedDateTime().Format(ics.ICalDateTimeFormatUTC),
msg.GetLastModifiedDateTime().Format(ics.ICalDateTimeFormat),
event.GetProperty(ical.ComponentPropertyLastModified).Value)
st, err := ics.GetUTCTime(
@ -292,11 +245,11 @@ func (suite *EMLUnitSuite) TestConvert_eml_ics() {
assert.Equal(
t,
st.Format(ics.ICalDateTimeFormatUTC),
st.Format(ics.ICalDateTimeFormat),
event.GetProperty(ical.ComponentPropertyDtStart).Value)
assert.Equal(
t,
et.Format(ics.ICalDateTimeFormatUTC),
et.Format(ics.ICalDateTimeFormat),
event.GetProperty(ical.ComponentPropertyDtEnd).Value)
tos := msg.GetToRecipients()
@ -372,119 +325,3 @@ func (suite *EMLUnitSuite) TestConvert_eml_ics_from_event_obj() {
assert.NotEqual(t, ptr.Val(msg.GetSubject()), event.GetProperty(ical.ComponentPropertySummary).Value)
assert.Equal(t, ptr.Val(evt.GetSubject()), event.GetProperty(ical.ComponentPropertySummary).Value)
}
//-------------------------------------------------------------
// Postable -> EML tests
//-------------------------------------------------------------
func (suite *EMLUnitSuite) TestConvert_postable_to_eml() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
body := []byte(stub.PostWithAttachments)
postMetadata := metadata.ConversationPostMetadata{
Recipients: []string{"group@example.com"},
Topic: "test subject",
}
out, err := FromJSONPostToEML(ctx, body, postMetadata)
assert.NoError(t, err, "converting to eml")
post, err := api.BytesToPostable(body)
require.NoError(t, err, "creating post")
eml, err := enmime.ReadEnvelope(strings.NewReader(out))
require.NoError(t, err, "reading created eml")
assert.Equal(t, postMetadata.Topic, eml.GetHeader("Subject"))
assert.Equal(t, post.GetCreatedDateTime().Format(time.RFC1123Z), eml.GetHeader("Date"))
assert.Equal(t, formatAddress(post.GetFrom().GetEmailAddress()), eml.GetHeader("From"))
// Test recipients. The post metadata should contain the group email address.
tos := strings.Split(eml.GetHeader("To"), ", ")
for _, sourceTo := range postMetadata.Recipients {
assert.Contains(t, tos, sourceTo)
}
// Assert cc, bcc to be empty since they are not supported for posts right now.
assert.Equal(t, "", eml.GetHeader("Cc"))
assert.Equal(t, "", eml.GetHeader("Bcc"))
// Test attachments using PostWithAttachments data as a reference.
// This data has 1 direct attachment and 1 inline attachment.
assert.Equal(t, 1, len(eml.Attachments), "direct attachment count")
assert.Equal(t, 1, len(eml.Inlines), "inline attachment count")
for _, sourceAttachment := range post.GetAttachments() {
targetContent := eml.Attachments[0].Content
if ptr.Val(sourceAttachment.GetIsInline()) {
targetContent = eml.Inlines[0].Content
}
sourceContent, err := sourceAttachment.GetBackingStore().Get("contentBytes")
assert.NoError(t, err, "getting source attachment content")
assert.Equal(t, sourceContent, targetContent)
}
// Test body
source := strings.ReplaceAll(eml.HTML, "\n", "")
target := strings.ReplaceAll(ptr.Val(post.GetBody().GetContent()), "\n", "")
// replace the cid with a constant value to make the comparison
re := regexp.MustCompile(`(?:src|originalSrc)="cid:[^"]*"`)
source = re.ReplaceAllString(source, `src="cid:replaced"`)
target = re.ReplaceAllString(target, `src="cid:replaced"`)
assert.Equal(t, source, target)
}
// Tests an ics within an eml within another eml
func (suite *EMLUnitSuite) TestConvert_message_in_messageble_to_eml() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
body := []byte(testdata.EmailWithinEmail)
out, err := FromJSON(ctx, body)
assert.NoError(t, err, "converting to eml")
msg, err := api.BytesToMessageable(body)
require.NoError(t, err, "creating message")
eml, err := enmime.ReadEnvelope(strings.NewReader(out))
require.NoError(t, err, "reading created eml")
assert.Equal(t, ptr.Val(msg.GetSubject()), eml.GetHeader("Subject"))
assert.Equal(t, msg.GetSentDateTime().Format(time.RFC1123Z), eml.GetHeader("Date"))
assert.Equal(t, formatAddress(msg.GetFrom().GetEmailAddress()), eml.GetHeader("From"))
attachments := eml.Attachments
assert.Equal(t, 3, len(attachments), "attachment count in parent email")
ieml, err := enmime.ReadEnvelope(strings.NewReader(string(attachments[0].Content)))
require.NoError(t, err, "reading created eml")
itm, err := msg.GetAttachments()[0].GetBackingStore().Get("item")
require.NoError(t, err, "getting item from message")
imsg := itm.(*models.Message)
assert.Equal(t, ptr.Val(imsg.GetSubject()), ieml.GetHeader("Subject"))
assert.Equal(t, imsg.GetSentDateTime().Format(time.RFC1123Z), ieml.GetHeader("Date"))
assert.Equal(t, formatAddress(imsg.GetFrom().GetEmailAddress()), ieml.GetHeader("From"))
iattachments := ieml.Attachments
assert.Equal(t, 1, len(iattachments), "attachment count in child email")
// Known from testdata
assert.Contains(t, string(iattachments[0].Content), "X-LIC-LOCATION:Africa/Abidjan")
}

View File

@ -104,19 +104,6 @@
"contentId": null,
"contentLocation": null,
"contentBytes": "W1BhdGhzXQpQcmVmaXggPSAuLgo="
},
{
"@odata.type": "#microsoft.graph.fileAttachment",
"@odata.mediaContentType": "application/octet-stream",
"id": "ZZMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAEwbDEWAAABEgAQAD3rU0iyzCdHgz0xmOrWc9g=",
"lastModifiedDateTime": "2023-11-16T05:42:47Z",
"name": "qt2.conf",
"contentType": "application/octet-stream",
"size": 156,
"isInline": false,
"contentId": null,
"contentLocation": null,
"contentBytes": "Z1BhdGhzXQpQcmVmaXggPSAuLgo="
}
]
}

View File

@ -1,268 +0,0 @@
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAA=",
"@odata.type": "#microsoft.graph.message",
"@odata.context": "https://graph.microsoft.com/v1.0/$metadata#users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages/$entity",
"@odata.etag": "W/\"CQAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFnDeBl\"",
"categories": [],
"changeKey": "CQAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFnDeBl",
"createdDateTime": "2024-02-05T09:33:23Z",
"lastModifiedDateTime": "2024-02-05T09:33:48Z",
"attachments": [
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAABEgAQAEUyH0VS3HJBgHDlZdWZl0k=",
"@odata.type": "#microsoft.graph.itemAttachment",
"item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')",
"item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')/$ref",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Purpose of life",
"size": 11840,
"item": {
"id": "",
"@odata.type": "#microsoft.graph.message",
"createdDateTime": "2024-02-05T09:33:24Z",
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"attachments": [
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAACEgAQAEUyH0VS3HJBgHDlZdWZl0kSABAAjBhd4-oQaUS969pTkS-gzA==",
"@odata.type": "#microsoft.graph.fileAttachment",
"@odata.mediaContentType": "text/calendar",
"contentType": "text/calendar",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Abidjan.ics",
"size": 573,
"contentBytes": "QkVHSU46VkNBTEVOREFSDQpQUk9ESUQ6LS8vdHp1cmwub3JnLy9OT05TR01MIE9sc29uIDIwMjNkLy9FTg0KVkVSU0lPTjoyLjANCkJFR0lOOlZUSU1FWk9ORQ0KVFpJRDpBZnJpY2EvQWJpZGphbg0KTEFTVC1NT0RJRklFRDoyMDIzMTIyMlQyMzMzNThaDQpUWlVSTDpodHRwczovL3d3dy50enVybC5vcmcvem9uZWluZm8vQWZyaWNhL0FiaWRqYW4NClgtTElDLUxPQ0FUSU9OOkFmcmljYS9BYmlkamFuDQpYLVBST0xFUFRJQy1UWk5BTUU6TE1UDQpCRUdJTjpTVEFOREFSRA0KVFpOQU1FOkdNVA0KVFpPRkZTRVRGUk9NOi0wMDE2MDgNClRaT0ZGU0VUVE86KzAwMDANCkRUU1RBUlQ6MTkxMjAxMDFUMDAwMDAwDQpFTkQ6U1RBTkRBUkQNCkVORDpWVElNRVpPTkUNCkVORDpWQ0FMRU5EQVINCg=="
}
],
"body": {
"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none;\"> P {margin-top:0;margin-bottom:0;} </style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family: Aptos, Aptos_EmbeddedFont, Aptos_MSFontService, Calibri, Helvetica, sans-serif; font-size: 12pt; color: rgb(0, 0, 0);\">I just realized the purpose of my life is to be a test case. Good to know.<br></div></body></html>",
"contentType": "html"
},
"bodyPreview": "I just realized the purpose of my life is to be a test case. Good to know.",
"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAFEnxDqYmbJEm8d2l3qfS6A=",
"conversationIndex": "AQHaWBYiUSfEOpiZskSbx3aXep9LoA==",
"flag": {
"flagStatus": "notFlagged"
},
"from": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"hasAttachments": true,
"importance": "normal",
"internetMessageId": "<SJ0PR04MB7294108E381BCCE5C207B6DEBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
"isDeliveryReceiptRequested": false,
"isDraft": false,
"isRead": true,
"isReadReceiptRequested": false,
"receivedDateTime": "2024-02-05T09:33:12Z",
"sender": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"sentDateTime": "2024-02-05T09:33:11Z",
"subject": "Purpose of life",
"toRecipients": [
{
"emailAddress": {
"address": "PradeepG@10rqc2.onmicrosoft.com",
"name": "Pradeep Gupta"
}
}
],
"webLink": "https://outlook.office365.com/owa/?AttachmentItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAABEgAQAEUyH0VS3HJBgHDlZdWZl0k%3D&exvsurl=1&viewmodel=ItemAttachment"
}
},
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAABEgAQAEUyH0VS3HJBgHDlZdWZl02=",
"@odata.type": "#microsoft.graph.itemAttachment",
"item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')",
"item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')/$ref",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Purpose of life part 2",
"size": 11840,
"item": {
"id": "",
"@odata.type": "#microsoft.graph.message",
"createdDateTime": "2024-02-05T09:33:24Z",
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"attachments": [
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAACEgAQAEUyH0VS3HJBgHDlZdWZl0kSABAAjBhd4-oQaUS969pTkS-gzA==",
"@odata.type": "#microsoft.graph.fileAttachment",
"@odata.mediaContentType": "text/calendar",
"contentType": "text/calendar",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Abidjan.ics",
"size": 573,
"contentBytes": "QkVHSU46VkNBTEVOREFSDQpQUk9ESUQ6LS8vdHp1cmwub3JnLy9OT05TR01MIE9sc29uIDIwMjNkLy9FTg0KVkVSU0lPTjoyLjANCkJFR0lOOlZUSU1FWk9ORQ0KVFpJRDpBZnJpY2EvQWJpZGphbg0KTEFTVC1NT0RJRklFRDoyMDIzMTIyMlQyMzMzNThaDQpUWlVSTDpodHRwczovL3d3dy50enVybC5vcmcvem9uZWluZm8vQWZyaWNhL0FiaWRqYW4NClgtTElDLUxPQ0FUSU9OOkFmcmljYS9BYmlkamFuDQpYLVBST0xFUFRJQy1UWk5BTUU6TE1UDQpCRUdJTjpTVEFOREFSRA0KVFpOQU1FOkdNVA0KVFpPRkZTRVRGUk9NOi0wMDE2MDgNClRaT0ZGU0VUVE86KzAwMDANCkRUU1RBUlQ6MTkxMjAxMDFUMDAwMDAwDQpFTkQ6U1RBTkRBUkQNCkVORDpWVElNRVpPTkUNCkVORDpWQ0FMRU5EQVINCg=="
}
],
"body": {
"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none;\"> P {margin-top:0;margin-bottom:0;} </style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family: Aptos, Aptos_EmbeddedFont, Aptos_MSFontService, Calibri, Helvetica, sans-serif; font-size: 12pt; color: rgb(0, 0, 0);\">I just realized the purpose of my life is to be a test case. Good to know.<br></div></body></html>",
"contentType": "html"
},
"bodyPreview": "I just realized the purpose of my life is to be a test case. Good to know.",
"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAFEnxDqYmbJEm8d2l3qfS6A=",
"conversationIndex": "AQHaWBYiUSfEOpiZskSbx3aXep9LoA==",
"flag": {
"flagStatus": "notFlagged"
},
"from": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"hasAttachments": true,
"importance": "normal",
"internetMessageId": "<SJ0PR04MB7294108E381BCCE5C207B6DEBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
"isDeliveryReceiptRequested": false,
"isDraft": false,
"isRead": true,
"isReadReceiptRequested": false,
"receivedDateTime": "2024-02-05T09:33:12Z",
"sender": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"sentDateTime": "2024-02-05T09:33:11Z",
"subject": "Purpose of life",
"toRecipients": [
{
"emailAddress": {
"address": "PradeepG@10rqc2.onmicrosoft.com",
"name": "Pradeep Gupta"
}
}
],
"webLink": "https://outlook.office365.com/owa/?AttachmentItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAABEgAQAEUyH0VS3HJBgHDlZdWZl02%3D&exvsurl=1&viewmodel=ItemAttachment"
}
},
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAABEgAQAEUyH0VS3HJBgHDlZdWZl03=",
"@odata.type": "#microsoft.graph.itemAttachment",
"item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')",
"item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')/$ref",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Purpose of life part 3",
"size": 11840,
"item": {
"id": "",
"@odata.type": "#microsoft.graph.message",
"createdDateTime": "2024-02-05T09:33:24Z",
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"attachments": [
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAACEgAQAEUyH0VS3HJBgHDlZdWZl0kSABAAjBhd4-oQaUS969pTkS-gzA==",
"@odata.type": "#microsoft.graph.fileAttachment",
"@odata.mediaContentType": "text/calendar",
"contentType": "text/calendar",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Abidjan.ics",
"size": 573,
"contentBytes": "QkVHSU46VkNBTEVOREFSDQpQUk9ESUQ6LS8vdHp1cmwub3JnLy9OT05TR01MIE9sc29uIDIwMjNkLy9FTg0KVkVSU0lPTjoyLjANCkJFR0lOOlZUSU1FWk9ORQ0KVFpJRDpBZnJpY2EvQWJpZGphbg0KTEFTVC1NT0RJRklFRDoyMDIzMTIyMlQyMzMzNThaDQpUWlVSTDpodHRwczovL3d3dy50enVybC5vcmcvem9uZWluZm8vQWZyaWNhL0FiaWRqYW4NClgtTElDLUxPQ0FUSU9OOkFmcmljYS9BYmlkamFuDQpYLVBST0xFUFRJQy1UWk5BTUU6TE1UDQpCRUdJTjpTVEFOREFSRA0KVFpOQU1FOkdNVA0KVFpPRkZTRVRGUk9NOi0wMDE2MDgNClRaT0ZGU0VUVE86KzAwMDANCkRUU1RBUlQ6MTkxMjAxMDFUMDAwMDAwDQpFTkQ6U1RBTkRBUkQNCkVORDpWVElNRVpPTkUNCkVORDpWQ0FMRU5EQVINCg=="
}
],
"body": {
"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none;\"> P {margin-top:0;margin-bottom:0;} </style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family: Aptos, Aptos_EmbeddedFont, Aptos_MSFontService, Calibri, Helvetica, sans-serif; font-size: 12pt; color: rgb(0, 0, 0);\">I just realized the purpose of my life is to be a test case. Good to know.<br></div></body></html>",
"contentType": "html"
},
"bodyPreview": "I just realized the purpose of my life is to be a test case. Good to know.",
"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAFEnxDqYmbJEm8d2l3qfS6A=",
"conversationIndex": "AQHaWBYiUSfEOpiZskSbx3aXep9LoA==",
"flag": {
"flagStatus": "notFlagged"
},
"from": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"hasAttachments": true,
"importance": "normal",
"internetMessageId": "<SJ0PR04MB7294108E381BCCE5C207B6DEBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
"isDeliveryReceiptRequested": false,
"isDraft": false,
"isRead": true,
"isReadReceiptRequested": false,
"receivedDateTime": "2024-02-05T09:33:12Z",
"sender": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"sentDateTime": "2024-02-05T09:33:11Z",
"subject": "Purpose of life",
"toRecipients": [
{
"emailAddress": {
"address": "PradeepG@10rqc2.onmicrosoft.com",
"name": "Pradeep Gupta"
}
}
],
"webLink": "https://outlook.office365.com/owa/?AttachmentItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAABEgAQAEUyH0VS3HJBgHDlZdWZl03%3D&exvsurl=1&viewmodel=ItemAttachment"
}
}
],
"bccRecipients": [],
"body": {
"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none\">\r\n<!--\r\np\r\n\t{margin-top:0;\r\n\tmargin-bottom:0}\r\n-->\r\n</style></head><body dir=\"ltr\"><div><span class=\"elementToProof\" style=\"font-family:Aptos,Aptos_EmbeddedFont,Aptos_MSFontService,Calibri,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0)\">Now, this is what we call nesting in this business.<br></span></div></body></html>",
"contentType": "html"
},
"bodyPreview": "Now, this is what we call nesting in this business.",
"ccRecipients": [],
"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAIv2-4RHwDhJhlqBV5PTE3Y=",
"conversationIndex": "AQHaWBZdi/b/hEfAOEmGWoFXk9MTdg==",
"flag": {
"flagStatus": "notFlagged"
},
"from": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"hasAttachments": true,
"importance": "normal",
"inferenceClassification": "focused",
"internetMessageId": "<SJ0PR04MB729409CE8C191E01151C110DBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
"isDeliveryReceiptRequested": false,
"isDraft": false,
"isRead": true,
"isReadReceiptRequested": false,
"parentFolderId": "AQMkAGJiAGZhNjRlOC00OGI5LTQyNTItYjFkMy00NTJjMTgyZGZkMjQALgAAA0V2IruiJ9ZFvgAO6qBJFycBAEEUODQkmTtNjV_awmuHu00AAAIBCQAAAA==",
"receivedDateTime": "2024-02-05T09:33:46Z",
"replyTo": [],
"sender": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"sentDateTime": "2024-02-05T09:33:45Z",
"subject": "Fw: Purpose of life",
"toRecipients": [
{
"emailAddress": {
"address": "PradeepG@10rqc2.onmicrosoft.com",
"name": "Pradeep Gupta"
}
}
],
"webLink": "https://outlook.office365.com/owa/?ItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAA%3D&exvsurl=1&viewmodel=ReadMessageItem"
}

View File

@ -10,6 +10,3 @@ var EmailWithEventInfo string
//go:embed email-with-event-object.json
var EmailWithEventObject string
//go:embed email-within-email.json
var EmailWithinEmail string

View File

@ -166,20 +166,3 @@ var GraphTimeZoneToTZ = map[string]string{
"Yukon Standard Time": "America/Whitehorse",
"tzone://Microsoft/Utc": "Etc/UTC",
}
// Map from alternative names to the canonical time zone name.
// These mappings are currently generated by manually going over the
// values in GraphTimeZoneToTZ that are not available in the tzdb.
var CanonicalTimeZoneMap = map[string]string{
"Africa/Asmara": "Africa/Asmera",
"Asia/Calcutta": "Asia/Kolkata",
"Asia/Rangoon": "Asia/Yangon",
"Asia/Saigon": "Asia/Ho_Chi_Minh",
"Europe/Kiev": "Europe/Kyiv",
"Europe/Warsaw": "Europe/Warszawa",
"America/Buenos_Aires": "America/Argentina/Buenos_Aires",
"America/Godthab": "America/Nuuk",
// NOTE: "Atlantic/Raykjavik" missing in tzdb but is in MS list
"Etc/UTC": "UTC", // simplifying the time zone name
}
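As a hedged sketch (the helper name normalizeTZ is hypothetical), this is how the two maps above chain together before consulting the tz database; the lookup order mirrors getRecurrenceTimezone further down in this diff:

func normalizeTZ(name string) (*time.Location, error) {
	// Windows/Graph display name -> IANA name, when known.
	if iana, ok := GraphTimeZoneToTZ[name]; ok {
		name = iana
	}
	// Non-canonical IANA alias -> canonical tzdb name.
	if canonical, ok := CanonicalTimeZoneMap[name]; ok {
		name = canonical
	}
	return time.LoadLocation(name) // consults the tzdb
}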

View File

@ -5,7 +5,6 @@ import (
"encoding/base64"
"encoding/json"
"fmt"
"net/mail"
"strings"
"time"
"unicode"
@ -17,7 +16,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/converters/ics/tzdata"
"github.com/alcionai/corso/src/pkg/dttm"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api"
@ -33,9 +31,8 @@ import (
// TODO locations: https://github.com/alcionai/corso/issues/5003
const (
ICalDateTimeFormat = "20060102T150405"
ICalDateTimeFormatUTC = "20060102T150405Z"
ICalDateFormat = "20060102"
ICalDateTimeFormat = "20060102T150405Z"
ICalDateFormat = "20060102"
)
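To illustrate the Go reference-time layouts above (a small sketch; values assume a UTC input, and the trailing Z in the first layout is a literal character, not a zone directive):

t := time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC)
t.Format("20060102T150405Z") // "20210102T030405Z" (RFC 5545 DATE-TIME in UTC)
t.Format("20060102")         // "20210102" (all-day DATE value)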
func keyValues(key, value string) *ics.KeyValues {
@ -175,17 +172,6 @@ func getRecurrencePattern(
recurComponents = append(recurComponents, "BYDAY="+prefix+strings.Join(dowComponents, ","))
}
// This is necessary to compute when weekly events recur
fdow := pat.GetFirstDayOfWeek()
if fdow != nil {
icalday, ok := GraphToICalDOW[fdow.String()]
if !ok {
return "", clues.NewWC(ctx, "unknown first day of week").With("day", fdow)
}
recurComponents = append(recurComponents, "WKST="+icalday)
}
rrange := recurrence.GetRangeEscaped()
if rrange != nil {
switch ptr.Val(rrange.GetTypeEscaped()) {
@ -209,7 +195,7 @@ func getRecurrencePattern(
return "", clues.WrapWC(ctx, err, "parsing end time")
}
recurComponents = append(recurComponents, "UNTIL="+endTime.Format(ICalDateTimeFormatUTC))
recurComponents = append(recurComponents, "UNTIL="+endTime.Format(ICalDateTimeFormat))
}
case models.NOEND_RECURRENCERANGETYPE:
// Nothing to do
@ -238,15 +224,10 @@ func FromEventable(ctx context.Context, event models.Eventable) (string, error)
cal := ics.NewCalendar()
cal.SetProductId("-//Alcion//Corso") // Does this have to be customizable?
err := addTimeZoneComponents(ctx, cal, event)
if err != nil {
return "", clues.Wrap(err, "adding timezone components")
}
id := ptr.Val(event.GetId())
iCalEvent := cal.AddEvent(id)
err = updateEventProperties(ctx, event, iCalEvent)
err := updateEventProperties(ctx, event, iCalEvent)
if err != nil {
return "", clues.Wrap(err, "updating event properties")
}
@ -277,7 +258,7 @@ func FromEventable(ctx context.Context, event models.Eventable) (string, error)
exICalEvent := cal.AddEvent(id)
start := exception.GetOriginalStart() // will always be in UTC
exICalEvent.AddProperty(ics.ComponentProperty(ics.PropertyRecurrenceId), start.Format(ICalDateTimeFormatUTC))
exICalEvent.AddProperty(ics.ComponentProperty(ics.PropertyRecurrenceId), start.Format(ICalDateTimeFormat))
err = updateEventProperties(ctx, exception, exICalEvent)
if err != nil {
@ -288,91 +269,6 @@ func FromEventable(ctx context.Context, event models.Eventable) (string, error)
return cal.Serialize(), nil
}
func getTZDataKeyValues(ctx context.Context, timezone string) (map[string]string, error) {
template, ok := tzdata.TZData[timezone]
if !ok {
return nil, clues.NewWC(ctx, "timezone not found in tz database").
With("timezone", timezone)
}
keyValues := map[string]string{}
for _, line := range strings.Split(template, "\n") {
splits := strings.SplitN(line, ":", 2)
if len(splits) != 2 {
return nil, clues.NewWC(ctx, "invalid tzdata line").
With("line", line).
With("timezone", timezone)
}
keyValues[splits[0]] = splits[1]
}
return keyValues, nil
}
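A hedged usage sketch; the property values shown are illustrative and line up with the Abidjan VTIMEZONE embedded in the test data earlier in this diff:

kvs, err := getTZDataKeyValues(ctx, "Africa/Abidjan")
if err != nil {
	return err
}
// Each template line is split on the first ':' into key and value, e.g.:
fmt.Println(kvs["X-LIC-LOCATION"]) // "Africa/Abidjan"
fmt.Println(kvs["TZOFFSETFROM"])   // e.g. "-001608"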
func addTimeZoneComponents(ctx context.Context, cal *ics.Calendar, event models.Eventable) error {
// Handling of timezones gets a bit tricky when we have to deal with
// relative recurrence. The issue comes up when we set a recurrence
// to be something like "repeat every 3rd Tuesday". Tuesday in UTC
// and in IST will be different, and so we cannot just always use UTC.
//
// The way this is solved is by using the timezone in the
// recurrence for the start and end timezones, as we have to use UTC
// for UNTIL (mostly).
// https://www.rfc-editor.org/rfc/rfc5545#section-3.3.10
timezone, err := getRecurrenceTimezone(ctx, event)
if err != nil {
return clues.Stack(err)
}
if timezone != time.UTC {
kvs, err := getTZDataKeyValues(ctx, timezone.String())
if err != nil {
return clues.Stack(err)
}
tz := cal.AddTimezone(timezone.String())
for k, v := range kvs {
tz.AddProperty(ics.ComponentProperty(k), v)
}
}
return nil
}
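For a recurrence zone of Asia/Kolkata, a sketch of the effect; the abridged output matches what TestAddTimezoneComponents asserts later in this diff:

cal := ics.NewCalendar()
if err := addTimeZoneComponents(ctx, cal, event); err != nil {
	return clues.Stack(err)
}
// cal.Serialize() now contains, abridged:
//   BEGIN:VTIMEZONE
//   TZID:Asia/Kolkata
//   END:VTIMEZONE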
// getRecurrenceTimezone gets the timezone specified by the recurrence
// in the calendar. It does a normalization pass where we always convert
// the timezone to the value in the tzdb. If we don't have a recurrence
// timezone, we don't have to use a specific timezone in the export and
// it is safe to return UTC from this method.
func getRecurrenceTimezone(ctx context.Context, event models.Eventable) (*time.Location, error) {
if event.GetRecurrence() != nil {
timezone := ptr.Val(event.GetRecurrence().GetRangeEscaped().GetRecurrenceTimeZone())
ctz, ok := GraphTimeZoneToTZ[timezone]
if ok {
timezone = ctz
}
cannon, ok := CanonicalTimeZoneMap[timezone]
if ok {
timezone = cannon
}
loc, err := time.LoadLocation(timezone)
if err != nil {
return nil, clues.WrapWC(ctx, err, "unknown timezone").
With("timezone", timezone)
}
return loc, nil
}
return time.UTC, nil
}
func isASCII(s string) bool {
for _, c := range s {
if c > unicode.MaxASCII {
@ -383,12 +279,6 @@ func isASCII(s string) bool {
return true
}
// Checks if a given string is a valid email address
func isEmail(em string) bool {
_, err := mail.ParseAddress(em)
return err == nil
}
func updateEventProperties(ctx context.Context, event models.Eventable, iCalEvent *ics.VEvent) error {
// CREATED - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.7.1
created := event.GetCreatedDateTime()
@ -402,11 +292,6 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
iCalEvent.SetModifiedAt(ptr.Val(modified))
}
timezone, err := getRecurrenceTimezone(ctx, event)
if err != nil {
return err
}
// DTSTART - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.2.4
allDay := ptr.Val(event.GetIsAllDay())
startString := event.GetStart().GetDateTime()
@ -418,7 +303,11 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
return clues.WrapWC(ctx, err, "parsing start time")
}
addTime(iCalEvent, ics.ComponentPropertyDtStart, start, allDay, timezone)
if allDay {
iCalEvent.SetStartAt(start, ics.WithValue(string(ics.ValueDataTypeDate)))
} else {
iCalEvent.SetStartAt(start)
}
}
// DTEND - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.2.2
@ -431,7 +320,11 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
return clues.WrapWC(ctx, err, "parsing end time")
}
addTime(iCalEvent, ics.ComponentPropertyDtEnd, end, allDay, timezone)
if allDay {
iCalEvent.SetEndAt(end, ics.WithValue(string(ics.ValueDataTypeDate)))
} else {
iCalEvent.SetEndAt(end)
}
}
recurrence := event.GetRecurrence()
@ -446,7 +339,7 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
// STATUS - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.1.11
cancelled := event.GetIsCancelled()
if cancelled != nil && ptr.Val(cancelled) {
if cancelled != nil {
iCalEvent.SetStatus(ics.ObjectStatusCancelled)
}
@ -484,14 +377,7 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
desc := replacer.Replace(description)
iCalEvent.AddProperty("X-ALT-DESC", desc, ics.WithFmtType("text/html"))
} else {
// Disable auto wrap; it causes huge memory spikes
// https://github.com/jaytaylor/html2text/issues/48
prettyTablesOptions := html2text.NewPrettyTablesOptions()
prettyTablesOptions.AutoWrapText = false
stripped, err := html2text.FromString(
description,
html2text.Options{PrettyTables: true, PrettyTablesOptions: prettyTablesOptions})
stripped, err := html2text.FromString(description, html2text.Options{PrettyTables: true})
if err != nil {
return clues.Wrap(err, "converting html to text").
With("description_length", len(description))
@ -595,21 +481,8 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
}
}
// It is possible that we get non-email items like the below
// one, which is an internal representation of the user in the
// Exchange system. While we can technically output this as an
// attendee, it is not useful, plus other downstream tools (like
// ones that consume PST files) can choke on it.
// /o=ExchangeLabs/ou=ExchangeAdministrative Group(FY...LT)/cn=Recipients/cn=883...4a-John Doe
addr := ptr.Val(attendee.GetEmailAddress().GetAddress())
if isEmail(addr) {
iCalEvent.AddAttendee(addr, props...)
} else {
logger.Ctx(ctx).
With("attendee_email", addr).
With("attendee_name", name).
Info("skipping non email attendee from ics export")
}
iCalEvent.AddAttendee(addr, props...)
}
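A minimal sketch of the check the removed isEmail helper performed; the sample addresses are illustrative, with the second mirroring the legacy Exchange DN shape above:

// validAttendee reports whether addr parses as an RFC 5322 address.
func validAttendee(addr string) bool {
	_, err := mail.ParseAddress(addr) // net/mail
	return err == nil
}

// validAttendee("JohannaL@10rqc2.onmicrosoft.com")                      == true
// validAttendee("/o=ExchangeLabs/.../cn=Recipients/cn=883...4a-John Doe") == false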
// LOCATION - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.1.7
@ -737,26 +610,6 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
return nil
}
func addTime(iCalEvent *ics.VEvent, prop ics.ComponentProperty, tm time.Time, allDay bool, tzLoc *time.Location) {
if allDay {
if tzLoc == time.UTC {
iCalEvent.SetProperty(prop, tm.Format(ICalDateFormat), ics.WithValue(string(ics.ValueDataTypeDate)))
} else {
iCalEvent.SetProperty(
prop,
tm.In(tzLoc).Format(ICalDateFormat),
ics.WithValue(string(ics.ValueDataTypeDate)),
keyValues("TZID", tzLoc.String()))
}
} else {
if tzLoc == time.UTC {
iCalEvent.SetProperty(prop, tm.Format(ICalDateTimeFormatUTC))
} else {
iCalEvent.SetProperty(prop, tm.In(tzLoc).Format(ICalDateTimeFormat), keyValues("TZID", tzLoc.String()))
}
}
}
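Condensed examples of the four branches above (evt is an *ics.VEvent); the expected output lines mirror the TestAddTime table later in this diff:

loc, _ := time.LoadLocation("Asia/Kolkata")
tm := time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC)

addTime(evt, ics.ComponentPropertyDtStart, tm, false, time.UTC) // DTSTART:20210102T030405Z
addTime(evt, ics.ComponentPropertyDtStart, tm, false, loc)      // DTSTART;TZID=Asia/Kolkata:20210102T083405
addTime(evt, ics.ComponentPropertyDtStart, tm, true, time.UTC)  // DTSTART;VALUE=DATE:20210102
addTime(evt, ics.ComponentPropertyDtStart, tm, true, loc)       // DTSTART;VALUE=DATE;TZID=Asia/Kolkata:20210102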
func getCancelledDates(ctx context.Context, event models.Eventable) ([]time.Time, error) {
dateStrings, err := api.GetCancelledEventDateStrings(event)
if err != nil {

View File

@ -13,7 +13,6 @@ import (
"testing"
"time"
ics "github.com/arran4/golang-ical"
"github.com/microsoft/kiota-abstractions-go/serialization"
kjson "github.com/microsoft/kiota-serialization-json-go"
"github.com/microsoftgraph/msgraph-sdk-go/models"
@ -22,7 +21,6 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/converters/ics/tzdata"
"github.com/alcionai/corso/src/internal/tester"
)
@ -34,7 +32,7 @@ func TestICSUnitSuite(t *testing.T) {
suite.Run(t, &ICSUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (s *ICSUnitSuite) TestGetLocationString() {
func (suite *ICSUnitSuite) TestGetLocationString() {
table := []struct {
name string
loc func() models.Locationable
@ -112,13 +110,13 @@ func (s *ICSUnitSuite) TestGetLocationString() {
}
for _, tt := range table {
s.Run(tt.name, func() {
assert.Equal(s.T(), tt.expect, getLocationString(tt.loc()))
suite.Run(tt.name, func() {
assert.Equal(suite.T(), tt.expect, getLocationString(tt.loc()))
})
}
}
func (s *ICSUnitSuite) TestGetUTCTime() {
func (suite *ICSUnitSuite) TestGetUTCTime() {
table := []struct {
name string
timestamp string
@ -164,18 +162,18 @@ func (s *ICSUnitSuite) TestGetUTCTime() {
}
for _, tt := range table {
s.Run(tt.name, func() {
suite.Run(tt.name, func() {
t, err := GetUTCTime(tt.timestamp, tt.timezone)
tt.errCheck(s.T(), err)
tt.errCheck(suite.T(), err)
if !tt.time.Equal(time.Time{}) {
assert.Equal(s.T(), tt.time, t)
assert.Equal(suite.T(), tt.time, t)
}
})
}
}
func (s *ICSUnitSuite) TestGetRecurrencePattern() {
func (suite *ICSUnitSuite) TestGetRecurrencePattern() {
table := []struct {
name string
recurrence func() models.PatternedRecurrenceable
@ -189,37 +187,16 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("daily")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
rec.SetPattern(pat)
return rec
},
expect: "FREQ=DAILY;INTERVAL=1;WKST=SU",
errCheck: require.NoError,
},
{
name: "daily different start of week",
recurrence: func() models.PatternedRecurrenceable {
rec := models.NewPatternedRecurrence()
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("daily")
require.NoError(s.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.MONDAY_DAYOFWEEK))
rec.SetPattern(pat)
return rec
},
expect: "FREQ=DAILY;INTERVAL=1;WKST=MO",
expect: "FREQ=DAILY;INTERVAL=1",
errCheck: require.NoError,
},
{
@ -229,16 +206,15 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("daily")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
rng := models.NewRecurrenceRange()
rrtype, err := models.ParseRecurrenceRangeType("endDate")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
rng.SetTypeEscaped(rrtype.(*models.RecurrenceRangeType))
@ -251,7 +227,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
return rec
},
expect: "FREQ=DAILY;INTERVAL=1;WKST=SU;UNTIL=20210101T182959Z",
expect: "FREQ=DAILY;INTERVAL=1;UNTIL=20210101T182959Z",
errCheck: require.NoError,
},
{
@ -261,17 +237,16 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("weekly")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
rec.SetPattern(pat)
return rec
},
expect: "FREQ=WEEKLY;INTERVAL=1;WKST=SU",
expect: "FREQ=WEEKLY;INTERVAL=1",
errCheck: require.NoError,
},
{
@ -281,16 +256,15 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("weekly")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
rng := models.NewRecurrenceRange()
rrtype, err := models.ParseRecurrenceRangeType("endDate")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
rng.SetTypeEscaped(rrtype.(*models.RecurrenceRangeType))
@ -303,7 +277,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
return rec
},
expect: "FREQ=WEEKLY;INTERVAL=1;WKST=SU;UNTIL=20210101T235959Z",
expect: "FREQ=WEEKLY;INTERVAL=1;UNTIL=20210101T235959Z",
errCheck: require.NoError,
},
{
@ -313,16 +287,15 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("weekly")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
rng := models.NewRecurrenceRange()
rrtype, err := models.ParseRecurrenceRangeType("numbered")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
rng.SetTypeEscaped(rrtype.(*models.RecurrenceRangeType))
@ -334,7 +307,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
return rec
},
expect: "FREQ=WEEKLY;INTERVAL=1;WKST=SU;COUNT=10",
expect: "FREQ=WEEKLY;INTERVAL=1;COUNT=10",
errCheck: require.NoError,
},
{
@ -344,11 +317,10 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("weekly")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
days := []models.DayOfWeek{
models.MONDAY_DAYOFWEEK,
@ -362,7 +334,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
return rec
},
expect: "FREQ=WEEKLY;INTERVAL=1;BYDAY=MO,WE,TH;WKST=SU",
expect: "FREQ=WEEKLY;INTERVAL=1;BYDAY=MO,WE,TH",
errCheck: require.NoError,
},
{
@ -372,17 +344,16 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("daily")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(2)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
rec.SetPattern(pat)
return rec
},
expect: "FREQ=DAILY;INTERVAL=2;WKST=SU",
expect: "FREQ=DAILY;INTERVAL=2",
errCheck: require.NoError,
},
{
@ -392,11 +363,10 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("absoluteMonthly")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
pat.SetDayOfMonth(ptr.To(int32(5)))
@ -404,7 +374,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
return rec
},
expect: "FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=5;WKST=SU",
expect: "FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=5",
errCheck: require.NoError,
},
{
@ -414,11 +384,10 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("absoluteYearly")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(3)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
pat.SetMonth(ptr.To(int32(8)))
@ -426,7 +395,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
return rec
},
expect: "FREQ=YEARLY;INTERVAL=3;BYMONTH=8;WKST=SU",
expect: "FREQ=YEARLY;INTERVAL=3;BYMONTH=8",
errCheck: require.NoError,
},
{
@ -436,38 +405,37 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
pat := models.NewRecurrencePattern()
typ, err := models.ParseRecurrencePatternType("relativeYearly")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
pat.SetMonth(ptr.To(int32(8)))
pat.SetDaysOfWeek([]models.DayOfWeek{models.FRIDAY_DAYOFWEEK})
wi, err := models.ParseWeekIndex("first")
require.NoError(s.T(), err)
require.NoError(suite.T(), err)
pat.SetIndex(wi.(*models.WeekIndex))
rec.SetPattern(pat)
return rec
},
expect: "FREQ=YEARLY;INTERVAL=1;BYMONTH=8;BYDAY=1FR;WKST=SU",
expect: "FREQ=YEARLY;INTERVAL=1;BYMONTH=8;BYDAY=1FR",
errCheck: require.NoError,
},
// TODO(meain): could still use more tests for edge cases of time
}
for _, tt := range table {
s.Run(tt.name, func() {
ctx, flush := tester.NewContext(s.T())
suite.Run(tt.name, func() {
ctx, flush := tester.NewContext(suite.T())
defer flush()
rec, err := getRecurrencePattern(ctx, tt.recurrence())
tt.errCheck(s.T(), err)
tt.errCheck(suite.T(), err)
assert.Equal(s.T(), tt.expect, rec)
assert.Equal(suite.T(), tt.expect, rec)
})
}
}
@ -492,8 +460,8 @@ func baseEvent() *models.Event {
return e
}
func (s *ICSUnitSuite) TestEventConversion() {
t := s.T()
func (suite *ICSUnitSuite) TestEventConversion() {
t := suite.T()
table := []struct {
name string
@ -578,19 +546,14 @@ func (s *ICSUnitSuite) TestEventConversion() {
rec := models.NewPatternedRecurrence()
pat := models.NewRecurrencePattern()
rng := models.NewRecurrenceRange()
typ, err := models.ParseRecurrencePatternType("daily")
require.NoError(t, err)
pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
pat.SetInterval(ptr.To(int32(1)))
pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
rng.SetRecurrenceTimeZone(ptr.To("UTC"))
rec.SetPattern(pat)
rec.SetRangeEscaped(rng)
e.SetRecurrence(rec)
@ -613,19 +576,6 @@ func (s *ICSUnitSuite) TestEventConversion() {
assert.Contains(t, out, "STATUS:CANCELLED", "cancelled status")
},
},
{
name: "not cancelled event",
event: func() *models.Event {
e := baseEvent()
e.SetIsCancelled(ptr.To(false))
return e
},
check: func(out string) {
assert.NotContains(t, out, "STATUS:CANCELLED", "cancelled status")
},
},
{
name: "text body",
event: func() *models.Event {
@ -867,8 +817,8 @@ func (s *ICSUnitSuite) TestEventConversion() {
}
for _, tt := range table {
s.Run(tt.name, func() {
t := s.T()
suite.Run(tt.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
@ -918,8 +868,8 @@ func checkAttendee(t *testing.T, out, check, msg string) {
assert.ElementsMatch(t, as, bs, fmt.Sprintf("fields %s", msg))
}
func (s *ICSUnitSuite) TestAttendees() {
t := s.T()
func (suite *ICSUnitSuite) TestAttendees() {
t := suite.T()
table := []struct {
name string
@ -945,17 +895,6 @@ func (s *ICSUnitSuite) TestAttendees() {
"attendee")
},
},
{
name: "attendee with internal exchange representation for email",
att: [][]string{{
"/o=ExchangeLabs/ou=ExchangeAdministrative Group(FY...LT)/cn=Recipients/cn=883...4a-John Doe",
"required",
"declined",
}},
check: func(out string) {
assert.NotContains(t, out, "ATTENDEE")
},
},
{
name: "multiple attendees",
att: [][]string{
@ -986,8 +925,8 @@ func (s *ICSUnitSuite) TestAttendees() {
}
for _, tt := range table {
s.Run(tt.name, func() {
t := s.T()
suite.Run(tt.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
@ -1108,8 +1047,8 @@ func checkAttachment(t *testing.T, out, check, msg string) {
assert.ElementsMatch(t, as, bs, fmt.Sprintf("fields %s", msg))
}
func (s *ICSUnitSuite) TestAttachments() {
t := s.T()
func (suite *ICSUnitSuite) TestAttachments() {
t := suite.T()
type attachment struct {
cid string // contentid
@ -1165,8 +1104,8 @@ func (s *ICSUnitSuite) TestAttachments() {
}
for _, tt := range table {
s.Run(tt.name, func() {
t := s.T()
suite.Run(tt.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
@ -1209,7 +1148,7 @@ func (s *ICSUnitSuite) TestAttachments() {
}
}
func (s *ICSUnitSuite) TestCancellations() {
func (suite *ICSUnitSuite) TestCancellations() {
table := []struct {
name string
cancelledIds []string
@ -1233,8 +1172,8 @@ func (s *ICSUnitSuite) TestCancellations() {
}
for _, tt := range table {
s.Run(tt.name, func() {
t := s.T()
suite.Run(tt.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
@ -1297,7 +1236,7 @@ func eventToJSON(e *models.Event) ([]byte, error) {
return bts, err
}
func (s *ICSUnitSuite) TestEventExceptions() {
func (suite *ICSUnitSuite) TestEventExceptions() {
table := []struct {
name string
event func() *models.Event
@ -1319,7 +1258,7 @@ func (s *ICSUnitSuite) TestEventExceptions() {
exception.SetEnd(newEnd)
parsed, err := eventToMap(exception)
require.NoError(s.T(), err, "parsing exception")
require.NoError(suite.T(), err, "parsing exception")
// add exception event to additional data
e.SetAdditionalData(map[string]any{
@ -1338,15 +1277,15 @@ func (s *ICSUnitSuite) TestEventExceptions() {
}
}
assert.Equal(s.T(), 2, events, "number of events")
assert.Equal(suite.T(), 2, events, "number of events")
assert.Contains(s.T(), out, "RECURRENCE-ID:20210101T120000Z", "recurrence id")
assert.Contains(suite.T(), out, "RECURRENCE-ID:20210101T120000Z", "recurrence id")
assert.Contains(s.T(), out, "SUMMARY:Subject", "original event")
assert.Contains(s.T(), out, "SUMMARY:Exception", "exception event")
assert.Contains(suite.T(), out, "SUMMARY:Subject", "original event")
assert.Contains(suite.T(), out, "SUMMARY:Exception", "exception event")
assert.Contains(s.T(), out, "DTSTART:20210101T130000Z", "new start time")
assert.Contains(s.T(), out, "DTEND:20210101T140000Z", "new end time")
assert.Contains(suite.T(), out, "DTSTART:20210101T130000Z", "new start time")
assert.Contains(suite.T(), out, "DTEND:20210101T140000Z", "new end time")
},
},
{
@ -1375,10 +1314,10 @@ func (s *ICSUnitSuite) TestEventExceptions() {
exception2.SetEnd(newEnd)
parsed1, err := eventToMap(exception1)
require.NoError(s.T(), err, "parsing exception 1")
require.NoError(suite.T(), err, "parsing exception 1")
parsed2, err := eventToMap(exception2)
require.NoError(s.T(), err, "parsing exception 2")
require.NoError(suite.T(), err, "parsing exception 2")
// add exception event to additional data
e.SetAdditionalData(map[string]any{
@ -1397,230 +1336,36 @@ func (s *ICSUnitSuite) TestEventExceptions() {
}
}
assert.Equal(s.T(), 3, events, "number of events")
assert.Equal(suite.T(), 3, events, "number of events")
assert.Contains(s.T(), out, "RECURRENCE-ID:20210101T120000Z", "recurrence id 1")
assert.Contains(s.T(), out, "RECURRENCE-ID:20210102T120000Z", "recurrence id 2")
assert.Contains(suite.T(), out, "RECURRENCE-ID:20210101T120000Z", "recurrence id 1")
assert.Contains(suite.T(), out, "RECURRENCE-ID:20210102T120000Z", "recurrence id 2")
assert.Contains(s.T(), out, "SUMMARY:Subject", "original event")
assert.Contains(s.T(), out, "SUMMARY:Exception 1", "exception event 1")
assert.Contains(s.T(), out, "SUMMARY:Exception 2", "exception event 2")
assert.Contains(suite.T(), out, "SUMMARY:Subject", "original event")
assert.Contains(suite.T(), out, "SUMMARY:Exception 1", "exception event 1")
assert.Contains(suite.T(), out, "SUMMARY:Exception 2", "exception event 2")
assert.Contains(s.T(), out, "DTSTART:20210101T130000Z", "new start time 1")
assert.Contains(s.T(), out, "DTEND:20210101T140000Z", "new end time 1")
assert.Contains(suite.T(), out, "DTSTART:20210101T130000Z", "new start time 1")
assert.Contains(suite.T(), out, "DTEND:20210101T140000Z", "new end time 1")
assert.Contains(s.T(), out, "DTSTART:20210102T130000Z", "new start time 2")
assert.Contains(s.T(), out, "DTEND:20210102T140000Z", "new end time 2")
assert.Contains(suite.T(), out, "DTSTART:20210102T130000Z", "new start time 2")
assert.Contains(suite.T(), out, "DTEND:20210102T140000Z", "new end time 2")
},
},
}
for _, tt := range table {
s.Run(tt.name, func() {
ctx, flush := tester.NewContext(s.T())
suite.Run(tt.name, func() {
ctx, flush := tester.NewContext(suite.T())
defer flush()
bts, err := eventToJSON(tt.event())
require.NoError(s.T(), err, "getting serialized content")
require.NoError(suite.T(), err, "getting serialized content")
out, err := FromJSON(ctx, bts)
require.NoError(s.T(), err, "converting to ics")
require.NoError(suite.T(), err, "converting to ics")
tt.check(out)
})
}
}
func (s *ICSUnitSuite) TestGetRecurrenceTimezone() {
table := []struct {
name string
intz string
outtz string
}{
{
name: "empty",
intz: "",
outtz: "UTC",
},
{
name: "utc",
intz: "UTC",
outtz: "UTC",
},
{
name: "simple",
intz: "Asia/Kolkata",
outtz: "Asia/Kolkata",
},
{
name: "windows tz",
intz: "India Standard Time",
outtz: "Asia/Kolkata",
},
{
name: "non canonical",
intz: "Asia/Calcutta",
outtz: "Asia/Kolkata",
},
}
for _, tt := range table {
s.Run(tt.name, func() {
ctx, flush := tester.NewContext(s.T())
defer flush()
event := baseEvent()
if len(tt.intz) > 0 {
recur := models.NewPatternedRecurrence()
rp := models.NewRecurrenceRange()
rp.SetRecurrenceTimeZone(ptr.To(tt.intz))
recur.SetRangeEscaped(rp)
event.SetRecurrence(recur)
}
timezone, err := getRecurrenceTimezone(ctx, event)
require.NoError(s.T(), err)
assert.Equal(s.T(), tt.outtz, timezone.String())
})
}
}
func (s *ICSUnitSuite) TestAddTimezoneComponents() {
event := baseEvent()
recur := models.NewPatternedRecurrence()
rp := models.NewRecurrenceRange()
rp.SetRecurrenceTimeZone(ptr.To("Asia/Kolkata"))
recur.SetRangeEscaped(rp)
event.SetRecurrence(recur)
ctx, flush := tester.NewContext(s.T())
defer flush()
cal := ics.NewCalendar()
err := addTimeZoneComponents(ctx, cal, event)
require.NoError(s.T(), err)
text := cal.Serialize()
assert.Contains(s.T(), text, "BEGIN:VTIMEZONE", "beginning of timezone")
assert.Contains(s.T(), text, "TZID:Asia/Kolkata", "timezone id")
assert.Contains(s.T(), text, "END:VTIMEZONE", "end of timezone")
}
func (s *ICSUnitSuite) TestAddTime() {
locak, err := time.LoadLocation("Asia/Kolkata")
require.NoError(s.T(), err)
table := []struct {
name string
prop ics.ComponentProperty
time time.Time
allDay bool
loc *time.Location
exp string
}{
{
name: "utc",
prop: ics.ComponentPropertyDtStart,
time: time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC),
allDay: false,
loc: time.UTC,
exp: "DTSTART:20210102T030405Z",
},
{
name: "local",
prop: ics.ComponentPropertyDtStart,
time: time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC),
allDay: false,
loc: locak,
exp: "DTSTART;TZID=Asia/Kolkata:20210102T083405",
},
{
name: "all day",
prop: ics.ComponentPropertyDtStart,
time: time.Date(2021, 1, 2, 0, 0, 0, 0, time.UTC),
allDay: true,
loc: time.UTC,
exp: "DTSTART;VALUE=DATE:20210102",
},
{
name: "all day local",
prop: ics.ComponentPropertyDtStart,
time: time.Date(2021, 1, 2, 0, 0, 0, 0, time.UTC),
allDay: true,
loc: locak,
exp: "DTSTART;VALUE=DATE;TZID=Asia/Kolkata:20210102",
},
{
name: "end",
prop: ics.ComponentPropertyDtEnd,
time: time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC),
allDay: false,
loc: time.UTC,
exp: "DTEND:20210102T030405Z",
},
{
// This won't happen in practice, but it's a good test to have for loc handling
name: "windows tz",
prop: ics.ComponentPropertyDtStart,
time: time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC),
allDay: false,
loc: time.FixedZone("India Standard Time", 5*60*60+30*60),
exp: "DTSTART;TZID=India Standard Time:20210102T083405",
},
}
for _, tt := range table {
s.Run(tt.name, func() {
cal := ics.NewCalendar()
evt := cal.AddEvent("id")
addTime(evt, tt.prop, tt.time, tt.allDay, tt.loc)
expSplits := strings.FieldsFunc(tt.exp, func(c rune) bool {
return c == ':' || c == ';'
})
text := cal.Serialize()
checkLine := ""
for _, l := range strings.Split(text, "\r\n") {
if strings.HasPrefix(l, string(tt.prop)) {
checkLine = l
break
}
}
actSplits := strings.FieldsFunc(checkLine, func(c rune) bool {
return c == ':' || c == ';'
})
assert.Greater(s.T(), len(checkLine), 0, "line not found")
assert.Equal(s.T(), len(expSplits), len(actSplits), "length of fields")
assert.ElementsMatch(s.T(), expSplits, actSplits, "fields")
})
}
}
// This tests and ensures that the generated data is in the format
// that we expect
func (s *ICSUnitSuite) TestGetTZDataKeyValues() {
for key := range tzdata.TZData {
s.Run(key, func() {
ctx, flush := tester.NewContext(s.T())
defer flush()
data, err := getTZDataKeyValues(ctx, key)
require.NoError(s.T(), err)
assert.NotEmpty(s.T(), data, "data")
assert.NotContains(s.T(), data, "BEGIN", "beginning of timezone") // should be stripped
assert.NotContains(s.T(), data, "END", "end of timezone") // should be stripped
assert.NotContains(s.T(), data, "TZID", "timezone id") // should be stripped
assert.Contains(s.T(), data, "DTSTART", "start time")
assert.Contains(s.T(), data, "TZOFFSETFROM", "offset from")
})
}
}

File diff suppressed because it is too large

View File

@ -1,35 +0,0 @@
#!/bin/sh
set -eo pipefail
if ! echo "$PWD" | grep -q '/tzdata$'; then
echo "Please run this script from the tzdata dir"
exit 1
fi
# TODO: Generate from https://www.iana.org/time-zones
if [ ! -d /tmp/corso-tzdata ]; then
git clone --depth 1 https://github.com/add2cal/timezones-ical-library.git /tmp/corso-tzdata
else
cd /tmp/corso-tzdata
git pull
cd -
fi
# Generate a huge go file with all the timezones
echo "package tzdata" >data.go
echo "" >>data.go
echo "var TZData = map[string]string{" >>data.go
find /tmp/corso-tzdata/ -name '*.ics' | while read -r f; do
tz=$(echo "$f" | sed 's|/tmp/corso-tzdata/api/||;s|\.ics$||')
echo "Processing $tz"
printf "\t\"%s\": \`" "$tz" >>data.go
cat "$f" | grep -Ev "(BEGIN:|END:|TZID:)" |
sed 's|`|\\`|g;s|\r||;s|TZID:/timezones-ical-library/|TZID:|' |
perl -pe 'chomp if eof' >>data.go
echo "\`," >>data.go
done
echo "}" >>data.go

View File

@ -86,7 +86,7 @@ func FromJSON(ctx context.Context, body []byte) (string, error) {
data, err := api.BytesToContactable(body)
if err != nil {
return "", clues.WrapWC(ctx, err, "converting to contactable").
return "", clues.Wrap(err, "converting to contactable").
With("body_length", len(body))
}

View File

@ -4,7 +4,6 @@ import (
"context"
"fmt"
"path/filepath"
"reflect"
"sync"
"time"
@ -25,14 +24,11 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/kopia/retention"
"github.com/alcionai/corso/src/pkg/control/repository"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/storage"
)
const (
corsoWrapperAlertNamespace = "corso-kopia-wrapper"
defaultKopiaConfigDir = "/tmp/"
kopiaConfigFileTemplate = "repository-%s.config"
defaultCompressor = "zstd-better-compression"
@ -59,15 +55,6 @@ const (
minEpochDurationUpperBound = 7 * 24 * time.Hour
)
// allValidCompressors is the set of compression algorithms either currently
// being used or that were previously used. Use this during the config verify
// command to avoid spurious errors. We can revisit whether we want to update
// the config in those old repos at a later time.
var allValidCompressors = map[compression.Name]struct{}{
compression.Name(defaultCompressor): {},
compression.Name("s2-default"): {},
}
var (
ErrSettingDefaultConfig = clues.New("setting default repo config values")
ErrorRepoAlreadyExists = clues.New("repo already exists")
@ -158,16 +145,12 @@ func (w *conn) Initialize(
RetentionPeriod: blobCfg.RetentionPeriod,
}
var initErr error
if err = repo.Initialize(ctx, bst, &kopiaOpts, cfg.CorsoPassphrase); err != nil {
if !errors.Is(err, repo.ErrAlreadyInitialized) {
return clues.WrapWC(ctx, err, "initializing repo")
if errors.Is(err, repo.ErrAlreadyInitialized) {
return clues.StackWC(ctx, ErrorRepoAlreadyExists, err)
}
logger.Ctx(ctx).Info("repo already exists, verifying repo config")
initErr = clues.StackWC(ctx, ErrorRepoAlreadyExists, err)
return clues.WrapWC(ctx, err, "initializing repo")
}
err = w.commonConnect(
@ -179,10 +162,7 @@ func (w *conn) Initialize(
cfg.CorsoPassphrase,
defaultCompressor)
if err != nil {
// If the repo already exists then give some indication of that to help the
// user debug. For example, they could have called init again on a repo that
// already exists but accidentally used a different passphrase.
return clues.Stack(err, initErr)
return err
}
if err := w.setDefaultConfigValues(ctx); err != nil {
@ -756,115 +736,3 @@ func (w *conn) updatePersistentConfig(
"persisting updated config").
OrNil()
}
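A hedged sketch of how a caller is expected to consume the ErrorRepoAlreadyExists sentinel stacked in Initialize above:

err := k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
if errors.Is(err, ErrorRepoAlreadyExists) {
	// Init was called on an existing repo; connect instead, or surface a
	// hint that the supplied passphrase may not match the original one.
}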
func (w *conn) verifyDefaultPolicyConfigOptions(
ctx context.Context,
errs *fault.Bus,
) {
const alertName = "kopia-global-policy"
globalPol, err := w.getGlobalPolicyOrEmpty(ctx)
if err != nil {
errs.AddAlert(ctx, fault.NewAlert(
err.Error(),
corsoWrapperAlertNamespace,
"fetch-policy",
alertName,
nil))
return
}
ctx = clues.Add(ctx, "current_global_policy", globalPol.String())
if _, ok := allValidCompressors[globalPol.CompressionPolicy.CompressorName]; !ok {
errs.AddAlert(ctx, fault.NewAlert(
"unexpected compressor",
corsoWrapperAlertNamespace,
"compressor",
alertName,
nil))
}
// Need to use deep equals because the values are pointers to optional types.
// That makes regular equality checks fail even if the data contained in each
// policy is the same.
if !reflect.DeepEqual(globalPol.RetentionPolicy, defaultRetention) {
errs.AddAlert(ctx, fault.NewAlert(
"unexpected retention policy",
corsoWrapperAlertNamespace,
"retention-policy",
alertName,
nil))
}
if globalPol.SchedulingPolicy.Interval() != defaultSchedulingInterval {
errs.AddAlert(ctx, fault.NewAlert(
"unexpected scheduling interval",
corsoWrapperAlertNamespace,
"scheduling-interval",
alertName,
nil))
}
}
func (w *conn) verifyRetentionConfig(
ctx context.Context,
errs *fault.Bus,
) {
const alertName = "kopia-object-locking"
directRepo, ok := w.Repository.(repo.DirectRepository)
if !ok {
errs.AddAlert(ctx, fault.NewAlert(
"",
corsoWrapperAlertNamespace,
"fetch-direct-repo",
alertName,
nil))
return
}
blobConfig, maintenanceParams, err := getRetentionConfigs(ctx, directRepo)
if err != nil {
errs.AddAlert(ctx, fault.NewAlert(
err.Error(),
corsoWrapperAlertNamespace,
"fetch-config",
alertName,
nil))
return
}
err = retention.OptsFromConfigs(*blobConfig, *maintenanceParams).
Verify(ctx)
if err != nil {
errs.AddAlert(ctx, fault.NewAlert(
err.Error(),
corsoWrapperAlertNamespace,
"config-values",
alertName,
nil))
}
}
// verifyDefaultConfigOptions checks the following configurations:
// kopia global policy:
// - kopia snapshot retention is disabled
// - kopia compression matches the default compression for corso
// - kopia scheduling is disabled
//
// object locking:
// - maintenance and blob config blob parameters are consistent (i.e. all
// enabled or all disabled)
func (w *conn) verifyDefaultConfigOptions(
ctx context.Context,
errs *fault.Bus,
) {
logger.Ctx(ctx).Info("verifying config parameters")
w.verifyDefaultPolicyConfigOptions(ctx, errs)
w.verifyRetentionConfig(ctx, errs)
}
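Illustrative consumption of the alerts these checks emit, following the same pattern as the integration test later in this diff:

errs := fault.New(true)
w.verifyDefaultConfigOptions(ctx, errs)

// Misconfigurations surface as alerts rather than hard failures.
if len(errs.Alerts()) > 0 {
	logger.Ctx(ctx).
		With("alert_count", len(errs.Alerts())).
		Info("repo config drift detected")
}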

View File

@ -3,7 +3,6 @@ package kopia
import (
"context"
"math"
"strings"
"testing"
"time"
@ -16,13 +15,11 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/ptr"
strTD "github.com/alcionai/corso/src/internal/common/str/testdata"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control/repository"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/storage"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)
@ -96,7 +93,7 @@ func TestWrapperIntegrationSuite(t *testing.T) {
})
}
func (suite *WrapperIntegrationSuite) TestInitialize_SamePassphrase() {
func (suite *WrapperIntegrationSuite) TestRepoExistsError() {
t := suite.T()
repoNameHash := strTD.NewHashForRepoConfigName()
@ -112,46 +109,6 @@ func (suite *WrapperIntegrationSuite) TestInitialize_SamePassphrase() {
err = k.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
err = k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
assert.NoError(t, err, clues.ToCore(err))
}
func (suite *WrapperIntegrationSuite) TestInitialize_IncorrectPassphrase() {
t := suite.T()
repoNameHash := strTD.NewHashForRepoConfigName()
ctx, flush := tester.NewContext(t)
defer flush()
st1 := storeTD.NewFilesystemStorage(t)
k := NewConn(st1)
err := k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
require.NoError(t, err, clues.ToCore(err))
err = k.Close(ctx)
require.NoError(t, err, clues.ToCore(err))
// Hacky way to edit the existing passphrase for the repo so we can check that
// we get a sensible error back.
st2 := st1
st2.Config = maps.Clone(st1.Config)
var found bool
for k, v := range st2.Config {
if strings.Contains(strings.ToLower(k), "passphrase") {
st2.Config[k] = v + "1"
found = true
break
}
}
require.True(t, found, "unable to update passphrase for test")
k = NewConn(st2)
err = k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
assert.Error(t, err, clues.ToCore(err))
assert.ErrorIs(t, err, ErrorRepoAlreadyExists)
@ -822,281 +779,3 @@ func (suite *ConnRetentionIntegrationSuite) TestInitWithAndWithoutRetention() {
// Some checks to make sure retention was fully initialized as expected.
checkRetentionParams(t, ctx, k2, blob.Governance, time.Hour*48, assert.True)
}
// TestVerifyDefaultConfigOptions checks that if the repo has misconfigured
// values an error is returned. This is easiest to do in a test suite that
// allows object locking because some of the configured values that are checked
// relate to object locking.
func (suite *ConnRetentionIntegrationSuite) TestVerifyDefaultConfigOptions() {
nonzeroOpt := policy.OptionalInt(42)
table := []struct {
name string
setupRepo func(context.Context, *testing.T, *conn)
expectAlerts int
}{
{
name: "ValidConfigs NoRetention",
setupRepo: func(context.Context, *testing.T, *conn) {},
},
{
name: "ValidConfigs Retention",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
err := con.setRetentionParameters(
ctx,
repository.Retention{
Mode: ptr.To(repository.GovernanceRetention),
Duration: ptr.To(48 * time.Hour),
Extend: ptr.To(true),
})
require.NoError(t, err, clues.ToCore(err))
},
},
{
name: "ValidRetentionButNotExtending",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
err := con.setRetentionParameters(
ctx,
repository.Retention{
Mode: ptr.To(repository.GovernanceRetention),
Duration: ptr.To(48 * time.Hour),
Extend: ptr.To(false),
})
require.NoError(t, err, clues.ToCore(err))
},
expectAlerts: 1,
},
{
name: "ExtendingRetentionButNotConfigured",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
err := con.setRetentionParameters(
ctx,
repository.Retention{
Extend: ptr.To(true),
})
require.NoError(t, err, clues.ToCore(err))
},
expectAlerts: 1,
},
{
name: "NonZeroScheduleInterval",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
pol, err := con.getGlobalPolicyOrEmpty(ctx)
require.NoError(t, err, clues.ToCore(err))
updateSchedulingOnPolicy(time.Hour, pol)
err = con.writeGlobalPolicy(ctx, "test", pol)
require.NoError(t, err, clues.ToCore(err))
},
expectAlerts: 1,
},
{
name: "OldValidCompressor",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
pol, err := con.getGlobalPolicyOrEmpty(ctx)
require.NoError(t, err, clues.ToCore(err))
_, err = updateCompressionOnPolicy("s2-default", pol)
require.NoError(t, err, clues.ToCore(err))
err = con.writeGlobalPolicy(ctx, "test", pol)
require.NoError(t, err, clues.ToCore(err))
},
expectAlerts: 0,
},
{
name: "NonDefaultCompression",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
pol, err := con.getGlobalPolicyOrEmpty(ctx)
require.NoError(t, err, clues.ToCore(err))
_, err = updateCompressionOnPolicy("pgzip-best-speed", pol)
require.NoError(t, err, clues.ToCore(err))
err = con.writeGlobalPolicy(ctx, "test", pol)
require.NoError(t, err, clues.ToCore(err))
},
expectAlerts: 1,
},
{
name: "NonZeroSnapshotRetentionLatest",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
retention := policy.RetentionPolicy{
KeepLatest: &nonzeroOpt,
KeepHourly: &zeroOpt,
KeepWeekly: &zeroOpt,
KeepDaily: &zeroOpt,
KeepMonthly: &zeroOpt,
KeepAnnual: &zeroOpt,
}
pol, err := con.getGlobalPolicyOrEmpty(ctx)
require.NoError(t, err, clues.ToCore(err))
updateRetentionOnPolicy(retention, pol)
err = con.writeGlobalPolicy(ctx, "test", pol)
require.NoError(t, err, clues.ToCore(err))
},
expectAlerts: 1,
},
{
name: "NonZeroSnapshotRetentionHourly",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
retention := policy.RetentionPolicy{
KeepLatest: &zeroOpt,
KeepHourly: &nonzeroOpt,
KeepWeekly: &zeroOpt,
KeepDaily: &zeroOpt,
KeepMonthly: &zeroOpt,
KeepAnnual: &zeroOpt,
}
pol, err := con.getGlobalPolicyOrEmpty(ctx)
require.NoError(t, err, clues.ToCore(err))
updateRetentionOnPolicy(retention, pol)
err = con.writeGlobalPolicy(ctx, "test", pol)
require.NoError(t, err, clues.ToCore(err))
},
expectAlerts: 1,
},
{
name: "NonZeroSnapshotRetentionWeekly",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
retention := policy.RetentionPolicy{
KeepLatest: &zeroOpt,
KeepHourly: &zeroOpt,
KeepWeekly: &nonzeroOpt,
KeepDaily: &zeroOpt,
KeepMonthly: &zeroOpt,
KeepAnnual: &zeroOpt,
}
pol, err := con.getGlobalPolicyOrEmpty(ctx)
require.NoError(t, err, clues.ToCore(err))
updateRetentionOnPolicy(retention, pol)
err = con.writeGlobalPolicy(ctx, "test", pol)
require.NoError(t, err, clues.ToCore(err))
},
expectAlerts: 1,
},
{
name: "NonZeroSnapshotRetentionDaily",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
retention := policy.RetentionPolicy{
KeepLatest: &zeroOpt,
KeepHourly: &zeroOpt,
KeepWeekly: &zeroOpt,
KeepDaily: &nonzeroOpt,
KeepMonthly: &zeroOpt,
KeepAnnual: &zeroOpt,
}
pol, err := con.getGlobalPolicyOrEmpty(ctx)
require.NoError(t, err, clues.ToCore(err))
updateRetentionOnPolicy(retention, pol)
err = con.writeGlobalPolicy(ctx, "test", pol)
require.NoError(t, err, clues.ToCore(err))
},
expectAlerts: 1,
},
{
name: "NonZeroSnapshotRetentionMonthly",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
retention := policy.RetentionPolicy{
KeepLatest: &zeroOpt,
KeepHourly: &zeroOpt,
KeepWeekly: &zeroOpt,
KeepDaily: &zeroOpt,
KeepMonthly: &nonzeroOpt,
KeepAnnual: &zeroOpt,
}
pol, err := con.getGlobalPolicyOrEmpty(ctx)
require.NoError(t, err, clues.ToCore(err))
updateRetentionOnPolicy(retention, pol)
err = con.writeGlobalPolicy(ctx, "test", pol)
require.NoError(t, err, clues.ToCore(err))
},
expectAlerts: 1,
},
{
name: "NonZeroSnapshotRetentionAnnual",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
retention := policy.RetentionPolicy{
KeepLatest: &zeroOpt,
KeepHourly: &zeroOpt,
KeepWeekly: &zeroOpt,
KeepDaily: &zeroOpt,
KeepMonthly: &zeroOpt,
KeepAnnual: &nonzeroOpt,
}
pol, err := con.getGlobalPolicyOrEmpty(ctx)
require.NoError(t, err, clues.ToCore(err))
updateRetentionOnPolicy(retention, pol)
err = con.writeGlobalPolicy(ctx, "test", pol)
require.NoError(t, err, clues.ToCore(err))
},
expectAlerts: 1,
},
{
name: "MultipleAlerts",
setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
err := con.setRetentionParameters(
ctx,
repository.Retention{
Mode: ptr.To(repository.GovernanceRetention),
Duration: ptr.To(48 * time.Hour),
Extend: ptr.To(false),
})
require.NoError(t, err, clues.ToCore(err))
pol, err := con.getGlobalPolicyOrEmpty(ctx)
require.NoError(t, err, clues.ToCore(err))
updateSchedulingOnPolicy(time.Hour, pol)
_, err = updateCompressionOnPolicy("pgzip-best-speed", pol)
require.NoError(t, err, clues.ToCore(err))
err = con.writeGlobalPolicy(ctx, "test", pol)
require.NoError(t, err, clues.ToCore(err))
},
expectAlerts: 3,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
t.Cleanup(flush)
repoNameHash := strTD.NewHashForRepoConfigName()
st1 := storeTD.NewPrefixedS3Storage(t)
con := NewConn(st1)
err := con.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
require.NoError(t, err, clues.ToCore(err))
t.Cleanup(func() { con.Close(ctx) })
test.setupRepo(ctx, t, con)
errs := fault.New(true)
con.verifyDefaultConfigOptions(ctx, errs)
// There shouldn't be any reported failures because this is just to check
// if things are alright.
assert.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
assert.Len(t, errs.Alerts(), test.expectAlerts)
})
}
}

View File

@ -665,12 +665,7 @@ func (w Wrapper) RepoMaintenance(
ctx context.Context,
storer store.Storer,
opts repository.Maintenance,
errs *fault.Bus,
) error {
// Check the existing config parameters first so that even if we fail for some
// reason below we know we checked the config.
w.c.verifyDefaultConfigOptions(ctx, errs)
kopiaSafety, err := translateSafety(opts.Safety)
if err != nil {
return clues.WrapWC(ctx, err, "identifying safety level")
@ -701,9 +696,8 @@ func (w Wrapper) RepoMaintenance(
// Even if we fail this we don't want to fail the overall maintenance
// operation since there's other useful work we can still do.
if err := cleanupOrphanedData(ctx, storer, w.c, buffer, time.Now); err != nil {
errs.AddRecoverable(ctx, clues.Wrap(
err,
"cleaning up failed backups, some space may not be freed"))
logger.CtxErr(ctx, err).Info(
"cleaning up failed backups, some space may not be freed")
}
}

View File

@ -27,6 +27,7 @@ import (
strTD "github.com/alcionai/corso/src/internal/common/str/testdata"
"github.com/alcionai/corso/src/internal/data"
dataMock "github.com/alcionai/corso/src/internal/data/mock"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
istats "github.com/alcionai/corso/src/internal/stats"
"github.com/alcionai/corso/src/internal/tester"
@ -37,7 +38,6 @@ import (
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)
@ -198,7 +198,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_FirstRun_NoChanges() {
Type: repository.MetadataMaintenance,
}
err = w.RepoMaintenance(ctx, nil, opts, fault.New(true))
err = w.RepoMaintenance(ctx, nil, opts)
require.NoError(t, err, clues.ToCore(err))
}
@ -220,7 +220,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_NoForce_Fails
}
// This will set the user.
err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
err = w.RepoMaintenance(ctx, nil, mOpts)
require.NoError(t, err, clues.ToCore(err))
err = k.Close(ctx)
@ -236,7 +236,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_NoForce_Fails
var notOwnedErr maintenance.NotOwnedError
err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
err = w.RepoMaintenance(ctx, nil, mOpts)
assert.ErrorAs(t, err, &notOwnedErr, clues.ToCore(err))
}
@ -258,7 +258,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_Force_Succeed
}
// This will set the user.
err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
err = w.RepoMaintenance(ctx, nil, mOpts)
require.NoError(t, err, clues.ToCore(err))
err = k.Close(ctx)
@ -275,13 +275,13 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_Force_Succeed
mOpts.Force = true
// This will set the user.
err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
err = w.RepoMaintenance(ctx, nil, mOpts)
require.NoError(t, err, clues.ToCore(err))
mOpts.Force = false
// Running without force should succeed now.
err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
err = w.RepoMaintenance(ctx, nil, mOpts)
require.NoError(t, err, clues.ToCore(err))
}
@ -715,6 +715,7 @@ func (suite *RetentionIntegrationSuite) TestSetRetentionParameters() {
}
func (suite *RetentionIntegrationSuite) TestSetRetentionParameters_And_Maintenance() {
suite.T().Skip()
t := suite.T()
ctx, flush := tester.NewContext(t)
@ -733,7 +734,7 @@ func (suite *RetentionIntegrationSuite) TestSetRetentionParameters_And_Maintenan
// This will set common maintenance config parameters. There's some interplay
// between the maintenance schedule and retention period that we want to check
// below.
err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
err = w.RepoMaintenance(ctx, nil, mOpts)
require.NoError(t, err, clues.ToCore(err))
// Enable retention.
@ -796,6 +797,8 @@ func (suite *RetentionIntegrationSuite) TestSetRetentionParameters_And_Maintenan
}
func (suite *RetentionIntegrationSuite) TestSetAndUpdateRetentionParameters_RunMaintenance() {
suite.T().Skip()
table := []struct {
name string
reopen bool
@ -838,7 +841,7 @@ func (suite *RetentionIntegrationSuite) TestSetAndUpdateRetentionParameters_RunM
// This will set common maintenance config parameters. There's some interplay
// between the maintenance schedule and retention period that we want to check
// below.
err = w.RepoMaintenance(ctx, ms, mOpts, fault.New(true))
err = w.RepoMaintenance(ctx, ms, mOpts)
require.NoError(t, err, clues.ToCore(err))
// Enable retention.
@ -882,7 +885,7 @@ func (suite *RetentionIntegrationSuite) TestSetAndUpdateRetentionParameters_RunM
// Run full maintenance again. This should extend object locks for things if
// they exist.
err = w.RepoMaintenance(ctx, ms, mOpts, fault.New(true))
err = w.RepoMaintenance(ctx, ms, mOpts)
require.NoError(t, err, clues.ToCore(err))
})
}

View File

@ -2,7 +2,6 @@ package m365
import (
"context"
"fmt"
"github.com/alcionai/clues"
@ -14,10 +13,7 @@ import (
"github.com/alcionai/corso/src/internal/m365/service/groups"
"github.com/alcionai/corso/src/internal/m365/service/onedrive"
"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
"github.com/alcionai/corso/src/internal/m365/service/teamschats"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/account"
bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
@ -26,33 +22,9 @@ import (
"github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)
type backupHandler interface {
produceBackupCollectionser
}
type produceBackupCollectionser interface {
ProduceBackupCollections(
ctx context.Context,
bpc inject.BackupProducerConfig,
ac api.Client,
creds account.M365Config,
su support.StatusUpdater,
counter *count.Bus,
errs *fault.Bus,
) (
collections []data.BackupCollection,
excludeItems *prefixmatcher.StringSetMatcher,
// canUsePreviousBackup can always be returned as true for implementations
// that always return a tombstone collection when the metadata read fails
canUsePreviousBackup bool,
err error,
)
}
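A minimal, hypothetical implementation of the interface above, showing the shape each service's NewBackup() value must satisfy:

type noopBackup struct{}

func (noopBackup) ProduceBackupCollections(
	ctx context.Context,
	bpc inject.BackupProducerConfig,
	ac api.Client,
	creds account.M365Config,
	su support.StatusUpdater,
	counter *count.Bus,
	errs *fault.Bus,
) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, bool, error) {
	// Returns no collections and claims the previous backup is usable.
	return nil, nil, true, nil
}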
// ---------------------------------------------------------------------------
// Data Collections
// ---------------------------------------------------------------------------
@ -91,38 +63,65 @@ func (ctrl *Controller) ProduceBackupCollections(
canUsePreviousBackup bool
)
var handler backupHandler
switch service {
case path.ExchangeService:
handler = exchange.NewBackup()
colls, excludeItems, canUsePreviousBackup, err = exchange.ProduceBackupCollections(
ctx,
bpc,
ctrl.AC,
ctrl.credentials,
ctrl.UpdateStatus,
counter,
errs)
if err != nil {
return nil, nil, false, err
}
case path.OneDriveService:
handler = onedrive.NewBackup()
colls, excludeItems, canUsePreviousBackup, err = onedrive.ProduceBackupCollections(
ctx,
bpc,
ctrl.AC,
ctrl.credentials,
ctrl.UpdateStatus,
counter,
errs)
if err != nil {
return nil, nil, false, err
}
case path.SharePointService:
handler = sharepoint.NewBackup()
colls, excludeItems, canUsePreviousBackup, err = sharepoint.ProduceBackupCollections(
ctx,
bpc,
ctrl.AC,
ctrl.credentials,
ctrl.UpdateStatus,
counter,
errs)
if err != nil {
return nil, nil, false, err
}
case path.GroupsService:
handler = groups.NewBackup()
colls, excludeItems, err = groups.ProduceBackupCollections(
ctx,
bpc,
ctrl.AC,
ctrl.credentials,
ctrl.UpdateStatus,
counter,
errs)
if err != nil {
return nil, nil, false, err
}
case path.TeamsChatsService:
handler = teamschats.NewBackup()
// canUsePreviousBackup can always be returned as true for groups, as we
// return a tombstone collection in case the metadata read fails
canUsePreviousBackup = true
default:
return nil, nil, false, clues.NewWC(ctx, fmt.Sprintf("service not supported: %s", service.HumanString()))
}
colls, excludeItems, canUsePreviousBackup, err = handler.ProduceBackupCollections(
ctx,
bpc,
ctrl.AC,
ctrl.credentials,
ctrl.UpdateStatus,
counter,
errs)
if err != nil {
return nil, nil, false, err
return nil, nil, false, clues.Wrap(clues.NewWC(ctx, service.String()), "service not supported")
}
for _, c := range colls {
@ -154,28 +153,25 @@ func (ctrl *Controller) IsServiceEnabled(
return sharepoint.IsServiceEnabled(ctx, ctrl.AC.Sites(), resourceOwner)
case path.GroupsService:
return groups.IsServiceEnabled(ctx, ctrl.AC.Groups(), resourceOwner)
case path.TeamsChatsService:
return teamschats.IsServiceEnabled(ctx, ctrl.AC.Users(), resourceOwner)
}
return false, clues.Wrap(clues.NewWC(ctx, service.String()), "service not supported")
}
func verifyBackupInputs(sel selectors.Selector, cachedIDs []string) error {
func verifyBackupInputs(sels selectors.Selector, cachedIDs []string) error {
var ids []string
switch sel.Service {
switch sels.Service {
case selectors.ServiceExchange, selectors.ServiceOneDrive:
// Exchange and OneDrive user existence now checked in checkServiceEnabled.
return nil
case selectors.ServiceSharePoint, selectors.ServiceGroups, selectors.ServiceTeamsChats:
case selectors.ServiceSharePoint, selectors.ServiceGroups:
ids = cachedIDs
}
if !filters.Contains(ids).Compare(sel.ID()) {
return clues.Wrap(core.ErrNotFound, "verifying existence of resource").
With("selector_protected_resource", sel.ID())
if !filters.Contains(ids).Compare(sels.ID()) {
return clues.Stack(core.ErrNotFound).With("selector_protected_resource", sels.DiscreteOwner)
}
return nil
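A hedged sketch of the membership check verifyBackupInputs performs: for services whose protected resources are enumerated up front, the selector's resource ID must appear in the cached ID list. slices.Contains stands in for corso's filters.Contains helper, and the service names are simplified.

package main

import (
	"errors"
	"fmt"
	"slices"
)

var errNotFound = errors.New("not found")

func verifyBackupInputs(service, resourceID string, cachedIDs []string) error {
	switch service {
	case "exchange", "onedrive":
		// User existence is checked later, by the service-enabled check.
		return nil
	}

	// SharePoint/Groups: the resource must be among the cached IDs.
	if !slices.Contains(cachedIDs, resourceID) {
		return fmt.Errorf("verifying existence of resource %q: %w", resourceID, errNotFound)
	}

	return nil
}

func main() {
	ids := []string{"site-1", "site-2"}
	fmt.Println(verifyBackupInputs("sharepoint", "site-1", ids)) // <nil>
	fmt.Println(verifyBackupInputs("sharepoint", "site-9", ids)) // not found
}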

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/suite"
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/data/mock"
"github.com/alcionai/corso/src/internal/m365/service/exchange"
@ -18,7 +19,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/control"
@ -36,7 +36,10 @@ import (
type DataCollectionIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
user string
site string
tenantID string
ac api.Client
}
func TestDataCollectionIntgSuite(t *testing.T) {
@ -48,14 +51,29 @@ func TestDataCollectionIntgSuite(t *testing.T) {
}
func (suite *DataCollectionIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
t := suite.T()
suite.user = tconfig.M365UserID(t)
suite.site = tconfig.M365SiteID(t)
acct := tconfig.NewM365Account(t)
creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.tenantID = creds.AzureTenantID
suite.ac, err = api.NewClient(
creds,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))
}
func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
ctx, flush := tester.NewContext(suite.T())
defer flush()
selUsers := []string{suite.m365.User.ID}
selUsers := []string{suite.user}
ctrl := newController(ctx, suite.T(), path.ExchangeService)
tests := []struct {
@ -67,7 +85,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
getSelector: func(t *testing.T) selectors.Selector {
sel := selectors.NewExchangeBackup(selUsers)
sel.Include(sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
sel.DiscreteOwner = suite.m365.User.ID
sel.DiscreteOwner = suite.user
return sel.Selector
},
},
@ -76,7 +94,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
getSelector: func(t *testing.T) selectors.Selector {
sel := selectors.NewExchangeBackup(selUsers)
sel.Include(sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()))
sel.DiscreteOwner = suite.m365.User.ID
sel.DiscreteOwner = suite.user
return sel.Selector
},
},
@ -121,11 +139,11 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
Selector: sel,
}
collections, excludes, canUsePreviousBackup, err := exchange.NewBackup().ProduceBackupCollections(
collections, excludes, canUsePreviousBackup, err := exchange.ProduceBackupCollections(
ctx,
bpc,
suite.m365.AC,
suite.m365.Creds,
suite.ac,
suite.ac.Credentials,
ctrl.UpdateStatus,
count.New(),
fault.New(true))
@ -252,7 +270,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
ctx, flush := tester.NewContext(suite.T())
defer flush()
selSites := []string{suite.m365.Site.ID}
selSites := []string{suite.site}
ctrl := newController(ctx, suite.T(), path.SharePointService)
tests := []struct {
name string
@ -291,10 +309,10 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
Selector: sel,
}
collections, excludes, canUsePreviousBackup, err := sharepoint.NewBackup().ProduceBackupCollections(
collections, excludes, canUsePreviousBackup, err := sharepoint.ProduceBackupCollections(
ctx,
bpc,
suite.m365.AC,
suite.ac,
ctrl.credentials,
ctrl.UpdateStatus,
count.New(),
@ -333,7 +351,8 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
type SPCollectionIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
connector *Controller
user string
}
func TestSPCollectionIntgSuite(t *testing.T) {
@ -345,7 +364,13 @@ func TestSPCollectionIntgSuite(t *testing.T) {
}
func (suite *SPCollectionIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
ctx, flush := tester.NewContext(suite.T())
defer flush()
suite.connector = newController(ctx, suite.T(), path.SharePointService)
suite.user = tconfig.M365UserID(suite.T())
tester.LogTimeOfTest(suite.T())
}
func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
@ -354,20 +379,25 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
ctx, flush := tester.NewContext(t)
defer flush()
ctrl := newController(ctx, t, path.SharePointService)
var (
siteID = tconfig.M365SiteID(t)
ctrl = newController(ctx, t, path.SharePointService)
siteIDs = []string{siteID}
)
_, err := ctrl.PopulateProtectedResourceIDAndName(ctx, suite.m365.Site.ID, nil)
site, err := ctrl.PopulateProtectedResourceIDAndName(ctx, siteID, nil)
require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewSharePointBackup([]string{suite.m365.Site.ID})
sel := selectors.NewSharePointBackup(siteIDs)
sel.Include(sel.LibraryFolders([]string{"foo"}, selectors.PrefixMatch()))
sel.Include(sel.Library("Documents"))
sel.SetDiscreteOwnerIDName(suite.m365.Site.ID, suite.m365.Site.WebURL)
sel.SetDiscreteOwnerIDName(site.ID(), site.Name())
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.Site.Provider,
ProtectedResource: site,
Selector: sel.Selector,
}
@ -385,15 +415,15 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
)
documentsColl, err := path.BuildPrefix(
suite.m365.TenantID,
suite.m365.Site.ID,
suite.connector.tenant,
siteID,
path.SharePointService,
path.LibrariesCategory)
require.NoError(t, err, clues.ToCore(err))
metadataColl, err := path.BuildMetadata(
suite.m365.TenantID,
suite.m365.Site.ID,
suite.connector.tenant,
siteID,
path.SharePointService,
path.LibrariesCategory,
false)
@ -420,19 +450,24 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {
ctx, flush := tester.NewContext(t)
defer flush()
ctrl := newController(ctx, t, path.SharePointService)
var (
siteID = tconfig.M365SiteID(t)
ctrl = newController(ctx, t, path.SharePointService)
siteIDs = []string{siteID}
)
_, err := ctrl.PopulateProtectedResourceIDAndName(ctx, suite.m365.Site.ID, nil)
site, err := ctrl.PopulateProtectedResourceIDAndName(ctx, siteID, nil)
require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewSharePointBackup([]string{suite.m365.Site.ID})
sel := selectors.NewSharePointBackup(siteIDs)
sel.Include(sel.Lists(selectors.Any()))
sel.SetDiscreteOwnerIDName(suite.m365.Site.ID, suite.m365.Site.WebURL)
sel.SetDiscreteOwnerIDName(site.ID(), site.Name())
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.Site.Provider,
ProtectedResource: site,
Selector: sel.Selector,
}
@ -467,7 +502,9 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {
type GroupsCollectionIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
connector *Controller
tenantID string
user string
}
func TestGroupsCollectionIntgSuite(t *testing.T) {
@ -479,7 +516,21 @@ func TestGroupsCollectionIntgSuite(t *testing.T) {
}
func (suite *GroupsCollectionIntgSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.connector = newController(ctx, t, path.GroupsService)
suite.user = tconfig.M365UserID(t)
acct := tconfig.NewM365Account(t)
creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.tenantID = creds.AzureTenantID
tester.LogTimeOfTest(t)
}
func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint() {
@ -488,19 +539,24 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint()
ctx, flush := tester.NewContext(t)
defer flush()
ctrl := newController(ctx, t, path.GroupsService)
var (
groupID = tconfig.M365TeamID(t)
ctrl = newController(ctx, t, path.GroupsService)
groupIDs = []string{groupID}
)
_, err := ctrl.PopulateProtectedResourceIDAndName(ctx, suite.m365.Group.ID, nil)
group, err := ctrl.PopulateProtectedResourceIDAndName(ctx, groupID, nil)
require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
sel := selectors.NewGroupsBackup(groupIDs)
sel.Include(sel.LibraryFolders([]string{"test"}, selectors.PrefixMatch()))
sel.SetDiscreteOwnerIDName(suite.m365.Group.ID, suite.m365.Group.DisplayName)
sel.SetDiscreteOwnerIDName(group.ID(), group.Name())
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.Group.Provider,
ProtectedResource: group,
Selector: sel.Selector,
}
@ -519,8 +575,8 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint()
assert.Greater(t, len(collections), 1)
p, err := path.BuildMetadata(
suite.m365.TenantID,
suite.m365.Group.ID,
suite.tenantID,
groupID,
path.GroupsService,
path.LibrariesCategory,
false)
@ -558,23 +614,31 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
ctx, flush := tester.NewContext(t)
defer flush()
ctrl := newController(ctx, t, path.GroupsService)
var (
groupID = tconfig.M365TeamID(t)
ctrl = newController(ctx, t, path.GroupsService)
groupIDs = []string{groupID}
)
_, err := ctrl.PopulateProtectedResourceIDAndName(ctx, suite.m365.Group.ID, nil)
group, err := ctrl.PopulateProtectedResourceIDAndName(ctx, groupID, nil)
require.NoError(t, err, clues.ToCore(err))
sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
sel := selectors.NewGroupsBackup(groupIDs)
sel.Include(sel.LibraryFolders([]string{"test"}, selectors.PrefixMatch()))
sel.SetDiscreteOwnerIDName(suite.m365.Group.ID, suite.m365.Group.DisplayName)
sel.SetDiscreteOwnerIDName(group.ID(), group.Name())
site, err := suite.connector.AC.Groups().GetRootSite(ctx, groupID)
require.NoError(t, err, clues.ToCore(err))
pth, err := path.Build(
suite.m365.TenantID,
suite.m365.Group.ID,
suite.tenantID,
groupID,
path.GroupsService,
path.LibrariesCategory,
true,
odConsts.SitesPathDir,
suite.m365.Group.RootSite.ID)
ptr.Val(site.GetId()))
require.NoError(t, err, clues.ToCore(err))
mmc := []data.RestoreCollection{
@ -592,7 +656,7 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.Group.Provider,
ProtectedResource: group,
Selector: sel.Selector,
MetadataCollections: mmc,
}
@ -612,8 +676,8 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
assert.Greater(t, len(collections), 1)
p, err := path.BuildMetadata(
suite.m365.TenantID,
suite.m365.Group.ID,
suite.tenantID,
groupID,
path.GroupsService,
path.LibrariesCategory,
false)
@ -626,13 +690,13 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
foundRootTombstone := false
sp, err := path.BuildPrefix(
suite.m365.TenantID,
suite.m365.Group.ID,
suite.tenantID,
groupID,
path.GroupsService,
path.LibrariesCategory)
require.NoError(t, err, clues.ToCore(err))
sp, err = sp.Append(false, odConsts.SitesPathDir, suite.m365.Group.RootSite.ID)
sp, err = sp.Append(false, odConsts.SitesPathDir, ptr.Val(site.GetId()))
require.NoError(t, err, clues.ToCore(err))
for _, coll := range collections {

View File

@ -16,6 +16,7 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/backup/details"
@ -28,7 +29,6 @@ import (
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
"github.com/alcionai/corso/src/pkg/services/m365/custom"
)
@ -366,7 +366,7 @@ func downloadContent(
itemID := ptr.Val(item.GetId())
ctx = clues.Add(ctx, "item_id", itemID)
content, err := downloadItem(ctx, iaag, driveID, item)
content, err := downloadItem(ctx, iaag, item)
if err == nil {
return content, nil
} else if !graph.IsErrUnauthorizedOrBadToken(err) {
@ -395,7 +395,7 @@ func downloadContent(
cdi := custom.ToCustomDriveItem(di)
content, err = downloadItem(ctx, iaag, driveID, cdi)
content, err = downloadItem(ctx, iaag, cdi)
if err != nil {
return nil, clues.Wrap(err, "content download retry")
}
@ -426,7 +426,7 @@ func readItemContents(
return nil, core.ErrNotFound
}
rc, err := downloadFile(ctx, iaag, props.downloadURL, false)
rc, err := downloadFile(ctx, iaag, props.downloadURL)
if graph.IsErrUnauthorizedOrBadToken(err) {
logger.CtxErr(ctx, err).Debug("stale item in cache")
}
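The retry flow in downloadContent above, reduced to a self-contained sketch: one attempt with the cached download URL, and a single retry with a freshly fetched URL only when the failure looks token-related. isUnauthorized is a stand-in for graph.IsErrUnauthorizedOrBadToken.

package main

import (
	"errors"
	"fmt"
)

var errUnauthorized = errors.New("401 unauthorized")

// isUnauthorized stands in for graph.IsErrUnauthorizedOrBadToken.
func isUnauthorized(err error) bool { return errors.Is(err, errUnauthorized) }

func download(url string) (string, error) {
	if url == "stale-token-url" {
		return "", errUnauthorized
	}

	return "content via " + url, nil
}

func downloadWithRefresh(cachedURL string, refreshURL func() string) (string, error) {
	content, err := download(cachedURL)
	if err == nil {
		return content, nil
	} else if !isUnauthorized(err) {
		// Failures that aren't token-related are not retried here.
		return "", err
	}

	// Re-fetch the item to obtain a fresh download URL, then retry once.
	content, err = download(refreshURL())
	if err != nil {
		return "", fmt.Errorf("content download retry: %w", err)
	}

	return content, nil
}

func main() {
	c, err := downloadWithRefresh("stale-token-url", func() string { return "fresh-url" })
	fmt.Println(c, err)
}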

View File

@ -21,7 +21,7 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/readers"
"github.com/alcionai/corso/src/internal/data"
odmetadata "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
metaTD "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata/testdata"
odTD "github.com/alcionai/corso/src/internal/m365/service/onedrive/testdata"
"github.com/alcionai/corso/src/internal/m365/support"
@ -34,7 +34,6 @@ import (
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
"github.com/alcionai/corso/src/pkg/services/m365/custom"
)
@ -74,13 +73,13 @@ func (suite *CollectionUnitSuite) TestCollection() {
stubMetaID = "testMetaID"
stubMetaEntityID = "email@provider.com"
stubMetaRoles = []string{"read", "write"}
stubMeta = odmetadata.Metadata{
stubMeta = metadata.Metadata{
FileName: stubItemName,
Permissions: []odmetadata.Permission{
Permissions: []metadata.Permission{
{
ID: stubMetaID,
EntityID: stubMetaEntityID,
EntityType: odmetadata.GV2User,
EntityType: metadata.GV2User,
Roles: stubMetaRoles,
Expiration: &now,
},
@ -209,7 +208,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
mbh.GetErrs = []error{test.getErr}
mbh.GI = getsItem{Err: assert.AnError}
pcr := metaTD.NewStubPermissionResponse(odmetadata.GV2User, stubMetaID, stubMetaEntityID, stubMetaRoles)
pcr := metaTD.NewStubPermissionResponse(metadata.GV2User, stubMetaID, stubMetaEntityID, stubMetaRoles)
mbh.GIP = getsItemPermission{Perm: pcr}
coll, err := NewCollection(
@ -295,7 +294,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
assert.Equal(t, readers.DefaultSerializationVersion, rr.Format().Version)
assert.False(t, rr.Format().DelInFlight)
readMeta := odmetadata.Metadata{}
readMeta := metadata.Metadata{}
err = json.NewDecoder(rr).Decode(&readMeta)
require.NoError(t, err, clues.ToCore(err))

View File

@ -14,6 +14,7 @@ import (
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/support"
bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
@ -25,7 +26,6 @@ import (
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
"github.com/alcionai/corso/src/pkg/services/m365/custom"
)

View File

@ -9,9 +9,9 @@ import (
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
"github.com/alcionai/corso/src/pkg/services/m365/custom"
)

View File

@ -7,13 +7,13 @@ import (
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/export"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/metrics"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
)
func NewExportCollection(

View File

@ -12,9 +12,9 @@ import (
"github.com/alcionai/corso/src/internal/data"
dataMock "github.com/alcionai/corso/src/internal/data/mock"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
)
type ExportUnitSuite struct {

View File

@ -19,9 +19,12 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
dataMock "github.com/alcionai/corso/src/internal/data/mock"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details"
bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/control"
@ -31,7 +34,6 @@ import (
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
"github.com/alcionai/corso/src/pkg/services/m365/custom"
@ -39,6 +41,50 @@ import (
const defaultFileSize int64 = 42
// TODO(ashmrtn): Merge with similar structs in graph and exchange packages.
type oneDriveService struct {
credentials account.M365Config
status support.ControllerOperationStatus
ac api.Client
}
func newOneDriveService(credentials account.M365Config) (*oneDriveService, error) {
ac, err := api.NewClient(
credentials,
control.DefaultOptions(),
count.New())
if err != nil {
return nil, err
}
service := oneDriveService{
ac: ac,
credentials: credentials,
}
return &service, nil
}
func (ods *oneDriveService) updateStatus(status *support.ControllerOperationStatus) {
if status == nil {
return
}
ods.status = support.MergeStatus(ods.status, *status)
}
func loadTestService(t *testing.T) *oneDriveService {
a := tconfig.NewM365Account(t)
creds, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
service, err := newOneDriveService(creds)
require.NoError(t, err, clues.ToCore(err))
return service
}
// ---------------------------------------------------------------------------
// collections
// ---------------------------------------------------------------------------
@ -795,12 +841,7 @@ func (h mockBackupHandler[T]) AugmentItemInfo(
return h.ItemInfo
}
func (h *mockBackupHandler[T]) Get(
context.Context,
string,
map[string]string,
bool,
) (*http.Response, error) {
func (h *mockBackupHandler[T]) Get(context.Context, string, map[string]string) (*http.Response, error) {
c := h.getCall
h.getCall++

View File

@ -21,10 +21,8 @@ import (
)
const (
acceptHeaderKey = "Accept"
acceptHeaderValue = "*/*"
gigabyte = 1024 * 1024 * 1024
largeFileDownloadLimit = 15 * gigabyte
acceptHeaderKey = "Accept"
acceptHeaderValue = "*/*"
)
// downloadUrlKeys is used to find the download URL in a DriveItem response.
@ -35,8 +33,7 @@ var downloadURLKeys = []string{
func downloadItem(
ctx context.Context,
getter api.Getter,
driveID string,
ag api.Getter,
item *custom.DriveItem,
) (io.ReadCloser, error) {
if item == nil {
@ -44,37 +41,36 @@ func downloadItem(
}
var (
// very large file content needs to be downloaded through a different endpoint, or else
// the download could take longer than the lifespan of the download token in the cached
// url, which will cause us to time out on every download request, even if we refresh the
// download url right before the query.
url = "https://graph.microsoft.com/v1.0/drives/" + driveID + "/items/" + ptr.Val(item.GetId()) + "/content"
reader io.ReadCloser
err error
isLargeFile = ptr.Val(item.GetSize()) > largeFileDownloadLimit
rc io.ReadCloser
isFile = item.GetFile() != nil
err error
)
// if this isn't a file, no content is available for download
if item.GetFile() == nil {
return reader, nil
if isFile {
var (
url string
ad = item.GetAdditionalData()
)
for _, key := range downloadURLKeys {
if v, err := str.AnyValueToString(key, ad); err == nil {
url = v
break
}
}
rc, err = downloadFile(ctx, ag, url)
if err != nil {
return nil, clues.Stack(err)
}
}
// smaller files will maintain our current behavior (prefetching the download url with the
// url cache). That pattern works for us in general, and we only need to deviate for very
// large file sizes.
if !isLargeFile {
url = str.FirstIn(item.GetAdditionalData(), downloadURLKeys...)
}
reader, err = downloadFile(ctx, getter, url, isLargeFile)
return reader, clues.StackWC(ctx, err).OrNil()
return rc, nil
}
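For reference, the size-based routing the removed branch implemented, as a self-contained sketch: items above the large-file threshold bypass the cached pre-signed download URL and use the durable /content endpoint, because a long download can outlive the token embedded in the cached URL. The item shape is a simplified stand-in for models.DriveItemable.

package main

import "fmt"

const (
	gigabyte               = 1024 * 1024 * 1024
	largeFileDownloadLimit = 15 * gigabyte
)

type driveItem struct {
	id                string
	cachedDownloadURL string // the pre-fetched @microsoft.graph.downloadUrl
	size              int64
}

func downloadURLFor(driveID string, item driveItem) string {
	if item.size > largeFileDownloadLimit {
		// Durable endpoint: auth happens per request, so the download
		// cannot outlive a token baked into a pre-signed URL.
		return "https://graph.microsoft.com/v1.0/drives/" + driveID +
			"/items/" + item.id + "/content"
	}

	// Smaller files keep the cached, pre-signed download URL.
	return item.cachedDownloadURL
}

func main() {
	big := driveItem{id: "a", cachedDownloadURL: "https://cache/a", size: 20 * gigabyte}
	small := driveItem{id: "b", cachedDownloadURL: "https://cache/b", size: 2 * gigabyte}
	fmt.Println(downloadURLFor("d1", big))
	fmt.Println(downloadURLFor("d1", small))
}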
type downloadWithRetries struct {
getter api.Getter
requireAuth bool
url string
getter api.Getter
url string
}
func (dg *downloadWithRetries) SupportsRange() bool {
@ -90,7 +86,7 @@ func (dg *downloadWithRetries) Get(
// wouldn't work without it (get 416 responses instead of 206).
headers[acceptHeaderKey] = acceptHeaderValue
resp, err := dg.getter.Get(ctx, dg.url, headers, dg.requireAuth)
resp, err := dg.getter.Get(ctx, dg.url, headers)
if err != nil {
return nil, clues.Wrap(err, "getting file")
}
@ -100,7 +96,7 @@ func (dg *downloadWithRetries) Get(
resp.Body.Close()
}
return nil, clues.NewWC(ctx, "malware detected").Label(graph.LabelsMalware)
return nil, clues.New("malware detected").Label(graph.LabelsMalware)
}
if resp != nil && (resp.StatusCode/100) != 2 {
@ -111,7 +107,7 @@ func (dg *downloadWithRetries) Get(
// upstream error checks can compare the status with
// clues.HasLabel(err, graph.LabelStatus(http.KnownStatusCode))
return nil, clues.
Wrap(clues.NewWC(ctx, resp.Status), "non-2xx http response").
Wrap(clues.New(resp.Status), "non-2xx http response").
Label(graph.LabelStatus(resp.StatusCode))
}
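A sketch of the request shape downloadWithRetries.Get builds, using only net/http: an explicit Accept header (without it, range-resumed requests got 416 responses instead of 206) and the non-2xx mapping to an error that carries the status. This is illustrative, not corso's actual wrapper.

package download

import (
	"fmt"
	"io"
	"net/http"
)

func getRange(url string, offset int64) (io.ReadCloser, error) {
	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		return nil, err
	}

	// Some download hosts answer 416 instead of 206 on resumed requests
	// without an explicit Accept header.
	req.Header.Set("Accept", "*/*")

	if offset > 0 {
		// Resume mid-stream; a compliant server answers 206 Partial Content.
		req.Header.Set("Range", fmt.Sprintf("bytes=%d-", offset))
	}

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, fmt.Errorf("getting file: %w", err)
	}

	if resp.StatusCode/100 != 2 {
		resp.Body.Close()
		return nil, fmt.Errorf("non-2xx http response: %s", resp.Status)
	}

	return resp.Body, nil
}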
@ -122,7 +118,6 @@ func downloadFile(
ctx context.Context,
ag api.Getter,
url string,
requireAuth bool,
) (io.ReadCloser, error) {
if len(url) == 0 {
return nil, clues.NewWC(ctx, "empty file url")
@ -146,9 +141,8 @@ func downloadFile(
rc, err := readers.NewResetRetryHandler(
ctx,
&downloadWithRetries{
getter: ag,
requireAuth: requireAuth,
url: url,
getter: ag,
url: url,
})
return rc, clues.Stack(err).OrNil()

View File

@ -12,7 +12,6 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
@ -154,8 +153,7 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
{
Values: nil,
NextLink: nil,
// needs graph.Stack, not clues.Stack
Err: graph.Stack(ctx, mySiteURLNotFound),
Err: graph.Stack(ctx, mySiteURLNotFound),
},
},
expectedErr: assert.NoError,
@ -167,8 +165,7 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
{
Values: nil,
NextLink: nil,
// needs graph.Stack, not clues.Stack
Err: graph.Stack(ctx, mySiteNotFound),
Err: graph.Stack(ctx, mySiteNotFound),
},
},
expectedErr: assert.NoError,
@ -234,18 +231,6 @@ func (suite *OneDriveIntgSuite) SetupSuite() {
require.NoError(t, err, clues.ToCore(err))
}
type stubStatusUpdater struct {
status support.ControllerOperationStatus
}
func (ssu *stubStatusUpdater) updateStatus(status *support.ControllerOperationStatus) {
if status == nil {
return
}
ssu.status = support.MergeStatus(ssu.status, *status)
}
func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() {
creds, err := tconfig.NewM365Account(suite.T()).M365Config()
require.NoError(suite.T(), err, clues.ToCore(err))
@ -271,10 +256,10 @@ func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() {
defer flush()
var (
scope = selectors.
service = loadTestService(t)
scope = selectors.
NewOneDriveBackup([]string{test.user}).
AllData()[0]
statusUpdater = stubStatusUpdater{}
)
colls := NewCollections(
@ -287,7 +272,7 @@ func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() {
},
creds.AzureTenantID,
idname.NewProvider(test.user, test.user),
statusUpdater.updateStatus,
service.updateStatus,
control.Options{
ToggleFeatures: control.Toggles{},
},

View File

@ -17,7 +17,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata"
@ -31,7 +30,9 @@ import (
type ItemIntegrationSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
user string
userDriveID string
service *oneDriveService
}
func TestItemIntegrationSuite(t *testing.T) {
@ -43,7 +44,25 @@ func TestItemIntegrationSuite(t *testing.T) {
}
func (suite *ItemIntegrationSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.service = loadTestService(t)
suite.user = tconfig.SecondaryM365UserID(t)
graph.InitializeConcurrencyLimiter(ctx, true, 4)
pager := suite.service.ac.Drives().NewUserDrivePager(suite.user, nil)
odDrives, err := api.GetAllDrives(ctx, pager)
require.NoError(t, err, clues.ToCore(err))
// Test Requirement 1: Need a drive
require.Greaterf(t, len(odDrives), 0, "user %s does not have a drive", suite.user)
// Pick the first drive
suite.userDriveID = ptr.Val(odDrives[0].GetId())
}
func getOneDriveItem(
@ -84,36 +103,28 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
defer flush()
sc := selectors.
NewOneDriveBackup([]string{suite.m365.User.ID}).
NewOneDriveBackup([]string{suite.user}).
AllData()[0]
driveItem := getOneDriveItem(
ctx,
t,
suite.m365.AC,
suite.m365.User.DriveID)
driveItem := getOneDriveItem(ctx, t, suite.service.ac, suite.userDriveID)
// Test Requirement 2: Need a file
require.NotEmpty(
t,
driveItem,
"no file item found for user %q drive %q",
suite.m365.User.ID,
suite.m365.User.DriveID)
"no file item found for user %s drive %s",
suite.user,
suite.userDriveID)
bh := &userDriveBackupHandler{
baseUserDriveHandler: baseUserDriveHandler{
ac: suite.m365.AC.Drives(),
ac: suite.service.ac.Drives(),
},
userID: suite.m365.User.ID,
userID: suite.user,
scope: sc,
}
// Read data for the file
itemData, err := downloadItem(
ctx,
bh,
suite.m365.User.DriveID,
custom.ToCustomDriveItem(driveItem))
itemData, err := downloadItem(ctx, bh, custom.ToCustomDriveItem(driveItem))
require.NoError(t, err, clues.ToCore(err))
size, err := io.Copy(io.Discard, itemData)
@ -131,13 +142,13 @@ func (suite *ItemIntegrationSuite) TestIsURLExpired() {
ctx, flush := tester.NewContext(t)
defer flush()
driveItem := getOneDriveItem(ctx, t, suite.m365.AC, suite.m365.User.DriveID)
driveItem := getOneDriveItem(ctx, t, suite.service.ac, suite.userDriveID)
require.NotEmpty(
t,
driveItem,
"no file item found for user %q drive %q",
suite.m365.User.ID,
suite.m365.User.DriveID)
"no file item found for user %s drive %s",
suite.user,
suite.userDriveID)
var url string
@ -162,7 +173,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
}{
{
name: "",
driveID: suite.m365.User.DriveID,
driveID: suite.userDriveID,
},
// {
// name: "sharePoint",
@ -172,12 +183,12 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
rh := NewUserDriveRestoreHandler(suite.m365.AC)
rh := NewUserDriveRestoreHandler(suite.service.ac)
ctx, flush := tester.NewContext(t)
defer flush()
root, err := suite.m365.AC.Drives().GetRootFolder(ctx, test.driveID)
root, err := suite.service.ac.Drives().GetRootFolder(ctx, test.driveID)
require.NoError(t, err, clues.ToCore(err))
newFolderName := testdata.DefaultRestoreConfig("folder").Location
@ -206,7 +217,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
// HACK: Leveraging this to test getFolder behavior for a file. `getFolder()` on the
// newly created item should fail because it's a file, not a folder
_, err = suite.m365.AC.Drives().GetFolderByName(
_, err = suite.service.ac.Drives().GetFolderByName(
ctx,
test.driveID,
ptr.Val(newFolder.GetId()),
@ -250,7 +261,7 @@ func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
}{
{
name: "oneDrive",
driveID: suite.m365.User.DriveID,
driveID: suite.userDriveID,
},
// {
// name: "sharePoint",
@ -264,11 +275,11 @@ func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
ctx, flush := tester.NewContext(t)
defer flush()
root, err := suite.m365.AC.Drives().GetRootFolder(ctx, test.driveID)
root, err := suite.service.ac.Drives().GetRootFolder(ctx, test.driveID)
require.NoError(t, err, clues.ToCore(err))
// Look up a folder that doesn't exist
_, err = suite.m365.AC.Drives().GetFolderByName(
_, err = suite.service.ac.Drives().GetFolderByName(
ctx,
test.driveID,
ptr.Val(root.GetId()),
@ -276,7 +287,7 @@ func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
require.ErrorIs(t, err, api.ErrFolderNotFound, clues.ToCore(err))
// Look up a folder that does exist
_, err = suite.m365.AC.Drives().GetFolderByName(
_, err = suite.service.ac.Drives().GetFolderByName(
ctx,
test.driveID,
ptr.Val(root.GetId()),
@ -296,7 +307,6 @@ func (m mockGetter) Get(
ctx context.Context,
url string,
headers map[string]string,
requireAuth bool,
) (*http.Response, error) {
return m.GetFunc(ctx, url)
}
@ -384,7 +394,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
return nil, clues.New("test error")
},
errorExpected: require.Error,
rcExpected: require.NotNil,
rcExpected: require.Nil,
},
{
name: "download url is empty",
@ -421,7 +431,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
}, nil
},
errorExpected: require.Error,
rcExpected: require.NotNil,
rcExpected: require.Nil,
},
{
name: "non-2xx http response",
@ -440,7 +450,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
}, nil
},
errorExpected: require.Error,
rcExpected: require.NotNil,
rcExpected: require.Nil,
},
}
@ -453,78 +463,9 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
mg := mockGetter{
GetFunc: test.GetFunc,
}
rc, err := downloadItem(
ctx,
mg,
"driveID",
custom.ToCustomDriveItem(test.itemFunc()))
rc, err := downloadItem(ctx, mg, custom.ToCustomDriveItem(test.itemFunc()))
test.errorExpected(t, err, clues.ToCore(err))
test.rcExpected(t, rc, "unexpected reader state")
})
}
}
func (suite *ItemUnitTestSuite) TestDownloadItem_urlByFileSize() {
var (
testRc = io.NopCloser(bytes.NewReader([]byte("test")))
url = "https://example.com"
okResp = &http.Response{
StatusCode: http.StatusOK,
Body: testRc,
}
)
table := []struct {
name string
itemFunc func() models.DriveItemable
GetFunc func(ctx context.Context, url string) (*http.Response, error)
errorExpected require.ErrorAssertionFunc
rcExpected require.ValueAssertionFunc
label string
}{
{
name: "big file",
itemFunc: func() models.DriveItemable {
di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{"@microsoft.graph.downloadUrl": url})
di.SetSize(ptr.To[int64](20 * gigabyte))
return di
},
GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
assert.Contains(suite.T(), url, "/content")
return okResp, nil
},
},
{
name: "small file",
itemFunc: func() models.DriveItemable {
di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{"@microsoft.graph.downloadUrl": url})
di.SetSize(ptr.To[int64](2 * gigabyte))
return di
},
GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
assert.NotContains(suite.T(), url, "/content")
return okResp, nil
},
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
rc, err := downloadItem(
ctx,
mockGetter{GetFunc: test.GetFunc},
"driveID",
custom.ToCustomDriveItem(test.itemFunc()))
require.NoError(t, err, clues.ToCore(err))
test.rcExpected(t, rc)
})
}
}
@ -581,11 +522,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem_ConnectionResetErrorOnFirstRead
mg := mockGetter{
GetFunc: GetFunc,
}
rc, err := downloadItem(
ctx,
mg,
"driveID",
custom.ToCustomDriveItem(itemFunc()))
rc, err := downloadItem(ctx, mg, custom.ToCustomDriveItem(itemFunc()))
errorExpected(t, err, clues.ToCore(err))
rcExpected(t, rc)

View File

@ -1,8 +1,6 @@
package metadata
import (
"strings"
)
import "strings"
const (
MetaFileSuffix = ".meta"

View File

@ -10,13 +10,12 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/syncd"
"github.com/alcionai/corso/src/internal/data"
odmetadata "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
)
// empty string is used to indicate that a permission cannot be restored
@ -24,20 +23,20 @@ const nonRestorablePermission = ""
func getParentMetadata(
parentPath path.Path,
parentDirToMeta syncd.MapTo[odmetadata.Metadata],
) (odmetadata.Metadata, error) {
parentDirToMeta syncd.MapTo[metadata.Metadata],
) (metadata.Metadata, error) {
parentMeta, ok := parentDirToMeta.Load(parentPath.String())
if !ok {
drivePath, err := path.ToDrivePath(parentPath)
if err != nil {
return odmetadata.Metadata{}, clues.Wrap(err, "invalid restore path")
return metadata.Metadata{}, clues.Wrap(err, "invalid restore path")
}
if len(drivePath.Folders) != 0 {
return odmetadata.Metadata{}, clues.Wrap(err, "computing item permissions")
return metadata.Metadata{}, clues.Wrap(err, "computing item permissions")
}
parentMeta = odmetadata.Metadata{}
parentMeta = metadata.Metadata{}
}
return parentMeta, nil
@ -50,9 +49,9 @@ func getCollectionMetadata(
caches *restoreCaches,
backupVersion int,
restorePerms bool,
) (odmetadata.Metadata, error) {
) (metadata.Metadata, error) {
if !restorePerms || backupVersion < version.OneDrive1DataAndMetaFiles {
return odmetadata.Metadata{}, nil
return metadata.Metadata{}, nil
}
var (
@ -62,13 +61,13 @@ func getCollectionMetadata(
if len(drivePath.Folders) == 0 {
// No permissions for root folder
return odmetadata.Metadata{}, nil
return metadata.Metadata{}, nil
}
if backupVersion < version.OneDrive4DirIncludesPermissions {
colMeta, err := getParentMetadata(fullPath, caches.ParentDirToMeta)
if err != nil {
return odmetadata.Metadata{}, clues.Wrap(err, "collection metadata")
return metadata.Metadata{}, clues.Wrap(err, "collection metadata")
}
return colMeta, nil
@ -83,7 +82,7 @@ func getCollectionMetadata(
meta, err := FetchAndReadMetadata(ctx, dc, metaName)
if err != nil {
return odmetadata.Metadata{}, clues.Wrap(err, "collection metadata")
return metadata.Metadata{}, clues.Wrap(err, "collection metadata")
}
return meta, nil
@ -94,9 +93,9 @@ func getCollectionMetadata(
func computePreviousLinkShares(
ctx context.Context,
originDir path.Path,
parentMetas syncd.MapTo[odmetadata.Metadata],
) ([]odmetadata.LinkShare, error) {
linkShares := []odmetadata.LinkShare{}
parentMetas syncd.MapTo[metadata.Metadata],
) ([]metadata.LinkShare, error) {
linkShares := []metadata.LinkShare{}
ctx = clues.Add(ctx, "origin_dir", originDir)
parent, err := originDir.Dir()
@ -123,7 +122,7 @@ func computePreviousLinkShares(
// Any change in permissions switches it to a custom
// permission set, so we can filter on that.
if meta.SharingMode == odmetadata.SharingModeCustom {
if meta.SharingMode == metadata.SharingModeCustom {
linkShares = append(linkShares, meta.LinkShares...)
}
@ -144,11 +143,11 @@ func computePreviousMetadata(
ctx context.Context,
originDir path.Path,
// map parent dir -> parent's metadata
parentMetas syncd.MapTo[odmetadata.Metadata],
) (odmetadata.Metadata, error) {
parentMetas syncd.MapTo[metadata.Metadata],
) (metadata.Metadata, error) {
var (
parent path.Path
meta odmetadata.Metadata
meta metadata.Metadata
err error
ok bool
@ -159,26 +158,26 @@ func computePreviousMetadata(
for {
parent, err = parent.Dir()
if err != nil {
return odmetadata.Metadata{}, clues.WrapWC(ctx, err, "getting parent")
return metadata.Metadata{}, clues.WrapWC(ctx, err, "getting parent")
}
ictx := clues.Add(ctx, "parent_dir", parent)
drivePath, err := path.ToDrivePath(parent)
if err != nil {
return odmetadata.Metadata{}, clues.WrapWC(ictx, err, "transforming dir to drivePath")
return metadata.Metadata{}, clues.WrapWC(ictx, err, "transforming dir to drivePath")
}
if len(drivePath.Folders) == 0 {
return odmetadata.Metadata{}, nil
return metadata.Metadata{}, nil
}
meta, ok = parentMetas.Load(parent.String())
if !ok {
return odmetadata.Metadata{}, clues.NewWC(ictx, "no metadata found for parent folder: "+parent.String())
return metadata.Metadata{}, clues.NewWC(ictx, "no metadata found for parent folder: "+parent.String())
}
if meta.SharingMode == odmetadata.SharingModeCustom {
if meta.SharingMode == metadata.SharingModeCustom {
return meta, nil
}
}
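The ancestor walk computePreviousMetadata performs, sketched with corso's path and syncd types replaced by a string path and a plain map: climb parent directories until one carries a custom sharing mode, returning empty metadata at the drive root since there is nothing left to inherit.

package main

import (
	"fmt"
	gopath "path"
)

type meta struct {
	sharingModeCustom bool
	permissions       []string
}

func previousMetadata(dir string, parentMetas map[string]meta) (meta, error) {
	for parent := gopath.Dir(dir); ; parent = gopath.Dir(parent) {
		if parent == "/" || parent == "." {
			return meta{}, nil // root folder: no inherited permissions
		}

		m, ok := parentMetas[parent]
		if !ok {
			return meta{}, fmt.Errorf("no metadata found for parent folder: %s", parent)
		}

		if m.sharingModeCustom {
			return m, nil // first custom ancestor wins
		}
	}
}

func main() {
	metas := map[string]meta{
		"/drive/root:/a":   {sharingModeCustom: true, permissions: []string{"user:read"}},
		"/drive/root:/a/b": {}, // inherits from /a
	}

	m, err := previousMetadata("/drive/root:/a/b/c", metas)
	fmt.Println(m, err)
}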
@ -196,7 +195,7 @@ func UpdatePermissions(
udip updateDeleteItemPermissioner,
driveID string,
itemID string,
permAdded, permRemoved []odmetadata.Permission,
permAdded, permRemoved []metadata.Permission,
oldPermIDToNewID syncd.MapTo[string],
errs *fault.Bus,
) error {
@ -261,7 +260,7 @@ func UpdatePermissions(
// TODO: sitegroup support. Currently errors with "One or more users could not be resolved",
// likely due to the site group entityID consisting of a single integer (ex: 4)
if len(roles) == 0 || p.EntityType == odmetadata.GV2SiteGroup {
if len(roles) == 0 || p.EntityType == metadata.GV2SiteGroup {
continue
}
@ -316,7 +315,7 @@ func UpdateLinkShares(
upils updateDeleteItemLinkSharer,
driveID string,
itemID string,
lsAdded, lsRemoved []odmetadata.LinkShare,
lsAdded, lsRemoved []metadata.LinkShare,
oldLinkShareIDToNewID syncd.MapTo[string],
errs *fault.Bus,
) (bool, error) {
@ -348,7 +347,7 @@ func UpdateLinkShares(
for _, iden := range ls.Entities {
// TODO: sitegroup support. Currently errors with "One or more users could not be resolved",
// likely due to the site group entityID consisting of a single integer (ex: 4)
if iden.EntityType == odmetadata.GV2SiteGroup {
if iden.EntityType == metadata.GV2SiteGroup {
continue
}
@ -458,11 +457,11 @@ func UpdateLinkShares(
func filterUnavailableEntitiesInLinkShare(
ctx context.Context,
linkShares []odmetadata.LinkShare,
linkShares []metadata.LinkShare,
availableEntities ResourceIDNames,
oldLinkShareIDToNewID syncd.MapTo[string],
) []odmetadata.LinkShare {
filtered := []odmetadata.LinkShare{}
) []metadata.LinkShare {
filtered := []metadata.LinkShare{}
if availableEntities.Users == nil || availableEntities.Groups == nil {
// This should not happen unless we failed to fill in the caches
@ -471,20 +470,20 @@ func filterUnavailableEntitiesInLinkShare(
}
for _, p := range linkShares {
entities := []odmetadata.Entity{}
entities := []metadata.Entity{}
for _, e := range p.Entities {
available := false
switch e.EntityType {
case odmetadata.GV2User:
case metadata.GV2User:
// Link shares with external users won't have IDs
if len(e.ID) == 0 && len(e.Email) > 0 {
available = true
} else {
_, available = availableEntities.Users.NameOf(e.ID)
}
case odmetadata.GV2Group:
case metadata.GV2Group:
_, available = availableEntities.Groups.NameOf(e.ID)
default:
// We only know about users and groups
@ -514,26 +513,26 @@ func filterUnavailableEntitiesInLinkShare(
func filterUnavailableEntitiesInPermissions(
ctx context.Context,
perms []odmetadata.Permission,
perms []metadata.Permission,
availableEntities ResourceIDNames,
oldPermIDToNewID syncd.MapTo[string],
) []odmetadata.Permission {
) []metadata.Permission {
if availableEntities.Users == nil || availableEntities.Groups == nil {
// This should not happen unless we failed to fill in the caches
logger.Ctx(ctx).Info("no available entities, not filtering permissions")
return perms
}
filtered := []odmetadata.Permission{}
filtered := []metadata.Permission{}
for _, p := range perms {
available := false
switch p.EntityType {
case odmetadata.GV2User:
case metadata.GV2User:
_, ok := availableEntities.Users.NameOf(p.EntityID)
available = available || ok
case odmetadata.GV2Group:
case metadata.GV2Group:
_, ok := availableEntities.Groups.NameOf(p.EntityID)
available = available || ok
default:
@ -565,11 +564,11 @@ func RestorePermissions(
driveID string,
itemID string,
itemPath path.Path,
current odmetadata.Metadata,
current metadata.Metadata,
caches *restoreCaches,
errs *fault.Bus,
) {
if current.SharingMode == odmetadata.SharingModeInherited {
if current.SharingMode == metadata.SharingModeInherited {
return
}
@ -583,7 +582,7 @@ func RestorePermissions(
}
if previousLinkShares != nil {
lsAdded, lsRemoved := odmetadata.DiffLinkShares(previousLinkShares, current.LinkShares)
lsAdded, lsRemoved := metadata.DiffLinkShares(previousLinkShares, current.LinkShares)
lsAdded = filterUnavailableEntitiesInLinkShare(ctx, lsAdded, caches.AvailableEntities, caches.OldLinkShareIDToNewID)
// Link shares have to be updated before permissions as we have to
@ -609,7 +608,7 @@ func RestorePermissions(
return
}
permAdded, permRemoved := odmetadata.DiffPermissions(previous.Permissions, current.Permissions)
permAdded, permRemoved := metadata.DiffPermissions(previous.Permissions, current.Permissions)
permAdded = filterUnavailableEntitiesInPermissions(ctx, permAdded, caches.AvailableEntities, caches.OldPermIDToNewID)
if didReset {
@ -618,7 +617,7 @@ func RestorePermissions(
// that an item has as they too will be removed.
logger.Ctx(ctx).Debug("link share creation reset all inherited permissions")
permRemoved = []odmetadata.Permission{}
permRemoved = []metadata.Permission{}
permAdded = current.Permissions
}

View File

@ -17,7 +17,7 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
odmetadata "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/operations/inject"
@ -31,7 +31,6 @@ import (
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
"github.com/alcionai/corso/src/pkg/services/m365/custom"
)
@ -553,7 +552,7 @@ func CreateRestoreFolders(
drivePath *path.DrivePath,
restoreDir *path.Builder,
folderPath path.Path,
folderMetadata odmetadata.Metadata,
folderMetadata metadata.Metadata,
caches *restoreCaches,
restorePerms bool,
errs *fault.Bus,
@ -877,12 +876,12 @@ func FetchAndReadMetadata(
ctx context.Context,
fibn data.FetchItemByNamer,
metaName string,
) (odmetadata.Metadata, error) {
) (metadata.Metadata, error) {
ctx = clues.Add(ctx, "meta_file_name", metaName)
metaFile, err := fibn.FetchItemByName(ctx, metaName)
if err != nil {
return odmetadata.Metadata{}, clues.Wrap(err, "getting item metadata")
return metadata.Metadata{}, clues.Wrap(err, "getting item metadata")
}
metaReader := metaFile.ToReader()
@ -890,25 +889,25 @@ func FetchAndReadMetadata(
meta, err := getMetadata(metaReader)
if err != nil {
return odmetadata.Metadata{}, clues.Wrap(err, "deserializing item metadata")
return metadata.Metadata{}, clues.Wrap(err, "deserializing item metadata")
}
return meta, nil
}
// getMetadata reads and parses the metadata info for an item
func getMetadata(metar io.ReadCloser) (odmetadata.Metadata, error) {
var meta odmetadata.Metadata
func getMetadata(metar io.ReadCloser) (metadata.Metadata, error) {
var meta metadata.Metadata
// `metar` will be nil for the top level container folder
if metar != nil {
metaraw, err := io.ReadAll(metar)
if err != nil {
return odmetadata.Metadata{}, err
return metadata.Metadata{}, err
}
err = json.Unmarshal(metaraw, &meta)
if err != nil {
return odmetadata.Metadata{}, err
return metadata.Metadata{}, err
}
}

View File

@ -93,9 +93,8 @@ func (h siteBackupHandler) Get(
ctx context.Context,
url string,
headers map[string]string,
requireAuth bool,
) (*http.Response, error) {
return h.ac.Get(ctx, url, headers, requireAuth)
return h.ac.Get(ctx, url, headers)
}
func (h siteBackupHandler) PathPrefix(

View File

@ -18,7 +18,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata"
@ -35,7 +34,9 @@ import (
type URLCacheIntegrationSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
ac api.Client
user string
driveID string
}
func TestURLCacheIntegrationSuite(t *testing.T) {
@ -48,12 +49,29 @@ func TestURLCacheIntegrationSuite(t *testing.T) {
func (suite *URLCacheIntegrationSuite) SetupSuite() {
t := suite.T()
suite.m365 = its.GetM365(t)
ctx, flush := tester.NewContext(t)
defer flush()
graph.InitializeConcurrencyLimiter(ctx, true, 4)
suite.user = tconfig.SecondaryM365UserID(t)
acct := tconfig.NewM365Account(t)
creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.ac, err = api.NewClient(
creds,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))
drive, err := suite.ac.Users().GetDefaultDrive(ctx, suite.user)
require.NoError(t, err, clues.ToCore(err))
suite.driveID = ptr.Val(drive.GetId())
}
// Basic test for urlCache. Create some files in onedrive, then access them via
@ -61,18 +79,22 @@ func (suite *URLCacheIntegrationSuite) SetupSuite() {
func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
var (
t = suite.T()
ac = suite.m365.AC.Drives()
driveID = suite.m365.User.DriveID
ac = suite.ac.Drives()
driveID = suite.driveID
newFolderName = testdata.DefaultRestoreConfig("folder").Location
)
ctx, flush := tester.NewContext(t)
defer flush()
// Create a new test folder
root, err := ac.GetRootFolder(ctx, driveID)
require.NoError(t, err, clues.ToCore(err))
newFolder, err := ac.PostItemInContainer(
ctx,
driveID,
suite.m365.User.DriveRootFolderID,
ptr.Val(root.GetId()),
api.NewDriveItem(newFolderName, true),
control.Copy)
require.NoError(t, err, clues.ToCore(err))
@ -83,7 +105,7 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
// Get the previous delta to feed into url cache
pager := ac.EnumerateDriveItemsDelta(
ctx,
driveID,
suite.driveID,
"",
api.CallConfig{
Select: api.URLCacheDriveItemProps(),
@ -120,10 +142,10 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
// Create a new URL cache with a long TTL
uc, err := newURLCache(
driveID,
suite.driveID,
du.URL,
1*time.Hour,
ac,
suite.ac.Drives(),
count.New(),
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
@ -154,8 +176,7 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
http.MethodGet,
props.downloadURL,
nil,
nil,
false)
nil)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, resp)
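A rough model of what this test exercises, with corso's internals replaced by a map and a refresh callback: the url cache is a TTL-bounded view of item ID to download URL, re-populated from a drive-items delta enumeration only once the view expires. Details of the real urlCache may differ.

package main

import (
	"fmt"
	"sync"
	"time"
)

type urlCache struct {
	mu        sync.Mutex
	urls      map[string]string
	ttl       time.Duration
	refreshed time.Time
	refresh   func() (map[string]string, error) // e.g. a delta enumeration
}

func (c *urlCache) downloadURL(itemID string) (string, error) {
	c.mu.Lock()
	defer c.mu.Unlock()

	// Re-enumerate only when the cached view has expired; lookups in
	// between are served from memory.
	if time.Since(c.refreshed) > c.ttl {
		urls, err := c.refresh()
		if err != nil {
			return "", err
		}

		c.urls, c.refreshed = urls, time.Now()
	}

	return c.urls[itemID], nil
}

func main() {
	c := &urlCache{
		ttl: time.Hour,
		refresh: func() (map[string]string, error) {
			return map[string]string{"item-1": "https://download/item-1"}, nil
		},
	}

	fmt.Println(c.downloadURL("item-1"))
}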

View File

@ -93,9 +93,8 @@ func (h userDriveBackupHandler) Get(
ctx context.Context,
url string,
headers map[string]string,
requireAuth bool,
) (*http.Response, error) {
return h.ac.Get(ctx, url, headers, requireAuth)
return h.ac.Get(ctx, url, headers)
}
func (h userDriveBackupHandler) PathPrefix(

View File

@ -296,7 +296,6 @@ func populateCollections(
cl),
qp.ProtectedResource.ID(),
bh.itemHandler(),
bh,
addAndRem.Added,
addAndRem.Removed,
// TODO: produce a feature flag that allows selective

View File

@ -24,7 +24,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
@ -88,14 +87,6 @@ func (bh mockBackupHandler) folderGetter() containerGetter { return
func (bh mockBackupHandler) previewIncludeContainers() []string { return bh.previewIncludes }
func (bh mockBackupHandler) previewExcludeContainers() []string { return bh.previewExcludes }
func (bh mockBackupHandler) CanSkipItemFailure(
err error,
resourceID string,
opts control.Options,
) (fault.SkipCause, bool) {
return "", false
}
func (bh mockBackupHandler) NewContainerCache(
userID string,
) (string, graph.ContainerResolver) {
@ -481,7 +472,10 @@ func newStatusUpdater(t *testing.T, wg *sync.WaitGroup) func(status *support.Con
type BackupIntgSuite struct {
tester.Suite
m365 its.M365IntgTestSetup
user string
site string
tenantID string
ac api.Client
}
func TestBackupIntgSuite(t *testing.T) {
@ -494,18 +488,35 @@ func TestBackupIntgSuite(t *testing.T) {
func (suite *BackupIntgSuite) SetupSuite() {
t := suite.T()
suite.m365 = its.GetM365(t)
ctx, flush := tester.NewContext(t)
defer flush()
graph.InitializeConcurrencyLimiter(ctx, true, 4)
suite.user = tconfig.M365UserID(t)
suite.site = tconfig.M365SiteID(t)
acct := tconfig.NewM365Account(t)
creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.ac, err = api.NewClient(
creds,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))
suite.tenantID = creds.AzureTenantID
tester.LogTimeOfTest(t)
}
func (suite *BackupIntgSuite) TestMailFetch() {
var (
users = []string{suite.m365.User.ID}
handlers = BackupHandlers(suite.m365.AC)
userID = tconfig.M365UserID(suite.T())
users = []string{userID}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
@ -549,14 +560,14 @@ func (suite *BackupIntgSuite) TestMailFetch() {
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: ctrlOpts,
ProtectedResource: suite.m365.User.Provider,
ProtectedResource: inMock.NewProvider(userID, userID),
}
collections, err := CreateCollections(
ctx,
bpc,
handlers,
suite.m365.TenantID,
suite.tenantID,
test.scope,
metadata.DeltaPaths{},
func(status *support.ControllerOperationStatus) {},
@ -591,8 +602,9 @@ func (suite *BackupIntgSuite) TestMailFetch() {
func (suite *BackupIntgSuite) TestDelta() {
var (
users = []string{suite.m365.User.ID}
handlers = BackupHandlers(suite.m365.AC)
userID = tconfig.M365UserID(suite.T())
users = []string{userID}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
@ -628,7 +640,7 @@ func (suite *BackupIntgSuite) TestDelta() {
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.User.Provider,
ProtectedResource: inMock.NewProvider(userID, userID),
}
// get collections without providing any delta history (ie: full backup)
@ -636,7 +648,7 @@ func (suite *BackupIntgSuite) TestDelta() {
ctx,
bpc,
handlers,
suite.m365.TenantID,
suite.tenantID,
test.scope,
metadata.DeltaPaths{},
func(status *support.ControllerOperationStatus) {},
@ -669,7 +681,7 @@ func (suite *BackupIntgSuite) TestDelta() {
ctx,
bpc,
handlers,
suite.m365.TenantID,
suite.tenantID,
test.scope,
dps,
func(status *support.ControllerOperationStatus) {},
@ -691,8 +703,8 @@ func (suite *BackupIntgSuite) TestMailSerializationRegression() {
var (
wg sync.WaitGroup
users = []string{suite.m365.User.ID}
handlers = BackupHandlers(suite.m365.AC)
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
)
sel := selectors.NewExchangeBackup(users)
@ -701,7 +713,7 @@ func (suite *BackupIntgSuite) TestMailSerializationRegression() {
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.User.Provider,
ProtectedResource: inMock.NewProvider(suite.user, suite.user),
Selector: sel.Selector,
}
@ -709,7 +721,7 @@ func (suite *BackupIntgSuite) TestMailSerializationRegression() {
ctx,
bpc,
handlers,
suite.m365.TenantID,
suite.tenantID,
sel.Scopes()[0],
metadata.DeltaPaths{},
newStatusUpdater(t, &wg),
@ -761,8 +773,8 @@ func (suite *BackupIntgSuite) TestMailSerializationRegression() {
// a regression test to ensure that downloaded items can be uploaded.
func (suite *BackupIntgSuite) TestContactSerializationRegression() {
var (
users = []string{suite.m365.User.ID}
handlers = BackupHandlers(suite.m365.AC)
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
@ -789,14 +801,14 @@ func (suite *BackupIntgSuite) TestContactSerializationRegression() {
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.User.Provider,
ProtectedResource: inMock.NewProvider(suite.user, suite.user),
}
edcs, err := CreateCollections(
ctx,
bpc,
handlers,
suite.m365.TenantID,
suite.tenantID,
test.scope,
metadata.DeltaPaths{},
newStatusUpdater(t, &wg),
@@ -863,8 +875,8 @@ func (suite *BackupIntgSuite) TestContactSerializationRegression() {
// to be able to successfully query, download and restore event objects
func (suite *BackupIntgSuite) TestEventsSerializationRegression() {
var (
users = []string{suite.m365.User.ID}
handlers = BackupHandlers(suite.m365.AC)
users = []string{suite.user}
handlers = BackupHandlers(suite.ac)
)
tests := []struct {
@@ -899,14 +911,14 @@ func (suite *BackupIntgSuite) TestEventsSerializationRegression() {
bpc := inject.BackupProducerConfig{
LastBackupVersion: version.NoBackup,
Options: control.DefaultOptions(),
ProtectedResource: suite.m365.User.Provider,
ProtectedResource: inMock.NewProvider(suite.user, suite.user),
}
collections, err := CreateCollections(
ctx,
bpc,
handlers,
suite.m365.TenantID,
suite.tenantID,
test.scope,
metadata.DeltaPaths{},
newStatusUpdater(t, &wg),

View File

@@ -19,7 +19,6 @@ import (
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/errs/core"
"github.com/alcionai/corso/src/pkg/fault"
@@ -69,21 +68,21 @@ func getItemAndInfo(
ctx context.Context,
getter itemGetterSerializer,
userID string,
itemID string,
id string,
useImmutableIDs bool,
parentPath string,
) ([]byte, *details.ExchangeInfo, error) {
item, info, err := getter.GetItem(
ctx,
userID,
itemID,
id,
fault.New(true)) // temporary way to force a failFast error
if err != nil {
return nil, nil, clues.WrapWC(ctx, err, "fetching item").
Label(fault.LabelForceNoBackupCreation)
}
itemData, err := getter.Serialize(ctx, item, userID, itemID)
itemData, err := getter.Serialize(ctx, item, userID, id)
if err != nil {
return nil, nil, clues.WrapWC(ctx, err, "serializing item")
}
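The itemID parameter is renamed to id; the behavior of getItemAndInfo is unchanged: fetch the item, then serialize it, wrapping either failure with context. A sketch of a typical caller, assembled from the call sites later in this diff (variable names are illustrative):

itemData, info, err := getItemAndInfo(
	ctx,
	col.getter,
	user,
	id,
	col.Opts().ToggleFeatures.ExchangeImmutableIDs,
	parentPath)
if err != nil {
	// recoverable: record the failure and continue with other items
	errs.AddRecoverable(ctx, err)
}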
@@ -109,7 +108,6 @@ func NewCollection(
bc data.BaseCollection,
user string,
items itemGetterSerializer,
canSkipFailChecker canSkipItemFailurer,
origAdded map[string]time.Time,
origRemoved []string,
validModTimes bool,
@@ -142,7 +140,6 @@ func NewCollection(
added: added,
removed: removed,
getter: items,
skipChecker: canSkipFailChecker,
statusUpdater: statusUpdater,
}
}
@@ -153,7 +150,6 @@ func NewCollection(
added: added,
removed: removed,
getter: items,
skipChecker: canSkipFailChecker,
statusUpdater: statusUpdater,
counter: counter,
}
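Constructor call sites shrink by one argument. A sketch of the change, with the surrounding parameter order inferred from the test call sites later in this diff:

// Before: a canSkipItemFailurer was threaded through NewCollection.
col := NewCollection(bc, user, items, skipChecker,
	added, removed, validModTimes, statusUpdater, counter)

// After: the skip-checker parameter is gone.
col := NewCollection(bc, user, items,
	added, removed, validModTimes, statusUpdater, counter)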
@@ -171,8 +167,7 @@ type prefetchCollection struct {
// removed is a list of item IDs that were deleted from, or moved out, of a container
removed map[string]struct{}
getter itemGetterSerializer
skipChecker canSkipItemFailurer
getter itemGetterSerializer
statusUpdater support.StatusUpdater
}
@@ -199,12 +194,11 @@ func (col *prefetchCollection) streamItems(
wg sync.WaitGroup
progressMessage chan<- struct{}
user = col.user
dataCategory = col.Category().String()
)
ctx = clues.Add(
ctx,
"category", dataCategory)
"category", col.Category().String())
defer func() {
close(stream)
@@ -233,7 +227,7 @@
defer close(semaphoreCh)
// delete all removed items
for itemID := range col.removed {
for id := range col.removed {
semaphoreCh <- struct{}{}
wg.Add(1)
@@ -253,7 +247,7 @@
if progressMessage != nil {
progressMessage <- struct{}{}
}
}(itemID)
}(id)
}
var (
@@ -262,7 +256,7 @@
)
// add any new items
for itemID := range col.added {
for id := range col.added {
if el.Failure() != nil {
break
}
@@ -283,23 +277,8 @@
col.Opts().ToggleFeatures.ExchangeImmutableIDs,
parentPath)
if err != nil {
// pulled outside the switch due to multiple return values.
cause, canSkip := col.skipChecker.CanSkipItemFailure(
err,
user,
col.Opts())
// Handle known error cases
switch {
case canSkip:
// this is a special case handler that allows the item to be skipped
// instead of producing an error.
errs.AddSkip(ctx, fault.FileSkip(
cause,
dataCategory,
id,
id,
nil))
case errors.Is(err, core.ErrNotFound):
// Don't report errors for deleted items as there's no way for us to
// back up data that is gone. Record it as a "success", since there's
@@ -321,19 +300,6 @@
id,
map[string]any{"parentPath": parentPath}))
atomic.AddInt64(&success, 1)
case graph.IsErrCorruptData(err):
// These items cannot be downloaded, graph error indicates that the item
// data is corrupted. Add to skipped list.
logger.
CtxErr(ctx, err).
With("skipped_reason", fault.SkipCorruptData).
Info("inaccessible email")
errs.AddSkip(ctx, fault.EmailSkip(
fault.SkipCorruptData,
user,
id,
map[string]any{"parentPath": parentPath}))
atomic.AddInt64(&success, 1)
default:
col.Counter.Inc(count.StreamItemsErred)
el.AddRecoverable(ctx, clues.Wrap(err, "fetching item").Label(fault.LabelForceNoBackupCreation))
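The two deleted cases had turned specific fetch failures into skips rather than recoverable errors. Reconstructed from the removed lines for reference (none of this logic survives the diff):

// Removed: handler-driven skips for known-bad items.
cause, canSkip := col.skipChecker.CanSkipItemFailure(err, user, col.Opts())
if canSkip {
	errs.AddSkip(ctx, fault.FileSkip(cause, dataCategory, id, id, nil))
}

// Removed: corrupt-data errors were logged, skipped, and counted as success.
if graph.IsErrCorruptData(err) {
	errs.AddSkip(ctx, fault.EmailSkip(
		fault.SkipCorruptData,
		user,
		id,
		map[string]any{"parentPath": parentPath}))
	atomic.AddInt64(&success, 1)
}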
@@ -370,7 +336,7 @@ func (col *prefetchCollection) streamItems(
if progressMessage != nil {
progressMessage <- struct{}{}
}
}(itemID)
}(id)
}
wg.Wait()
@@ -398,8 +364,7 @@ type lazyFetchCollection struct {
// removed is a list of item IDs that were deleted from, or moved out, of a container
removed map[string]struct{}
getter itemGetterSerializer
skipChecker canSkipItemFailurer
getter itemGetterSerializer
statusUpdater support.StatusUpdater
@@ -426,8 +391,8 @@ func (col *lazyFetchCollection) streamItems(
var (
success int64
progressMessage chan<- struct{}
user = col.user
el = errs.Local()
user = col.user
)
defer func() {
@@ -439,7 +404,7 @@
int(success),
0,
col.FullPath().Folder(false),
el.Failure())
errs.Failure())
}()
if len(col.added)+len(col.removed) > 0 {
@@ -465,7 +430,7 @@
// add any new items
for id, modTime := range col.added {
if el.Failure() != nil {
if errs.Failure() != nil {
break
}
@@ -481,18 +446,15 @@
&lazyItemGetter{
userID: user,
itemID: id,
category: col.Category(),
getter: col.getter,
modTime: modTime,
immutableIDs: col.Opts().ToggleFeatures.ExchangeImmutableIDs,
parentPath: parentPath,
skipChecker: col.skipChecker,
opts: col.Opts(),
},
id,
modTime,
col.counter,
el)
errs)
atomic.AddInt64(&success, 1)
@@ -506,12 +468,9 @@ type lazyItemGetter struct {
getter itemGetterSerializer
userID string
itemID string
category path.CategoryType
parentPath string
modTime time.Time
immutableIDs bool
skipChecker canSkipItemFailurer
opts control.Options
}
func (lig *lazyItemGetter) GetData(
@@ -526,25 +485,6 @@ func (lig *lazyItemGetter) GetData(
lig.immutableIDs,
lig.parentPath)
if err != nil {
if lig.skipChecker != nil {
cause, canSkip := lig.skipChecker.CanSkipItemFailure(
err,
lig.userID,
lig.opts)
if canSkip {
errs.AddSkip(ctx, fault.FileSkip(
cause,
lig.category.String(),
lig.itemID,
lig.itemID,
nil))
return nil, nil, false, clues.
NewWC(ctx, "error marked as skippable by handler").
Label(graph.LabelsSkippable)
}
}
// If an item was deleted then return an empty file so we don't fail
// the backup and return a sentinel error when asked for ItemInfo so
// we don't display the item in the backup.
@@ -559,7 +499,7 @@ func (lig *lazyItemGetter) GetData(
err = clues.Stack(err)
errs.AddRecoverable(ctx, err)
return nil, nil, false, clues.Stack(err)
return nil, nil, false, err
}
// Update the mod time to what we already told kopia about. This is required
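Net effect in the lazy path: with the local bus (el := errs.Local()) removed, recoverable failures are recorded directly on the caller's fault.Bus, and the final return stops re-wrapping an error that was stacked one line earlier:

err = clues.Stack(err)        // annotate once
errs.AddRecoverable(ctx, err) // record on the caller's bus
return nil, nil, false, err   // previously wrapped again via clues.Stack(err)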

View File

@@ -28,7 +28,6 @@ import (
"github.com/alcionai/corso/src/pkg/errs/core"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
graphTD "github.com/alcionai/corso/src/pkg/services/m365/api/graph/testdata"
)
@@ -154,7 +153,6 @@ func (suite *CollectionUnitSuite) TestNewCollection_state() {
count.New()),
"u",
mock.DefaultItemGetSerialize(),
mock.NeverCanSkipFailChecker(),
nil,
nil,
colType.validModTimes,
@@ -300,7 +298,6 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items() {
count.New()),
"",
&mock.ItemGetSerialize{},
mock.NeverCanSkipFailChecker(),
test.added,
maps.Keys(test.removed),
false,
@@ -336,232 +333,6 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items() {
}
}
func (suite *CollectionUnitSuite) TestPrefetchCollection_Items_skipFailure() {
var (
start = time.Now().Add(-time.Second)
statusUpdater = func(*support.ControllerOperationStatus) {}
)
table := []struct {
name string
category path.CategoryType
handler backupHandler
added map[string]time.Time
removed map[string]struct{}
expectItemCount int
expectSkippedCount int
expectErr assert.ErrorAssertionFunc
}{
{
name: "no items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
expectErr: assert.NoError,
},
{
name: "events only added items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": {},
"flannigan": {},
"fitzbog": {},
},
expectItemCount: 0,
expectSkippedCount: 3,
expectErr: assert.NoError,
},
{
name: "events only removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "events added and removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "contacts only added items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": {},
"flannigan": {},
"fitzbog": {},
},
expectItemCount: 0,
expectSkippedCount: 0,
expectErr: assert.Error,
},
{
name: "contacts only removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "contacts added and removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "mail only added items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": {},
"flannigan": {},
"fitzbog": {},
},
expectItemCount: 0,
expectSkippedCount: 0,
expectErr: assert.Error,
},
{
name: "mail only removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: assert.NoError,
},
{
name: "mail added and removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: assert.NoError,
},
}
for _, test := range table {
suite.Run(test.name, func() {
var (
t = suite.T()
errs = fault.New(true)
itemCount int
)
ctx, flush := tester.NewContext(t)
defer flush()
fullPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
locPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
opts := control.DefaultOptions()
opts.SkipEventsOnInstance503ForResources = map[string]struct{}{}
opts.SkipEventsOnInstance503ForResources["pr"] = struct{}{}
col := NewCollection(
data.NewBaseCollection(
fullPath,
nil,
locPath.ToBuilder(),
opts,
false,
count.New()),
"pr",
&mock.ItemGetSerialize{
SerializeErr: graph.ErrServiceUnavailableEmptyResp,
},
test.handler,
test.added,
maps.Keys(test.removed),
false,
statusUpdater,
count.New())
for item := range col.Items(ctx, errs) {
itemCount++
_, rok := test.removed[item.ID()]
if rok {
dimt, ok := item.(data.ItemModTime)
require.True(t, ok, "item implements data.ItemModTime")
assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
assert.True(t, item.Deleted(), "removals should be marked as deleted")
}
_, aok := test.added[item.ID()]
if !rok && aok {
assert.False(t, item.Deleted(), "additions should not be marked as deleted")
}
assert.True(t, aok || rok, "item must be either added or removed: %q", item.ID())
}
test.expectErr(t, errs.Failure())
assert.Equal(
t,
test.expectItemCount,
itemCount,
"should see all expected items")
assert.Len(t, errs.Skipped(), test.expectSkippedCount)
})
}
}
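The removed prefetch test exercised the reverted SkipEventsOnInstance503ForResources option end to end. Its setup, reconstructed from the deleted lines above, forced every serialize call to fail with an empty-response 503 and allow-listed the test resource:

opts := control.DefaultOptions()
opts.SkipEventsOnInstance503ForResources = map[string]struct{}{
	"pr": {},
}
getter := &mock.ItemGetSerialize{
	SerializeErr: graph.ErrServiceUnavailableEmptyResp,
}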
// This test verifies skipped error cases are handled correctly by collection enumeration
func (suite *CollectionUnitSuite) TestCollection_SkippedErrors() {
var (
@@ -593,17 +364,6 @@ func (suite *CollectionUnitSuite) TestCollection_SkippedErrors() {
},
expectedSkipError: fault.EmailSkip(fault.SkipInvalidRecipients, "", "fisher", nil),
},
{
name: "ErrorCorruptData",
added: map[string]time.Time{
"fisher": {},
},
expectItemCount: 0,
itemGetter: &mock.ItemGetSerialize{
GetErr: graphTD.ODataErr(string(graph.ErrorCorruptData)),
},
expectedSkipError: fault.EmailSkip(fault.SkipCorruptData, "", "fisher", nil),
},
}
for _, test := range table {
@@ -627,7 +387,6 @@ func (suite *CollectionUnitSuite) TestCollection_SkippedErrors() {
count.New()),
"",
test.itemGetter,
mock.NeverCanSkipFailChecker(),
test.added,
nil,
false,
@@ -708,7 +467,6 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
expectItemCount: 3,
expectReads: []string{
"fisher",
"flannigan",
"fitzbog",
},
},
@@ -761,7 +519,6 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
count.New()),
"",
mlg,
mock.NeverCanSkipFailChecker(),
test.added,
maps.Keys(test.removed),
true,
@@ -773,10 +530,10 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
_, rok := test.removed[item.ID()]
if rok {
assert.True(t, item.Deleted(), "removals should be marked as deleted")
dimt, ok := item.(data.ItemModTime)
require.True(t, ok, "item implements data.ItemModTime")
assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
assert.True(t, item.Deleted(), "removals should be marked as deleted")
}
modTime, aok := test.added[item.ID()]
@@ -785,6 +542,7 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
// initializer.
assert.Implements(t, (*data.ItemModTime)(nil), item)
assert.Equal(t, modTime, item.(data.ItemModTime).ModTime(), "item mod time")
assert.False(t, item.Deleted(), "additions should not be marked as deleted")
// Check if the test wants us to read the item's data so the lazy
@@ -804,8 +562,6 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
// collection initializer.
assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, modTime, info.Modified(), "ItemInfo mod time")
} else {
assert.Fail(t, "unexpected read on item %s", item.ID())
}
}
@@ -822,294 +578,6 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
}
}
func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_skipFailure() {
var (
start = time.Now().Add(-time.Second)
statusUpdater = func(*support.ControllerOperationStatus) {}
expectSkip = func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
assert.ErrorContains(t, err, "skip")
assert.True(t, clues.HasLabel(err, graph.LabelsSkippable), clues.ToCore(err))
}
expectNotSkipped = func(t *testing.T, err error) {
assert.Error(t, err, clues.ToCore(err))
assert.NotContains(t, err.Error(), "skip")
}
)
table := []struct {
name string
added map[string]time.Time
removed map[string]struct{}
category path.CategoryType
handler backupHandler
expectItemCount int
expectSkippedCount int
expectReads []string
expectErr func(t *testing.T, err error)
expectFailure assert.ErrorAssertionFunc
}{
{
name: "no items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
expectFailure: assert.NoError,
},
{
name: "events only added items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": start.Add(time.Minute),
"flannigan": start.Add(2 * time.Minute),
"fitzbog": start.Add(3 * time.Minute),
},
expectItemCount: 3,
expectSkippedCount: 3,
expectReads: []string{
"fisher",
"flannigan",
"fitzbog",
},
expectErr: expectSkip,
expectFailure: assert.NoError,
},
{
name: "events only removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: expectSkip,
expectFailure: assert.NoError,
},
{
name: "events added and removed items",
category: path.EventsCategory,
handler: newEventBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: expectSkip,
expectFailure: assert.NoError,
},
{
name: "contacts only added items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": start.Add(time.Minute),
"flannigan": start.Add(2 * time.Minute),
"fitzbog": start.Add(3 * time.Minute),
},
expectItemCount: 3,
expectSkippedCount: 0,
expectReads: []string{
"fisher",
"flannigan",
"fitzbog",
},
expectErr: expectNotSkipped,
expectFailure: assert.Error,
},
{
name: "contacts only removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: expectNotSkipped,
expectFailure: assert.NoError,
},
{
name: "contacts added and removed items",
category: path.ContactsCategory,
handler: newContactBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: expectNotSkipped,
expectFailure: assert.NoError,
},
{
name: "mail only added items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"fisher": start.Add(time.Minute),
"flannigan": start.Add(2 * time.Minute),
"fitzbog": start.Add(3 * time.Minute),
},
expectItemCount: 3,
expectSkippedCount: 0,
expectReads: []string{
"fisher",
"flannigan",
"fitzbog",
},
expectErr: expectNotSkipped,
expectFailure: assert.Error,
},
{
name: "mail only removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
removed: map[string]struct{}{
"princess": {},
"poppy": {},
"petunia": {},
},
expectItemCount: 3,
expectSkippedCount: 0,
expectErr: expectNotSkipped,
expectFailure: assert.NoError,
},
{
name: "mail added and removed items",
category: path.EmailCategory,
handler: newMailBackupHandler(api.Client{}),
added: map[string]time.Time{
"general": {},
},
removed: map[string]struct{}{
"general": {},
"goose": {},
"grumbles": {},
},
expectItemCount: 3,
// not 1, because general is removed from the added
// map due to being in the removed map
expectSkippedCount: 0,
expectErr: expectNotSkipped,
expectFailure: assert.NoError,
},
}
for _, test := range table {
suite.Run(test.name, func() {
var (
t = suite.T()
errs = fault.New(false)
itemCount int
)
ctx, flush := tester.NewContext(t)
defer flush()
fullPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
locPath, err := path.Build("t", "pr", path.ExchangeService, test.category, false, "fnords", "smarf")
require.NoError(t, err, clues.ToCore(err))
mlg := &mockLazyItemGetterSerializer{
ItemGetSerialize: &mock.ItemGetSerialize{
SerializeErr: graph.ErrServiceUnavailableEmptyResp,
},
}
defer mlg.check(t, test.expectReads)
opts := control.DefaultOptions()
opts.SkipEventsOnInstance503ForResources = map[string]struct{}{}
opts.SkipEventsOnInstance503ForResources["pr"] = struct{}{}
col := NewCollection(
data.NewBaseCollection(
fullPath,
nil,
locPath.ToBuilder(),
opts,
false,
count.New()),
"pr",
mlg,
test.handler,
test.added,
maps.Keys(test.removed),
true,
statusUpdater,
count.New())
for item := range col.Items(ctx, errs) {
itemCount++
_, rok := test.removed[item.ID()]
if rok {
dimt, ok := item.(data.ItemModTime)
require.True(t, ok, "item implements data.ItemModTime")
assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
assert.True(t, item.Deleted(), "removals should be marked as deleted")
}
modTime, aok := test.added[item.ID()]
if !rok && aok {
// Item's mod time should be what's passed into the collection
// initializer.
assert.Implements(t, (*data.ItemModTime)(nil), item)
assert.Equal(t, modTime, item.(data.ItemModTime).ModTime(), "item mod time")
assert.False(t, item.Deleted(), "additions should not be marked as deleted")
// Check if the test wants us to read the item's data so the lazy
// data fetch is executed.
if slices.Contains(test.expectReads, item.ID()) {
r := item.ToReader()
_, err := io.ReadAll(r)
test.expectErr(t, err)
r.Close()
} else {
assert.Fail(t, "unexpected read on item %s", item.ID())
}
}
assert.True(t, aok || rok, "item must be either added or removed: %q", item.ID())
}
failure := errs.Failure()
if failure == nil && len(errs.Recovered()) > 0 {
failure = errs.Recovered()[0]
}
test.expectFailure(t, failure, clues.ToCore(failure))
assert.Equal(
t,
test.expectItemCount,
itemCount,
"should see all expected items")
assert.Len(t, errs.Skipped(), test.expectSkippedCount)
})
}
}
func (suite *CollectionUnitSuite) TestLazyItem_NoRead_GetInfo_Errors() {
t := suite.T()

View File

@@ -1,8 +1,6 @@
package exchange
import (
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
)
@@ -54,11 +52,3 @@ func (h contactBackupHandler) NewContainerCache(
getter: h.ac,
}
}
func (h contactBackupHandler) CanSkipItemFailure(
err error,
resourceID string,
opts control.Options,
) (fault.SkipCause, bool) {
return "", false
}
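With each per-category implementation deleted, the interface seam itself disappears. A sketch of the method being dropped from the handlers, using the canSkipItemFailurer name taken from the constructor parameter removed earlier in this diff (the exact interface declaration is not shown here, so treat this as a reconstruction):

type canSkipItemFailurer interface {
	CanSkipItemFailure(
		err error,
		resourceID string,
		opts control.Options,
	) (fault.SkipCause, bool)
}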

View File

@@ -1,83 +0,0 @@
package exchange
import (
"testing"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type ContactsBackupHandlerUnitSuite struct {
tester.Suite
}
func TestContactsBackupHandlerUnitSuite(t *testing.T) {
suite.Run(t, &ContactsBackupHandlerUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *ContactsBackupHandlerUnitSuite) TestHandler_CanSkipItemFailure() {
resourceID := uuid.NewString()
table := []struct {
name string
err error
opts control.Options
expect assert.BoolAssertionFunc
expectCause fault.SkipCause
}{
{
name: "no config",
err: assert.AnError,
opts: control.Options{},
expect: assert.False,
},
{
name: "false when map is empty",
err: assert.AnError,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{},
},
expect: assert.False,
},
{
name: "false on nil error",
err: nil,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{
resourceID: {},
},
},
expect: assert.False,
},
{
name: "false even if resource matches",
err: assert.AnError,
opts: control.Options{
SkipEventsOnInstance503ForResources: map[string]struct{}{
resourceID: {},
},
},
expect: assert.False,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
h := newContactBackupHandler(api.Client{})
cause, result := h.CanSkipItemFailure(
test.err,
resourceID,
test.opts)
test.expect(t, result)
assert.Equal(t, test.expectCause, cause)
})
}
}

Some files were not shown because too many files have changed in this diff.