# Compare commits: main...ci-speedup

18 commits
| Author | SHA1 | Date |
|---|---|---|
|  | 66ea3d6638 |  |
|  | fbd3b79d96 |  |
|  | 3182cbb4d9 |  |
|  | 8a1d2ceeb9 |  |
|  | 8caff5da01 |  |
|  | c6cdd87780 |  |
|  | 9856347b78 |  |
|  | ebe21472dc |  |
|  | 765ba32b23 |  |
|  | def7382220 |  |
|  | 450e270313 |  |
|  | 4de2a33b73 |  |
|  | 0bdeb8f26c |  |
|  | 36037c7905 |  |
|  | b01bbd7414 |  |
|  | 49af829cb9 |  |
|  | 3818b518f1 |  |
|  | 1a85120e84 |  |
**.github/actions/backup-restore-test/action.yml** — 69 changes

```diff
@@ -1,5 +1,4 @@
 name: Backup Restore Test
-description: Run various backup/restore/export tests for a service.
 
 inputs:
   service:
@@ -19,10 +18,6 @@ inputs:
     description: Arguments to pass for restore; restore is skipped when missing.
     required: false
     default: ""
-  export-args:
-    description: Arguments to pass for export.
-    required: false
-    default: ""
   restore-container:
     description: Folder to use for testing
     required: true
@@ -37,9 +32,6 @@ inputs:
     description: Runs export tests when true
     required: false
    default: false
-  category:
-    description: category of data for given service
-    required: false
 
 outputs:
   backup-id:
@@ -57,9 +49,7 @@ runs:
        echo Backup ${{ inputs.service }} ${{ inputs.kind }}
        echo "---------------------------"
        set -euo pipefail
-       CATEGORY_SUFFIX=""
-       [[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
-       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-backup-${{inputs.kind }}.log
+       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-backup-${{inputs.kind }}.log
        ./corso backup create '${{ inputs.service }}' \
          --no-stats --hide-progress --json \
          ${{ inputs.backup-args }} |
@@ -78,9 +68,7 @@ runs:
        echo Restore ${{ inputs.service }} ${{ inputs.kind }}
        echo "---------------------------"
        set -euo pipefail
-       CATEGORY_SUFFIX=""
-       [[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
-       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
+       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-restore-${{inputs.kind }}.log
        ./corso restore '${{ inputs.service }}' \
          --no-stats \
          --hide-progress \
@@ -103,17 +91,11 @@ runs:
        SANITY_TEST_RESTORE_CONTAINER: ${{ steps.restore.outputs.result }}
        SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
        SANITY_BACKUP_ID: ${{ inputs.backup-id }}
-       # lists are not restored to a different folder. they get created adjacent to their originals
-       # hence SANITY_TEST_RESTORE_CONTAINER_PREFIX is necessary to differentiate restored from original
-       SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
-       SANITY_TEST_CATEGORY: ${{ inputs.category }}
      run: |
        echo "---------------------------"
        echo Sanity Test Restore ${{ inputs.service }} ${{ inputs.kind }}
        echo "---------------------------"
-       CATEGORY_SUFFIX=""
-       [[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
-       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
+       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-validate-${{inputs.kind }}.log
        ./sanity-test restore ${{ inputs.service }}
 
    - name: Export ${{ inputs.service }} ${{ inputs.kind }}
@@ -126,11 +108,9 @@ runs:
        echo Export ${{ inputs.service }} ${{ inputs.kind }}
        echo "---------------------------"
        set -euo pipefail
-       CATEGORY_SUFFIX=""
-       [[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
-       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
+       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-restore-${{inputs.kind }}.log
        ./corso export '${{ inputs.service }}' \
-         /tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }} \
+         /tmp/export-${{ inputs.service }}-${{inputs.kind }} \
          --no-stats \
          --hide-progress \
          ${{ inputs.export-args }} \
@@ -143,19 +123,14 @@ runs:
      shell: bash
      working-directory: src
      env:
-       SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}${{ inputs.category && '-' }}${{ inputs.category }}-${{ inputs.kind }}
+       SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}-${{inputs.kind }}
        SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
        SANITY_BACKUP_ID: ${{ inputs.backup-id }}
-       # applies only for sharepoint lists
-       SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
-       SANITY_TEST_CATEGORY: ${{ inputs.category }}
      run: |
        echo "---------------------------"
        echo Sanity-Test Export ${{ inputs.service }} ${{ inputs.kind }}
        echo "---------------------------"
-       CATEGORY_SUFFIX=""
-       [[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
-       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
+       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-validate-${{inputs.kind }}.log
        ./sanity-test export ${{ inputs.service }}
 
    - name: Export archive ${{ inputs.service }} ${{ inputs.kind }}
@@ -168,19 +143,17 @@ runs:
        echo Export Archive ${{ inputs.service }} ${{ inputs.kind }}
        echo "---------------------------"
        set -euo pipefail
-       CATEGORY_SUFFIX=""
-       [[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
-       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
+       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-restore-${{inputs.kind }}.log
        ./corso export '${{ inputs.service }}' \
-         /tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-archive \
+         /tmp/export-${{ inputs.service }}-${{inputs.kind }}-archive \
          --no-stats \
          --hide-progress \
          --archive \
          ${{ inputs.export-args }} \
          --backup '${{ steps.backup.outputs.result }}'
 
-       unzip /tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-archive/*.zip \
-         -d /tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-unzipped
+       unzip /tmp/export-${{ inputs.service }}-${{inputs.kind }}-archive/*.zip \
+         -d /tmp/export-${{ inputs.service }}-${{inputs.kind }}-unzipped
        cat /tmp/corsologs
 
    - name: Check archive export ${{ inputs.service }} ${{ inputs.kind }}
@@ -188,19 +161,14 @@ runs:
      shell: bash
      working-directory: src
      env:
-       SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}${{ inputs.category && '-' }}${{ inputs.category }}-${{inputs.kind }}-unzipped
+       SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}-${{inputs.kind }}-unzipped
        SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
        SANITY_BACKUP_ID: ${{ inputs.backup-id }}
-       # applies only for sharepoint lists
-       SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
-       SANITY_TEST_CATEGORY: ${{ inputs.category }}
      run: |
        echo "---------------------------"
        echo Sanity-Test Export Archive ${{ inputs.service }} ${{ inputs.kind }}
        echo "---------------------------"
-       CATEGORY_SUFFIX=""
-       [[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
-       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
+       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-validate-${{inputs.kind }}.log
        ./sanity-test export ${{ inputs.service }}
 
    - name: List ${{ inputs.service }} ${{ inputs.kind }}
@@ -211,9 +179,7 @@ runs:
        echo Backup list ${{ inputs.service }} ${{ inputs.kind }}
        echo "---------------------------"
        set -euo pipefail
-       CATEGORY_SUFFIX=""
-       [[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
-       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-${{ inputs.service }}${CATEGORY_SUFFIX}-list-${{inputs.kind }}.log
+       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-${{ inputs.service }}-list-${{inputs.kind }}.log
        ./corso backup list ${{ inputs.service }} \
          --no-stats \
          --hide-progress \
@@ -234,10 +200,7 @@ runs:
        echo Backup List w/ Backup ${{ inputs.service }} ${{ inputs.kind }}
        echo "---------------------------"
        set -euo pipefail
-       # Include category in the log file name if present
-       CATEGORY_SUFFIX=""
-       [[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
-       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-${{ inputs.service }}${CATEGORY_SUFFIX}-single-${{inputs.kind }}.log
+       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-${{ inputs.service }}-single-${{inputs.kind }}.log
        ./corso backup list ${{ inputs.service }} \
          --no-stats \
          --hide-progress \
@@ -265,4 +228,4 @@ runs:
        name: "${{ inputs.service }}-${{ inputs.kind }}-logs"
        path: ${{ inputs.log-dir }}/*
        if-no-files-found: error
        retention-days: 14
```
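The main side of this diff derives log file names with an optional category suffix so that, for example, SharePoint `lists` and `libraries` runs write to separate logs. A minimal sketch of that pattern, with hard-coded stand-ins for the composite action's `inputs.*` values:

```yaml
# Sketch only: service/kind/category stand in for the action's inputs.
- name: Build log name with optional category suffix
  shell: bash
  run: |
    service="sharepoint"
    kind="first-backup"
    category="lists"   # empty for services without a category

    CATEGORY_SUFFIX=""
    [[ -n "${category}" ]] && CATEGORY_SUFFIX="-${category}"

    # Prints gotest-sharepoint-lists-backup-first-backup.log when category is
    # set, and gotest-sharepoint-backup-first-backup.log when it is empty.
    echo "gotest-${service}${CATEGORY_SUFFIX}-backup-${kind}.log"
```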
**.github/actions/go-setup-cache/action.yml** — 1 change

```diff
@@ -1,5 +1,4 @@
 name: Setup and Cache Golang
-description: Build golang binaries for later use in CI.
 
 # clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml
 #
```
**.github/actions/publish-binary/action.yml** — 1 change

```diff
@@ -1,5 +1,4 @@
 name: Publish Binary
-description: Publish binary artifacts.
 
 inputs:
   version:
```
**.github/actions/publish-website/action.yml** — 1 change

```diff
@@ -1,5 +1,4 @@
 name: Publish Website
-description: Publish website artifacts.
 
 inputs:
   aws-iam-role:
```
**.github/actions/purge-m365-data/action.yml** — 44 changes

```diff
@@ -1,5 +1,4 @@
 name: Purge M365 User Data
-description: Deletes M365 data generated during CI tests.
 
 # Hard deletion of an m365 user's data. Our CI processes create a lot
 # of data churn (creation and immediate deletion) of files, the likes
@@ -31,19 +30,12 @@ inputs:
     description: Secret value of for AZURE_CLIENT_ID
   azure-client-secret:
     description: Secret value of for AZURE_CLIENT_SECRET
-  azure-pnp-client-id:
-    description: Secret value of AZURE_PNP_CLIENT_ID
-  azure-pnp-client-cert:
-    description: Base64 encoded private certificate for the azure-pnp-client-id (Secret value of AZURE_PNP_CLIENT_CERT)
   azure-tenant-id:
-    description: Secret value of AZURE_TENANT_ID
+    description: Secret value of for AZURE_TENANT_ID
   m365-admin-user:
     description: Secret value of for M365_TENANT_ADMIN_USER
   m365-admin-password:
     description: Secret value of for M365_TENANT_ADMIN_PASSWORD
-  tenant-domain:
-    description: The domain of the tenant (ex. 10rqc2.onmicrosft.com)
-    required: true
 
 runs:
   using: composite
@@ -61,13 +53,7 @@ runs:
        AZURE_CLIENT_ID: ${{ inputs.azure-client-id }}
        AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
        AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
-     run: |
-       for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
-       {
-         if (./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
-           break
-         }
-       }
+     run: ./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
 
    # TODO(ashmrtn): Re-enable when we figure out errors we're seeing with Get-Mailbox call.
    #- name: Reset retention for all mailboxes to 0
@@ -88,16 +74,10 @@ runs:
      shell: pwsh
      working-directory: ./src/cmd/purge/scripts
      env:
-       AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
-       AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
-       TENANT_DOMAIN: ${{ inputs.tenant-domain }}
+       M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
+       M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
      run: |
-       for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
-       {
-         if (./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
-           break
-         }
-       }
+       ./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
 
    ################################################################################################################
    # Sharepoint
@@ -108,14 +88,6 @@ runs:
      shell: pwsh
      working-directory: ./src/cmd/purge/scripts
      env:
-       AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
-       AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
-       TENANT_DOMAIN: ${{ inputs.tenant-domain }}
-     run: |
-       for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
-       {
-         if (./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
-           break
-         }
-       }
-
+       M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
+       M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
+     run: ./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}
```
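The purge steps on the main side wrap each PowerShell script in a three-attempt retry loop, while the ci-speedup side calls the script once. The retry pattern in isolation, with `./somePurge.ps1` as a hypothetical stand-in for `exchangePurge.ps1`/`onedrivePurge.ps1`:

```yaml
- name: Purge with retries (pattern sketch)
  shell: pwsh
  run: |
    for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
    {
      # Stop as soon as the script reports success; otherwise try again.
      if (./somePurge.ps1) {
        break
      }
    }
```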
**.github/actions/teams-message/action.yml** — 1 change

```diff
@@ -1,5 +1,4 @@
 name: Send a message to Teams
-description: Send messages to communication apps.
 
 inputs:
   msg:
```
**.github/actions/website-linting/action.yml** — 1 change

```diff
@@ -1,5 +1,4 @@
 name: Lint Website
-description: Lint website content.
 
 inputs:
   version:
```
**.github/workflows/_filechange_checker.yml** — 2 changes

```diff
@@ -28,7 +28,7 @@ jobs:
 
       # only run CI tests if the src folder or workflow actions have changed
       - name: Check for file changes in src/ or .github/workflows/
-        uses: dorny/paths-filter@v3
+        uses: dorny/paths-filter@v2
         id: dornycheck
         with:
           list-files: json
```
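`dorny/paths-filter` (v3 on main, pinned back to v2 here) is what gates the expensive CI jobs on whether `src/` or the workflows changed. A minimal usage sketch based on the action's documented `filters` input and per-filter outputs; the filter name is illustrative:

```yaml
- name: Check for file changes in src/ or .github/workflows/
  uses: dorny/paths-filter@v2
  id: dornycheck
  with:
    list-files: json
    filters: |
      srcfileschanged:
        - 'src/**'
        - '.github/workflows/**'

# Each filter becomes a 'true'/'false' output on the step.
- name: Run only when relevant files changed
  if: steps.dornycheck.outputs.srcfileschanged == 'true'
  run: echo "src or workflow files changed"
```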
**.github/workflows/binary-publish.yml** — 2 changes

```diff
@@ -40,5 +40,5 @@ jobs:
         if: failure()
         uses: ./.github/actions/teams-message
         with:
-          msg: "[CORSO FAILED] Publishing Binary"
+          msg: "[FAILED] Publishing Binary"
           teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
```
**.github/workflows/ci.yml** — 60 changes

```diff
@@ -7,7 +7,7 @@ on:
   pull_request:
 
   push:
-    branches: [main]
+    branches: [main, ci-speedup]
     tags: ["v*.*.*"]
 
   repository_dispatch:
@@ -130,7 +130,7 @@ jobs:
     environment: Testing
     runs-on: ubuntu-latest
     timeout-minutes: 120
-    if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository)
+    # if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository)
     defaults:
       run:
         working-directory: src
@@ -188,7 +188,7 @@ jobs:
 
       # Upload the original go test output as an artifact for later review.
       - name: Upload test log
-        if: failure()
+        if: always()
         uses: actions/upload-artifact@v4
         with:
           name: ci-test-log
@@ -277,7 +277,7 @@ jobs:
       run:
         working-directory: src
     env:
-      CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-unit.log
+      CORSO_LOG_FILE: /tmp/corso-testlog/run-unit.log
       LOG_GRAPH_REQUESTS: true
     steps:
       - uses: actions/checkout@v4
@@ -286,12 +286,13 @@ jobs:
         uses: magnetikonline/action-golang-cache@v4
         with:
           go-version-file: src/go.mod
+          cache-key-suffix: unit-test-${{ github.sha }}
 
-      - run: mkdir testlog
+      - run: mkdir -p /tmp/corso-testlog
 
-      # Install gotestfmt
-      - name: Set up gotestfmt
-        run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
+      # # Install gotestfmt
+      # - name: Set up gotestfmt
+      #   run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
 
       # run the tests
       - name: Unit Tests
@@ -302,6 +303,7 @@ jobs:
           CORSO_SECONDARY_M365_TEST_USER_ID: 'foo'
         run: |
           set -euo pipefail
+          export GODEBUG=gocachetest=1
           go test \
           -tags testing \
           -json \
@@ -309,16 +311,34 @@ jobs:
           -failfast \
           -p 1 \
           -timeout 20m \
-          ./... \
-          2>&1 | tee ./testlog/gotest-unit.log | gotestfmt -hide successful-tests
+          ./...
+
+      # run the tests
+      - name: Unit Tests Again
+        env:
+          # Set these to a bad value so we don't accidentally fall back to
+          # something elsewhere.
+          CORSO_M365_TEST_USER_ID: 'foo'
+          CORSO_SECONDARY_M365_TEST_USER_ID: 'foo'
+        run: |
+          set -euo pipefail
+          export GODEBUG=gocachetest=1
+          go test \
+          -tags testing \
+          -json \
+          -v \
+          -failfast \
+          -p 1 \
+          -timeout 20m \
+          ./...
 
       # Upload the original go test output as an artifact for later review.
       - name: Upload test log
-        if: failure()
+        if: always()
         uses: actions/upload-artifact@v4
         with:
           name: unit-test-log
-          path: src/testlog/*
+          path: /tmp/corso-testlog/*
           if-no-files-found: error
           retention-days: 14
 
@@ -463,7 +483,7 @@ jobs:
           go-version-file: src/go.mod
 
       - name: Go Lint
-        uses: golangci/golangci-lint-action@v4
+        uses: golangci/golangci-lint-action@v3
         with:
           # Keep pinned to a verson as sometimes updates will add new lint
           # failures in unchanged code.
@@ -518,20 +538,6 @@ jobs:
             echo "Make sure to propagate errors with clues"
             exit 1
           fi
-      - name: Check if clues without context are used when context is passed in
-        run: |
-          # Using `grep .` as the exit codes are always true for correct grammar
-          if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#match? @clue "WC$"))' | grep .; then
-            echo "Do not use clues.*WC when context is passed in"
-            exit 1
-          fi
-      - name: Check clues with context is used when context is not passed in
-        run: |
-          # Using `grep .` as the exit codes are always true for correct grammar
-          if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#not-eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#not-match? @clue "WC$"))' | grep .; then
-            echo "Use clues.*WC when context is not passed in"
-            exit 1
-          fi
 
 # ----------------------------------------------------------------------------------------------------
 # --- GitHub Actions Linting -------------------------------------------------------------------------
```
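The notable ci.yml change runs the unit-test step twice with `GODEBUG=gocachetest=1` and a per-commit `cache-key-suffix`, which reads like an experiment in Go test-result caching: if caching works, the second, identical invocation should be satisfied mostly from cache. A reduced sketch of the idea; `gocachetest` appears to be one of cmd/go's cache-debugging switches, so treat its exact semantics as an assumption:

```yaml
- name: Unit Tests (populates the build/test cache)
  run: |
    set -euo pipefail
    export GODEBUG=gocachetest=1
    go test -tags testing ./...

- name: Unit Tests Again (should be largely cached)
  run: |
    set -euo pipefail
    export GODEBUG=gocachetest=1
    # Packages with unchanged code and inputs are reported as "(cached)"
    # rather than re-executed.
    go test -tags testing ./...
```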
**.github/workflows/ci_test_cleanup.yml** — 14 changes

```diff
@@ -12,7 +12,7 @@ jobs:
     continue-on-error: true
     strategy:
       matrix:
-        user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, ""]
+        user: [ CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, '' ]
 
     steps:
       - uses: actions/checkout@v4
@@ -33,15 +33,12 @@ jobs:
           azure-tenant-id: ${{ secrets.TENANT_ID }}
           m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
           m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
-          azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
-          azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
-          tenant-domain: ${{ vars.TENANT_DOMAIN }}
 
       - name: Notify failure in teams
         if: failure()
         uses: ./.github/actions/teams-message
         with:
-          msg: "[CORSO FAILED] ${{ vars[matrix.user] }} CI Cleanup"
+          msg: "[FAILED] ${{ vars[matrix.user] }} CI Cleanup"
           teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
 
   Test-Site-Data-Cleanup:
@@ -50,7 +47,7 @@ jobs:
     continue-on-error: true
     strategy:
       matrix:
-        site: [CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL]
+        site: [ CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL ]
 
     steps:
       - uses: actions/checkout@v4
@@ -73,13 +70,10 @@ jobs:
           azure-tenant-id: ${{ secrets.TENANT_ID }}
           m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
           m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
-          azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
-          azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
-          tenant-domain: ${{ vars.TENANT_DOMAIN }}
 
       - name: Notify failure in teams
         if: failure()
         uses: ./.github/actions/teams-message
         with:
-          msg: "[CORSO FAILED] ${{ vars[matrix.site] }} CI Cleanup"
+          msg: "[FAILED] ${{ vars[matrix.site] }} CI Cleanup"
           teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
```
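Both cleanup jobs use the matrix value as a variable *name* and resolve it at runtime with bracket indexing (`${{ vars[matrix.user] }}`). A minimal sketch of that indirection; `MY_VAR_A`/`MY_VAR_B` are hypothetical repository variables:

```yaml
jobs:
  cleanup:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        user: [ MY_VAR_A, MY_VAR_B, '' ]
    steps:
      # vars[matrix.user] looks up the repository variable whose name is the
      # current matrix value; the empty entry resolves to an empty string.
      - run: echo "cleaning up for ${{ vars[matrix.user] }}"
```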
**.github/workflows/load_test.yml** — 3 changes

```diff
@@ -155,6 +155,3 @@ jobs:
           azure-tenant-id: ${{ secrets.TENANT_ID }}
           m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
           m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
-          azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
-          azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
-          tenant-domain: ${{ vars.TENANT_DOMAIN }}
```
**.github/workflows/longevity_test.yml** — 17 changes

```diff
@@ -6,7 +6,7 @@ on:
   workflow_dispatch:
     inputs:
       user:
-        description: "User to run longevity test on"
+        description: 'User to run longevity test on'
 
 permissions:
   # required to retrieve AWS credentials
@@ -23,7 +23,7 @@ jobs:
     uses: alcionai/corso/.github/workflows/accSelector.yaml@main
 
   Longevity-Tests:
-    needs: [SetM365App]
+    needs: [ SetM365App ]
     environment: Testing
     runs-on: ubuntu-latest
     env:
@@ -37,7 +37,7 @@ jobs:
       CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-longevity.log
       RESTORE_DEST_PFX: Corso_Test_Longevity_
       TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
-      PREFIX: "longevity"
+      PREFIX: 'longevity'
 
       # Options for retention.
       RETENTION_MODE: GOVERNANCE
@@ -46,7 +46,7 @@ jobs:
     defaults:
       run:
         working-directory: src
 
     ############################################################################
     # setup
     steps:
@@ -78,7 +78,7 @@ jobs:
 
       - run: go build -o corso
         timeout-minutes: 10
 
      - run: mkdir ${CORSO_LOG_DIR}
 
      # Use shorter-lived credentials obtained from assume-role since these
@@ -113,6 +113,7 @@ jobs:
             --extend-retention \
             --prefix ${{ env.PREFIX }} \
             --bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
+            --succeed-if-exists \
             2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
 
           if grep -q 'Failed to' ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
@@ -163,7 +164,7 @@ jobs:
 
           data=$( echo $resultjson | jq -r '.[0] | .id' )
           echo result=$data >> $GITHUB_OUTPUT
 
      ##########################################################################
      # Onedrive
 
@@ -328,7 +329,7 @@ jobs:
             --hide-progress \
             --force \
             --json \
             2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt
 
       - name: Maintenance test Weekly
         id: maintenance-test-weekly
@@ -392,5 +393,5 @@ jobs:
         if: failure()
         uses: ./.github/actions/teams-message
         with:
-          msg: "[CORSO FAILED] Longevity Test"
+          msg: "[FAILED] Longevity Test"
           teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
```
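The one functional change here is `--succeed-if-exists` on `corso repo init`, which lets re-runs of the longevity workflow tolerate an already-initialized repository. The surrounding idiom, capturing output with `tee` and then grepping the log for a failure marker, guards against failures that do not surface as a nonzero exit status. A generic sketch of that idiom, with the corso arguments abbreviated from the diff:

```yaml
- name: Repo init with log-based failure check (sketch)
  run: |
    set -euo pipefail
    # Keep a copy of everything the command prints.
    ./corso repo init s3 --succeed-if-exists 2>&1 | tee "${CORSO_LOG_DIR}/gotest-repo-init.log"

    # pipefail catches nonzero exits; this catches failures that only
    # show up as text in the log.
    if grep -q 'Failed to' "${CORSO_LOG_DIR}/gotest-repo-init.log"; then
      exit 1
    fi
```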
**.github/workflows/nightly_test.yml** — 8 changes

```diff
@@ -48,7 +48,7 @@ jobs:
   # ----------------------------------------------------------------------------------------------------
 
   Test-Suite-Trusted:
-    needs: [Checkout, SetM365App]
+    needs: [ Checkout, SetM365App]
     environment: Testing
     runs-on: ubuntu-latest
     defaults:
@@ -100,9 +100,9 @@ jobs:
             -timeout 2h \
             ./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests
 
       ##########################################################################################################################################
 
       # Logging & Notifications
 
       # Upload the original go test output as an artifact for later review.
       - name: Upload test log
@@ -118,5 +118,5 @@ jobs:
         if: failure()
         uses: ./.github/actions/teams-message
         with:
-          msg: "[COROS FAILED] Nightly Checks"
+          msg: "[FAILED] Nightly Checks"
           teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
```
**.github/workflows/ok-to-test.yml** — 2 changes

```diff
@@ -19,7 +19,7 @@ jobs:
           private_key: ${{ secrets.PRIVATE_KEY }}
 
       - name: Slash Command Dispatch
-        uses: peter-evans/slash-command-dispatch@v4
+        uses: peter-evans/slash-command-dispatch@v3
         env:
           TOKEN: ${{ steps.generate_token.outputs.token }}
         with:
```
**.github/workflows/sanity-test.yaml** — 187 changes

```diff
@@ -6,7 +6,7 @@ on:
   workflow_dispatch:
     inputs:
       user:
-        description: "User to run sanity test on"
+        description: 'User to run sanity test on'
 
 permissions:
   # required to retrieve AWS credentials
@@ -23,7 +23,7 @@ jobs:
     uses: alcionai/corso/.github/workflows/accSelector.yaml@main
 
   Sanity-Tests:
-    needs: [SetM365App]
+    needs: [ SetM365App ]
     environment: Testing
     runs-on: ubuntu-latest
     env:
@@ -43,11 +43,12 @@ jobs:
     defaults:
       run:
         working-directory: src
 
+    ##########################################################################################################################################
 
-    ##########################################################################################################################################
+    # setup
 
-    # setup
     steps:
 
       - uses: actions/checkout@v4
 
       - name: Setup Golang with cache
@@ -63,9 +64,9 @@ jobs:
 
       - run: mkdir ${CORSO_LOG_DIR}
 
      ##########################################################################################################################################
 
      # Pre-Run cleanup
 
      # unlike CI tests, sanity tests are not expected to run concurrently.
      # however, the sanity yaml concurrency is set to a maximum of 1 run, preferring
@@ -90,9 +91,6 @@ jobs:
          azure-tenant-id: ${{ secrets.TENANT_ID }}
          m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
          m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
-         azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
-         azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
-         tenant-domain: ${{ vars.TENANT_DOMAIN }}
 
      - name: Purge CI-Produced Folders for Sites
        timeout-minutes: 30
@@ -101,20 +99,17 @@ jobs:
        with:
          site: ${{ vars.CORSO_M365_TEST_SITE_URL }}
          folder-prefix: ${{ env.RESTORE_DEST_PFX }}
          libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
          older-than: ${{ env.NOW }}
          azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
          azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
          azure-tenant-id: ${{ secrets.TENANT_ID }}
          m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
          m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
-         azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
-         azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
-         tenant-domain: ${{ vars.TENANT_DOMAIN }}
 
      ##########################################################################################################################################
 
      # Repository commands
 
      - name: Version Test
        timeout-minutes: 10
@@ -174,9 +169,9 @@ jobs:
            --mode complete \
            2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-maintenance.log
 
      ##########################################################################################################################################
 
      # Exchange
 
      # generate new entries to roll into the next load test
      # only runs if the test was successful
@@ -198,8 +193,8 @@ jobs:
          service: exchange
          kind: first-backup
          backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
-         restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
-         restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
+         restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+         restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true
@@ -211,8 +206,8 @@ jobs:
          service: exchange
          kind: incremental
          backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
-         restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
-         restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
+         restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+         restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
          backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true
@@ -225,8 +220,8 @@ jobs:
          service: exchange
          kind: non-delta
          backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta'
-         restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
-         restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
+         restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+         restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
          backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true
@@ -239,15 +234,16 @@ jobs:
          service: exchange
          kind: non-delta-incremental
          backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
-         restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
-         restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
+         restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
+         restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
          backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true
 
-     ##########################################################################################################################################
 
-     # Onedrive
+     ##########################################################################################################################################
 
+     # Onedrive
 
      # generate new entries for test
      - name: OneDrive - Create new data
@@ -274,8 +270,8 @@ jobs:
          service: onedrive
          kind: first-backup
          backup-args: '--user "${{ env.TEST_USER }}"'
-         restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
-         restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
+         restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
+         restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true
@@ -299,14 +295,14 @@ jobs:
          service: onedrive
          kind: incremental
          backup-args: '--user "${{ env.TEST_USER }}"'
-         restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
-         restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
+         restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
+         restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true
 
      ##########################################################################################################################################
 
-     # Sharepoint Library
+     # Sharepoint
 
      # generate new entries for test
      - name: SharePoint - Create new data
@@ -333,12 +329,11 @@ jobs:
        with:
          service: sharepoint
          kind: first-backup
-         backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
-         restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
-         restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
+         backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}"'
+         restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
+         restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true
-         category: libraries
 
      # generate some more enteries for incremental check
      - name: SharePoint - Create new data (for incremental)
@@ -360,107 +355,15 @@ jobs:
        with:
          service: sharepoint
          kind: incremental
-         backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
-         restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
-         restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
+         backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}"'
+         restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
+         restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true
-         category: libraries
 
      ##########################################################################################################################################
 
-     # Sharepoint Lists
+     # Groups and Teams
 
-     # generate new entries for test
-     # The `awk | tr | sed` command chain is used to get a comma separated list of SharePoint list names.
-     - name: SharePoint Lists - Create new data
-       id: new-data-creation-sharepoint-lists
-       timeout-minutes: 30
-       working-directory: ./src/cmd/factory
-       run: |
-         suffix=$(date +"%Y-%m-%d_%H-%M-%S")
-
-         go run . sharepoint lists \
-           --site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
-           --user ${{ env.TEST_USER }} \
-           --secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
-           --tenant ${{ secrets.TENANT_ID }} \
-           --destination ${{ env.RESTORE_DEST_PFX }}$suffix \
-           --count 4 |
-           awk 'NR > 1 {print $2}' | tr '\n' ',' | sed -e 's/,$//' -e 's/^/result=/' |
-           tee $GITHUB_OUTPUT
-
-     # Extracts the common prefix for the Sharepoint list names.
-     - name: SharePoint Lists - Store restore container
-       id: sharepoint-lists-store-restore-container
-       run: |
-         echo ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} |
-           cut -d',' -f1 |
-           cut -d'_' -f1,2,3,4,5 |
-           sed -e 's/^/result=/' |
-           tee $GITHUB_OUTPUT
-
-     - name: SharePoint Lists - Backup
-       id: sharepoint-lists-backup
-       timeout-minutes: 30
-       uses: ./.github/actions/backup-restore-test
-       with:
-         service: sharepoint
-         kind: first-backup-lists
-         backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
-         restore-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
-         export-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
-         restore-container: "${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
-         log-dir: ${{ env.CORSO_LOG_DIR }}
-         with-export: true
-         category: lists
-         on-collision: copy
-
-     # generate some more enteries for incremental check
-     - name: SharePoint Lists - Create new data (for incremental)
-       id: inc-data-creation-sharepoint-lists
-       timeout-minutes: 30
-       working-directory: ./src/cmd/factory
-       run: |
-         suffix=$(date +"%Y-%m-%d_%H-%M-%S")
-
-         go run . sharepoint lists \
-           --site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
-           --user ${{ env.TEST_USER }} \
-           --secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
-           --tenant ${{ secrets.TENANT_ID }} \
-           --destination ${{ env.RESTORE_DEST_PFX }}$suffix \
-           --count 4 |
-           awk 'NR > 1 {print $2}' | tr '\n' ',' | sed -e 's/,$//' -e 's/^/result=/' |
-           tee $GITHUB_OUTPUT
-
-     - name: SharePoint Lists - Store restore container (for incremental)
-       id: sharepoint-lists-store-restore-container-inc
-       run: |
-         echo ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }} |
-           cut -d',' -f1 |
-           cut -d'_' -f1,2,3,4,5 |
-           sed -e 's/^/result=/' |
-           tee $GITHUB_OUTPUT
-
-     - name: SharePoint Lists - Incremental backup
-       id: sharepoint-lists-incremental
-       timeout-minutes: 30
-       uses: ./.github/actions/backup-restore-test
-       with:
-         service: sharepoint
-         kind: incremental-lists
-         backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
-         restore-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
-         export-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
-         restore-container: "${{ steps.sharepoint-lists-store-restore-container-inc.outputs.result }},${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
-         log-dir: ${{ env.CORSO_LOG_DIR }}
-         with-export: true
-         category: lists
-         on-collision: copy
-
-     ##########################################################################################################################################
-
-     # Groups and Teams
-
      # generate new entries for test
      - name: Groups - Create new data
@@ -487,8 +390,8 @@ jobs:
        with:
          service: groups
          kind: first-backup
-         backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
-         restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
+         backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
+         restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true
@@ -512,15 +415,15 @@ jobs:
        with:
          service: groups
          kind: incremental
-         backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
+         backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
          restore-args: '--site "${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }}" --folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
-         restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
+         restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
          log-dir: ${{ env.CORSO_LOG_DIR }}
          with-export: true
 
      ##########################################################################################################################################
```
|
||||||
|
|
||||||
# Logging & Notifications
|
# Logging & Notifications
|
||||||
|
|
||||||
# Upload the original go test output as an artifact for later review.
|
# Upload the original go test output as an artifact for later review.
|
||||||
- name: Upload test log
|
- name: Upload test log
|
||||||
@ -536,5 +439,5 @@ jobs:
|
|||||||
if: failure()
|
if: failure()
|
||||||
uses: ./.github/actions/teams-message
|
uses: ./.github/actions/teams-message
|
||||||
with:
|
with:
|
||||||
msg: "[CORSO FAILED] Sanity Tests"
|
msg: "[FAILED] Sanity Tests"
|
||||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||||
|
|||||||
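The two shell transformations in the removed list steps are easy to misread, so here is what they compute: the `awk | tr | sed` chain turns the factory tool's table output into a single `result=name1,name2,...` line for `$GITHUB_OUTPUT`, and the `cut` pair then recovers the shared destination prefix from the first name. A minimal Go sketch of the same two transformations; the sample factory output and name shapes here are invented for illustration:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical factory output: a header row, then "<id> <name>" rows,
	// where each name is "<destination-prefix>_<list>" and the prefix is
	// shaped like Corso_Test_Sanity_<date>_<time>.
	factoryOut := `ID NAME
1 Corso_Test_Sanity_2024-02-01_10-00-00_alpha
2 Corso_Test_Sanity_2024-02-01_10-00-00_beta`

	// awk 'NR > 1 {print $2}' | tr '\n' ',' | sed -e 's/,$//' -e 's/^/result=/'
	var names []string
	for i, row := range strings.Split(factoryOut, "\n") {
		if i == 0 || strings.TrimSpace(row) == "" {
			continue // NR > 1: skip the header row
		}
		names = append(names, strings.Fields(row)[1]) // $2: the name column
	}
	fmt.Println("result=" + strings.Join(names, ",")) // comma-separated, no trailing comma

	// cut -d',' -f1 | cut -d'_' -f1,2,3,4,5: common prefix taken from the first name.
	parts := strings.Split(names[0], "_")
	fmt.Println("result=" + strings.Join(parts[:5], "_"))
}

Run as written, this prints the comma-separated list of both sample names, then `result=Corso_Test_Sanity_2024-02-01_10-00-00`, mirroring what the two workflow steps feed into the backup and restore arguments.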
25 CHANGELOG.md
@ -6,46 +6,24 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

 ## [Unreleased] (beta)
-### Fixed
-- Handle the case where an email or event cannot be retrieved from Exchange due to an `ErrorCorruptData` error. Corso will skip over the item but report it in the backup summary.
-- Emails attached within other emails are now correctly exported
-- Gracefully handle email and post attachments without name when exporting to eml
-- Use correct timezone for event start and end times in Exchange exports (helps fix issues in relative recurrence patterns)
-- Fixed an issue causing exports dealing with calendar data to have high memory usage
-
-## [v0.19.0] (beta) - 2024-02-06
-
 ### Added
 - Events can now be exported from Exchange backups as .ics files.
-- Update repo init configuration to reduce the total number of GET requests sent
-  to the object store when using corso. This affects repos that have many
-  backups created in them per day the most.
-- Feature Preview: Corso now supports backup, export & restore of SharePoint lists. Lists backup can be initiated using `corso backup create sharepoint --site <site-url> --data lists`.
-- Group mailbox(aka conversations) backup and export support is now officially available. Group mailbox posts can be exported as `.eml` files.

 ### Fixed
 - Retry transient 400 "invalidRequest" errors during onedrive & sharepoint backup.
 - Backup attachments associated with group mailbox items.
 - Groups and Teams backups no longer fail when a resource has no display name.
 - Contacts in-place restore failed if the restore destination was empty.
-- Link shares with external users are now backed up and restored as expected
-- Ensure persistent repo config is populated on repo init if repo init failed partway through during the previous init attempt.

 ### Changed
 - When running `backup details` on an empty backup returns a more helpful error message.
-- Backup List additionally shows the data category for each backup.
-- Remove hidden `--succeed-if-exists` flag for repo init. Repo init will now succeed without error if run on an existing repo with the same passphrase.

 ### Known issues
 - Backing up a group mailbox item may fail if it has a very large number of attachments (500+).
 - Event description for exchange exports might look slightly different for certain events.
 - Exchange in-place restore may restore items in well-known folders to different folders if the user has well-known folder names change based on locale and has updated the locale since the backup was created.
 - In-place Exchange contacts restore will merge items in folders named "Contacts" or "contacts" into the default folder.
-- External users with access through shared links will not receive these links as they are not sent via email during restore.
-- Graph API has limited support for certain column types such as `location`, `hyperlink/picture`, and `metadata`. Restoring SharePoint list items containing these columns will result in differences compared to the original items.
-- SharePoint list item attachments are not available due to graph API limitations.
-- Group mailbox restore is not supported due to limited Graph API support for creating mailbox items.
-- Due to Graph API limitations, any group mailbox items present in subfolders other than Inbox aren't backed up.

 ## [v0.18.0] (beta) - 2024-01-02

@ -502,8 +480,7 @@ this case, Corso will skip over the item but report this in the backup summary.
 - Miscellaneous
   - Optional usage statistics reporting ([RM-35](https://github.com/alcionai/corso-roadmap/issues/35))

-[Unreleased]: https://github.com/alcionai/corso/compare/v0.19.0...HEAD
-[v0.19.0]: https://github.com/alcionai/corso/compare/v0.18.0...v0.19.0
+[Unreleased]: https://github.com/alcionai/corso/compare/v0.18.0...HEAD
 [v0.18.0]: https://github.com/alcionai/corso/compare/v0.17.0...v0.18.0
 [v0.17.0]: https://github.com/alcionai/corso/compare/v0.16.0...v0.17.0
 [v0.16.0]: https://github.com/alcionai/corso/compare/v0.15.0...v0.16.0
@ -1,6 +1,3 @@
-> [!NOTE]
-> **The Corso project is no longer actively maintained and has been archived**.
-
 <p align="center">
 <img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
 </p>
@ -4,7 +4,6 @@ run:
 linters:
   enable:
     - errcheck
-    - exhaustive
     - forbidigo
     - gci
     - gofmt
@ -26,11 +25,6 @@ linters:
     - staticcheck

 linters-settings:
-  exhaustive:
-    check:
-      - switch
-    default-signifies-exhaustive: false
-    explicit-exhaustive-switch: true
   gci:
     sections:
       - standard
@ -2,6 +2,7 @@ package backup

 import (
     "context"
+    "encoding/json"
     "fmt"
     "strings"

@ -45,7 +46,6 @@ var serviceCommands = []func(cmd *cobra.Command) *cobra.Command{
     addOneDriveCommands,
     addSharePointCommands,
     addGroupsCommands,
-    addTeamsChatsCommands,
 }

 // AddCommands attaches all `corso backup * *` commands to the parent.
@ -197,11 +197,12 @@ func genericCreateCommand(
     cerr := clues.WrapWC(ictx, err, owner)
     errs = append(errs, cerr)

-    Errf(
-        ictx,
-        "%s\nCause: %s",
-        "Unable to initiate backup",
-        err.Error())
+    meta, err := json.Marshal(cerr.Core().Values)
+    if err != nil {
+        meta = []byte("Unable to marshal error metadata")
+    }
+
+    Errf(ictx, "%s\nMessage: %v\nMetadata:%s", "Unable to complete backup", err, meta)

     continue
 }
@ -226,11 +227,12 @@ func genericCreateCommand(
     cerr := clues.Wrap(err, owner)
     errs = append(errs, cerr)

-    Errf(
-        ictx,
-        "%s\nCause: %s",
-        "Unable to complete backup",
-        err.Error())
+    meta, err := json.Marshal(cerr.Core().Values)
+    if err != nil {
+        meta = []byte("Unable to marshal error metadata")
+    }
+
+    Errf(ictx, "%s\nMessage: %v\nMetadata:%s", "Unable to complete backup", err, meta)

     continue
 }
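For readers skimming the hunk above: the replacement error path serializes the structured key-value context that clues attaches to the wrapped error and prints it next to the message, falling back to a fixed byte slice if marshaling fails. A self-contained sketch of that pattern, with a plain map standing in for `cerr.Core().Values` (which is corso/clues-specific):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	backupErr := fmt.Errorf("connecting to m365: timeout")

	// Stand-in for cerr.Core().Values: structured context carried by the error.
	values := map[string]any{
		"resource_owner": "user@example.com",
		"service":        "exchange",
	}

	meta, merr := json.Marshal(values)
	if merr != nil {
		// Same fallback as the diff: degrade the metadata, keep reporting.
		meta = []byte("Unable to marshal error metadata")
	}

	fmt.Printf("%s\nMessage: %v\nMetadata:%s\n", "Unable to complete backup", backupErr, meta)
}

One design wrinkle worth noting: the diffed code writes `meta, err := json.Marshal(...)` and then passes `err` to `Errf`, so if that `err` is the same variable as the backup error, the marshal result overwrites it before it is printed. The sketch keeps the two errors under separate names to avoid that ambiguity.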
@ -18,7 +18,6 @@ import (
     "github.com/alcionai/corso/src/internal/common/idname"
     "github.com/alcionai/corso/src/internal/operations"
     "github.com/alcionai/corso/src/internal/tester"
-    "github.com/alcionai/corso/src/internal/tester/its"
     "github.com/alcionai/corso/src/internal/tester/tconfig"
     "github.com/alcionai/corso/src/pkg/config"
     "github.com/alcionai/corso/src/pkg/path"
@ -40,7 +39,7 @@ var (
 type NoBackupExchangeE2ESuite struct {
     tester.Suite
     dpnd dependencies
-    m365 its.M365IntgTestSetup
+    its  intgTesterSetup
 }

 func TestNoBackupExchangeE2ESuite(t *testing.T) {
@ -55,7 +54,7 @@ func (suite *NoBackupExchangeE2ESuite) SetupSuite() {
     ctx, flush := tester.NewContext(t)
     defer flush()

-    suite.m365 = its.GetM365(t)
+    suite.its = newIntegrationTesterSetup(t)
     suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
 }

@ -94,7 +93,7 @@ func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_noBackups() {
 type BackupExchangeE2ESuite struct {
     tester.Suite
     dpnd dependencies
-    m365 its.M365IntgTestSetup
+    its  intgTesterSetup
 }

 func TestBackupExchangeE2ESuite(t *testing.T) {
@ -109,7 +108,7 @@ func (suite *BackupExchangeE2ESuite) SetupSuite() {
     ctx, flush := tester.NewContext(t)
     defer flush()

-    suite.m365 = its.GetM365(t)
+    suite.its = newIntegrationTesterSetup(t)
     suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
 }

@ -139,7 +138,7 @@ func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.
     cmd, ctx := buildExchangeBackupCmd(
         ctx,
         suite.dpnd.configFilePath,
-        suite.m365.User.ID,
+        suite.its.user.ID,
         category.String(),
         &recorder)

@ -150,11 +149,8 @@
     result := recorder.String()
     t.Log("backup results", result)

-    // As an offhand check: the result should contain the m365 user's email.
-    assert.Contains(
-        t,
-        strings.ToLower(result),
-        strings.ToLower(suite.m365.User.Provider.Name()))
+    // as an offhand check: the result should contain the m365 user id
+    assert.Contains(t, result, suite.its.user.ID)
 }

 func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_ServiceNotEnabled_email() {
@ -177,7 +173,7 @@ func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, categ
     cmd, ctx := buildExchangeBackupCmd(
         ctx,
         suite.dpnd.configFilePath,
-        fmt.Sprintf("%s,%s", tconfig.UnlicensedM365UserID(suite.T()), suite.m365.User.ID),
+        fmt.Sprintf("%s,%s", tconfig.UnlicensedM365UserID(suite.T()), suite.its.user.ID),
         category.String(),
         &recorder)
     err := cmd.ExecuteContext(ctx)
@ -186,11 +182,8 @@
     result := recorder.String()
     t.Log("backup results", result)

-    // As an offhand check: the result should contain the m365 user's email.
-    assert.Contains(
-        t,
-        strings.ToLower(result),
-        strings.ToLower(suite.m365.User.Provider.Name()))
+    // as an offhand check: the result should contain the m365 user id
+    assert.Contains(t, result, suite.its.user.ID)
 }

 func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_email() {
@ -249,7 +242,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAzureClientIDFl

     cmd := cliTD.StubRootCmd(
         "backup", "create", "exchange",
-        "--user", suite.m365.User.ID,
+        "--user", suite.its.user.ID,
         "--azure-client-id", "invalid-value")
     cli.BuildCommandTree(cmd)

@ -273,7 +266,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_fromConfigFile() {

     cmd := cliTD.StubRootCmd(
         "backup", "create", "exchange",
-        "--user", suite.m365.User.ID,
+        "--user", suite.its.user.ID,
         "--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
     cli.BuildCommandTree(cmd)

@ -288,11 +281,8 @@
     result := suite.dpnd.recorder.String()
     t.Log("backup results", result)

-    // As an offhand check: the result should contain the m365 user's email.
-    assert.Contains(
-        t,
-        strings.ToLower(result),
-        strings.ToLower(suite.m365.User.Provider.Name()))
+    // as an offhand check: the result should contain the m365 user id
+    assert.Contains(t, result, suite.its.user.ID)
 }

 // AWS flags
@ -306,7 +296,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAWSFlags() {

     cmd := cliTD.StubRootCmd(
         "backup", "create", "exchange",
-        "--user", suite.m365.User.ID,
+        "--user", suite.its.user.ID,
         "--aws-access-key", "invalid-value",
         "--aws-secret-access-key", "some-invalid-value")
     cli.BuildCommandTree(cmd)
@ -329,7 +319,7 @@ type PreparedBackupExchangeE2ESuite struct {
     tester.Suite
     dpnd      dependencies
     backupOps map[path.CategoryType]string
-    m365      its.M365IntgTestSetup
+    its       intgTesterSetup
 }

 func TestPreparedBackupExchangeE2ESuite(t *testing.T) {
@ -346,13 +336,13 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
     ctx, flush := tester.NewContext(t)
     defer flush()

-    suite.m365 = its.GetM365(t)
+    suite.its = newIntegrationTesterSetup(t)
     suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
     suite.backupOps = make(map[path.CategoryType]string)

     var (
-        users = []string{suite.m365.User.ID}
-        ins   = idname.NewCache(map[string]string{suite.m365.User.ID: suite.m365.User.ID})
+        users = []string{suite.its.user.ID}
+        ins   = idname.NewCache(map[string]string{suite.its.user.ID: suite.its.user.ID})
     )

     for _, set := range []path.CategoryType{email, contacts, events} {
@ -35,12 +35,9 @@ const (
     groupsServiceCommandCreateExamples = `# Backup all Groups and Teams data for the Marketing group
 corso backup create groups --group Marketing

-# Backup only Teams channel messages
+# Backup only Teams conversations messages
 corso backup create groups --group Marketing --data messages

-# Backup only group mailbox posts
-corso backup create groups --group Marketing --data conversations
-
 # Backup all Groups and Teams data for all groups
 corso backup create groups --group '*'`

@ -53,10 +50,7 @@ corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd

 # Explore Marketing messages posted after the start of 2022
 corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd \
---last-message-reply-after 2022-01-01T00:00:00
-
-# Explore group mailbox posts with conversation subject "hello world"
-corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world"`
+--last-message-reply-after 2022-01-01T00:00:00`
 )

 // called by backup.go to map subcommands to provider-specific handling.
@ -316,7 +310,7 @@ func groupsBackupCreateSelectors(
     group, cats []string,
 ) *selectors.GroupsBackup {
     if filters.PathContains(group).Compare(flags.Wildcard) {
-        return includeAllGroupsWithCategories(ins, cats)
+        return includeAllGroupWithCategories(ins, cats)
     }

     sel := selectors.NewGroupsBackup(slices.Clone(group))
@ -324,6 +318,6 @@
     return utils.AddGroupsCategories(sel, cats)
 }

-func includeAllGroupsWithCategories(ins idname.Cacher, categories []string) *selectors.GroupsBackup {
+func includeAllGroupWithCategories(ins idname.Cacher, categories []string) *selectors.GroupsBackup {
     return utils.AddGroupsCategories(selectors.NewGroupsBackup(ins.IDs()), categories)
 }
@ -20,7 +20,6 @@ import (
     "github.com/alcionai/corso/src/internal/common/idname"
     "github.com/alcionai/corso/src/internal/operations"
     "github.com/alcionai/corso/src/internal/tester"
-    "github.com/alcionai/corso/src/internal/tester/its"
     "github.com/alcionai/corso/src/internal/tester/tconfig"
     "github.com/alcionai/corso/src/pkg/config"
     "github.com/alcionai/corso/src/pkg/path"
@ -36,7 +35,7 @@ import (
 type NoBackupGroupsE2ESuite struct {
     tester.Suite
     dpnd dependencies
-    m365 its.M365IntgTestSetup
+    its  intgTesterSetup
 }

 func TestNoBackupGroupsE2ESuite(t *testing.T) {
@ -51,7 +50,7 @@ func (suite *NoBackupGroupsE2ESuite) SetupSuite() {
     ctx, flush := tester.NewContext(t)
     defer flush()

-    suite.m365 = its.GetM365(t)
+    suite.its = newIntegrationTesterSetup(t)
     suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
 }

@ -90,7 +89,7 @@ func (suite *NoBackupGroupsE2ESuite) TestGroupsBackupListCmd_noBackups() {
 type BackupGroupsE2ESuite struct {
     tester.Suite
     dpnd dependencies
-    m365 its.M365IntgTestSetup
+    its  intgTesterSetup
 }

 func TestBackupGroupsE2ESuite(t *testing.T) {
@ -105,7 +104,7 @@ func (suite *BackupGroupsE2ESuite) SetupSuite() {
     ctx, flush := tester.NewContext(t)
     defer flush()

-    suite.m365 = its.GetM365(t)
+    suite.its = newIntegrationTesterSetup(t)
     suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
 }

@ -114,8 +113,6 @@ func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() {
 }

 func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_conversations() {
-    // skip
-    suite.T().Skip("CorsoCITeam group mailbox backup is broken")
     runGroupsBackupCategoryTest(suite, flags.DataConversations)
 }

@ -137,7 +134,7 @@ func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category string) {
     cmd, ctx := buildGroupsBackupCmd(
         ctx,
         suite.dpnd.configFilePath,
-        suite.m365.Group.ID,
+        suite.its.group.ID,
         category,
         &recorder)

@ -205,7 +202,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()

     cmd := cliTD.StubRootCmd(
         "backup", "create", "groups",
-        "--group", suite.m365.Group.ID,
+        "--group", suite.its.group.ID,
         "--azure-client-id", "invalid-value")
     cli.BuildCommandTree(cmd)

@ -219,9 +216,6 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()
 }

 func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
-    // Skip
-    suite.T().Skip("CorsoCITeam group mailbox backup is broken")
-
     t := suite.T()
     ctx, flush := tester.NewContext(t)
     ctx = config.SetViper(ctx, suite.dpnd.vpr)
@ -232,7 +226,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {

     cmd := cliTD.StubRootCmd(
         "backup", "create", "groups",
-        "--group", suite.m365.Group.ID,
+        "--group", suite.its.group.ID,
         "--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
     cli.BuildCommandTree(cmd)

@ -256,7 +250,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAWSFlags() {

     cmd := cliTD.StubRootCmd(
         "backup", "create", "groups",
-        "--group", suite.m365.Group.ID,
+        "--group", suite.its.group.ID,
         "--aws-access-key", "invalid-value",
         "--aws-secret-access-key", "some-invalid-value")
     cli.BuildCommandTree(cmd)
@ -279,7 +273,7 @@ type PreparedBackupGroupsE2ESuite struct {
     tester.Suite
     dpnd      dependencies
     backupOps map[path.CategoryType]string
-    m365      its.M365IntgTestSetup
+    its       intgTesterSetup
 }

 func TestPreparedBackupGroupsE2ESuite(t *testing.T) {
@ -296,19 +290,16 @@ func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
     ctx, flush := tester.NewContext(t)
     defer flush()

-    suite.m365 = its.GetM365(t)
+    suite.its = newIntegrationTesterSetup(t)
     suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
     suite.backupOps = make(map[path.CategoryType]string)

     var (
-        groups = []string{suite.m365.Group.ID}
-        ins    = idname.NewCache(map[string]string{suite.m365.Group.ID: suite.m365.Group.ID})
+        groups = []string{suite.its.group.ID}
+        ins    = idname.NewCache(map[string]string{suite.its.group.ID: suite.its.group.ID})
         cats   = []path.CategoryType{
             path.ChannelMessagesCategory,
-            // TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
-            // odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
-            // we go fix the group mailbox.
-            // path.ConversationPostsCategory,
+            path.ConversationPostsCategory,
             path.LibrariesCategory,
         }
     )
@ -462,8 +453,6 @@ func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages(
 }

 func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_conversations() {
-    // skip
-    suite.T().Skip("CorsoCITeam group mailbox backup is broken")
     runGroupsDetailsCmdTest(suite, path.ConversationPostsCategory)
 }
@ -14,16 +14,141 @@ import (
     "github.com/alcionai/corso/src/cli/flags"
     "github.com/alcionai/corso/src/cli/print"
     cliTD "github.com/alcionai/corso/src/cli/testdata"
+    "github.com/alcionai/corso/src/internal/common/ptr"
+    "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/internal/tester/tconfig"
     "github.com/alcionai/corso/src/pkg/account"
     "github.com/alcionai/corso/src/pkg/config"
     "github.com/alcionai/corso/src/pkg/control"
+    "github.com/alcionai/corso/src/pkg/count"
     "github.com/alcionai/corso/src/pkg/path"
     "github.com/alcionai/corso/src/pkg/repository"
+    "github.com/alcionai/corso/src/pkg/services/m365/api"
+    "github.com/alcionai/corso/src/pkg/services/m365/api/graph"
     "github.com/alcionai/corso/src/pkg/storage"
     "github.com/alcionai/corso/src/pkg/storage/testdata"
 )

+// ---------------------------------------------------------------------------
+// Gockable client
+// ---------------------------------------------------------------------------
+
+// GockClient produces a new exchange api client that can be
+// mocked using gock.
+func gockClient(creds account.M365Config, counter *count.Bus) (api.Client, error) {
+    s, err := graph.NewGockService(creds, counter)
+    if err != nil {
+        return api.Client{}, err
+    }
+
+    li, err := graph.NewGockService(creds, counter, graph.NoTimeout())
+    if err != nil {
+        return api.Client{}, err
+    }
+
+    return api.Client{
+        Credentials: creds,
+        Stable:      s,
+        LargeItem:   li,
+    }, nil
+}
+
+// ---------------------------------------------------------------------------
+// Suite Setup
+// ---------------------------------------------------------------------------
+
+type ids struct {
+    ID                string
+    DriveID           string
+    DriveRootFolderID string
+}
+
+type intgTesterSetup struct {
+    acct   account.Account
+    ac     api.Client
+    gockAC api.Client
+    user   ids
+    site   ids
+    group  ids
+    team   ids
+}
+
+func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
+    its := intgTesterSetup{}
+
+    ctx, flush := tester.NewContext(t)
+    defer flush()
+
+    graph.InitializeConcurrencyLimiter(ctx, true, 4)
+
+    its.acct = tconfig.NewM365Account(t)
+    creds, err := its.acct.M365Config()
+    require.NoError(t, err, clues.ToCore(err))
+
+    its.ac, err = api.NewClient(
+        creds,
+        control.DefaultOptions(),
+        count.New())
+    require.NoError(t, err, clues.ToCore(err))
+
+    its.gockAC, err = gockClient(creds, count.New())
+    require.NoError(t, err, clues.ToCore(err))
+
+    // user drive
+
+    uids := ids{}
+
+    uids.ID = tconfig.M365UserID(t)
+
+    userDrive, err := its.ac.Users().GetDefaultDrive(ctx, uids.ID)
+    require.NoError(t, err, clues.ToCore(err))
+
+    uids.DriveID = ptr.Val(userDrive.GetId())
+
+    userDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, uids.DriveID)
+    require.NoError(t, err, clues.ToCore(err))
+
+    uids.DriveRootFolderID = ptr.Val(userDriveRootFolder.GetId())
+
+    its.user = uids
+
+    // site
+
+    sids := ids{}
+
+    sids.ID = tconfig.M365SiteID(t)
+
+    siteDrive, err := its.ac.Sites().GetDefaultDrive(ctx, sids.ID)
+    require.NoError(t, err, clues.ToCore(err))
+
+    sids.DriveID = ptr.Val(siteDrive.GetId())
+
+    siteDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, sids.DriveID)
+    require.NoError(t, err, clues.ToCore(err))
+
+    sids.DriveRootFolderID = ptr.Val(siteDriveRootFolder.GetId())
+
+    its.site = sids
+
+    // group
+
+    gids := ids{}
+
+    // use of the TeamID is intentional here, so that we are assured
+    // the group has full usage of the teams api.
+    gids.ID = tconfig.M365TeamID(t)
+
+    its.group = gids
+
+    // team
+
+    tids := ids{}
+    tids.ID = tconfig.M365TeamID(t)
+    its.team = tids
+
+    return its
+}
+
 type dependencies struct {
     st   storage.Storage
     repo repository.Repositoryer
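`graph.NewGockService` in the added helper is corso-internal, but the mocking layer it enables is the public gock library, which intercepts requests made through `http.DefaultTransport` and serves canned replies. A minimal sketch of that interception pattern, independent of corso's own wiring; the URL and payload are illustrative only:

package main

import (
	"fmt"
	"io"
	"net/http"

	"gopkg.in/h2non/gock.v1"
)

func main() {
	defer gock.Off() // remove all registered mocks when done

	// Intercept a single Graph call and serve a canned body instead of
	// touching the network.
	gock.New("https://graph.microsoft.com").
		Get("/v1.0/users").
		Reply(200).
		JSON(map[string]any{"value": []any{}})

	resp, err := http.Get("https://graph.microsoft.com/v1.0/users")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.StatusCode, string(body)) // 200 {"value":[]}
}

The suite's split between `ac` (a real client) and `gockAC` (the gock-backed one) lets individual tests choose between live integration calls and fully canned responses without changing the calling code.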
@ -67,11 +67,11 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
     flags.AddUserFlag(c)
     flags.AddGenericBackupFlags(c)
     fs.BoolVar(
-        &flags.UseOldDeltaProcessFV,
-        flags.UseOldDeltaProcessFN,
+        &flags.UseDeltaTreeFV,
+        flags.UseDeltaTreeFN,
         false,
-        "process backups using the old delta processor instead of tree-based enumeration")
-    cobra.CheckErr(fs.MarkHidden(flags.UseOldDeltaProcessFN))
+        "process backups using the delta tree instead of standard enumeration")
+    cobra.CheckErr(fs.MarkHidden(flags.UseDeltaTreeFN))

 case listCommand:
     c, _ = utils.AddCommand(cmd, oneDriveListCmd())
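The rename above is otherwise a standard cobra/pflag recipe for an experimental toggle: bind a bool to a named flag, then mark the flag hidden so it still parses but stays out of `--help`. A small sketch of that recipe with a hypothetical flag name (corso's real names live in its `flags` package):

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

var useDeltaTree bool // stand-in for flags.UseDeltaTreeFV

func main() {
	cmd := &cobra.Command{
		Use: "create",
		RunE: func(cmd *cobra.Command, args []string) error {
			fmt.Println("delta tree enabled:", useDeltaTree)
			return nil
		},
	}

	fs := cmd.Flags()
	fs.BoolVar(
		&useDeltaTree,
		"use-delta-tree", // hypothetical name mirroring flags.UseDeltaTreeFN
		false,
		"process backups using the delta tree instead of standard enumeration")

	// Hidden: still parseable, but omitted from the help output.
	cobra.CheckErr(fs.MarkHidden("use-delta-tree"))

	cmd.SetArgs([]string{"--use-delta-tree"})
	cobra.CheckErr(cmd.Execute())
}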
@ -37,11 +37,7 @@ corso backup create sharepoint --site https://example.com/hr
 corso backup create sharepoint --site https://example.com/hr,https://example.com/team

 # Backup all SharePoint data for all Sites
-corso backup create sharepoint --site '*'
-
-# Backup all SharePoint list data for a Site
-corso backup create sharepoint --site https://example.com/hr --data lists
-`
+corso backup create sharepoint --site '*'`

     sharePointServiceCommandDeleteExamples = `# Delete SharePoint backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
 and 1234abcd-12ab-cd34-56de-1234abce
@ -61,26 +57,7 @@ corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
 # Explore all files within the document library "Work Documents"
 corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
 --library "Work Documents"
-
-# Explore lists by their name(s)
-corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
---list "list-name-1,list-name-2"
-
-# Explore lists created after a given time
-corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
---list-created-after 2024-01-01T12:23:34
-
-# Explore lists created before a given time
-corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
---list-created-before 2024-01-01T12:23:34
-
-# Explore lists modified before a given time
-corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
---list-modified-before 2024-01-01T12:23:34
-
-# Explore lists modified after a given time
-corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
---list-modified-after 2024-01-01T12:23:34`
+`
 )

 // called by backup.go to map subcommands to provider-specific handling.
@ -96,8 +73,6 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {

     flags.AddSiteFlag(c, true)
     flags.AddSiteIDFlag(c, true)
-    // [TODO](hitesh) to add lists flag to invoke backup for lists
-    // when explicit invoke is not required anymore
     flags.AddDataFlag(c, []string{flags.DataLibraries}, true)
     flags.AddGenericBackupFlags(c)

@ -20,7 +20,6 @@ import (
     "github.com/alcionai/corso/src/internal/common/idname"
     "github.com/alcionai/corso/src/internal/operations"
     "github.com/alcionai/corso/src/internal/tester"
-    "github.com/alcionai/corso/src/internal/tester/its"
     "github.com/alcionai/corso/src/internal/tester/tconfig"
     "github.com/alcionai/corso/src/pkg/backup/details"
     "github.com/alcionai/corso/src/pkg/config"
@ -90,7 +89,7 @@ func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() {
 type BackupSharepointE2ESuite struct {
     tester.Suite
     dpnd dependencies
-    m365 its.M365IntgTestSetup
+    its  intgTesterSetup
 }

 func TestBackupSharepointE2ESuite(t *testing.T) {
@ -105,7 +104,7 @@ func (suite *BackupSharepointE2ESuite) SetupSuite() {
     ctx, flush := tester.NewContext(t)
     defer flush()

-    suite.m365 = its.GetM365(t)
+    suite.its = newIntegrationTesterSetup(t)
     suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
 }

@ -129,7 +128,7 @@ func runSharepointBackupCategoryTest(suite *BackupSharepointE2ESuite, category s
     cmd, ctx := buildSharepointBackupCmd(
         ctx,
         suite.dpnd.configFilePath,
-        suite.m365.Site.ID,
+        suite.its.site.ID,
         category,
         &recorder)

@ -188,7 +187,7 @@ type PreparedBackupSharepointE2ESuite struct {
     tester.Suite
     dpnd      dependencies
     backupOps map[path.CategoryType]string
-    m365      its.M365IntgTestSetup
+    its       intgTesterSetup
 }

 func TestPreparedBackupSharepointE2ESuite(t *testing.T) {
@ -205,13 +204,13 @@ func (suite *PreparedBackupSharepointE2ESuite) SetupSuite() {
     ctx, flush := tester.NewContext(t)
     defer flush()

-    suite.m365 = its.GetM365(t)
+    suite.its = newIntegrationTesterSetup(t)
     suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
     suite.backupOps = make(map[path.CategoryType]string)

     var (
-        sites = []string{suite.m365.Site.ID}
-        ins   = idname.NewCache(map[string]string{suite.m365.Site.ID: suite.m365.Site.ID})
+        sites = []string{suite.its.site.ID}
+        ins   = idname.NewCache(map[string]string{suite.its.site.ID: suite.its.site.ID})
         cats  = []path.CategoryType{
             path.ListsCategory,
         }
@ -253,12 +253,14 @@ func (suite *SharePointUnitSuite) TestValidateSharePointBackupCreateFlags() {
         cats:   []string{"invalid category"},
         expect: assert.Error,
     },
-    {
-        name:   "site with lists category",
-        site:   []string{"smarf"},
-        cats:   []string{flags.DataLists},
-        expect: assert.NoError,
-    },
+    // [TODO]: Uncomment when lists are enabled
+    // {
+    //     name:   "site with lists category",
+    //     site:   []string{"smarf"},
+    //     cats:   []string{flags.DataLists},
+    //     expect: assert.NoError,
+    // },
+
     // [TODO]: Uncomment when pages are enabled

@ -1,305 +0,0 @@
-package backup
-
-import (
-    "context"
-    "fmt"
-
-    "github.com/alcionai/clues"
-    "github.com/spf13/cobra"
-    "golang.org/x/exp/slices"
-
-    "github.com/alcionai/corso/src/cli/flags"
-    . "github.com/alcionai/corso/src/cli/print"
-    "github.com/alcionai/corso/src/cli/utils"
-    "github.com/alcionai/corso/src/internal/common/idname"
-    "github.com/alcionai/corso/src/pkg/fault"
-    "github.com/alcionai/corso/src/pkg/filters"
-    "github.com/alcionai/corso/src/pkg/path"
-    "github.com/alcionai/corso/src/pkg/selectors"
-    "github.com/alcionai/corso/src/pkg/services/m365"
-)
-
-// ------------------------------------------------------------------------------------------------
-// setup and globals
-// ------------------------------------------------------------------------------------------------
-
-const (
-    teamschatsServiceCommand                 = "chats"
-    teamschatsServiceCommandCreateUseSuffix  = "--user <userEmail> | '" + flags.Wildcard + "'"
-    teamschatsServiceCommandDeleteUseSuffix  = "--backups <backupId>"
-    teamschatsServiceCommandDetailsUseSuffix = "--backup <backupId>"
-)
-
-const (
-    teamschatsServiceCommandCreateExamples = `# Backup all chats with bob@company.hr
-corso backup create chats --user bob@company.hr
-
-# Backup all chats for all users
-corso backup create chats --user '*'`
-
-    teamschatsServiceCommandDeleteExamples = `# Delete chats backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
-and 1234abcd-12ab-cd34-56de-1234abce
-corso backup delete chats --backups 1234abcd-12ab-cd34-56de-1234abcd,1234abcd-12ab-cd34-56de-1234abce`
-
-    teamschatsServiceCommandDetailsExamples = `# Explore chats in Bob's latest backup (1234abcd...)
-corso backup details chats --backup 1234abcd-12ab-cd34-56de-1234abcd`
-)
-
-// called by backup.go to map subcommands to provider-specific handling.
-func addTeamsChatsCommands(cmd *cobra.Command) *cobra.Command {
-    var c *cobra.Command
-
-    switch cmd.Use {
-    case createCommand:
-        c, _ = utils.AddCommand(cmd, teamschatsCreateCmd(), utils.MarkPreReleaseCommand())
-
-        c.Use = c.Use + " " + teamschatsServiceCommandCreateUseSuffix
-        c.Example = teamschatsServiceCommandCreateExamples
-
-        // Flags addition ordering should follow the order we want them to appear in help and docs:
-        flags.AddUserFlag(c)
-        flags.AddDataFlag(c, []string{flags.DataChats}, false)
-        flags.AddGenericBackupFlags(c)
-
-    case listCommand:
-        c, _ = utils.AddCommand(cmd, teamschatsListCmd(), utils.MarkPreReleaseCommand())
-
-        flags.AddBackupIDFlag(c, false)
-        flags.AddAllBackupListFlags(c)
-
-    case detailsCommand:
-        c, _ = utils.AddCommand(cmd, teamschatsDetailsCmd(), utils.MarkPreReleaseCommand())
-
-        c.Use = c.Use + " " + teamschatsServiceCommandDetailsUseSuffix
-        c.Example = teamschatsServiceCommandDetailsExamples
-
-        flags.AddSkipReduceFlag(c)
-
-        // Flags addition ordering should follow the order we want them to appear in help and docs:
-        // More generic (ex: --user) and more frequently used flags take precedence.
-        flags.AddBackupIDFlag(c, true)
-        flags.AddTeamsChatsDetailsAndRestoreFlags(c)
-
-    case deleteCommand:
-        c, _ = utils.AddCommand(cmd, teamschatsDeleteCmd(), utils.MarkPreReleaseCommand())
-
-        c.Use = c.Use + " " + teamschatsServiceCommandDeleteUseSuffix
-        c.Example = teamschatsServiceCommandDeleteExamples
-
-        flags.AddMultipleBackupIDsFlag(c, false)
-        flags.AddBackupIDFlag(c, false)
-    }
-
-    return c
-}
-
-// ------------------------------------------------------------------------------------------------
-// backup create
-// ------------------------------------------------------------------------------------------------
-
-// `corso backup create chats [<flag>...]`
-func teamschatsCreateCmd() *cobra.Command {
-    return &cobra.Command{
-        Use:     teamschatsServiceCommand,
-        Aliases: []string{teamsServiceCommand},
-        Short:   "Backup M365 Chats data",
-        RunE:    createTeamsChatsCmd,
-        Args:    cobra.NoArgs,
-    }
-}
-
-// processes a teamschats backup.
-func createTeamsChatsCmd(cmd *cobra.Command, args []string) error {
-    ctx := cmd.Context()
-
-    if utils.HasNoFlagsAndShownHelp(cmd) {
-        return nil
-    }
-
-    if flags.RunModeFV == flags.RunModeFlagTest {
-        return nil
-    }
-
-    if err := validateTeamsChatsBackupCreateFlags(flags.UserFV, flags.CategoryDataFV); err != nil {
-        return err
-    }
-
-    r, acct, err := utils.AccountConnectAndWriteRepoConfig(
-        ctx,
-        cmd,
-        path.TeamsChatsService)
-    if err != nil {
-        return Only(ctx, err)
-    }
-
-    defer utils.CloseRepo(ctx, r)
-
-    // TODO: log/print recoverable errors
-    errs := fault.New(false)
-
-    svcCli, err := m365.NewM365Client(ctx, *acct)
-    if err != nil {
-        return Only(ctx, clues.Stack(err))
-    }
-
-    ins, err := svcCli.AC.Users().GetAllIDsAndNames(ctx, errs)
-    if err != nil {
-        return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 teamschats"))
-    }
-
-    sel := teamschatsBackupCreateSelectors(ctx, ins, flags.UserFV, flags.CategoryDataFV)
-    selectorSet := []selectors.Selector{}
-
-    for _, discSel := range sel.SplitByResourceOwner(ins.IDs()) {
-        selectorSet = append(selectorSet, discSel.Selector)
-    }
-
-    return genericCreateCommand(
-        ctx,
-        r,
-        "Chats",
-        selectorSet,
-        ins)
-}
-
-// ------------------------------------------------------------------------------------------------
-// backup list
-// ------------------------------------------------------------------------------------------------
-
-// `corso backup list teamschats [<flag>...]`
-func teamschatsListCmd() *cobra.Command {
-    return &cobra.Command{
-        Use:   teamschatsServiceCommand,
-        Short: "List the history of M365 Chats backups",
-        RunE:  listTeamsChatsCmd,
-        Args:  cobra.NoArgs,
-    }
-}
-
-// lists the history of backup operations
-func listTeamsChatsCmd(cmd *cobra.Command, args []string) error {
-    return genericListCommand(cmd, flags.BackupIDFV, path.TeamsChatsService, args)
-}
-
-// ------------------------------------------------------------------------------------------------
-// backup details
-// ------------------------------------------------------------------------------------------------
-
-// `corso backup details teamschats [<flag>...]`
-func teamschatsDetailsCmd() *cobra.Command {
-    return &cobra.Command{
-        Use:   teamschatsServiceCommand,
-        Short: "Shows the details of a M365 Chats backup",
-        RunE:  detailsTeamsChatsCmd,
-        Args:  cobra.NoArgs,
-    }
-}
-
-// processes a teamschats backup.
-func detailsTeamsChatsCmd(cmd *cobra.Command, args []string) error {
-    if utils.HasNoFlagsAndShownHelp(cmd) {
-        return nil
-    }
-
-    if flags.RunModeFV == flags.RunModeFlagTest {
-        return nil
-    }
-
-    return runDetailsTeamsChatsCmd(cmd)
-}
-
-func runDetailsTeamsChatsCmd(cmd *cobra.Command) error {
-    ctx := cmd.Context()
-    opts := utils.MakeTeamsChatsOpts(cmd)
-
-    sel := utils.IncludeTeamsChatsRestoreDataSelectors(ctx, opts)
-    sel.Configure(selectors.Config{OnlyMatchItemNames: true})
-    utils.FilterTeamsChatsRestoreInfoSelectors(sel, opts)
-
-    ds, err := genericDetailsCommand(cmd, flags.BackupIDFV, sel.Selector)
-    if err != nil {
-        return Only(ctx, err)
-    }
-
-    if len(ds.Entries) > 0 {
-        ds.PrintEntries(ctx)
-    } else {
-        Info(ctx, selectors.ErrorNoMatchingItems)
-    }
-
-    return nil
-}
-
-// ------------------------------------------------------------------------------------------------
-// backup delete
-// ------------------------------------------------------------------------------------------------
-
-// `corso backup delete teamschats [<flag>...]`
-func teamschatsDeleteCmd() *cobra.Command {
-    return &cobra.Command{
-        Use:   teamschatsServiceCommand,
-        Short: "Delete backed-up M365 Chats data",
-        RunE:  deleteTeamsChatsCmd,
-        Args:  cobra.NoArgs,
-    }
-}
-
-// deletes a teamschats backup.
-func deleteTeamsChatsCmd(cmd *cobra.Command, args []string) error {
-    backupIDValue := []string{}
-
-    if len(flags.BackupIDsFV) > 0 {
-        backupIDValue = flags.BackupIDsFV
-    } else if len(flags.BackupIDFV) > 0 {
-        backupIDValue = append(backupIDValue, flags.BackupIDFV)
-    } else {
-        return clues.New("either --backup or --backups flag is required")
-    }
-
-    return genericDeleteCommand(cmd, path.TeamsChatsService, "TeamsChats", backupIDValue, args)
-}
-
-// ---------------------------------------------------------------------------
-// helpers
-// ---------------------------------------------------------------------------
-
-func validateTeamsChatsBackupCreateFlags(teamschats, cats []string) error {
-    if len(teamschats) == 0 {
-        return clues.New(
-            "requires one or more --" +
-                flags.UserFN + " ids, or the wildcard --" +
-                flags.UserFN + " *")
-    }
-
-    msg := fmt.Sprintf(
-        " is an unrecognized data type; only %s is supported",
-        flags.DataChats)
-
-    allowedCats := utils.TeamsChatsAllowedCategories()
-
-    for _, d := range cats {
-        if _, ok := allowedCats[d]; !ok {
-            return clues.New(d + msg)
-        }
-    }
-
-    return nil
-}
-
-func teamschatsBackupCreateSelectors(
-    ctx context.Context,
-    ins idname.Cacher,
-    users, cats []string,
-) *selectors.TeamsChatsBackup {
-    if filters.PathContains(users).Compare(flags.Wildcard) {
-        return includeAllTeamsChatsWithCategories(ins, cats)
-    }
-
-    sel := selectors.NewTeamsChatsBackup(slices.Clone(users))
-
-    return utils.AddTeamsChatsCategories(sel, cats)
-}
-
-func includeAllTeamsChatsWithCategories(ins idname.Cacher, categories []string) *selectors.TeamsChatsBackup {
-    return utils.AddTeamsChatsCategories(selectors.NewTeamsChatsBackup(ins.IDs()), categories)
-}
@@ -1,636 +0,0 @@
package backup_test

import (
	"context"
	"fmt"
	"strings"
	"testing"

	"github.com/alcionai/clues"
	"github.com/google/uuid"
	"github.com/spf13/cobra"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"github.com/stretchr/testify/suite"

	"github.com/alcionai/corso/src/cli"
	"github.com/alcionai/corso/src/cli/flags"
	"github.com/alcionai/corso/src/cli/print"
	cliTD "github.com/alcionai/corso/src/cli/testdata"
	"github.com/alcionai/corso/src/internal/common/idname"
	"github.com/alcionai/corso/src/internal/operations"
	"github.com/alcionai/corso/src/internal/tester"
	"github.com/alcionai/corso/src/internal/tester/its"
	"github.com/alcionai/corso/src/internal/tester/tconfig"
	"github.com/alcionai/corso/src/pkg/config"
	"github.com/alcionai/corso/src/pkg/path"
	"github.com/alcionai/corso/src/pkg/selectors"
	selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
	storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)

// ---------------------------------------------------------------------------
// tests that require no existing backups
// ---------------------------------------------------------------------------

type NoBackupTeamsChatsE2ESuite struct {
	tester.Suite
	dpnd dependencies
	m365 its.M365IntgTestSetup
}

func TestNoBackupTeamsChatsE2ESuite(t *testing.T) {
	suite.Run(t, &NoBackupTeamsChatsE2ESuite{Suite: tester.NewE2ESuite(
		t,
		[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}

func (suite *NoBackupTeamsChatsE2ESuite) SetupSuite() {
	t := suite.T()
	t.Skip("not fully implemented")

	ctx, flush := tester.NewContext(t)
	defer flush()

	suite.m365 = its.GetM365(t)
	suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
}

func (suite *NoBackupTeamsChatsE2ESuite) TestTeamsChatsBackupListCmd_noBackups() {
	t := suite.T()
	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	suite.dpnd.recorder.Reset()

	cmd := cliTD.StubRootCmd(
		"backup", "list", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
	cli.BuildCommandTree(cmd)

	cmd.SetErr(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	result := suite.dpnd.recorder.String()

	// as an offhand check: the result should report that no backups are available
	assert.True(t, strings.HasSuffix(result, "No backups available\n"))
}

// ---------------------------------------------------------------------------
// tests with no prior backup
// ---------------------------------------------------------------------------

type BackupTeamsChatsE2ESuite struct {
	tester.Suite
	dpnd dependencies
	m365 its.M365IntgTestSetup
}

func TestBackupTeamsChatsE2ESuite(t *testing.T) {
	suite.Run(t, &BackupTeamsChatsE2ESuite{Suite: tester.NewE2ESuite(
		t,
		[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}

func (suite *BackupTeamsChatsE2ESuite) SetupSuite() {
	t := suite.T()
	t.Skip("not fully implemented")

	ctx, flush := tester.NewContext(t)
	defer flush()

	suite.m365 = its.GetM365(t)
	suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
}

func (suite *BackupTeamsChatsE2ESuite) TestTeamsChatsBackupCmd_chats() {
	runTeamsChatsBackupCategoryTest(suite, flags.DataChats)
}

func runTeamsChatsBackupCategoryTest(suite *BackupTeamsChatsE2ESuite, category string) {
	recorder := strings.Builder{}
	recorder.Reset()

	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	cmd, ctx := buildTeamsChatsBackupCmd(
		ctx,
		suite.dpnd.configFilePath,
		suite.m365.User.ID,
		category,
		&recorder)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	result := recorder.String()
	t.Log("backup results", result)
}

func (suite *BackupTeamsChatsE2ESuite) TestTeamsChatsBackupCmd_teamschatNotFound_chats() {
	runTeamsChatsBackupTeamsChatNotFoundTest(suite, flags.DataChats)
}

func runTeamsChatsBackupTeamsChatNotFoundTest(suite *BackupTeamsChatsE2ESuite, category string) {
	recorder := strings.Builder{}
	recorder.Reset()

	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	cmd, ctx := buildTeamsChatsBackupCmd(
		ctx,
		suite.dpnd.configFilePath,
		"foo@not-there.com",
		category,
		&recorder)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
	assert.Contains(
		t,
		err.Error(),
		"not found",
		"error missing user not found")
	assert.NotContains(t, err.Error(), "runtime error", "panic happened")

	t.Logf("backup error message: %s", err.Error())

	result := recorder.String()
	t.Log("backup results", result)
}

func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_badAzureClientIDFlag() {
	t := suite.T()
	ctx, flush := tester.NewContext(t)

	defer flush()

	suite.dpnd.recorder.Reset()

	cmd := cliTD.StubRootCmd(
		"backup", "create", "chats",
		"--teamschat", suite.m365.User.ID,
		"--azure-client-id", "invalid-value")
	cli.BuildCommandTree(cmd)

	cmd.SetErr(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}

func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_fromConfigFile() {
	t := suite.T()
	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	suite.dpnd.recorder.Reset()

	cmd := cliTD.StubRootCmd(
		"backup", "create", "chats",
		"--teamschat", suite.m365.User.ID,
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
	cli.BuildCommandTree(cmd)

	cmd.SetOut(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))
}

// AWS flags
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_badAWSFlags() {
	t := suite.T()
	ctx, flush := tester.NewContext(t)

	defer flush()

	suite.dpnd.recorder.Reset()

	cmd := cliTD.StubRootCmd(
		"backup", "create", "chats",
		"--teamschat", suite.m365.User.ID,
		"--aws-access-key", "invalid-value",
		"--aws-secret-access-key", "some-invalid-value")
	cli.BuildCommandTree(cmd)

	cmd.SetOut(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	// since invalid aws creds are explicitly set, should see a failure
	require.Error(t, err, clues.ToCore(err))
}

// ---------------------------------------------------------------------------
// tests prepared with a previous backup
// ---------------------------------------------------------------------------

type PreparedBackupTeamsChatsE2ESuite struct {
	tester.Suite
	dpnd      dependencies
	backupOps map[path.CategoryType]string
	m365      its.M365IntgTestSetup
}

func TestPreparedBackupTeamsChatsE2ESuite(t *testing.T) {
	suite.Run(t, &PreparedBackupTeamsChatsE2ESuite{
		Suite: tester.NewE2ESuite(
			t,
			[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
	})
}

func (suite *PreparedBackupTeamsChatsE2ESuite) SetupSuite() {
	t := suite.T()
	t.Skip("not fully implemented")

	ctx, flush := tester.NewContext(t)
	defer flush()

	suite.m365 = its.GetM365(t)
	suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
	suite.backupOps = make(map[path.CategoryType]string)

	var (
		teamschats = []string{suite.m365.User.ID}
		ins        = idname.NewCache(map[string]string{suite.m365.User.ID: suite.m365.User.ID})
		cats       = []path.CategoryType{
			path.ChatsCategory,
		}
	)

	for _, set := range cats {
		var (
			sel    = selectors.NewTeamsChatsBackup(teamschats)
			scopes []selectors.TeamsChatsScope
		)

		switch set {
		case path.ChatsCategory:
			scopes = selTD.TeamsChatsBackupChatScope(sel)
		}

		sel.Include(scopes)

		bop, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
		require.NoError(t, err, clues.ToCore(err))

		err = bop.Run(ctx)
		require.NoError(t, err, clues.ToCore(err))

		bIDs := string(bop.Results.BackupID)

		// sanity check, ensure we can find the backup and its details immediately
		b, err := suite.dpnd.repo.Backup(ctx, string(bop.Results.BackupID))
		require.NoError(t, err, "retrieving recent backup by ID")
		require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")

		_, b, errs := suite.dpnd.repo.GetBackupDetails(ctx, bIDs)
		require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
		require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
		require.Equal(t, bIDs, string(b.ID), "repo details matches results id")

		suite.backupOps[set] = string(b.ID)
	}
}

func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_chats() {
	runTeamsChatsListCmdTest(suite, path.ChatsCategory)
}

func runTeamsChatsListCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
	suite.dpnd.recorder.Reset()

	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	cmd := cliTD.StubRootCmd(
		"backup", "list", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
	cli.BuildCommandTree(cmd)
	cmd.SetOut(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	// compare the output
	result := suite.dpnd.recorder.String()
	assert.Contains(t, result, suite.backupOps[category])
}

func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_singleID_chats() {
	runTeamsChatsListSingleCmdTest(suite, path.ChatsCategory)
}

func runTeamsChatsListSingleCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
	suite.dpnd.recorder.Reset()

	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	bID := suite.backupOps[category]

	cmd := cliTD.StubRootCmd(
		"backup", "list", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--backup", string(bID))
	cli.BuildCommandTree(cmd)

	cmd.SetOut(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	// compare the output
	result := suite.dpnd.recorder.String()
	assert.Contains(t, result, bID)
}

func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_badID() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	cmd := cliTD.StubRootCmd(
		"backup", "list", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--backup", "smarfs")
	cli.BuildCommandTree(cmd)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}

func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsDetailsCmd_chats() {
	runTeamsChatsDetailsCmdTest(suite, path.ChatsCategory)
}

func runTeamsChatsDetailsCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
	suite.dpnd.recorder.Reset()

	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	bID := suite.backupOps[category]

	// fetch the details from the repo first
	deets, _, errs := suite.dpnd.repo.GetBackupDetails(ctx, string(bID))
	require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
	require.Empty(t, errs.Recovered())

	cmd := cliTD.StubRootCmd(
		"backup", "details", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--"+flags.BackupFN, string(bID))
	cli.BuildCommandTree(cmd)
	cmd.SetOut(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	// compare the output
	result := suite.dpnd.recorder.String()

	i := 0
	foundFolders := 0

	for _, ent := range deets.Entries {
		// Skip folders as they don't mean anything to the end user.
		if ent.Folder != nil {
			foundFolders++
			continue
		}

		suite.Run(fmt.Sprintf("detail %d", i), func() {
			assert.Contains(suite.T(), result, ent.ShortRef)
		})

		i++
	}

	// We only backup the default folder for each category so there should be at
	// least that folder (we don't make details entries for prefix folders).
	assert.GreaterOrEqual(t, foundFolders, 1)
}

// ---------------------------------------------------------------------------
// tests for deleting backups
// ---------------------------------------------------------------------------

type BackupDeleteTeamsChatsE2ESuite struct {
	tester.Suite
	dpnd      dependencies
	backupOps [3]operations.BackupOperation
}

func TestBackupDeleteTeamsChatsE2ESuite(t *testing.T) {
	suite.Run(t, &BackupDeleteTeamsChatsE2ESuite{
		Suite: tester.NewE2ESuite(
			t,
			[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
	})
}

func (suite *BackupDeleteTeamsChatsE2ESuite) SetupSuite() {
	t := suite.T()
	t.Skip("not fully implemented")

	ctx, flush := tester.NewContext(t)
	defer flush()

	suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)

	m365TeamsChatID := tconfig.M365TeamID(t)
	teamschats := []string{m365TeamsChatID}

	// some tests require an existing backup
	sel := selectors.NewTeamsChatsBackup(teamschats)
	sel.Include(selTD.TeamsChatsBackupChatScope(sel))

	for i := 0; i < cap(suite.backupOps); i++ {
		backupOp, err := suite.dpnd.repo.NewBackup(ctx, sel.Selector)
		require.NoError(t, err, clues.ToCore(err))

		suite.backupOps[i] = backupOp

		err = suite.backupOps[i].Run(ctx)
		require.NoError(t, err, clues.ToCore(err))
	}
}

func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	cmd := cliTD.StubRootCmd(
		"backup", "delete", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--"+flags.BackupIDsFN,
		fmt.Sprintf("%s,%s",
			string(suite.backupOps[0].Results.BackupID),
			string(suite.backupOps[1].Results.BackupID)))
	cli.BuildCommandTree(cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	// a follow-up details call should fail, due to the backup ID being deleted
	cmd = cliTD.StubRootCmd(
		"backup", "details", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--backups", string(suite.backupOps[0].Results.BackupID))
	cli.BuildCommandTree(cmd)

	err = cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}

func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_SingleID() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	cmd := cliTD.StubRootCmd(
		"backup", "delete", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--"+flags.BackupFN,
		string(suite.backupOps[2].Results.BackupID))
	cli.BuildCommandTree(cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	// a follow-up details call should fail, due to the backup ID being deleted
	cmd = cliTD.StubRootCmd(
		"backup", "details", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--backup", string(suite.backupOps[2].Results.BackupID))
	cli.BuildCommandTree(cmd)

	err = cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}

func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_UnknownID() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	cmd := cliTD.StubRootCmd(
		"backup", "delete", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--"+flags.BackupIDsFN, uuid.NewString())
	cli.BuildCommandTree(cmd)

	// unknown backupIDs should error since the modelStore can't find the backup
	err := cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}

func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_NoBackupID() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	cmd := cliTD.StubRootCmd(
		"backup", "delete", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
	cli.BuildCommandTree(cmd)

	// empty backupIDs should error since no data provided
	err := cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}

// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------

func buildTeamsChatsBackupCmd(
	ctx context.Context,
	configFile, resource, category string,
	recorder *strings.Builder,
) (*cobra.Command, context.Context) {
	cmd := cliTD.StubRootCmd(
		"backup", "create", "chats",
		"--"+flags.ConfigFileFN, configFile,
		"--"+flags.UserFN, resource,
		"--"+flags.CategoryDataFN, category)
	cli.BuildCommandTree(cmd)
	cmd.SetOut(recorder)

	return cmd, print.SetRootCmd(ctx, cmd)
}
@@ -1,248 +0,0 @@
package backup

import (
	"testing"

	"github.com/alcionai/clues"
	"github.com/spf13/cobra"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"github.com/stretchr/testify/suite"

	"github.com/alcionai/corso/src/cli/flags"
	flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
	cliTD "github.com/alcionai/corso/src/cli/testdata"
	"github.com/alcionai/corso/src/cli/utils"
	"github.com/alcionai/corso/src/internal/tester"
	"github.com/alcionai/corso/src/pkg/control"
)

type TeamsChatsUnitSuite struct {
	tester.Suite
}

func TestTeamsChatsUnitSuite(t *testing.T) {
	suite.Run(t, &TeamsChatsUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *TeamsChatsUnitSuite) TestAddTeamsChatsCommands() {
	expectUse := teamschatsServiceCommand

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			name:        "create teamschats",
			use:         createCommand,
			expectUse:   expectUse + " " + teamschatsServiceCommandCreateUseSuffix,
			expectShort: teamschatsCreateCmd().Short,
			expectRunE:  createTeamsChatsCmd,
		},
		{
			name:        "list teamschats",
			use:         listCommand,
			expectUse:   expectUse,
			expectShort: teamschatsListCmd().Short,
			expectRunE:  listTeamsChatsCmd,
		},
		{
			name:        "details teamschats",
			use:         detailsCommand,
			expectUse:   expectUse + " " + teamschatsServiceCommandDetailsUseSuffix,
			expectShort: teamschatsDetailsCmd().Short,
			expectRunE:  detailsTeamsChatsCmd,
		},
		{
			name:        "delete teamschats",
			use:         deleteCommand,
			expectUse:   expectUse + " " + teamschatsServiceCommandDeleteUseSuffix,
			expectShort: teamschatsDeleteCmd().Short,
			expectRunE:  deleteTeamsChatsCmd,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			cmd := &cobra.Command{Use: test.use}

			c := addTeamsChatsCommands(cmd)
			require.NotNil(t, c)

			cmds := cmd.Commands()
			require.Len(t, cmds, 1)

			child := cmds[0]
			assert.Equal(t, test.expectUse, child.Use)
			assert.Equal(t, test.expectShort, child.Short)
			tester.AreSameFunc(t, test.expectRunE, child.RunE)
		})
	}
}

func (suite *TeamsChatsUnitSuite) TestValidateTeamsChatsBackupCreateFlags() {
	table := []struct {
		name   string
		cats   []string
		expect assert.ErrorAssertionFunc
	}{
		{
			name:   "none",
			cats:   []string{},
			expect: assert.NoError,
		},
		{
			name:   "chats",
			cats:   []string{flags.DataChats},
			expect: assert.NoError,
		},
		{
			name: "all allowed",
			cats: []string{
				flags.DataChats,
			},
			expect: assert.NoError,
		},
		{
			name:   "bad inputs",
			cats:   []string{"foo"},
			expect: assert.Error,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			err := validateTeamsChatsBackupCreateFlags([]string{"*"}, test.cats)
			test.expect(suite.T(), err, clues.ToCore(err))
		})
	}
}

func (suite *TeamsChatsUnitSuite) TestBackupCreateFlags() {
	t := suite.T()

	cmd := cliTD.SetUpCmdHasFlags(
		t,
		&cobra.Command{Use: createCommand},
		addTeamsChatsCommands,
		[]cliTD.UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		flagsTD.WithFlags(
			teamschatsServiceCommand,
			[]string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.UserFN, flagsTD.FlgInputs(flagsTD.UsersInput),
				"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.TeamsChatsCategoryDataInput),
			},
			flagsTD.PreparedGenericBackupFlags(),
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags()))

	opts := utils.MakeTeamsChatsOpts(cmd)
	co := utils.Control()
	backupOpts := utils.ParseBackupOptions()

	// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
	// restore flags are switched over too and we no longer parse flags beyond
	// connection info into control.Options.
	assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
	assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
	assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)

	assert.Equal(t, control.FailFast, co.FailureHandling)
	assert.True(t, co.ToggleFeatures.DisableIncrementals)
	assert.True(t, co.ToggleFeatures.ForceItemDataDownload)

	assert.ElementsMatch(t, flagsTD.UsersInput, opts.Users)
	flagsTD.AssertGenericBackupFlags(t, cmd)
	flagsTD.AssertProviderFlags(t, cmd)
	flagsTD.AssertStorageFlags(t, cmd)
}

func (suite *TeamsChatsUnitSuite) TestBackupListFlags() {
	t := suite.T()

	cmd := cliTD.SetUpCmdHasFlags(
		t,
		&cobra.Command{Use: listCommand},
		addTeamsChatsCommands,
		[]cliTD.UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		flagsTD.WithFlags(
			teamschatsServiceCommand,
			[]string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, flagsTD.BackupInput,
			},
			flagsTD.PreparedBackupListFlags(),
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags()))

	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
	flagsTD.AssertBackupListFlags(t, cmd)
	flagsTD.AssertProviderFlags(t, cmd)
	flagsTD.AssertStorageFlags(t, cmd)
}

func (suite *TeamsChatsUnitSuite) TestBackupDetailsFlags() {
	t := suite.T()

	cmd := cliTD.SetUpCmdHasFlags(
		t,
		&cobra.Command{Use: detailsCommand},
		addTeamsChatsCommands,
		[]cliTD.UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		flagsTD.WithFlags(
			teamschatsServiceCommand,
			[]string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, flagsTD.BackupInput,
				"--" + flags.SkipReduceFN,
			},
			flagsTD.PreparedTeamsChatsFlags(),
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags()))

	co := utils.Control()

	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
	assert.True(t, co.SkipReduce)
	flagsTD.AssertProviderFlags(t, cmd)
	flagsTD.AssertStorageFlags(t, cmd)
	flagsTD.AssertTeamsChatsFlags(t, cmd)
}

func (suite *TeamsChatsUnitSuite) TestBackupDeleteFlags() {
	t := suite.T()

	cmd := cliTD.SetUpCmdHasFlags(
		t,
		&cobra.Command{Use: deleteCommand},
		addTeamsChatsCommands,
		[]cliTD.UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		flagsTD.WithFlags(
			teamschatsServiceCommand,
			[]string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, flagsTD.BackupInput,
			},
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags()))

	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
	flagsTD.AssertProviderFlags(t, cmd)
	flagsTD.AssertStorageFlags(t, cmd)
}
@@ -7,6 +7,7 @@ import (
 	"github.com/alcionai/corso/src/cli/flags"
 	"github.com/alcionai/corso/src/cli/utils"
 	"github.com/alcionai/corso/src/pkg/control"
+	"github.com/alcionai/corso/src/pkg/selectors"
 )

 // called by export.go to map subcommands to provider-specific handling.
@@ -50,13 +51,7 @@ corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd

 # Export all files and folders in folder "Documents/Finance Reports" that were created before 2020 to /my-exports
 corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd \
-    --folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00
-
-# Export all posts from a conversation with topic "hello world" from group mailbox's last backup to /my-exports
-corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world"
-
-# Export post with ID 98765abcdef from a conversation from group mailbox's last backup to /my-exports
-corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world" --post 98765abcdef`
+    --folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
 )

 // `corso export groups [<flag>...] <destination>`
@@ -98,6 +93,10 @@ func exportGroupsCmd(cmd *cobra.Command, args []string) error {
 	sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)
 	utils.FilterGroupsRestoreInfoSelectors(sel, opts)

+	// TODO(pandeyabs): Exclude conversations from export since they are not
+	// supported yet. https://github.com/alcionai/corso/issues/4822
+	sel.Exclude(sel.Conversation(selectors.Any()))
+
 	acceptedGroupsFormatTypes := []string{
 		string(control.DefaultFormat),
 		string(control.JSONFormat),
@@ -6,6 +6,7 @@ import (
 	"github.com/alcionai/corso/src/cli/flags"
 	"github.com/alcionai/corso/src/cli/utils"
+	"github.com/alcionai/corso/src/pkg/selectors"
 )

 // called by export.go to map subcommands to provider-specific handling.
@@ -45,27 +46,7 @@ corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \

 # Export all files in the "Documents" library to the current directory.
 corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
-    --library Documents --folder "Display Templates/Style Sheets" .
-
-# Export lists by their name(s)
-corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
-    --list "list-name-1,list-name-2" .
-
-# Export lists created after a given time
-corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
-    --list-created-after 2024-01-01T12:23:34 .
-
-# Export lists created before a given time
-corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
-    --list-created-before 2024-01-01T12:23:34 .
-
-# Export lists modified before a given time
-corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
-    --list-modified-before 2024-01-01T12:23:34 .
-
-# Export lists modified after a given time
-corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
-    --list-modified-after 2024-01-01T12:23:34 .`
+    --library Documents --folder "Display Templates/Style Sheets" .`
 )

 // `corso export sharepoint [<flag>...] <destination>`
@@ -106,6 +87,9 @@ func exportSharePointCmd(cmd *cobra.Command, args []string) error {
 	sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
 	utils.FilterSharePointRestoreInfoSelectors(sel, opts)

+	// Exclude lists from exports since they are not supported yet.
+	sel.Exclude(sel.Lists(selectors.Any()))
+
 	return runExport(
 		ctx,
 		cmd,
@@ -61,10 +61,6 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
 				"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
 				"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
 				"--" + flags.ListFN, flagsTD.FlgInputs(flagsTD.ListsInput),
-				"--" + flags.ListCreatedAfterFN, flagsTD.ListCreatedAfterInput,
-				"--" + flags.ListCreatedBeforeFN, flagsTD.ListCreatedBeforeInput,
-				"--" + flags.ListModifiedAfterFN, flagsTD.ListModifiedAfterInput,
-				"--" + flags.ListModifiedBeforeFN, flagsTD.ListModifiedBeforeInput,
 				"--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput),
 				"--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
 				"--" + flags.FormatFN, flagsTD.FormatType,
@@ -92,10 +88,6 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
 			assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter)
 			assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
 			assert.ElementsMatch(t, flagsTD.ListsInput, opts.Lists)
-			assert.Equal(t, flagsTD.ListCreatedAfterInput, opts.ListCreatedAfter)
-			assert.Equal(t, flagsTD.ListCreatedBeforeInput, opts.ListCreatedBefore)
-			assert.Equal(t, flagsTD.ListModifiedAfterInput, opts.ListModifiedAfter)
-			assert.Equal(t, flagsTD.ListModifiedBeforeInput, opts.ListModifiedBefore)
 			assert.ElementsMatch(t, flagsTD.PageInput, opts.Page)
 			assert.ElementsMatch(t, flagsTD.PageFolderInput, opts.PageFolder)
 			assert.Equal(t, flagsTD.Archive, opts.ExportCfg.Archive)
@@ -28,6 +28,13 @@ func AddFilesystemFlags(cmd *cobra.Command) {
 		"",
 		"path to local or network storage")
 	cobra.CheckErr(cmd.MarkFlagRequired(FilesystemPathFN))
+
+	fs.BoolVar(
+		&SucceedIfExistsFV,
+		SucceedIfExistsFN,
+		false,
+		"Exit with success if the repo has already been initialized.")
+	cobra.CheckErr(fs.MarkHidden("succeed-if-exists"))
 }

 func FilesystemFlagOverrides(cmd *cobra.Command) map[string]string {
@@ -13,7 +13,7 @@ const (
 	FileModifiedAfterFN  = "file-modified-after"
 	FileModifiedBeforeFN = "file-modified-before"

-	UseOldDeltaProcessFN = "use-old-delta-process"
+	UseDeltaTreeFN = "use-delta-tree"
 )

 var (

@@ -25,7 +25,7 @@ var (
 	FileModifiedAfterFV  string
 	FileModifiedBeforeFV string

-	UseOldDeltaProcessFV bool
+	UseDeltaTreeFV bool
 )

 // AddOneDriveDetailsAndRestoreFlags adds flags that are common to both the
@@ -12,8 +12,9 @@ const (
 	AWSSessionTokenFN = "aws-session-token"

 	// Corso Flags
 	PassphraseFN    = "passphrase"
 	NewPassphraseFN = "new-passphrase"
+	SucceedIfExistsFN = "succeed-if-exists"
 )

 var (

@@ -24,6 +25,7 @@ var (
 	AWSSessionTokenFV string
 	PassphraseFV      string
 	NewPhasephraseFV  string
+	SucceedIfExistsFV bool
 )

 // AddMultipleBackupIDsFlag adds the --backups flag.
@@ -38,6 +38,11 @@ func AddS3BucketFlags(cmd *cobra.Command) {
 	fs.StringVar(&EndpointFV, EndpointFN, "", "S3 service endpoint.")
 	fs.BoolVar(&DoNotUseTLSFV, DoNotUseTLSFN, false, "Disable TLS (HTTPS)")
 	fs.BoolVar(&DoNotVerifyTLSFV, DoNotVerifyTLSFN, false, "Disable TLS (HTTPS) certificate verification.")
+
+	// In general, we don't want to expose this flag to users and have them mistake it
+	// for a broad-scale idempotency solution. We can un-hide it later if the need arises.
+	fs.BoolVar(&SucceedIfExistsFV, SucceedIfExistsFN, false, "Exit with success if the repo has already been initialized.")
+	cobra.CheckErr(fs.MarkHidden("succeed-if-exists"))
 }

 func S3FlagOverrides(cmd *cobra.Command) map[string]string {
@@ -11,35 +11,21 @@ const (
 )

 const (
 	LibraryFN    = "library"
 	ListFN       = "list"
-	ListModifiedAfterFN  = "list-modified-after"
-	ListModifiedBeforeFN = "list-modified-before"
-	ListCreatedAfterFN   = "list-created-after"
-	ListCreatedBeforeFN  = "list-created-before"
-
 	PageFolderFN = "page-folder"
 	PageFN       = "page"
 	SiteFN       = "site"    // site only accepts WebURL values
 	SiteIDFN     = "site-id" // site-id accepts actual site ids
 )

 var (
 	LibraryFV    string
 	ListFV       []string
-	ListModifiedAfterFV  string
-	ListModifiedBeforeFV string
-	ListCreatedAfterFV   string
-	ListCreatedBeforeFV  string
-
 	PageFolderFV []string
 	PageFV       []string
 	SiteIDFV     []string
 	WebURLFV     []string
 )

 // AddSharePointDetailsAndRestoreFlags adds flags that are common to both the

@@ -82,23 +68,8 @@ func AddSharePointDetailsAndRestoreFlags(cmd *cobra.Command) {
 	fs.StringSliceVar(
 		&ListFV,
 		ListFN, nil,
-		"Select lists by name.")
-	fs.StringVar(
-		&ListModifiedAfterFV,
-		ListModifiedAfterFN, "",
-		"Select lists modified after this datetime.")
-	fs.StringVar(
-		&ListModifiedBeforeFV,
-		ListModifiedBeforeFN, "",
-		"Select lists modified before this datetime.")
-	fs.StringVar(
-		&ListCreatedAfterFV,
-		ListCreatedAfterFN, "",
-		"Select lists created after this datetime.")
-	fs.StringVar(
-		&ListCreatedBeforeFV,
-		ListCreatedBeforeFN, "",
-		"Select lists created before this datetime.")
+		"Select lists by name; accepts '"+Wildcard+"' to select all lists.")
+	cobra.CheckErr(fs.MarkHidden(ListFN))

 	// pages
|||||||
@ -1,13 +0,0 @@
|
|||||||
package flags
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
DataChats = "chats"
|
|
||||||
)
|
|
||||||
|
|
||||||
func AddTeamsChatsDetailsAndRestoreFlags(cmd *cobra.Command) {
|
|
||||||
// TODO: add details flags
|
|
||||||
}
|
|
||||||
7 src/cli/flags/testdata/flags.go vendored
@@ -21,7 +21,6 @@ var (
 	ExchangeCategoryDataInput   = []string{"email", "events", "contacts"}
 	SharepointCategoryDataInput = []string{"files", "lists", "pages"}
 	GroupsCategoryDataInput     = []string{"files", "lists", "pages", "messages"}
-	TeamsChatsCategoryDataInput = []string{"chats"}

 	ChannelInput = []string{"channel1", "channel2"}
 	MessageInput = []string{"message1", "message2"}

@@ -60,11 +59,7 @@ var (
 	FileModifiedAfterInput  = "fileModifiedAfter"
 	FileModifiedBeforeInput = "fileModifiedBefore"

 	ListsInput = []string{"listName1", "listName2"}
-	ListCreatedAfterInput   = "listCreatedAfter"
-	ListCreatedBeforeInput  = "listCreatedBefore"
-	ListModifiedAfterInput  = "listModifiedAfter"
-	ListModifiedBeforeInput = "listModifiedBefore"

 	PageFolderInput = []string{"pageFolder1", "pageFolder2"}
 	PageInput       = []string{"page1", "page2"}
25 src/cli/flags/testdata/teamschats.go vendored
@@ -1,25 +0,0 @@
package testdata

import (
	"testing"

	"github.com/spf13/cobra"
)

func PreparedTeamsChatsFlags() []string {
	return []string{
		// FIXME: populate when adding filters
		// "--" + flags.ChatCreatedAfterFN, ChatCreatedAfterInput,
		// "--" + flags.ChatCreatedBeforeFN, ChatCreatedBeforeInput,
		// "--" + flags.ChatLastMessageAfterFN, ChatLastMessageAfterInput,
		// "--" + flags.ChatLastMessageBeforeFN, ChatLastMessageBeforeInput,
	}
}

func AssertTeamsChatsFlags(t *testing.T, cmd *cobra.Command) {
	// FIXME: populate when adding filters
	// assert.Equal(t, ChatCreatedAfterInput, flags.ChatCreatedAfterFV)
	// assert.Equal(t, ChatCreatedBeforeInput, flags.ChatCreatedBeforeFV)
	// assert.Equal(t, ChatLastMessageAfterInput, flags.ChatLastMessageAfterFV)
	// assert.Equal(t, ChatLastMessageBeforeInput, flags.ChatLastMessageBeforeFV)
}
@@ -133,7 +133,7 @@ func Pretty(ctx context.Context, a any) {
 		return
 	}

-	printPrettyJSON(ctx, getRootCmd(ctx).ErrOrStderr(), a)
+	printPrettyJSON(getRootCmd(ctx).ErrOrStderr(), a)
 }

 // PrettyJSON prettifies and prints the value.

@@ -143,7 +143,7 @@ func PrettyJSON(ctx context.Context, p minimumPrintabler) {
 		return
 	}

-	outputJSON(ctx, getRootCmd(ctx).ErrOrStderr(), p, outputAsJSONDebug)
+	outputJSON(getRootCmd(ctx).ErrOrStderr(), p, outputAsJSONDebug)
 }

 // out is the testable core of exported print funcs
@@ -193,56 +193,56 @@ type minimumPrintabler interface {

 // Item prints the printable, according to the caller's requested format.
 func Item(ctx context.Context, p Printable) {
-	printItem(ctx, getRootCmd(ctx).OutOrStdout(), p)
+	printItem(getRootCmd(ctx).OutOrStdout(), p)
 }

 // print prints the printable items,
 // according to the caller's requested format.
-func printItem(ctx context.Context, w io.Writer, p Printable) {
+func printItem(w io.Writer, p Printable) {
 	if outputAsJSON || outputAsJSONDebug {
-		outputJSON(ctx, w, p, outputAsJSONDebug)
+		outputJSON(w, p, outputAsJSONDebug)
 		return
 	}

-	outputTable(ctx, w, []Printable{p})
+	outputTable(w, []Printable{p})
 }

 // ItemProperties prints the printable either as a single line or as json.
 // The difference between this and Item is that this one does not print the ID
 func ItemProperties(ctx context.Context, p Printable) {
-	printItemProperties(ctx, getRootCmd(ctx).OutOrStdout(), p)
+	printItemProperties(getRootCmd(ctx).OutOrStdout(), p)
 }

 // print prints the printable items,
 // according to the caller's requested format.
-func printItemProperties(ctx context.Context, w io.Writer, p Printable) {
+func printItemProperties(w io.Writer, p Printable) {
 	if outputAsJSON || outputAsJSONDebug {
-		outputJSON(ctx, w, p, outputAsJSONDebug)
+		outputJSON(w, p, outputAsJSONDebug)
 		return
 	}

-	outputOneLine(ctx, w, []Printable{p})
+	outputOneLine(w, []Printable{p})
 }

 // All prints the slice of printable items,
 // according to the caller's requested format.
 func All(ctx context.Context, ps ...Printable) {
-	printAll(ctx, getRootCmd(ctx).OutOrStdout(), ps)
+	printAll(getRootCmd(ctx).OutOrStdout(), ps)
 }

 // printAll prints the slice of printable items,
 // according to the caller's requested format.
-func printAll(ctx context.Context, w io.Writer, ps []Printable) {
+func printAll(w io.Writer, ps []Printable) {
 	if len(ps) == 0 {
 		return
 	}

 	if outputAsJSON || outputAsJSONDebug {
-		outputJSONArr(ctx, w, ps, outputAsJSONDebug)
+		outputJSONArr(w, ps, outputAsJSONDebug)
 		return
 	}

-	outputTable(ctx, w, ps)
+	outputTable(w, ps)
 }

 // ------------------------------------------------------------------------------------------
@@ -252,11 +252,11 @@ func printAll(ctx context.Context, w io.Writer, ps []Printable) {
 // Table writes the printables in a tabular format. Takes headers from
 // the 0th printable only.
 func Table(ctx context.Context, ps []Printable) {
-	outputTable(ctx, getRootCmd(ctx).OutOrStdout(), ps)
+	outputTable(getRootCmd(ctx).OutOrStdout(), ps)
 }

 // output to stdout the list of printable structs in a table
-func outputTable(ctx context.Context, w io.Writer, ps []Printable) {
+func outputTable(w io.Writer, ps []Printable) {
 	t := table.Table{
 		Headers: ps[0].Headers(false),
 		Rows:    [][]string{},

@@ -266,9 +266,6 @@ func outputTable(ctx context.Context, w io.Writer, ps []Printable) {
 		t.Rows = append(t.Rows, p.Values(false))
 	}

-	// observe bars needs to be flushed before printing
-	observe.Flush(ctx)
-
 	_ = t.WriteTable(
 		w,
 		&table.Config{

@@ -282,20 +279,20 @@ func outputTable(ctx context.Context, w io.Writer, ps []Printable) {
 // JSON
 // ------------------------------------------------------------------------------------------

-func outputJSON(ctx context.Context, w io.Writer, p minimumPrintabler, debug bool) {
+func outputJSON(w io.Writer, p minimumPrintabler, debug bool) {
 	if debug {
-		printJSON(ctx, w, p)
+		printJSON(w, p)
 		return
 	}

 	if debug {
-		printJSON(ctx, w, p)
+		printJSON(w, p)
 	} else {
-		printJSON(ctx, w, p.MinimumPrintable())
+		printJSON(w, p.MinimumPrintable())
 	}
 }

-func outputJSONArr(ctx context.Context, w io.Writer, ps []Printable, debug bool) {
+func outputJSONArr(w io.Writer, ps []Printable, debug bool) {
 	sl := make([]any, 0, len(ps))

 	for _, p := range ps {

@@ -306,14 +303,11 @@ func outputJSONArr(ctx context.Context, w io.Writer, ps []Printable, debug bool)
 		}
 	}

-	printJSON(ctx, w, sl)
+	printJSON(w, sl)
 }

 // output to stdout the list of printable structs as json.
-func printJSON(ctx context.Context, w io.Writer, a any) {
-	// observe bars needs to be flushed before printing
-	observe.Flush(ctx)
-
+func printJSON(w io.Writer, a any) {
 	bs, err := json.Marshal(a)
 	if err != nil {
 		fmt.Fprintf(w, "error formatting results to json: %v\n", err)

@@ -324,10 +318,7 @@ func printJSON(ctx context.Context, w io.Writer, a any) {
 }

 // output to stdout the list of printable structs as prettified json.
-func printPrettyJSON(ctx context.Context, w io.Writer, a any) {
-	// observe bars needs to be flushed before printing
-	observe.Flush(ctx)
-
+func printPrettyJSON(w io.Writer, a any) {
 	bs, err := json.MarshalIndent(a, "", "  ")
|
bs, err := json.MarshalIndent(a, "", " ")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fmt.Fprintf(w, "error formatting results to json: %v\n", err)
|
fmt.Fprintf(w, "error formatting results to json: %v\n", err)
|
||||||
@ -343,10 +334,7 @@ func printPrettyJSON(ctx context.Context, w io.Writer, a any) {
|
|||||||
|
|
||||||
// Output in the following format:
|
// Output in the following format:
|
||||||
// Bytes Uploaded: 401 kB | Items Uploaded: 59 | Items Skipped: 0 | Errors: 0
|
// Bytes Uploaded: 401 kB | Items Uploaded: 59 | Items Skipped: 0 | Errors: 0
|
||||||
func outputOneLine(ctx context.Context, w io.Writer, ps []Printable) {
|
func outputOneLine(w io.Writer, ps []Printable) {
|
||||||
// observe bars needs to be flushed before printing
|
|
||||||
observe.Flush(ctx)
|
|
||||||
|
|
||||||
headers := ps[0].Headers(true)
|
headers := ps[0].Headers(true)
|
||||||
rows := [][]string{}
|
rows := [][]string{}
|
||||||
|
|
||||||
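The hunk above is one mechanical refactor applied throughout: the internal print helpers drop their context.Context parameter (and the observe.Flush calls that needed it), leaving pure functions of a writer and a value. That makes them directly testable with a buffer. A minimal sketch of the pattern, using a stand-in function rather than corso's unexported helpers:

    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
        "io"
    )

    // printJSONTo mirrors the shape of the refactored printJSON: it depends
    // only on the writer and the value, so a test can capture output in a
    // buffer instead of stubbing a context-bound root command.
    func printJSONTo(w io.Writer, a any) {
        bs, err := json.Marshal(a)
        if err != nil {
            fmt.Fprintf(w, "error formatting results to json: %v\n", err)
            return
        }

        fmt.Fprintln(w, string(bs))
    }

    func main() {
        var buf bytes.Buffer
        printJSONTo(&buf, map[string]int{"items": 59})
        fmt.Print(buf.String()) // {"items":59}
    }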
@@ -2,6 +2,7 @@ package repo

 import (
     "github.com/alcionai/clues"
+    "github.com/pkg/errors"
     "github.com/spf13/cobra"

     "github.com/alcionai/corso/src/cli/flags"
@@ -109,6 +110,10 @@ func initFilesystemCmd(cmd *cobra.Command, args []string) error {
     ric := repository.InitConfig{RetentionOpts: retentionOpts}

     if err = r.Initialize(ctx, ric); err != nil {
+        if flags.SucceedIfExistsFV && errors.Is(err, repository.ErrorRepoAlreadyExists) {
+            return nil
+        }
+
         return Only(ctx, clues.Stack(ErrInitializingRepo, err))
     }

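The new branch downgrades exactly one failure mode, and only on request: an "already exists" error from Initialize becomes a success when the user passed the opt-in flag. A self-contained sketch of that guard; errAlreadyExists here stands in for repository.ErrorRepoAlreadyExists, and initialize fakes the failing call:

    package main

    import (
        "errors"
        "fmt"
    )

    // errAlreadyExists stands in for repository.ErrorRepoAlreadyExists.
    var errAlreadyExists = errors.New("repository already exists")

    // initialize pretends the underlying Initialize call failed because the
    // repo was already set up, which is the only case the guard cares about.
    func initialize() error {
        return fmt.Errorf("initializing repo: %w", errAlreadyExists)
    }

    // initRepo sketches the guard added in both init commands: when the user
    // opts in, an "already exists" failure is treated as success, making
    // repeated `repo init` runs idempotent.
    func initRepo(succeedIfExists bool) error {
        if err := initialize(); err != nil {
            if succeedIfExists && errors.Is(err, errAlreadyExists) {
                return nil
            }

            return err
        }

        return nil
    }

    func main() {
        fmt.Println(initRepo(false)) // initializing repo: repository already exists
        fmt.Println(initRepo(true))  // <nil>
    }

Note that errors.Is walks the %w wrap chain, so the guard still matches after clues or fmt wrapping.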
@@ -5,6 +5,7 @@ import (
     "testing"

     "github.com/alcionai/clues"
+    "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/require"
     "github.com/stretchr/testify/suite"

@@ -81,9 +82,9 @@ func (suite *FilesystemE2ESuite) TestInitFilesystemCmd() {
         err = cmd.ExecuteContext(ctx)
         require.NoError(t, err, clues.ToCore(err))

-        // noop
+        // a second initialization should result in an error
         err = cmd.ExecuteContext(ctx)
-        require.NoError(t, err, clues.ToCore(err))
+        assert.ErrorIs(t, err, repository.ErrorRepoAlreadyExists, clues.ToCore(err))
     })
 }
 }
@@ -4,6 +4,7 @@ import (
     "strings"

     "github.com/alcionai/clues"
+    "github.com/pkg/errors"
     "github.com/spf13/cobra"

     "github.com/alcionai/corso/src/cli/flags"
@@ -131,6 +132,10 @@ func initS3Cmd(cmd *cobra.Command, args []string) error {
     ric := repository.InitConfig{RetentionOpts: retentionOpts}

     if err = r.Initialize(ctx, ric); err != nil {
+        if flags.SucceedIfExistsFV && errors.Is(err, repository.ErrorRepoAlreadyExists) {
+            return nil
+        }
+
         return Only(ctx, clues.Stack(ErrInitializingRepo, err))
     }

@@ -89,9 +89,9 @@ func (suite *S3E2ESuite) TestInitS3Cmd() {
         err = cmd.ExecuteContext(ctx)
         require.NoError(t, err, clues.ToCore(err))

-        // noop
+        // a second initialization should result in an error
         err = cmd.ExecuteContext(ctx)
-        require.NoError(t, err, clues.ToCore(err))
+        assert.ErrorIs(t, err, repository.ErrorRepoAlreadyExists, clues.ToCore(err))
     })
 }
 }
@@ -116,7 +116,8 @@ func (suite *S3E2ESuite) TestInitMultipleTimes() {
         "repo", "init", "s3",
         "--"+flags.ConfigFileFN, configFP,
         "--bucket", cfg.Bucket,
-        "--prefix", cfg.Prefix)
+        "--prefix", cfg.Prefix,
+        "--succeed-if-exists")
     cli.BuildCommandTree(cmd)

     // run the command
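Taken together, these tests pin down both sides of the new contract: a bare re-init now fails with ErrorRepoAlreadyExists, while passing --succeed-if-exists turns the rerun into a no-op. A compressed sketch of that contract as a testify helper; runInit is a hypothetical hook standing in for "execute `corso repo init` against the same config":

    package repo_test

    import (
        "testing"

        "github.com/stretchr/testify/assert"
        "github.com/stretchr/testify/require"
    )

    // assertInitContract checks the behavior the two e2e tests above encode.
    func assertInitContract(t *testing.T, runInit func(extraArgs ...string) error) {
        require.NoError(t, runInit())                     // first init succeeds
        assert.Error(t, runInit())                        // bare rerun fails: repo already exists
        assert.NoError(t, runInit("--succeed-if-exists")) // opted-in rerun is treated as success
    }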
@@ -6,6 +6,7 @@ import (
     "github.com/alcionai/corso/src/cli/flags"
     "github.com/alcionai/corso/src/cli/utils"
     "github.com/alcionai/corso/src/pkg/dttm"
+    "github.com/alcionai/corso/src/pkg/selectors"
 )

 // called by restore.go to map subcommands to provider-specific handling.
@@ -50,27 +51,7 @@ corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \

 # Restore all files in the "Documents" library.
 corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
-    --library Documents --folder "Display Templates/Style Sheets"
-
-# Restore lists by their name(s)
-corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
-    --list "list-name-1,list-name-2"
-
-# Restore lists created after a given time
-corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
-    --list-created-after 2024-01-01T12:23:34
-
-# Restore lists created before a given time
-corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
-    --list-created-before 2024-01-01T12:23:34
-
-# Restore lists modified before a given time
-corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
-    --list-modified-before 2024-01-01T12:23:34
-
-# Restore lists modified after a given time
-corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
-    --list-modified-after 2024-01-01T12:23:34`
+    --library Documents --folder "Display Templates/Style Sheets" `
 )

 // `corso restore sharepoint [<flag>...]`
@@ -106,6 +87,9 @@ func restoreSharePointCmd(cmd *cobra.Command, args []string) error {
     sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
     utils.FilterSharePointRestoreInfoSelectors(sel, opts)

+    // Exclude lists from restore since they are not supported yet.
+    sel.Exclude(sel.Lists(selectors.Any()))
+
     return runRestore(
         ctx,
         cmd,
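The Exclude call is belt-and-braces: even if a backup still contains list entries, excluding sel.Lists(selectors.Any()) keeps them out of the restore regardless of what the include and filter scopes match, on the usual selector rule that exclusions trump inclusions. A stripped-down illustration of that precedence; these are toy types, not corso's selectors package:

    package main

    import "fmt"

    // selector is a toy: an item passes if any include matches and no
    // exclude matches, mirroring the precedence the restore command relies on.
    type selector struct {
        include []func(string) bool
        exclude []func(string) bool
    }

    func (s *selector) matches(category string) bool {
        included := false
        for _, f := range s.include {
            if f(category) {
                included = true
                break
            }
        }

        for _, f := range s.exclude {
            if f(category) {
                return false // exclude beats include
            }
        }

        return included
    }

    func main() {
        s := &selector{
            include: []func(string) bool{func(string) bool { return true }}, // like Any()
            exclude: []func(string) bool{func(c string) bool { return c == "lists" }},
        }

        fmt.Println(s.matches("libraries")) // true
        fmt.Println(s.matches("lists"))     // false: excluded even though included
    }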
@@ -60,10 +60,6 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
     "--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
     "--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
     "--" + flags.ListFN, flagsTD.FlgInputs(flagsTD.ListsInput),
-    "--" + flags.ListCreatedAfterFN, flagsTD.ListCreatedAfterInput,
-    "--" + flags.ListCreatedBeforeFN, flagsTD.ListCreatedBeforeInput,
-    "--" + flags.ListModifiedAfterFN, flagsTD.ListModifiedAfterInput,
-    "--" + flags.ListModifiedBeforeFN, flagsTD.ListModifiedBeforeInput,
     "--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput),
     "--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
     "--" + flags.CollisionsFN, flagsTD.Collisions,
@@ -93,10 +89,6 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
     assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter)
     assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
     assert.ElementsMatch(t, flagsTD.ListsInput, opts.Lists)
-    assert.Equal(t, flagsTD.ListCreatedAfterInput, opts.ListCreatedAfter)
-    assert.Equal(t, flagsTD.ListCreatedBeforeInput, opts.ListCreatedBefore)
-    assert.Equal(t, flagsTD.ListModifiedAfterInput, opts.ListModifiedAfter)
-    assert.Equal(t, flagsTD.ListModifiedBeforeInput, opts.ListModifiedBefore)
     assert.ElementsMatch(t, flagsTD.PageInput, opts.Page)
     assert.ElementsMatch(t, flagsTD.PageFolderInput, opts.PageFolder)
     assert.Equal(t, flagsTD.Collisions, opts.RestoreCfg.Collisions)
@@ -50,10 +50,6 @@ func validateCommonTimeFlags(opts any) error {
     flags.FileCreatedBeforeFN,
     flags.FileModifiedAfterFN,
     flags.FileModifiedBeforeFN,
-    flags.ListCreatedAfterFN,
-    flags.ListCreatedBeforeFN,
-    flags.ListModifiedAfterFN,
-    flags.ListModifiedBeforeFN,
 }

 isFlagPopulated := func(opts any, flag string) bool {
@@ -103,6 +103,7 @@ func (suite *FlagUnitSuite) TestAddS3BucketFlags() {
     assert.Equal(t, "prefix1", flags.PrefixFV, flags.PrefixFN)
     assert.True(t, flags.DoNotUseTLSFV, flags.DoNotUseTLSFN)
     assert.True(t, flags.DoNotVerifyTLSFV, flags.DoNotVerifyTLSFN)
+    assert.True(t, flags.SucceedIfExistsFV, flags.SucceedIfExistsFN)
 },
 }

@@ -115,6 +116,7 @@
     "--" + flags.PrefixFN, "prefix1",
     "--" + flags.DoNotUseTLSFN,
     "--" + flags.DoNotVerifyTLSFN,
+    "--" + flags.SucceedIfExistsFN,
 })

 err := cmd.Execute()
@@ -128,6 +130,7 @@ func (suite *FlagUnitSuite) TestFilesystemFlags() {
     Use: "test",
     Run: func(cmd *cobra.Command, args []string) {
         assert.Equal(t, "/tmp/test", flags.FilesystemPathFV, flags.FilesystemPathFN)
+        assert.True(t, flags.SucceedIfExistsFV, flags.SucceedIfExistsFN)
         assert.Equal(t, "tenantID", flags.AzureClientTenantFV, flags.AzureClientTenantFN)
         assert.Equal(t, "clientID", flags.AzureClientIDFV, flags.AzureClientIDFN)
         assert.Equal(t, "secret", flags.AzureClientSecretFV, flags.AzureClientSecretFN)
@@ -140,6 +143,7 @@ func (suite *FlagUnitSuite) TestFilesystemFlags() {
     cmd.SetArgs([]string{
         "test",
         "--" + flags.FilesystemPathFN, "/tmp/test",
+        "--" + flags.SucceedIfExistsFN,
         "--" + flags.AzureClientIDFN, "clientID",
         "--" + flags.AzureClientTenantFN, "tenantID",
         "--" + flags.AzureClientSecretFN, "secret",
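For reference, the FN/FV pairing these tests exercise is cobra's standard pattern: a flag-name constant plus a package-level value bound by BoolVar. The actual registration is not shown in this diff; the flag name below is confirmed by the e2e test args, but the function name and help text are illustrative:

    package flags

    import "github.com/spf13/cobra"

    // Illustrative mirror of the FN (flag name) / FV (flag value) convention.
    const SucceedIfExistsFN = "succeed-if-exists"

    var SucceedIfExistsFV bool

    // AddSucceedIfExistsFlag binds the package-level value to the command, so
    // parsing `--succeed-if-exists` flips SucceedIfExistsFV to true.
    func AddSucceedIfExistsFlag(cmd *cobra.Command) {
        cmd.Flags().BoolVar(
            &SucceedIfExistsFV,
            SucceedIfExistsFN,
            false,
            "exit with success when the repo has already been initialized")
    }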
@@ -266,14 +266,9 @@ func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *se
     opts.Conversations = selectors.Any()
 }

-// if no post is specified, select all posts in the conversation
-if convPosts == 0 {
-    opts.Posts = selectors.Any()
-}
-
 // if no post is specified, only select conversations;
-// otherwise, look for conv/post pairs
-if convs == 0 {
+// otherwise, look for channel/message pairs
+if chanMsgs == 0 {
     sel.Include(sel.Conversation(opts.Conversations))
 } else {
     sel.Include(sel.ConversationPosts(opts.Conversations, opts.Posts))
@@ -28,7 +28,7 @@ func Control() control.Options {
     opt.ToggleFeatures.DisableSlidingWindowLimiter = flags.DisableSlidingWindowLimiterFV
     opt.ToggleFeatures.DisableLazyItemReader = flags.DisableLazyItemReaderFV
     opt.ToggleFeatures.ExchangeImmutableIDs = flags.EnableImmutableIDFV
-    opt.ToggleFeatures.UseOldDeltaProcess = flags.UseOldDeltaProcessFV
+    opt.ToggleFeatures.UseDeltaTree = flags.UseDeltaTreeFV
     opt.Parallelism.ItemFetch = flags.FetchParallelismFV

     return opt
@@ -58,7 +58,7 @@ func ParseBackupOptions() control.BackupConfig {
     opt.M365.DeltaPageSize = dps
     opt.M365.DisableDeltaEndpoint = flags.DisableDeltaFV
     opt.M365.ExchangeImmutableIDs = flags.EnableImmutableIDFV
-    opt.M365.UseOldDriveDeltaProcess = flags.UseOldDeltaProcessFV
+    opt.M365.UseDriveDeltaTree = flags.UseDeltaTreeFV
     opt.ServiceRateLimiter.DisableSlidingWindowLimiter = flags.DisableSlidingWindowLimiterFV
     opt.Parallelism.ItemFetch = flags.FetchParallelismFV
     opt.Incrementals.ForceFullEnumeration = flags.DisableIncrementalsFV
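Both hunks are the same rename seen from two call sites: the UseDeltaTree flag value is copied into the options struct that the backup engine consumes. A toy sketch of that flag-to-options flow; every type here is a stand-in, not corso's control package:

    package main

    import "fmt"

    // Stand-ins for the flags package value and the options toggle.
    var useDeltaTreeFV = true // would normally be set by cobra flag parsing

    type toggleFeatures struct {
        UseDeltaTree bool
    }

    type options struct {
        ToggleFeatures toggleFeatures
    }

    // control mirrors the shape of Control() above: flags are read once at
    // command start and frozen into an options value passed down the stack.
    func control() options {
        var opt options
        opt.ToggleFeatures.UseDeltaTree = useDeltaTreeFV

        return opt
    }

    func main() {
        fmt.Printf("%+v\n", control()) // {ToggleFeatures:{UseDeltaTree:true}}
    }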
@@ -25,11 +25,7 @@ type SharePointOpts struct {
     FileModifiedAfter  string
     FileModifiedBefore string

     Lists []string
-    ListModifiedAfter  string
-    ListModifiedBefore string
-    ListCreatedBefore  string
-    ListCreatedAfter   string

     PageFolder []string
     Page       []string
@@ -50,14 +46,6 @@ func (s SharePointOpts) GetFileTimeField(flag string) string {
         return s.FileModifiedAfter
     case flags.FileModifiedBeforeFN:
         return s.FileModifiedBefore
-    case flags.ListModifiedAfterFN:
-        return s.ListModifiedAfter
-    case flags.ListModifiedBeforeFN:
-        return s.ListModifiedBefore
-    case flags.ListCreatedBeforeFN:
-        return s.ListCreatedBefore
-    case flags.ListCreatedAfterFN:
-        return s.ListCreatedAfter
     default:
         return ""
     }
@@ -76,11 +64,7 @@ func MakeSharePointOpts(cmd *cobra.Command) SharePointOpts {
     FileModifiedAfter:  flags.FileModifiedAfterFV,
     FileModifiedBefore: flags.FileModifiedBeforeFV,

     Lists: flags.ListFV,
-    ListModifiedAfter:  flags.ListModifiedAfterFV,
-    ListModifiedBefore: flags.ListModifiedBeforeFV,
-    ListCreatedAfter:   flags.ListCreatedAfterFV,
-    ListCreatedBefore:  flags.ListCreatedBeforeFV,

     Page:       flags.PageFV,
     PageFolder: flags.PageFolderFV,
@@ -98,21 +82,22 @@ func MakeSharePointOpts(cmd *cobra.Command) SharePointOpts {
 func SharePointAllowedCategories() map[string]struct{} {
     return map[string]struct{}{
         flags.DataLibraries: {},
-        flags.DataLists:     {},
+        // flags.DataLists: {}, [TODO]: uncomment when lists are enabled
     }
 }

 func AddCategories(sel *selectors.SharePointBackup, cats []string) *selectors.SharePointBackup {
     if len(cats) == 0 {
-        // [TODO](hitesh) to enable lists without being invoked explicitly via --data flag
+        // backup of sharepoint lists not enabled yet
         // sel.Include(sel.LibraryFolders(selectors.Any()), sel.Lists(selectors.Any()))
         sel.Include(sel.LibraryFolders(selectors.Any()))
     }

     for _, d := range cats {
         switch d {
-        case flags.DataLists:
-            sel.Include(sel.Lists(selectors.Any()))
+        // backup of sharepoint lists not enabled yet
+        // case flags.DataLists:
+        //     sel.Include(sel.Lists(selectors.Any()))
         case flags.DataLibraries:
             sel.Include(sel.LibraryFolders(selectors.Any()))
         }
@@ -253,8 +238,4 @@ func FilterSharePointRestoreInfoSelectors(
     AddSharePointInfo(sel, opts.FileCreatedBefore, sel.CreatedBefore)
     AddSharePointInfo(sel, opts.FileModifiedAfter, sel.ModifiedAfter)
     AddSharePointInfo(sel, opts.FileModifiedBefore, sel.ModifiedBefore)
-    AddSharePointInfo(sel, opts.ListModifiedAfter, sel.ListModifiedAfter)
-    AddSharePointInfo(sel, opts.ListModifiedBefore, sel.ListModifiedBefore)
-    AddSharePointInfo(sel, opts.ListCreatedAfter, sel.ListCreatedAfter)
-    AddSharePointInfo(sel, opts.ListCreatedBefore, sel.ListCreatedBefore)
 }
@@ -279,20 +279,12 @@ func (suite *SharePointUtilsSuite) TestValidateSharePointRestoreFlags() {
     FileCreatedBefore:  dttm.Now(),
     FileModifiedAfter:  dttm.Now(),
     FileModifiedBefore: dttm.Now(),
-    ListCreatedAfter:   dttm.Now(),
-    ListCreatedBefore:  dttm.Now(),
-    ListModifiedAfter:  dttm.Now(),
-    ListModifiedBefore: dttm.Now(),
     Populated: flags.PopulatedFlags{
         flags.SiteFN:               struct{}{},
         flags.FileCreatedAfterFN:   struct{}{},
         flags.FileCreatedBeforeFN:  struct{}{},
         flags.FileModifiedAfterFN:  struct{}{},
         flags.FileModifiedBeforeFN: struct{}{},
-        flags.ListCreatedAfterFN:   struct{}{},
-        flags.ListCreatedBeforeFN:  struct{}{},
-        flags.ListModifiedAfterFN:  struct{}{},
-        flags.ListModifiedBeforeFN: struct{}{},
     },
 },
 expect: assert.NoError,
@@ -358,50 +350,6 @@ func (suite *SharePointUtilsSuite) TestValidateSharePointRestoreFlags() {
     },
     expect: assert.Error,
 },
-{
-    name:     "invalid list created after",
-    backupID: "id",
-    opts: utils.SharePointOpts{
-        ListCreatedAfter: "1235",
-        Populated: flags.PopulatedFlags{
-            flags.ListCreatedAfterFN: struct{}{},
-        },
-    },
-    expect: assert.Error,
-},
-{
-    name:     "invalid list created before",
-    backupID: "id",
-    opts: utils.SharePointOpts{
-        ListCreatedBefore: "1235",
-        Populated: flags.PopulatedFlags{
-            flags.ListCreatedBeforeFN: struct{}{},
-        },
-    },
-    expect: assert.Error,
-},
-{
-    name:     "invalid list modified after",
-    backupID: "id",
-    opts: utils.SharePointOpts{
-        ListModifiedAfter: "1235",
-        Populated: flags.PopulatedFlags{
-            flags.ListModifiedAfterFN: struct{}{},
-        },
-    },
-    expect: assert.Error,
-},
-{
-    name:     "invalid list modified before",
-    backupID: "id",
-    opts: utils.SharePointOpts{
-        ListModifiedBefore: "1235",
-        Populated: flags.PopulatedFlags{
-            flags.ListModifiedBeforeFN: struct{}{},
-        },
-    },
-    expect: assert.Error,
-},
 }
 for _, test := range table {
     suite.Run(test.name, func() {
@@ -411,35 +359,36 @@
     }
 }

+// [TODO] uncomment the test cases once sharepoint list backup is enabled
 func (suite *SharePointUtilsSuite) TestAddSharepointCategories() {
     table := []struct {
         name           string
         cats           []string
         expectScopeLen int
     }{
-        {
-            name:           "none",
-            cats:           []string{},
-            expectScopeLen: 1,
-        },
+        // {
+        //     name:           "none",
+        //     cats:           []string{},
+        //     expectScopeLen: 2,
+        // },
         {
             name:           "libraries",
             cats:           []string{flags.DataLibraries},
             expectScopeLen: 1,
         },
-        {
-            name:           "lists",
-            cats:           []string{flags.DataLists},
-            expectScopeLen: 1,
-        },
-        {
-            name: "all allowed",
-            cats: []string{
-                flags.DataLibraries,
-                flags.DataLists,
-            },
-            expectScopeLen: 2,
-        },
+        // {
+        //     name:           "lists",
+        //     cats:           []string{flags.DataLists},
+        //     expectScopeLen: 1,
+        // },
+        // {
+        //     name: "all allowed",
+        //     cats: []string{
+        //         flags.DataLibraries,
+        //         flags.DataLists,
+        //     },
+        //     expectScopeLen: 2,
+        // },
         {
             name: "bad inputs",
             cats: []string{"foo"},
@@ -1,101 +0,0 @@
-package utils
-
-import (
-    "context"
-
-    "github.com/alcionai/clues"
-    "github.com/spf13/cobra"
-
-    "github.com/alcionai/corso/src/cli/flags"
-    "github.com/alcionai/corso/src/pkg/selectors"
-)
-
-type TeamsChatsOpts struct {
-    Users []string
-
-    ExportCfg ExportCfgOpts
-
-    Populated flags.PopulatedFlags
-}
-
-func TeamsChatsAllowedCategories() map[string]struct{} {
-    return map[string]struct{}{
-        flags.DataChats: {},
-    }
-}
-
-func AddTeamsChatsCategories(sel *selectors.TeamsChatsBackup, cats []string) *selectors.TeamsChatsBackup {
-    if len(cats) == 0 {
-        sel.Include(sel.AllData())
-    }
-
-    for _, d := range cats {
-        switch d {
-        case flags.DataChats:
-            sel.Include(sel.Chats(selectors.Any()))
-        }
-    }
-
-    return sel
-}
-
-func MakeTeamsChatsOpts(cmd *cobra.Command) TeamsChatsOpts {
-    return TeamsChatsOpts{
-        Users: flags.UserFV,
-
-        ExportCfg: makeExportCfgOpts(cmd),
-
-        // populated contains the list of flags that appear in the
-        // command, according to pflags. Use this to differentiate
-        // between an "empty" and a "missing" value.
-        Populated: flags.GetPopulatedFlags(cmd),
-    }
-}
-
-// ValidateTeamsChatsRestoreFlags checks common flags for correctness and interdependencies
-func ValidateTeamsChatsRestoreFlags(backupID string, opts TeamsChatsOpts, isRestore bool) error {
-    if len(backupID) == 0 {
-        return clues.New("a backup ID is required")
-    }
-
-    // restore isn't currently supported
-    if isRestore {
-        return clues.New("restore not supported")
-    }
-
-    return nil
-}
-
-// AddTeamsChatsFilter adds the scope of the provided values to the selector's
-// filter set
-func AddTeamsChatsFilter(
-    sel *selectors.TeamsChatsRestore,
-    v string,
-    f func(string) []selectors.TeamsChatsScope,
-) {
-    if len(v) == 0 {
-        return
-    }
-
-    sel.Filter(f(v))
-}
-
-// IncludeTeamsChatsRestoreDataSelectors builds the common data-selector
-// inclusions for teamschats commands.
-func IncludeTeamsChatsRestoreDataSelectors(ctx context.Context, opts TeamsChatsOpts) *selectors.TeamsChatsRestore {
-    users := opts.Users
-
-    if len(opts.Users) == 0 {
-        users = selectors.Any()
-    }
-
-    return selectors.NewTeamsChatsRestore(users)
-}
-
-// FilterTeamsChatsRestoreInfoSelectors builds the common info-selector filters.
-func FilterTeamsChatsRestoreInfoSelectors(
-    sel *selectors.TeamsChatsRestore,
-    opts TeamsChatsOpts,
-) {
-    // TODO: populate when adding filters
-}
@@ -18,7 +18,6 @@ import (
     "github.com/alcionai/corso/src/internal/m365"
     exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
     odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
-    siteMock "github.com/alcionai/corso/src/internal/m365/service/sharepoint/mock"
     m365Stub "github.com/alcionai/corso/src/internal/m365/stub"
     "github.com/alcionai/corso/src/internal/operations/inject"
     "github.com/alcionai/corso/src/internal/tester"
@@ -59,7 +58,7 @@ func generateAndRestoreItems(
     service path.ServiceType,
     cat path.CategoryType,
     sel selectors.Selector,
-    tenantID, resourceID, destFldr string,
+    tenantID, userID, destFldr string,
     howMany int,
     dbf dataBuilderFunc,
     opts control.Options,
@@ -74,7 +73,7 @@ func generateAndRestoreItems(
     nowLegacy = dttm.FormatToLegacy(time.Now())
     id        = uuid.NewString()
     subject   = "automated " + now[:16] + " - " + id[:8]
-    body      = "automated " + cat.HumanString() + " generation for " + resourceID + " at " + now + " - " + id
+    body      = "automated " + cat.HumanString() + " generation for " + userID + " at " + now + " - " + id
 )

 items = append(items, item{
@@ -95,7 +94,7 @@ func generateAndRestoreItems(

 dataColls, err := buildCollections(
     service,
-    tenantID, resourceID,
+    tenantID, userID,
     restoreCfg,
     collections)
 if err != nil {
@@ -193,31 +192,29 @@ type collection struct {

 func buildCollections(
     service path.ServiceType,
-    tenant, resource string,
+    tenant, user string,
     restoreCfg control.RestoreConfig,
     colls []collection,
 ) ([]data.RestoreCollection, error) {
-    var (
-        collections = make([]data.RestoreCollection, 0, len(colls))
-        mc          data.Collection
-    )
+    collections := make([]data.RestoreCollection, 0, len(colls))

     for _, c := range colls {
-        switch {
-        case service == path.ExchangeService:
-            emc, err := generateExchangeMockColls(tenant, resource, c)
-            if err != nil {
-                return nil, err
-            }
-
-            mc = emc
-        case service == path.SharePointService:
-            smc, err := generateSharepointListsMockColls(tenant, resource, c)
-            if err != nil {
-                return nil, err
-            }
-
-            mc = smc
+        pth, err := path.Build(
+            tenant,
+            user,
+            service,
+            c.category,
+            false,
+            c.PathElements...)
+        if err != nil {
+            return nil, err
+        }
+
+        mc := exchMock.NewCollection(pth, pth, len(c.items))
+
+        for i := 0; i < len(c.items); i++ {
+            mc.Names[i] = c.items[i].name
+            mc.Data[i] = c.items[i].data
         }

         collections = append(collections, data.NoFetchRestoreCollection{Collection: mc})
@@ -226,49 +223,6 @@ func buildCollections(
     return collections, nil
 }

-func generateExchangeMockColls(tenant string, resource string, c collection) (*exchMock.DataCollection, error) {
-    pth, err := path.Build(
-        tenant,
-        resource,
-        path.ExchangeService,
-        c.category,
-        false,
-        c.PathElements...)
-    if err != nil {
-        return nil, err
-    }
-
-    emc := exchMock.NewCollection(pth, pth, len(c.items))
-
-    for i := 0; i < len(c.items); i++ {
-        emc.Names[i] = c.items[i].name
-        emc.Data[i] = c.items[i].data
-    }
-
-    return emc, nil
-}
-
-func generateSharepointListsMockColls(tenant string, resource string, c collection) (*siteMock.ListCollection, error) {
-    pth, err := path.BuildOrPrefix(
-        tenant,
-        resource,
-        path.SharePointService,
-        c.category,
-        false)
-    if err != nil {
-        return nil, err
-    }
-
-    smc := siteMock.NewCollection(pth, pth, len(c.items))
-
-    for i := 0; i < len(c.items); i++ {
-        smc.Names[i] = c.items[i].name
-        smc.Data[i] = c.items[i].data
-    }
-
-    return smc, nil
-}
-
 var (
     folderAName = "folder-a"
     folderBName = "b"
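With the SharePoint branch gone, buildCollections collapses back to a straight loop: build a path, create an exchange mock collection, and copy item names and bytes in by index. A self-contained sketch of that copy-by-index pattern; mockCollection below is a stand-in for exchMock.DataCollection, not the real type:

    package main

    import "fmt"

    type item struct {
        name string
        data []byte
    }

    // mockCollection stands in for exchMock.DataCollection: parallel slices,
    // pre-sized by the constructor, filled in by index.
    type mockCollection struct {
        Names []string
        Data  [][]byte
    }

    func newCollection(n int) *mockCollection {
        return &mockCollection{Names: make([]string, n), Data: make([][]byte, n)}
    }

    func main() {
        items := []item{{"a.eml", []byte("hello")}, {"b.eml", []byte("world")}}

        mc := newCollection(len(items))
        for i := 0; i < len(items); i++ {
            mc.Names[i] = items[i].name
            mc.Data[i] = items[i].data
        }

        fmt.Println(mc.Names) // [a.eml b.eml]
    }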
@@ -7,8 +7,6 @@ import (
     . "github.com/alcionai/corso/src/cli/print"
     "github.com/alcionai/corso/src/cli/utils"
-    siteMock "github.com/alcionai/corso/src/internal/m365/service/sharepoint/mock"
-    "github.com/alcionai/corso/src/pkg/control"
     "github.com/alcionai/corso/src/pkg/count"
     "github.com/alcionai/corso/src/pkg/fault"
     "github.com/alcionai/corso/src/pkg/logger"
@@ -16,23 +14,14 @@ import (
     "github.com/alcionai/corso/src/pkg/selectors"
 )

-var (
-    spFilesCmd = &cobra.Command{
-        Use:   "files",
-        Short: "Generate SharePoint files",
-        RunE:  handleSharePointLibraryFileFactory,
-    }
-
-    spListsCmd = &cobra.Command{
-        Use:   "lists",
-        Short: "Generate SharePoint lists",
-        RunE:  handleSharepointListsFactory,
-    }
-)
+var spFilesCmd = &cobra.Command{
+    Use:   "files",
+    Short: "Generate SharePoint files",
+    RunE:  handleSharePointLibraryFileFactory,
+}

 func AddSharePointCommands(cmd *cobra.Command) {
     cmd.AddCommand(spFilesCmd)
-    cmd.AddCommand(spListsCmd)
 }

 func handleSharePointLibraryFileFactory(cmd *cobra.Command, args []string) error {
@@ -81,52 +70,3 @@ func handleSharePointLibraryFileFactory(cmd *cobra.Command, args []string) error

     return nil
 }
-
-func handleSharepointListsFactory(cmd *cobra.Command, args []string) error {
-    var (
-        ctx      = cmd.Context()
-        service  = path.SharePointService
-        category = path.ListsCategory
-        errs     = fault.New(false)
-    )
-
-    if utils.HasNoFlagsAndShownHelp(cmd) {
-        return nil
-    }
-
-    ctrl, _, ins, err := getControllerAndVerifyResourceOwner(ctx, Site, path.SharePointService)
-    if err != nil {
-        return Only(ctx, err)
-    }
-
-    deets, err := generateAndRestoreItems(
-        ctx,
-        ctrl,
-        service,
-        category,
-        selectors.NewSharePointRestore([]string{ins.ID()}).Selector,
-        Tenant, ins.ID(), Destination,
-        Count,
-        func(id, now, subject, body string) []byte {
-            listBytes, err := siteMock.ListBytes(id)
-            if err != nil {
-                logger.CtxErr(ctx, err)
-                return nil
-            }
-            return listBytes
-        },
-        control.DefaultOptions(),
-        errs,
-        count.New())
-    if err != nil {
-        return Only(ctx, err)
-    }
-
-    for _, e := range errs.Recovered() {
-        logger.CtxErr(ctx, err).Error(e.Error())
-    }
-
-    deets.PrintEntries(ctx)
-
-    return nil
-}
@@ -6,6 +6,12 @@ Param (
     [Parameter(Mandatory = $False, HelpMessage = "Site for which to delete folders in SharePoint")]
     [String]$Site,

+    [Parameter(Mandatory = $False, HelpMessage = "Exchange Admin email")]
+    [String]$AdminUser = $ENV:M365_TENANT_ADMIN_USER,
+
+    [Parameter(Mandatory = $False, HelpMessage = "Exchange Admin password")]
+    [String]$AdminPwd = $ENV:M365_TENANT_ADMIN_PASSWORD,
+
     [Parameter(Mandatory = $False, HelpMessage = "Document library root. Can add multiple comma-separated values")]
     [String[]]$LibraryNameList = @(),

@@ -16,16 +22,7 @@ Param (
     [String[]]$FolderPrefixPurgeList,

     [Parameter(Mandatory = $False, HelpMessage = "Delete document libraries with this prefix")]
-    [String[]]$LibraryPrefixDeleteList = @(),
-
-    [Parameter(Mandatory = $False, HelpMessage = "Tenant domain")]
-    [String]$TenantDomain = $ENV:TENANT_DOMAIN,
-
-    [Parameter(Mandatory = $False, HelpMessage = "Azure ClientId")]
-    [String]$ClientId = $ENV:AZURE_CLIENT_ID,
-
-    [Parameter(Mandatory = $False, HelpMessage = "Azure AppCert")]
-    [String]$AppCert = $ENV:AZURE_APP_CERT
+    [String[]]$LibraryPrefixDeleteList = @()
 )

 Set-StrictMode -Version 2.0
@@ -40,7 +37,7 @@ function Get-TimestampFromFolderName {

     $name = $folder.Name

     #fallback on folder create time
     [datetime]$timestamp = $folder.TimeCreated

     try {
@@ -69,7 +66,7 @@ function Get-TimestampFromListName {

     $name = $list.Title

     #fallback on list create time
     [datetime]$timestamp = $list.LastItemUserModifiedDate

     try {
@@ -109,9 +106,8 @@ function Purge-Library {
     Write-Host "`nPurging library: $LibraryName"

     $foldersToPurge = @()
     $folders = Get-PnPFolderItem -FolderSiteRelativeUrl $LibraryName -ItemType Folder

-    Write-Host "`nFolders: $folders"
     foreach ($f in $folders) {
         $folderName = $f.Name
         $createTime = Get-TimestampFromFolderName -Folder $f
@@ -163,7 +159,7 @@ function Delete-LibraryByPrefix {
     Write-Host "`nDeleting library: $LibraryNamePrefix"

     $listsToDelete = @()
     $lists = Get-PnPList

     foreach ($l in $lists) {
         $listName = $l.Title
@@ -187,7 +183,7 @@ function Delete-LibraryByPrefix {
     Write-Host "Deleting list: "$l.Title
     try {
         $listInfo = Get-PnPList -Identity $l.Id | Select-Object -Property Hidden

         # Check if the 'hidden' property is true
         if ($listInfo.Hidden) {
             Write-Host "List: $($l.Title) is hidden. Skipping..."
@@ -213,8 +209,8 @@ if (-not (Get-Module -ListAvailable -Name PnP.PowerShell)) {
 }


-if ([string]::IsNullOrEmpty($ClientId) -or [string]::IsNullOrEmpty($AppCert)) {
-    Write-Host "ClientId and AppCert required as arguments or environment variables."
+if ([string]::IsNullOrEmpty($AdminUser) -or [string]::IsNullOrEmpty($AdminPwd)) {
+    Write-Host "Admin user name and password required as arguments or environment variables."
     Exit
 }

@@ -255,8 +251,12 @@ else {
     Exit
 }


+$password = convertto-securestring -String $AdminPwd -AsPlainText -Force
+$cred = New-Object -TypeName System.Management.Automation.PSCredential -ArgumentList $AdminUser, $password
+
 Write-Host "`nAuthenticating and connecting to $SiteUrl"
-Connect-PnPOnline -Url $siteUrl -ClientId $ClientId -CertificateBase64Encoded $AppCert -Tenant $TenantDomain
+Connect-PnPOnline -Url $siteUrl -Credential $cred
 Write-Host "Connected to $siteUrl`n"

 # ensure that there are no unexpanded entries in the list of parameters
@@ -19,40 +19,33 @@ type PermissionInfo struct {
 }

 const (
     sanityBackupID             = "SANITY_BACKUP_ID"
     sanityTestSourceContainer  = "SANITY_TEST_SOURCE_CONTAINER"
     sanityTestRestoreContainer = "SANITY_TEST_RESTORE_CONTAINER"
-    sanityTestRestoreContainerPrefix = "SANITY_TEST_RESTORE_CONTAINER_PREFIX"
-    sanityTestUser                   = "SANITY_TEST_USER"
-    sanityTestCategory               = "SANITY_TEST_CATEGORY"
+    sanityTestUser             = "SANITY_TEST_USER"
 )

 type Envs struct {
     BackupID         string
     SourceContainer  string
     RestoreContainer string
-    // applies for sharepoint lists only
-    RestoreContainerPrefix string
-    Category               string
-    GroupID                string
-    SiteID                 string
-    UserID                 string
-    TeamSiteID             string
+    GroupID          string
+    SiteID           string
+    UserID           string
+    TeamSiteID       string
 }

 func EnvVars(ctx context.Context) Envs {
     folder := strings.TrimSpace(os.Getenv(sanityTestRestoreContainer))

     e := Envs{
         BackupID:         os.Getenv(sanityBackupID),
         SourceContainer:  os.Getenv(sanityTestSourceContainer),
         RestoreContainer: folder,
-        Category:               os.Getenv(sanityTestCategory),
-        RestoreContainerPrefix: os.Getenv(sanityTestRestoreContainerPrefix),
-        GroupID:                tconfig.GetM365TeamID(ctx),
-        SiteID:                 tconfig.GetM365SiteID(ctx),
-        UserID:                 tconfig.GetM365UserID(ctx),
-        TeamSiteID:             tconfig.GetM365TeamSiteID(ctx),
+        GroupID:          tconfig.GetM365TeamID(ctx),
+        SiteID:           tconfig.GetM365SiteID(ctx),
+        UserID:           tconfig.GetM365UserID(ctx),
+        TeamSiteID:       tconfig.GetM365TeamSiteID(ctx),
     }

     if len(os.Getenv(sanityTestUser)) > 0 {
@@ -20,17 +20,30 @@ func BuildFilepathSanitree(
     info os.FileInfo,
     err error,
 ) error {
-    if root == nil {
-        root = CreateNewRoot(info, true)
-        return nil
+    if err != nil {
+        Fatal(ctx, "error passed to filepath walker", err)
     }

-    relPath := GetRelativePath(
-        ctx,
-        rootDir,
-        p,
-        info,
-        err)
+    relPath, err := filepath.Rel(rootDir, p)
+    if err != nil {
+        Fatal(ctx, "getting relative filepath", err)
+    }
+
+    if info != nil {
+        Debugf(ctx, "adding: %s", relPath)
+    }
+
+    if root == nil {
+        root = &Sanitree[fs.FileInfo, fs.FileInfo]{
+            Self:     info,
+            ID:       info.Name(),
+            Name:     info.Name(),
+            Leaves:   map[string]*Sanileaf[fs.FileInfo, fs.FileInfo]{},
+            Children: map[string]*Sanitree[fs.FileInfo, fs.FileInfo]{},
+        }
+
+        return nil
+    }

     elems := path.Split(relPath)
     node := root.NodeAt(ctx, elems[:len(elems)-1])
@@ -65,41 +78,3 @@ func BuildFilepathSanitree(

     return root
 }
-
-func CreateNewRoot(info fs.FileInfo, initChildren bool) *Sanitree[fs.FileInfo, fs.FileInfo] {
-    root := &Sanitree[fs.FileInfo, fs.FileInfo]{
-        Self:     info,
-        ID:       info.Name(),
-        Name:     info.Name(),
-        Leaves:   map[string]*Sanileaf[fs.FileInfo, fs.FileInfo]{},
-        Children: map[string]*Sanitree[fs.FileInfo, fs.FileInfo]{},
-    }
-
-    if initChildren {
-        root.Children = map[string]*Sanitree[fs.FileInfo, fs.FileInfo]{}
-    }
-
-    return root
-}
-
-func GetRelativePath(
-    ctx context.Context,
-    rootDir, p string,
-    info fs.FileInfo,
-    fileWalkerErr error,
-) string {
-    if fileWalkerErr != nil {
-        Fatal(ctx, "error passed to filepath walker", fileWalkerErr)
-    }
-
-    relPath, err := filepath.Rel(rootDir, p)
-    if err != nil {
-        Fatal(ctx, "getting relative filepath", err)
-    }
-
-    if info != nil {
-        Debugf(ctx, "adding: %s", relPath)
-    }
-
-    return relPath
-}
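BuildFilepathSanitree is shaped as a filepath.Walk callback: the walker hands it a (path, info, err) triple for every entry, the first call seeds the root, and later calls attach nodes under it. A minimal sketch of driving such a callback; the closure wiring below is illustrative, since the real call site is not part of this diff:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        rootDir := os.TempDir()

        // Walk invokes the callback once per file or directory; a tree builder
        // like BuildFilepathSanitree consumes the same (path, info, err) triple.
        count := 0
        err := filepath.Walk(rootDir, func(p string, info os.FileInfo, err error) error {
            if err != nil {
                return err // the corso version calls Fatal() here instead
            }

            rel, err := filepath.Rel(rootDir, p)
            if err != nil {
                return err
            }

            fmt.Println("adding:", rel)
            count++

            return nil
        })
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
        }

        fmt.Println("visited", count, "entries")
    }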
@@ -3,9 +3,7 @@ package driveish
 import (
     "context"

-    "github.com/alcionai/clues"
     "github.com/microsoftgraph/msgraph-sdk-go/models"
-    "golang.org/x/exp/slices"

     "github.com/alcionai/corso/src/cmd/sanity_test/common"
     "github.com/alcionai/corso/src/internal/common/ptr"
@@ -17,24 +15,10 @@ const (
     owner = "owner"
 )

-// sanitree population will grab a superset of data in the drive.
-// this increases the chance that we'll run into a race collision with
-// the cleanup script. Sometimes that's okay (deleting old data that
-// isn't scrutinized in the test), other times it's not. We mark whether
-// that's okay to do or not by specifying the folders being
-// scrutinized for the test. Any errors within those folders should cause
-// a fatal exit. Errors outside of those folders get ignored.
-//
-// since we're using folder names, mustPopulateFolders will
-// work best (ie: have the fewest collisions/side-effects) if the folder
-// names are very specific. Standard sanity tests should include timestamps,
-// which should help ensure that. Be warned if you try to use it with
-// a more generic name: unintended effects could occur.
 func populateSanitree(
     ctx context.Context,
     ac api.Client,
     driveID string,
-    mustPopulateFolders []string,
 ) *common.Sanitree[models.DriveItemable, models.DriveItemable] {
     common.Infof(ctx, "building sanitree for drive: %s", driveID)

@@ -43,12 +27,10 @@ func populateSanitree(
     common.Fatal(ctx, "getting drive root folder", err)
 }

-rootName := ptr.Val(root.GetName())
-
 stree := &common.Sanitree[models.DriveItemable, models.DriveItemable]{
     Self: root,
     ID:   ptr.Val(root.GetId()),
-    Name: rootName,
+    Name: ptr.Val(root.GetName()),
     Leaves:   map[string]*common.Sanileaf[models.DriveItemable, models.DriveItemable]{},
     Children: map[string]*common.Sanitree[models.DriveItemable, models.DriveItemable]{},
 }
@@ -58,8 +40,6 @@ func populateSanitree(
     ac,
     driveID,
     stree.Name+"/",
-    mustPopulateFolders,
-    slices.Contains(mustPopulateFolders, rootName),
     stree)

 return stree
@@ -68,29 +48,14 @@ func populateSanitree(
 func recursivelyBuildTree(
     ctx context.Context,
     ac api.Client,
-    driveID string,
-    location string,
-    mustPopulateFolders []string,
-    isChildOfFolderRequiringNoErrors bool,
+    driveID, location string,
     stree *common.Sanitree[models.DriveItemable, models.DriveItemable],
 ) {
     common.Debugf(ctx, "adding: %s", location)

     children, err := ac.Drives().GetFolderChildren(ctx, driveID, stree.ID)
     if err != nil {
-        if isChildOfFolderRequiringNoErrors {
-            common.Fatal(ctx, "getting drive children by id", err)
-        }
-
-        common.Infof(
-            ctx,
-            "ignoring error getting children in directory %q because it is not within directory set %v\nerror: %s\n%+v",
-            location,
-            mustPopulateFolders,
-            err.Error(),
-            clues.ToCore(err))
-
-        return
+        common.Fatal(ctx, "getting drive children by id", err)
     }

     for _, driveItem := range children {
@@ -103,20 +68,17 @@ func recursivelyBuildTree(
     // currently we don't restore blank folders.
     // skip permission check for empty folders
     if ptr.Val(driveItem.GetFolder().GetChildCount()) == 0 {
-        common.Infof(ctx, "skipped empty folder: %s%s", location, itemName)
+        common.Infof(ctx, "skipped empty folder: %s/%s", location, itemName)
         continue
     }

-    cannotAllowErrors := isChildOfFolderRequiringNoErrors ||
-        slices.Contains(mustPopulateFolders, itemName)
-
     branch := &common.Sanitree[models.DriveItemable, models.DriveItemable]{
         Parent: stree,
         Self:   driveItem,
         ID:     itemID,
         Name:   itemName,
         Expand: map[string]any{
-            expandPermissions: permissionIn(ctx, ac, driveID, itemID, cannotAllowErrors),
+            expandPermissions: permissionIn(ctx, ac, driveID, itemID),
         },
         Leaves:   map[string]*common.Sanileaf[models.DriveItemable, models.DriveItemable]{},
         Children: map[string]*common.Sanitree[models.DriveItemable, models.DriveItemable]{},
@@ -129,8 +91,6 @@ func recursivelyBuildTree(
     ac,
     driveID,
     location+branch.Name+"/",
-    mustPopulateFolders,
-    cannotAllowErrors,
     branch)
 }

|
|||||||
@ -31,8 +31,7 @@ func CheckExport(
|
|||||||
root := populateSanitree(
|
root := populateSanitree(
|
||||||
ctx,
|
ctx,
|
||||||
ac,
|
ac,
|
||||||
driveID,
|
driveID)
|
||||||
[]string{envs.SourceContainer})
|
|
||||||
|
|
||||||
sourceTree, ok := root.Children[envs.SourceContainer]
|
sourceTree, ok := root.Children[envs.SourceContainer]
|
||||||
common.Assert(
|
common.Assert(
|
||||||
|
|||||||
@ -45,14 +45,7 @@ func CheckRestoration(
|
|||||||
"drive_id", driveID,
|
"drive_id", driveID,
|
||||||
"drive_name", driveName)
|
"drive_name", driveName)
|
||||||
|
|
||||||
root := populateSanitree(
|
root := populateSanitree(ctx, ac, driveID)
|
||||||
ctx,
|
|
||||||
ac,
|
|
||||||
driveID,
|
|
||||||
[]string{
|
|
||||||
envs.SourceContainer,
|
|
||||||
envs.RestoreContainer,
|
|
||||||
})
|
|
||||||
|
|
||||||
sourceTree, ok := root.Children[envs.SourceContainer]
|
sourceTree, ok := root.Children[envs.SourceContainer]
|
||||||
common.Assert(
|
common.Assert(
|
||||||
@ -92,24 +85,12 @@ func permissionIn(
|
|||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
ac api.Client,
|
ac api.Client,
|
||||||
driveID, itemID string,
|
driveID, itemID string,
|
||||||
cannotAllowErrors bool,
|
|
||||||
) []common.PermissionInfo {
|
) []common.PermissionInfo {
|
||||||
pi := []common.PermissionInfo{}
|
pi := []common.PermissionInfo{}
|
||||||
|
|
||||||
pcr, err := ac.Drives().GetItemPermission(ctx, driveID, itemID)
|
pcr, err := ac.Drives().GetItemPermission(ctx, driveID, itemID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
if cannotAllowErrors {
|
common.Fatal(ctx, "getting permission", err)
|
||||||
common.Fatal(ctx, "getting permission", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
common.Infof(
|
|
||||||
ctx,
|
|
||||||
"ignoring error getting permissions for %q\nerror: %s,%+v",
|
|
||||||
itemID,
|
|
||||||
err.Error(),
|
|
||||||
clues.ToCore(err))
|
|
||||||
|
|
||||||
return []common.PermissionInfo{}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, perm := range pcr.GetValue() {
|
for _, perm := range pcr.GetValue() {
|
||||||
|
|||||||
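The mustPopulateFolders plumbing in the hunks above controls whether a Graph request failure aborts the sanity test or is only logged before skipping that subtree. A minimal sketch of the gating pattern, with a hypothetical helper name (the real code threads the flag through recursivelyBuildTree and permissionIn rather than using a helper):

```go
// skipOnError reports whether the caller should skip the current subtree.
// A failure is fatal only when the folder is in the must-populate set;
// otherwise it is logged and the walk continues. Sketch only, not corso's API.
func skipOnError(ctx context.Context, required bool, err error) bool {
	if err == nil {
		return false
	}

	if required {
		common.Fatal(ctx, "getting drive data", err) // exits the test run
	}

	common.Infof(ctx, "ignoring error outside required folder set: %s", err.Error())

	return true
}
```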
@@ -2,20 +2,10 @@ package export

 import (
 	"context"
-	"io"
-	"io/fs"
-	"os"
 	"path/filepath"
-	"strings"

-	"github.com/alcionai/clues"
-	"github.com/microsoftgraph/msgraph-sdk-go/models"
-	"github.com/tidwall/gjson"
-
 	"github.com/alcionai/corso/src/cmd/sanity_test/common"
 	"github.com/alcionai/corso/src/cmd/sanity_test/driveish"
-	"github.com/alcionai/corso/src/cmd/sanity_test/restore"
-	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

@@ -24,128 +14,15 @@ func CheckSharePointExport(
 	ac api.Client,
 	envs common.Envs,
 ) {
-	if envs.Category == path.ListsCategory.String() {
-		CheckSharepointListsExport(ctx, ac, envs)
-	}
-
-	if envs.Category == path.LibrariesCategory.String() {
-		drive, err := ac.Sites().GetDefaultDrive(ctx, envs.SiteID)
-		if err != nil {
-			common.Fatal(ctx, "getting the drive:", err)
-		}
-
-		envs.RestoreContainer = filepath.Join(envs.RestoreContainer, "Libraries/Documents") // check in default loc
-		driveish.CheckExport(
-			ctx,
-			ac,
-			drive,
-			envs)
-	}
-}
-
-func CheckSharepointListsExport(
-	ctx context.Context,
-	ac api.Client,
-	envs common.Envs,
-) {
-	exportFolderName := path.ListsCategory.HumanString()
-
-	sourceTree := restore.BuildListsSanitree(ctx, ac, envs.SiteID, envs.SourceContainer, exportFolderName)
-
-	listsExportDir := filepath.Join(envs.RestoreContainer, exportFolderName)
-	exportedTree := BuildFilepathSanitreeForSharepointLists(ctx, listsExportDir)
-
-	ctx = clues.Add(
-		ctx,
-		"export_container_id", exportedTree.ID,
-		"export_container_name", exportedTree.Name,
-		"source_container_id", sourceTree.ID,
-		"source_container_name", sourceTree.Name)
-
-	comparator := func(
-		ctx context.Context,
-		expect *common.Sanitree[models.Siteable, models.Listable],
-		result *common.Sanitree[fs.FileInfo, fs.FileInfo],
-	) {
-		modifiedResultLeaves := map[string]*common.Sanileaf[fs.FileInfo, fs.FileInfo]{}
-
-		for key, val := range result.Leaves {
-			fixedName := strings.TrimSuffix(key, ".json")
-
-			modifiedResultLeaves[fixedName] = val
-		}
-
-		common.CompareLeaves(ctx, expect.Leaves, modifiedResultLeaves, nil)
-	}
-
-	common.CompareDiffTrees(
-		ctx,
-		sourceTree,
-		exportedTree,
-		comparator)
-
-	common.Infof(ctx, "Success")
-}
-
-func BuildFilepathSanitreeForSharepointLists(
-	ctx context.Context,
-	rootDir string,
-) *common.Sanitree[fs.FileInfo, fs.FileInfo] {
-	var root *common.Sanitree[fs.FileInfo, fs.FileInfo]
-
-	walker := func(
-		p string,
-		info os.FileInfo,
-		err error,
-	) error {
-		if root == nil {
-			root = common.CreateNewRoot(info, false)
-			return nil
-		}
-
-		relPath := common.GetRelativePath(
-			ctx,
-			rootDir,
-			p,
-			info,
-			err)
-
-		if !info.IsDir() {
-			file, err := os.Open(p)
-			if err != nil {
-				common.Fatal(ctx, "opening file to read", err)
-			}
-			defer file.Close()
-
-			content, err := io.ReadAll(file)
-			if err != nil {
-				common.Fatal(ctx, "reading file", err)
-			}
-
-			res := gjson.Get(string(content), "items.#")
-			itemsCount := res.Num
-
-			elems := path.Split(relPath)
-
-			node := root.NodeAt(ctx, elems[:len(elems)-2])
-			node.CountLeaves++
-			node.Leaves[info.Name()] = &common.Sanileaf[fs.FileInfo, fs.FileInfo]{
-				Parent: node,
-				Self:   info,
-				ID:     info.Name(),
-				Name:   info.Name(),
-				// using list item count as size for lists
-				Size: int64(itemsCount),
-			}
-		}
-
-		return nil
-	}
-
-	err := filepath.Walk(rootDir, walker)
+	drive, err := ac.Sites().GetDefaultDrive(ctx, envs.SiteID)
 	if err != nil {
-		common.Fatal(ctx, "walking filepath", err)
+		common.Fatal(ctx, "getting the drive:", err)
 	}

-	return root
+	envs.RestoreContainer = filepath.Join(envs.RestoreContainer, "Libraries/Documents") // check in default loc
+	driveish.CheckExport(
+		ctx,
+		ac,
+		drive,
+		envs)
 }
@@ -2,15 +2,9 @@ package restore

 import (
 	"context"
-	"fmt"
-	"strings"

-	"github.com/alcionai/clues"
-	"github.com/microsoftgraph/msgraph-sdk-go/models"
-
 	"github.com/alcionai/corso/src/cmd/sanity_test/common"
 	"github.com/alcionai/corso/src/cmd/sanity_test/driveish"
-	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

@@ -19,155 +13,16 @@ func CheckSharePointRestoration(
 	ac api.Client,
 	envs common.Envs,
 ) {
-	if envs.Category == "lists" {
-		CheckSharePointListsRestoration(ctx, ac, envs)
+	drive, err := ac.Sites().GetDefaultDrive(ctx, envs.SiteID)
+	if err != nil {
+		common.Fatal(ctx, "getting site's default drive:", err)
 	}

-	if envs.Category == "libraries" {
-		drive, err := ac.Sites().GetDefaultDrive(ctx, envs.SiteID)
-		if err != nil {
-			common.Fatal(ctx, "getting site's default drive:", err)
-		}
-
-		driveish.CheckRestoration(
-			ctx,
-			ac,
-			drive,
-			envs,
-			// skip permissions tests
-			nil)
-	}
-}
-
-func CheckSharePointListsRestoration(
-	ctx context.Context,
-	ac api.Client,
-	envs common.Envs,
-) {
-	restoredTree := BuildListsSanitree(ctx, ac, envs.SiteID, envs.RestoreContainerPrefix, "")
-	sourceTree := BuildListsSanitree(ctx, ac, envs.SiteID, envs.SourceContainer, "")
-
-	ctx = clues.Add(
+	driveish.CheckRestoration(
 		ctx,
-		"restore_container_id", restoredTree.ID,
-		"restore_container_name", restoredTree.Name,
-		"source_container_id", sourceTree.ID,
-		"source_container_name", sourceTree.Name)
-
-	common.CompareDiffTrees[models.Siteable, models.Listable](
-		ctx,
-		sourceTree,
-		restoredTree,
+		ac,
+		drive,
+		envs,
+		// skip permissions tests
 		nil)
-
-	common.Infof(ctx, "Success")
-}
-
-func BuildListsSanitree(
-	ctx context.Context,
-	ac api.Client,
-	siteID string,
-	restoreContainerPrefix, exportFolderName string,
-) *common.Sanitree[models.Siteable, models.Listable] {
-	common.Infof(ctx, "building sanitree for lists of site: %s", siteID)
-
-	site, err := ac.Sites().GetByID(ctx, siteID, api.CallConfig{})
-	if err != nil {
-		common.Fatal(
-			ctx,
-			fmt.Sprintf("finding site by id %q", siteID),
-			err)
-	}
-
-	cfg := api.CallConfig{
-		Select: idAnd("displayName", "list", "lastModifiedDateTime"),
-	}
-
-	lists, err := ac.Lists().GetLists(ctx, siteID, cfg)
-	if err != nil {
-		common.Fatal(
-			ctx,
-			fmt.Sprintf("finding lists of site with id %q", siteID),
-			err)
-	}
-
-	lists = filterToSupportedLists(lists)
-
-	filteredLists := filterListsByPrefix(lists, restoreContainerPrefix)
-
-	rootTreeName := ptr.Val(site.GetDisplayName())
-	// lists get stored into the local dir at destination/Lists/
-	if len(exportFolderName) > 0 {
-		rootTreeName = exportFolderName
-	}
-
-	root := &common.Sanitree[models.Siteable, models.Listable]{
-		Self:        site,
-		ID:          ptr.Val(site.GetId()),
-		Name:        rootTreeName,
-		CountLeaves: len(filteredLists),
-		Leaves:      map[string]*common.Sanileaf[models.Siteable, models.Listable]{},
-	}
-
-	for _, list := range filteredLists {
-		listID := ptr.Val(list.GetId())
-
-		listItems, err := ac.Lists().GetListItems(ctx, siteID, listID, api.CallConfig{})
-		if err != nil {
-			common.Fatal(
-				ctx,
-				fmt.Sprintf("finding listItems of list with id %q", listID),
-				err)
-		}
-
-		m := &common.Sanileaf[models.Siteable, models.Listable]{
-			Parent: root,
-			Self:   list,
-			ID:     listID,
-			Name:   ptr.Val(list.GetDisplayName()),
-			// using list item count as size for lists
-			Size: int64(len(listItems)),
-		}
-
-		root.Leaves[m.ID] = m
-	}
-
-	return root
-}
-
-func filterToSupportedLists(lists []models.Listable) []models.Listable {
-	filteredLists := make([]models.Listable, 0)
-
-	for _, list := range lists {
-		if !api.SkipListTemplates.HasKey(ptr.Val(list.GetList().GetTemplate())) {
-			filteredLists = append(filteredLists, list)
-		}
-	}
-
-	return filteredLists
-}
-
-func filterListsByPrefix(lists []models.Listable, prefix string) []models.Listable {
-	result := []models.Listable{}
-
-	for _, list := range lists {
-		for _, pfx := range strings.Split(prefix, ",") {
-			if strings.HasPrefix(ptr.Val(list.GetDisplayName()), pfx) {
-				result = append(result, list)
-				break
-			}
-		}
-	}
-
-	return result
-}
-
-func idAnd(ss ...string) []string {
-	id := []string{"id"}
-
-	if len(ss) == 0 {
-		return id
-	}
-
-	return append(id, ss...)
-}
 }
28 src/go.mod
@@ -2,8 +2,11 @@ module github.com/alcionai/corso/src

 go 1.21

+replace github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20231205231702-863c24d6f8b1
+
 replace (
-	github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4
+	// No tags in the alcion fork of the repo so use v7 as that's in the import path.
+	github.com/minio/minio-go/v7 => github.com/alcionai/minio-go/v7 v7.0.0-20231130221740-c745a3d084aa

 	// Alcion fork removes the validation of email addresses as we might get incomplete email addresses
 	github.com/xhit/go-simple-mail/v2 v2.16.0 => github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a
@@ -11,13 +14,13 @@ replace (

 require (
 	github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1
-	github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c
+	github.com/alcionai/clues v0.0.0-20231222002615-24ee69e6ecc2
 	github.com/armon/go-metrics v0.4.1
 	github.com/aws/aws-xray-sdk-go v1.8.3
 	github.com/cenkalti/backoff/v4 v4.2.1
 	github.com/fatih/color v1.16.0
 	github.com/golang-jwt/jwt/v5 v5.2.0
-	github.com/google/uuid v1.6.0
+	github.com/google/uuid v1.5.0
 	github.com/h2non/gock v1.2.0
 	github.com/jaytaylor/html2text v0.0.0-20230321000545-74c2419ad056
 	github.com/jhillyerd/enmime v1.1.0
@@ -51,13 +54,12 @@ require (
 )

 require (
-	github.com/arran4/golang-ical v0.2.4
+	github.com/arran4/golang-ical v0.2.3
 	github.com/emersion/go-vcard v0.0.0-20230815062825-8fda7d206ec9
 	jaytaylor.com/html2text v0.0.0-20230321000545-74c2419ad056
 )

 require (
-	github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.2.1 // indirect
 	github.com/VividCortex/ewma v1.2.0 // indirect
 	github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect
 	github.com/andybalholm/brotli v1.0.6 // indirect
@@ -88,7 +90,7 @@ require (
 	github.com/valyala/bytebufferpool v1.0.0 // indirect
 	github.com/valyala/fasthttp v1.51.0 // indirect
 	go.opentelemetry.io/otel/metric v1.21.0 // indirect
-	google.golang.org/genproto/googleapis/rpc v0.0.0-20231212172506-995d672761c0 // indirect
+	google.golang.org/genproto/googleapis/rpc v0.0.0-20231127180814-3a041ad873d4 // indirect
 )

 require (
@@ -110,10 +112,10 @@ require (
 	github.com/inconshreveable/mousetrap v1.1.0 // indirect
 	github.com/jmespath/go-jmespath v0.4.0 // indirect
 	github.com/json-iterator/go v1.1.12 // indirect
-	github.com/klauspost/compress v1.17.4 // indirect
+	github.com/klauspost/compress v1.17.3 // indirect
 	github.com/klauspost/cpuid/v2 v2.2.6 // indirect
 	github.com/klauspost/pgzip v1.2.6 // indirect
-	github.com/klauspost/reedsolomon v1.12.0 // indirect
+	github.com/klauspost/reedsolomon v1.11.8 // indirect
 	github.com/kylelemons/godebug v1.1.0 // indirect
 	github.com/mattn/go-colorable v0.1.13 // indirect
 	github.com/mattn/go-isatty v0.0.20 // indirect
@@ -121,7 +123,7 @@ require (
 	github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
 	github.com/microsoft/kiota-serialization-text-go v1.0.0
 	github.com/minio/md5-simd v1.1.2 // indirect
-	github.com/minio/minio-go/v7 v7.0.67
+	github.com/minio/minio-go/v7 v7.0.64
 	github.com/minio/sha256-simd v1.0.1 // indirect
 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
 	github.com/modern-go/reflect2 v1.0.2 // indirect
@@ -129,7 +131,7 @@ require (
 	github.com/pierrec/lz4 v2.6.1+incompatible // indirect
 	github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
 	github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
-	github.com/prometheus/client_golang v1.18.0 // indirect
+	github.com/prometheus/client_golang v1.17.0 // indirect
 	github.com/prometheus/client_model v0.5.0 // indirect
 	github.com/prometheus/common v0.45.0 // indirect
 	github.com/prometheus/procfs v0.12.0 // indirect
@@ -137,7 +139,7 @@ require (
 	github.com/rs/xid v1.5.0 // indirect
 	github.com/segmentio/backo-go v1.0.1 // indirect
 	github.com/sirupsen/logrus v1.9.3 // indirect
-	github.com/tidwall/gjson v1.17.0
+	github.com/tidwall/gjson v1.17.0 // indirect
 	github.com/tidwall/match v1.1.1 // indirect
 	github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c // indirect
 	github.com/zeebo/blake3 v0.2.3 // indirect
@@ -150,8 +152,8 @@ require (
 	golang.org/x/sync v0.6.0 // indirect
 	golang.org/x/sys v0.16.0 // indirect
 	golang.org/x/text v0.14.0 // indirect
-	google.golang.org/grpc v1.60.1 // indirect
-	google.golang.org/protobuf v1.32.0 // indirect
+	google.golang.org/grpc v1.59.0 // indirect
+	google.golang.org/protobuf v1.31.0 // indirect
 	gopkg.in/ini.v1 v1.67.0 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
 )
52 src/go.sum
@@ -4,10 +4,6 @@ github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 h1:sO0/P7g68FrryJzljemN+
 github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1/go.mod h1:h8hyGFDsU5HMivxiS2iYFZsgDbU9OnnJ163x5UGVKYo=
 github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 h1:6oNBlSdi1QqM1PNW7FPA6xOGA5UNsXnkaYZz9vdPGhA=
 github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1/go.mod h1:s4kgfzA0covAXNicZHDMN58jExvcng2mC/DepXiF1EI=
-github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.5.0 h1:AifHbc4mg0x9zW52WOpKbsHaDKuRhlI7TVl47thgQ70=
-github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.5.0/go.mod h1:T5RfihdXtBDxt1Ch2wobif3TvzTdumDy29kahv6AV9A=
-github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.2.1 h1:AMf7YbZOZIW5b66cXNHMWWT/zkjhz5+a+k/3x40EO7E=
-github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.2.1/go.mod h1:uwfk06ZBcvL/g4VHNjurPfVln9NMbsk2XIZxJ+hu81k=
 github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 h1:DzHpqpoJVaCgOUdVHxE8QB52S6NiVdDQvGlny1qvPqA=
 github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
 github.com/DATA-DOG/go-sqlmock v1.4.1 h1:ThlnYciV1iM/V0OSF/dtkqWb6xo5qITT1TJBG1MRDJM=
@@ -19,12 +15,14 @@ github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1o
 github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4=
 github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8=
 github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo=
-github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c h1:QtARFaqYKtGjmEejr07KFf2iyfCAdTxYGRAAFveLjFA=
-github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c/go.mod h1:1YJwJy3W6GGsC2UiDAEWABUjgvT8OZHjKs8OoaXeKbw=
+github.com/alcionai/clues v0.0.0-20231222002615-24ee69e6ecc2 h1:Oiz7puLziTpDUsEoiZMNor3j6um8RSvPOSIf4heGgTk=
+github.com/alcionai/clues v0.0.0-20231222002615-24ee69e6ecc2/go.mod h1:1YJwJy3W6GGsC2UiDAEWABUjgvT8OZHjKs8OoaXeKbw=
 github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a h1:4nhM0NM1qtUT1s55rQ+D0Xw1Re5mIU9/crjEl6KdE+k=
 github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a/go.mod h1:b7P5ygho6SYE+VIqpxA6QkYfv4teeyG4MKqB3utRu98=
-github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4 h1:3YZ70H3mkUgwiHLiNvukrqh2awRgfl1RAkbV0IoUqqk=
-github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4/go.mod h1:QFRSOUQzZfKE3hKVBHP7hxOn5WyrEmdBtfN5wkib/eA=
+github.com/alcionai/kopia v0.12.2-0.20231205231702-863c24d6f8b1 h1:UM4YDqNmwRsajtoQT4BkMCRrjIeMDwTuTbJs5fPQTTA=
+github.com/alcionai/kopia v0.12.2-0.20231205231702-863c24d6f8b1/go.mod h1:f4PligAuyEicX+lfTlZltc69nM0eMoXX2nE5sCBdo6Y=
+github.com/alcionai/minio-go/v7 v7.0.0-20231130221740-c745a3d084aa h1:PHzp5TkXgsardwMG6O2nnyk3zBsGW8CqgsOWQCYkykQ=
+github.com/alcionai/minio-go/v7 v7.0.0-20231130221740-c745a3d084aa/go.mod h1:R4WVUR6ZTedlCcGwZRauLMIKjgyaWxhs4Mqi/OMPmEc=
 github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
 github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
 github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@@ -35,8 +33,8 @@ github.com/andybalholm/brotli v1.0.6 h1:Yf9fFpf49Zrxb9NlQaluyE92/+X7UVHlhMNJN2sx
 github.com/andybalholm/brotli v1.0.6/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
 github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
 github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
-github.com/arran4/golang-ical v0.2.4 h1:0/rTXn2qqEekLKec3SzRRy+z7pCLtniMb0KD/dPogUo=
-github.com/arran4/golang-ical v0.2.4/go.mod h1:RqMuPGmwRRwjkb07hmm+JBqcWa1vF1LvVmPtSZN2OhQ=
+github.com/arran4/golang-ical v0.2.3 h1:C4Vj7+BjJBIrAJhHgi6Ku+XUkQVugRq4re5Cqj5QVdE=
+github.com/arran4/golang-ical v0.2.3/go.mod h1:RqMuPGmwRRwjkb07hmm+JBqcWa1vF1LvVmPtSZN2OhQ=
 github.com/aws/aws-sdk-go v1.48.6 h1:hnL/TE3eRigirDLrdRE9AWE1ALZSVLAsC4wK8TGsMqk=
 github.com/aws/aws-sdk-go v1.48.6/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk=
 github.com/aws/aws-xray-sdk-go v1.8.3 h1:S8GdgVncBRhzbNnNUgTPwhEqhwt2alES/9rLASyhxjU=
@@ -117,8 +115,8 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
 github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
 github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
-github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
-github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU=
+github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
 github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
 github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
 github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw=
@@ -158,16 +156,16 @@ github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/u
 github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
 github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
 github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
-github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4=
-github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
+github.com/klauspost/compress v1.17.3 h1:qkRjuerhUU1EmXLYGkSH6EZL+vPSxIrYjLNAK4slzwA=
+github.com/klauspost/compress v1.17.3/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
 github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
 github.com/klauspost/cpuid/v2 v2.0.12/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c=
 github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc=
 github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
 github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU=
 github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
-github.com/klauspost/reedsolomon v1.12.0 h1:I5FEp3xSwVCcEh3F5A7dofEfhXdF/bWhQWPH+XwBFno=
-github.com/klauspost/reedsolomon v1.12.0/go.mod h1:EPLZJeh4l27pUGC3aXOjheaoh1I9yut7xTURiW3LQ9Y=
+github.com/klauspost/reedsolomon v1.11.8 h1:s8RpUW5TK4hjr+djiOpbZJB4ksx+TdYbRH7vHQpwPOY=
+github.com/klauspost/reedsolomon v1.11.8/go.mod h1:4bXRN+cVzMdml6ti7qLouuYi32KHJ5MGv0Qd8a47h6A=
 github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
 github.com/kopia/htmluibuild v0.0.1-0.20231019063300-75c2a788c7d0 h1:TvupyyfbUZzsO4DQJpQhKZnUa61xERcJ+ejCbHWG2NY=
 github.com/kopia/htmluibuild v0.0.1-0.20231019063300-75c2a788c7d0/go.mod h1:cSImbrlwvv2phvj5RfScL2v08ghX6xli0PcK6f+t8S0=
@@ -219,8 +217,6 @@ github.com/microsoftgraph/msgraph-sdk-go-core v1.0.1 h1:uq4qZD8VXLiNZY0t4NoRpLDo
 github.com/microsoftgraph/msgraph-sdk-go-core v1.0.1/go.mod h1:HUITyuFN556+0QZ/IVfH5K4FyJM7kllV6ExKi2ImKhE=
 github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
 github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
-github.com/minio/minio-go/v7 v7.0.67 h1:BeBvZWAS+kRJm1vGTMJYVjKUNoo0FoEt/wUWdUtfmh8=
-github.com/minio/minio-go/v7 v7.0.67/go.mod h1:+UXocnUeZ3wHvVh5s95gcrA4YjMIbccT6ubB+1m054A=
 github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM=
 github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8=
 github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
@@ -258,8 +254,8 @@ github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH
 github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
 github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
 github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
-github.com/prometheus/client_golang v1.18.0 h1:HzFfmkOzH5Q8L8G+kSJKUx5dtG87sewO+FoDDqP5Tbk=
-github.com/prometheus/client_golang v1.18.0/go.mod h1:T+GXkCk5wSJyOqMIzVgvvjFDlkOQntgjkJWKrN5txjA=
+github.com/prometheus/client_golang v1.17.0 h1:rl2sfwZMtSthVU752MqfjQozy7blglC+1SOtjMAMh+Q=
+github.com/prometheus/client_golang v1.17.0/go.mod h1:VeL+gMmOAxkS2IqfCq0ZmHSL+LjWfWDUmp1mBz9JgUY=
 github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
 github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
 github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
@@ -328,8 +324,8 @@ github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcU
 github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
 github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
 github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
-github.com/tg123/go-htpasswd v1.2.2 h1:tmNccDsQ+wYsoRfiONzIhDm5OkVHQzN3w4FOBAlN6BY=
-github.com/tg123/go-htpasswd v1.2.2/go.mod h1:FcIrK0J+6zptgVwK1JDlqyajW/1B4PtuJ/FLWl7nx8A=
+github.com/tg123/go-htpasswd v1.2.1 h1:i4wfsX1KvvkyoMiHZzjS0VzbAPWfxzI8INcZAKtutoU=
+github.com/tg123/go-htpasswd v1.2.1/go.mod h1:erHp1B86KXdwQf1X5ZrLb7erXZnWueEQezb2dql4q58=
 github.com/tidwall/gjson v1.17.0 h1:/Jocvlh98kcTfpN2+JzGQWQcqrPQwDrVEMApx/M5ZwM=
 github.com/tidwall/gjson v1.17.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
 github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
@@ -410,14 +406,14 @@ golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
 golang.org/x/tools v0.17.0 h1:FvmRgNOcs3kOa+T20R1uhfP9F6HgG2mfxDv1vrx1Htc=
 golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20231212172506-995d672761c0 h1:/jFB8jK5R3Sq3i/lmeZO0cATSzFfZaJq1J2Euan3XKU=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20231212172506-995d672761c0/go.mod h1:FUoWkonphQm3RhTS+kOEhF8h0iDpm4tdXolVCeZ9KKA=
-google.golang.org/grpc v1.60.1 h1:26+wFr+cNqSGFcOXcabYC0lUVJVRa2Sb2ortSK7VrEU=
-google.golang.org/grpc v1.60.1/go.mod h1:OlCHIeLYqSSsLi6i49B5QGdzaMZK9+M7LXN2FKz4eGM=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20231127180814-3a041ad873d4 h1:DC7wcm+i+P1rN3Ff07vL+OndGg5OhNddHyTA+ocPqYE=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20231127180814-3a041ad873d4/go.mod h1:eJVxU6o+4G1PSczBr85xmyvSNYAKvAYgkub40YGomFM=
+google.golang.org/grpc v1.59.0 h1:Z5Iec2pjwb+LEOqzpB2MR12/eKFhDPhuqW91O+4bwUk=
+google.golang.org/grpc v1.59.0/go.mod h1:aUPDwccQo6OTjy7Hct4AfBPD1GptF4fyUjIkQ9YtF98=
 google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
-google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I=
-google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
+google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8=
+google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
 gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|||||||
@ -10,7 +10,6 @@ import (
|
|||||||
|
|
||||||
"github.com/alcionai/corso/src/pkg/dttm"
|
"github.com/alcionai/corso/src/pkg/dttm"
|
||||||
"github.com/alcionai/corso/src/pkg/export"
|
"github.com/alcionai/corso/src/pkg/export"
|
||||||
"github.com/alcionai/corso/src/pkg/logger"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
const (
|
||||||
@ -57,22 +56,12 @@ func ZipExportCollection(
|
|||||||
defer wr.Close()
|
defer wr.Close()
|
||||||
|
|
||||||
buf := make([]byte, ZipCopyBufferSize)
|
buf := make([]byte, ZipCopyBufferSize)
|
||||||
counted := 0
|
|
||||||
log := logger.Ctx(ctx).
|
|
||||||
With("collection_count", len(expCollections))
|
|
||||||
|
|
||||||
for _, ec := range expCollections {
|
for _, ec := range expCollections {
|
||||||
folder := ec.BasePath()
|
folder := ec.BasePath()
|
||||||
items := ec.Items(ctx)
|
items := ec.Items(ctx)
|
||||||
|
|
||||||
for item := range items {
|
for item := range items {
|
||||||
counted++
|
|
||||||
|
|
||||||
// Log every 1000 items that are processed
|
|
||||||
if counted%1000 == 0 {
|
|
||||||
log.Infow("progress zipping export items", "count_items", counted)
|
|
||||||
}
|
|
||||||
|
|
||||||
err := item.Error
|
err := item.Error
|
||||||
if err != nil {
|
if err != nil {
|
||||||
writer.CloseWithError(clues.Wrap(err, "getting export item").With("id", item.ID))
|
writer.CloseWithError(clues.Wrap(err, "getting export item").With("id", item.ID))
|
||||||
@ -99,12 +88,8 @@ func ZipExportCollection(
|
|||||||
writer.CloseWithError(clues.Wrap(err, "writing zip entry").With("name", name).With("id", item.ID))
|
writer.CloseWithError(clues.Wrap(err, "writing zip entry").With("name", name).With("id", item.ID))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
item.Body.Close()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
log.Infow("completed zipping export items", "count_items", counted)
|
|
||||||
}()
|
}()
|
||||||
|
|
||||||
return zipCollection{reader}, nil
|
return zipCollection{reader}, nil
|
||||||
|
|||||||
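The progress counter diffed above logs once per thousand items rather than per item, so large exports don't flood the log. The same throttling pattern as a standalone sketch; the helper name is hypothetical, and the SugaredLogger parameter assumes corso's logger package wraps zap:

```go
package export

import "go.uber.org/zap"

// logProgress emits one log line per `every` items processed. It mirrors the
// counted%1000 check in the hunk above; sketch only, not corso's actual API.
func logProgress(log *zap.SugaredLogger, every, counted int) {
	if every > 0 && counted%every == 0 {
		log.Infow("progress zipping export items", "count_items", counted)
	}
}
```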
@@ -1,13 +1,10 @@
 package jwt

 import (
-	"context"
 	"time"

 	"github.com/alcionai/clues"
 	jwt "github.com/golang-jwt/jwt/v5"
-
-	"github.com/alcionai/corso/src/pkg/logger"
 )

 // IsJWTExpired checks if the JWT token is past expiry by analyzing the
@@ -40,51 +37,3 @@ func IsJWTExpired(

 	return expired, nil
 }
-
-// GetJWTLifetime returns the issued at(iat) and expiration time(exp) claims
-// present in the JWT token. These are optional claims and may not be present
-// in the token. Absence is not reported as an error.
-//
-// An error is returned if the supplied token is malformed. Times are returned
-// in UTC to have parity with graph responses.
-func GetJWTLifetime(
-	ctx context.Context,
-	rawToken string,
-) (time.Time, time.Time, error) {
-	var (
-		issuedAt  time.Time
-		expiresAt time.Time
-	)
-
-	p := jwt.NewParser()
-
-	token, _, err := p.ParseUnverified(rawToken, &jwt.RegisteredClaims{})
-	if err != nil {
-		logger.CtxErr(ctx, err).Debug("parsing jwt token")
-		return time.Time{}, time.Time{}, clues.Wrap(err, "invalid jwt")
-	}
-
-	exp, err := token.Claims.GetExpirationTime()
-	if err != nil {
-		logger.CtxErr(ctx, err).Debug("extracting exp claim")
-		return time.Time{}, time.Time{}, clues.Wrap(err, "getting token expiry time")
-	}
-
-	iat, err := token.Claims.GetIssuedAt()
-	if err != nil {
-		logger.CtxErr(ctx, err).Debug("extracting iat claim")
-		return time.Time{}, time.Time{}, clues.Wrap(err, "getting token issued at time")
-	}
-
-	// Absence of iat or exp claims is not reported as an error by jwt library as these
-	// are optional as per spec.
-	if iat != nil {
-		issuedAt = iat.UTC()
-	}
-
-	if exp != nil {
-		expiresAt = exp.UTC()
-	}
-
-	return issuedAt, expiresAt, nil
-}
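GetJWTLifetime, shown above on the main side, parses without verifying the signature, returns zero time.Time values for absent optional claims, and errors only on malformed tokens. A hedged usage sketch built on that contract (the wrapper function is hypothetical, not corso code); the test table below exercises the same behavior:

```go
// tokenLifetime reports the declared validity window of rawToken, or
// ok == false when either optional claim is absent.
func tokenLifetime(ctx context.Context, rawToken string) (time.Duration, bool, error) {
	iat, exp, err := GetJWTLifetime(ctx, rawToken)
	if err != nil {
		// only a malformed token errors; absent iat/exp claims do not
		return 0, false, clues.Wrap(err, "reading token lifetime")
	}

	// zero time.Time values signal claims that were not present
	if iat.IsZero() || exp.IsZero() {
		return 0, false, nil
	}

	return exp.Sub(iat), true, nil
}
```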
@@ -113,134 +113,3 @@ func (suite *JWTUnitSuite) TestIsJWTExpired() {
 		})
 	}
 }
-
-func (suite *JWTUnitSuite) TestGetJWTLifetime() {
-	// Set of time values to be used in the tests.
-	// Truncate to seconds for comparisons since jwt tokens have second
-	// level precision.
-	idToTime := map[string]time.Time{
-		"T0": time.Now().UTC().Add(-time.Hour).Truncate(time.Second),
-		"T1": time.Now().UTC().Truncate(time.Second),
-		"T2": time.Now().UTC().Add(time.Hour).Truncate(time.Second),
-	}
-
-	table := []struct {
-		name       string
-		getToken   func() (string, error)
-		expectFunc func(t *testing.T, iat time.Time, exp time.Time)
-		expectErr  assert.ErrorAssertionFunc
-	}{
-		{
-			name: "alive token",
-			getToken: func() (string, error) {
-				return createJWTToken(
-					jwt.RegisteredClaims{
-						IssuedAt:  jwt.NewNumericDate(idToTime["T0"]),
-						ExpiresAt: jwt.NewNumericDate(idToTime["T1"]),
-					})
-			},
-			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
-				assert.Equal(t, idToTime["T0"], iat)
-				assert.Equal(t, idToTime["T1"], exp)
-			},
-			expectErr: assert.NoError,
-		},
-		// Test with a token which is not generated using the go-jwt lib.
-		// This is a long lived token which is valid for 100 years.
-		{
-			name: "alive raw token with iat and exp claims",
-			getToken: func() (string, error) {
-				return rawToken, nil
-			},
-			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
-				assert.Less(t, iat, time.Now(), "iat should be in the past")
-				assert.Greater(t, exp, time.Now(), "exp should be in the future")
-			},
-			expectErr: assert.NoError,
-		},
-		// Regardless of whether the token is expired or not, we should be able to
-		// extract the iat and exp claims from it without error.
-		{
-			name: "expired token",
-			getToken: func() (string, error) {
-				return createJWTToken(
-					jwt.RegisteredClaims{
-						IssuedAt:  jwt.NewNumericDate(idToTime["T1"]),
-						ExpiresAt: jwt.NewNumericDate(idToTime["T0"]),
-					})
-			},
-			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
-				assert.Equal(t, idToTime["T1"], iat)
-				assert.Equal(t, idToTime["T0"], exp)
-			},
-			expectErr: assert.NoError,
-		},
-		{
-			name: "missing iat claim",
-			getToken: func() (string, error) {
-				return createJWTToken(
-					jwt.RegisteredClaims{
-						ExpiresAt: jwt.NewNumericDate(idToTime["T2"]),
-					})
-			},
-			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
-				assert.Equal(t, time.Time{}, iat)
-				assert.Equal(t, idToTime["T2"], exp)
-			},
-			expectErr: assert.NoError,
-		},
-		{
-			name: "missing exp claim",
-			getToken: func() (string, error) {
-				return createJWTToken(
-					jwt.RegisteredClaims{
-						IssuedAt: jwt.NewNumericDate(idToTime["T0"]),
-					})
-			},
-			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
-				assert.Equal(t, idToTime["T0"], iat)
-				assert.Equal(t, time.Time{}, exp)
-			},
-			expectErr: assert.NoError,
-		},
-		{
-			name: "both claims missing",
-			getToken: func() (string, error) {
-				return createJWTToken(jwt.RegisteredClaims{})
-			},
-			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
-				assert.Equal(t, time.Time{}, iat)
-				assert.Equal(t, time.Time{}, exp)
-			},
-			expectErr: assert.NoError,
-		},
-		{
-			name: "malformed token",
-			getToken: func() (string, error) {
-				return "header.claims.signature", nil
-			},
-			expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
-				assert.Equal(t, time.Time{}, iat)
-				assert.Equal(t, time.Time{}, exp)
-			},
-			expectErr: assert.Error,
-		},
-	}
-
-	for _, test := range table {
-		suite.Run(test.name, func() {
-			t := suite.T()
-
-			ctx, flush := tester.NewContext(t)
-			defer flush()
-
-			token, err := test.getToken()
-			require.NoError(t, err)
-
-			iat, exp, err := GetJWTLifetime(ctx, token)
-			test.expectErr(t, err)
-
-			test.expectFunc(t, iat, exp)
-		})
-	}
-}
@@ -59,19 +59,6 @@ func First(vs ...string) string {
 	return ""
 }

-// FirstIn returns the first entry in the map with a non-zero value
-// when iterating the provided list of keys.
-func FirstIn(m map[string]any, keys ...string) string {
-	for _, key := range keys {
-		v, err := AnyValueToString(key, m)
-		if err == nil && len(v) > 0 {
-			return v
-		}
-	}
-
-	return ""
-}
-
 // Preview reduces the string to the specified size.
 // If the string is longer than the size, the last three
 // characters are replaced with an ellipsis. Size < 4
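A usage sketch for the FirstIn helper diffed above: keys are checked in order, and nil or empty values are passed over. This mirrors the test cases that follow rather than adding new corso code:

```go
m := map[string]any{
	"foo": "",      // empty value: skipped
	"bar": "smarf", // first non-empty hit
}

got := str.FirstIn(m, "foo", "bar") // got == "smarf"
```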
@@ -118,96 +118,3 @@ func TestGenerateHash(t *testing.T) {
 		}
 	}
 }
-
-func TestFirstIn(t *testing.T) {
-	table := []struct {
-		name   string
-		m      map[string]any
-		keys   []string
-		expect string
-	}{
-		{
-			name:   "nil map",
-			keys:   []string{"foo", "bar"},
-			expect: "",
-		},
-		{
-			name:   "empty map",
-			m:      map[string]any{},
-			keys:   []string{"foo", "bar"},
-			expect: "",
-		},
-		{
-			name: "no match",
-			m: map[string]any{
-				"baz": "baz",
-			},
-			keys:   []string{"foo", "bar"},
-			expect: "",
-		},
-		{
-			name: "no keys",
-			m: map[string]any{
-				"baz": "baz",
-			},
-			keys:   []string{},
-			expect: "",
-		},
-		{
-			name: "nil match",
-			m: map[string]any{
-				"foo": nil,
-			},
-			keys:   []string{"foo", "bar"},
-			expect: "",
-		},
-		{
-			name: "empty match",
-			m: map[string]any{
-				"foo": "",
-			},
-			keys:   []string{"foo", "bar"},
-			expect: "",
-		},
-		{
-			name: "matches first key",
-			m: map[string]any{
-				"foo": "fnords",
-			},
-			keys:   []string{"foo", "bar"},
-			expect: "fnords",
-		},
-		{
-			name: "matches second key",
-			m: map[string]any{
-				"bar": "smarf",
-			},
-			keys:   []string{"foo", "bar"},
-			expect: "smarf",
-		},
-		{
-			name: "matches second key with nil first match",
-			m: map[string]any{
-				"foo": nil,
-				"bar": "smarf",
-			},
-			keys:   []string{"foo", "bar"},
-			expect: "smarf",
-		},
-		{
-			name: "matches second key with empty first match",
-			m: map[string]any{
-				"foo": "",
-				"bar": "smarf",
-			},
-			keys:   []string{"foo", "bar"},
-			expect: "smarf",
-		},
-	}
-	for _, test := range table {
-		t.Run(test.name, func(t *testing.T) {
-			result := FirstIn(test.m, test.keys...)
-			assert.Equal(t, test.expect, result)
-		})
-	}
-}
@ -22,8 +22,6 @@ import (
|
|||||||
|
|
||||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||||
"github.com/alcionai/corso/src/internal/common/str"
|
"github.com/alcionai/corso/src/internal/common/str"
|
||||||
"github.com/alcionai/corso/src/internal/converters/ics"
|
|
||||||
"github.com/alcionai/corso/src/internal/m365/collection/groups/metadata"
|
|
||||||
"github.com/alcionai/corso/src/pkg/logger"
|
"github.com/alcionai/corso/src/pkg/logger"
|
||||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||||
)
|
)
|
||||||
@ -52,212 +50,6 @@ func formatAddress(entry models.EmailAddressable) string {
 	return fmt.Sprintf(addressFormat, name, email)
 }
 
-// getICalData converts the emails to an event so that ical generation
-// can generate from it.
-func getICalData(ctx context.Context, data models.Messageable) (string, error) {
-	msg, ok := data.(*models.EventMessageRequest)
-	if !ok {
-		return "", clues.NewWC(ctx, "unexpected message type").
-			With("interface_type", fmt.Sprintf("%T", data))
-	}
-
-	// This method returns nil if data is not pulled using the necessary expand property
-	// .../messages/<message_id>/?expand=Microsoft.Graph.EventMessage/Event
-	// Also works for emails which are a result of someone accepting an
-	// invite. If we add this expand query parameter value when directly
-	// fetching a cancellation mail, the request fails. It however looks
-	// to be OK to run when listing emails although it gives empty({})
-	// event value for cancellations.
-	// TODO(meain): cancelled event details are available when pulling .eml
-	if mevent := msg.GetEvent(); mevent != nil {
-		return ics.FromEventable(ctx, mevent)
-	}
-
-	// Exceptions(modifications) are covered under this, although graph just sends the
-	// exception event and not the parent, which what eml obtained from graph also contains
-	if ptr.Val(msg.GetMeetingMessageType()) != models.MEETINGREQUEST_MEETINGMESSAGETYPE {
-		// We don't have event data if it not "REQUEST" type.
-		// Both cancellation and acceptance does not return enough
-		// information to recreate an event.
-		return "", nil
-	}
-
-	// If data was not fetch with an expand property, then we can
-	// approximate the details with the following
-	event := models.NewEvent()
-	event.SetId(msg.GetId())
-	event.SetCreatedDateTime(msg.GetCreatedDateTime())
-	event.SetLastModifiedDateTime(msg.GetLastModifiedDateTime())
-	event.SetIsAllDay(msg.GetIsAllDay())
-	event.SetStart(msg.GetStartDateTime())
-	event.SetEnd(msg.GetEndDateTime())
-	event.SetRecurrence(msg.GetRecurrence())
-	// event.SetIsCancelled()
-	event.SetSubject(msg.GetSubject())
-	event.SetBodyPreview(msg.GetBodyPreview())
-	event.SetBody(msg.GetBody())
-
-	// https://learn.microsoft.com/en-us/graph/api/resources/eventmessage?view=graph-rest-1.0
-	// In addition, Outlook automatically creates an event instance in
-	// the invitee's calendar, with the showAs property as tentative.
-	event.SetShowAs(ptr.To(models.TENTATIVE_FREEBUSYSTATUS))
-
-	event.SetCategories(msg.GetCategories())
-	event.SetWebLink(msg.GetWebLink())
-	event.SetOrganizer(msg.GetFrom())
-
-	// NOTE: If an event was previously created and we added people to
-	// it, the original list of attendee are not available.
-	atts := []models.Attendeeable{}
-
-	for _, to := range msg.GetToRecipients() {
-		att := models.NewAttendee()
-		att.SetEmailAddress(to.GetEmailAddress())
-		att.SetTypeEscaped(ptr.To(models.REQUIRED_ATTENDEETYPE))
-		atts = append(atts, att)
-	}
-
-	for _, cc := range msg.GetCcRecipients() {
-		att := models.NewAttendee()
-		att.SetEmailAddress(cc.GetEmailAddress())
-		att.SetTypeEscaped(ptr.To(models.OPTIONAL_ATTENDEETYPE))
-		atts = append(atts, att)
-	}
-
-	// bcc did not show up in my tests, but adding for completeness
-	for _, bcc := range msg.GetBccRecipients() {
-		att := models.NewAttendee()
-		att.SetEmailAddress(bcc.GetEmailAddress())
-		att.SetTypeEscaped(ptr.To(models.OPTIONAL_ATTENDEETYPE))
-		atts = append(atts, att)
-	}
-
-	event.SetAttendees(atts)
-
-	event.SetLocation(msg.GetLocation())
-	// event.SetSensitivity() // unavailable in msg
-	event.SetImportance(msg.GetImportance())
-	// event.SetOnlineMeeting() // not available in eml either
-	event.SetAttachments(msg.GetAttachments())
-
-	return ics.FromEventable(ctx, event)
-}
-
-func getFileAttachment(ctx context.Context, attachment models.Attachmentable) (*mail.File, error) {
-	kind := ptr.Val(attachment.GetContentType())
-
-	bytes, err := attachment.GetBackingStore().Get("contentBytes")
-	if err != nil {
-		return nil, clues.WrapWC(ctx, err, "failed to get attachment bytes").
-			With("kind", kind)
-	}
-
-	if bytes == nil {
-		// TODO(meain): Handle non file attachments
-		// https://github.com/alcionai/corso/issues/4772
-		logger.Ctx(ctx).
-			With("attachment_id", ptr.Val(attachment.GetId()),
-				"attachment_type", ptr.Val(attachment.GetOdataType())).
-			Info("no contentBytes for attachment")
-
-		return nil, nil
-	}
-
-	bts, ok := bytes.([]byte)
-	if !ok {
-		return nil, clues.WrapWC(ctx, err, "invalid content bytes").
-			With("kind", kind).
-			With("interface_type", fmt.Sprintf("%T", bytes))
-	}
-
-	name := ptr.Val(attachment.GetName())
-	if len(name) == 0 {
-		// Graph as of now does not let us create any attachments
-		// without a name, but we have run into instances where we have
-		// see attachments without a name, possibly from old
-		// data. This is for those cases.
-		name = "Unnamed"
-	}
-
-	contentID, err := attachment.GetBackingStore().Get("contentId")
-	if err != nil {
-		return nil, clues.WrapWC(ctx, err, "getting content id for attachment").
-			With("kind", kind)
-	}
-
-	if contentID != nil {
-		cids, _ := str.AnyToString(contentID)
-		if len(cids) > 0 {
-			name = cids
-		}
-	}
-
-	return &mail.File{
-		// cannot use filename as inline attachment will not get mapped properly
-		Name:     name,
-		MimeType: kind,
-		Data:     bts,
-		Inline:   ptr.Val(attachment.GetIsInline()),
-	}, nil
-}
-
-func getItemAttachment(ctx context.Context, attachment models.Attachmentable) (*mail.File, error) {
-	it, err := attachment.GetBackingStore().Get("item")
-	if err != nil {
-		return nil, clues.WrapWC(ctx, err, "getting item for attachment").
-			With("attachment_id", ptr.Val(attachment.GetId()))
-	}
-
-	name := ptr.Val(attachment.GetName())
-	if len(name) == 0 {
-		// Graph as of now does not let us create any attachments
-		// without a name, but we have run into instances where we have
-		// see attachments without a name, possibly from old
-		// data. This is for those cases.
-		name = "Unnamed"
-	}
-
-	switch it := it.(type) {
-	case *models.Message:
-		cb, err := FromMessageable(ctx, it)
-		if err != nil {
-			return nil, clues.WrapWC(ctx, err, "converting item attachment to eml").
-				With("attachment_id", ptr.Val(attachment.GetId()))
-		}
-
-		return &mail.File{
-			Name:     name,
-			MimeType: "message/rfc822",
-			Data:     []byte(cb),
-		}, nil
-	default:
-		logger.Ctx(ctx).
-			With("attachment_id", ptr.Val(attachment.GetId()),
-				"attachment_type", ptr.Val(attachment.GetOdataType())).
-			Info("unknown item attachment type")
-	}
-
-	return nil, nil
-}
-
-func getMailAttachment(ctx context.Context, att models.Attachmentable) (*mail.File, error) {
-	otyp := ptr.Val(att.GetOdataType())
-
-	switch otyp {
-	case "#microsoft.graph.fileAttachment":
-		return getFileAttachment(ctx, att)
-	case "#microsoft.graph.itemAttachment":
-		return getItemAttachment(ctx, att)
-	default:
-		logger.Ctx(ctx).
-			With("attachment_id", ptr.Val(att.GetId()),
-				"attachment_type", otyp).
-			Info("unknown attachment type")
-
-		return nil, nil
-	}
-}
 
 // FromJSON converts a Messageable (as json) to .eml format
 func FromJSON(ctx context.Context, body []byte) (string, error) {
 	ctx = clues.Add(ctx, "body_len", len(body))
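The comment block in the removed `getICalData` hinges on one Graph behavior: the nested event is only populated when the message was fetched with an expand clause, which is what makes `msg.GetEvent()` non-nil. A hypothetical sketch of such a request URL; the `userID`/`messageID` placeholders and the exact URL shape are assumptions, only the expand path itself comes from the removed comment:

```go
package main

import "fmt"

func main() {
	userID := "user-id"       // hypothetical placeholder
	messageID := "message-id" // hypothetical placeholder

	// Mirrors the expand property named in the removed comment:
	// .../messages/<message_id>/?expand=Microsoft.Graph.EventMessage/Event
	url := fmt.Sprintf(
		"https://graph.microsoft.com/v1.0/users/%s/messages/%s?$expand=Microsoft.Graph.EventMessage/Event",
		userID,
		messageID)

	fmt.Println(url)
}
```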
@ -267,19 +59,13 @@ func FromJSON(ctx context.Context, body []byte) (string, error) {
 		return "", clues.WrapWC(ctx, err, "converting to messageble")
 	}
 
-	return FromMessageable(ctx, data)
-}
-
-// Converts a Messageable to .eml format
-func FromMessageable(ctx context.Context, data models.Messageable) (string, error) {
 	ctx = clues.Add(ctx, "item_id", ptr.Val(data.GetId()))
 
 	email := mail.NewMSG()
-	email.Encoding = mail.EncodingBase64 // Doing it to be safe for when we have eventMessage (newline issues)
 	email.AllowDuplicateAddress = true // More "correct" conversion
 	email.AddBccToHeader = true // Don't ignore Bcc
 	email.AllowEmptyAttachments = true // Don't error on empty attachments
 	email.UseProvidedAddress = true // Don't try to parse the email address
 
 	if data.GetFrom() != nil {
 		email.SetFrom(formatAddress(data.GetFrom().GetEmailAddress()))
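For orientation, the conversion funnels everything through the go-simple-mail builder configured above: set the permissive flags, populate headers and body, then check the builder's accumulated error before rendering the .eml text. A condensed sketch using only calls that appear elsewhere in this diff; the literal values are placeholders taken from the testdata below:

```go
func buildEML() (string, error) {
	email := mail.NewMSG()
	email.AllowDuplicateAddress = true // More "correct" conversion
	email.AddBccToHeader = true        // Don't ignore Bcc
	email.AllowEmptyAttachments = true // Don't error on empty attachments
	email.UseProvidedAddress = true    // Don't try to parse the email address

	email.SetFrom(`"Johanna Lorenz" <JohannaL@10rqc2.onmicrosoft.com>`) // placeholder
	email.SetSubject("Invitable event")                                 // placeholder
	email.SetBody(mail.TextHTML, "<p>How come the sun is hot?</p>")

	// The builder accumulates errors; surface them before rendering.
	if err := email.GetError(); err != nil {
		return "", err
	}

	return email.GetMessage(), nil
}
```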
@ -347,115 +133,6 @@ func FromMessageable(ctx context.Context, data models.Messageable) (string, erro
 		}
 	}
 
-	if data.GetAttachments() != nil {
-		for _, attachment := range data.GetAttachments() {
-			att, err := getMailAttachment(ctx, attachment)
-			if err != nil {
-				return "", clues.WrapWC(ctx, err, "getting mail attachment")
-			}
-
-			// There are known cases where we just wanna log and
-			// ignore instead of erroring out
-			if att != nil {
-				email.Attach(att)
-			}
-		}
-	}
-
-	switch data.(type) {
-	case *models.EventMessageResponse, *models.EventMessage:
-		// We can't handle this as of now, not enough information
-		// TODO: Fetch event object from graph when fetching email
-	case *models.CalendarSharingMessage:
-		// TODO: Parse out calendar sharing message
-		// https://github.com/alcionai/corso/issues/5041
-	case *models.EventMessageRequest:
-		cal, err := getICalData(ctx, data)
-		if err != nil {
-			return "", clues.Wrap(err, "getting ical attachment")
-		}
-
-		if len(cal) > 0 {
-			email.AddAlternative(mail.TextCalendar, cal)
-		}
-	}
-
-	if err := email.GetError(); err != nil {
-		return "", clues.WrapWC(ctx, err, "converting to eml")
-	}
-
-	return email.GetMessage(), nil
-}
-
-//-------------------------------------------------------------
-// Postable -> EML
-//-------------------------------------------------------------
-
-// FromJSONPostToEML converts a postable (as json) to .eml format.
-// TODO(pandeyabs): This is a stripped down copy of messageable to
-// eml conversion, it can be folded into one function by having a post
-// to messageable converter.
-func FromJSONPostToEML(
-	ctx context.Context,
-	body []byte,
-	postMetadata metadata.ConversationPostMetadata,
-) (string, error) {
-	ctx = clues.Add(ctx, "body_len", len(body))
-
-	data, err := api.BytesToPostable(body)
-	if err != nil {
-		return "", clues.WrapWC(ctx, err, "converting to postable")
-	}
-
-	ctx = clues.Add(ctx, "item_id", ptr.Val(data.GetId()))
-
-	email := mail.NewMSG()
-	email.Encoding = mail.EncodingBase64 // Doing it to be safe for when we have eventMessage (newline issues)
-	email.AllowDuplicateAddress = true // More "correct" conversion
-	email.AddBccToHeader = true // Don't ignore Bcc
-	email.AllowEmptyAttachments = true // Don't error on empty attachments
-	email.UseProvidedAddress = true // Don't try to parse the email address
-
-	if data.GetFrom() != nil {
-		email.SetFrom(formatAddress(data.GetFrom().GetEmailAddress()))
-	}
-
-	// We don't have the To, Cc, Bcc recipient information for posts due to a graph
-	// limitation. All posts carry the group email address as the only recipient
-	// for now.
-	email.AddTo(postMetadata.Recipients...)
-	email.SetSubject(postMetadata.Topic)
-
-	// Reply-To email address is not available for posts. Note that this is different
-	// from inReplyTo field.
-
-	if data.GetCreatedDateTime() != nil {
-		email.SetDate(ptr.Val(data.GetCreatedDateTime()).Format(dateFormat))
-	}
-
-	if data.GetBody() != nil {
-		if data.GetBody().GetContentType() != nil {
-			var contentType mail.ContentType
-
-			switch data.GetBody().GetContentType().String() {
-			case "html":
-				contentType = mail.TextHTML
-			case "text":
-				contentType = mail.TextPlain
-			default:
-				// https://learn.microsoft.com/en-us/graph/api/resources/itembody?view=graph-rest-1.0
-				// This should not be possible according to the documentation
-				logger.Ctx(ctx).
-					With("body_type", data.GetBody().GetContentType().String()).
-					Info("unknown body content type")
-
-				contentType = mail.TextPlain
-			}
-
-			email.SetBody(contentType, ptr.Val(data.GetBody().GetContent()))
-		}
-	}
-
 	if data.GetAttachments() != nil {
 		for _, attachment := range data.GetAttachments() {
 			kind := ptr.Val(attachment.GetContentType())
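The removed post path differs from the message path in one respect worth noting: Graph does not expose To/Cc/Bcc on conversation posts, so the caller had to supply recipients and topic out of band. A usage sketch of the removed API, mirroring the removed `TestConvert_postable_to_eml` further down in this diff; the literal values are that test's placeholders:

```go
func convertPost(ctx context.Context, postJSON []byte) (string, error) {
	// Recipients and topic come from collection metadata, not the payload;
	// posts carry only the group address as recipient.
	md := metadata.ConversationPostMetadata{
		Recipients: []string{"group@example.com"},
		Topic:      "test subject",
	}

	return FromJSONPostToEML(ctx, postJSON, md)
}
```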
@ -467,15 +144,15 @@ func FromJSONPostToEML(
 			}
 
 			if bytes == nil {
-				// TODO(meain): Handle non file attachments
+				// Some attachments have an "item" field instead of
+				// "contentBytes". There are items like contacts, emails
+				// or calendar events which will not be a normal format
+				// and will have to be converted to a text format.
+				// TODO(meain): Handle custom attachments
 				// https://github.com/alcionai/corso/issues/4772
-				//
-				// TODO(pandeyabs): Above issue is for messages.
-				// This is not a problem for posts but leaving it here for safety.
 				logger.Ctx(ctx).
-					With("attachment_id", ptr.Val(attachment.GetId()),
-						"attachment_type", ptr.Val(attachment.GetOdataType())).
-					Info("no contentBytes for attachment")
+					With("attachment_id", ptr.Val(attachment.GetId())).
+					Info("unhandled attachment type")
 
 				continue
 			}
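The rewritten comment above concerns the two attachment shapes Graph returns. As the removed `getMailAttachment` switch earlier in this diff shows, the `@odata.type` discriminator tells them apart: `fileAttachment` carries raw `contentBytes`, while `itemAttachment` nests a whole item (message, contact, event) under `item` and has no `contentBytes`. An illustrative helper along those lines (not code from this change):

```go
// attachmentKind mirrors the @odata.type switch from the removed
// getMailAttachment; illustrative only.
func attachmentKind(att models.Attachmentable) string {
	switch ptr.Val(att.GetOdataType()) {
	case "#microsoft.graph.fileAttachment":
		return "file" // raw payload under "contentBytes"
	case "#microsoft.graph.itemAttachment":
		return "item" // nested item; no contentBytes, needs conversion to text
	default:
		return "unknown" // logged and skipped by the converter
	}
}
```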
@ -488,9 +165,6 @@ func FromJSONPostToEML(
 			}
 
 			name := ptr.Val(attachment.GetName())
-			if len(name) == 0 {
-				name = "Unnamed"
-			}
 
 			contentID, err := attachment.GetBackingStore().Get("contentId")
 			if err != nil {
@ -515,8 +189,6 @@ func FromJSONPostToEML(
 		}
 	}
 
-	// Note: Posts cannot be of type EventMessageResponse, EventMessage or
-	// CalendarSharingMessage. So we don't need to handle those cases here.
 	if err = email.GetError(); err != nil {
 		return "", clues.WrapWC(ctx, err, "converting to eml")
 	}
@ -1,13 +1,11 @@
 package eml
 
 import (
-	"bytes"
 	"regexp"
 	"strings"
 	"testing"
 	"time"
 
-	ical "github.com/arran4/golang-ical"
 	"github.com/jhillyerd/enmime"
 	kjson "github.com/microsoft/kiota-serialization-json-go"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
@ -17,9 +15,6 @@ import (
 
 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/converters/eml/testdata"
-	"github.com/alcionai/corso/src/internal/converters/ics"
-	"github.com/alcionai/corso/src/internal/m365/collection/groups/metadata"
-	stub "github.com/alcionai/corso/src/internal/m365/service/groups/mock"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
@ -137,11 +132,6 @@ func (suite *EMLUnitSuite) TestConvert_messageble_to_eml() {
 }
 
 func (suite *EMLUnitSuite) TestConvert_edge_cases() {
-	bodies := []string{
-		testdata.EmailWithAttachments,
-		testdata.EmailWithinEmail,
-	}
-
 	tests := []struct {
 		name      string
 		transform func(models.Messageable)
@ -167,324 +157,34 @@ func (suite *EMLUnitSuite) TestConvert_edge_cases() {
 				require.NoError(suite.T(), err, "setting attachment content")
 			},
 		},
-		{
-			name: "attachment without name",
-			transform: func(msg models.Messageable) {
-				attachments := msg.GetAttachments()
-				attachments[1].SetName(ptr.To(""))
-
-				// This test has to be run on a non inline attachment
-				// as inline attachments use contentID instead of name
-				// even when there is a name.
-				assert.False(suite.T(), ptr.Val(attachments[1].GetIsInline()))
-			},
-		},
-		{
-			name: "attachment with nil name",
-			transform: func(msg models.Messageable) {
-				attachments := msg.GetAttachments()
-				attachments[1].SetName(nil)
-
-				// This test has to be run on a non inline attachment
-				// as inline attachments use contentID instead of name
-				// even when there is a name.
-				assert.False(suite.T(), ptr.Val(attachments[1].GetIsInline()))
-			},
-		},
-		{
-			name: "multiple attachments without name",
-			transform: func(msg models.Messageable) {
-				attachments := msg.GetAttachments()
-				attachments[1].SetName(ptr.To(""))
-				attachments[2].SetName(ptr.To(""))
-
-				// This test has to be run on a non inline attachment
-				// as inline attachments use contentID instead of name
-				// even when there is a name.
-				assert.False(suite.T(), ptr.Val(attachments[1].GetIsInline()))
-				assert.False(suite.T(), ptr.Val(attachments[2].GetIsInline()))
-			},
-		},
 	}
 
-	for _, b := range bodies {
-		for _, test := range tests {
-			suite.Run(test.name, func() {
-				t := suite.T()
-
-				ctx, flush := tester.NewContext(t)
-				defer flush()
-
-				body := []byte(b)
-
-				msg, err := api.BytesToMessageable(body)
-				require.NoError(t, err, "creating message")
-
-				test.transform(msg)
-
-				writer := kjson.NewJsonSerializationWriter()
-
-				defer writer.Close()
-
-				err = writer.WriteObjectValue("", msg)
-				require.NoError(t, err, "serializing message")
-
-				nbody, err := writer.GetSerializedContent()
-				require.NoError(t, err, "getting serialized content")
-
-				_, err = FromJSON(ctx, nbody)
-				assert.NoError(t, err, "converting to eml")
-			})
-		}
-	}
-}
+	for _, test := range tests {
+		suite.Run(test.name, func() {
+			t := suite.T()
+
+			ctx, flush := tester.NewContext(t)
+			defer flush()
+
+			body := []byte(testdata.EmailWithAttachments)
+
+			msg, err := api.BytesToMessageable(body)
+			require.NoError(t, err, "creating message")
+
+			test.transform(msg)
+
+			writer := kjson.NewJsonSerializationWriter()
+
+			defer writer.Close()
+
+			err = writer.WriteObjectValue("", msg)
+			require.NoError(t, err, "serializing message")
+
+			nbody, err := writer.GetSerializedContent()
+			require.NoError(t, err, "getting serialized content")
+
+			_, err = FromJSON(ctx, nbody)
+			assert.NoError(t, err, "converting to eml")
+		})
+	}
+}
-
-func (suite *EMLUnitSuite) TestConvert_eml_ics() {
-	t := suite.T()
-
-	ctx, flush := tester.NewContext(t)
-	defer flush()
-
-	body := []byte(testdata.EmailWithEventInfo)
-
-	out, err := FromJSON(ctx, body)
-	assert.NoError(t, err, "converting to eml")
-
-	rmsg, err := api.BytesToMessageable(body)
-	require.NoError(t, err, "creating message")
-
-	msg := rmsg.(*models.EventMessageRequest)
-
-	eml, err := enmime.ReadEnvelope(strings.NewReader(out))
-	require.NoError(t, err, "reading created eml")
-	require.NotNil(t, eml, "eml should not be nil")
-
-	require.Equal(t, 1, len(eml.OtherParts), "eml should have 1 attachment")
-	require.Equal(t, "text/calendar", eml.OtherParts[0].ContentType, "eml attachment should be a calendar")
-
-	catt := *eml.OtherParts[0]
-	cal, err := ical.ParseCalendar(bytes.NewReader(catt.Content))
-	require.NoError(t, err, "parsing calendar")
-
-	event := cal.Events()[0]
-
-	assert.Equal(t, ptr.Val(msg.GetId()), event.Id())
-	assert.Equal(t, ptr.Val(msg.GetSubject()), event.GetProperty(ical.ComponentPropertySummary).Value)
-
-	assert.Equal(
-		t,
-		msg.GetCreatedDateTime().Format(ics.ICalDateTimeFormatUTC),
-		event.GetProperty(ical.ComponentPropertyCreated).Value)
-	assert.Equal(
-		t,
-		msg.GetLastModifiedDateTime().Format(ics.ICalDateTimeFormatUTC),
-		event.GetProperty(ical.ComponentPropertyLastModified).Value)
-
-	st, err := ics.GetUTCTime(
-		ptr.Val(msg.GetStartDateTime().GetDateTime()),
-		ptr.Val(msg.GetStartDateTime().GetTimeZone()))
-	require.NoError(t, err, "getting start time")
-
-	et, err := ics.GetUTCTime(
-		ptr.Val(msg.GetEndDateTime().GetDateTime()),
-		ptr.Val(msg.GetEndDateTime().GetTimeZone()))
-	require.NoError(t, err, "getting end time")
-
-	assert.Equal(
-		t,
-		st.Format(ics.ICalDateTimeFormatUTC),
-		event.GetProperty(ical.ComponentPropertyDtStart).Value)
-	assert.Equal(
-		t,
-		et.Format(ics.ICalDateTimeFormatUTC),
-		event.GetProperty(ical.ComponentPropertyDtEnd).Value)
-
-	tos := msg.GetToRecipients()
-	ccs := msg.GetCcRecipients()
-	att := event.Attendees()
-
-	assert.Equal(t, len(tos)+len(ccs), len(att))
-
-	for _, to := range tos {
-		found := false
-
-		for _, attendee := range att {
-			if "mailto:"+ptr.Val(to.GetEmailAddress().GetAddress()) == attendee.Value {
-				found = true
-
-				assert.Equal(t, "REQ-PARTICIPANT", attendee.ICalParameters["ROLE"][0])
-
-				break
-			}
-		}
-
-		assert.True(t, found, "to recipient not found in attendees")
-	}
-
-	for _, cc := range ccs {
-		found := false
-
-		for _, attendee := range att {
-			if "mailto:"+ptr.Val(cc.GetEmailAddress().GetAddress()) == attendee.Value {
-				found = true
-
-				assert.Equal(t, "OPT-PARTICIPANT", attendee.ICalParameters["ROLE"][0])
-
-				break
-			}
-		}
-
-		assert.True(t, found, "cc recipient not found in attendees")
-	}
-}
-
-func (suite *EMLUnitSuite) TestConvert_eml_ics_from_event_obj() {
-	t := suite.T()
-
-	ctx, flush := tester.NewContext(t)
-	defer flush()
-
-	body := []byte(testdata.EmailWithEventObject)
-
-	out, err := FromJSON(ctx, body)
-	assert.NoError(t, err, "converting to eml")
-
-	rmsg, err := api.BytesToMessageable(body)
-	require.NoError(t, err, "creating message")
-
-	msg := rmsg.(*models.EventMessageRequest)
-	evt := msg.GetEvent()
-
-	eml, err := enmime.ReadEnvelope(strings.NewReader(out))
-	require.NoError(t, err, "reading created eml")
-	require.NotNil(t, eml, "eml should not be nil")
-
-	require.Equal(t, 1, len(eml.OtherParts), "eml should have 1 attachment")
-	require.Equal(t, "text/calendar", eml.OtherParts[0].ContentType, "eml attachment should be a calendar")
-
-	catt := *eml.OtherParts[0]
-	cal, err := ical.ParseCalendar(bytes.NewReader(catt.Content))
-	require.NoError(t, err, "parsing calendar")
-
-	event := cal.Events()[0]
-
-	assert.Equal(t, ptr.Val(evt.GetId()), event.Id())
-	assert.NotEqual(t, ptr.Val(msg.GetSubject()), event.GetProperty(ical.ComponentPropertySummary).Value)
-	assert.Equal(t, ptr.Val(evt.GetSubject()), event.GetProperty(ical.ComponentPropertySummary).Value)
-}
-
-//-------------------------------------------------------------
-// Postable -> EML tests
-//-------------------------------------------------------------
-
-func (suite *EMLUnitSuite) TestConvert_postable_to_eml() {
-	t := suite.T()
-
-	ctx, flush := tester.NewContext(t)
-	defer flush()
-
-	body := []byte(stub.PostWithAttachments)
-
-	postMetadata := metadata.ConversationPostMetadata{
-		Recipients: []string{"group@example.com"},
-		Topic:      "test subject",
-	}
-
-	out, err := FromJSONPostToEML(ctx, body, postMetadata)
-	assert.NoError(t, err, "converting to eml")
-
-	post, err := api.BytesToPostable(body)
-	require.NoError(t, err, "creating post")
-
-	eml, err := enmime.ReadEnvelope(strings.NewReader(out))
-	require.NoError(t, err, "reading created eml")
-
-	assert.Equal(t, postMetadata.Topic, eml.GetHeader("Subject"))
-	assert.Equal(t, post.GetCreatedDateTime().Format(time.RFC1123Z), eml.GetHeader("Date"))
-
-	assert.Equal(t, formatAddress(post.GetFrom().GetEmailAddress()), eml.GetHeader("From"))
-
-	// Test recipients. The post metadata should contain the group email address.
-
-	tos := strings.Split(eml.GetHeader("To"), ", ")
-	for _, sourceTo := range postMetadata.Recipients {
-		assert.Contains(t, tos, sourceTo)
-	}
-
-	// Assert cc, bcc to be empty since they are not supported for posts right now.
-	assert.Equal(t, "", eml.GetHeader("Cc"))
-	assert.Equal(t, "", eml.GetHeader("Bcc"))
-
-	// Test attachments using PostWithAttachments data as a reference.
-	// This data has 1 direct attachment and 1 inline attachment.
-	assert.Equal(t, 1, len(eml.Attachments), "direct attachment count")
-	assert.Equal(t, 1, len(eml.Inlines), "inline attachment count")
-
-	for _, sourceAttachment := range post.GetAttachments() {
-		targetContent := eml.Attachments[0].Content
-		if ptr.Val(sourceAttachment.GetIsInline()) {
-			targetContent = eml.Inlines[0].Content
-		}
-
-		sourceContent, err := sourceAttachment.GetBackingStore().Get("contentBytes")
-		assert.NoError(t, err, "getting source attachment content")
-
-		assert.Equal(t, sourceContent, targetContent)
-	}
-
-	// Test body
-	source := strings.ReplaceAll(eml.HTML, "\n", "")
-	target := strings.ReplaceAll(ptr.Val(post.GetBody().GetContent()), "\n", "")
-
-	// replace the cid with a constant value to make the comparison
-	re := regexp.MustCompile(`(?:src|originalSrc)="cid:[^"]*"`)
-	source = re.ReplaceAllString(source, `src="cid:replaced"`)
-	target = re.ReplaceAllString(target, `src="cid:replaced"`)
-
-	assert.Equal(t, source, target)
-}
-
-// Tests an ics within an eml within another eml
-func (suite *EMLUnitSuite) TestConvert_message_in_messageble_to_eml() {
-	t := suite.T()
-
-	ctx, flush := tester.NewContext(t)
-	defer flush()
-
-	body := []byte(testdata.EmailWithinEmail)
-
-	out, err := FromJSON(ctx, body)
-	assert.NoError(t, err, "converting to eml")
-
-	msg, err := api.BytesToMessageable(body)
-	require.NoError(t, err, "creating message")
-
-	eml, err := enmime.ReadEnvelope(strings.NewReader(out))
-	require.NoError(t, err, "reading created eml")
-
-	assert.Equal(t, ptr.Val(msg.GetSubject()), eml.GetHeader("Subject"))
-	assert.Equal(t, msg.GetSentDateTime().Format(time.RFC1123Z), eml.GetHeader("Date"))
-
-	assert.Equal(t, formatAddress(msg.GetFrom().GetEmailAddress()), eml.GetHeader("From"))
-
-	attachments := eml.Attachments
-	assert.Equal(t, 3, len(attachments), "attachment count in parent email")
-
-	ieml, err := enmime.ReadEnvelope(strings.NewReader(string(attachments[0].Content)))
-	require.NoError(t, err, "reading created eml")
-
-	itm, err := msg.GetAttachments()[0].GetBackingStore().Get("item")
-	require.NoError(t, err, "getting item from message")
-
-	imsg := itm.(*models.Message)
-	assert.Equal(t, ptr.Val(imsg.GetSubject()), ieml.GetHeader("Subject"))
-	assert.Equal(t, imsg.GetSentDateTime().Format(time.RFC1123Z), ieml.GetHeader("Date"))
-
-	assert.Equal(t, formatAddress(imsg.GetFrom().GetEmailAddress()), ieml.GetHeader("From"))
-
-	iattachments := ieml.Attachments
-	assert.Equal(t, 1, len(iattachments), "attachment count in child email")
-
-	// Known from testdata
-	assert.Contains(t, string(iattachments[0].Content), "X-LIC-LOCATION:Africa/Abidjan")
-}
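One detail from the removed `TestConvert_postable_to_eml` is worth preserving: inline images are referenced by Content-ID, and conversion assigns fresh cids, so the test normalized both sides with a regex before comparing HTML bodies. A standalone demonstration of that trick; the sample cid strings are invented:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern the removed test used to make bodies comparable.
	re := regexp.MustCompile(`(?:src|originalSrc)="cid:[^"]*"`)

	source := `<img src="cid:abc123@corso">` // hypothetical pre-conversion cid
	target := `<img src="cid:def456@corso">` // hypothetical post-conversion cid

	fmt.Println(re.ReplaceAllString(source, `src="cid:replaced"`) ==
		re.ReplaceAllString(target, `src="cid:replaced"`)) // true
}
```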
@ -104,19 +104,6 @@
       "contentId": null,
       "contentLocation": null,
       "contentBytes": "W1BhdGhzXQpQcmVmaXggPSAuLgo="
-    },
-    {
-      "@odata.type": "#microsoft.graph.fileAttachment",
-      "@odata.mediaContentType": "application/octet-stream",
-      "id": "ZZMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAEwbDEWAAABEgAQAD3rU0iyzCdHgz0xmOrWc9g=",
-      "lastModifiedDateTime": "2023-11-16T05:42:47Z",
-      "name": "qt2.conf",
-      "contentType": "application/octet-stream",
-      "size": 156,
-      "isInline": false,
-      "contentId": null,
-      "contentLocation": null,
-      "contentBytes": "Z1BhdGhzXQpQcmVmaXggPSAuLgo="
     }
   ]
 }
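The `contentBytes` fields in this testdata are plain base64, which makes the fixtures easy to inspect. Decoding the surviving attachment's payload, for instance, yields a tiny Qt-style config file; the deleted `qt2.conf` sibling differs only in its first byte:

```go
package main

import (
	"encoding/base64"
	"fmt"
)

func main() {
	raw, err := base64.StdEncoding.DecodeString("W1BhdGhzXQpQcmVmaXggPSAuLgo=")
	if err != nil {
		panic(err)
	}

	// Prints:
	// [Paths]
	// Prefix = ..
	fmt.Print(string(raw))
}
```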
@ -1,103 +0,0 @@
-{
-  "@odata.type": "#microsoft.graph.eventMessageRequest",
-  "@odata.etag": "W/\"CwAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFY+7zz\"",
-  "id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFZU-vfAAA=",
-  "createdDateTime": "2024-01-15T11:26:41Z",
-  "lastModifiedDateTime": "2024-01-15T11:26:43Z",
-  "changeKey": "CwAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFY+7zz",
-  "categories": [],
-  "receivedDateTime": "2024-01-15T11:26:41Z",
-  "sentDateTime": "2024-01-15T11:26:41Z",
-  "hasAttachments": false,
-  "internetMessageId": "<PH0PR04MB7285DDED30186B1D8E1BD2AABC6C2@PH0PR04MB7285.namprd04.prod.outlook.com>",
-  "subject": "Invitable event",
-  "bodyPreview": "How come the sun is hot?\r\n\r\n________________________________________________________________________________\r\nMicrosoft Teams meeting\r\nJoin on your computer, mobile app or room device\r\nClick here to join the meeting\r\nMeeting ID: 290 273 192 285\r\nPasscode:",
-  "importance": "normal",
-  "parentFolderId": "AQMkAGJiAGZhNjRlOC00OGI5LTQyNTItYjFkMy00NTJjMTgyZGZkMjQALgAAA0V2IruiJ9ZFvgAO6qBJFycBAEEUODQkmTtNjV_awmuHu00AAAIBCQAAAA==",
-  "conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQABoIUFzzByJMltJobtYSAJ0=",
-  "conversationIndex": "AdpHpbXXGghQXPMHIkyW0mhu1hIAnQ==",
-  "isDeliveryReceiptRequested": null,
-  "isReadReceiptRequested": false,
-  "isRead": true,
-  "isDraft": false,
-  "webLink": "https://outlook.office365.com/owa/?ItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFZU%2FvfAAA%3D&exvsurl=1&viewmodel=ReadMessageItem",
-  "inferenceClassification": "focused",
-  "meetingMessageType": "meetingRequest",
-  "type": "seriesMaster",
-  "isOutOfDate": false,
-  "isAllDay": false,
-  "isDelegated": false,
-  "responseRequested": true,
-  "allowNewTimeProposals": null,
-  "meetingRequestType": "newMeetingRequest",
-  "body": {
-    "contentType": "html",
-    "content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"></head><body><div class=\"elementToProof\" style=\"font-family:Aptos,Aptos_EmbeddedFont,Aptos_MSFontService,Calibri,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0)\">How come the sun is hot?<br></div><br><div style=\"width:100%\"><span style=\"white-space:nowrap; color:#5F5F5F; opacity:.36\">________________________________________________________________________________</span> </div><div class=\"me-email-text\" lang=\"en-US\" style=\"color:#252424; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\"><div style=\"margin-top:24px; margin-bottom:20px\"><span style=\"font-size:24px; color:#252424\">Microsoft Teams meeting</span> </div><div style=\"margin-bottom:20px\"><div style=\"margin-top:0px; margin-bottom:0px; font-weight:bold\"><span style=\"font-size:14px; color:#252424\">Join on your computer, mobile app or room device</span> </div><a class=\"me-email-headline\" href=\"https://teams.microsoft.com/l/meetup-join/19%3ameeting_OGM4MWVlYjUtMjllMi00ZjY5LWE5YjgtMTc4MjJhMWI1MjRl%40thread.v2/0?context=%7b%22Tid%22%3a%22fb8afbaa-e94c-4ea5-8a8a-24aff04d7874%22%2c%22Oid%22%3a%227ceb8e03-bdc5-4509-a136-457526165ec0%22%7d\" target=\"_blank\" rel=\"noreferrer noopener\" style=\"font-size:14px; font-family:'Segoe UI Semibold','Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif; text-decoration:underline; color:#6264a7\">Click here to join the meeting</a> </div><div style=\"margin-bottom:20px; margin-top:20px\"><div style=\"margin-bottom:4px\"><span data-tid=\"meeting-code\" style=\"font-size:14px; color:#252424\">Meeting ID: <span style=\"font-size:16px; color:#252424\">290 273 192 285</span> </span><br><span style=\"font-size:14px; color:#252424\">Passcode: </span><span style=\"font-size:16px; color:#252424\">CwEBTS </span><div style=\"font-size:14px\"><a class=\"me-email-link\" target=\"_blank\" href=\"https://www.microsoft.com/en-us/microsoft-teams/download-app\" rel=\"noreferrer noopener\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Download Teams</a> | <a class=\"me-email-link\" target=\"_blank\" href=\"https://www.microsoft.com/microsoft-teams/join-a-meeting\" rel=\"noreferrer noopener\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Join on the web</a></div></div></div><div style=\"margin-bottom:24px; margin-top:20px\"><a class=\"me-email-link\" target=\"_blank\" href=\"https://aka.ms/JoinTeamsMeeting\" rel=\"noreferrer noopener\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Learn More</a> | <a class=\"me-email-link\" target=\"_blank\" href=\"https://teams.microsoft.com/meetingOptions/?organizerId=7ceb8e03-bdc5-4509-a136-457526165ec0&tenantId=fb8afbaa-e94c-4ea5-8a8a-24aff04d7874&threadId=19_meeting_OGM4MWVlYjUtMjllMi00ZjY5LWE5YjgtMTc4MjJhMWI1MjRl@thread.v2&messageId=0&language=en-US\" rel=\"noreferrer noopener\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Meeting options</a> </div></div><div style=\"font-size:14px; margin-bottom:4px; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\"></div><div style=\"font-size:12px\"></div><div></div><div style=\"width:100%\"><span style=\"white-space:nowrap; color:#5F5F5F; opacity:.36\">________________________________________________________________________________</span> </div></body></html>"
-  },
-  "sender": {
-    "emailAddress": {
-      "name": "Johanna Lorenz",
-      "address": "JohannaL@10rqc2.onmicrosoft.com"
-    }
-  },
-  "from": {
-    "emailAddress": {
-      "name": "Johanna Lorenz",
-      "address": "JohannaL@10rqc2.onmicrosoft.com"
-    }
-  },
-  "toRecipients": [
-    {
-      "emailAddress": {
-        "name": "Faker 1",
-        "address": "fakeid1@provider.com"
-      }
-    }
-  ],
-  "ccRecipients": [
-    {
-      "emailAddress": {
-        "name": "Faker 2",
-        "address": "fakeid2@provider.com"
-      }
-    }
-  ],
-  "bccRecipients": [],
-  "replyTo": [],
-  "flag": {
-    "flagStatus": "notFlagged"
-  },
-  "startDateTime": {
-    "dateTime": "2024-01-22T02:30:00.0000000",
-    "timeZone": "UTC"
-  },
-  "endDateTime": {
-    "dateTime": "2024-01-22T03:00:00.0000000",
-    "timeZone": "UTC"
-  },
-  "location": {
-    "displayName": "Microsoft Teams Meeting",
-    "locationType": "default",
-    "uniqueIdType": "unknown"
-  },
-  "recurrence": {
-    "pattern": {
-      "type": "daily",
-      "interval": 1,
-      "month": 0,
-      "dayOfMonth": 0,
-      "firstDayOfWeek": "sunday",
-      "index": "first"
-    },
-    "range": {
-      "type": "endDate",
-      "startDate": "2024-01-21",
-      "endDate": "2024-01-24",
-      "recurrenceTimeZone": "tzone://Microsoft/Utc",
-      "numberOfOccurrences": 0
-    }
-  },
-  "previousLocation": null,
-  "previousStartDateTime": null,
-  "previousEndDateTime": null
-}
@ -1,204 +0,0 @@
-{
-  "@odata.context": "https://graph.microsoft.com/v1.0/$metadata#users('JohannaL%4010rqc2.onmicrosoft.com')/messages(microsoft.graph.eventMessage/event())/$entity",
-  "@odata.type": "#microsoft.graph.eventMessageRequest",
-  "@odata.etag": "W/\"CwAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFY+7zz\"",
-  "id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFZU-vfAAA=",
-  "createdDateTime": "2024-01-15T11:26:41Z",
-  "lastModifiedDateTime": "2024-01-15T11:26:43Z",
-  "changeKey": "CwAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFY+7zz",
-  "categories": [],
-  "receivedDateTime": "2024-01-15T11:26:41Z",
-  "sentDateTime": "2024-01-15T11:26:41Z",
-  "hasAttachments": false,
-  "internetMessageId": "<PH0PR04MB7285DDED30186B1D8E1BD2AABC6C2@PH0PR04MB7285.namprd04.prod.outlook.com>",
-  "subject": "Invitable event",
-  "bodyPreview": "How come the sun is hot?\r\n\r\n________________________________________________________________________________\r\nMicrosoft Teams meeting\r\nJoin on your computer, mobile app or room device\r\nClick here to join the meeting\r\nMeeting ID: 290 273 192 285\r\nPasscode:",
-  "importance": "normal",
-  "parentFolderId": "AQMkAGJiAGZhNjRlOC00OGI5LTQyNTItYjFkMy00NTJjMTgyZGZkMjQALgAAA0V2IruiJ9ZFvgAO6qBJFycBAEEUODQkmTtNjV_awmuHu00AAAIBCQAAAA==",
-  "conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQABoIUFzzByJMltJobtYSAJ0=",
-  "conversationIndex": "AdpHpbXXGghQXPMHIkyW0mhu1hIAnQ==",
-  "isDeliveryReceiptRequested": null,
-  "isReadReceiptRequested": false,
-  "isRead": true,
-  "isDraft": false,
-  "webLink": "https://outlook.office365.com/owa/?ItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFZU%2FvfAAA%3D&exvsurl=1&viewmodel=ReadMessageItem",
-  "inferenceClassification": "focused",
-  "meetingMessageType": "meetingRequest",
-  "type": "seriesMaster",
-  "isOutOfDate": false,
-  "isAllDay": false,
-  "isDelegated": false,
-  "responseRequested": true,
-  "allowNewTimeProposals": null,
-  "meetingRequestType": "newMeetingRequest",
-  "body": {
-    "contentType": "html",
-    "content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"></head><body><div class=\"elementToProof\" style=\"font-family:Aptos,Aptos_EmbeddedFont,Aptos_MSFontService,Calibri,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0)\">How come the sun is hot?<br></div><br><div style=\"width:100%\"><span style=\"white-space:nowrap; color:#5F5F5F; opacity:.36\">________________________________________________________________________________</span> </div><div class=\"me-email-text\" lang=\"en-US\" style=\"color:#252424; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\"><div style=\"margin-top:24px; margin-bottom:20px\"><span style=\"font-size:24px; color:#252424\">Microsoft Teams meeting</span> </div><div style=\"margin-bottom:20px\"><div style=\"margin-top:0px; margin-bottom:0px; font-weight:bold\"><span style=\"font-size:14px; color:#252424\">Join on your computer, mobile app or room device</span> </div><a class=\"me-email-headline\" href=\"https://teams.microsoft.com/l/meetup-join/19%3ameeting_OGM4MWVlYjUtMjllMi00ZjY5LWE5YjgtMTc4MjJhMWI1MjRl%40thread.v2/0?context=%7b%22Tid%22%3a%22fb8afbaa-e94c-4ea5-8a8a-24aff04d7874%22%2c%22Oid%22%3a%227ceb8e03-bdc5-4509-a136-457526165ec0%22%7d\" target=\"_blank\" rel=\"noreferrer noopener\" style=\"font-size:14px; font-family:'Segoe UI Semibold','Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif; text-decoration:underline; color:#6264a7\">Click here to join the meeting</a> </div><div style=\"margin-bottom:20px; margin-top:20px\"><div style=\"margin-bottom:4px\"><span data-tid=\"meeting-code\" style=\"font-size:14px; color:#252424\">Meeting ID: <span style=\"font-size:16px; color:#252424\">290 273 192 285</span> </span><br><span style=\"font-size:14px; color:#252424\">Passcode: </span><span style=\"font-size:16px; color:#252424\">CwEBTS </span><div style=\"font-size:14px\"><a class=\"me-email-link\" target=\"_blank\" href=\"https://www.microsoft.com/en-us/microsoft-teams/download-app\" rel=\"noreferrer noopener\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Download Teams</a> | <a class=\"me-email-link\" target=\"_blank\" href=\"https://www.microsoft.com/microsoft-teams/join-a-meeting\" rel=\"noreferrer noopener\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Join on the web</a></div></div></div><div style=\"margin-bottom:24px; margin-top:20px\"><a class=\"me-email-link\" target=\"_blank\" href=\"https://aka.ms/JoinTeamsMeeting\" rel=\"noreferrer noopener\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Learn More</a> | <a class=\"me-email-link\" target=\"_blank\" href=\"https://teams.microsoft.com/meetingOptions/?organizerId=7ceb8e03-bdc5-4509-a136-457526165ec0&tenantId=fb8afbaa-e94c-4ea5-8a8a-24aff04d7874&threadId=19_meeting_OGM4MWVlYjUtMjllMi00ZjY5LWE5YjgtMTc4MjJhMWI1MjRl@thread.v2&messageId=0&language=en-US\" rel=\"noreferrer noopener\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Meeting options</a> </div></div><div style=\"font-size:14px; margin-bottom:4px; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\"></div><div style=\"font-size:12px\"></div><div></div><div style=\"width:100%\"><span style=\"white-space:nowrap; color:#5F5F5F; opacity:.36\">________________________________________________________________________________</span> </div></body></html>"
-  },
-  "sender": {
-    "emailAddress": {
-      "name": "Johanna Lorenz",
-      "address": "JohannaL@10rqc2.onmicrosoft.com"
-    }
-  },
-  "from": {
-    "emailAddress": {
-      "name": "Johanna Lorenz",
-      "address": "JohannaL@10rqc2.onmicrosoft.com"
-    }
-  },
-  "toRecipients": [
-    {
-      "emailAddress": {
-        "name": "Pradeep Gupta",
-        "address": "PradeepG@10rqc2.onmicrosoft.com"
-      }
-    }
-  ],
-  "ccRecipients": [],
-  "bccRecipients": [],
-  "replyTo": [],
-  "flag": {
-    "flagStatus": "notFlagged"
-  },
-  "startDateTime": {
-    "dateTime": "2024-01-22T02:30:00.0000000",
-    "timeZone": "UTC"
-  },
-  "endDateTime": {
-    "dateTime": "2024-01-22T03:00:00.0000000",
-    "timeZone": "UTC"
-  },
-  "location": {
-    "displayName": "Microsoft Teams Meeting",
-    "locationType": "default",
-    "uniqueIdType": "unknown"
-  },
-  "recurrence": {
-    "pattern": {
-      "type": "daily",
-      "interval": 1,
-      "month": 0,
-      "dayOfMonth": 0,
-      "firstDayOfWeek": "sunday",
-      "index": "first"
-    },
-    "range": {
-      "type": "endDate",
-      "startDate": "2024-01-21",
-      "endDate": "2024-01-24",
-      "recurrenceTimeZone": "tzone://Microsoft/Utc",
-      "numberOfOccurrences": 0
-    }
-  },
-  "previousLocation": null,
-  "previousStartDateTime": null,
-  "previousEndDateTime": null,
-  "event@odata.associationLink": "https://graph.microsoft.com/v1.0/users('JohannaL@10rqc2.onmicrosoft.com')/events('AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAENAABBFDg0JJk7TY1fmsJrh7tNAAFZU--pAAA=')/$ref",
-  "event@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('JohannaL@10rqc2.onmicrosoft.com')/events('AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAENAABBFDg0JJk7TY1fmsJrh7tNAAFZU--pAAA=')",
-  "event": {
-    "@odata.etag": "W/\"QRQ4NCSZO02NX5rCa4e7TQABWPwm+A==\"",
-    "id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAENAABBFDg0JJk7TY1fmsJrh7tNAAFZU--pAAA=",
-    "createdDateTime": "2024-01-15T11:26:39.1524133Z",
-    "lastModifiedDateTime": "2024-01-16T10:29:34.8704061Z",
-    "changeKey": "QRQ4NCSZO02NX5rCa4e7TQABWPwm+A==",
-    "categories": [],
-    "transactionId": "d19de894-1d85-dde1-ea5f-9332e850667b",
-    "originalStartTimeZone": "India Standard Time",
-    "originalEndTimeZone": "India Standard Time",
-    "iCalUId": "040000008200E00074C5B7101A82E008000000002757EEB4A547DA01000000000000000010000000CCC41D0213F00E489061EF756A0E6864",
-    "reminderMinutesBeforeStart": 15,
-    "isReminderOn": true,
-    "hasAttachments": false,
-    "subject": "Different title to test",
-    "bodyPreview": "How come the sun is hot?\r\n\r\n________________________________________________________________________________\r\nMicrosoft Teams meeting\r\nJoin on your computer, mobile app or room device\r\nClick here to join the meeting\r\nMeeting ID: 290 273 192 285\r\nPasscode:",
-    "importance": "normal",
-    "sensitivity": "normal",
-    "isAllDay": false,
-    "isCancelled": false,
-    "isOrganizer": true,
-    "responseRequested": true,
-    "seriesMasterId": null,
-    "showAs": "busy",
-    "type": "seriesMaster",
-    "webLink": "https://outlook.office365.com/owa/?itemid=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAENAABBFDg0JJk7TY1fmsJrh7tNAAFZU%2F%2FpAAA%3D&exvsurl=1&path=/calendar/item",
-    "onlineMeetingUrl": null,
-    "isOnlineMeeting": true,
-    "onlineMeetingProvider": "teamsForBusiness",
-    "allowNewTimeProposals": true,
-    "occurrenceId": null,
-    "isDraft": false,
-    "hideAttendees": false,
-    "responseStatus": {
-      "response": "organizer",
-      "time": "0001-01-01T00:00:00Z"
-    },
-    "body": {
-      "contentType": "html",
-      "content": "<html>\r\n<head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\r\n</head>\r\n<body>\r\n<div class=\"elementToProof\" style=\"font-family:Aptos,Aptos_EmbeddedFont,Aptos_MSFontService,Calibri,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0)\">\r\nHow come the sun is hot?<br>\r\n</div>\r\n<br>\r\n<div style=\"width:100%\"><span style=\"white-space:nowrap; color:#5F5F5F; opacity:.36\">________________________________________________________________________________</span>\r\n</div>\r\n<div class=\"me-email-text\" lang=\"en-US\" style=\"color:#252424; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">\r\n<div style=\"margin-top:24px; margin-bottom:20px\"><span style=\"font-size:24px; color:#252424\">Microsoft Teams meeting</span>\r\n</div>\r\n<div style=\"margin-bottom:20px\">\r\n<div style=\"margin-top:0px; margin-bottom:0px; font-weight:bold\"><span style=\"font-size:14px; color:#252424\">Join on your computer, mobile app or room device</span>\r\n</div>\r\n<a href=\"https://teams.microsoft.com/l/meetup-join/19%3ameeting_OGM4MWVlYjUtMjllMi00ZjY5LWE5YjgtMTc4MjJhMWI1MjRl%40thread.v2/0?context=%7b%22Tid%22%3a%22fb8afbaa-e94c-4ea5-8a8a-24aff04d7874%22%2c%22Oid%22%3a%227ceb8e03-bdc5-4509-a136-457526165ec0%22%7d\" class=\"me-email-headline\" style=\"font-size:14px; font-family:'Segoe UI Semibold','Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif; text-decoration:underline; color:#6264a7\">Click\r\n here to join the meeting</a> </div>\r\n<div style=\"margin-bottom:20px; margin-top:20px\">\r\n<div style=\"margin-bottom:4px\"><span data-tid=\"meeting-code\" style=\"font-size:14px; color:#252424\">Meeting ID:\r\n<span style=\"font-size:16px; color:#252424\">290 273 192 285</span> </span><br>\r\n<span style=\"font-size:14px; color:#252424\">Passcode: </span><span style=\"font-size:16px; color:#252424\">CwEBTS\r\n</span>\r\n<div style=\"font-size:14px\"><a href=\"https://www.microsoft.com/en-us/microsoft-teams/download-app\" class=\"me-email-link\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Download\r\n Teams</a> | <a href=\"https://www.microsoft.com/microsoft-teams/join-a-meeting\" class=\"me-email-link\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">\r\nJoin on the web</a></div>\r\n</div>\r\n</div>\r\n<div style=\"margin-bottom:24px; margin-top:20px\"><a href=\"https://aka.ms/JoinTeamsMeeting\" class=\"me-email-link\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Learn More</a>\r\n | <a href=\"https://teams.microsoft.com/meetingOptions/?organizerId=7ceb8e03-bdc5-4509-a136-457526165ec0&tenantId=fb8afbaa-e94c-4ea5-8a8a-24aff04d7874&threadId=19_meeting_OGM4MWVlYjUtMjllMi00ZjY5LWE5YjgtMTc4MjJhMWI1MjRl@thread.v2&messageId=0&language=en-US\" class=\"me-email-link\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">\r\nMeeting options</a> </div>\r\n</div>\r\n<div style=\"font-size:14px; margin-bottom:4px; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">\r\n</div>\r\n<div style=\"font-size:12px\"></div>\r\n<div></div>\r\n<div style=\"width:100%\"><span style=\"white-space:nowrap; color:#5F5F5F; opacity:.36\">________________________________________________________________________________</span>\r\n</div>\r\n</body>\r\n</html>\r\n"
-    },
-    "start": {
-      "dateTime": "2024-01-22T02:30:00.0000000",
-      "timeZone": "UTC"
-    },
-    "end": {
-      "dateTime": "2024-01-22T03:00:00.0000000",
-      "timeZone": "UTC"
-    },
-    "location": {
-      "displayName": "Microsoft Teams Meeting",
-      "locationType": "default",
-      "uniqueId": "Microsoft Teams Meeting",
-      "uniqueIdType": "private"
-    },
-    "locations": [
-      {
-        "displayName": "Microsoft Teams Meeting",
-        "locationType": "default",
-        "uniqueId": "Microsoft Teams Meeting",
-        "uniqueIdType": "private"
-      }
-    ],
-    "recurrence": {
-      "pattern": {
-        "type": "daily",
-        "interval": 1,
-        "month": 0,
-        "dayOfMonth": 0,
-        "firstDayOfWeek": "sunday",
-        "index": "first"
-      },
-      "range": {
-        "type": "endDate",
-        "startDate": "2024-01-22",
-        "endDate": "2024-01-25",
-        "recurrenceTimeZone": "India Standard Time",
-        "numberOfOccurrences": 0
-      }
-    },
-    "attendees": [
-      {
-        "type": "required",
-        "status": {
-          "response": "none",
-          "time": "0001-01-01T00:00:00Z"
-        },
-        "emailAddress": {
-          "name": "Pradeep Gupta",
-          "address": "PradeepG@10rqc2.onmicrosoft.com"
-        }
-      }
-    ],
-    "organizer": {
-      "emailAddress": {
-        "name": "Johanna Lorenz",
-        "address": "JohannaL@10rqc2.onmicrosoft.com"
-      }
-    },
-    "onlineMeeting": {
-      "joinUrl": "https://teams.microsoft.com/l/meetup-join/19%3ameeting_OGM4MWVlYjUtMjllMi00ZjY5LWE5YjgtMTc4MjJhMWI1MjRl%40thread.v2/0?context=%7b%22Tid%22%3a%22fb8afbaa-e94c-4ea5-8a8a-24aff04d7874%22%2c%22Oid%22%3a%227ceb8e03-bdc5-4509-a136-457526165ec0%22%7d"
    },
-    "calendar@odata.associationLink": "https://graph.microsoft.com/v1.0/users('JohannaL@10rqc2.onmicrosoft.com')/calendars('AQMkAGJiAGZhNjRlOC00OGI5LTQyNTItYjFkMy00NTJjMTgyZGZkMjQALgAAA0V2IruiJ9ZFvgAO6qBJFycBAEEUODQkmTtNjV_awmuHu00AAAIBDQAAAA==')/$ref",
-    "calendar@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('JohannaL@10rqc2.onmicrosoft.com')/calendars('AQMkAGJiAGZhNjRlOC00OGI5LTQyNTItYjFkMy00NTJjMTgyZGZkMjQALgAAA0V2IruiJ9ZFvgAO6qBJFycBAEEUODQkmTtNjV_awmuHu00AAAIBDQAAAA==')"
-  }
-}
@ -1,268 +0,0 @@
|
|||||||
-{
-"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAA=",
-"@odata.type": "#microsoft.graph.message",
-"@odata.context": "https://graph.microsoft.com/v1.0/$metadata#users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages/$entity",
-"@odata.etag": "W/\"CQAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFnDeBl\"",
-"categories": [],
-"changeKey": "CQAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFnDeBl",
-"createdDateTime": "2024-02-05T09:33:23Z",
-"lastModifiedDateTime": "2024-02-05T09:33:48Z",
-"attachments": [
-{
-"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAABEgAQAEUyH0VS3HJBgHDlZdWZl0k=",
-"@odata.type": "#microsoft.graph.itemAttachment",
-"item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')",
-"item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')/$ref",
-"isInline": false,
-"lastModifiedDateTime": "2024-02-05T09:33:46Z",
-"name": "Purpose of life",
-"size": 11840,
-"item": {
-"id": "",
-"@odata.type": "#microsoft.graph.message",
-"createdDateTime": "2024-02-05T09:33:24Z",
-"lastModifiedDateTime": "2024-02-05T09:33:46Z",
-"attachments": [
-{
-"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAACEgAQAEUyH0VS3HJBgHDlZdWZl0kSABAAjBhd4-oQaUS969pTkS-gzA==",
-"@odata.type": "#microsoft.graph.fileAttachment",
-"@odata.mediaContentType": "text/calendar",
-"contentType": "text/calendar",
-"isInline": false,
-"lastModifiedDateTime": "2024-02-05T09:33:46Z",
-"name": "Abidjan.ics",
-"size": 573,
-"contentBytes": "QkVHSU46VkNBTEVOREFSDQpQUk9ESUQ6LS8vdHp1cmwub3JnLy9OT05TR01MIE9sc29uIDIwMjNkLy9FTg0KVkVSU0lPTjoyLjANCkJFR0lOOlZUSU1FWk9ORQ0KVFpJRDpBZnJpY2EvQWJpZGphbg0KTEFTVC1NT0RJRklFRDoyMDIzMTIyMlQyMzMzNThaDQpUWlVSTDpodHRwczovL3d3dy50enVybC5vcmcvem9uZWluZm8vQWZyaWNhL0FiaWRqYW4NClgtTElDLUxPQ0FUSU9OOkFmcmljYS9BYmlkamFuDQpYLVBST0xFUFRJQy1UWk5BTUU6TE1UDQpCRUdJTjpTVEFOREFSRA0KVFpOQU1FOkdNVA0KVFpPRkZTRVRGUk9NOi0wMDE2MDgNClRaT0ZGU0VUVE86KzAwMDANCkRUU1RBUlQ6MTkxMjAxMDFUMDAwMDAwDQpFTkQ6U1RBTkRBUkQNCkVORDpWVElNRVpPTkUNCkVORDpWQ0FMRU5EQVINCg=="
-}
-],
-"body": {
-"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none;\"> P {margin-top:0;margin-bottom:0;} </style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family: Aptos, Aptos_EmbeddedFont, Aptos_MSFontService, Calibri, Helvetica, sans-serif; font-size: 12pt; color: rgb(0, 0, 0);\">I just realized the purpose of my life is to be a test case. Good to know.<br></div></body></html>",
-"contentType": "html"
-},
-"bodyPreview": "I just realized the purpose of my life is to be a test case. Good to know.",
-"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAFEnxDqYmbJEm8d2l3qfS6A=",
-"conversationIndex": "AQHaWBYiUSfEOpiZskSbx3aXep9LoA==",
-"flag": {
-"flagStatus": "notFlagged"
-},
-"from": {
-"emailAddress": {
-"address": "JohannaL@10rqc2.onmicrosoft.com",
-"name": "Johanna Lorenz"
-}
-},
-"hasAttachments": true,
-"importance": "normal",
-"internetMessageId": "<SJ0PR04MB7294108E381BCCE5C207B6DEBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
-"isDeliveryReceiptRequested": false,
-"isDraft": false,
-"isRead": true,
-"isReadReceiptRequested": false,
-"receivedDateTime": "2024-02-05T09:33:12Z",
-"sender": {
-"emailAddress": {
-"address": "JohannaL@10rqc2.onmicrosoft.com",
-"name": "Johanna Lorenz"
-}
-},
-"sentDateTime": "2024-02-05T09:33:11Z",
-"subject": "Purpose of life",
-"toRecipients": [
-{
-"emailAddress": {
-"address": "PradeepG@10rqc2.onmicrosoft.com",
-"name": "Pradeep Gupta"
-}
-}
-],
-"webLink": "https://outlook.office365.com/owa/?AttachmentItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAABEgAQAEUyH0VS3HJBgHDlZdWZl0k%3D&exvsurl=1&viewmodel=ItemAttachment"
-}
-},
-{
-"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAABEgAQAEUyH0VS3HJBgHDlZdWZl02=",
-"@odata.type": "#microsoft.graph.itemAttachment",
-"item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')",
-"item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')/$ref",
-"isInline": false,
-"lastModifiedDateTime": "2024-02-05T09:33:46Z",
-"name": "Purpose of life part 2",
-"size": 11840,
-"item": {
-"id": "",
-"@odata.type": "#microsoft.graph.message",
-"createdDateTime": "2024-02-05T09:33:24Z",
-"lastModifiedDateTime": "2024-02-05T09:33:46Z",
-"attachments": [
-{
-"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAACEgAQAEUyH0VS3HJBgHDlZdWZl0kSABAAjBhd4-oQaUS969pTkS-gzA==",
-"@odata.type": "#microsoft.graph.fileAttachment",
-"@odata.mediaContentType": "text/calendar",
-"contentType": "text/calendar",
-"isInline": false,
-"lastModifiedDateTime": "2024-02-05T09:33:46Z",
-"name": "Abidjan.ics",
-"size": 573,
-"contentBytes": "QkVHSU46VkNBTEVOREFSDQpQUk9ESUQ6LS8vdHp1cmwub3JnLy9OT05TR01MIE9sc29uIDIwMjNkLy9FTg0KVkVSU0lPTjoyLjANCkJFR0lOOlZUSU1FWk9ORQ0KVFpJRDpBZnJpY2EvQWJpZGphbg0KTEFTVC1NT0RJRklFRDoyMDIzMTIyMlQyMzMzNThaDQpUWlVSTDpodHRwczovL3d3dy50enVybC5vcmcvem9uZWluZm8vQWZyaWNhL0FiaWRqYW4NClgtTElDLUxPQ0FUSU9OOkFmcmljYS9BYmlkamFuDQpYLVBST0xFUFRJQy1UWk5BTUU6TE1UDQpCRUdJTjpTVEFOREFSRA0KVFpOQU1FOkdNVA0KVFpPRkZTRVRGUk9NOi0wMDE2MDgNClRaT0ZGU0VUVE86KzAwMDANCkRUU1RBUlQ6MTkxMjAxMDFUMDAwMDAwDQpFTkQ6U1RBTkRBUkQNCkVORDpWVElNRVpPTkUNCkVORDpWQ0FMRU5EQVINCg=="
-}
-],
-"body": {
-"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none;\"> P {margin-top:0;margin-bottom:0;} </style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family: Aptos, Aptos_EmbeddedFont, Aptos_MSFontService, Calibri, Helvetica, sans-serif; font-size: 12pt; color: rgb(0, 0, 0);\">I just realized the purpose of my life is to be a test case. Good to know.<br></div></body></html>",
-"contentType": "html"
-},
-"bodyPreview": "I just realized the purpose of my life is to be a test case. Good to know.",
-"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAFEnxDqYmbJEm8d2l3qfS6A=",
-"conversationIndex": "AQHaWBYiUSfEOpiZskSbx3aXep9LoA==",
-"flag": {
-"flagStatus": "notFlagged"
-},
-"from": {
-"emailAddress": {
-"address": "JohannaL@10rqc2.onmicrosoft.com",
-"name": "Johanna Lorenz"
-}
-},
-"hasAttachments": true,
-"importance": "normal",
-"internetMessageId": "<SJ0PR04MB7294108E381BCCE5C207B6DEBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
-"isDeliveryReceiptRequested": false,
-"isDraft": false,
-"isRead": true,
-"isReadReceiptRequested": false,
-"receivedDateTime": "2024-02-05T09:33:12Z",
-"sender": {
-"emailAddress": {
-"address": "JohannaL@10rqc2.onmicrosoft.com",
-"name": "Johanna Lorenz"
-}
-},
-"sentDateTime": "2024-02-05T09:33:11Z",
-"subject": "Purpose of life",
-"toRecipients": [
-{
-"emailAddress": {
-"address": "PradeepG@10rqc2.onmicrosoft.com",
-"name": "Pradeep Gupta"
-}
-}
-],
-"webLink": "https://outlook.office365.com/owa/?AttachmentItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAABEgAQAEUyH0VS3HJBgHDlZdWZl02%3D&exvsurl=1&viewmodel=ItemAttachment"
-}
-},
-{
-"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAABEgAQAEUyH0VS3HJBgHDlZdWZl03=",
-"@odata.type": "#microsoft.graph.itemAttachment",
-"item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')",
-"item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')/$ref",
-"isInline": false,
-"lastModifiedDateTime": "2024-02-05T09:33:46Z",
-"name": "Purpose of life part 3",
-"size": 11840,
-"item": {
-"id": "",
-"@odata.type": "#microsoft.graph.message",
-"createdDateTime": "2024-02-05T09:33:24Z",
-"lastModifiedDateTime": "2024-02-05T09:33:46Z",
-"attachments": [
-{
-"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAACEgAQAEUyH0VS3HJBgHDlZdWZl0kSABAAjBhd4-oQaUS969pTkS-gzA==",
-"@odata.type": "#microsoft.graph.fileAttachment",
-"@odata.mediaContentType": "text/calendar",
-"contentType": "text/calendar",
-"isInline": false,
-"lastModifiedDateTime": "2024-02-05T09:33:46Z",
-"name": "Abidjan.ics",
-"size": 573,
-"contentBytes": "QkVHSU46VkNBTEVOREFSDQpQUk9ESUQ6LS8vdHp1cmwub3JnLy9OT05TR01MIE9sc29uIDIwMjNkLy9FTg0KVkVSU0lPTjoyLjANCkJFR0lOOlZUSU1FWk9ORQ0KVFpJRDpBZnJpY2EvQWJpZGphbg0KTEFTVC1NT0RJRklFRDoyMDIzMTIyMlQyMzMzNThaDQpUWlVSTDpodHRwczovL3d3dy50enVybC5vcmcvem9uZWluZm8vQWZyaWNhL0FiaWRqYW4NClgtTElDLUxPQ0FUSU9OOkFmcmljYS9BYmlkamFuDQpYLVBST0xFUFRJQy1UWk5BTUU6TE1UDQpCRUdJTjpTVEFOREFSRA0KVFpOQU1FOkdNVA0KVFpPRkZTRVRGUk9NOi0wMDE2MDgNClRaT0ZGU0VUVE86KzAwMDANCkRUU1RBUlQ6MTkxMjAxMDFUMDAwMDAwDQpFTkQ6U1RBTkRBUkQNCkVORDpWVElNRVpPTkUNCkVORDpWQ0FMRU5EQVINCg=="
-}
-],
-"body": {
-"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none;\"> P {margin-top:0;margin-bottom:0;} </style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family: Aptos, Aptos_EmbeddedFont, Aptos_MSFontService, Calibri, Helvetica, sans-serif; font-size: 12pt; color: rgb(0, 0, 0);\">I just realized the purpose of my life is to be a test case. Good to know.<br></div></body></html>",
-"contentType": "html"
-},
-"bodyPreview": "I just realized the purpose of my life is to be a test case. Good to know.",
-"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAFEnxDqYmbJEm8d2l3qfS6A=",
-"conversationIndex": "AQHaWBYiUSfEOpiZskSbx3aXep9LoA==",
-"flag": {
-"flagStatus": "notFlagged"
-},
-"from": {
-"emailAddress": {
-"address": "JohannaL@10rqc2.onmicrosoft.com",
-"name": "Johanna Lorenz"
-}
-},
-"hasAttachments": true,
-"importance": "normal",
-"internetMessageId": "<SJ0PR04MB7294108E381BCCE5C207B6DEBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
-"isDeliveryReceiptRequested": false,
-"isDraft": false,
-"isRead": true,
-"isReadReceiptRequested": false,
-"receivedDateTime": "2024-02-05T09:33:12Z",
-"sender": {
-"emailAddress": {
-"address": "JohannaL@10rqc2.onmicrosoft.com",
-"name": "Johanna Lorenz"
-}
-},
-"sentDateTime": "2024-02-05T09:33:11Z",
-"subject": "Purpose of life",
-"toRecipients": [
-{
-"emailAddress": {
-"address": "PradeepG@10rqc2.onmicrosoft.com",
-"name": "Pradeep Gupta"
-}
-}
-],
-"webLink": "https://outlook.office365.com/owa/?AttachmentItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAABEgAQAEUyH0VS3HJBgHDlZdWZl03%3D&exvsurl=1&viewmodel=ItemAttachment"
-}
-}
-],
-"bccRecipients": [],
-"body": {
-"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none\">\r\n<!--\r\np\r\n\t{margin-top:0;\r\n\tmargin-bottom:0}\r\n-->\r\n</style></head><body dir=\"ltr\"><div><span class=\"elementToProof\" style=\"font-family:Aptos,Aptos_EmbeddedFont,Aptos_MSFontService,Calibri,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0)\">Now, this is what we call nesting in this business.<br></span></div></body></html>",
-"contentType": "html"
-},
-"bodyPreview": "Now, this is what we call nesting in this business.",
-"ccRecipients": [],
-"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAIv2-4RHwDhJhlqBV5PTE3Y=",
-"conversationIndex": "AQHaWBZdi/b/hEfAOEmGWoFXk9MTdg==",
-"flag": {
-"flagStatus": "notFlagged"
-},
-"from": {
-"emailAddress": {
-"address": "JohannaL@10rqc2.onmicrosoft.com",
-"name": "Johanna Lorenz"
-}
-},
-"hasAttachments": true,
-"importance": "normal",
-"inferenceClassification": "focused",
-"internetMessageId": "<SJ0PR04MB729409CE8C191E01151C110DBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
-"isDeliveryReceiptRequested": false,
-"isDraft": false,
-"isRead": true,
-"isReadReceiptRequested": false,
-"parentFolderId": "AQMkAGJiAGZhNjRlOC00OGI5LTQyNTItYjFkMy00NTJjMTgyZGZkMjQALgAAA0V2IruiJ9ZFvgAO6qBJFycBAEEUODQkmTtNjV_awmuHu00AAAIBCQAAAA==",
-"receivedDateTime": "2024-02-05T09:33:46Z",
-"replyTo": [],
-"sender": {
-"emailAddress": {
-"address": "JohannaL@10rqc2.onmicrosoft.com",
-"name": "Johanna Lorenz"
-}
-},
-"sentDateTime": "2024-02-05T09:33:45Z",
-"subject": "Fw: Purpose of life",
-"toRecipients": [
-{
-"emailAddress": {
-"address": "PradeepG@10rqc2.onmicrosoft.com",
-"name": "Pradeep Gupta"
-}
-}
-],
-"webLink": "https://outlook.office365.com/owa/?ItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAA%3D&exvsurl=1&viewmodel=ReadMessageItem"
-}
@@ -4,12 +4,3 @@ import _ "embed"
 
 //go:embed email-with-attachments.json
 var EmailWithAttachments string
-
-//go:embed email-with-event-info.json
-var EmailWithEventInfo string
-
-//go:embed email-with-event-object.json
-var EmailWithEventObject string
-
-//go:embed email-within-email.json
-var EmailWithinEmail string
@@ -166,20 +166,3 @@ var GraphTimeZoneToTZ = map[string]string{
 "Yukon Standard Time": "America/Whitehorse",
 "tzone://Microsoft/Utc": "Etc/UTC",
 }
-
-// Map from alternatives to the canonical time zone name.
-// These mappings are currently generated by manually going over the
-// values in GraphTimeZoneToTZ that are not available in the tzdb.
-var CanonicalTimeZoneMap = map[string]string{
-"Africa/Asmara": "Africa/Asmera",
-"Asia/Calcutta": "Asia/Kolkata",
-"Asia/Rangoon": "Asia/Yangon",
-"Asia/Saigon": "Asia/Ho_Chi_Minh",
-"Europe/Kiev": "Europe/Kyiv",
-"Europe/Warsaw": "Europe/Warszawa",
-"America/Buenos_Aires": "America/Argentina/Buenos_Aires",
-"America/Godthab": "America/Nuuk",
-// NOTE: "Atlantic/Raykjavik" missing in tzdb but is in MS list
-
-"Etc/UTC": "UTC", // simplifying the time zone name
-}
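
The two tables above compose as a chained lookup: a Graph/Windows zone name is first mapped to a tzdb name, then normalized to its canonical spelling, and only then handed to time.LoadLocation. A minimal, self-contained sketch of that composition; the map contents here are illustrative subsets, not the full tables from the diff:

// Sketch of the chained timezone-name normalization.
package main

import (
	"fmt"
	"time"
)

// Illustrative subset of the Graph/Windows -> tzdb table.
var graphTimeZoneToTZ = map[string]string{
	"India Standard Time": "Asia/Calcutta",
}

// Illustrative subset of the alternative -> canonical table.
var canonicalTimeZoneMap = map[string]string{
	"Asia/Calcutta": "Asia/Kolkata",
}

// normalize resolves a Graph/Windows zone name to a tzdb location.
func normalize(tz string) (*time.Location, error) {
	if iana, ok := graphTimeZoneToTZ[tz]; ok {
		tz = iana
	}
	if canon, ok := canonicalTimeZoneMap[tz]; ok {
		tz = canon
	}
	return time.LoadLocation(tz)
}

func main() {
	loc, err := normalize("India Standard Time")
	fmt.Println(loc, err) // Asia/Kolkata <nil>
}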
@@ -5,7 +5,6 @@ import (
 "encoding/base64"
 "encoding/json"
 "fmt"
-"net/mail"
 "strings"
 "time"
 "unicode"
@@ -17,9 +16,7 @@ import (
 
 "github.com/alcionai/corso/src/internal/common/ptr"
 "github.com/alcionai/corso/src/internal/common/str"
-"github.com/alcionai/corso/src/internal/converters/ics/tzdata"
 "github.com/alcionai/corso/src/pkg/dttm"
-"github.com/alcionai/corso/src/pkg/logger"
 "github.com/alcionai/corso/src/pkg/services/m365/api"
 )
 
@@ -33,9 +30,8 @@ import (
 // TODO locations: https://github.com/alcionai/corso/issues/5003
 
 const (
-ICalDateTimeFormat = "20060102T150405"
-ICalDateTimeFormatUTC = "20060102T150405Z"
-ICalDateFormat = "20060102"
+iCalDateTimeFormat = "20060102T150405Z"
+iCalDateFormat = "20060102"
 )
 
 func keyValues(key, value string) *ics.KeyValues {
@@ -75,12 +71,7 @@ func getLocationString(location models.Locationable) string {
 return strings.Join(nonEmpty, ", ")
 }
 
-func GetUTCTime(ts, tz string) (time.Time, error) {
-var (
-loc *time.Location
-err error
-)
-
+func getUTCTime(ts, tz string) (time.Time, error) {
 // Timezone is always converted to UTC. This is the easiest way to
 // ensure we have the correct time as the .ics file expects the same
 // timezone everywhere according to the spec.
@@ -89,18 +80,15 @@ func GetUTCTime(ts, tz string) {
 return time.Time{}, clues.Wrap(err, "parsing time").With("given_time_string", ts)
 }
 
-loc, err = time.LoadLocation(tz)
-if err != nil {
-timezone, ok := GraphTimeZoneToTZ[tz]
-if !ok {
-return it, clues.New("unknown timezone").With("timezone", tz)
-}
-
-loc, err = time.LoadLocation(timezone)
-if err != nil {
-return time.Time{}, clues.Wrap(err, "loading timezone").
-With("converted_timezone", timezone)
-}
-}
+timezone, ok := GraphTimeZoneToTZ[tz]
+if !ok {
+return it, clues.New("unknown timezone").With("timezone", tz)
+}
+
+loc, err := time.LoadLocation(timezone)
+if err != nil {
+return time.Time{}, clues.Wrap(err, "loading timezone").
+With("converted_timezone", timezone)
+}
 
 // embed timezone
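
Both sides of this hunk implement the same contract: parse Graph's wall-clock timestamp in its named zone, then shift the result to UTC. A standalone sketch of that contract, assuming the zone name has already been normalized to a tzdb name; the layout string is an assumption standing in for the dttm formats the real code uses:

// Sketch: Graph wall-clock time + zone name -> UTC instant.
package main

import (
	"fmt"
	"time"
)

func getUTCTime(ts, ianaZone string) (time.Time, error) {
	loc, err := time.LoadLocation(ianaZone)
	if err != nil {
		return time.Time{}, err
	}
	// Graph emits wall-clock times like "2024-01-22T02:30:00.0000000",
	// with no offset attached; the zone name supplies the offset.
	t, err := time.ParseInLocation("2006-01-02T15:04:05.0000000", ts, loc)
	if err != nil {
		return time.Time{}, err
	}
	return t.UTC(), nil
}

func main() {
	t, err := getUTCTime("2024-01-22T02:30:00.0000000", "Asia/Kolkata")
	fmt.Println(t, err) // 2024-01-21 21:00:00 +0000 UTC <nil>
}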
@@ -175,17 +163,6 @@ func getRecurrencePattern(
 recurComponents = append(recurComponents, "BYDAY="+prefix+strings.Join(dowComponents, ","))
 }
-
-// This is necessary to compute when weekly events recur
-fdow := pat.GetFirstDayOfWeek()
-if fdow != nil {
-icalday, ok := GraphToICalDOW[fdow.String()]
-if !ok {
-return "", clues.NewWC(ctx, "unknown first day of week").With("day", fdow)
-}
-
-recurComponents = append(recurComponents, "WKST="+icalday)
-}
 
 rrange := recurrence.GetRangeEscaped()
 if rrange != nil {
 switch ptr.Val(rrange.GetTypeEscaped()) {
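
The removed branch mapped Graph's firstDayOfWeek onto RFC 5545's WKST component while the RRULE string was being assembled, which is why the WKST=SU expectations disappear from the tests further down. A minimal sketch of that assembly; graphToICalDOW is an illustrative stand-in for the GraphToICalDOW map the code references:

// Sketch of RRULE assembly including the WKST component.
package main

import (
	"fmt"
	"strings"
)

// Illustrative stand-in for the Graph day-of-week -> iCal mapping.
var graphToICalDOW = map[string]string{
	"sunday": "SU", "monday": "MO", "tuesday": "TU", "wednesday": "WE",
	"thursday": "TH", "friday": "FR", "saturday": "SA",
}

func buildRRule(freq string, interval int, firstDayOfWeek string) (string, error) {
	parts := []string{"FREQ=" + freq, fmt.Sprintf("INTERVAL=%d", interval)}

	if firstDayOfWeek != "" {
		day, ok := graphToICalDOW[firstDayOfWeek]
		if !ok {
			return "", fmt.Errorf("unknown first day of week %q", firstDayOfWeek)
		}
		// WKST pins which day a week starts on, needed for weekly recurrences.
		parts = append(parts, "WKST="+day)
	}

	return strings.Join(parts, ";"), nil
}

func main() {
	r, _ := buildRRule("WEEKLY", 1, "sunday")
	fmt.Println(r) // FREQ=WEEKLY;INTERVAL=1;WKST=SU
}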
@@ -202,14 +179,14 @@ func getRecurrencePattern(
 // the resolution we need
 parsedTime = parsedTime.Add(24*time.Hour - 1*time.Second)
 
-endTime, err := GetUTCTime(
+endTime, err := getUTCTime(
 parsedTime.Format(string(dttm.M365DateTimeTimeZone)),
 ptr.Val(rrange.GetRecurrenceTimeZone()))
 if err != nil {
 return "", clues.WrapWC(ctx, err, "parsing end time")
 }
 
-recurComponents = append(recurComponents, "UNTIL="+endTime.Format(ICalDateTimeFormatUTC))
+recurComponents = append(recurComponents, "UNTIL="+endTime.Format(iCalDateTimeFormat))
 }
 case models.NOEND_RECURRENCERANGETYPE:
 // Nothing to do
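
The UNTIL computation here starts from an end *date*, so the code pushes it to the last second of that day before converting to UTC and formatting it. A small sketch of the arithmetic, inlining the 20060102T150405Z layout that the renamed constant holds:

// Sketch: end date -> UNTIL value at one-second resolution.
package main

import (
	"fmt"
	"time"
)

func main() {
	// Graph's recurrence range supplies a bare end date.
	endDate := time.Date(2021, 1, 1, 0, 0, 0, 0, time.UTC)

	// Push to the end of the day, as the "resolution we need" comment describes.
	parsed := endDate.Add(24*time.Hour - 1*time.Second)

	fmt.Println("UNTIL=" + parsed.UTC().Format("20060102T150405Z"))
	// UNTIL=20210101T235959Z
}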
@@ -231,18 +208,9 @@ func FromJSON(ctx context.Context, body []byte) (string, error) {
 With("body_len", len(body))
 }
 
-return FromEventable(ctx, event)
-}
-
-func FromEventable(ctx context.Context, event models.Eventable) (string, error) {
 cal := ics.NewCalendar()
 cal.SetProductId("-//Alcion//Corso") // Does this have to be customizable?
 
-err := addTimeZoneComponents(ctx, cal, event)
-if err != nil {
-return "", clues.Wrap(err, "adding timezone components")
-}
-
 id := ptr.Val(event.GetId())
 iCalEvent := cal.AddEvent(id)
 
@@ -277,7 +245,7 @@ func FromEventable(ctx context.Context, event models.Eventable) (string, error)
 exICalEvent := cal.AddEvent(id)
 start := exception.GetOriginalStart() // will always be in UTC
 
-exICalEvent.AddProperty(ics.ComponentProperty(ics.PropertyRecurrenceId), start.Format(ICalDateTimeFormatUTC))
+exICalEvent.AddProperty(ics.ComponentProperty(ics.PropertyRecurrenceId), start.Format(iCalDateTimeFormat))
 
 err = updateEventProperties(ctx, exception, exICalEvent)
 if err != nil {
@@ -288,91 +256,6 @@ func FromEventable(ctx context.Context, event models.Eventable) (string, error)
 return cal.Serialize(), nil
 }
-
-func getTZDataKeyValues(ctx context.Context, timezone string) (map[string]string, error) {
-template, ok := tzdata.TZData[timezone]
-if !ok {
-return nil, clues.NewWC(ctx, "timezone not found in tz database").
-With("timezone", timezone)
-}
-
-keyValues := map[string]string{}
-
-for _, line := range strings.Split(template, "\n") {
-splits := strings.SplitN(line, ":", 2)
-if len(splits) != 2 {
-return nil, clues.NewWC(ctx, "invalid tzdata line").
-With("line", line).
-With("timezone", timezone)
-}
-
-keyValues[splits[0]] = splits[1]
-}
-
-return keyValues, nil
-}
-
-func addTimeZoneComponents(ctx context.Context, cal *ics.Calendar, event models.Eventable) error {
-// Handling of timezones gets a bit tricky when we have to deal with
-// relative recurrence. The issue comes up when we set a recurrence
-// to be something like "repeat every 3rd Tuesday". Tuesday in UTC
-// and in IST will be different and so we cannot just always use UTC.
-//
-// The way this is solved is by using the timezone in the
-// recurrence for the start and end timezones, as we have to use UTC
-// for UNTIL (mostly).
-// https://www.rfc-editor.org/rfc/rfc5545#section-3.3.10
-timezone, err := getRecurrenceTimezone(ctx, event)
-if err != nil {
-return clues.Stack(err)
-}
-
-if timezone != time.UTC {
-kvs, err := getTZDataKeyValues(ctx, timezone.String())
-if err != nil {
-return clues.Stack(err)
-}
-
-tz := cal.AddTimezone(timezone.String())
-
-for k, v := range kvs {
-tz.AddProperty(ics.ComponentProperty(k), v)
-}
-}
-
-return nil
-}
-
-// getRecurrenceTimezone gets the timezone specified by the recurrence
-// in the calendar. It does a normalization pass where we always convert
-// the timezone to the value in the tzdb. If we don't have a recurrence
-// timezone, we don't have to use a specific timezone in the export and
-// it is safe to return UTC from this method.
-func getRecurrenceTimezone(ctx context.Context, event models.Eventable) (*time.Location, error) {
-if event.GetRecurrence() != nil {
-timezone := ptr.Val(event.GetRecurrence().GetRangeEscaped().GetRecurrenceTimeZone())
-
-ctz, ok := GraphTimeZoneToTZ[timezone]
-if ok {
-timezone = ctz
-}
-
-cannon, ok := CanonicalTimeZoneMap[timezone]
-if ok {
-timezone = cannon
-}
-
-loc, err := time.LoadLocation(timezone)
-if err != nil {
-return nil, clues.WrapWC(ctx, err, "unknown timezone").
-With("timezone", timezone)
-}
-
-return loc, nil
-}
-
-return time.UTC, nil
-}
 
 func isASCII(s string) bool {
 for _, c := range s {
 if c > unicode.MaxASCII {
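
The removed getTZDataKeyValues split each line of an embedded VTIMEZONE template on its first colon into an iCalendar property name and value. A sketch of that parsing over a trimmed, illustrative template (the real templates live in the tzdata package the diff drops from the imports):

// Sketch of tzdata template parsing into property key/value pairs.
package main

import (
	"fmt"
	"strings"
)

func tzDataKeyValues(template string) (map[string]string, error) {
	kvs := map[string]string{}

	for _, line := range strings.Split(template, "\n") {
		// Split on the first ":" only; property values may contain colons.
		splits := strings.SplitN(line, ":", 2)
		if len(splits) != 2 {
			return nil, fmt.Errorf("invalid tzdata line %q", line)
		}

		kvs[splits[0]] = splits[1]
	}

	return kvs, nil
}

func main() {
	kvs, err := tzDataKeyValues("TZID:Asia/Kolkata\nTZURL:https://www.tzurl.org/zoneinfo/Asia/Kolkata")
	fmt.Println(kvs, err)
	// map[TZID:Asia/Kolkata TZURL:https://www.tzurl.org/zoneinfo/Asia/Kolkata] <nil>
}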
@@ -383,12 +266,6 @@ func isASCII(s string) bool {
 return true
 }
-
-// Checks if a given string is a valid email address
-func isEmail(em string) bool {
-_, err := mail.ParseAddress(em)
-return err == nil
-}
 
 func updateEventProperties(ctx context.Context, event models.Eventable, iCalEvent *ics.VEvent) error {
 // CREATED - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.7.1
 created := event.GetCreatedDateTime()
@@ -402,23 +279,22 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
 iCalEvent.SetModifiedAt(ptr.Val(modified))
 }
 
-timezone, err := getRecurrenceTimezone(ctx, event)
-if err != nil {
-return err
-}
-
 // DTSTART - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.2.4
 allDay := ptr.Val(event.GetIsAllDay())
 startString := event.GetStart().GetDateTime()
 startTimezone := event.GetStart().GetTimeZone()
 
 if startString != nil {
-start, err := GetUTCTime(ptr.Val(startString), ptr.Val(startTimezone))
+start, err := getUTCTime(ptr.Val(startString), ptr.Val(startTimezone))
 if err != nil {
 return clues.WrapWC(ctx, err, "parsing start time")
 }
 
-addTime(iCalEvent, ics.ComponentPropertyDtStart, start, allDay, timezone)
+if allDay {
+iCalEvent.SetStartAt(start, ics.WithValue(string(ics.ValueDataTypeDate)))
+} else {
+iCalEvent.SetStartAt(start)
+}
 }
 
 // DTEND - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.2.2
@@ -426,12 +302,16 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
 endTimezone := event.GetEnd().GetTimeZone()
 
 if endString != nil {
-end, err := GetUTCTime(ptr.Val(endString), ptr.Val(endTimezone))
+end, err := getUTCTime(ptr.Val(endString), ptr.Val(endTimezone))
 if err != nil {
 return clues.WrapWC(ctx, err, "parsing end time")
 }
 
-addTime(iCalEvent, ics.ComponentPropertyDtEnd, end, allDay, timezone)
+if allDay {
+iCalEvent.SetEndAt(end, ics.WithValue(string(ics.ValueDataTypeDate)))
+} else {
+iCalEvent.SetEndAt(end)
+}
 }
 
 recurrence := event.GetRecurrence()
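
On the right-hand side of these two hunks, all-day events get DTSTART/DTEND written as VALUE=DATE properties while timed events keep full date-times. A sketch using only the golang-ical calls the diff itself shows, assuming the github.com/arran4/golang-ical dependency from the test file's import block; the id and times are illustrative:

// Sketch: all-day vs. timed DTSTART in golang-ical.
package main

import (
	"fmt"
	"time"

	ics "github.com/arran4/golang-ical"
)

func main() {
	cal := ics.NewCalendar()
	event := cal.AddEvent("example-id") // hypothetical event id

	start := time.Date(2024, 1, 22, 2, 30, 0, 0, time.UTC)

	allDay := true
	if allDay {
		// All-day events carry a VALUE=DATE parameter and no time portion.
		event.SetStartAt(start, ics.WithValue(string(ics.ValueDataTypeDate)))
	} else {
		event.SetStartAt(start)
	}

	fmt.Println(cal.Serialize())
}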
@@ -446,7 +326,7 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
 
 // STATUS - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.1.11
 cancelled := event.GetIsCancelled()
-if cancelled != nil && ptr.Val(cancelled) {
+if cancelled != nil {
 iCalEvent.SetStatus(ics.ObjectStatusCancelled)
 }
 
@@ -484,14 +364,7 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
 desc := replacer.Replace(description)
 iCalEvent.AddProperty("X-ALT-DESC", desc, ics.WithFmtType("text/html"))
 } else {
-// Disable auto wrap, causes huge memory spikes
-// https://github.com/jaytaylor/html2text/issues/48
-prettyTablesOptions := html2text.NewPrettyTablesOptions()
-prettyTablesOptions.AutoWrapText = false
-
-stripped, err := html2text.FromString(
-description,
-html2text.Options{PrettyTables: true, PrettyTablesOptions: prettyTablesOptions})
+stripped, err := html2text.FromString(description, html2text.Options{PrettyTables: true})
 if err != nil {
 return clues.Wrap(err, "converting html to text").
 With("description_length", len(description))
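
The left-hand side's removed option block disabled html2text's table auto-wrap, which the linked issue reports as a source of large memory spikes. A sketch of that configuration, using only calls that appear in the hunk; the input string is illustrative:

// Sketch: html2text with table auto-wrap disabled.
package main

import (
	"fmt"

	"github.com/jaytaylor/html2text"
)

func main() {
	// Pretty tables with auto-wrap turned off, per the removed lines above.
	prettyTablesOptions := html2text.NewPrettyTablesOptions()
	prettyTablesOptions.AutoWrapText = false

	stripped, err := html2text.FromString(
		"<p>Agenda</p>",
		html2text.Options{PrettyTables: true, PrettyTablesOptions: prettyTablesOptions})

	fmt.Println(stripped, err) // Agenda <nil>
}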
@@ -595,21 +468,8 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
 }
 }
 
-// It is possible that we get non-email items like the below
-// one, which is an internal representation of the user in the
-// Exchange system. While we can technically output this as an
-// attendee, it is not useful, and downstream tools (such as
-// ones used to build PSTs) can choke on it.
-// /o=ExchangeLabs/ou=ExchangeAdministrative Group(FY...LT)/cn=Recipients/cn=883...4a-John Doe
 addr := ptr.Val(attendee.GetEmailAddress().GetAddress())
-if isEmail(addr) {
-iCalEvent.AddAttendee(addr, props...)
-} else {
-logger.Ctx(ctx).
-With("attendee_email", addr).
-With("attendee_name", name).
-Info("skipping non email attendee from ics export")
-}
+iCalEvent.AddAttendee(addr, props...)
 }
 
 // LOCATION - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.1.7
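
The removed guard used net/mail to keep Exchange-internal legacyDN strings, like the /o=ExchangeLabs/... form quoted above, out of ATTENDEE properties. A sketch of the check; the second input mirrors the shape of the quoted legacyDN but is illustrative:

// Sketch of the removed isEmail guard.
package main

import (
	"fmt"
	"net/mail"
)

// isEmail reports whether em parses as an RFC 5322 address.
func isEmail(em string) bool {
	_, err := mail.ParseAddress(em)
	return err == nil
}

func main() {
	fmt.Println(isEmail("PradeepG@10rqc2.onmicrosoft.com")) // true
	// legacyDN strings have no addr-spec, so parsing fails.
	fmt.Println(isEmail("/o=ExchangeLabs/ou=Exchange Administrative Group/cn=Recipients/cn=883-John Doe")) // false
}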
@@ -655,6 +515,7 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
 }
 
 // ATTACH - https://www.rfc-editor.org/rfc/rfc5545#section-3.8.1.1
+// TODO Handle different attachment types (file, item and reference)
 attachments := event.GetAttachments()
 for _, attachment := range attachments {
 props := []ics.PropertyParameter{}
@@ -675,17 +536,6 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
 return clues.WrapWC(ctx, err, "getting attachment content")
 }
-
-if cb == nil {
-// TODO(meain): Handle non file attachments
-// https://github.com/alcionai/corso/issues/4772
-logger.Ctx(ctx).
-With("attachment_id", ptr.Val(attachment.GetId()),
-"attachment_type", ptr.Val(attachment.GetOdataType())).
-Info("no contentBytes for attachment")
-
-continue
-}
 
 content, ok := cb.([]uint8)
 if !ok {
 return clues.NewWC(ctx, "getting attachment content string").
@@ -727,7 +577,7 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
 
 dateStrings := []string{}
 for _, date := range cancelledDates {
-dateStrings = append(dateStrings, date.Format(ICalDateFormat))
+dateStrings = append(dateStrings, date.Format(iCalDateFormat))
 }
 
 if len(dateStrings) > 0 {
@@ -737,26 +587,6 @@ func updateEventProperties(ctx context.Context, event models.Eventable, iCalEven
 return nil
 }
-
-func addTime(iCalEvent *ics.VEvent, prop ics.ComponentProperty, tm time.Time, allDay bool, tzLoc *time.Location) {
-if allDay {
-if tzLoc == time.UTC {
-iCalEvent.SetProperty(prop, tm.Format(ICalDateFormat), ics.WithValue(string(ics.ValueDataTypeDate)))
-} else {
-iCalEvent.SetProperty(
-prop,
-tm.In(tzLoc).Format(ICalDateFormat),
-ics.WithValue(string(ics.ValueDataTypeDate)),
-keyValues("TZID", tzLoc.String()))
-}
-} else {
-if tzLoc == time.UTC {
-iCalEvent.SetProperty(prop, tm.Format(ICalDateTimeFormatUTC))
-} else {
-iCalEvent.SetProperty(prop, tm.In(tzLoc).Format(ICalDateTimeFormat), keyValues("TZID", tzLoc.String()))
-}
-}
-}
-
 func getCancelledDates(ctx context.Context, event models.Eventable) ([]time.Time, error) {
 dateStrings, err := api.GetCancelledEventDateStrings(event)
 if err != nil {
@@ -768,7 +598,7 @@ func getCancelledDates(ctx context.Context, event models.Eventable) ([]time.Time
 
 for _, ds := range dateStrings {
 // the data just contains date and no time which seems to work
-start, err := GetUTCTime(ds, tz)
+start, err := getUTCTime(ds, tz)
 if err != nil {
 return nil, clues.WrapWC(ctx, err, "parsing cancelled event date")
 }
@@ -13,7 +13,6 @@ import (
 "testing"
 "time"
 
-ics "github.com/arran4/golang-ical"
 "github.com/microsoft/kiota-abstractions-go/serialization"
 kjson "github.com/microsoft/kiota-serialization-json-go"
 "github.com/microsoftgraph/msgraph-sdk-go/models"
@@ -22,7 +21,6 @@ import (
 "github.com/stretchr/testify/suite"
 
 "github.com/alcionai/corso/src/internal/common/ptr"
-"github.com/alcionai/corso/src/internal/converters/ics/tzdata"
 "github.com/alcionai/corso/src/internal/tester"
 )
 
@@ -34,7 +32,7 @@ func TestICSUnitSuite(t *testing.T) {
 suite.Run(t, &ICSUnitSuite{Suite: tester.NewUnitSuite(t)})
 }
 
-func (s *ICSUnitSuite) TestGetLocationString() {
+func (suite *ICSUnitSuite) TestGetLocationString() {
 table := []struct {
 name string
 loc func() models.Locationable
@@ -112,13 +110,13 @@ func (s *ICSUnitSuite) TestGetLocationString() {
 }
 
 for _, tt := range table {
-s.Run(tt.name, func() {
-assert.Equal(s.T(), tt.expect, getLocationString(tt.loc()))
+suite.Run(tt.name, func() {
+assert.Equal(suite.T(), tt.expect, getLocationString(tt.loc()))
 })
 }
 }
 
-func (s *ICSUnitSuite) TestGetUTCTime() {
+func (suite *ICSUnitSuite) TestGetUTCTime() {
 table := []struct {
 name string
 timestamp string
@@ -140,13 +138,6 @@ func (s *ICSUnitSuite) TestGetUTCTime() {
 time: time.Date(2021, 1, 1, 6, 30, 0, 0, time.UTC),
 errCheck: require.NoError,
 },
-{
-name: "timezone from TZ database",
-timestamp: "2021-01-01T12:00:00Z",
-timezone: "America/Los_Angeles",
-time: time.Date(2021, 1, 1, 20, 0, 0, 0, time.UTC),
-errCheck: require.NoError,
-},
 {
 name: "invalid time",
 timestamp: "invalid",
@@ -164,18 +155,18 @@ func (s *ICSUnitSuite) TestGetUTCTime() {
 }
 
 for _, tt := range table {
-s.Run(tt.name, func() {
-t, err := GetUTCTime(tt.timestamp, tt.timezone)
-tt.errCheck(s.T(), err)
+suite.Run(tt.name, func() {
+t, err := getUTCTime(tt.timestamp, tt.timezone)
+tt.errCheck(suite.T(), err)
 
 if !tt.time.Equal(time.Time{}) {
-assert.Equal(s.T(), tt.time, t)
+assert.Equal(suite.T(), tt.time, t)
 }
 })
 }
 }
 
-func (s *ICSUnitSuite) TestGetRecurrencePattern() {
+func (suite *ICSUnitSuite) TestGetRecurrencePattern() {
 table := []struct {
 name string
 recurrence func() models.PatternedRecurrenceable
@@ -189,37 +180,16 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
 pat := models.NewRecurrencePattern()
 
 typ, err := models.ParseRecurrencePatternType("daily")
-require.NoError(s.T(), err)
+require.NoError(suite.T(), err)
 
 pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
 pat.SetInterval(ptr.To(int32(1)))
-pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
 
 rec.SetPattern(pat)
 
 return rec
 },
-expect: "FREQ=DAILY;INTERVAL=1;WKST=SU",
-errCheck: require.NoError,
-},
-{
-name: "daily different start of week",
-recurrence: func() models.PatternedRecurrenceable {
-rec := models.NewPatternedRecurrence()
-pat := models.NewRecurrencePattern()
-
-typ, err := models.ParseRecurrencePatternType("daily")
-require.NoError(s.T(), err)
-
-pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
-pat.SetInterval(ptr.To(int32(1)))
-pat.SetFirstDayOfWeek(ptr.To(models.MONDAY_DAYOFWEEK))
-
-rec.SetPattern(pat)
-
-return rec
-},
-expect: "FREQ=DAILY;INTERVAL=1;WKST=MO",
+expect: "FREQ=DAILY;INTERVAL=1",
 errCheck: require.NoError,
 },
 {
@@ -229,16 +199,15 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
 pat := models.NewRecurrencePattern()
 
 typ, err := models.ParseRecurrencePatternType("daily")
-require.NoError(s.T(), err)
+require.NoError(suite.T(), err)
 
 pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
 pat.SetInterval(ptr.To(int32(1)))
-pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
 
 rng := models.NewRecurrenceRange()
 
 rrtype, err := models.ParseRecurrenceRangeType("endDate")
-require.NoError(s.T(), err)
+require.NoError(suite.T(), err)
 
 rng.SetTypeEscaped(rrtype.(*models.RecurrenceRangeType))
 
@@ -251,7 +220,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
 
 return rec
 },
-expect: "FREQ=DAILY;INTERVAL=1;WKST=SU;UNTIL=20210101T182959Z",
+expect: "FREQ=DAILY;INTERVAL=1;UNTIL=20210101T182959Z",
 errCheck: require.NoError,
 },
 {
@@ -261,17 +230,16 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
 pat := models.NewRecurrencePattern()
 
 typ, err := models.ParseRecurrencePatternType("weekly")
-require.NoError(s.T(), err)
+require.NoError(suite.T(), err)
 
 pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
 pat.SetInterval(ptr.To(int32(1)))
-pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
 
 rec.SetPattern(pat)
 
 return rec
 },
-expect: "FREQ=WEEKLY;INTERVAL=1;WKST=SU",
+expect: "FREQ=WEEKLY;INTERVAL=1",
 errCheck: require.NoError,
 },
 {
@@ -281,16 +249,15 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
 pat := models.NewRecurrencePattern()
 
 typ, err := models.ParseRecurrencePatternType("weekly")
-require.NoError(s.T(), err)
+require.NoError(suite.T(), err)
 
 pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
 pat.SetInterval(ptr.To(int32(1)))
-pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
 
 rng := models.NewRecurrenceRange()
 
 rrtype, err := models.ParseRecurrenceRangeType("endDate")
-require.NoError(s.T(), err)
+require.NoError(suite.T(), err)
 
 rng.SetTypeEscaped(rrtype.(*models.RecurrenceRangeType))
 
@@ -303,7 +270,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
 
 return rec
 },
-expect: "FREQ=WEEKLY;INTERVAL=1;WKST=SU;UNTIL=20210101T235959Z",
+expect: "FREQ=WEEKLY;INTERVAL=1;UNTIL=20210101T235959Z",
 errCheck: require.NoError,
 },
 {
@@ -313,16 +280,15 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
 pat := models.NewRecurrencePattern()
 
 typ, err := models.ParseRecurrencePatternType("weekly")
-require.NoError(s.T(), err)
+require.NoError(suite.T(), err)
 
 pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
 pat.SetInterval(ptr.To(int32(1)))
-pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
 
 rng := models.NewRecurrenceRange()
 
 rrtype, err := models.ParseRecurrenceRangeType("numbered")
-require.NoError(s.T(), err)
+require.NoError(suite.T(), err)
 
 rng.SetTypeEscaped(rrtype.(*models.RecurrenceRangeType))
 
@@ -334,7 +300,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
 
 return rec
 },
-expect: "FREQ=WEEKLY;INTERVAL=1;WKST=SU;COUNT=10",
+expect: "FREQ=WEEKLY;INTERVAL=1;COUNT=10",
 errCheck: require.NoError,
 },
 {
@@ -344,11 +310,10 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
 pat := models.NewRecurrencePattern()
 
 typ, err := models.ParseRecurrencePatternType("weekly")
-require.NoError(s.T(), err)
+require.NoError(suite.T(), err)
 
 pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
 pat.SetInterval(ptr.To(int32(1)))
-pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
 
 days := []models.DayOfWeek{
 models.MONDAY_DAYOFWEEK,
@@ -362,7 +327,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
 
 return rec
 },
-expect: "FREQ=WEEKLY;INTERVAL=1;BYDAY=MO,WE,TH;WKST=SU",
+expect: "FREQ=WEEKLY;INTERVAL=1;BYDAY=MO,WE,TH",
 errCheck: require.NoError,
 },
 {
@@ -372,17 +337,16 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
 pat := models.NewRecurrencePattern()
 
 typ, err := models.ParseRecurrencePatternType("daily")
-require.NoError(s.T(), err)
+require.NoError(suite.T(), err)
 
 pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
 pat.SetInterval(ptr.To(int32(2)))
-pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
 
 rec.SetPattern(pat)
 
 return rec
 },
-expect: "FREQ=DAILY;INTERVAL=2;WKST=SU",
+expect: "FREQ=DAILY;INTERVAL=2",
 errCheck: require.NoError,
 },
 {
@@ -392,11 +356,10 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
 pat := models.NewRecurrencePattern()
 
 typ, err := models.ParseRecurrencePatternType("absoluteMonthly")
-require.NoError(s.T(), err)
+require.NoError(suite.T(), err)
 
 pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
 pat.SetInterval(ptr.To(int32(1)))
-pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
 
 pat.SetDayOfMonth(ptr.To(int32(5)))
 
@@ -404,7 +367,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
 
 return rec
 },
-expect: "FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=5;WKST=SU",
+expect: "FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=5",
 errCheck: require.NoError,
 },
 {
@@ -414,11 +377,10 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
 pat := models.NewRecurrencePattern()
 
 typ, err := models.ParseRecurrencePatternType("absoluteYearly")
-require.NoError(s.T(), err)
+require.NoError(suite.T(), err)
 
 pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
 pat.SetInterval(ptr.To(int32(3)))
-pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))
 
 pat.SetMonth(ptr.To(int32(8)))
 
@@ -426,7 +388,7 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
 
 return rec
 },
-expect: "FREQ=YEARLY;INTERVAL=3;BYMONTH=8;WKST=SU",
+expect: "FREQ=YEARLY;INTERVAL=3;BYMONTH=8",
 errCheck: require.NoError,
 },
 {
@@ -436,38 +398,37 @@ func (s *ICSUnitSuite) TestGetRecurrencePattern() {
 pat := models.NewRecurrencePattern()

 typ, err := models.ParseRecurrencePatternType("relativeYearly")
-require.NoError(s.T(), err)
+require.NoError(suite.T(), err)

 pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
 pat.SetInterval(ptr.To(int32(1)))
-pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))

 pat.SetMonth(ptr.To(int32(8)))
 pat.SetDaysOfWeek([]models.DayOfWeek{models.FRIDAY_DAYOFWEEK})

 wi, err := models.ParseWeekIndex("first")
-require.NoError(s.T(), err)
+require.NoError(suite.T(), err)
 pat.SetIndex(wi.(*models.WeekIndex))

 rec.SetPattern(pat)

 return rec
 },
-expect: "FREQ=YEARLY;INTERVAL=1;BYMONTH=8;BYDAY=1FR;WKST=SU",
+expect: "FREQ=YEARLY;INTERVAL=1;BYMONTH=8;BYDAY=1FR",
 errCheck: require.NoError,
 },
 // TODO(meain): could still use more tests for edge cases of time
 }

 for _, tt := range table {
-s.Run(tt.name, func() {
-ctx, flush := tester.NewContext(s.T())
+suite.Run(tt.name, func() {
+ctx, flush := tester.NewContext(suite.T())
 defer flush()

 rec, err := getRecurrencePattern(ctx, tt.recurrence())
-tt.errCheck(s.T(), err)
+tt.errCheck(suite.T(), err)

-assert.Equal(s.T(), tt.expect, rec)
+assert.Equal(suite.T(), tt.expect, rec)
 })
 }
 }
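Note: the expect strings in this table are RFC 5545 RRULEs — FREQ, INTERVAL, and optional by-rules joined with semicolons — and this branch simply stops asserting a WKST (week start) component. A minimal, self-contained sketch of that composition; buildRRule is a hypothetical illustration, not the package's actual getRecurrencePattern:

package main

import (
	"fmt"
	"sort"
	"strings"
)

// buildRRule composes an RFC 5545 recurrence rule shaped like the
// expected strings above. Hypothetical helper, for illustration only.
func buildRRule(freq string, interval int, byRules map[string]string) string {
	parts := []string{
		fmt.Sprintf("FREQ=%s", freq),
		fmt.Sprintf("INTERVAL=%d", interval),
	}

	// Sort by-rule keys so the output is deterministic and test-friendly.
	keys := make([]string, 0, len(byRules))
	for k := range byRules {
		keys = append(keys, k)
	}

	sort.Strings(keys)

	for _, k := range keys {
		parts = append(parts, k+"="+byRules[k])
	}

	return strings.Join(parts, ";")
}

func main() {
	// Mirrors the "absoluteMonthly" case above: monthly on day 5.
	fmt.Println(buildRRule("MONTHLY", 1, map[string]string{"BYMONTHDAY": "5"}))
	// Output: FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=5
}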
@@ -492,8 +453,8 @@ func baseEvent() *models.Event {
 return e
 }

-func (s *ICSUnitSuite) TestEventConversion() {
-t := s.T()
+func (suite *ICSUnitSuite) TestEventConversion() {
+t := suite.T()

 table := []struct {
 name string
@@ -578,19 +539,14 @@ func (s *ICSUnitSuite) TestEventConversion() {

 rec := models.NewPatternedRecurrence()
 pat := models.NewRecurrencePattern()
-rng := models.NewRecurrenceRange()

 typ, err := models.ParseRecurrencePatternType("daily")
 require.NoError(t, err)

 pat.SetTypeEscaped(typ.(*models.RecurrencePatternType))
 pat.SetInterval(ptr.To(int32(1)))
-pat.SetFirstDayOfWeek(ptr.To(models.SUNDAY_DAYOFWEEK))

-rng.SetRecurrenceTimeZone(ptr.To("UTC"))
-
 rec.SetPattern(pat)
-rec.SetRangeEscaped(rng)

 e.SetRecurrence(rec)

@@ -613,19 +569,6 @@ func (s *ICSUnitSuite) TestEventConversion() {
 assert.Contains(t, out, "STATUS:CANCELLED", "cancelled status")
 },
 },
-{
-name: "not cancelled event",
-event: func() *models.Event {
-e := baseEvent()
-
-e.SetIsCancelled(ptr.To(false))
-
-return e
-},
-check: func(out string) {
-assert.NotContains(t, out, "STATUS:CANCELLED", "cancelled status")
-},
-},
 {
 name: "text body",
 event: func() *models.Event {
@@ -867,8 +810,8 @@ func (s *ICSUnitSuite) TestEventConversion() {
 }

 for _, tt := range table {
-s.Run(tt.name, func() {
-t := s.T()
+suite.Run(tt.name, func() {
+t := suite.T()

 ctx, flush := tester.NewContext(t)
 defer flush()
@@ -918,8 +861,8 @@ func checkAttendee(t *testing.T, out, check, msg string) {
 assert.ElementsMatch(t, as, bs, fmt.Sprintf("fields %s", msg))
 }

-func (s *ICSUnitSuite) TestAttendees() {
-t := s.T()
+func (suite *ICSUnitSuite) TestAttendees() {
+t := suite.T()

 table := []struct {
 name string
@@ -945,17 +888,6 @@ func (s *ICSUnitSuite) TestAttendees() {
 "attendee")
 },
 },
-{
-name: "attendee with internal exchange representation for email",
-att: [][]string{{
-"/o=ExchangeLabs/ou=ExchangeAdministrative Group(FY...LT)/cn=Recipients/cn=883...4a-John Doe",
-"required",
-"declined",
-}},
-check: func(out string) {
-assert.NotContains(t, out, "ATTENDEE")
-},
-},
 {
 name: "multiple attendees",
 att: [][]string{
@@ -986,8 +918,8 @@ func (s *ICSUnitSuite) TestAttendees() {
 }

 for _, tt := range table {
-s.Run(tt.name, func() {
-t := s.T()
+suite.Run(tt.name, func() {
+t := suite.T()

 ctx, flush := tester.NewContext(t)
 defer flush()
@@ -1108,8 +1040,8 @@ func checkAttachment(t *testing.T, out, check, msg string) {
 assert.ElementsMatch(t, as, bs, fmt.Sprintf("fields %s", msg))
 }

-func (s *ICSUnitSuite) TestAttachments() {
-t := s.T()
+func (suite *ICSUnitSuite) TestAttachments() {
+t := suite.T()

 type attachment struct {
 cid string // contentid
@@ -1165,8 +1097,8 @@ func (s *ICSUnitSuite) TestAttachments() {
 }

 for _, tt := range table {
-s.Run(tt.name, func() {
-t := s.T()
+suite.Run(tt.name, func() {
+t := suite.T()

 ctx, flush := tester.NewContext(t)
 defer flush()
@@ -1209,7 +1141,7 @@ func (s *ICSUnitSuite) TestAttachments() {
 }
 }

-func (s *ICSUnitSuite) TestCancellations() {
+func (suite *ICSUnitSuite) TestCancellations() {
 table := []struct {
 name string
 cancelledIds []string
@@ -1233,8 +1165,8 @@ func (s *ICSUnitSuite) TestCancellations() {
 }

 for _, tt := range table {
-s.Run(tt.name, func() {
-t := s.T()
+suite.Run(tt.name, func() {
+t := suite.T()

 ctx, flush := tester.NewContext(t)
 defer flush()
@@ -1297,7 +1229,7 @@ func eventToJSON(e *models.Event) ([]byte, error) {
 return bts, err
 }

-func (s *ICSUnitSuite) TestEventExceptions() {
+func (suite *ICSUnitSuite) TestEventExceptions() {
 table := []struct {
 name string
 event func() *models.Event
@@ -1319,7 +1251,7 @@ func (s *ICSUnitSuite) TestEventExceptions() {
 exception.SetEnd(newEnd)

 parsed, err := eventToMap(exception)
-require.NoError(s.T(), err, "parsing exception")
+require.NoError(suite.T(), err, "parsing exception")

 // add exception event to additional data
 e.SetAdditionalData(map[string]any{
@@ -1338,15 +1270,15 @@ func (s *ICSUnitSuite) TestEventExceptions() {
 }
 }

-assert.Equal(s.T(), 2, events, "number of events")
+assert.Equal(suite.T(), 2, events, "number of events")

-assert.Contains(s.T(), out, "RECURRENCE-ID:20210101T120000Z", "recurrence id")
+assert.Contains(suite.T(), out, "RECURRENCE-ID:20210101T120000Z", "recurrence id")

-assert.Contains(s.T(), out, "SUMMARY:Subject", "original event")
-assert.Contains(s.T(), out, "SUMMARY:Exception", "exception event")
+assert.Contains(suite.T(), out, "SUMMARY:Subject", "original event")
+assert.Contains(suite.T(), out, "SUMMARY:Exception", "exception event")

-assert.Contains(s.T(), out, "DTSTART:20210101T130000Z", "new start time")
-assert.Contains(s.T(), out, "DTEND:20210101T140000Z", "new end time")
+assert.Contains(suite.T(), out, "DTSTART:20210101T130000Z", "new start time")
+assert.Contains(suite.T(), out, "DTEND:20210101T140000Z", "new end time")
 },
 },
 {
@@ -1375,10 +1307,10 @@ func (s *ICSUnitSuite) TestEventExceptions() {
 exception2.SetEnd(newEnd)

 parsed1, err := eventToMap(exception1)
-require.NoError(s.T(), err, "parsing exception 1")
+require.NoError(suite.T(), err, "parsing exception 1")

 parsed2, err := eventToMap(exception2)
-require.NoError(s.T(), err, "parsing exception 2")
+require.NoError(suite.T(), err, "parsing exception 2")

 // add exception event to additional data
 e.SetAdditionalData(map[string]any{
@@ -1397,230 +1329,36 @@ func (s *ICSUnitSuite) TestEventExceptions() {
 }
 }

-assert.Equal(s.T(), 3, events, "number of events")
+assert.Equal(suite.T(), 3, events, "number of events")

-assert.Contains(s.T(), out, "RECURRENCE-ID:20210101T120000Z", "recurrence id 1")
-assert.Contains(s.T(), out, "RECURRENCE-ID:20210102T120000Z", "recurrence id 2")
+assert.Contains(suite.T(), out, "RECURRENCE-ID:20210101T120000Z", "recurrence id 1")
+assert.Contains(suite.T(), out, "RECURRENCE-ID:20210102T120000Z", "recurrence id 2")

-assert.Contains(s.T(), out, "SUMMARY:Subject", "original event")
-assert.Contains(s.T(), out, "SUMMARY:Exception 1", "exception event 1")
-assert.Contains(s.T(), out, "SUMMARY:Exception 2", "exception event 2")
+assert.Contains(suite.T(), out, "SUMMARY:Subject", "original event")
+assert.Contains(suite.T(), out, "SUMMARY:Exception 1", "exception event 1")
+assert.Contains(suite.T(), out, "SUMMARY:Exception 2", "exception event 2")

-assert.Contains(s.T(), out, "DTSTART:20210101T130000Z", "new start time 1")
-assert.Contains(s.T(), out, "DTEND:20210101T140000Z", "new end time 1")
+assert.Contains(suite.T(), out, "DTSTART:20210101T130000Z", "new start time 1")
+assert.Contains(suite.T(), out, "DTEND:20210101T140000Z", "new end time 1")

-assert.Contains(s.T(), out, "DTSTART:20210102T130000Z", "new start time 2")
-assert.Contains(s.T(), out, "DTEND:20210102T140000Z", "new end time 2")
+assert.Contains(suite.T(), out, "DTSTART:20210102T130000Z", "new start time 2")
+assert.Contains(suite.T(), out, "DTEND:20210102T140000Z", "new end time 2")
 },
 },
 }

 for _, tt := range table {
-s.Run(tt.name, func() {
-ctx, flush := tester.NewContext(s.T())
+suite.Run(tt.name, func() {
+ctx, flush := tester.NewContext(suite.T())
 defer flush()

 bts, err := eventToJSON(tt.event())
-require.NoError(s.T(), err, "getting serialized content")
+require.NoError(suite.T(), err, "getting serialized content")

 out, err := FromJSON(ctx, bts)
-require.NoError(s.T(), err, "converting to ics")
+require.NoError(suite.T(), err, "converting to ics")

 tt.check(out)
 })
 }
 }
-
-func (s *ICSUnitSuite) TestGetRecurrenceTimezone() {
-table := []struct {
-name string
-intz string
-outtz string
-}{
-{
-name: "empty",
-intz: "",
-outtz: "UTC",
-},
-{
-name: "utc",
-intz: "UTC",
-outtz: "UTC",
-},
-{
-name: "simple",
-intz: "Asia/Kolkata",
-outtz: "Asia/Kolkata",
-},
-{
-name: "windows tz",
-intz: "India Standard Time",
-outtz: "Asia/Kolkata",
-},
-{
-name: "non canonical",
-intz: "Asia/Calcutta",
-outtz: "Asia/Kolkata",
-},
-}
-
-for _, tt := range table {
-s.Run(tt.name, func() {
-ctx, flush := tester.NewContext(s.T())
-defer flush()
-
-event := baseEvent()
-if len(tt.intz) > 0 {
-recur := models.NewPatternedRecurrence()
-rp := models.NewRecurrenceRange()
-rp.SetRecurrenceTimeZone(ptr.To(tt.intz))
-
-recur.SetRangeEscaped(rp)
-event.SetRecurrence(recur)
-}
-
-timezone, err := getRecurrenceTimezone(ctx, event)
-require.NoError(s.T(), err)
-assert.Equal(s.T(), tt.outtz, timezone.String())
-})
-}
-}
-
-func (s *ICSUnitSuite) TestAddTimezoneComponents() {
-event := baseEvent()
-recur := models.NewPatternedRecurrence()
-rp := models.NewRecurrenceRange()
-rp.SetRecurrenceTimeZone(ptr.To("Asia/Kolkata"))
-
-recur.SetRangeEscaped(rp)
-event.SetRecurrence(recur)
-
-ctx, flush := tester.NewContext(s.T())
-defer flush()
-
-cal := ics.NewCalendar()
-
-err := addTimeZoneComponents(ctx, cal, event)
-require.NoError(s.T(), err)
-
-text := cal.Serialize()
-assert.Contains(s.T(), text, "BEGIN:VTIMEZONE", "beginning of timezone")
-assert.Contains(s.T(), text, "TZID:Asia/Kolkata", "timezone id")
-assert.Contains(s.T(), text, "END:VTIMEZONE", "end of timezone")
-}
-
-func (s *ICSUnitSuite) TestAddTime() {
-locak, err := time.LoadLocation("Asia/Kolkata")
-require.NoError(s.T(), err)
-
-table := []struct {
-name string
-prop ics.ComponentProperty
-time time.Time
-allDay bool
-loc *time.Location
-exp string
-}{
-{
-name: "utc",
-prop: ics.ComponentPropertyDtStart,
-time: time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC),
-allDay: false,
-loc: time.UTC,
-exp: "DTSTART:20210102T030405Z",
-},
-{
-name: "local",
-prop: ics.ComponentPropertyDtStart,
-time: time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC),
-allDay: false,
-loc: locak,
-exp: "DTSTART;TZID=Asia/Kolkata:20210102T083405",
-},
-{
-name: "all day",
-prop: ics.ComponentPropertyDtStart,
-time: time.Date(2021, 1, 2, 0, 0, 0, 0, time.UTC),
-allDay: true,
-loc: time.UTC,
-exp: "DTSTART;VALUE=DATE:20210102",
-},
-{
-name: "all day local",
-prop: ics.ComponentPropertyDtStart,
-time: time.Date(2021, 1, 2, 0, 0, 0, 0, time.UTC),
-allDay: true,
-loc: locak,
-exp: "DTSTART;VALUE=DATE;TZID=Asia/Kolkata:20210102",
-},
-{
-name: "end",
-prop: ics.ComponentPropertyDtEnd,
-time: time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC),
-allDay: false,
-loc: time.UTC,
-exp: "DTEND:20210102T030405Z",
-},
-{
-// This won't happen, but a good test to have to test loc handling
-name: "windows tz",
-prop: ics.ComponentPropertyDtStart,
-time: time.Date(2021, 1, 2, 3, 4, 5, 0, time.UTC),
-allDay: false,
-loc: time.FixedZone("India Standard Time", 5*60*60+30*60),
-exp: "DTSTART;TZID=India Standard Time:20210102T083405",
-},
-}
-
-for _, tt := range table {
-s.Run(tt.name, func() {
-cal := ics.NewCalendar()
-evt := cal.AddEvent("id")
-
-addTime(evt, tt.prop, tt.time, tt.allDay, tt.loc)
-
-expSplits := strings.FieldsFunc(tt.exp, func(c rune) bool {
-return c == ':' || c == ';'
-})
-
-text := cal.Serialize()
-checkLine := ""
-
-for _, l := range strings.Split(text, "\r\n") {
-if strings.HasPrefix(l, string(tt.prop)) {
-checkLine = l
-break
-}
-}
-
-actSplits := strings.FieldsFunc(checkLine, func(c rune) bool {
-return c == ':' || c == ';'
-})
-
-assert.Greater(s.T(), len(checkLine), 0, "line not found")
-assert.Equal(s.T(), len(expSplits), len(actSplits), "length of fields")
-assert.ElementsMatch(s.T(), expSplits, actSplits, "fields")
-})
-}
-}
-
-// This tests and ensures that the generated data is in the format
-// that we expect
-func (s *ICSUnitSuite) TestGetTZDataKeyValues() {
-for key := range tzdata.TZData {
-s.Run(key, func() {
-ctx, flush := tester.NewContext(s.T())
-defer flush()
-
-data, err := getTZDataKeyValues(ctx, key)
-require.NoError(s.T(), err)
-
-assert.NotEmpty(s.T(), data, "data")
-assert.NotContains(s.T(), data, "BEGIN", "beginning of timezone") // should be stripped
-assert.NotContains(s.T(), data, "END", "end of timezone") // should be stripped
-assert.NotContains(s.T(), data, "TZID", "timezone id") // should be stripped
-assert.Contains(s.T(), data, "DTSTART", "start time")
-assert.Contains(s.T(), data, "TZOFFSETFROM", "offset from")
-})
-}
-}
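Note: the exception-event assertions above hinge on one iCalendar mechanism — a modified occurrence is serialized as a second VEVENT sharing the base event's UID, with RECURRENCE-ID naming the occurrence it overrides. A minimal sketch using the same golang-ical entry points the tests call (NewCalendar, AddEvent, Serialize), plus the library's SetProperty, whose exact call shape is an assumption since the tests don't use it:

package main

import (
	"fmt"

	ics "github.com/arran4/golang-ical"
)

func main() {
	cal := ics.NewCalendar()

	// Base recurring event.
	base := cal.AddEvent("event-uid")
	base.SetProperty(ics.ComponentPropertySummary, "Subject")
	base.SetProperty(ics.ComponentPropertyDtStart, "20210101T120000Z")
	base.SetProperty(ics.ComponentProperty("RRULE"), "FREQ=DAILY;INTERVAL=1")

	// Exception: same UID; RECURRENCE-ID pins which occurrence it replaces.
	exc := cal.AddEvent("event-uid")
	exc.SetProperty(ics.ComponentPropertySummary, "Exception")
	exc.SetProperty(ics.ComponentProperty("RECURRENCE-ID"), "20210101T120000Z")
	exc.SetProperty(ics.ComponentPropertyDtStart, "20210101T130000Z")

	fmt.Println(cal.Serialize())
}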
File diff suppressed because it is too large
@@ -1,35 +0,0 @@
-#!/bin/sh
-
-set -eo pipefail
-
-if ! echo "$PWD" | grep -q '/tzdata$'; then
-echo "Please run this script from the tzdata dir"
-exit 1
-fi
-
-# TODO: Generate from https://www.iana.org/time-zones
-if [ ! -d /tmp/corso-tzdata ]; then
-git clone --depth 1 https://github.com/add2cal/timezones-ical-library.git /tmp/corso-tzdata
-else
-cd /tmp/corso-tzdata
-git pull
-cd -
-fi
-
-# Generate a huge go file with all the timezones
-echo "package tzdata" >data.go
-echo "" >>data.go
-
-echo "var TZData = map[string]string{" >>data.go
-
-find /tmp/corso-tzdata/ -name '*.ics' | while read -r f; do
-tz=$(echo "$f" | sed 's|/tmp/corso-tzdata/api/||;s|\.ics$||')
-echo "Processing $tz"
-printf "\t\"%s\": \`" "$tz" >>data.go
-cat "$f" | grep -Ev "(BEGIN:|END:|TZID:)" |
-sed 's|`|\\`|g;s|\r||;s|TZID:/timezones-ical-library/|TZID:|' |
-perl -pe 'chomp if eof' >>data.go
-echo "\`," >>data.go
-done
-
-echo "}" >>data.go
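Note: the deleted generator above produced a tzdata Go package — one large map[string]string from IANA timezone names to raw VTIMEZONE property lines, with BEGIN:/END:/TZID: lines grepped out, which is exactly what the removed TestGetTZDataKeyValues asserted. A sketch of the generated file's shape; the entry body is an abridged illustration, not real generated data:

package tzdata

// TZData maps an IANA timezone name to the property lines of its
// VTIMEZONE definition. The generator strips BEGIN:/END:/TZID: lines,
// so consumers are expected to re-wrap the payload in their own
// VTIMEZONE component. Illustrative entry only.
var TZData = map[string]string{
	"Asia/Kolkata": `TZOFFSETFROM:+055328
TZOFFSETTO:+0530
TZNAME:IST
DTSTART:19450101T000000`,
}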
@@ -86,7 +86,7 @@ func FromJSON(ctx context.Context, body []byte) (string, error) {

 data, err := api.BytesToContactable(body)
 if err != nil {
-return "", clues.WrapWC(ctx, err, "converting to contactable").
+return "", clues.Wrap(err, "converting to contactable").
 With("body_length", len(body))
 }

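Note: the hunk above swaps clues.WrapWC for plain clues.Wrap. Assuming the clues package behaves as its names suggest, the WC variant additionally attaches the key/value annotations carried by ctx to the wrapped error, while Wrap records only the message plus whatever is chained on with With. A hedged sketch of the difference; parse is a hypothetical stand-in:

package main

import (
	"context"
	"errors"
	"fmt"

	"github.com/alcionai/clues"
)

func parse(body []byte) error {
	// Hypothetical failing parser, used only to produce an error.
	return errors.New("bad payload")
}

func load(ctx context.Context, body []byte) error {
	// Upstream code may have annotated ctx, e.g. with a tenant ID.
	ctx = clues.Add(ctx, "tenant_id", "t-123")

	if err := parse(body); err != nil {
		// WrapWC carries the ctx annotations along with the error; a
		// plain clues.Wrap(err, ...) would keep only the With values.
		return clues.WrapWC(ctx, err, "converting body").
			With("body_length", len(body))
	}

	return nil
}

func main() {
	err := load(context.Background(), []byte("x"))
	fmt.Println(clues.ToCore(err)) // message plus structured values
}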
@@ -75,10 +75,6 @@ type BackupBases interface {
 SnapshotAssistBases() []BackupBase
 }

-func EmptyBackupBase() BackupBases {
-return &backupBases{}
-}
-
 type backupBases struct {
 mergeBases []BackupBase
 assistBases []BackupBase
@@ -4,7 +4,6 @@ import (
 "context"
 "fmt"
 "path/filepath"
-"reflect"
 "sync"
 "time"

@@ -25,21 +24,22 @@ import (
 "github.com/alcionai/corso/src/internal/common/ptr"
 "github.com/alcionai/corso/src/internal/kopia/retention"
 "github.com/alcionai/corso/src/pkg/control/repository"
-"github.com/alcionai/corso/src/pkg/fault"
 "github.com/alcionai/corso/src/pkg/logger"
 "github.com/alcionai/corso/src/pkg/storage"
 )

 const (
-corsoWrapperAlertNamespace = "corso-kopia-wrapper"
-
 defaultKopiaConfigDir = "/tmp/"
 kopiaConfigFileTemplate = "repository-%s.config"
 defaultCompressor = "zstd-better-compression"
 // Interval of 0 disables scheduling.
 defaultSchedulingInterval = time.Second * 0
+)
+
+var (
+ErrSettingDefaultConfig = clues.New("setting default repo config values")
+ErrorRepoAlreadyExists = clues.New("repo already exists")

-defaultMinEpochDuration = time.Hour * 8
 // minEpochDurationLowerBound is the minimum corso will allow the kopia epoch
 // duration to be set to. This number can still be tuned further, right now
 // it's just to make sure it's not set to something totally wild.
@@ -59,20 +59,6 @@ const (
 minEpochDurationUpperBound = 7 * 24 * time.Hour
 )

-// allValidCompressors is the set of compression algorithms either currently
-// being used or that were previously used. Use this during the config verify
-// command to avoid spurious errors. We can revisit whether we want to update
-// the config in those old repos at a later time.
-var allValidCompressors = map[compression.Name]struct{}{
-compression.Name(defaultCompressor): {},
-compression.Name("s2-default"): {},
-}
-
-var (
-ErrSettingDefaultConfig = clues.New("setting default repo config values")
-ErrorRepoAlreadyExists = clues.New("repo already exists")
-)
-
 // Having all fields set to 0 causes it to keep max-int versions of snapshots.
 var (
 zeroOpt = policy.OptionalInt(0)
@@ -158,16 +144,12 @@ func (w *conn) Initialize(
 RetentionPeriod: blobCfg.RetentionPeriod,
 }

-var initErr error
-
 if err = repo.Initialize(ctx, bst, &kopiaOpts, cfg.CorsoPassphrase); err != nil {
-if !errors.Is(err, repo.ErrAlreadyInitialized) {
-return clues.WrapWC(ctx, err, "initializing repo")
+if errors.Is(err, repo.ErrAlreadyInitialized) {
+return clues.StackWC(ctx, ErrorRepoAlreadyExists, err)
 }

-logger.Ctx(ctx).Info("repo already exists, verifying repo config")
-
-initErr = clues.StackWC(ctx, ErrorRepoAlreadyExists, err)
+return clues.WrapWC(ctx, err, "initializing repo")
 }

 err = w.commonConnect(
@@ -179,28 +161,13 @@ func (w *conn) Initialize(
 cfg.CorsoPassphrase,
 defaultCompressor)
 if err != nil {
-// If the repo already exists then give some indication to that to help the
-// user debug. For example, they could have called init again on a repo that
-// already exists but accidentally used a different passphrase.
-return clues.Stack(err, initErr)
+return err
 }

 if err := w.setDefaultConfigValues(ctx); err != nil {
 return clues.StackWC(ctx, err)
 }

-// In theory it should be possible to set this when creating the repo.
-// However, the existing code paths for repo init in kopia end up clobbering
-// any custom parameters passed in with default values. It's not clear if
-// that's intentional or not.
-if err := w.updatePersistentConfig(
-ctx,
-repository.PersistentConfig{
-MinEpochDuration: ptr.To(defaultMinEpochDuration),
-}); err != nil {
-return clues.Stack(err)
-}
-
 // Calling with all parameters here will set extend object locks for
 // maintenance. Parameters for actual retention should have been set during
 // initialization and won't be updated again.
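Note: both versions of Initialize above translate kopia's repo.ErrAlreadyInitialized into corso's ErrorRepoAlreadyExists sentinel so callers can detect the case with errors.Is (the integration test later in this diff does exactly that). A minimal standalone sketch of the pattern using only the standard library; the real code joins the errors with clues.StackWC instead of fmt.Errorf:

package main

import (
	"errors"
	"fmt"
)

// Stand-ins for the sentinels involved: the real code pairs kopia's
// repo.ErrAlreadyInitialized with corso's ErrorRepoAlreadyExists.
var (
	errAlreadyInitialized = errors.New("repository already initialized")
	errRepoAlreadyExists  = errors.New("repo already exists")
)

func initialize() error {
	err := errAlreadyInitialized // pretend repo.Initialize failed this way

	if errors.Is(err, errAlreadyInitialized) {
		// Wrapping both errors (Go 1.20+ multi-%w) lets callers match
		// either sentinel with errors.Is.
		return fmt.Errorf("%w: %w", errRepoAlreadyExists, err)
	}

	return fmt.Errorf("initializing repo: %w", err)
}

func main() {
	fmt.Println(errors.Is(initialize(), errRepoAlreadyExists)) // true
}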
@@ -756,115 +723,3 @@ func (w *conn) updatePersistentConfig(
 "persisting updated config").
 OrNil()
 }
-
-func (w *conn) verifyDefaultPolicyConfigOptions(
-ctx context.Context,
-errs *fault.Bus,
-) {
-const alertName = "kopia-global-policy"
-
-globalPol, err := w.getGlobalPolicyOrEmpty(ctx)
-if err != nil {
-errs.AddAlert(ctx, fault.NewAlert(
-err.Error(),
-corsoWrapperAlertNamespace,
-"fetch-policy",
-alertName,
-nil))
-
-return
-}
-
-ctx = clues.Add(ctx, "current_global_policy", globalPol.String())
-
-if _, ok := allValidCompressors[globalPol.CompressionPolicy.CompressorName]; !ok {
-errs.AddAlert(ctx, fault.NewAlert(
-"unexpected compressor",
-corsoWrapperAlertNamespace,
-"compressor",
-alertName,
-nil))
-}
-
-// Need to use deep equals because the values are pointers to optional types.
-// That makes regular equality checks fail even if the data contained in each
-// policy is the same.
-if !reflect.DeepEqual(globalPol.RetentionPolicy, defaultRetention) {
-errs.AddAlert(ctx, fault.NewAlert(
-"unexpected retention policy",
-corsoWrapperAlertNamespace,
-"retention-policy",
-alertName,
-nil))
-}
-
-if globalPol.SchedulingPolicy.Interval() != defaultSchedulingInterval {
-errs.AddAlert(ctx, fault.NewAlert(
-"unexpected scheduling interval",
-corsoWrapperAlertNamespace,
-"scheduling-interval",
-alertName,
-nil))
-}
-}
-
-func (w *conn) verifyRetentionConfig(
-ctx context.Context,
-errs *fault.Bus,
-) {
-const alertName = "kopia-object-locking"
-
-directRepo, ok := w.Repository.(repo.DirectRepository)
-if !ok {
-errs.AddAlert(ctx, fault.NewAlert(
-"",
-corsoWrapperAlertNamespace,
-"fetch-direct-repo",
-alertName,
-nil))
-
-return
-}
-
-blobConfig, maintenanceParams, err := getRetentionConfigs(ctx, directRepo)
-if err != nil {
-errs.AddAlert(ctx, fault.NewAlert(
-err.Error(),
-corsoWrapperAlertNamespace,
-"fetch-config",
-alertName,
-nil))
-
-return
-}
-
-err = retention.OptsFromConfigs(*blobConfig, *maintenanceParams).
-Verify(ctx)
-if err != nil {
-errs.AddAlert(ctx, fault.NewAlert(
-err.Error(),
-corsoWrapperAlertNamespace,
-"config-values",
-alertName,
-nil))
-}
-}
-
-// verifyDefaultConfigOptions checks the following configurations:
-// kopia global policy:
-// - kopia snapshot retention is disabled
-// - kopia compression matches the default compression for corso
-// - kopia scheduling is disabled
-//
-// object locking:
-// - maintenance and blob config blob parameters are consistent (i.e. all
-// enabled or all disabled)
-func (w *conn) verifyDefaultConfigOptions(
-ctx context.Context,
-errs *fault.Bus,
-) {
-logger.Ctx(ctx).Info("verifying config parameters")
-
-w.verifyDefaultPolicyConfigOptions(ctx, errs)
-w.verifyRetentionConfig(ctx, errs)
-}
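Note: the removed verifiers share one design worth naming — each configuration check appends a non-fatal alert to a shared bus instead of returning on the first problem, so a single verify pass reports every misconfiguration at once. A generic sketch of that accumulate-don't-abort shape, with hypothetical Alert/Bus types standing in for corso's fault package:

package main

import "fmt"

// Alert records one non-fatal finding; Bus accumulates them.
// Hypothetical stand-ins for corso's fault.Alert / fault.Bus.
type Alert struct{ Namespace, Check, Message string }

type Bus struct{ alerts []Alert }

func (b *Bus) Add(a Alert) { b.alerts = append(b.alerts, a) }

func verify(b *Bus, compressor string, intervalSeconds int) {
	// Each check appends and continues, so one pass surfaces everything.
	if compressor != "zstd-better-compression" {
		b.Add(Alert{"kopia-wrapper", "compressor", "unexpected compressor"})
	}

	if intervalSeconds != 0 {
		b.Add(Alert{"kopia-wrapper", "scheduling-interval", "unexpected scheduling interval"})
	}
}

func main() {
	var b Bus

	verify(&b, "s2-default", 3600)
	fmt.Println(len(b.alerts)) // 2: both findings reported in one pass
}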
@@ -3,7 +3,6 @@ package kopia
 import (
 "context"
 "math"
-"strings"
 "testing"
 "time"

@@ -16,13 +15,11 @@ import (
 "github.com/stretchr/testify/assert"
 "github.com/stretchr/testify/require"
 "github.com/stretchr/testify/suite"
-"golang.org/x/exp/maps"

 "github.com/alcionai/corso/src/internal/common/ptr"
 strTD "github.com/alcionai/corso/src/internal/common/str/testdata"
 "github.com/alcionai/corso/src/internal/tester"
 "github.com/alcionai/corso/src/pkg/control/repository"
-"github.com/alcionai/corso/src/pkg/fault"
 "github.com/alcionai/corso/src/pkg/storage"
 storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
 )
@@ -96,7 +93,7 @@ func TestWrapperIntegrationSuite(t *testing.T) {
 })
 }

-func (suite *WrapperIntegrationSuite) TestInitialize_SamePassphrase() {
+func (suite *WrapperIntegrationSuite) TestRepoExistsError() {
 t := suite.T()
 repoNameHash := strTD.NewHashForRepoConfigName()

@@ -112,46 +109,6 @@ func (suite *WrapperIntegrationSuite) TestInitialize_SamePassphrase() {
 err = k.Close(ctx)
 require.NoError(t, err, clues.ToCore(err))

-err = k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
-assert.NoError(t, err, clues.ToCore(err))
-}
-
-func (suite *WrapperIntegrationSuite) TestInitialize_IncorrectPassphrase() {
-t := suite.T()
-repoNameHash := strTD.NewHashForRepoConfigName()
-
-ctx, flush := tester.NewContext(t)
-defer flush()
-
-st1 := storeTD.NewFilesystemStorage(t)
-k := NewConn(st1)
-
-err := k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
-require.NoError(t, err, clues.ToCore(err))
-
-err = k.Close(ctx)
-require.NoError(t, err, clues.ToCore(err))
-
-// Hacky way to edit the existing passphrase for the repo so we can check that
-// we get a sensible error back.
-st2 := st1
-st2.Config = maps.Clone(st1.Config)
-
-var found bool
-
-for k, v := range st2.Config {
-if strings.Contains(strings.ToLower(k), "passphrase") {
-st2.Config[k] = v + "1"
-found = true
-
-break
-}
-}
-
-require.True(t, found, "unable to update passphrase for test")
-
-k = NewConn(st2)
-
 err = k.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
 assert.Error(t, err, clues.ToCore(err))
 assert.ErrorIs(t, err, ErrorRepoAlreadyExists)
@@ -277,8 +234,8 @@ func (suite *WrapperIntegrationSuite) TestGetPolicyOrDefault_GetsDefault() {
 }()

 si := snapshot.SourceInfo{
-Host: "exchangeemail",
-UserName: "tenantID-resourceID",
+Host: corsoHost,
+UserName: corsoUser,
 Path: "test-path-root",
 }

@@ -314,8 +271,8 @@ func (suite *WrapperIntegrationSuite) TestSetCompressor() {
 // Check the global policy will be the effective policy in future snapshots
 // for some source info.
 si := snapshot.SourceInfo{
-Host: "exchangeemail",
-UserName: "tenantID-resourceID",
+Host: corsoHost,
+UserName: corsoUser,
 Path: "test-path-root",
 }

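Note: context for the two hunks above — in kopia, a snapshot source is identified by the (Host, UserName, Path) triple on snapshot.SourceInfo, so tests must build it from the same corsoHost/corsoUser constants the backup writer uses or policy and manifest lookups will silently miss. A small sketch with local stand-ins for those constants:

package main

import (
	"fmt"

	"github.com/kopia/kopia/snapshot"
)

// Local stand-ins for the constants this branch defines in the
// wrapper package (corsoHost = "corso-host", corsoUser = "corso").
const (
	corsoHost = "corso-host"
	corsoUser = "corso"
)

func main() {
	// (Host, UserName, Path) jointly identify a snapshot source in kopia;
	// writers and readers must agree on all three for lookups to resolve.
	si := snapshot.SourceInfo{
		Host:     corsoHost,
		UserName: corsoUser,
		Path:     "test-path-root",
	}

	fmt.Println(si) // e.g. corso@corso-host:test-path-root
}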
@@ -327,87 +284,7 @@ func (suite *WrapperIntegrationSuite) TestSetCompressor() {
 string(policyTree.EffectivePolicy().CompressionPolicy.CompressorName))
 }

-func (suite *WrapperIntegrationSuite) TestConfigPersistentConfigOnInitAndNotOnConnect() {
-repoNameHash := strTD.NewHashForRepoConfigName()
-
-table := []struct {
-name string
-mutateParams repository.PersistentConfig
-checkFunc func(
-t *testing.T,
-wanted repository.PersistentConfig,
-mutableParams format.MutableParameters,
-blobConfig format.BlobStorageConfiguration,
-)
-}{
-{
-name: "MinEpochDuration",
-mutateParams: repository.PersistentConfig{
-MinEpochDuration: ptr.To(defaultMinEpochDuration + time.Minute),
-},
-checkFunc: func(
-t *testing.T,
-wanted repository.PersistentConfig,
-mutableParams format.MutableParameters,
-blobConfig format.BlobStorageConfiguration,
-) {
-assert.Equal(t, *wanted.MinEpochDuration, mutableParams.EpochParameters.MinEpochDuration)
-},
-},
-}
-
-for _, test := range table {
-suite.Run(test.name, func() {
-t := suite.T()
-
-ctx, flush := tester.NewContext(t)
-defer flush()
-
-k, err := openLocalKopiaRepo(t, ctx)
-require.NoError(t, err, clues.ToCore(err))
-
-// Close is safe to call even if the repo is already closed.
-t.Cleanup(func() {
-k.Close(ctx)
-})
-
-// Need to disconnect and connect again because in-memory state in kopia
-// isn't updated immediately.
-err = k.Close(ctx)
-require.NoError(t, err, clues.ToCore(err))
-
-err = k.Connect(ctx, repository.Options{}, repoNameHash)
-require.NoError(t, err, clues.ToCore(err))
-
-mutable, blob, err := k.getPersistentConfig(ctx)
-require.NoError(t, err, clues.ToCore(err))
-
-defaultParams := repository.PersistentConfig{
-MinEpochDuration: ptr.To(defaultMinEpochDuration),
-}
-
-test.checkFunc(t, defaultParams, mutable, blob)
-
-err = k.updatePersistentConfig(ctx, test.mutateParams)
-require.NoError(t, err, clues.ToCore(err))
-
-err = k.Close(ctx)
-require.NoError(t, err, clues.ToCore(err))
-
-err = k.Connect(ctx, repository.Options{}, repoNameHash)
-require.NoError(t, err, clues.ToCore(err))
-
-mutable, blob, err = k.getPersistentConfig(ctx)
-require.NoError(t, err, clues.ToCore(err))
-test.checkFunc(t, test.mutateParams, mutable, blob)
-
-err = k.Close(ctx)
-require.NoError(t, err, clues.ToCore(err))
-})
-}
-}
-
-func (suite *WrapperIntegrationSuite) TestConfigPolicyDefaultsSetOnInitAndNotOnConnect() {
+func (suite *WrapperIntegrationSuite) TestConfigDefaultsSetOnInitAndNotOnConnect() {
 newCompressor := "pgzip"
 newRetentionDaily := policy.OptionalInt(42)
 newRetention := policy.RetentionPolicy{KeepDaily: &newRetentionDaily}
@@ -717,13 +594,6 @@ func (suite *WrapperIntegrationSuite) TestUpdatePersistentConfig() {
 err := connection.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
 require.NoError(t, err, "initializing repo: %v", clues.ToCore(err))

-// Need to close and reopen the repo due to kopia caching of values.
-err = connection.Close(ctx)
-require.NoError(t, err, clues.ToCore(err))
-
-err = connection.Connect(ctx, repository.Options{}, repoNameHash)
-require.NoError(t, err, clues.ToCore(err))
-
 startParams, startBlobConfig, err := connection.getPersistentConfig(ctx)
 require.NoError(t, err, clues.ToCore(err))

@@ -822,281 +692,3 @@ func (suite *ConnRetentionIntegrationSuite) TestInitWithAndWithoutRetention() {
 // Some checks to make sure retention was fully initialized as expected.
 checkRetentionParams(t, ctx, k2, blob.Governance, time.Hour*48, assert.True)
 }
-
-// TestVerifyDefaultConfigOptions checks that if the repo has misconfigured
-// values an error is returned. This is easiest to do in a test suite that
-// allows object locking because some of the configured values that are checked
-// relate to object locking.
-func (suite *ConnRetentionIntegrationSuite) TestVerifyDefaultConfigOptions() {
-nonzeroOpt := policy.OptionalInt(42)
-
-table := []struct {
-name string
-setupRepo func(context.Context, *testing.T, *conn)
-expectAlerts int
-}{
-{
-name: "ValidConfigs NoRetention",
-setupRepo: func(context.Context, *testing.T, *conn) {},
-},
-{
-name: "ValidConfigs Retention",
-setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
-err := con.setRetentionParameters(
-ctx,
-repository.Retention{
-Mode: ptr.To(repository.GovernanceRetention),
-Duration: ptr.To(48 * time.Hour),
-Extend: ptr.To(true),
-})
-require.NoError(t, err, clues.ToCore(err))
-},
-},
-{
-name: "ValidRetentionButNotExtending",
-setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
-err := con.setRetentionParameters(
-ctx,
-repository.Retention{
-Mode: ptr.To(repository.GovernanceRetention),
-Duration: ptr.To(48 * time.Hour),
-Extend: ptr.To(false),
-})
-require.NoError(t, err, clues.ToCore(err))
-},
-expectAlerts: 1,
-},
-{
-name: "ExtendingRetentionButNotConfigured",
-setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
-err := con.setRetentionParameters(
-ctx,
-repository.Retention{
-Extend: ptr.To(true),
-})
-require.NoError(t, err, clues.ToCore(err))
-},
-expectAlerts: 1,
-},
-{
-name: "NonZeroScheduleInterval",
-setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
-pol, err := con.getGlobalPolicyOrEmpty(ctx)
-require.NoError(t, err, clues.ToCore(err))
-
-updateSchedulingOnPolicy(time.Hour, pol)
-
-err = con.writeGlobalPolicy(ctx, "test", pol)
-require.NoError(t, err, clues.ToCore(err))
-},
-expectAlerts: 1,
-},
-{
-name: "OldValidCompressor",
-setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
-pol, err := con.getGlobalPolicyOrEmpty(ctx)
-require.NoError(t, err, clues.ToCore(err))
-
-_, err = updateCompressionOnPolicy("s2-default", pol)
-require.NoError(t, err, clues.ToCore(err))
-
-err = con.writeGlobalPolicy(ctx, "test", pol)
-require.NoError(t, err, clues.ToCore(err))
-},
-expectAlerts: 0,
-},
-{
-name: "NonDefaultCompression",
-setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
-pol, err := con.getGlobalPolicyOrEmpty(ctx)
-require.NoError(t, err, clues.ToCore(err))
-
-_, err = updateCompressionOnPolicy("pgzip-best-speed", pol)
-require.NoError(t, err, clues.ToCore(err))
-
-err = con.writeGlobalPolicy(ctx, "test", pol)
-require.NoError(t, err, clues.ToCore(err))
-},
-expectAlerts: 1,
-},
-{
-name: "NonZeroSnapshotRetentionLatest",
-setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
-retention := policy.RetentionPolicy{
-KeepLatest: &nonzeroOpt,
-KeepHourly: &zeroOpt,
-KeepWeekly: &zeroOpt,
-KeepDaily: &zeroOpt,
-KeepMonthly: &zeroOpt,
-KeepAnnual: &zeroOpt,
-}
-pol, err := con.getGlobalPolicyOrEmpty(ctx)
-require.NoError(t, err, clues.ToCore(err))
-
-updateRetentionOnPolicy(retention, pol)
-
-err = con.writeGlobalPolicy(ctx, "test", pol)
-require.NoError(t, err, clues.ToCore(err))
-},
-expectAlerts: 1,
-},
-{
-name: "NonZeroSnapshotRetentionHourly",
-setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
-retention := policy.RetentionPolicy{
-KeepLatest: &zeroOpt,
-KeepHourly: &nonzeroOpt,
-KeepWeekly: &zeroOpt,
-KeepDaily: &zeroOpt,
-KeepMonthly: &zeroOpt,
-KeepAnnual: &zeroOpt,
-}
-pol, err := con.getGlobalPolicyOrEmpty(ctx)
-require.NoError(t, err, clues.ToCore(err))
-
-updateRetentionOnPolicy(retention, pol)
-
-err = con.writeGlobalPolicy(ctx, "test", pol)
-require.NoError(t, err, clues.ToCore(err))
-},
-expectAlerts: 1,
-},
-{
-name: "NonZeroSnapshotRetentionWeekly",
-setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
-retention := policy.RetentionPolicy{
-KeepLatest: &zeroOpt,
-KeepHourly: &zeroOpt,
-KeepWeekly: &nonzeroOpt,
-KeepDaily: &zeroOpt,
-KeepMonthly: &zeroOpt,
-KeepAnnual: &zeroOpt,
-}
-pol, err := con.getGlobalPolicyOrEmpty(ctx)
-require.NoError(t, err, clues.ToCore(err))
-
-updateRetentionOnPolicy(retention, pol)
-
-err = con.writeGlobalPolicy(ctx, "test", pol)
-require.NoError(t, err, clues.ToCore(err))
-},
-expectAlerts: 1,
-},
-{
-name: "NonZeroSnapshotRetentionDaily",
-setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
-retention := policy.RetentionPolicy{
-KeepLatest: &zeroOpt,
-KeepHourly: &zeroOpt,
-KeepWeekly: &zeroOpt,
-KeepDaily: &nonzeroOpt,
-KeepMonthly: &zeroOpt,
-KeepAnnual: &zeroOpt,
-}
-pol, err := con.getGlobalPolicyOrEmpty(ctx)
-require.NoError(t, err, clues.ToCore(err))
-
-updateRetentionOnPolicy(retention, pol)
-
-err = con.writeGlobalPolicy(ctx, "test", pol)
-require.NoError(t, err, clues.ToCore(err))
-},
-expectAlerts: 1,
-},
-{
-name: "NonZeroSnapshotRetentionMonthly",
-setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
-retention := policy.RetentionPolicy{
-KeepLatest: &zeroOpt,
-KeepHourly: &zeroOpt,
-KeepWeekly: &zeroOpt,
-KeepDaily: &zeroOpt,
-KeepMonthly: &nonzeroOpt,
-KeepAnnual: &zeroOpt,
-}
-pol, err := con.getGlobalPolicyOrEmpty(ctx)
-require.NoError(t, err, clues.ToCore(err))
-
-updateRetentionOnPolicy(retention, pol)
-
-err = con.writeGlobalPolicy(ctx, "test", pol)
-require.NoError(t, err, clues.ToCore(err))
-},
-expectAlerts: 1,
-},
-{
-name: "NonZeroSnapshotRetentionAnnual",
-setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
-retention := policy.RetentionPolicy{
-KeepLatest: &zeroOpt,
-KeepHourly: &zeroOpt,
-KeepWeekly: &zeroOpt,
-KeepDaily: &zeroOpt,
-KeepMonthly: &zeroOpt,
-KeepAnnual: &nonzeroOpt,
-}
-pol, err := con.getGlobalPolicyOrEmpty(ctx)
-require.NoError(t, err, clues.ToCore(err))
-
-updateRetentionOnPolicy(retention, pol)
-
-err = con.writeGlobalPolicy(ctx, "test", pol)
-require.NoError(t, err, clues.ToCore(err))
-},
-expectAlerts: 1,
-},
-{
-name: "MultipleAlerts",
-setupRepo: func(ctx context.Context, t *testing.T, con *conn) {
-err := con.setRetentionParameters(
-ctx,
-repository.Retention{
-Mode: ptr.To(repository.GovernanceRetention),
-Duration: ptr.To(48 * time.Hour),
-Extend: ptr.To(false),
-})
-require.NoError(t, err, clues.ToCore(err))
-
-pol, err := con.getGlobalPolicyOrEmpty(ctx)
-require.NoError(t, err, clues.ToCore(err))
-
-updateSchedulingOnPolicy(time.Hour, pol)
-
-_, err = updateCompressionOnPolicy("pgzip-best-speed", pol)
-require.NoError(t, err, clues.ToCore(err))
-
-err = con.writeGlobalPolicy(ctx, "test", pol)
-require.NoError(t, err, clues.ToCore(err))
-},
-expectAlerts: 3,
-},
-}
-
-for _, test := range table {
-suite.Run(test.name, func() {
-t := suite.T()
-
-ctx, flush := tester.NewContext(t)
-t.Cleanup(flush)
-
-repoNameHash := strTD.NewHashForRepoConfigName()
-st1 := storeTD.NewPrefixedS3Storage(t)
-
-con := NewConn(st1)
-err := con.Initialize(ctx, repository.Options{}, repository.Retention{}, repoNameHash)
-require.NoError(t, err, clues.ToCore(err))
-
-t.Cleanup(func() { con.Close(ctx) })
-
-test.setupRepo(ctx, t, con)
-
-errs := fault.New(true)
-con.verifyDefaultConfigOptions(ctx, errs)
-
-// There shouldn't be any reported failures because this is just to check
-// if things are alright.
-assert.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
-assert.Len(t, errs.Alerts(), test.expectAlerts)
-})
-}
-}
@@ -136,34 +136,3 @@ func (r *Opts) setBlobConfigMode(

 return nil
 }
-
-// Verify checks that the config info in r passes kopia's retention validation
-// checks when it comes to locking durations and that if retention is requested
-// in the blob config blob then lock extension is also configured to run during
-// maintenance. If retention is not enabled in the blob config blob then lock
-// extension should be disabled during maintenance.
-func (r Opts) Verify(ctx context.Context) error {
-if !r.blobCfg.IsRetentionEnabled() {
-if r.params.ExtendObjectLocks {
-return clues.NewWC(
-ctx,
-"retention disabled but maintenance lock extension enabled")
-}
-
-// Both disabled.
-return nil
-}
-
-// Rest of function handles case where retention is enabled in the blob config
-// blob.
-if !r.params.ExtendObjectLocks {
-return clues.NewWC(
-ctx,
-"retention enabled but maintenance lock extension disabled")
-}
-
-return clues.Stack(maintenance.CheckExtendRetention(
-ctx,
-r.blobCfg,
-&r.params)).OrNil()
-}
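Note: the removed Verify encodes a two-flag consistency invariant — blob-level retention and maintenance-time object-lock extension must be both enabled or both disabled. A standalone sketch of just that decision table, separate from the kopia duration checks the real code also performs:

package main

import (
	"errors"
	"fmt"
)

// checkLockConsistency mirrors the invariant the removed Verify enforced:
// blob-config retention and maintenance lock extension must agree.
func checkLockConsistency(retentionEnabled, extendLocks bool) error {
	switch {
	case !retentionEnabled && extendLocks:
		return errors.New("retention disabled but maintenance lock extension enabled")
	case retentionEnabled && !extendLocks:
		return errors.New("retention enabled but maintenance lock extension disabled")
	default:
		return nil // both enabled or both disabled
	}
}

func main() {
	for _, c := range [][2]bool{{false, false}, {true, true}, {false, true}, {true, false}} {
		fmt.Println(c, checkLockConsistency(c[0], c[1]))
	}
}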
@@ -202,97 +202,3 @@ func (suite *OptsUnitSuite) TestSet() {
 })
 }
 }
-
-func (suite *OptsUnitSuite) TestVerify() {
-mode := blob.Governance
-fullCycleInterval := time.Hour * 24
-duration := 2 * fullCycleInterval
-
-table := []struct {
-name string
-input *retention.Opts
-expectErr assert.ErrorAssertionFunc
-}{
-{
-name: "ValidDisabled",
-input: retention.OptsFromConfigs(
-format.BlobStorageConfiguration{},
-maintenance.Params{
-FullCycle: maintenance.CycleParams{
-Interval: fullCycleInterval,
-},
-ExtendObjectLocks: false,
-}),
-expectErr: assert.NoError,
-},
-{
-name: "ValidEnabled",
-input: retention.OptsFromConfigs(
-format.BlobStorageConfiguration{
-RetentionMode: mode,
-RetentionPeriod: duration,
-},
-maintenance.Params{
-FullCycle: maintenance.CycleParams{
-Interval: fullCycleInterval,
-},
-ExtendObjectLocks: true,
-}),
-expectErr: assert.NoError,
-},
-{
-name: "InvalidDuration",
-input: retention.OptsFromConfigs(
-format.BlobStorageConfiguration{
-RetentionMode: mode,
-RetentionPeriod: fullCycleInterval,
-},
-maintenance.Params{
-FullCycle: maintenance.CycleParams{
-Interval: fullCycleInterval,
-},
-ExtendObjectLocks: true,
-}),
-expectErr: assert.Error,
-},
-{
-name: "InvalidNotExtending",
-input: retention.OptsFromConfigs(
-format.BlobStorageConfiguration{
-RetentionMode: mode,
-RetentionPeriod: duration,
-},
-maintenance.Params{
-FullCycle: maintenance.CycleParams{
-Interval: fullCycleInterval,
-},
-ExtendObjectLocks: false,
-}),
-expectErr: assert.Error,
-},
-{
-name: "InvalidNotConfigured",
-input: retention.OptsFromConfigs(
-format.BlobStorageConfiguration{},
-maintenance.Params{
-FullCycle: maintenance.CycleParams{
-Interval: fullCycleInterval,
-},
-ExtendObjectLocks: true,
-}),
-expectErr: assert.Error,
-},
-}
-
-for _, test := range table {
-suite.Run(test.name, func() {
-t := suite.T()
-
-ctx, flush := tester.NewContext(t)
-t.Cleanup(flush)
-
-err := test.input.Verify(ctx)
-test.expectErr(t, err, clues.ToCore(err))
-})
-}
-}
@@ -16,7 +16,6 @@ import (
 "github.com/kopia/kopia/snapshot/snapshotfs"
 "github.com/kopia/kopia/snapshot/snapshotmaintenance"
 "golang.org/x/exp/maps"
-"golang.org/x/exp/slices"

 "github.com/alcionai/corso/src/internal/common/prefixmatcher"
 "github.com/alcionai/corso/src/internal/common/readers"
@@ -34,7 +33,12 @@ import (
 "github.com/alcionai/corso/src/pkg/store"
 )

-const defaultCorsoPin = "corso"
+const (
+// TODO(ashmrtnz): These should be some values from upper layer corso,
+// possibly corresponding to who is making the backup.
+corsoHost = "corso-host"
+corsoUser = "corso"
+)

 // common manifest tags
 const (
@ -198,12 +202,24 @@ func (w Wrapper) ConsumeBackupCollections(
|
|||||||
return nil, nil, nil, clues.Wrap(err, "building kopia directories")
|
return nil, nil, nil, clues.Wrap(err, "building kopia directories")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Add some extra tags so we can look things up by reason.
|
||||||
|
tags := maps.Clone(additionalTags)
|
||||||
|
if tags == nil {
|
||||||
|
// Some platforms seem to return nil if the input is nil.
|
||||||
|
tags = map[string]string{}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, r := range backupReasons {
|
||||||
|
for _, k := range tagKeys(r) {
|
||||||
|
tags[k] = ""
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
s, err := w.makeSnapshotWithRoot(
|
s, err := w.makeSnapshotWithRoot(
|
||||||
ctx,
|
ctx,
|
||||||
backupReasons,
|
|
||||||
assistBase,
|
assistBase,
|
||||||
dirTree,
|
dirTree,
|
||||||
additionalTags,
|
tags,
|
||||||
progress)
|
progress)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, nil, err
|
return nil, nil, nil, err
|
||||||
@ -212,72 +228,8 @@ func (w Wrapper) ConsumeBackupCollections(
|
|||||||
return s, progress.deets, progress.toMerge, progress.errs.Failure()
|
return s, progress.deets, progress.toMerge, progress.errs.Failure()
|
||||||
}
|
}
|
||||||
|
|
||||||
// userAndHost is used as a passing mechanism for values that will be fed into
|
|
||||||
// kopia's UserName and Host fields for SourceInfo. It exists to avoid returning
|
|
||||||
// two strings from the hostAndUserFromReasons function.
|
|
||||||
type userAndHost struct {
|
|
||||||
user string
|
|
||||||
host string
|
|
||||||
}
|
|
||||||
|
|
||||||
func hostAndUserFromReasons(reasons []identity.Reasoner) (userAndHost, error) {
|
|
||||||
var (
|
|
||||||
tenant string
|
|
||||||
resource string
|
|
||||||
// reasonMap is a hash set of the concatenation of the service and category.
|
|
||||||
reasonMap = map[string]struct{}{}
|
|
||||||
)
|
|
||||||
|
|
||||||
for i, reason := range reasons {
|
|
||||||
// Use a check on the iteration index instead of empty string so we can
|
|
||||||
// differentiate between the first iteration and a reason with an empty
|
|
||||||
// value (should result in an error if there's another reason with a
|
|
||||||
// non-empty value).
|
|
||||||
if i == 0 {
|
|
||||||
tenant = reason.Tenant()
|
|
||||||
} else if tenant != reason.Tenant() {
|
|
||||||
return userAndHost{}, clues.New("multiple tenant IDs in backup reasons").
|
|
||||||
With(
|
|
||||||
"old_tenant_id", tenant,
|
|
||||||
"new_tenant_id", reason.Tenant())
|
|
||||||
}
|
|
||||||
|
|
||||||
if i == 0 {
|
|
||||||
resource = reason.ProtectedResource()
|
|
||||||
} else if resource != reason.ProtectedResource() {
|
|
||||||
return userAndHost{}, clues.New("multiple protected resource IDs in backup reasons").
|
|
||||||
With(
|
|
||||||
"old_resource_id", resource,
|
|
||||||
"new_resource_id", reason.ProtectedResource())
|
|
||||||
}
|
|
||||||
|
|
||||||
dataType := reason.Service().String() + reason.Category().String()
|
|
||||||
reasonMap[dataType] = struct{}{}
|
|
||||||
}
|
|
||||||
|
|
||||||
allReasons := maps.Keys(reasonMap)
|
|
||||||
slices.Sort(allReasons)
|
|
||||||
|
|
||||||
host := strings.Join(allReasons, "-")
|
|
||||||
user := strings.Join([]string{tenant, resource}, "-")
|
|
||||||
|
|
||||||
if len(user) == 0 || user == "-" {
|
|
||||||
return userAndHost{}, clues.New("empty user value")
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(host) == 0 {
|
|
||||||
return userAndHost{}, clues.New("empty host value")
|
|
||||||
}
|
|
||||||
|
|
||||||
return userAndHost{
|
|
||||||
host: host,
|
|
||||||
user: user,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (w Wrapper) makeSnapshotWithRoot(
|
func (w Wrapper) makeSnapshotWithRoot(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
backupReasons []identity.Reasoner,
|
|
||||||
prevBases []BackupBase,
|
prevBases []BackupBase,
|
||||||
root fs.Directory,
|
root fs.Directory,
|
||||||
addlTags map[string]string,
|
addlTags map[string]string,
|
||||||
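The removed hostAndUserFromReasons above is where main derives kopia's snapshot identity from the backup reasons; this branch predates it and pins the identity to the corsoHost/corsoUser constants instead. A self-contained sketch of that derivation, keeping only the string shaping (the tenant/resource mismatch validation is elided, and the service+category strings are illustrative):

    package main

    import (
    	"fmt"
    	"sort"
    	"strings"
    )

    // sourceIdentity reproduces the removed derivation: the user is
    // "tenant-resource"; the host is the sorted, de-duplicated set of
    // service+category pairs joined by "-".
    func sourceIdentity(tenant, resource string, serviceCategories []string) (user, host string) {
    	seen := map[string]struct{}{}
    	for _, sc := range serviceCategories {
    		seen[sc] = struct{}{}
    	}

    	all := make([]string, 0, len(seen))
    	for sc := range seen {
    		all = append(all, sc)
    	}
    	sort.Strings(all)

    	return strings.Join([]string{tenant, resource}, "-"), strings.Join(all, "-")
    }

    func main() {
    	user, host := sourceIdentity("tid", "uid", []string{"ExchangeEmail", "ExchangeContacts"})
    	fmt.Println(user) // tid-uid
    	fmt.Println(host) // ExchangeContacts-ExchangeEmail
    }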
@@ -300,24 +252,11 @@ func (w Wrapper) makeSnapshotWithRoot(
 		snapIDs = append(snapIDs, ent.ItemDataSnapshot.ID)
 	}

-	// Add some extra tags so we can look things up by reason.
-	allTags := maps.Clone(addlTags)
-	if allTags == nil {
-		// Some platforms seem to return nil if the input is nil.
-		allTags = map[string]string{}
-	}
-
-	for _, r := range backupReasons {
-		for _, k := range tagKeys(r) {
-			allTags[k] = ""
-		}
-	}
-
 	ctx = clues.Add(
 		ctx,
 		"num_assist_snapshots", len(prevBases),
 		"assist_snapshot_ids", snapIDs,
-		"additional_tags", allTags)
+		"additional_tags", addlTags)

 	if len(snapIDs) > 0 {
 		logger.Ctx(ctx).Info("using snapshots for kopia-assisted incrementals")
@@ -327,7 +266,7 @@ func (w Wrapper) makeSnapshotWithRoot(
 	tags := map[string]string{}

-	for k, v := range allTags {
+	for k, v := range addlTags {
 		mk, mv := makeTagKV(k)

 		if len(v) == 0 {
@@ -337,16 +276,7 @@ func (w Wrapper) makeSnapshotWithRoot(
 		tags[mk] = v
 	}

-	// Set the SourceInfo to the tenant ID, resource ID, and the concatenation
-	// of the service/data types being backed up. This will give us unique
-	// values for each set of backups with the assumption that no concurrent
-	// backups for the same set of things is being run on this repo.
-	userHost, err := hostAndUserFromReasons(backupReasons)
-	if err != nil {
-		return nil, clues.StackWC(ctx, err)
-	}
-
-	err = repo.WriteSession(
+	err := repo.WriteSession(
 		ctx,
 		w.c,
 		repo.WriteSessionOptions{
@@ -358,9 +288,10 @@ func (w Wrapper) makeSnapshotWithRoot(
 		},
 		func(innerCtx context.Context, rw repo.RepositoryWriter) error {
 			si := snapshot.SourceInfo{
-				Host:     userHost.host,
-				UserName: userHost.user,
+				Host:     corsoHost,
+				UserName: corsoUser,
+				// TODO(ashmrtnz): will this be something useful for snapshot lookups later?
 				Path:     root.Name(),
 			}

 			trueVal := policy.OptionalBool(true)
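Why the SourceInfo hunk above matters: kopia groups and lists snapshots by the (UserName, Host, Path) triple. With the constant identity on this branch, every corso snapshot in the repo shares one source; with main's derived identity, each tenant/resource/data-type set becomes its own source, which is what the removed comment means by unique values per set of backups. A hedged fragment showing how the triple is consumed, assuming kopia's snapshot.ListSnapshots signature (rep is an open repo.Repository; the literal values are illustrative):

    // Listing is keyed by the full source triple: a fixed identity returns
    // every corso snapshot in the repo, a derived identity only the matching
    // tenant/resource/data-type set.
    src := snapshot.SourceInfo{
    	Host:     "corso-host",
    	UserName: "corso",
    	Path:     root.Name(),
    }

    manifests, err := snapshot.ListSnapshots(ctx, rep, src)
    if err != nil {
    	return nil, clues.StackWC(ctx, err)
    }
    _ = manifests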
@@ -394,10 +325,6 @@ func (w Wrapper) makeSnapshotWithRoot(
 	}

 	man.Tags = tags
-	// Add one pin to keep kopia's retention policy from collecting it if it
-	// ends up enabled for some reason. The value in the pin doesn't matter.
-	// We don't need to remove any pins.
-	man.UpdatePins(append(man.Pins, defaultCorsoPin), nil)

 	if _, err := snapshot.SaveSnapshot(innerCtx, rw, man); err != nil {
 		err = clues.WrapWC(ctx, err, "saving snapshot")
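On the pin removed just above: in kopia's manifest API, a snapshot carrying any pin is exempt from retention-policy deletion, so main pins every manifest defensively in case repo-level retention ever gets enabled; the pin's value carries no meaning. The call shape, as a fragment (man is the *snapshot.Manifest being saved):

    // Add one pin (value arbitrary, "corso" on main) and remove none; a
    // pinned manifest is skipped by kopia's retention-driven GC.
    man.UpdatePins(append(man.Pins, "corso"), nil)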
@@ -665,12 +592,7 @@ func (w Wrapper) RepoMaintenance(
 	ctx context.Context,
 	storer store.Storer,
 	opts repository.Maintenance,
-	errs *fault.Bus,
 ) error {
-	// Check the existing config parameters first so that even if we fail for some
-	// reason below we know we checked the config.
-	w.c.verifyDefaultConfigOptions(ctx, errs)
-
 	kopiaSafety, err := translateSafety(opts.Safety)
 	if err != nil {
 		return clues.WrapWC(ctx, err, "identifying safety level")
@@ -701,9 +623,8 @@ func (w Wrapper) RepoMaintenance(
 		// Even if we fail this we don't want to fail the overall maintenance
 		// operation since there's other useful work we can still do.
 		if err := cleanupOrphanedData(ctx, storer, w.c, buffer, time.Now); err != nil {
-			errs.AddRecoverable(ctx, clues.Wrap(
-				err,
-				"cleaning up failed backups, some space may not be freed"))
+			logger.CtxErr(ctx, err).Info(
+				"cleaning up failed backups, some space may not be freed")
 		}
 	}
@@ -3,7 +3,6 @@ package kopia
 import (
 	"bytes"
 	"context"
-	"fmt"
 	"io"
 	stdpath "path"
 	"strings"
@@ -27,6 +26,7 @@ import (
 	strTD "github.com/alcionai/corso/src/internal/common/str/testdata"
 	"github.com/alcionai/corso/src/internal/data"
 	dataMock "github.com/alcionai/corso/src/internal/data/mock"
+	"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 	exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
 	istats "github.com/alcionai/corso/src/internal/stats"
 	"github.com/alcionai/corso/src/internal/tester"
@@ -37,7 +37,6 @@ import (
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/path"
-	"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
 	storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
 )
@@ -198,7 +197,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_FirstRun_NoChanges() {
 		Type: repository.MetadataMaintenance,
 	}

-	err = w.RepoMaintenance(ctx, nil, opts, fault.New(true))
+	err = w.RepoMaintenance(ctx, nil, opts)
 	require.NoError(t, err, clues.ToCore(err))
 }
@@ -220,7 +219,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_NoForce_Fails
 	}

 	// This will set the user.
-	err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
+	err = w.RepoMaintenance(ctx, nil, mOpts)
 	require.NoError(t, err, clues.ToCore(err))

 	err = k.Close(ctx)
@@ -236,7 +235,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_NoForce_Fails
 	var notOwnedErr maintenance.NotOwnedError

-	err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
+	err = w.RepoMaintenance(ctx, nil, mOpts)
 	assert.ErrorAs(t, err, &notOwnedErr, clues.ToCore(err))
 }
@@ -258,7 +257,7 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_Force_Succeeds
 	}

 	// This will set the user.
-	err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
+	err = w.RepoMaintenance(ctx, nil, mOpts)
 	require.NoError(t, err, clues.ToCore(err))

 	err = k.Close(ctx)
@@ -275,13 +274,13 @@ func (suite *BasicKopiaIntegrationSuite) TestMaintenance_WrongUser_Force_Succeeds
 	mOpts.Force = true

 	// This will set the user.
-	err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
+	err = w.RepoMaintenance(ctx, nil, mOpts)
 	require.NoError(t, err, clues.ToCore(err))

 	mOpts.Force = false

 	// Running without force should succeed now.
-	err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
+	err = w.RepoMaintenance(ctx, nil, mOpts)
 	require.NoError(t, err, clues.ToCore(err))
 }
@@ -376,146 +375,6 @@ func (suite *BasicKopiaIntegrationSuite) TestUpdatePersistentConfig() {
 		mutableParams.EpochParameters.MinEpochDuration)
 }

-func (suite *BasicKopiaIntegrationSuite) TestConsumeBackupCollections_SetsSourceInfo() {
-	table := []struct {
-		name        string
-		reasons     []identity.Reasoner
-		expectError assert.ErrorAssertionFunc
-		expectUser  string
-		expectHost  string
-	}{
-		{
-			name: "DifferentDataTypesInService",
-			reasons: []identity.Reasoner{
-				identity.NewReason(testTenant, testUser, path.ExchangeService, path.EmailCategory),
-				identity.NewReason(testTenant, testUser, path.ExchangeService, path.ContactsCategory),
-			},
-			expectError: assert.NoError,
-			expectUser:  testTenant + "-" + testUser,
-			expectHost: path.ExchangeService.String() +
-				path.ContactsCategory.String() + "-" + path.ExchangeService.String() +
-				path.EmailCategory.String(),
-		},
-		{
-			name: "DifferentServices",
-			reasons: []identity.Reasoner{
-				identity.NewReason(testTenant, testUser, path.ExchangeService, path.EmailCategory),
-				identity.NewReason(testTenant, testUser, path.OneDriveService, path.FilesCategory),
-			},
-			expectError: assert.NoError,
-			expectUser:  testTenant + "-" + testUser,
-			expectHost: path.ExchangeService.String() +
-				path.EmailCategory.String() + "-" + path.OneDriveService.String() +
-				path.FilesCategory.String(),
-		},
-		{
-			name: "EmptyTenant",
-			reasons: []identity.Reasoner{
-				identity.NewReason("", testUser, path.ExchangeService, path.EmailCategory),
-			},
-			expectError: assert.NoError,
-			expectUser:  "-" + testUser,
-			expectHost:  path.ExchangeService.String() + path.EmailCategory.String(),
-		},
-		{
-			name: "EmptyResource",
-			reasons: []identity.Reasoner{
-				identity.NewReason(testTenant, "", path.ExchangeService, path.EmailCategory),
-			},
-			expectError: assert.NoError,
-			expectUser:  testTenant + "-",
-			expectHost:  path.ExchangeService.String() + path.EmailCategory.String(),
-		},
-		{
-			name: "EmptyTenantAndResource Errors",
-			reasons: []identity.Reasoner{
-				identity.NewReason("", "", path.ExchangeService, path.EmailCategory),
-			},
-			expectError: assert.Error,
-		},
-		{
-			name: "EmptyAndPopulatedTenant Errors",
-			reasons: []identity.Reasoner{
-				identity.NewReason("", testUser, path.ExchangeService, path.EmailCategory),
-				identity.NewReason(testTenant, testUser, path.ExchangeService, path.ContactsCategory),
-			},
-			expectError: assert.Error,
-		},
-		{
-			name: "DifferentTenants Errors",
-			reasons: []identity.Reasoner{
-				identity.NewReason(testTenant+"1", testUser, path.ExchangeService, path.EmailCategory),
-				identity.NewReason(testTenant, testUser, path.ExchangeService, path.ContactsCategory),
-			},
-			expectError: assert.Error,
-		},
-		{
-			name: "DifferentResources Errors",
-			reasons: []identity.Reasoner{
-				identity.NewReason(testTenant, testUser+"1", path.ExchangeService, path.EmailCategory),
-				identity.NewReason(testTenant, testUser, path.ExchangeService, path.ContactsCategory),
-			},
-			expectError: assert.Error,
-		},
-	}
-
-	for _, test := range table {
-		suite.Run(test.name, func() {
-			t := suite.T()
-
-			ctx, flush := tester.NewContext(t)
-			defer flush()
-
-			var cols []data.BackupCollection
-
-			for i, reason := range test.reasons {
-				colPath, err := path.Build(
-					testTenant,
-					testUser,
-					reason.Service(),
-					reason.Category(),
-					false,
-					fmt.Sprintf("%d", i))
-				require.NoError(t, err, clues.ToCore(err))
-
-				cols = append(cols, exchMock.NewCollection(colPath, colPath, 0))
-			}
-
-			c, err := openLocalKopiaRepo(t, ctx)
-			require.NoError(t, err, clues.ToCore(err))
-
-			wrapper := &Wrapper{c}
-
-			defer wrapper.Close(ctx)
-
-			stats, _, _, err := wrapper.ConsumeBackupCollections(
-				ctx,
-				test.reasons,
-				nil,
-				cols,
-				nil,
-				nil,
-				true,
-				count.New(),
-				fault.New(true))
-			test.expectError(t, err, clues.ToCore(err))
-
-			if err != nil {
-				return
-			}
-
-			snap, err := snapshot.LoadSnapshot(
-				ctx,
-				wrapper.c,
-				manifest.ID(stats.SnapshotID))
-			require.NoError(t, err, clues.ToCore(err))
-
-			assert.Equal(t, test.expectHost, snap.Source.Host, "source host")
-			assert.Equal(t, test.expectUser, snap.Source.UserName, "source user")
-		})
-	}
-}
-
 // ---------------
 // integration tests that require object locking to be enabled on the bucket.
 // ---------------
@@ -733,7 +592,7 @@ func (suite *RetentionIntegrationSuite) TestSetRetentionParameters_And_Maintenan
 	// This will set common maintenance config parameters. There's some interplay
 	// between the maintenance schedule and retention period that we want to check
 	// below.
-	err = w.RepoMaintenance(ctx, nil, mOpts, fault.New(true))
+	err = w.RepoMaintenance(ctx, nil, mOpts)
 	require.NoError(t, err, clues.ToCore(err))

 	// Enable retention.
@@ -838,7 +697,7 @@ func (suite *RetentionIntegrationSuite) TestSetAndUpdateRetentionParameters_RunM
 	// This will set common maintenance config parameters. There's some interplay
 	// between the maintenance schedule and retention period that we want to check
 	// below.
-	err = w.RepoMaintenance(ctx, ms, mOpts, fault.New(true))
+	err = w.RepoMaintenance(ctx, ms, mOpts)
 	require.NoError(t, err, clues.ToCore(err))

 	// Enable retention.
@@ -882,7 +741,7 @@ func (suite *RetentionIntegrationSuite) TestSetAndUpdateRetentionParameters_RunM

 	// Run full maintenance again. This should extend object locks for things if
 	// they exist.
-	err = w.RepoMaintenance(ctx, ms, mOpts, fault.New(true))
+	err = w.RepoMaintenance(ctx, ms, mOpts)
 	require.NoError(t, err, clues.ToCore(err))
 	})
 }
@@ -1151,8 +1010,6 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
 		manifest.ID(stats.SnapshotID))
 	require.NoError(t, err, clues.ToCore(err))

-	assert.Contains(t, snap.Pins, defaultCorsoPin)
-
 	man = snap
 	})
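Context that makes the WrongUser maintenance tests above easier to follow: kopia records a maintenance owner (a user@host identity) in the repository, and RepoMaintenance under any other identity surfaces maintenance.NotOwnedError until a forced run claims ownership. A condensed sketch of the flow the tests drive; the claim-and-retry shape is illustrative, while the calls match the hunks:

    // Expect a non-owner failure, force once to take ownership, then run
    // unforced maintenance normally afterwards.
    var notOwned maintenance.NotOwnedError

    err := w.RepoMaintenance(ctx, nil, mOpts)
    if errors.As(err, &notOwned) {
    	mOpts.Force = true

    	if err := w.RepoMaintenance(ctx, nil, mOpts); err != nil {
    		return err
    	}

    	mOpts.Force = false
    }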
@@ -2,7 +2,6 @@ package m365

 import (
 	"context"
-	"fmt"

 	"github.com/alcionai/clues"

@@ -14,10 +13,7 @@ import (
 	"github.com/alcionai/corso/src/internal/m365/service/groups"
 	"github.com/alcionai/corso/src/internal/m365/service/onedrive"
 	"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
-	"github.com/alcionai/corso/src/internal/m365/service/teamschats"
-	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/internal/operations/inject"
-	"github.com/alcionai/corso/src/pkg/account"
 	bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/count"
@@ -26,33 +22,9 @@ import (
 	"github.com/alcionai/corso/src/pkg/filters"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
-	"github.com/alcionai/corso/src/pkg/services/m365/api"
 	"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
 )

-type backupHandler interface {
-	produceBackupCollectionser
-}
-
-type produceBackupCollectionser interface {
-	ProduceBackupCollections(
-		ctx context.Context,
-		bpc inject.BackupProducerConfig,
-		ac api.Client,
-		creds account.M365Config,
-		su support.StatusUpdater,
-		counter *count.Bus,
-		errs *fault.Bus,
-	) (
-		collections []data.BackupCollection,
-		excludeItems *prefixmatcher.StringSetMatcher,
-		// canUsePreviousBackup can always be returned true by implementations
-		// that always return a tombstone collection when the metadata read fails
-		canUsePreviousBackup bool,
-		err error,
-	)
-}
-
 // ---------------------------------------------------------------------------
 // Data Collections
 // ---------------------------------------------------------------------------
@@ -91,38 +63,65 @@ func (ctrl *Controller) ProduceBackupCollections(
 		canUsePreviousBackup bool
 	)

-	var handler backupHandler
-
 	switch service {
 	case path.ExchangeService:
-		handler = exchange.NewBackup()
+		colls, excludeItems, canUsePreviousBackup, err = exchange.ProduceBackupCollections(
+			ctx,
+			bpc,
+			ctrl.AC,
+			ctrl.credentials,
+			ctrl.UpdateStatus,
+			counter,
+			errs)
+		if err != nil {
+			return nil, nil, false, err
+		}
+
 	case path.OneDriveService:
-		handler = onedrive.NewBackup()
+		colls, excludeItems, canUsePreviousBackup, err = onedrive.ProduceBackupCollections(
+			ctx,
+			bpc,
+			ctrl.AC,
+			ctrl.credentials,
+			ctrl.UpdateStatus,
+			counter,
+			errs)
+		if err != nil {
+			return nil, nil, false, err
+		}
+
 	case path.SharePointService:
-		handler = sharepoint.NewBackup()
+		colls, excludeItems, canUsePreviousBackup, err = sharepoint.ProduceBackupCollections(
+			ctx,
+			bpc,
+			ctrl.AC,
+			ctrl.credentials,
+			ctrl.UpdateStatus,
+			counter,
+			errs)
+		if err != nil {
+			return nil, nil, false, err
+		}
+
 	case path.GroupsService:
-		handler = groups.NewBackup()
+		colls, excludeItems, err = groups.ProduceBackupCollections(
+			ctx,
+			bpc,
+			ctrl.AC,
+			ctrl.credentials,
+			ctrl.UpdateStatus,
+			counter,
+			errs)
+		if err != nil {
+			return nil, nil, false, err
+		}
+
-	case path.TeamsChatsService:
-		handler = teamschats.NewBackup()
+		// canUsePreviousBackup can always be returned true for groups as we
+		// return a tombstone collection in case the metadata read fails
+		canUsePreviousBackup = true

 	default:
-		return nil, nil, false, clues.NewWC(ctx, fmt.Sprintf("service not supported: %s", service.HumanString()))
+		return nil, nil, false, clues.Wrap(clues.NewWC(ctx, service.String()), "service not supported")
 	}
-
-	colls, excludeItems, canUsePreviousBackup, err = handler.ProduceBackupCollections(
-		ctx,
-		bpc,
-		ctrl.AC,
-		ctrl.credentials,
-		ctrl.UpdateStatus,
-		counter,
-		errs)
-	if err != nil {
-		return nil, nil, false, err
-	}

 	for _, c := range colls {
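The interface removed above is the heart of main's refactor here: every service package exposes a NewBackup() value satisfying one ProduceBackupCollections method, so the switch shrinks to handler selection and the invocation exists exactly once. A minimal self-contained sketch of that pattern, with deliberately simplified signatures (the real method takes the full argument list shown in the hunks):

    package main

    import (
    	"context"
    	"errors"
    	"fmt"
    )

    // backupHandler is the one-method interface each service implements.
    type backupHandler interface {
    	ProduceBackupCollections(ctx context.Context) ([]string, error)
    }

    type exchangeBackup struct{}

    func (exchangeBackup) ProduceBackupCollections(ctx context.Context) ([]string, error) {
    	return []string{"exchange-collection"}, nil
    }

    // produce selects the handler per service; the call site is shared.
    func produce(ctx context.Context, service string) ([]string, error) {
    	var handler backupHandler

    	switch service {
    	case "exchange":
    		handler = exchangeBackup{}
    	default:
    		return nil, errors.New("service not supported: " + service)
    	}

    	return handler.ProduceBackupCollections(ctx)
    }

    func main() {
    	colls, err := produce(context.Background(), "exchange")
    	fmt.Println(colls, err) // [exchange-collection] <nil>
    }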
@@ -154,28 +153,25 @@ func (ctrl *Controller) IsServiceEnabled(
 		return sharepoint.IsServiceEnabled(ctx, ctrl.AC.Sites(), resourceOwner)
 	case path.GroupsService:
 		return groups.IsServiceEnabled(ctx, ctrl.AC.Groups(), resourceOwner)
-	case path.TeamsChatsService:
-		return teamschats.IsServiceEnabled(ctx, ctrl.AC.Users(), resourceOwner)
 	}

 	return false, clues.Wrap(clues.NewWC(ctx, service.String()), "service not supported")
 }

-func verifyBackupInputs(sel selectors.Selector, cachedIDs []string) error {
+func verifyBackupInputs(sels selectors.Selector, cachedIDs []string) error {
 	var ids []string

-	switch sel.Service {
+	switch sels.Service {
 	case selectors.ServiceExchange, selectors.ServiceOneDrive:
 		// Exchange and OneDrive user existence now checked in checkServiceEnabled.
 		return nil

-	case selectors.ServiceSharePoint, selectors.ServiceGroups, selectors.ServiceTeamsChats:
+	case selectors.ServiceSharePoint, selectors.ServiceGroups:
 		ids = cachedIDs
 	}

-	if !filters.Contains(ids).Compare(sel.ID()) {
-		return clues.Wrap(core.ErrNotFound, "verifying existence of resource").
-			With("selector_protected_resource", sel.ID())
+	if !filters.Contains(ids).Compare(sels.ID()) {
+		return clues.Stack(core.ErrNotFound).With("selector_protected_resource", sels.DiscreteOwner)
 	}

 	return nil
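One behavioral note on the verifyBackupInputs hunk: both error constructions keep core.ErrNotFound in the chain, so upstream errors.Is checks should be unaffected; what changes is the wrap message and whether the attached attribute is the selector ID or the DiscreteOwner. A hedged illustration, assuming clues.Wrap and clues.Stack both support Go's standard unwrapping (their use with errors.As/Is elsewhere in this diff suggests they do):

    wrapped := clues.Wrap(core.ErrNotFound, "verifying existence of resource").
    	With("selector_protected_resource", sel.ID())
    stacked := clues.Stack(core.ErrNotFound).
    	With("selector_protected_resource", sels.DiscreteOwner)

    fmt.Println(errors.Is(wrapped, core.ErrNotFound)) // expected: true
    fmt.Println(errors.Is(stacked, core.ErrNotFound)) // expected: true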
@@ -11,6 +11,7 @@ import (
 	"github.com/stretchr/testify/suite"

 	inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
+	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/data/mock"
 	"github.com/alcionai/corso/src/internal/m365/service/exchange"
@@ -18,7 +19,6 @@ import (
 	"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
 	"github.com/alcionai/corso/src/internal/operations/inject"
 	"github.com/alcionai/corso/src/internal/tester"
-	"github.com/alcionai/corso/src/internal/tester/its"
 	"github.com/alcionai/corso/src/internal/tester/tconfig"
 	"github.com/alcionai/corso/src/internal/version"
 	"github.com/alcionai/corso/src/pkg/control"
@@ -36,7 +36,10 @@ import (

 type DataCollectionIntgSuite struct {
 	tester.Suite
-	m365 its.M365IntgTestSetup
+	user     string
+	site     string
+	tenantID string
+	ac       api.Client
 }

 func TestDataCollectionIntgSuite(t *testing.T) {
@@ -48,14 +51,29 @@ func TestDataCollectionIntgSuite(t *testing.T) {
 }

 func (suite *DataCollectionIntgSuite) SetupSuite() {
-	suite.m365 = its.GetM365(suite.T())
+	t := suite.T()
+
+	suite.user = tconfig.M365UserID(t)
+	suite.site = tconfig.M365SiteID(t)
+
+	acct := tconfig.NewM365Account(t)
+	creds, err := acct.M365Config()
+	require.NoError(t, err, clues.ToCore(err))
+
+	suite.tenantID = creds.AzureTenantID
+
+	suite.ac, err = api.NewClient(
+		creds,
+		control.DefaultOptions(),
+		count.New())
+	require.NoError(t, err, clues.ToCore(err))
 }

 func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
 	ctx, flush := tester.NewContext(suite.T())
 	defer flush()

-	selUsers := []string{suite.m365.User.ID}
+	selUsers := []string{suite.user}

 	ctrl := newController(ctx, suite.T(), path.ExchangeService)
 	tests := []struct {
@@ -67,7 +85,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
 			getSelector: func(t *testing.T) selectors.Selector {
 				sel := selectors.NewExchangeBackup(selUsers)
 				sel.Include(sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
-				sel.DiscreteOwner = suite.m365.User.ID
+				sel.DiscreteOwner = suite.user
 				return sel.Selector
 			},
 		},
@@ -76,7 +94,7 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
 			getSelector: func(t *testing.T) selectors.Selector {
 				sel := selectors.NewExchangeBackup(selUsers)
 				sel.Include(sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()))
-				sel.DiscreteOwner = suite.m365.User.ID
+				sel.DiscreteOwner = suite.user
 				return sel.Selector
 			},
 		},
@@ -121,11 +139,11 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
 				Selector: sel,
 			}

-			collections, excludes, canUsePreviousBackup, err := exchange.NewBackup().ProduceBackupCollections(
+			collections, excludes, canUsePreviousBackup, err := exchange.ProduceBackupCollections(
 				ctx,
 				bpc,
-				suite.m365.AC,
-				suite.m365.Creds,
+				suite.ac,
+				suite.ac.Credentials,
 				ctrl.UpdateStatus,
 				count.New(),
 				fault.New(true))
@@ -252,7 +270,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
 	ctx, flush := tester.NewContext(suite.T())
 	defer flush()

-	selSites := []string{suite.m365.Site.ID}
+	selSites := []string{suite.site}
 	ctrl := newController(ctx, suite.T(), path.SharePointService)
 	tests := []struct {
 		name string
@@ -291,10 +309,10 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
 				Selector: sel,
 			}

-			collections, excludes, canUsePreviousBackup, err := sharepoint.NewBackup().ProduceBackupCollections(
+			collections, excludes, canUsePreviousBackup, err := sharepoint.ProduceBackupCollections(
 				ctx,
 				bpc,
-				suite.m365.AC,
+				suite.ac,
 				ctrl.credentials,
 				ctrl.UpdateStatus,
 				count.New(),
@@ -333,7 +351,8 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {

 type SPCollectionIntgSuite struct {
 	tester.Suite
-	m365 its.M365IntgTestSetup
+	connector *Controller
+	user      string
 }

 func TestSPCollectionIntgSuite(t *testing.T) {
@@ -345,7 +364,13 @@ func TestSPCollectionIntgSuite(t *testing.T) {
 }

 func (suite *SPCollectionIntgSuite) SetupSuite() {
-	suite.m365 = its.GetM365(suite.T())
+	ctx, flush := tester.NewContext(suite.T())
+	defer flush()
+
+	suite.connector = newController(ctx, suite.T(), path.SharePointService)
+	suite.user = tconfig.M365UserID(suite.T())
+
+	tester.LogTimeOfTest(suite.T())
 }

 func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
@@ -354,20 +379,24 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	ctrl := newController(ctx, t, path.SharePointService)
+	var (
+		siteID  = tconfig.M365SiteID(t)
+		ctrl    = newController(ctx, t, path.SharePointService)
+		siteIDs = []string{siteID}
+	)

-	_, err := ctrl.PopulateProtectedResourceIDAndName(ctx, suite.m365.Site.ID, nil)
+	site, err := ctrl.PopulateProtectedResourceIDAndName(ctx, siteID, nil)
 	require.NoError(t, err, clues.ToCore(err))

-	sel := selectors.NewSharePointBackup([]string{suite.m365.Site.ID})
+	sel := selectors.NewSharePointBackup(siteIDs)
 	sel.Include(sel.LibraryFolders([]string{"foo"}, selectors.PrefixMatch()))
-	sel.Include(sel.Library("Documents"))
-	sel.SetDiscreteOwnerIDName(suite.m365.Site.ID, suite.m365.Site.WebURL)
+
+	sel.SetDiscreteOwnerIDName(site.ID(), site.Name())

 	bpc := inject.BackupProducerConfig{
 		LastBackupVersion: version.NoBackup,
 		Options:           control.DefaultOptions(),
-		ProtectedResource: suite.m365.Site.Provider,
+		ProtectedResource: site,
 		Selector:          sel.Selector,
 	}
@@ -378,40 +407,21 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
 		fault.New(true))
 	require.NoError(t, err, clues.ToCore(err))
 	assert.True(t, canUsePreviousBackup, "can use previous backup")
+	require.Len(t, cols, 2) // 1 collection, 1 path prefix directory to ensure the root path exists.

-	var (
-		hasDocumentsColl bool
-		hasMetadataColl  bool
-	)
-
-	documentsColl, err := path.BuildPrefix(
-		suite.m365.TenantID,
-		suite.m365.Site.ID,
-		path.SharePointService,
-		path.LibrariesCategory)
-	require.NoError(t, err, clues.ToCore(err))
-
-	metadataColl, err := path.BuildMetadata(
-		suite.m365.TenantID,
-		suite.m365.Site.ID,
-		path.SharePointService,
-		path.LibrariesCategory,
-		false)
-	require.NoError(t, err, clues.ToCore(err))
-
-	for i, col := range cols {
-		fp := col.FullPath()
-		t.Logf("Collection %d: %s", i, fp)
-
-		hasDocumentsColl = hasDocumentsColl || fp.Equal(documentsColl)
-		hasMetadataColl = hasMetadataColl || fp.Equal(metadataColl)
-	}
-
-	require.Truef(t, hasDocumentsColl, "found documents collection %s", documentsColl)
-	require.Truef(t, hasMetadataColl, "found metadata collection %s", metadataColl)
-
 	// No excludes yet as this isn't an incremental backup.
 	assert.True(t, excludes.Empty())
+
+	t.Logf("cols[0] Path: %s\n", cols[0].FullPath().String())
+	assert.Equal(
+		t,
+		path.SharePointMetadataService.String(),
+		cols[0].FullPath().Service().String())
+
+	t.Logf("cols[1] Path: %s\n", cols[1].FullPath().String())
+	assert.Equal(
+		t,
+		path.SharePointService.String(),
+		cols[1].FullPath().Service().String())
 }
@@ -420,19 +430,24 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	ctrl := newController(ctx, t, path.SharePointService)
+	var (
+		siteID  = tconfig.M365SiteID(t)
+		ctrl    = newController(ctx, t, path.SharePointService)
+		siteIDs = []string{siteID}
+	)

-	_, err := ctrl.PopulateProtectedResourceIDAndName(ctx, suite.m365.Site.ID, nil)
+	site, err := ctrl.PopulateProtectedResourceIDAndName(ctx, siteID, nil)
 	require.NoError(t, err, clues.ToCore(err))

-	sel := selectors.NewSharePointBackup([]string{suite.m365.Site.ID})
+	sel := selectors.NewSharePointBackup(siteIDs)
 	sel.Include(sel.Lists(selectors.Any()))
-	sel.SetDiscreteOwnerIDName(suite.m365.Site.ID, suite.m365.Site.WebURL)
+
+	sel.SetDiscreteOwnerIDName(site.ID(), site.Name())

 	bpc := inject.BackupProducerConfig{
 		LastBackupVersion: version.NoBackup,
 		Options:           control.DefaultOptions(),
-		ProtectedResource: suite.m365.Site.Provider,
+		ProtectedResource: site,
 		Selector:          sel.Selector,
 	}
@@ -467,7 +482,9 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {

 type GroupsCollectionIntgSuite struct {
 	tester.Suite
-	m365 its.M365IntgTestSetup
+	connector *Controller
+	tenantID  string
+	user      string
 }

 func TestGroupsCollectionIntgSuite(t *testing.T) {
@@ -479,7 +496,21 @@ func TestGroupsCollectionIntgSuite(t *testing.T) {
 }

 func (suite *GroupsCollectionIntgSuite) SetupSuite() {
-	suite.m365 = its.GetM365(suite.T())
+	t := suite.T()
+
+	ctx, flush := tester.NewContext(t)
+	defer flush()
+
+	suite.connector = newController(ctx, t, path.GroupsService)
+	suite.user = tconfig.M365UserID(t)
+
+	acct := tconfig.NewM365Account(t)
+	creds, err := acct.M365Config()
+	require.NoError(t, err, clues.ToCore(err))
+
+	suite.tenantID = creds.AzureTenantID
+
+	tester.LogTimeOfTest(t)
 }

 func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint() {
@@ -488,19 +519,24 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	ctrl := newController(ctx, t, path.GroupsService)
+	var (
+		groupID  = tconfig.M365TeamID(t)
+		ctrl     = newController(ctx, t, path.GroupsService)
+		groupIDs = []string{groupID}
+	)

-	_, err := ctrl.PopulateProtectedResourceIDAndName(ctx, suite.m365.Group.ID, nil)
+	group, err := ctrl.PopulateProtectedResourceIDAndName(ctx, groupID, nil)
 	require.NoError(t, err, clues.ToCore(err))

-	sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
+	sel := selectors.NewGroupsBackup(groupIDs)
 	sel.Include(sel.LibraryFolders([]string{"test"}, selectors.PrefixMatch()))
-	sel.SetDiscreteOwnerIDName(suite.m365.Group.ID, suite.m365.Group.DisplayName)
+
+	sel.SetDiscreteOwnerIDName(group.ID(), group.Name())

 	bpc := inject.BackupProducerConfig{
 		LastBackupVersion: version.NoBackup,
 		Options:           control.DefaultOptions(),
-		ProtectedResource: suite.m365.Group.Provider,
+		ProtectedResource: group,
 		Selector:          sel.Selector,
 	}
@@ -519,8 +555,8 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint() {
 	assert.Greater(t, len(collections), 1)

 	p, err := path.BuildMetadata(
-		suite.m365.TenantID,
-		suite.m365.Group.ID,
+		suite.tenantID,
+		groupID,
 		path.GroupsService,
 		path.LibrariesCategory,
 		false)
@@ -558,23 +594,31 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	ctrl := newController(ctx, t, path.GroupsService)
+	var (
+		groupID  = tconfig.M365TeamID(t)
+		ctrl     = newController(ctx, t, path.GroupsService)
+		groupIDs = []string{groupID}
+	)

-	_, err := ctrl.PopulateProtectedResourceIDAndName(ctx, suite.m365.Group.ID, nil)
+	group, err := ctrl.PopulateProtectedResourceIDAndName(ctx, groupID, nil)
 	require.NoError(t, err, clues.ToCore(err))

-	sel := selectors.NewGroupsBackup([]string{suite.m365.Group.ID})
+	sel := selectors.NewGroupsBackup(groupIDs)
 	sel.Include(sel.LibraryFolders([]string{"test"}, selectors.PrefixMatch()))
-	sel.SetDiscreteOwnerIDName(suite.m365.Group.ID, suite.m365.Group.DisplayName)
+
+	sel.SetDiscreteOwnerIDName(group.ID(), group.Name())
+
+	site, err := suite.connector.AC.Groups().GetRootSite(ctx, groupID)
+	require.NoError(t, err, clues.ToCore(err))

 	pth, err := path.Build(
-		suite.m365.TenantID,
-		suite.m365.Group.ID,
+		suite.tenantID,
+		groupID,
 		path.GroupsService,
 		path.LibrariesCategory,
 		true,
 		odConsts.SitesPathDir,
-		suite.m365.Group.RootSite.ID)
+		ptr.Val(site.GetId()))
 	require.NoError(t, err, clues.ToCore(err))

 	mmc := []data.RestoreCollection{
@@ -592,7 +636,7 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
 	bpc := inject.BackupProducerConfig{
 		LastBackupVersion:   version.NoBackup,
 		Options:             control.DefaultOptions(),
-		ProtectedResource:   suite.m365.Group.Provider,
+		ProtectedResource:   group,
 		Selector:            sel.Selector,
 		MetadataCollections: mmc,
 	}
@@ -612,8 +656,8 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
 	assert.Greater(t, len(collections), 1)

 	p, err := path.BuildMetadata(
-		suite.m365.TenantID,
-		suite.m365.Group.ID,
+		suite.tenantID,
+		groupID,
 		path.GroupsService,
 		path.LibrariesCategory,
 		false)
@@ -626,13 +670,13 @@ func (suite *GroupsCollectionIntgSuite) TestCreateGroupsCollection_SharePoint_In
 	foundRootTombstone := false

 	sp, err := path.BuildPrefix(
-		suite.m365.TenantID,
-		suite.m365.Group.ID,
+		suite.tenantID,
+		groupID,
 		path.GroupsService,
 		path.LibrariesCategory)
 	require.NoError(t, err, clues.ToCore(err))

-	sp, err = sp.Append(false, odConsts.SitesPathDir, suite.m365.Group.RootSite.ID)
+	sp, err = sp.Append(false, odConsts.SitesPathDir, ptr.Val(site.GetId()))
 	require.NoError(t, err, clues.ToCore(err))

 	for _, coll := range collections {
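A pattern note on this test file: main's side routes all environment lookups through a single its.M365IntgTestSetup fixture, where this branch resolves the user, site, tenant, and client in each suite. The rough shape of that fixture, reconstructed only from the fields the old side dereferences; the real definition lives in internal/tester/its and certainly differs in detail:

    // Hypothetical reconstruction, for orientation only.
    type M365IntgTestSetup struct {
    	TenantID string
    	AC       api.Client
    	Creds    account.M365Config

    	User struct {
    		ID       string
    		Provider idname.Provider // used directly as bpc.ProtectedResource
    	}

    	Site struct {
    		ID       string
    		WebURL   string
    		Provider idname.Provider
    	}

    	Group struct {
    		ID          string
    		DisplayName string
    		RootSite    struct{ ID string }
    		Provider    idname.Provider
    	}
    }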
@ -16,6 +16,7 @@ import (
|
|||||||
"github.com/alcionai/corso/src/internal/common/idname"
|
"github.com/alcionai/corso/src/internal/common/idname"
|
||||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||||
"github.com/alcionai/corso/src/internal/data"
|
"github.com/alcionai/corso/src/internal/data"
|
||||||
|
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
|
||||||
"github.com/alcionai/corso/src/internal/m365/support"
|
"github.com/alcionai/corso/src/internal/m365/support"
|
||||||
"github.com/alcionai/corso/src/internal/observe"
|
"github.com/alcionai/corso/src/internal/observe"
|
||||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||||
@ -28,7 +29,6 @@ import (
|
|||||||
"github.com/alcionai/corso/src/pkg/path"
|
"github.com/alcionai/corso/src/pkg/path"
|
||||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
|
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
|
||||||
"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
|
|
||||||
"github.com/alcionai/corso/src/pkg/services/m365/custom"
|
"github.com/alcionai/corso/src/pkg/services/m365/custom"
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -366,7 +366,7 @@ func downloadContent(
|
|||||||
itemID := ptr.Val(item.GetId())
|
itemID := ptr.Val(item.GetId())
|
||||||
ctx = clues.Add(ctx, "item_id", itemID)
|
ctx = clues.Add(ctx, "item_id", itemID)
|
||||||
|
|
||||||
content, err := downloadItem(ctx, iaag, driveID, item)
|
content, err := downloadItem(ctx, iaag, item)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
return content, nil
|
return content, nil
|
||||||
} else if !graph.IsErrUnauthorizedOrBadToken(err) {
|
} else if !graph.IsErrUnauthorizedOrBadToken(err) {
|
||||||
@ -395,7 +395,7 @@ func downloadContent(
|
|||||||
|
|
||||||
cdi := custom.ToCustomDriveItem(di)
|
cdi := custom.ToCustomDriveItem(di)
|
||||||
|
|
||||||
content, err = downloadItem(ctx, iaag, driveID, cdi)
|
content, err = downloadItem(ctx, iaag, cdi)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, clues.Wrap(err, "content download retry")
|
return nil, clues.Wrap(err, "content download retry")
|
||||||
}
|
}
|
||||||
@ -426,7 +426,7 @@ func readItemContents(
|
|||||||
return nil, core.ErrNotFound
|
return nil, core.ErrNotFound
|
||||||
}
|
}
|
||||||
|
|
||||||
rc, err := downloadFile(ctx, iaag, props.downloadURL, false)
|
rc, err := downloadFile(ctx, iaag, props.downloadURL)
|
||||||
if graph.IsErrUnauthorizedOrBadToken(err) {
|
if graph.IsErrUnauthorizedOrBadToken(err) {
|
||||||
logger.CtxErr(ctx, err).Debug("stale item in cache")
|
logger.CtxErr(ctx, err).Debug("stale item in cache")
|
||||||
}
|
}
|
||||||
|
|||||||
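The downloadContent hunks above all orbit one retry path: a drive item's pre-authenticated download URL can go stale, so an unauthorized/bad-token failure is handled by refetching the item (which yields a fresh URL) and downloading once more, while any other error propagates immediately. A self-contained sketch of that control flow, with hypothetical helpers standing in for the real downloadItem/GetItem calls:

    package main

    import (
    	"context"
    	"errors"
    	"fmt"
    )

    var errStaleToken = errors.New("401: token expired")

    // downloadWithRefresh retries exactly once, and only for auth-shaped errors.
    func downloadWithRefresh(
    	ctx context.Context,
    	download func(context.Context) ([]byte, error),
    	refresh func(context.Context) error,
    	isAuthErr func(error) bool,
    ) ([]byte, error) {
    	content, err := download(ctx)
    	if err == nil {
    		return content, nil
    	} else if !isAuthErr(err) {
    		// Not a stale-token failure; retrying would not help.
    		return nil, err
    	}

    	// Refetch the item to obtain a fresh download URL, then retry once.
    	if err := refresh(ctx); err != nil {
    		return nil, err
    	}

    	return download(ctx)
    }

    func main() {
    	calls := 0
    	download := func(context.Context) ([]byte, error) {
    		calls++
    		if calls == 1 {
    			return nil, errStaleToken
    		}
    		return []byte("file contents"), nil
    	}
    	refresh := func(context.Context) error { return nil }
    	isAuth := func(err error) bool { return errors.Is(err, errStaleToken) }

    	content, err := downloadWithRefresh(context.Background(), download, refresh, isAuth)
    	fmt.Println(string(content), err) // file contents <nil>
    }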
@ -21,7 +21,7 @@ import (
 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/common/readers"
 	"github.com/alcionai/corso/src/internal/data"
-	odmetadata "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
+	"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 	metaTD "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata/testdata"
 	odTD "github.com/alcionai/corso/src/internal/m365/service/onedrive/testdata"
 	"github.com/alcionai/corso/src/internal/m365/support"
@ -34,7 +34,6 @@ import (
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
-	"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
 	"github.com/alcionai/corso/src/pkg/services/m365/custom"
 )
 
@ -74,13 +73,13 @@ func (suite *CollectionUnitSuite) TestCollection() {
 		stubMetaID       = "testMetaID"
 		stubMetaEntityID = "email@provider.com"
 		stubMetaRoles    = []string{"read", "write"}
-		stubMeta         = odmetadata.Metadata{
+		stubMeta         = metadata.Metadata{
 			FileName: stubItemName,
-			Permissions: []odmetadata.Permission{
+			Permissions: []metadata.Permission{
 				{
 					ID:         stubMetaID,
 					EntityID:   stubMetaEntityID,
-					EntityType: odmetadata.GV2User,
+					EntityType: metadata.GV2User,
 					Roles:      stubMetaRoles,
 					Expiration: &now,
 				},
@ -209,7 +208,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
 			mbh.GetErrs = []error{test.getErr}
 			mbh.GI = getsItem{Err: assert.AnError}
 
-			pcr := metaTD.NewStubPermissionResponse(odmetadata.GV2User, stubMetaID, stubMetaEntityID, stubMetaRoles)
+			pcr := metaTD.NewStubPermissionResponse(metadata.GV2User, stubMetaID, stubMetaEntityID, stubMetaRoles)
 			mbh.GIP = getsItemPermission{Perm: pcr}
 
 			coll, err := NewCollection(
@ -295,7 +294,7 @@ func (suite *CollectionUnitSuite) TestCollection() {
 			assert.Equal(t, readers.DefaultSerializationVersion, rr.Format().Version)
 			assert.False(t, rr.Format().DelInFlight)
 
-			readMeta := odmetadata.Metadata{}
+			readMeta := metadata.Metadata{}
 			err = json.NewDecoder(rr).Decode(&readMeta)
 			require.NoError(t, err, clues.ToCore(err))
 
@ -14,6 +14,7 @@ import (
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 	odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
 	"github.com/alcionai/corso/src/internal/m365/support"
 	bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
@ -25,7 +26,6 @@ import (
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 	"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
-	"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
 	"github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
 	"github.com/alcionai/corso/src/pkg/services/m365/custom"
 )
@ -285,7 +285,7 @@ func (c *Collections) Get(
 	globalExcludeItemIDs *prefixmatcher.StringSetMatchBuilder,
 	errs *fault.Bus,
 ) ([]data.BackupCollection, bool, error) {
-	if !c.ctrl.ToggleFeatures.UseOldDeltaProcess {
+	if c.ctrl.ToggleFeatures.UseDeltaTree {
 		colls, canUsePrevBackup, err := c.getTree(ctx, prevMetadata, globalExcludeItemIDs, errs)
 
 		return colls,
@ -2634,9 +2634,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
 				tenant,
 				idname.NewProvider(user, user),
 				func(*support.ControllerOperationStatus) {},
-				control.Options{ToggleFeatures: control.Toggles{
-					UseOldDeltaProcess: true,
-				}},
+				control.Options{ToggleFeatures: control.Toggles{}},
 				count.New())
 
 			prevDelta := "prev-delta"
@ -9,9 +9,9 @@ import (
 	"golang.org/x/exp/maps"
 
 	"github.com/alcionai/corso/src/internal/common/ptr"
+	"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
 	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/path"
-	"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
 	"github.com/alcionai/corso/src/pkg/services/m365/custom"
 )
 
Some files were not shown because too many files have changed in this diff.