Compare commits


No commits in common. "main" and "v0.18.0" have entirely different histories.

409 changed files with 6077 additions and 31389 deletions

View File

@ -1,5 +1,4 @@
name: Backup Restore Test
description: Run various backup/restore/export tests for a service.
inputs:
service:
@ -19,10 +18,6 @@ inputs:
description: Arguments to pass for restore; restore is skipped when missing.
required: false
default: ""
export-args:
description: Arguments to pass for export.
required: false
default: ""
restore-container:
description: Folder to use for testing
required: true
@ -37,9 +32,6 @@ inputs:
description: Runs export tests when true
required: false
default: false
category:
description: category of data for given service
required: false
outputs:
backup-id:
@ -57,9 +49,7 @@ runs:
echo Backup ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-backup-${{inputs.kind }}.log
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-backup-${{inputs.kind }}.log
./corso backup create '${{ inputs.service }}' \
--no-stats --hide-progress --json \
${{ inputs.backup-args }} |
@ -78,9 +68,7 @@ runs:
echo Restore ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-restore-${{inputs.kind }}.log
./corso restore '${{ inputs.service }}' \
--no-stats \
--hide-progress \
@ -103,17 +91,11 @@ runs:
SANITY_TEST_RESTORE_CONTAINER: ${{ steps.restore.outputs.result }}
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
SANITY_BACKUP_ID: ${{ inputs.backup-id }}
# lists are not restored to a different folder. they get created adjacent to their originals
# hence SANITY_TEST_RESTORE_CONTAINER_PREFIX is necessary to differentiate restored from original
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
SANITY_TEST_CATEGORY: ${{ inputs.category }}
run: |
echo "---------------------------"
echo Sanity Test Restore ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-validate-${{inputs.kind }}.log
./sanity-test restore ${{ inputs.service }}
- name: Export ${{ inputs.service }} ${{ inputs.kind }}
@ -126,11 +108,9 @@ runs:
echo Export ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-restore-${{inputs.kind }}.log
./corso export '${{ inputs.service }}' \
/tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }} \
/tmp/export-${{ inputs.service }}-${{inputs.kind }} \
--no-stats \
--hide-progress \
${{ inputs.export-args }} \
@ -143,19 +123,14 @@ runs:
shell: bash
working-directory: src
env:
SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}${{ inputs.category && '-' }}${{ inputs.category }}-${{ inputs.kind }}
SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}-${{inputs.kind }}
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
SANITY_BACKUP_ID: ${{ inputs.backup-id }}
# applies only for sharepoint lists
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
SANITY_TEST_CATEGORY: ${{ inputs.category }}
run: |
echo "---------------------------"
echo Sanity-Test Export ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-validate-${{inputs.kind }}.log
./sanity-test export ${{ inputs.service }}
- name: Export archive ${{ inputs.service }} ${{ inputs.kind }}
@ -168,19 +143,17 @@ runs:
echo Export Archive ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-restore-${{inputs.kind }}.log
./corso export '${{ inputs.service }}' \
/tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-archive \
/tmp/export-${{ inputs.service }}-${{inputs.kind }}-archive \
--no-stats \
--hide-progress \
--archive \
${{ inputs.export-args }} \
--backup '${{ steps.backup.outputs.result }}'
unzip /tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-archive/*.zip \
-d /tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-unzipped
unzip /tmp/export-${{ inputs.service }}-${{inputs.kind }}-archive/*.zip \
-d /tmp/export-${{ inputs.service }}-${{inputs.kind }}-unzipped
cat /tmp/corsologs
- name: Check archive export ${{ inputs.service }} ${{ inputs.kind }}
@ -188,19 +161,14 @@ runs:
shell: bash
working-directory: src
env:
SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}${{ inputs.category && '-' }}${{ inputs.category }}-${{inputs.kind }}-unzipped
SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}-${{inputs.kind }}-unzipped
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
SANITY_BACKUP_ID: ${{ inputs.backup-id }}
# applies only for sharepoint lists
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
SANITY_TEST_CATEGORY: ${{ inputs.category }}
run: |
echo "---------------------------"
echo Sanity-Test Export Archive ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}-validate-${{inputs.kind }}.log
./sanity-test export ${{ inputs.service }}
- name: List ${{ inputs.service }} ${{ inputs.kind }}
@ -211,9 +179,7 @@ runs:
echo Backup list ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-${{ inputs.service }}${CATEGORY_SUFFIX}-list-${{inputs.kind }}.log
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-${{ inputs.service }}-list-${{inputs.kind }}.log
./corso backup list ${{ inputs.service }} \
--no-stats \
--hide-progress \
@ -234,10 +200,7 @@ runs:
echo Backup List w/ Backup ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail
# Include category in the log file name if present
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-${{ inputs.service }}${CATEGORY_SUFFIX}-single-${{inputs.kind }}.log
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-${{ inputs.service }}-single-${{inputs.kind }}.log
./corso backup list ${{ inputs.service }} \
--no-stats \
--hide-progress \

View File

@ -1,5 +1,4 @@
name: Setup and Cache Golang
description: Build golang binaries for later use in CI.
# clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml
#

View File

@ -1,5 +1,4 @@
name: Publish Binary
description: Publish binary artifacts.
inputs:
version:

View File

@ -1,5 +1,4 @@
name: Publish Website
description: Publish website artifacts.
inputs:
aws-iam-role:

View File

@ -1,5 +1,4 @@
name: Purge M365 User Data
description: Deletes M365 data generated during CI tests.
# Hard deletion of an m365 user's data. Our CI processes create a lot
# of data churn (creation and immediate deletion) of files, the likes
@ -31,19 +30,12 @@ inputs:
description: Secret value of for AZURE_CLIENT_ID
azure-client-secret:
description: Secret value of for AZURE_CLIENT_SECRET
azure-pnp-client-id:
description: Secret value of AZURE_PNP_CLIENT_ID
azure-pnp-client-cert:
description: Base64 encoded private certificate for the azure-pnp-client-id (Secret value of AZURE_PNP_CLIENT_CERT)
azure-tenant-id:
description: Secret value of AZURE_TENANT_ID
description: Secret value of for AZURE_TENANT_ID
m365-admin-user:
description: Secret value of for M365_TENANT_ADMIN_USER
m365-admin-password:
description: Secret value of for M365_TENANT_ADMIN_PASSWORD
tenant-domain:
description: The domain of the tenant (ex. 10rqc2.onmicrosoft.com)
required: true
runs:
using: composite
@ -61,13 +53,7 @@ runs:
AZURE_CLIENT_ID: ${{ inputs.azure-client-id }}
AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
run: ./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
# TODO(ashmrtn): Re-enable when we figure out errors we're seeing with Get-Mailbox call.
#- name: Reset retention for all mailboxes to 0
@ -88,16 +74,10 @@ runs:
shell: pwsh
working-directory: ./src/cmd/purge/scripts
env:
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
################################################################################################################
# Sharepoint
@ -108,14 +88,6 @@ runs:
shell: pwsh
working-directory: ./src/cmd/purge/scripts
env:
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
run: ./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}

View File

@ -1,5 +1,4 @@
name: Send a message to Teams
description: Send messages to communication apps.
inputs:
msg:

View File

@ -1,5 +1,4 @@
name: Lint Website
description: Lint website content.
inputs:
version:

View File

@ -28,7 +28,7 @@ jobs:
# only run CI tests if the src folder or workflow actions have changed
- name: Check for file changes in src/ or .github/workflows/
uses: dorny/paths-filter@v3
uses: dorny/paths-filter@v2
id: dornycheck
with:
list-files: json

View File

@ -40,5 +40,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Publishing Binary"
msg: "[FAILED] Publishing Binary"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -463,7 +463,7 @@ jobs:
go-version-file: src/go.mod
- name: Go Lint
uses: golangci/golangci-lint-action@v4
uses: golangci/golangci-lint-action@v3
with:
# Keep pinned to a verson as sometimes updates will add new lint
# failures in unchanged code.
@ -492,8 +492,8 @@ jobs:
# I could not find a way to install tree-grepper without nix
# https://github.com/BrianHicks/tree-grepper/issues/293
- uses: cachix/install-nix-action@v25
- uses: cachix/cachix-action@v14
- uses: cachix/install-nix-action@v24
- uses: cachix/cachix-action@v13
with:
name: tree-grepper
- run: nix-env -if https://github.com/BrianHicks/tree-grepper/archive/refs/heads/main.tar.gz
@ -511,27 +511,6 @@ jobs:
echo "Use len check instead of empty string comparison"
exit 1
fi
- name: Check for cases where errors are not propagated
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '((if_statement (binary_expression) @_if (block (return_statement (expression_list (call_expression (selector_expression) @_fun ) @ret .)))) (#match? @_if "err != nil") (#match? @_fun "clues.NewWC"))' | grep .; then
echo "Make sure to propagate errors with clues"
exit 1
fi
- name: Check if clues without context are used when context is passed in
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#match? @clue "WC$"))' | grep .; then
echo "Do not use clues.*WC when context is passed in"
exit 1
fi
- name: Check clues with context is used when context is not passed in
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#not-eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#not-match? @clue "WC$"))' | grep .; then
echo "Use clues.*WC when context is not passed in"
exit 1
fi
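The three removed lint steps above all rely on the shell idiom called out in their `grep .` comments: the query tool exits zero whenever the query grammar is valid, regardless of whether anything matched, so the step pipes its output through `grep .`, which succeeds only when at least one line was printed. A minimal sketch of that pattern, using a placeholder `query-tool` command rather than the real tree-grepper invocation:

```bash
#!/usr/bin/env bash
# Hypothetical example of the "fail the step when a query prints anything"
# pattern. `query-tool` is a stand-in: assume it always exits 0, so its exit
# code cannot gate the step. `grep .` exits 0 only if it saw any output.
if query-tool --pattern 'forbidden construct' ./... | grep .; then
  echo "Found a forbidden construct; see the matches above"
  exit 1
fi
```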
# ----------------------------------------------------------------------------------------------------
# --- GitHub Actions Linting -------------------------------------------------------------------------

View File

@ -12,7 +12,7 @@ jobs:
continue-on-error: true
strategy:
matrix:
user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, ""]
user: [ CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, '' ]
steps:
- uses: actions/checkout@v4
@ -33,15 +33,12 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] ${{ vars[matrix.user] }} CI Cleanup"
msg: "[FAILED] ${{ vars[matrix.user] }} CI Cleanup"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
Test-Site-Data-Cleanup:
@ -50,7 +47,7 @@ jobs:
continue-on-error: true
strategy:
matrix:
site: [CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL]
site: [ CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL ]
steps:
- uses: actions/checkout@v4
@ -73,13 +70,10 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] ${{ vars[matrix.site] }} CI Cleanup"
msg: "[FAILED] ${{ vars[matrix.site] }} CI Cleanup"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -155,6 +155,3 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}

View File

@ -6,7 +6,7 @@ on:
workflow_dispatch:
inputs:
user:
description: "User to run longevity test on"
description: 'User to run longevity test on'
permissions:
# required to retrieve AWS credentials
@ -23,7 +23,7 @@ jobs:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
Longevity-Tests:
needs: [SetM365App]
needs: [ SetM365App ]
environment: Testing
runs-on: ubuntu-latest
env:
@ -37,7 +37,7 @@ jobs:
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-longevity.log
RESTORE_DEST_PFX: Corso_Test_Longevity_
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
PREFIX: "longevity"
PREFIX: 'longevity'
# Options for retention.
RETENTION_MODE: GOVERNANCE
@ -113,6 +113,7 @@ jobs:
--extend-retention \
--prefix ${{ env.PREFIX }} \
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
--succeed-if-exists \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
if grep -q 'Failed to' ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
@ -392,5 +393,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Longevity Test"
msg: "[FAILED] Longevity Test"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -48,7 +48,7 @@ jobs:
# ----------------------------------------------------------------------------------------------------
Test-Suite-Trusted:
needs: [Checkout, SetM365App]
needs: [ Checkout, SetM365App]
environment: Testing
runs-on: ubuntu-latest
defaults:
@ -100,9 +100,9 @@ jobs:
-timeout 2h \
./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests
##########################################################################################################################################
##########################################################################################################################################
# Logging & Notifications
# Logging & Notifications
# Upload the original go test output as an artifact for later review.
- name: Upload test log
@ -118,5 +118,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[COROS FAILED] Nightly Checks"
msg: "[FAILED] Nightly Checks"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -19,7 +19,7 @@ jobs:
private_key: ${{ secrets.PRIVATE_KEY }}
- name: Slash Command Dispatch
uses: peter-evans/slash-command-dispatch@v4
uses: peter-evans/slash-command-dispatch@v3
env:
TOKEN: ${{ steps.generate_token.outputs.token }}
with:

View File

@ -6,7 +6,7 @@ on:
workflow_dispatch:
inputs:
user:
description: "User to run sanity test on"
description: 'User to run sanity test on'
permissions:
# required to retrieve AWS credentials
@ -23,7 +23,7 @@ jobs:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
Sanity-Tests:
needs: [SetM365App]
needs: [ SetM365App ]
environment: Testing
runs-on: ubuntu-latest
env:
@ -44,10 +44,11 @@ jobs:
run:
working-directory: src
##########################################################################################################################################
##########################################################################################################################################
# setup
# setup
steps:
- uses: actions/checkout@v4
- name: Setup Golang with cache
@ -63,9 +64,9 @@ jobs:
- run: mkdir ${CORSO_LOG_DIR}
##########################################################################################################################################
##########################################################################################################################################
# Pre-Run cleanup
# Pre-Run cleanup
# unlike CI tests, sanity tests are not expected to run concurrently.
# however, the sanity yaml concurrency is set to a maximum of 1 run, preferring
@ -90,9 +91,6 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Purge CI-Produced Folders for Sites
timeout-minutes: 30
@ -108,13 +106,10 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
##########################################################################################################################################
##########################################################################################################################################
# Repository commands
# Repository commands
- name: Version Test
timeout-minutes: 10
@ -174,9 +169,9 @@ jobs:
--mode complete \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-maintenance.log
##########################################################################################################################################
##########################################################################################################################################
# Exchange
# Exchange
# generate new entries to roll into the next load test
# only runs if the test was successful
@ -198,8 +193,8 @@ jobs:
service: exchange
kind: first-backup
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@ -211,8 +206,8 @@ jobs:
service: exchange
kind: incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@ -225,8 +220,8 @@ jobs:
service: exchange
kind: non-delta
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@ -239,15 +234,16 @@ jobs:
service: exchange
kind: non-delta-incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}"
restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
# Onedrive
##########################################################################################################################################
# Onedrive
# generate new entries for test
- name: OneDrive - Create new data
@ -274,8 +270,8 @@ jobs:
service: onedrive
kind: first-backup
backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@ -299,14 +295,14 @@ jobs:
service: onedrive
kind: incremental
backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}"
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
##########################################################################################################################################
# Sharepoint Library
# Sharepoint
# generate new entries for test
- name: SharePoint - Create new data
@ -333,12 +329,11 @@ jobs:
with:
service: sharepoint
kind: first-backup
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}"'
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: libraries
# generate some more entries for incremental check
- name: SharePoint - Create new data (for incremental)
@ -360,107 +355,15 @@ jobs:
with:
service: sharepoint
kind: incremental
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}"
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}"'
restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: libraries
##########################################################################################################################################
##########################################################################################################################################
# Sharepoint Lists
# generate new entries for test
# The `awk | tr | sed` command chain is used to get a comma separated list of SharePoint list names.
- name: SharePoint Lists - Create new data
id: new-data-creation-sharepoint-lists
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint lists \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4 |
awk 'NR > 1 {print $2}' | tr '\n' ',' | sed -e 's/,$//' -e 's/^/result=/' |
tee $GITHUB_OUTPUT
# Extracts the common prefix for the Sharepoint list names.
- name: SharePoint Lists - Store restore container
id: sharepoint-lists-store-restore-container
run: |
echo ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} |
cut -d',' -f1 |
cut -d'_' -f1,2,3,4,5 |
sed -e 's/^/result=/' |
tee $GITHUB_OUTPUT
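The two steps above lean on plain text processing; a small sketch of both chains run against made-up factory output (the real output format, prefix, and list names may differ) shows what ends up in `$GITHUB_OUTPUT`:

```bash
#!/usr/bin/env bash
# Made-up example of what the factory command might print: a header line
# followed by "<site> <list-name>" rows. Only the shape matters here.
sample=$'Created lists:\nsiteA Corso_Test_Sanity_2024-01-01_12-00-00_alpha\nsiteA Corso_Test_Sanity_2024-01-01_12-00-00_beta'

# Chain 1: keep column 2 of every row after the header, join the names with
# commas, drop the trailing comma, and prefix "result=" for $GITHUB_OUTPUT.
names=$(echo "$sample" | awk 'NR > 1 {print $2}' | tr '\n' ',' | sed -e 's/,$//' -e 's/^/result=/')
echo "$names"
# result=Corso_Test_Sanity_2024-01-01_12-00-00_alpha,Corso_Test_Sanity_2024-01-01_12-00-00_beta

# Chain 2: take the first name and keep its first five underscore-separated
# fields, which is the prefix shared by every generated list.
echo "${names#result=}" | cut -d',' -f1 | cut -d'_' -f1,2,3,4,5 | sed -e 's/^/result=/'
# result=Corso_Test_Sanity_2024-01-01_12-00-00
```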
- name: SharePoint Lists - Backup
id: sharepoint-lists-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: sharepoint
kind: first-backup-lists
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
restore-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
export-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
restore-container: "${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: lists
on-collision: copy
# generate some more entries for incremental check
- name: SharePoint Lists - Create new data (for incremental)
id: inc-data-creation-sharepoint-lists
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint lists \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4 |
awk 'NR > 1 {print $2}' | tr '\n' ',' | sed -e 's/,$//' -e 's/^/result=/' |
tee $GITHUB_OUTPUT
- name: SharePoint Lists - Store restore container (for incremental)
id: sharepoint-lists-store-restore-container-inc
run: |
echo ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }} |
cut -d',' -f1 |
cut -d'_' -f1,2,3,4,5 |
sed -e 's/^/result=/' |
tee $GITHUB_OUTPUT
- name: SharePoint Lists - Incremental backup
id: sharepoint-lists-incremental
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: sharepoint
kind: incremental-lists
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
restore-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
export-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
restore-container: "${{ steps.sharepoint-lists-store-restore-container-inc.outputs.result }},${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: lists
on-collision: copy
##########################################################################################################################################
# Groups and Teams
# Groups and Teams
# generate new entries for test
- name: Groups - Create new data
@ -487,8 +390,8 @@ jobs:
with:
service: groups
kind: first-backup
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
@ -512,15 +415,15 @@ jobs:
with:
service: groups
kind: incremental
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
restore-args: '--site "${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }}" --folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
restore-container: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
##########################################################################################################################################
# Logging & Notifications
# Logging & Notifications
# Upload the original go test output as an artifact for later review.
- name: Upload test log
@ -536,5 +439,5 @@ jobs:
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Sanity Tests"
msg: "[FAILED] Sanity Tests"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -6,63 +6,18 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased] (beta)
### Fixed
- Handle the case where an email or event cannot be retrieved from Exchange due to an `ErrorCorruptData` error. Corso will skip over the item but report it in the backup summary.
- Emails attached within other emails are now correctly exported
- Gracefully handle email and post attachments without name when exporting to eml
- Use correct timezone for event start and end times in Exchange exports (helps fix issues in relative recurrence patterns)
- Fixed an issue causing exports dealing with calendar data to have high memory usage
## [v0.19.0] (beta) - 2024-02-06
### Added
- Events can now be exported from Exchange backups as .ics files.
- Update repo init configuration to reduce the total number of GET requests sent
to the object store when using corso. This affects repos that have many
backups created in them per day the most.
- Feature Preview: Corso now supports backup, export & restore of SharePoint lists. Lists backup can be initiated using `corso backup create sharepoint --site <site-url> --data lists`.
- Group mailbox(aka conversations) backup and export support is now officially available. Group mailbox posts can be exported as `.eml` files.
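For the SharePoint lists preview noted above, a hedged usage sketch: the backup command matches the changelog entry, the restore and export flags mirror the sanity-test workflow shown earlier in this compare, and the site URL, list names, destination, and backup ID are placeholders.

```bash
# Placeholder sketch of the SharePoint lists preview; names and IDs invented.
./corso backup create sharepoint \
  --site "https://contoso.sharepoint.com/sites/demo" \
  --data lists

# Restore two lists from a backup into a timestamped destination.
./corso restore sharepoint \
  --backup 1234abcd-12ab-cd34-56de-1234abcd \
  --list ListA,ListB \
  --destination Corso_Restore_$(date +'%Y%m%d_%H%M%S')

# Export the same lists to a local folder.
./corso export sharepoint /tmp/lists-export \
  --backup 1234abcd-12ab-cd34-56de-1234abcd \
  --list ListA,ListB
```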
### Fixed
- Retry transient 400 "invalidRequest" errors during onedrive & sharepoint backup.
- Backup attachments associated with group mailbox items.
- Groups and Teams backups no longer fail when a resource has no display name.
- Contacts in-place restore failed if the restore destination was empty.
- Link shares with external users are now backed up and restored as expected
- Ensure persistent repo config is populated on repo init if repo init failed partway through during the previous init attempt.
### Changed
- When running `backup details` on an empty backup returns a more helpful error message.
- Backup List additionally shows the data category for each backup.
- Remove hidden `--succeed-if-exists` flag for repo init. Repo init will now succeed without error if run on an existing repo with the same passphrase.
### Known issues
- Backing up a group mailbox item may fail if it has a very large number of attachments (500+).
- Event description for exchange exports might look slightly different for certain events.
- Exchange in-place restore may restore items in well-known folders to different folders if the user has well-known folder names change based on locale and has updated the locale since the backup was created.
- In-place Exchange contacts restore will merge items in folders named "Contacts" or "contacts" into the default folder.
- External users with access through shared links will not receive these links as they are not sent via email during restore.
- Graph API has limited support for certain column types such as `location`, `hyperlink/picture`, and `metadata`. Restoring SharePoint list items containing these columns will result in differences compared to the original items.
- SharePoint list item attachments are not available due to graph API limitations.
- Group mailbox restore is not supported due to limited Graph API support for creating mailbox items.
- Due to Graph API limitations, any group mailbox items present in subfolders other than Inbox aren't backed up.
## [v0.18.0] (beta) - 2024-01-02
### Fixed
- Handle the case where an email cannot be retrieved from Exchange due to an `ErrorInvalidRecipients` error. In
this case, Corso will skip over the item but report this in the backup summary.
- Fix `ErrorItemNotFound` errors when restoring emails with multiple attachments.
- Guarantee Exchange email restoration when restoring multiple attachments. Some previous restores were failing with `ErrorItemNotFound`.
- Avoid Graph SDK `Requests must contain extension changes exclusively.` errors by removing server-populated field from restored event items.
- Improve Group mailbox(conversations) backup performance by only downloading new items or items with modified content.
- Handle cases where Exchange backup stored invalid JSON blobs if there were special characters in the user content. These would result in errors during restore.
- Handle cases where Exchange backup stored invalid JSON blobs if there were special characters in the user content. These would result in errors during restore or restore errors.
### Known issues
- Restoring OneDrive, SharePoint, or Teams & Groups items shared with external users while the tenant or site is configured to not allow sharing with external users will not restore permissions.
### Added
- Contacts can now be exported from Exchange backups as .vcf files
## [v0.17.0] (beta) - 2023-12-11
### Changed
@ -502,9 +457,7 @@ this case, Corso will skip over the item but report this in the backup summary.
- Miscellaneous
- Optional usage statistics reporting ([RM-35](https://github.com/alcionai/corso-roadmap/issues/35))
[Unreleased]: https://github.com/alcionai/corso/compare/v0.19.0...HEAD
[v0.19.0]: https://github.com/alcionai/corso/compare/v0.18.0...v0.19.0
[v0.18.0]: https://github.com/alcionai/corso/compare/v0.17.0...v0.18.0
[Unreleased]: https://github.com/alcionai/corso/compare/v0.17.0...HEAD
[v0.17.0]: https://github.com/alcionai/corso/compare/v0.16.0...v0.17.0
[v0.16.0]: https://github.com/alcionai/corso/compare/v0.15.0...v0.16.0
[v0.15.0]: https://github.com/alcionai/corso/compare/v0.14.0...v0.15.0

View File

@ -1,6 +1,3 @@
> [!NOTE]
> **The Corso project is no longer actively maintained and has been archived**.
<p align="center">
<img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
</p>

View File

@ -4,7 +4,6 @@ run:
linters:
enable:
- errcheck
- exhaustive
- forbidigo
- gci
- gofmt
@ -26,11 +25,6 @@ linters:
- staticcheck
linters-settings:
exhaustive:
check:
- switch
default-signifies-exhaustive: false
explicit-exhaustive-switch: true
gci:
sections:
- standard

View File

@ -2,6 +2,7 @@ package backup
import (
"context"
"encoding/json"
"fmt"
"strings"
@ -19,16 +20,14 @@ import (
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/errs/core"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/store"
)
var ErrEmptyBackup = clues.New("no items in backup")
// ---------------------------------------------------------------------------
// adding commands to cobra
// ---------------------------------------------------------------------------
@ -45,7 +44,6 @@ var serviceCommands = []func(cmd *cobra.Command) *cobra.Command{
addOneDriveCommands,
addSharePointCommands,
addGroupsCommands,
addTeamsChatsCommands,
}
// AddCommands attaches all `corso backup * *` commands to the parent.
@ -190,47 +188,45 @@ func genericCreateCommand(
ictx = clues.Add(ctx, "resource_owner_selected", owner)
)
logger.Ctx(ictx).Infof("setting up backup")
bo, err := r.NewBackupWithLookup(ictx, discSel, ins)
if err != nil {
cerr := clues.WrapWC(ictx, err, owner)
errs = append(errs, cerr)
Errf(
ictx,
"%s\nCause: %s",
"Unable to initiate backup",
err.Error())
meta, err := json.Marshal(cerr.Core().Values)
if err != nil {
meta = []byte("Unable to marshal error metadata")
}
Errf(ictx, "%s\nMessage: %v\nMetadata:%s", "Unable to complete backup", err, meta)
continue
}
ictx = clues.Add(
ictx,
ctx,
"resource_owner_id", bo.ResourceOwner.ID(),
"resource_owner_name", clues.Hide(bo.ResourceOwner.Name()))
logger.Ctx(ictx).Infof("running backup")
"resource_owner_name", bo.ResourceOwner.Name())
err = bo.Run(ictx)
if err != nil {
if errors.Is(err, core.ErrServiceNotEnabled) {
logger.Ctx(ictx).Infow("service not enabled",
if errors.Is(err, graph.ErrServiceNotEnabled) {
logger.Ctx(ctx).Infow("service not enabled",
"resource_owner_id", bo.ResourceOwner.ID(),
"service", serviceName)
continue
}
cerr := clues.Wrap(err, owner)
cerr := clues.WrapWC(ictx, err, owner)
errs = append(errs, cerr)
Errf(
ictx,
"%s\nCause: %s",
"Unable to complete backup",
err.Error())
meta, err := json.Marshal(cerr.Core().Values)
if err != nil {
meta = []byte("Unable to marshal error metadata")
}
Errf(ictx, "%s\nMessage: %v\nMetadata:%s", "Unable to complete backup", err, meta)
continue
}
@ -238,10 +234,10 @@ func genericCreateCommand(
bIDs = append(bIDs, string(bo.Results.BackupID))
if !DisplayJSONFormat() {
Infof(ictx, fmt.Sprintf("Backup complete %s %s", observe.Bullet, color.BlueOutput(bo.Results.BackupID)))
printBackupStats(ictx, r, string(bo.Results.BackupID))
Infof(ctx, fmt.Sprintf("Backup complete %s %s", observe.Bullet, color.BlueOutput(bo.Results.BackupID)))
printBackupStats(ctx, r, string(bo.Results.BackupID))
} else {
Infof(ictx, "Backup complete - ID: %v\n", bo.Results.BackupID)
Infof(ctx, "Backup complete - ID: %v\n", bo.Results.BackupID)
}
}
@ -338,7 +334,7 @@ func genericListCommand(
fe.PrintItems(
ctx,
!ifShow(flags.ListAlertsFV),
!ifShow(flags.FailedItemsFV),
!ifShow(flags.ListFailedItemsFV),
!ifShow(flags.ListSkippedItemsFV),
!ifShow(flags.ListRecoveredErrorsFV))
@ -398,10 +394,6 @@ func genericDetailsCore(
return nil, clues.Wrap(errs.Failure(), "Failed to get backup details in the repository")
}
if len(d.Entries) == 0 {
return nil, ErrEmptyBackup
}
if opts.SkipReduce {
return d, nil
}

View File

@ -5,12 +5,10 @@ import (
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
dtd "github.com/alcionai/corso/src/pkg/backup/details/testdata"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path"
@ -68,30 +66,3 @@ func (suite *BackupUnitSuite) TestGenericDetailsCore() {
assert.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, expected, output.Entries)
}
func (suite *BackupUnitSuite) TestGenericDetailsCore_empty() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
bg := testdata.VersionedBackupGetter{
Details: &details.Details{
DetailsModel: details.DetailsModel{
Entries: []details.Entry{},
},
},
}
sel := selectors.NewExchangeBackup([]string{"user-id"})
sel.Include(sel.AllData())
_, err := genericDetailsCore(
ctx,
bg,
"backup-ID",
sel.Selector,
control.DefaultOptions())
require.Error(t, err, "has error")
assert.ErrorIs(t, err, ErrEmptyBackup, clues.ToCore(err))
}

View File

@ -3,6 +3,7 @@ package backup
import (
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print"
@ -61,11 +62,15 @@ corso backup details exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
// called by backup.go to map subcommands to provider-specific handling.
func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use {
case createCommand:
c, _ = utils.AddCommand(cmd, exchangeCreateCmd())
c, fs = utils.AddCommand(cmd, exchangeCreateCmd())
fs.SortFlags = false
c.Use = c.Use + " " + exchangeServiceCommandCreateUseSuffix
c.Example = exchangeServiceCommandCreateExamples
@ -82,13 +87,15 @@ func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
flags.AddDisableSlidingWindowLimiterFlag(c)
case listCommand:
c, _ = utils.AddCommand(cmd, exchangeListCmd())
c, fs = utils.AddCommand(cmd, exchangeListCmd())
fs.SortFlags = false
flags.AddBackupIDFlag(c, false)
flags.AddAllBackupListFlags(c)
case detailsCommand:
c, _ = utils.AddCommand(cmd, exchangeDetailsCmd())
c, fs = utils.AddCommand(cmd, exchangeDetailsCmd())
fs.SortFlags = false
c.Use = c.Use + " " + exchangeServiceCommandDetailsUseSuffix
c.Example = exchangeServiceCommandDetailsExamples
@ -101,7 +108,8 @@ func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
flags.AddExchangeDetailsAndRestoreFlags(c, false)
case deleteCommand:
c, _ = utils.AddCommand(cmd, exchangeDeleteCmd())
c, fs = utils.AddCommand(cmd, exchangeDeleteCmd())
fs.SortFlags = false
c.Use = c.Use + " " + exchangeServiceCommandDeleteUseSuffix
c.Example = exchangeServiceCommandDeleteExamples

View File

@ -18,7 +18,6 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path"
@ -40,7 +39,7 @@ var (
type NoBackupExchangeE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestNoBackupExchangeE2ESuite(t *testing.T) {
@ -55,7 +54,7 @@ func (suite *NoBackupExchangeE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
}
@ -94,7 +93,7 @@ func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_noBackups() {
type BackupExchangeE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestBackupExchangeE2ESuite(t *testing.T) {
@ -109,7 +108,7 @@ func (suite *BackupExchangeE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
}
@ -139,7 +138,7 @@ func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.
cmd, ctx := buildExchangeBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.m365.User.ID,
suite.its.user.ID,
category.String(),
&recorder)
@ -150,11 +149,8 @@ func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.
result := recorder.String()
t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email.
assert.Contains(
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
// as an offhand check: the result should contain the m365 user id
assert.Contains(t, result, suite.its.user.ID)
}
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_ServiceNotEnabled_email() {
@ -177,7 +173,7 @@ func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, categ
cmd, ctx := buildExchangeBackupCmd(
ctx,
suite.dpnd.configFilePath,
fmt.Sprintf("%s,%s", tconfig.UnlicensedM365UserID(suite.T()), suite.m365.User.ID),
fmt.Sprintf("%s,%s", tconfig.UnlicensedM365UserID(suite.T()), suite.its.user.ID),
category.String(),
&recorder)
err := cmd.ExecuteContext(ctx)
@ -186,11 +182,8 @@ func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, categ
result := recorder.String()
t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email.
assert.Contains(
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
// as an offhand check: the result should contain the m365 user id
assert.Contains(t, result, suite.its.user.ID)
}
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_email() {
@ -229,8 +222,7 @@ func runExchangeBackupUserNotFoundTest(suite *BackupExchangeE2ESuite, category p
assert.Contains(
t,
err.Error(),
"not found",
"error missing user not found")
"not found in tenant", "error missing user not found")
assert.NotContains(t, err.Error(), "runtime error", "panic happened")
t.Logf("backup error message: %s", err.Error())
@ -249,7 +241,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAzureClientIDFl
cmd := cliTD.StubRootCmd(
"backup", "create", "exchange",
"--user", suite.m365.User.ID,
"--user", suite.its.user.ID,
"--azure-client-id", "invalid-value")
cli.BuildCommandTree(cmd)
@ -273,7 +265,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_fromConfigFile() {
cmd := cliTD.StubRootCmd(
"backup", "create", "exchange",
"--user", suite.m365.User.ID,
"--user", suite.its.user.ID,
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
@ -288,11 +280,8 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_fromConfigFile() {
result := suite.dpnd.recorder.String()
t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email.
assert.Contains(
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
// as an offhand check: the result should contain the m365 user id
assert.Contains(t, result, suite.its.user.ID)
}
// AWS flags
@ -306,7 +295,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAWSFlags() {
cmd := cliTD.StubRootCmd(
"backup", "create", "exchange",
"--user", suite.m365.User.ID,
"--user", suite.its.user.ID,
"--aws-access-key", "invalid-value",
"--aws-secret-access-key", "some-invalid-value")
cli.BuildCommandTree(cmd)
@ -329,7 +318,7 @@ type PreparedBackupExchangeE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestPreparedBackupExchangeE2ESuite(t *testing.T) {
@ -346,13 +335,13 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
suite.backupOps = make(map[path.CategoryType]string)
var (
users = []string{suite.m365.User.ID}
ins = idname.NewCache(map[string]string{suite.m365.User.ID: suite.m365.User.ID})
users = []string{suite.its.user.ID}
ins = idname.NewCache(map[string]string{suite.its.user.ID: suite.its.user.ID})
)
for _, set := range []path.CategoryType{email, contacts, events} {

View File

@ -6,6 +6,7 @@ import (
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cli/flags"
@ -35,12 +36,9 @@ const (
groupsServiceCommandCreateExamples = `# Backup all Groups and Teams data for the Marketing group
corso backup create groups --group Marketing
# Backup only Teams channel messages
# Backup only Teams conversations messages
corso backup create groups --group Marketing --data messages
# Backup only group mailbox posts
corso backup create groups --group Marketing --data conversations
# Backup all Groups and Teams data for all groups
corso backup create groups --group '*'`
@ -53,19 +51,20 @@ corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd
# Explore Marketing messages posted after the start of 2022
corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd \
--last-message-reply-after 2022-01-01T00:00:00
# Explore group mailbox posts with conversation subject "hello world"
corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world"`
--last-message-reply-after 2022-01-01T00:00:00`
)
// called by backup.go to map subcommands to provider-specific handling.
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use {
case createCommand:
c, _ = utils.AddCommand(cmd, groupsCreateCmd(), utils.MarkPreviewCommand())
c, fs = utils.AddCommand(cmd, groupsCreateCmd(), utils.MarkPreviewCommand())
fs.SortFlags = false
c.Use = c.Use + " " + groupsServiceCommandCreateUseSuffix
c.Example = groupsServiceCommandCreateExamples
@ -79,13 +78,15 @@ func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
flags.AddDisableLazyItemReader(c)
case listCommand:
c, _ = utils.AddCommand(cmd, groupsListCmd(), utils.MarkPreviewCommand())
c, fs = utils.AddCommand(cmd, groupsListCmd(), utils.MarkPreviewCommand())
fs.SortFlags = false
flags.AddBackupIDFlag(c, false)
flags.AddAllBackupListFlags(c)
case detailsCommand:
c, _ = utils.AddCommand(cmd, groupsDetailsCmd(), utils.MarkPreviewCommand())
c, fs = utils.AddCommand(cmd, groupsDetailsCmd(), utils.MarkPreviewCommand())
fs.SortFlags = false
c.Use = c.Use + " " + groupsServiceCommandDetailsUseSuffix
c.Example = groupsServiceCommandDetailsExamples
@ -99,7 +100,8 @@ func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
flags.AddSharePointDetailsAndRestoreFlags(c)
case deleteCommand:
c, _ = utils.AddCommand(cmd, groupsDeleteCmd(), utils.MarkPreviewCommand())
c, fs = utils.AddCommand(cmd, groupsDeleteCmd(), utils.MarkPreviewCommand())
fs.SortFlags = false
c.Use = c.Use + " " + groupsServiceCommandDeleteUseSuffix
c.Example = groupsServiceCommandDeleteExamples
@ -160,7 +162,7 @@ func createGroupsCmd(cmd *cobra.Command, args []string) error {
return Only(ctx, clues.Stack(err))
}
ins, err := svcCli.AC.Groups().GetAllIDsAndNames(ctx, errs)
ins, err := svcCli.GroupsMap(ctx, errs)
if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 groups"))
}
@ -316,7 +318,7 @@ func groupsBackupCreateSelectors(
group, cats []string,
) *selectors.GroupsBackup {
if filters.PathContains(group).Compare(flags.Wildcard) {
return includeAllGroupsWithCategories(ins, cats)
return includeAllGroupWithCategories(ins, cats)
}
sel := selectors.NewGroupsBackup(slices.Clone(group))
@ -324,6 +326,6 @@ func groupsBackupCreateSelectors(
return utils.AddGroupsCategories(sel, cats)
}
func includeAllGroupsWithCategories(ins idname.Cacher, categories []string) *selectors.GroupsBackup {
func includeAllGroupWithCategories(ins idname.Cacher, categories []string) *selectors.GroupsBackup {
return utils.AddGroupsCategories(selectors.NewGroupsBackup(ins.IDs()), categories)
}

View File

@ -20,7 +20,6 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path"
@ -36,7 +35,7 @@ import (
type NoBackupGroupsE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestNoBackupGroupsE2ESuite(t *testing.T) {
@ -51,7 +50,7 @@ func (suite *NoBackupGroupsE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
}
@ -90,7 +89,7 @@ func (suite *NoBackupGroupsE2ESuite) TestGroupsBackupListCmd_noBackups() {
type BackupGroupsE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestBackupGroupsE2ESuite(t *testing.T) {
@ -105,7 +104,7 @@ func (suite *BackupGroupsE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
}
@ -114,8 +113,6 @@ func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() {
}
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_conversations() {
// skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
runGroupsBackupCategoryTest(suite, flags.DataConversations)
}
@ -137,7 +134,7 @@ func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category string) {
cmd, ctx := buildGroupsBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.m365.Group.ID,
suite.its.group.ID,
category,
&recorder)
@ -185,8 +182,7 @@ func runGroupsBackupGroupNotFoundTest(suite *BackupGroupsE2ESuite, category stri
assert.Contains(
t,
err.Error(),
"not found",
"error missing user not found")
"not found in tenant", "error missing group not found")
assert.NotContains(t, err.Error(), "runtime error", "panic happened")
t.Logf("backup error message: %s", err.Error())
@ -205,7 +201,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()
cmd := cliTD.StubRootCmd(
"backup", "create", "groups",
"--group", suite.m365.Group.ID,
"--group", suite.its.group.ID,
"--azure-client-id", "invalid-value")
cli.BuildCommandTree(cmd)
@ -219,9 +215,6 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()
}
func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
// Skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
@ -232,7 +225,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
cmd := cliTD.StubRootCmd(
"backup", "create", "groups",
"--group", suite.m365.Group.ID,
"--group", suite.its.group.ID,
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
@ -256,7 +249,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAWSFlags() {
cmd := cliTD.StubRootCmd(
"backup", "create", "groups",
"--group", suite.m365.Group.ID,
"--group", suite.its.group.ID,
"--aws-access-key", "invalid-value",
"--aws-secret-access-key", "some-invalid-value")
cli.BuildCommandTree(cmd)
@ -279,7 +272,7 @@ type PreparedBackupGroupsE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestPreparedBackupGroupsE2ESuite(t *testing.T) {
@ -296,19 +289,16 @@ func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
suite.backupOps = make(map[path.CategoryType]string)
var (
groups = []string{suite.m365.Group.ID}
ins = idname.NewCache(map[string]string{suite.m365.Group.ID: suite.m365.Group.ID})
groups = []string{suite.its.group.ID}
ins = idname.NewCache(map[string]string{suite.its.group.ID: suite.its.group.ID})
cats = []path.CategoryType{
path.ChannelMessagesCategory,
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
// we go fix the group mailbox.
// path.ConversationPostsCategory,
path.ConversationPostsCategory,
path.LibrariesCategory,
}
)
@ -462,8 +452,6 @@ func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages(
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_conversations() {
// skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
runGroupsDetailsCmdTest(suite, path.ConversationPostsCategory)
}

View File

@ -14,16 +14,142 @@ import (
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
gmock "github.com/alcionai/corso/src/pkg/services/m365/api/graph/mock"
"github.com/alcionai/corso/src/pkg/storage"
"github.com/alcionai/corso/src/pkg/storage/testdata"
)
// ---------------------------------------------------------------------------
// Gockable client
// ---------------------------------------------------------------------------
// gockClient produces a new api client that can be
// mocked using gock.
func gockClient(creds account.M365Config, counter *count.Bus) (api.Client, error) {
s, err := gmock.NewService(creds, counter)
if err != nil {
return api.Client{}, err
}
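// the large-item client is built with graph.NoTimeout so long-running requests aren't subject to the default timeout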
li, err := gmock.NewService(creds, counter, graph.NoTimeout())
if err != nil {
return api.Client{}, err
}
return api.Client{
Credentials: creds,
Stable: s,
LargeItem: li,
}, nil
}
// ---------------------------------------------------------------------------
// Suite Setup
// ---------------------------------------------------------------------------
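// ids holds the M365 identifiers (object ID, drive ID, and drive root folder ID)
// that these CLI integration tests look up once during setup and then reuse.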
type ids struct {
ID string
DriveID string
DriveRootFolderID string
}
type intgTesterSetup struct {
acct account.Account
ac api.Client
gockAC api.Client
user ids
site ids
group ids
team ids
}
func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
its := intgTesterSetup{}
ctx, flush := tester.NewContext(t)
defer flush()
graph.InitializeConcurrencyLimiter(ctx, true, 4)
its.acct = tconfig.NewM365Account(t)
creds, err := its.acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
its.ac, err = api.NewClient(
creds,
control.DefaultOptions(),
count.New())
require.NoError(t, err, clues.ToCore(err))
its.gockAC, err = gockClient(creds, count.New())
require.NoError(t, err, clues.ToCore(err))
// user drive
uids := ids{}
uids.ID = tconfig.M365UserID(t)
userDrive, err := its.ac.Users().GetDefaultDrive(ctx, uids.ID)
require.NoError(t, err, clues.ToCore(err))
uids.DriveID = ptr.Val(userDrive.GetId())
userDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, uids.DriveID)
require.NoError(t, err, clues.ToCore(err))
uids.DriveRootFolderID = ptr.Val(userDriveRootFolder.GetId())
its.user = uids
// site
sids := ids{}
sids.ID = tconfig.M365SiteID(t)
siteDrive, err := its.ac.Sites().GetDefaultDrive(ctx, sids.ID)
require.NoError(t, err, clues.ToCore(err))
sids.DriveID = ptr.Val(siteDrive.GetId())
siteDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, sids.DriveID)
require.NoError(t, err, clues.ToCore(err))
sids.DriveRootFolderID = ptr.Val(siteDriveRootFolder.GetId())
its.site = sids
// group
gids := ids{}
// use of the TeamID is intentional here, so that we are assured
// the group has full usage of the teams api.
gids.ID = tconfig.M365TeamID(t)
its.group = gids
// team
tids := ids{}
tids.ID = tconfig.M365TeamID(t)
its.team = tids
return its
}
type dependencies struct {
st storage.Storage
repo repository.Repositoryer

View File

@ -60,6 +60,7 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
switch cmd.Use {
case createCommand:
c, fs = utils.AddCommand(cmd, oneDriveCreateCmd())
fs.SortFlags = false
c.Use = c.Use + " " + oneDriveServiceCommandCreateUseSuffix
c.Example = oneDriveServiceCommandCreateExamples
@ -67,20 +68,22 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
flags.AddUserFlag(c)
flags.AddGenericBackupFlags(c)
fs.BoolVar(
&flags.UseOldDeltaProcessFV,
flags.UseOldDeltaProcessFN,
&flags.UseDeltaTreeFV,
flags.UseDeltaTreeFN,
false,
"process backups using the old delta processor instead of tree-based enumeration")
cobra.CheckErr(fs.MarkHidden(flags.UseOldDeltaProcessFN))
"process backups using the delta tree instead of standard enumeration")
cobra.CheckErr(fs.MarkHidden(flags.UseDeltaTreeFN))
case listCommand:
c, _ = utils.AddCommand(cmd, oneDriveListCmd())
c, fs = utils.AddCommand(cmd, oneDriveListCmd())
fs.SortFlags = false
flags.AddBackupIDFlag(c, false)
flags.AddAllBackupListFlags(c)
case detailsCommand:
c, _ = utils.AddCommand(cmd, oneDriveDetailsCmd())
c, fs = utils.AddCommand(cmd, oneDriveDetailsCmd())
fs.SortFlags = false
c.Use = c.Use + " " + oneDriveServiceCommandDetailsUseSuffix
c.Example = oneDriveServiceCommandDetailsExamples
@ -90,7 +93,8 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
flags.AddOneDriveDetailsAndRestoreFlags(c)
case deleteCommand:
c, _ = utils.AddCommand(cmd, oneDriveDeleteCmd())
c, fs = utils.AddCommand(cmd, oneDriveDeleteCmd())
fs.SortFlags = false
c.Use = c.Use + " " + oneDriveServiceCommandDeleteUseSuffix
c.Example = oneDriveServiceCommandDeleteExamples

View File

@ -94,7 +94,7 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupCmd_userNotInTenant() {
cmd := cliTD.StubRootCmd(
"backup", "create", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--"+flags.UserFN, "foo@not-there.com")
"--"+flags.UserFN, "foo@nothere.com")
cli.BuildCommandTree(cmd)
cmd.SetOut(&recorder)
@ -107,8 +107,7 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupCmd_userNotInTenant() {
assert.Contains(
t,
err.Error(),
"not found",
"error missing user not found")
"not found in tenant", "error missing user not found")
assert.NotContains(t, err.Error(), "runtime error", "panic happened")
t.Logf("backup error message: %s", err.Error())

View File

@ -5,6 +5,7 @@ import (
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cli/flags"
@ -37,11 +38,7 @@ corso backup create sharepoint --site https://example.com/hr
corso backup create sharepoint --site https://example.com/hr,https://example.com/team
# Backup all SharePoint data for all Sites
corso backup create sharepoint --site '*'
# Backup all SharePoint list data for a Site
corso backup create sharepoint --site https://example.com/hr --data lists
`
corso backup create sharepoint --site '*'`
sharePointServiceCommandDeleteExamples = `# Delete SharePoint backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
and 1234abcd-12ab-cd34-56de-1234abce
@ -61,54 +58,39 @@ corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
# Explore all files within the document library "Work Documents"
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--library "Work Documents"
# Explore lists by their name(s)
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list "list-name-1,list-name-2"
# Explore lists created after a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-after 2024-01-01T12:23:34
# Explore lists created before a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-before 2024-01-01T12:23:34
# Explore lists modified before a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-before 2024-01-01T12:23:34
# Explore lists modified after a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-after 2024-01-01T12:23:34`
`
)
// called by backup.go to map subcommands to provider-specific handling.
func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use {
case createCommand:
c, _ = utils.AddCommand(cmd, sharePointCreateCmd())
c, fs = utils.AddCommand(cmd, sharePointCreateCmd())
fs.SortFlags = false
c.Use = c.Use + " " + sharePointServiceCommandCreateUseSuffix
c.Example = sharePointServiceCommandCreateExamples
flags.AddSiteFlag(c, true)
flags.AddSiteIDFlag(c, true)
// TODO(hitesh): add the lists flag to invoke backup for lists
// once explicit invocation is no longer required
flags.AddDataFlag(c, []string{flags.DataLibraries}, true)
flags.AddGenericBackupFlags(c)
case listCommand:
c, _ = utils.AddCommand(cmd, sharePointListCmd())
c, fs = utils.AddCommand(cmd, sharePointListCmd())
fs.SortFlags = false
flags.AddBackupIDFlag(c, false)
flags.AddAllBackupListFlags(c)
case detailsCommand:
c, _ = utils.AddCommand(cmd, sharePointDetailsCmd())
c, fs = utils.AddCommand(cmd, sharePointDetailsCmd())
fs.SortFlags = false
c.Use = c.Use + " " + sharePointServiceCommandDetailsUseSuffix
c.Example = sharePointServiceCommandDetailsExamples
@ -118,7 +100,8 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
flags.AddSharePointDetailsAndRestoreFlags(c)
case deleteCommand:
c, _ = utils.AddCommand(cmd, sharePointDeleteCmd())
c, fs = utils.AddCommand(cmd, sharePointDeleteCmd())
fs.SortFlags = false
c.Use = c.Use + " " + sharePointServiceCommandDeleteUseSuffix
c.Example = sharePointServiceCommandDeleteExamples

View File

@ -20,9 +20,7 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
@ -90,7 +88,7 @@ func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() {
type BackupSharepointE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestBackupSharepointE2ESuite(t *testing.T) {
@ -105,13 +103,11 @@ func (suite *BackupSharepointE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
}
func (suite *BackupSharepointE2ESuite) TestSharepointBackupCmd_lists() {
// Issue: https://github.com/alcionai/corso/issues/4754
suite.T().Skip("unskip when sharepoint lists support is enabled")
runSharepointBackupCategoryTest(suite, flags.DataLists)
}
@ -129,7 +125,7 @@ func runSharepointBackupCategoryTest(suite *BackupSharepointE2ESuite, category s
cmd, ctx := buildSharepointBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.m365.Site.ID,
suite.its.site.ID,
category,
&recorder)
@ -142,8 +138,6 @@ func runSharepointBackupCategoryTest(suite *BackupSharepointE2ESuite, category s
}
func (suite *BackupSharepointE2ESuite) TestSharepointBackupCmd_siteNotFound_lists() {
// Issue: https://github.com/alcionai/corso/issues/4754
suite.T().Skip("un-skip test when lists support is enabled")
runSharepointBackupSiteNotFoundTest(suite, flags.DataLists)
}
@ -188,7 +182,7 @@ type PreparedBackupSharepointE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup
its intgTesterSetup
}
func TestPreparedBackupSharepointE2ESuite(t *testing.T) {
@ -205,13 +199,13 @@ func (suite *PreparedBackupSharepointE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
suite.backupOps = make(map[path.CategoryType]string)
var (
sites = []string{suite.m365.Site.ID}
ins = idname.NewCache(map[string]string{suite.m365.Site.ID: suite.m365.Site.ID})
sites = []string{suite.its.site.ID}
ins = idname.NewCache(map[string]string{suite.its.site.ID: suite.its.site.ID})
cats = []path.CategoryType{
path.ListsCategory,
}
@ -344,34 +338,19 @@ func runSharepointDetailsCmdTest(suite *PreparedBackupSharepointE2ESuite, catego
result := suite.dpnd.recorder.String()
i := 0
findings := make(map[path.CategoryType]int)
incrementor := func(cond bool, cat path.CategoryType) {
if cond {
findings[cat]++
}
}
foundList := 0
for _, ent := range deets.Entries {
if ent.SharePoint == nil {
continue
if ent.SharePoint != nil && ent.SharePoint.ItemName != "" {
suite.Run(fmt.Sprintf("detail %d", i), func() {
assert.Contains(suite.T(), result, ent.ShortRef)
})
foundList++
i++
}
isSharePointList := ent.SharePoint.ItemType == details.SharePointList
hasListName := isSharePointList && len(ent.SharePoint.List.Name) > 0
hasItemName := !isSharePointList && len(ent.SharePoint.ItemName) > 0
incrementor(hasListName, category)
incrementor(hasItemName, category)
suite.Run(fmt.Sprintf("detail %d", i), func() {
assert.Contains(suite.T(), result, ent.ShortRef)
})
i++
}
assert.GreaterOrEqual(t, findings[category], 1)
assert.GreaterOrEqual(t, foundList, 1)
}
// ---------------------------------------------------------------------------

View File

@ -253,12 +253,14 @@ func (suite *SharePointUnitSuite) TestValidateSharePointBackupCreateFlags() {
cats: []string{"invalid category"},
expect: assert.Error,
},
{
name: "site with lists category",
site: []string{"smarf"},
cats: []string{flags.DataLists},
expect: assert.NoError,
},
// [TODO]: Uncomment when lists are enabled
// {
// name: "site with lists category",
// site: []string{"smarf"},
// cats: []string{flags.DataLists},
// expect: assert.NoError,
// },
// [TODO]: Uncomment when pages are enabled

View File

@ -1,305 +0,0 @@
package backup
import (
"context"
"fmt"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365"
)
// ------------------------------------------------------------------------------------------------
// setup and globals
// ------------------------------------------------------------------------------------------------
const (
teamschatsServiceCommand = "chats"
teamschatsServiceCommandCreateUseSuffix = "--user <userEmail> | '" + flags.Wildcard + "'"
teamschatsServiceCommandDeleteUseSuffix = "--backups <backupId>"
teamschatsServiceCommandDetailsUseSuffix = "--backup <backupId>"
)
const (
teamschatsServiceCommandCreateExamples = `# Backup all chats with bob@company.hr
corso backup create chats --user bob@company.hr
# Backup all chats for all users
corso backup create chats --user '*'`
teamschatsServiceCommandDeleteExamples = `# Delete chats backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
and 1234abcd-12ab-cd34-56de-1234abce
corso backup delete chats --backups 1234abcd-12ab-cd34-56de-1234abcd,1234abcd-12ab-cd34-56de-1234abce`
teamschatsServiceCommandDetailsExamples = `# Explore chats in Bob's latest backup (1234abcd...)
corso backup details chats --backup 1234abcd-12ab-cd34-56de-1234abcd`
)
// called by backup.go to map subcommands to provider-specific handling.
func addTeamsChatsCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
switch cmd.Use {
case createCommand:
c, _ = utils.AddCommand(cmd, teamschatsCreateCmd(), utils.MarkPreReleaseCommand())
c.Use = c.Use + " " + teamschatsServiceCommandCreateUseSuffix
c.Example = teamschatsServiceCommandCreateExamples
// Flags addition ordering should follow the order we want them to appear in help and docs:
flags.AddUserFlag(c)
flags.AddDataFlag(c, []string{flags.DataChats}, false)
flags.AddGenericBackupFlags(c)
case listCommand:
c, _ = utils.AddCommand(cmd, teamschatsListCmd(), utils.MarkPreReleaseCommand())
flags.AddBackupIDFlag(c, false)
flags.AddAllBackupListFlags(c)
case detailsCommand:
c, _ = utils.AddCommand(cmd, teamschatsDetailsCmd(), utils.MarkPreReleaseCommand())
c.Use = c.Use + " " + teamschatsServiceCommandDetailsUseSuffix
c.Example = teamschatsServiceCommandDetailsExamples
flags.AddSkipReduceFlag(c)
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
flags.AddBackupIDFlag(c, true)
flags.AddTeamsChatsDetailsAndRestoreFlags(c)
case deleteCommand:
c, _ = utils.AddCommand(cmd, teamschatsDeleteCmd(), utils.MarkPreReleaseCommand())
c.Use = c.Use + " " + teamschatsServiceCommandDeleteUseSuffix
c.Example = teamschatsServiceCommandDeleteExamples
flags.AddMultipleBackupIDsFlag(c, false)
flags.AddBackupIDFlag(c, false)
}
return c
}
// ------------------------------------------------------------------------------------------------
// backup create
// ------------------------------------------------------------------------------------------------
// `corso backup create chats [<flag>...]`
func teamschatsCreateCmd() *cobra.Command {
return &cobra.Command{
Use: teamschatsServiceCommand,
Aliases: []string{teamsServiceCommand},
Short: "Backup M365 Chats data",
RunE: createTeamsChatsCmd,
Args: cobra.NoArgs,
}
}
// processes a teamschats backup.
func createTeamsChatsCmd(cmd *cobra.Command, args []string) error {
ctx := cmd.Context()
if utils.HasNoFlagsAndShownHelp(cmd) {
return nil
}
if flags.RunModeFV == flags.RunModeFlagTest {
return nil
}
if err := validateTeamsChatsBackupCreateFlags(flags.UserFV, flags.CategoryDataFV); err != nil {
return err
}
r, acct, err := utils.AccountConnectAndWriteRepoConfig(
ctx,
cmd,
path.TeamsChatsService)
if err != nil {
return Only(ctx, err)
}
defer utils.CloseRepo(ctx, r)
// TODO: log/print recoverable errors
errs := fault.New(false)
svcCli, err := m365.NewM365Client(ctx, *acct)
if err != nil {
return Only(ctx, clues.Stack(err))
}
ins, err := svcCli.AC.Users().GetAllIDsAndNames(ctx, errs)
if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 teamschats"))
}
sel := teamschatsBackupCreateSelectors(ctx, ins, flags.UserFV, flags.CategoryDataFV)
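// split the selector per resource owner so each user gets its own backup selector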
selectorSet := []selectors.Selector{}
for _, discSel := range sel.SplitByResourceOwner(ins.IDs()) {
selectorSet = append(selectorSet, discSel.Selector)
}
return genericCreateCommand(
ctx,
r,
"Chats",
selectorSet,
ins)
}
// ------------------------------------------------------------------------------------------------
// backup list
// ------------------------------------------------------------------------------------------------
// `corso backup list teamschats [<flag>...]`
func teamschatsListCmd() *cobra.Command {
return &cobra.Command{
Use: teamschatsServiceCommand,
Short: "List the history of M365 Chats backups",
RunE: listTeamsChatsCmd,
Args: cobra.NoArgs,
}
}
// lists the history of backup operations
func listTeamsChatsCmd(cmd *cobra.Command, args []string) error {
return genericListCommand(cmd, flags.BackupIDFV, path.TeamsChatsService, args)
}
// ------------------------------------------------------------------------------------------------
// backup details
// ------------------------------------------------------------------------------------------------
// `corso backup details teamschats [<flag>...]`
func teamschatsDetailsCmd() *cobra.Command {
return &cobra.Command{
Use: teamschatsServiceCommand,
Short: "Shows the details of a M365 Chats backup",
RunE: detailsTeamsChatsCmd,
Args: cobra.NoArgs,
}
}
// shows the details of a teamschats backup.
func detailsTeamsChatsCmd(cmd *cobra.Command, args []string) error {
if utils.HasNoFlagsAndShownHelp(cmd) {
return nil
}
if flags.RunModeFV == flags.RunModeFlagTest {
return nil
}
return runDetailsTeamsChatsCmd(cmd)
}
func runDetailsTeamsChatsCmd(cmd *cobra.Command) error {
ctx := cmd.Context()
opts := utils.MakeTeamsChatsOpts(cmd)
sel := utils.IncludeTeamsChatsRestoreDataSelectors(ctx, opts)
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
utils.FilterTeamsChatsRestoreInfoSelectors(sel, opts)
ds, err := genericDetailsCommand(cmd, flags.BackupIDFV, sel.Selector)
if err != nil {
return Only(ctx, err)
}
if len(ds.Entries) > 0 {
ds.PrintEntries(ctx)
} else {
Info(ctx, selectors.ErrorNoMatchingItems)
}
return nil
}
// ------------------------------------------------------------------------------------------------
// backup delete
// ------------------------------------------------------------------------------------------------
// `corso backup delete teamschats [<flag>...]`
func teamschatsDeleteCmd() *cobra.Command {
return &cobra.Command{
Use: teamschatsServiceCommand,
Short: "Delete backed-up M365 Chats data",
RunE: deleteTeamsChatsCmd,
Args: cobra.NoArgs,
}
}
// deletes a teamschats backup.
func deleteTeamsChatsCmd(cmd *cobra.Command, args []string) error {
backupIDValue := []string{}
if len(flags.BackupIDsFV) > 0 {
backupIDValue = flags.BackupIDsFV
} else if len(flags.BackupIDFV) > 0 {
backupIDValue = append(backupIDValue, flags.BackupIDFV)
} else {
return clues.New("either --backup or --backups flag is required")
}
return genericDeleteCommand(cmd, path.TeamsChatsService, "TeamsChats", backupIDValue, args)
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
func validateTeamsChatsBackupCreateFlags(teamschats, cats []string) error {
if len(teamschats) == 0 {
return clues.New(
"requires one or more --" +
flags.UserFN + " ids, or the wildcard --" +
flags.UserFN + " *")
}
msg := fmt.Sprintf(
" is an unrecognized data type; only %s is supported",
flags.DataChats)
allowedCats := utils.TeamsChatsAllowedCategories()
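// reject any --data value that isn't a recognized chats category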
for _, d := range cats {
if _, ok := allowedCats[d]; !ok {
return clues.New(d + msg)
}
}
return nil
}
func teamschatsBackupCreateSelectors(
ctx context.Context,
ins idname.Cacher,
users, cats []string,
) *selectors.TeamsChatsBackup {
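// a wildcard --user value expands to every user id known to the cache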
if filters.PathContains(users).Compare(flags.Wildcard) {
return includeAllTeamsChatsWithCategories(ins, cats)
}
sel := selectors.NewTeamsChatsBackup(slices.Clone(users))
return utils.AddTeamsChatsCategories(sel, cats)
}
func includeAllTeamsChatsWithCategories(ins idname.Cacher, categories []string) *selectors.TeamsChatsBackup {
return utils.AddTeamsChatsCategories(selectors.NewTeamsChatsBackup(ins.IDs()), categories)
}

View File

@ -1,636 +0,0 @@
package backup_test
import (
"context"
"fmt"
"strings"
"testing"
"github.com/alcionai/clues"
"github.com/google/uuid"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)
// ---------------------------------------------------------------------------
// tests that require no existing backups
// ---------------------------------------------------------------------------
type NoBackupTeamsChatsE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
}
func TestNoBackupTeamsChatsE2ESuite(t *testing.T) {
suite.Run(t, &NoBackupTeamsChatsE2ESuite{Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
func (suite *NoBackupTeamsChatsE2ESuite) SetupSuite() {
t := suite.T()
t.Skip("not fully implemented")
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
}
func (suite *NoBackupTeamsChatsE2ESuite) TestTeamsChatsBackupListCmd_noBackups() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "list", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.dpnd.recorder.String()
// as an offhand check: the result should report that no backups are available
assert.True(t, strings.HasSuffix(result, "No backups available\n"))
}
// ---------------------------------------------------------------------------
// tests with no prior backup
// ---------------------------------------------------------------------------
type BackupTeamsChatsE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
}
func TestBackupTeamsChatsE2ESuite(t *testing.T) {
suite.Run(t, &BackupTeamsChatsE2ESuite{Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
func (suite *BackupTeamsChatsE2ESuite) SetupSuite() {
t := suite.T()
t.Skip("not fully implemented")
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
}
func (suite *BackupTeamsChatsE2ESuite) TestTeamsChatsBackupCmd_chats() {
runTeamsChatsBackupCategoryTest(suite, flags.DataChats)
}
func runTeamsChatsBackupCategoryTest(suite *BackupTeamsChatsE2ESuite, category string) {
recorder := strings.Builder{}
recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd, ctx := buildTeamsChatsBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.m365.User.ID,
category,
&recorder)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := recorder.String()
t.Log("backup results", result)
}
func (suite *BackupTeamsChatsE2ESuite) TestTeamsChatsBackupCmd_teamschatNotFound_chats() {
runTeamsChatsBackupTeamsChatNotFoundTest(suite, flags.DataChats)
}
func runTeamsChatsBackupTeamsChatNotFoundTest(suite *BackupTeamsChatsE2ESuite, category string) {
recorder := strings.Builder{}
recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd, ctx := buildTeamsChatsBackupCmd(
ctx,
suite.dpnd.configFilePath,
"foo@not-there.com",
category,
&recorder)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
assert.Contains(
t,
err.Error(),
"not found",
"error missing user not found")
assert.NotContains(t, err.Error(), "runtime error", "panic happened")
t.Logf("backup error message: %s", err.Error())
result := recorder.String()
t.Log("backup results", result)
}
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_badAzureClientIDFlag() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "chats",
"--teamschat", suite.m365.User.ID,
"--azure-client-id", "invalid-value")
cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_fromConfigFile() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "chats",
"--teamschat", suite.m365.User.ID,
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
}
// AWS flags
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_badAWSFlags() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "chats",
"--teamschat", suite.m365.User.ID,
"--aws-access-key", "invalid-value",
"--aws-secret-access-key", "some-invalid-value")
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
// since invalid aws creds are explicitly set, should see a failure
require.Error(t, err, clues.ToCore(err))
}
// ---------------------------------------------------------------------------
// tests prepared with a previous backup
// ---------------------------------------------------------------------------
type PreparedBackupTeamsChatsE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup
}
func TestPreparedBackupTeamsChatsE2ESuite(t *testing.T) {
suite.Run(t, &PreparedBackupTeamsChatsE2ESuite{
Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
func (suite *PreparedBackupTeamsChatsE2ESuite) SetupSuite() {
t := suite.T()
t.Skip("not fully implemented")
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
suite.backupOps = make(map[path.CategoryType]string)
var (
teamschats = []string{suite.m365.User.ID}
ins = idname.NewCache(map[string]string{suite.m365.User.ID: suite.m365.User.ID})
cats = []path.CategoryType{
path.ChatsCategory,
}
)
for _, set := range cats {
var (
sel = selectors.NewTeamsChatsBackup(teamschats)
scopes []selectors.TeamsChatsScope
)
switch set {
case path.ChatsCategory:
scopes = selTD.TeamsChatsBackupChatScope(sel)
}
sel.Include(scopes)
bop, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
require.NoError(t, err, clues.ToCore(err))
err = bop.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
bIDs := string(bop.Results.BackupID)
// sanity check, ensure we can find the backup and its details immediately
b, err := suite.dpnd.repo.Backup(ctx, string(bop.Results.BackupID))
require.NoError(t, err, "retrieving recent backup by ID")
require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")
_, b, errs := suite.dpnd.repo.GetBackupDetails(ctx, bIDs)
require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
require.Equal(t, bIDs, string(b.ID), "repo details matches results id")
suite.backupOps[set] = string(b.ID)
}
}
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_chats() {
runTeamsChatsListCmdTest(suite, path.ChatsCategory)
}
func runTeamsChatsListCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "list", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
assert.Contains(t, result, suite.backupOps[category])
}
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_singleID_chats() {
runTeamsChatsListSingleCmdTest(suite, path.ChatsCategory)
}
func runTeamsChatsListSingleCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
bID := suite.backupOps[category]
cmd := cliTD.StubRootCmd(
"backup", "list", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--backup", string(bID))
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
assert.Contains(t, result, bID)
}
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_badID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "list", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--backup", "smarfs")
cli.BuildCommandTree(cmd)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsDetailsCmd_chats() {
runTeamsChatsDetailsCmdTest(suite, path.ChatsCategory)
}
func runTeamsChatsDetailsCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
bID := suite.backupOps[category]
// fetch the details from the repo first
deets, _, errs := suite.dpnd.repo.GetBackupDetails(ctx, string(bID))
require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
require.Empty(t, errs.Recovered())
cmd := cliTD.StubRootCmd(
"backup", "details", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--"+flags.BackupFN, string(bID))
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
i := 0
foundFolders := 0
for _, ent := range deets.Entries {
// Skip folders as they don't mean anything to the end user.
if ent.Folder != nil {
foundFolders++
continue
}
suite.Run(fmt.Sprintf("detail %d", i), func() {
assert.Contains(suite.T(), result, ent.ShortRef)
})
i++
}
// We only backup the default folder for each category so there should be at
// least that folder (we don't make details entries for prefix folders).
assert.GreaterOrEqual(t, foundFolders, 1)
}
// ---------------------------------------------------------------------------
// tests for deleting backups
// ---------------------------------------------------------------------------
type BackupDeleteTeamsChatsE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps [3]operations.BackupOperation
}
func TestBackupDeleteTeamsChatsE2ESuite(t *testing.T) {
suite.Run(t, &BackupDeleteTeamsChatsE2ESuite{
Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
func (suite *BackupDeleteTeamsChatsE2ESuite) SetupSuite() {
t := suite.T()
t.Skip("not fully implemented")
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
m365TeamsChatID := tconfig.M365TeamID(t)
teamschats := []string{m365TeamsChatID}
// some tests require an existing backup
sel := selectors.NewTeamsChatsBackup(teamschats)
sel.Include(selTD.TeamsChatsBackupChatScope(sel))
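// create three backups up front so the delete tests can exercise both the multi-ID (--backups) and single-ID (--backup) paths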
for i := 0; i < cap(suite.backupOps); i++ {
backupOp, err := suite.dpnd.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, err, clues.ToCore(err))
suite.backupOps[i] = backupOp
err = suite.backupOps[i].Run(ctx)
require.NoError(t, err, clues.ToCore(err))
}
}
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--"+flags.BackupIDsFN,
fmt.Sprintf("%s,%s",
string(suite.backupOps[0].Results.BackupID),
string(suite.backupOps[1].Results.BackupID)))
cli.BuildCommandTree(cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd(
"backup", "details", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--backups", string(suite.backupOps[0].Results.BackupID))
cli.BuildCommandTree(cmd)
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_SingleID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--"+flags.BackupFN,
string(suite.backupOps[2].Results.BackupID))
cli.BuildCommandTree(cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd(
"backup", "details", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--backup", string(suite.backupOps[2].Results.BackupID))
cli.BuildCommandTree(cmd)
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_UnknownID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--"+flags.BackupIDsFN, uuid.NewString())
cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_NoBackupID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
// empty backupIDs should error since no data provided
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
func buildTeamsChatsBackupCmd(
ctx context.Context,
configFile, resource, category string,
recorder *strings.Builder,
) (*cobra.Command, context.Context) {
cmd := cliTD.StubRootCmd(
"backup", "create", "chats",
"--"+flags.ConfigFileFN, configFile,
"--"+flags.UserFN, resource,
"--"+flags.CategoryDataFN, category)
cli.BuildCommandTree(cmd)
cmd.SetOut(recorder)
return cmd, print.SetRootCmd(ctx, cmd)
}

View File

@ -1,248 +0,0 @@
package backup
import (
"testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control"
)
type TeamsChatsUnitSuite struct {
tester.Suite
}
func TestTeamsChatsUnitSuite(t *testing.T) {
suite.Run(t, &TeamsChatsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *TeamsChatsUnitSuite) TestAddTeamsChatsCommands() {
expectUse := teamschatsServiceCommand
table := []struct {
name string
use string
expectUse string
expectShort string
expectRunE func(*cobra.Command, []string) error
}{
{
name: "create teamschats",
use: createCommand,
expectUse: expectUse + " " + teamschatsServiceCommandCreateUseSuffix,
expectShort: teamschatsCreateCmd().Short,
expectRunE: createTeamsChatsCmd,
},
{
name: "list teamschats",
use: listCommand,
expectUse: expectUse,
expectShort: teamschatsListCmd().Short,
expectRunE: listTeamsChatsCmd,
},
{
name: "details teamschats",
use: detailsCommand,
expectUse: expectUse + " " + teamschatsServiceCommandDetailsUseSuffix,
expectShort: teamschatsDetailsCmd().Short,
expectRunE: detailsTeamsChatsCmd,
},
{
name: "delete teamschats",
use: deleteCommand,
expectUse: expectUse + " " + teamschatsServiceCommandDeleteUseSuffix,
expectShort: teamschatsDeleteCmd().Short,
expectRunE: deleteTeamsChatsCmd,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
cmd := &cobra.Command{Use: test.use}
c := addTeamsChatsCommands(cmd)
require.NotNil(t, c)
cmds := cmd.Commands()
require.Len(t, cmds, 1)
child := cmds[0]
assert.Equal(t, test.expectUse, child.Use)
assert.Equal(t, test.expectShort, child.Short)
tester.AreSameFunc(t, test.expectRunE, child.RunE)
})
}
}
func (suite *TeamsChatsUnitSuite) TestValidateTeamsChatsBackupCreateFlags() {
table := []struct {
name string
cats []string
expect assert.ErrorAssertionFunc
}{
{
name: "none",
cats: []string{},
expect: assert.NoError,
},
{
name: "chats",
cats: []string{flags.DataChats},
expect: assert.NoError,
},
{
name: "all allowed",
cats: []string{
flags.DataChats,
},
expect: assert.NoError,
},
{
name: "bad inputs",
cats: []string{"foo"},
expect: assert.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
err := validateTeamsChatsBackupCreateFlags([]string{"*"}, test.cats)
test.expect(suite.T(), err, clues.ToCore(err))
})
}
}
func (suite *TeamsChatsUnitSuite) TestBackupCreateFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: createCommand},
addTeamsChatsCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
teamschatsServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.UserFN, flagsTD.FlgInputs(flagsTD.UsersInput),
"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.TeamsChatsCategoryDataInput),
},
flagsTD.PreparedGenericBackupFlags(),
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
opts := utils.MakeTeamsChatsOpts(cmd)
co := utils.Control()
backupOpts := utils.ParseBackupOptions()
// TODO(ashmrtn): Move flag checks from control.Options to control.Backup once
// restore flags are switched over too and we no longer parse flags beyond
// connection info into control.Options.
assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)
assert.Equal(t, control.FailFast, co.FailureHandling)
assert.True(t, co.ToggleFeatures.DisableIncrementals)
assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
assert.ElementsMatch(t, flagsTD.UsersInput, opts.Users)
flagsTD.AssertGenericBackupFlags(t, cmd)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}
func (suite *TeamsChatsUnitSuite) TestBackupListFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: listCommand},
addTeamsChatsCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
teamschatsServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
},
flagsTD.PreparedBackupListFlags(),
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
flagsTD.AssertBackupListFlags(t, cmd)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}
func (suite *TeamsChatsUnitSuite) TestBackupDetailsFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: detailsCommand},
addTeamsChatsCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
teamschatsServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.SkipReduceFN,
},
flagsTD.PreparedTeamsChatsFlags(),
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
co := utils.Control()
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
assert.True(t, co.SkipReduce)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
flagsTD.AssertTeamsChatsFlags(t, cmd)
}
func (suite *TeamsChatsUnitSuite) TestBackupDeleteFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: deleteCommand},
addTeamsChatsCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
teamschatsServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
},
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}

View File

@ -2,6 +2,7 @@ package debug
import (
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
@ -10,13 +11,21 @@ import (
// called by debug.go to map subcommands to provider-specific handling.
func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use {
case metadataFilesCommand:
c, _ = utils.AddCommand(cmd, exchangeMetadataFilesCmd(), utils.MarkDebugCommand())
c, fs = utils.AddCommand(cmd, exchangeMetadataFilesCmd(), utils.MarkDebugCommand())
c.Use = c.Use + " " + exchangeServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true)
}

View File

@ -2,6 +2,7 @@ package debug
import (
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
@ -10,14 +11,21 @@ import (
// called by debug.go to map subcommands to provider-specific handling.
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use {
case metadataFilesCommand:
c, _ = utils.AddCommand(cmd, groupsMetadataFilesCmd(), utils.MarkDebugCommand())
c, fs = utils.AddCommand(cmd, groupsMetadataFilesCmd(), utils.MarkDebugCommand())
c.Use = c.Use + " " + groupsServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true)
}

View File

@ -2,6 +2,7 @@ package debug
import (
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
@ -10,13 +11,21 @@ import (
// called by debug.go to map subcommands to provider-specific handling.
func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use {
case metadataFilesCommand:
c, _ = utils.AddCommand(cmd, oneDriveMetadataFilesCmd(), utils.MarkDebugCommand())
c, fs = utils.AddCommand(cmd, oneDriveMetadataFilesCmd(), utils.MarkDebugCommand())
c.Use = c.Use + " " + oneDriveServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true)
}

View File

@ -2,6 +2,7 @@ package debug
import (
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
@ -10,13 +11,21 @@ import (
// called by debug.go to map subcommands to provider-specific handling.
func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use {
case metadataFilesCommand:
c, _ = utils.AddCommand(cmd, sharePointMetadataFilesCmd(), utils.MarkDebugCommand())
c, fs = utils.AddCommand(cmd, sharePointMetadataFilesCmd(), utils.MarkDebugCommand())
c.Use = c.Use + " " + sharePointServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true)
}

View File

@ -3,6 +3,7 @@ package export
import (
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
@ -10,14 +11,21 @@ import (
// called by export.go to map subcommands to provider-specific handling.
func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use {
case exportCommand:
c, _ = utils.AddCommand(cmd, exchangeExportCmd())
c, fs = utils.AddCommand(cmd, exchangeExportCmd())
c.Use = c.Use + " " + exchangeServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true)
flags.AddExchangeDetailsAndRestoreFlags(c, true)
flags.AddExportConfigFlags(c)

View File

@ -13,7 +13,6 @@ import (
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/dttm"
"github.com/alcionai/corso/src/pkg/export"
@ -97,7 +96,7 @@ func runExport(
return Only(ctx, clues.Wrap(err, "Failed to initialize "+serviceName+" export"))
}
collections, err := eo.Run(ctx)
expColl, err := eo.Run(ctx)
if err != nil {
if errors.Is(err, data.ErrNotFound) {
return Only(ctx, clues.New("Backup or backup details missing for id "+backupID))
@ -106,8 +105,20 @@ func runExport(
return Only(ctx, clues.Wrap(err, "Failed to run "+serviceName+" export"))
}
if err = showExportProgress(ctx, eo, collections, exportLocation); err != nil {
return err
// It would be better to show a progress bar than a spinner, but we don't
// have any way of knowing how many files are available as of now.
diskWriteComplete := observe.MessageWithCompletion(ctx, observe.ProgressCfg{}, "Writing data to disk")
err = export.ConsumeExportCollections(ctx, exportLocation, expColl, eo.Errors)
// The progress bar has to be closed before we move on: the Infof below
// flushes the progress bar to keep it from clobbering the output, and an
// unclosed bar causes that flush to stall the entire export indefinitely.
// https://github.com/alcionai/corso/blob/8102523dc62c001b301cd2ab4e799f86146ab1a0/src/cli/print/print.go#L151
close(diskWriteComplete)
if err != nil {
return Only(ctx, err)
}
if len(eo.Errors.Recovered()) > 0 {
@ -131,23 +142,3 @@ func runExport(
return nil
}
// slim wrapper that allows us to defer the progress bar closure with the expected scope.
func showExportProgress(
ctx context.Context,
op operations.ExportOperation,
collections []export.Collectioner,
exportLocation string,
) error {
// It would be better to show a progress bar than a spinner, but we don't
// have any way of knowing how many files are available as of now.
progressMessage := observe.MessageWithCompletion(ctx, observe.DefaultCfg(), "Writing data to disk")
defer close(progressMessage)
err := export.ConsumeExportCollections(ctx, exportLocation, collections, op.Errors)
if err != nil {
return Only(ctx, err)
}
return nil
}

View File

@ -3,22 +3,31 @@ package export
import (
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/selectors"
)
// called by export.go to map subcommands to provider-specific handling.
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use {
case exportCommand:
c, _ = utils.AddCommand(cmd, groupsExportCmd(), utils.MarkPreviewCommand())
c, fs = utils.AddCommand(cmd, groupsExportCmd(), utils.MarkPreviewCommand())
c.Use = c.Use + " " + groupsServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true)
flags.AddSiteFlag(c, false)
flags.AddSiteIDFlag(c, false)
@ -50,13 +59,7 @@ corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd
# Export all files and folders in folder "Documents/Finance Reports" that were created before 2020 to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd \
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00
# Export all posts from a conversation with topic "hello world" from group mailbox's last backup to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world"
# Export post with ID 98765abcdef from a conversation from group mailbox's last backup to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world" --post 98765abcdef`
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
)
// `corso export groups [<flag>...] <destination>`
@ -98,6 +101,10 @@ func exportGroupsCmd(cmd *cobra.Command, args []string) error {
sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)
utils.FilterGroupsRestoreInfoSelectors(sel, opts)
// TODO(pandeyabs): Exclude conversations from export since they are not
// supported yet. https://github.com/alcionai/corso/issues/4822
sel.Exclude(sel.Conversation(selectors.Any()))
acceptedGroupsFormatTypes := []string{
string(control.DefaultFormat),
string(control.JSONFormat),

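The sel.Exclude call above appears to rely on exclusions taking precedence over inclusions, so the command can keep its broad includes while unsupported categories (conversations here, lists in the SharePoint commands below) are filtered back out. A rough sketch of that include-then-exclude idea, not Corso's selector API:

package main

import "fmt"

type item struct{ category, name string }

// filter keeps every included item except those whose category is excluded;
// the exclusion always wins over the inclusion.
func filter(included []item, excludedCategories map[string]bool) []item {
	kept := make([]item, 0, len(included))
	for _, it := range included {
		if excludedCategories[it.category] {
			continue
		}
		kept = append(kept, it)
	}
	return kept
}

func main() {
	included := []item{
		{"libraries", "report.docx"},
		{"conversations", "hello world"},
	}
	fmt.Println(filter(included, map[string]bool{"conversations": true}))
	// [{libraries report.docx}]
}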
View File

@ -3,6 +3,7 @@ package export
import (
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
@ -10,14 +11,21 @@ import (
// called by export.go to map subcommands to provider-specific handling.
func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use {
case exportCommand:
c, _ = utils.AddCommand(cmd, oneDriveExportCmd())
c, fs = utils.AddCommand(cmd, oneDriveExportCmd())
c.Use = c.Use + " " + oneDriveServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true)
flags.AddOneDriveDetailsAndRestoreFlags(c)
flags.AddExportConfigFlags(c)

View File

@ -3,21 +3,30 @@ package export
import (
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/selectors"
)
// called by export.go to map subcommands to provider-specific handling.
func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use {
case exportCommand:
c, _ = utils.AddCommand(cmd, sharePointExportCmd())
c, fs = utils.AddCommand(cmd, sharePointExportCmd())
c.Use = c.Use + " " + sharePointServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true)
flags.AddSharePointDetailsAndRestoreFlags(c)
flags.AddExportConfigFlags(c)
@ -45,27 +54,7 @@ corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
# Export all files in the "Documents" library to the current directory.
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--library Documents --folder "Display Templates/Style Sheets" .
# Export lists by their name(s)
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list "list-name-1,list-name-2" .
# Export lists created after a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-after 2024-01-01T12:23:34 .
# Export lists created before a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-before 2024-01-01T12:23:34 .
# Export lists modified before a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-before 2024-01-01T12:23:34 .
# Export lists modified after a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-after 2024-01-01T12:23:34 .`
--library Documents --folder "Display Templates/Style Sheets" .`
)
// `corso export sharepoint [<flag>...] <destination>`
@ -106,6 +95,9 @@ func exportSharePointCmd(cmd *cobra.Command, args []string) error {
sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
utils.FilterSharePointRestoreInfoSelectors(sel, opts)
// Exclude lists from exports since they are not supported yet.
sel.Exclude(sel.Lists(selectors.Any()))
return runExport(
ctx,
cmd,

View File

@ -60,11 +60,8 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
"--" + flags.ListFN, flagsTD.FlgInputs(flagsTD.ListsInput),
"--" + flags.ListCreatedAfterFN, flagsTD.ListCreatedAfterInput,
"--" + flags.ListCreatedBeforeFN, flagsTD.ListCreatedBeforeInput,
"--" + flags.ListModifiedAfterFN, flagsTD.ListModifiedAfterInput,
"--" + flags.ListModifiedBeforeFN, flagsTD.ListModifiedBeforeInput,
"--" + flags.ListItemFN, flagsTD.FlgInputs(flagsTD.ListItemInput),
"--" + flags.ListFolderFN, flagsTD.FlgInputs(flagsTD.ListFolderInput),
"--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput),
"--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
"--" + flags.FormatFN, flagsTD.FormatType,
@ -91,11 +88,8 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore)
assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter)
assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
assert.ElementsMatch(t, flagsTD.ListsInput, opts.Lists)
assert.Equal(t, flagsTD.ListCreatedAfterInput, opts.ListCreatedAfter)
assert.Equal(t, flagsTD.ListCreatedBeforeInput, opts.ListCreatedBefore)
assert.Equal(t, flagsTD.ListModifiedAfterInput, opts.ListModifiedAfter)
assert.Equal(t, flagsTD.ListModifiedBeforeInput, opts.ListModifiedBefore)
assert.ElementsMatch(t, flagsTD.ListItemInput, opts.ListItem)
assert.ElementsMatch(t, flagsTD.ListFolderInput, opts.ListFolder)
assert.ElementsMatch(t, flagsTD.PageInput, opts.Page)
assert.ElementsMatch(t, flagsTD.PageFolderInput, opts.PageFolder)
assert.Equal(t, flagsTD.Archive, opts.ExportCfg.Archive)

View File

@ -12,11 +12,9 @@ func AddAllBackupListFlags(cmd *cobra.Command) {
}
func AddFailedItemsFN(cmd *cobra.Command) {
fs := cmd.Flags()
fs.StringVar(
&FailedItemsFV, FailedItemsFN, Show,
cmd.Flags().StringVar(
&ListFailedItemsFV, FailedItemsFN, Show,
"Toggles showing or hiding the list of items that failed.")
cobra.CheckErr(fs.MarkHidden(FailedItemsFN))
}
func AddSkippedItemsFN(cmd *cobra.Command) {

View File

@ -28,6 +28,13 @@ func AddFilesystemFlags(cmd *cobra.Command) {
"",
"path to local or network storage")
cobra.CheckErr(cmd.MarkFlagRequired(FilesystemPathFN))
fs.BoolVar(
&SucceedIfExistsFV,
SucceedIfExistsFN,
false,
"Exit with success if the repo has already been initialized.")
cobra.CheckErr(fs.MarkHidden("succeed-if-exists"))
}
func FilesystemFlagOverrides(cmd *cobra.Command) map[string]string {

View File

@ -13,7 +13,7 @@ const (
FileModifiedAfterFN = "file-modified-after"
FileModifiedBeforeFN = "file-modified-before"
UseOldDeltaProcessFN = "use-old-delta-process"
UseDeltaTreeFN = "use-delta-tree"
)
var (
@ -25,7 +25,7 @@ var (
FileModifiedAfterFV string
FileModifiedBeforeFV string
UseOldDeltaProcessFV bool
UseDeltaTreeFV bool
)
// AddOneDriveDetailsAndRestoreFlags adds flags that are common to both the

View File

@ -17,9 +17,9 @@ const (
FailFastFN = "fail-fast"
FailedItemsFN = "failed-items"
FetchParallelismFN = "fetch-parallelism"
NoPermissionsFN = "no-permissions"
NoStatsFN = "no-stats"
RecoveredErrorsFN = "recovered-errors"
NoPermissionsFN = "no-permissions"
RunModeFN = "run-mode"
SkippedItemsFN = "skipped-items"
SkipReduceFN = "skip-reduce"
@ -35,17 +35,17 @@ var (
ForceItemDataDownloadFV bool
EnableImmutableIDFV bool
FailFastFV bool
FailedItemsFV string
FetchParallelismFV int
ListAlertsFV string
ListFailedItemsFV string
ListSkippedItemsFV string
ListRecoveredErrorsFV string
NoPermissionsFV bool
NoStatsFV bool
// RunMode describes the type of run, such as:
// flagtest, dry, run. Should default to 'run'.
RunModeFV string
SkipReduceFV bool
RunModeFV string
NoPermissionsFV bool
SkipReduceFV bool
)
// well-known flag values

View File

@ -12,8 +12,9 @@ const (
AWSSessionTokenFN = "aws-session-token"
// Corso Flags
PassphraseFN = "passphrase"
NewPassphraseFN = "new-passphrase"
PassphraseFN = "passphrase"
NewPassphraseFN = "new-passphrase"
SucceedIfExistsFN = "succeed-if-exists"
)
var (
@ -24,6 +25,7 @@ var (
AWSSessionTokenFV string
PassphraseFV string
NewPhasephraseFV string
SucceedIfExistsFV bool
)
// AddMultipleBackupIDsFlag adds the --backups flag.

View File

@ -38,6 +38,11 @@ func AddS3BucketFlags(cmd *cobra.Command) {
fs.StringVar(&EndpointFV, EndpointFN, "", "S3 service endpoint.")
fs.BoolVar(&DoNotUseTLSFV, DoNotUseTLSFN, false, "Disable TLS (HTTPS)")
fs.BoolVar(&DoNotVerifyTLSFV, DoNotVerifyTLSFN, false, "Disable TLS (HTTPS) certificate verification.")
// In general, we don't want to expose this flag to users and have them mistake it
// for a broad-scale idempotency solution. We can un-hide it later if the need arises.
fs.BoolVar(&SucceedIfExistsFV, SucceedIfExistsFN, false, "Exit with success if the repo has already been initialized.")
cobra.CheckErr(fs.MarkHidden("succeed-if-exists"))
}
func S3FlagOverrides(cmd *cobra.Command) map[string]string {

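The hidden flag above pairs with the repo-init handling shown later in this diff (initFilesystemCmd and initS3Cmd), where an already-exists error is swallowed only when the flag was set. A minimal sketch of that opt-in idempotency check, with illustrative names rather than Corso's actual types:

package main

import (
	"errors"
	"fmt"
)

var errRepoAlreadyExists = errors.New("repository already exists") // stand-in sentinel

// initRepo treats "already initialized" as success only when the caller opted in.
func initRepo(succeedIfExists bool) error {
	if err := doInit(); err != nil {
		if succeedIfExists && errors.Is(err, errRepoAlreadyExists) {
			return nil
		}
		return fmt.Errorf("initializing repo: %w", err)
	}
	return nil
}

func doInit() error { return errRepoAlreadyExists } // pretend the repo already exists

func main() {
	fmt.Println(initRepo(false)) // initializing repo: repository already exists
	fmt.Println(initRepo(true))  // <nil>
}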
View File

@ -11,35 +11,23 @@ const (
)
const (
LibraryFN = "library"
ListFN = "list"
ListModifiedAfterFN = "list-modified-after"
ListModifiedBeforeFN = "list-modified-before"
ListCreatedAfterFN = "list-created-after"
ListCreatedBeforeFN = "list-created-before"
LibraryFN = "library"
ListFolderFN = "list"
ListItemFN = "list-item"
PageFolderFN = "page-folder"
PageFN = "page"
SiteFN = "site" // site only accepts WebURL values
SiteIDFN = "site-id" // site-id accepts actual site ids
SiteFN = "site" // site only accepts WebURL values
SiteIDFN = "site-id" // site-id accepts actual site ids
)
var (
LibraryFV string
ListFV []string
ListModifiedAfterFV string
ListModifiedBeforeFV string
ListCreatedAfterFV string
ListCreatedBeforeFV string
LibraryFV string
ListFolderFV []string
ListItemFV []string
PageFolderFV []string
PageFV []string
SiteIDFV []string
WebURLFV []string
SiteIDFV []string
WebURLFV []string
)
// AddSharePointDetailsAndRestoreFlags adds flags that are common to both the
@ -79,26 +67,17 @@ func AddSharePointDetailsAndRestoreFlags(cmd *cobra.Command) {
"Select files modified before this datetime.")
// lists
fs.StringSliceVar(
&ListFV,
ListFN, nil,
"Select lists by name.")
fs.StringVar(
&ListModifiedAfterFV,
ListModifiedAfterFN, "",
"Select lists modified after this datetime.")
fs.StringVar(
&ListModifiedBeforeFV,
ListModifiedBeforeFN, "",
"Select lists modified before this datetime.")
fs.StringVar(
&ListCreatedAfterFV,
ListCreatedAfterFN, "",
"Select lists created after this datetime.")
fs.StringVar(
&ListCreatedBeforeFV,
ListCreatedBeforeFN, "",
"Select lists created before this datetime.")
&ListFolderFV,
ListFolderFN, nil,
"Select lists by name; accepts '"+Wildcard+"' to select all lists.")
cobra.CheckErr(fs.MarkHidden(ListFolderFN))
fs.StringSliceVar(
&ListItemFV,
ListItemFN, nil,
"Select lists by item name; accepts '"+Wildcard+"' to select all lists.")
cobra.CheckErr(fs.MarkHidden(ListItemFN))
// pages

View File

@ -1,13 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
)
const (
DataChats = "chats"
)
func AddTeamsChatsDetailsAndRestoreFlags(cmd *cobra.Command) {
// TODO: add details flags
}

View File

@ -20,7 +20,7 @@ func PreparedBackupListFlags() []string {
func AssertBackupListFlags(t *testing.T, cmd *cobra.Command) {
assert.Equal(t, flags.Show, flags.ListAlertsFV)
assert.Equal(t, flags.Show, flags.FailedItemsFV)
assert.Equal(t, flags.Show, flags.ListFailedItemsFV)
assert.Equal(t, flags.Show, flags.ListSkippedItemsFV)
assert.Equal(t, flags.Show, flags.ListRecoveredErrorsFV)
}

View File

@ -21,7 +21,6 @@ var (
ExchangeCategoryDataInput = []string{"email", "events", "contacts"}
SharepointCategoryDataInput = []string{"files", "lists", "pages"}
GroupsCategoryDataInput = []string{"files", "lists", "pages", "messages"}
TeamsChatsCategoryDataInput = []string{"chats"}
ChannelInput = []string{"channel1", "channel2"}
MessageInput = []string{"message1", "message2"}
@ -60,11 +59,8 @@ var (
FileModifiedAfterInput = "fileModifiedAfter"
FileModifiedBeforeInput = "fileModifiedBefore"
ListsInput = []string{"listName1", "listName2"}
ListCreatedAfterInput = "listCreatedAfter"
ListCreatedBeforeInput = "listCreatedBefore"
ListModifiedAfterInput = "listModifiedAfter"
ListModifiedBeforeInput = "listModifiedBefore"
ListFolderInput = []string{"listFolder1", "listFolder2"}
ListItemInput = []string{"listItem1", "listItem2"}
PageFolderInput = []string{"pageFolder1", "pageFolder2"}
PageInput = []string{"page1", "page2"}

View File

@ -1,25 +0,0 @@
package testdata
import (
"testing"
"github.com/spf13/cobra"
)
func PreparedTeamsChatsFlags() []string {
return []string{
// FIXME: populate when adding filters
// "--" + flags.ChatCreatedAfterFN, ChatCreatedAfterInput,
// "--" + flags.ChatCreatedBeforeFN, ChatCreatedBeforeInput,
// "--" + flags.ChatLastMessageAfterFN, ChatLastMessageAfterInput,
// "--" + flags.ChatLastMessageBeforeFN, ChatLastMessageBeforeInput,
}
}
func AssertTeamsChatsFlags(t *testing.T, cmd *cobra.Command) {
// FIXME: populate when adding filters
// assert.Equal(t, ChatCreatedAfterInput, flags.ChatCreatedAfterFV)
// assert.Equal(t, ChatCreatedBeforeInput, flags.ChatCreatedBeforeFV)
// assert.Equal(t, ChatLastMessageAfterInput, flags.ChatLastMessageAfterFV)
// assert.Equal(t, ChatLastMessageBeforeInput, flags.ChatLastMessageBeforeFV)
}

View File

@ -133,7 +133,7 @@ func Pretty(ctx context.Context, a any) {
return
}
printPrettyJSON(ctx, getRootCmd(ctx).ErrOrStderr(), a)
printPrettyJSON(getRootCmd(ctx).ErrOrStderr(), a)
}
// PrettyJSON prettifies and prints the value.
@ -143,7 +143,7 @@ func PrettyJSON(ctx context.Context, p minimumPrintabler) {
return
}
outputJSON(ctx, getRootCmd(ctx).ErrOrStderr(), p, outputAsJSONDebug)
outputJSON(getRootCmd(ctx).ErrOrStderr(), p, outputAsJSONDebug)
}
// out is the testable core of exported print funcs
@ -193,56 +193,56 @@ type minimumPrintabler interface {
// Item prints the printable, according to the caller's requested format.
func Item(ctx context.Context, p Printable) {
printItem(ctx, getRootCmd(ctx).OutOrStdout(), p)
printItem(getRootCmd(ctx).OutOrStdout(), p)
}
// print prints the printable items,
// according to the caller's requested format.
func printItem(ctx context.Context, w io.Writer, p Printable) {
func printItem(w io.Writer, p Printable) {
if outputAsJSON || outputAsJSONDebug {
outputJSON(ctx, w, p, outputAsJSONDebug)
outputJSON(w, p, outputAsJSONDebug)
return
}
outputTable(ctx, w, []Printable{p})
outputTable(w, []Printable{p})
}
// ItemProperties prints the printable either as in a single line or a json
// The difference between this and Item is that this one does not print the ID
func ItemProperties(ctx context.Context, p Printable) {
printItemProperties(ctx, getRootCmd(ctx).OutOrStdout(), p)
printItemProperties(getRootCmd(ctx).OutOrStdout(), p)
}
// print prints the printable items,
// according to the caller's requested format.
func printItemProperties(ctx context.Context, w io.Writer, p Printable) {
func printItemProperties(w io.Writer, p Printable) {
if outputAsJSON || outputAsJSONDebug {
outputJSON(ctx, w, p, outputAsJSONDebug)
outputJSON(w, p, outputAsJSONDebug)
return
}
outputOneLine(ctx, w, []Printable{p})
outputOneLine(w, []Printable{p})
}
// All prints the slice of printable items,
// according to the caller's requested format.
func All(ctx context.Context, ps ...Printable) {
printAll(ctx, getRootCmd(ctx).OutOrStdout(), ps)
printAll(getRootCmd(ctx).OutOrStdout(), ps)
}
// printAll prints the slice of printable items,
// according to the caller's requested format.
func printAll(ctx context.Context, w io.Writer, ps []Printable) {
func printAll(w io.Writer, ps []Printable) {
if len(ps) == 0 {
return
}
if outputAsJSON || outputAsJSONDebug {
outputJSONArr(ctx, w, ps, outputAsJSONDebug)
outputJSONArr(w, ps, outputAsJSONDebug)
return
}
outputTable(ctx, w, ps)
outputTable(w, ps)
}
// ------------------------------------------------------------------------------------------
@ -252,11 +252,11 @@ func printAll(ctx context.Context, w io.Writer, ps []Printable) {
// Table writes the printables in a tabular format. Takes headers from
// the 0th printable only.
func Table(ctx context.Context, ps []Printable) {
outputTable(ctx, getRootCmd(ctx).OutOrStdout(), ps)
outputTable(getRootCmd(ctx).OutOrStdout(), ps)
}
// output to stdout the list of printable structs in a table
func outputTable(ctx context.Context, w io.Writer, ps []Printable) {
func outputTable(w io.Writer, ps []Printable) {
t := table.Table{
Headers: ps[0].Headers(false),
Rows: [][]string{},
@ -266,9 +266,6 @@ func outputTable(ctx context.Context, w io.Writer, ps []Printable) {
t.Rows = append(t.Rows, p.Values(false))
}
// observe bars need to be flushed before printing
observe.Flush(ctx)
_ = t.WriteTable(
w,
&table.Config{
@ -282,20 +279,20 @@ func outputTable(ctx context.Context, w io.Writer, ps []Printable) {
// JSON
// ------------------------------------------------------------------------------------------
func outputJSON(ctx context.Context, w io.Writer, p minimumPrintabler, debug bool) {
func outputJSON(w io.Writer, p minimumPrintabler, debug bool) {
if debug {
printJSON(ctx, w, p)
printJSON(w, p)
return
}
if debug {
printJSON(ctx, w, p)
printJSON(w, p)
} else {
printJSON(ctx, w, p.MinimumPrintable())
printJSON(w, p.MinimumPrintable())
}
}
func outputJSONArr(ctx context.Context, w io.Writer, ps []Printable, debug bool) {
func outputJSONArr(w io.Writer, ps []Printable, debug bool) {
sl := make([]any, 0, len(ps))
for _, p := range ps {
@ -306,14 +303,11 @@ func outputJSONArr(ctx context.Context, w io.Writer, ps []Printable, debug bool)
}
}
printJSON(ctx, w, sl)
printJSON(w, sl)
}
// output to stdout the list of printable structs as json.
func printJSON(ctx context.Context, w io.Writer, a any) {
// observe bars need to be flushed before printing
observe.Flush(ctx)
func printJSON(w io.Writer, a any) {
bs, err := json.Marshal(a)
if err != nil {
fmt.Fprintf(w, "error formatting results to json: %v\n", err)
@ -324,10 +318,7 @@ func printJSON(ctx context.Context, w io.Writer, a any) {
}
// output to stdout the list of printable structs as prettified json.
func printPrettyJSON(ctx context.Context, w io.Writer, a any) {
// observe bars need to be flushed before printing
observe.Flush(ctx)
func printPrettyJSON(w io.Writer, a any) {
bs, err := json.MarshalIndent(a, "", " ")
if err != nil {
fmt.Fprintf(w, "error formatting results to json: %v\n", err)
@ -343,10 +334,7 @@ func printPrettyJSON(ctx context.Context, w io.Writer, a any) {
// Output in the following format:
// Bytes Uploaded: 401 kB | Items Uploaded: 59 | Items Skipped: 0 | Errors: 0
func outputOneLine(ctx context.Context, w io.Writer, ps []Printable) {
// observe bars need to be flushed before printing
observe.Flush(ctx)
func outputOneLine(w io.Writer, ps []Printable) {
headers := ps[0].Headers(true)
rows := [][]string{}

View File

@ -2,6 +2,7 @@ package repo
import (
"github.com/alcionai/clues"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
@ -109,6 +110,10 @@ func initFilesystemCmd(cmd *cobra.Command, args []string) error {
ric := repository.InitConfig{RetentionOpts: retentionOpts}
if err = r.Initialize(ctx, ric); err != nil {
if flags.SucceedIfExistsFV && errors.Is(err, repository.ErrorRepoAlreadyExists) {
return nil
}
return Only(ctx, clues.Stack(ErrInitializingRepo, err))
}

View File

@ -5,6 +5,7 @@ import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
@ -81,9 +82,9 @@ func (suite *FilesystemE2ESuite) TestInitFilesystemCmd() {
err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// noop
// a second initialization should result in an error
err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
assert.ErrorIs(t, err, repository.ErrorRepoAlreadyExists, clues.ToCore(err))
})
}
}

View File

@ -4,6 +4,7 @@ import (
"strings"
"github.com/alcionai/clues"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
@ -131,6 +132,10 @@ func initS3Cmd(cmd *cobra.Command, args []string) error {
ric := repository.InitConfig{RetentionOpts: retentionOpts}
if err = r.Initialize(ctx, ric); err != nil {
if flags.SucceedIfExistsFV && errors.Is(err, repository.ErrorRepoAlreadyExists) {
return nil
}
return Only(ctx, clues.Stack(ErrInitializingRepo, err))
}

View File

@ -89,9 +89,9 @@ func (suite *S3E2ESuite) TestInitS3Cmd() {
err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// noop
// a second initialization should result in an error
err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
assert.ErrorIs(t, err, repository.ErrorRepoAlreadyExists, clues.ToCore(err))
})
}
}
@ -116,7 +116,8 @@ func (suite *S3E2ESuite) TestInitMultipleTimes() {
"repo", "init", "s3",
"--"+flags.ConfigFileFN, configFP,
"--bucket", cfg.Bucket,
"--prefix", cfg.Prefix)
"--prefix", cfg.Prefix,
"--succeed-if-exists")
cli.BuildCommandTree(cmd)
// run the command

View File

@ -2,6 +2,7 @@ package restore
import (
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
@ -9,14 +10,22 @@ import (
// called by restore.go to map subcommands to provider-specific handling.
func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use {
case restoreCommand:
c, _ = utils.AddCommand(cmd, exchangeRestoreCmd())
c, fs = utils.AddCommand(cmd, exchangeRestoreCmd())
c.Use = c.Use + " " + exchangeServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
// general flags
fs.SortFlags = false
flags.AddBackupIDFlag(c, true)
flags.AddExchangeDetailsAndRestoreFlags(c, false)
flags.AddRestoreConfigFlags(c, true)

View File

@ -2,6 +2,7 @@ package restore
import (
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
@ -11,14 +12,21 @@ import (
// called by restore.go to map subcommands to provider-specific handling.
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use {
case restoreCommand:
c, _ = utils.AddCommand(cmd, groupsRestoreCmd(), utils.MarkPreviewCommand())
c, fs = utils.AddCommand(cmd, groupsRestoreCmd(), utils.MarkPreviewCommand())
c.Use = c.Use + " " + groupsServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true)
flags.AddSiteFlag(c, false)
flags.AddSiteIDFlag(c, false)

View File

@ -60,7 +60,8 @@ func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
"--" + flags.ListFN, flagsTD.FlgInputs(flagsTD.ListsInput),
"--" + flags.ListItemFN, flagsTD.FlgInputs(flagsTD.ListItemInput),
"--" + flags.ListFolderFN, flagsTD.FlgInputs(flagsTD.ListFolderInput),
"--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput),
"--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
"--" + flags.CollisionsFN, flagsTD.Collisions,
@ -91,7 +92,6 @@ func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
assert.Equal(t, flagsTD.Collisions, opts.RestoreCfg.Collisions)
assert.Equal(t, flagsTD.Destination, opts.RestoreCfg.Destination)
assert.ElementsMatch(t, flagsTD.ListsInput, opts.Lists)
// assert.Equal(t, flagsTD.ToResource, opts.RestoreCfg.ProtectedResource)
assert.True(t, flags.NoPermissionsFV)
flagsTD.AssertProviderFlags(t, cmd)

View File

@ -2,6 +2,7 @@ package restore
import (
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
@ -10,14 +11,21 @@ import (
// called by restore.go to map subcommands to provider-specific handling.
func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use {
case restoreCommand:
c, _ = utils.AddCommand(cmd, oneDriveRestoreCmd())
c, fs = utils.AddCommand(cmd, oneDriveRestoreCmd())
c.Use = c.Use + " " + oneDriveServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true)
flags.AddOneDriveDetailsAndRestoreFlags(c)
flags.AddNoPermissionsFlag(c)

View File

@ -2,22 +2,31 @@ package restore
import (
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/dttm"
"github.com/alcionai/corso/src/pkg/selectors"
)
// called by restore.go to map subcommands to provider-specific handling.
func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command
var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use {
case restoreCommand:
c, _ = utils.AddCommand(cmd, sharePointRestoreCmd())
c, fs = utils.AddCommand(cmd, sharePointRestoreCmd())
c.Use = c.Use + " " + sharePointServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --site) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true)
flags.AddSharePointDetailsAndRestoreFlags(c)
flags.AddNoPermissionsFlag(c)
@ -50,27 +59,7 @@ corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
# Restore all files in the "Documents" library.
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--library Documents --folder "Display Templates/Style Sheets"
# Restore lists by their name(s)
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list "list-name-1,list-name-2"
# Restore lists created after a given time
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-after 2024-01-01T12:23:34
# Restore lists created before a given time
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-before 2024-01-01T12:23:34
# Restore lists modified before a given time
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-before 2024-01-01T12:23:34
# Restore lists modified after a given time
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-after 2024-01-01T12:23:34`
--library Documents --folder "Display Templates/Style Sheets" `
)
// `corso restore sharepoint [<flag>...]`
@ -106,6 +95,9 @@ func restoreSharePointCmd(cmd *cobra.Command, args []string) error {
sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
utils.FilterSharePointRestoreInfoSelectors(sel, opts)
// Exclude lists from restore since they are not supported yet.
sel.Exclude(sel.Lists(selectors.Any()))
return runRestore(
ctx,
cmd,

View File

@ -59,11 +59,8 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
"--" + flags.ListFN, flagsTD.FlgInputs(flagsTD.ListsInput),
"--" + flags.ListCreatedAfterFN, flagsTD.ListCreatedAfterInput,
"--" + flags.ListCreatedBeforeFN, flagsTD.ListCreatedBeforeInput,
"--" + flags.ListModifiedAfterFN, flagsTD.ListModifiedAfterInput,
"--" + flags.ListModifiedBeforeFN, flagsTD.ListModifiedBeforeInput,
"--" + flags.ListItemFN, flagsTD.FlgInputs(flagsTD.ListItemInput),
"--" + flags.ListFolderFN, flagsTD.FlgInputs(flagsTD.ListFolderInput),
"--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput),
"--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
"--" + flags.CollisionsFN, flagsTD.Collisions,
@ -92,11 +89,8 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore)
assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter)
assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
assert.ElementsMatch(t, flagsTD.ListsInput, opts.Lists)
assert.Equal(t, flagsTD.ListCreatedAfterInput, opts.ListCreatedAfter)
assert.Equal(t, flagsTD.ListCreatedBeforeInput, opts.ListCreatedBefore)
assert.Equal(t, flagsTD.ListModifiedAfterInput, opts.ListModifiedAfter)
assert.Equal(t, flagsTD.ListModifiedBeforeInput, opts.ListModifiedBefore)
assert.ElementsMatch(t, flagsTD.ListItemInput, opts.ListItem)
assert.ElementsMatch(t, flagsTD.ListFolderInput, opts.ListFolder)
assert.ElementsMatch(t, flagsTD.PageInput, opts.Page)
assert.ElementsMatch(t, flagsTD.PageFolderInput, opts.PageFolder)
assert.Equal(t, flagsTD.Collisions, opts.RestoreCfg.Collisions)

View File

@ -1,12 +1,8 @@
package utils
import (
"errors"
"strconv"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/pkg/dttm"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
@ -43,55 +39,3 @@ func trimFolderSlash(folders []string) []string {
return res
}
func validateCommonTimeFlags(opts any) error {
timeFlags := []string{
flags.FileCreatedAfterFN,
flags.FileCreatedBeforeFN,
flags.FileModifiedAfterFN,
flags.FileModifiedBeforeFN,
flags.ListCreatedAfterFN,
flags.ListCreatedBeforeFN,
flags.ListModifiedAfterFN,
flags.ListModifiedBeforeFN,
}
isFlagPopulated := func(opts any, flag string) bool {
switch opts := opts.(type) {
case GroupsOpts:
_, ok := opts.Populated[flag]
return ok
case SharePointOpts:
_, ok := opts.Populated[flag]
return ok
default:
return false
}
}
getTimeField := func(opts any, flag string) (string, error) {
switch opts := opts.(type) {
case GroupsOpts:
return opts.GetFileTimeField(flag), nil
case SharePointOpts:
return opts.GetFileTimeField(flag), nil
default:
return "", errors.New("unsupported type")
}
}
for _, flag := range timeFlags {
if populated := isFlagPopulated(opts, flag); populated {
timeField, err := getTimeField(opts, flag)
if err != nil {
return err
}
if !IsValidTimeFormat(timeField) {
return clues.New("invalid time format for " + flag)
}
}
}
return nil
}

View File

@ -103,6 +103,7 @@ func (suite *FlagUnitSuite) TestAddS3BucketFlags() {
assert.Equal(t, "prefix1", flags.PrefixFV, flags.PrefixFN)
assert.True(t, flags.DoNotUseTLSFV, flags.DoNotUseTLSFN)
assert.True(t, flags.DoNotVerifyTLSFV, flags.DoNotVerifyTLSFN)
assert.True(t, flags.SucceedIfExistsFV, flags.SucceedIfExistsFN)
},
}
@ -115,6 +116,7 @@ func (suite *FlagUnitSuite) TestAddS3BucketFlags() {
"--" + flags.PrefixFN, "prefix1",
"--" + flags.DoNotUseTLSFN,
"--" + flags.DoNotVerifyTLSFN,
"--" + flags.SucceedIfExistsFN,
})
err := cmd.Execute()
@ -128,6 +130,7 @@ func (suite *FlagUnitSuite) TestFilesystemFlags() {
Use: "test",
Run: func(cmd *cobra.Command, args []string) {
assert.Equal(t, "/tmp/test", flags.FilesystemPathFV, flags.FilesystemPathFN)
assert.True(t, flags.SucceedIfExistsFV, flags.SucceedIfExistsFN)
assert.Equal(t, "tenantID", flags.AzureClientTenantFV, flags.AzureClientTenantFN)
assert.Equal(t, "clientID", flags.AzureClientIDFV, flags.AzureClientIDFN)
assert.Equal(t, "secret", flags.AzureClientSecretFV, flags.AzureClientSecretFN)
@ -140,6 +143,7 @@ func (suite *FlagUnitSuite) TestFilesystemFlags() {
cmd.SetArgs([]string{
"test",
"--" + flags.FilesystemPathFN, "/tmp/test",
"--" + flags.SucceedIfExistsFN,
"--" + flags.AzureClientIDFN, "clientID",
"--" + flags.AzureClientTenantFN, "tenantID",
"--" + flags.AzureClientSecretFN, "secret",

View File

@ -32,7 +32,8 @@ type GroupsOpts struct {
FileModifiedAfter string
FileModifiedBefore string
Lists []string
ListFolder []string
ListItem []string
PageFolder []string
Page []string
@ -43,21 +44,6 @@ type GroupsOpts struct {
Populated flags.PopulatedFlags
}
func (g GroupsOpts) GetFileTimeField(flag string) string {
switch flag {
case flags.FileCreatedAfterFN:
return g.FileCreatedAfter
case flags.FileCreatedBeforeFN:
return g.FileCreatedBefore
case flags.FileModifiedAfterFN:
return g.FileModifiedAfter
case flags.FileModifiedBeforeFN:
return g.FileModifiedBefore
default:
return ""
}
}
func GroupsAllowedCategories() map[string]struct{} {
return map[string]struct{}{
flags.DataLibraries: {},
@ -107,7 +93,8 @@ func MakeGroupsOpts(cmd *cobra.Command) GroupsOpts {
MessageLastReplyAfter: flags.MessageLastReplyAfterFV,
MessageLastReplyBefore: flags.MessageLastReplyBeforeFV,
Lists: flags.ListFV,
ListFolder: flags.ListFolderFV,
ListItem: flags.ListItemFV,
Page: flags.PageFV,
PageFolder: flags.PageFolderFV,
@ -139,6 +126,22 @@ func ValidateGroupsRestoreFlags(backupID string, opts GroupsOpts, isRestore bool
}
}
if _, ok := opts.Populated[flags.FileCreatedAfterFN]; ok && !IsValidTimeFormat(opts.FileCreatedAfter) {
return clues.New("invalid time format for " + flags.FileCreatedAfterFN)
}
if _, ok := opts.Populated[flags.FileCreatedBeforeFN]; ok && !IsValidTimeFormat(opts.FileCreatedBefore) {
return clues.New("invalid time format for " + flags.FileCreatedBeforeFN)
}
if _, ok := opts.Populated[flags.FileModifiedAfterFN]; ok && !IsValidTimeFormat(opts.FileModifiedAfter) {
return clues.New("invalid time format for " + flags.FileModifiedAfterFN)
}
if _, ok := opts.Populated[flags.FileModifiedBeforeFN]; ok && !IsValidTimeFormat(opts.FileModifiedBefore) {
return clues.New("invalid time format for " + flags.FileModifiedBeforeFN)
}
if _, ok := opts.Populated[flags.MessageCreatedAfterFN]; ok && !IsValidTimeFormat(opts.MessageCreatedAfter) {
return clues.New("invalid time format for " + flags.MessageCreatedAfterFN)
}
@ -155,7 +158,7 @@ func ValidateGroupsRestoreFlags(backupID string, opts GroupsOpts, isRestore bool
return clues.New("invalid time format for " + flags.MessageLastReplyBeforeFN)
}
return validateCommonTimeFlags(opts)
return nil
}
// AddGroupsFilter adds the scope of the provided values to the selector's
@ -178,7 +181,7 @@ func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *se
var (
groups = opts.Groups
folderPaths, fileNames = len(opts.FolderPath), len(opts.FileName)
lists = len(opts.Lists)
listFolders, listItems = len(opts.ListFolder), len(opts.ListItem)
pageFolders, pageItems = len(opts.PageFolder), len(opts.Page)
chans, chanMsgs = len(opts.Channels), len(opts.Messages)
convs, convPosts = len(opts.Conversations), len(opts.Posts)
@ -191,7 +194,7 @@ func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *se
sel := selectors.NewGroupsRestore(groups)
if folderPaths+fileNames+
lists+
listFolders+listItems+
pageFolders+pageItems+
chans+chanMsgs+
convs+convPosts == 0 {
@ -201,43 +204,58 @@ func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *se
// sharepoint site selectors
if folderPaths+fileNames > 0 {
if fileNames == 0 {
opts.FileName = selectors.Any()
if folderPaths+fileNames+
listFolders+listItems+
pageFolders+pageItems > 0 {
if folderPaths+fileNames > 0 {
if fileNames == 0 {
opts.FileName = selectors.Any()
}
opts.FolderPath = trimFolderSlash(opts.FolderPath)
containsFolders, prefixFolders := splitFoldersIntoContainsAndPrefix(opts.FolderPath)
if len(containsFolders) > 0 {
sel.Include(sel.LibraryItems(containsFolders, opts.FileName))
}
if len(prefixFolders) > 0 {
sel.Include(sel.LibraryItems(prefixFolders, opts.FileName, selectors.PrefixMatch()))
}
}
opts.FolderPath = trimFolderSlash(opts.FolderPath)
containsFolders, prefixFolders := splitFoldersIntoContainsAndPrefix(opts.FolderPath)
if listFolders+listItems > 0 {
if listItems == 0 {
opts.ListItem = selectors.Any()
}
if len(containsFolders) > 0 {
sel.Include(sel.LibraryItems(containsFolders, opts.FileName))
opts.ListFolder = trimFolderSlash(opts.ListFolder)
containsFolders, prefixFolders := splitFoldersIntoContainsAndPrefix(opts.ListFolder)
if len(containsFolders) > 0 {
sel.Include(sel.ListItems(containsFolders, opts.ListItem))
}
if len(prefixFolders) > 0 {
sel.Include(sel.ListItems(prefixFolders, opts.ListItem, selectors.PrefixMatch()))
}
}
if len(prefixFolders) > 0 {
sel.Include(sel.LibraryItems(prefixFolders, opts.FileName, selectors.PrefixMatch()))
}
}
if pageFolders+pageItems > 0 {
if pageItems == 0 {
opts.Page = selectors.Any()
}
if lists > 0 {
opts.Lists = trimFolderSlash(opts.Lists)
sel.Include(sel.ListItems(opts.Lists, opts.Lists, selectors.StrictEqualMatch()))
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
}
opts.PageFolder = trimFolderSlash(opts.PageFolder)
containsFolders, prefixFolders := splitFoldersIntoContainsAndPrefix(opts.PageFolder)
if pageFolders+pageItems > 0 {
if pageItems == 0 {
opts.Page = selectors.Any()
}
if len(containsFolders) > 0 {
sel.Include(sel.PageItems(containsFolders, opts.Page))
}
opts.PageFolder = trimFolderSlash(opts.PageFolder)
containsFolders, prefixFolders := splitFoldersIntoContainsAndPrefix(opts.PageFolder)
if len(containsFolders) > 0 {
sel.Include(sel.PageItems(containsFolders, opts.Page))
}
if len(prefixFolders) > 0 {
sel.Include(sel.PageItems(prefixFolders, opts.Page, selectors.PrefixMatch()))
if len(prefixFolders) > 0 {
sel.Include(sel.PageItems(prefixFolders, opts.Page, selectors.PrefixMatch()))
}
}
}
@ -266,14 +284,9 @@ func IncludeGroupsRestoreDataSelectors(ctx context.Context, opts GroupsOpts) *se
opts.Conversations = selectors.Any()
}
// if no post is specified, select all posts in the conversation
if convPosts == 0 {
opts.Posts = selectors.Any()
}
// if no post is specified, only select conversations;
// otherwise, look for conv/post pairs
if convs == 0 {
// otherwise, look for channel/message pairs
if chanMsgs == 0 {
sel.Include(sel.Conversation(opts.Conversations))
} else {
sel.Include(sel.ConversationPosts(opts.Conversations, opts.Posts))

View File

@ -30,7 +30,6 @@ func (suite *GroupsUtilsSuite) TestIncludeGroupsRestoreDataSelectors() {
containsOnly = []string{"contains"}
prefixOnly = []string{"/prefix"}
containsAndPrefix = []string{"contains", "/prefix"}
listNames = []string{"list-name1"}
onlySlash = []string{string(path.PathSeparator)}
)
@ -92,12 +91,29 @@ func (suite *GroupsUtilsSuite) TestIncludeGroupsRestoreDataSelectors() {
expectIncludeLen: 2,
},
{
name: "list names",
name: "list contains",
opts: utils.GroupsOpts{
Lists: listNames,
FileName: empty,
FolderPath: empty,
ListItem: empty,
ListFolder: containsOnly,
},
expectIncludeLen: 1,
},
{
name: "list prefixes",
opts: utils.GroupsOpts{
ListFolder: prefixOnly,
},
expectIncludeLen: 1,
},
{
name: "list prefixes and contains",
opts: utils.GroupsOpts{
ListFolder: containsAndPrefix,
},
expectIncludeLen: 2,
},
{
name: "Page Folder",
opts: utils.GroupsOpts{

View File

@ -28,7 +28,7 @@ func Control() control.Options {
opt.ToggleFeatures.DisableSlidingWindowLimiter = flags.DisableSlidingWindowLimiterFV
opt.ToggleFeatures.DisableLazyItemReader = flags.DisableLazyItemReaderFV
opt.ToggleFeatures.ExchangeImmutableIDs = flags.EnableImmutableIDFV
opt.ToggleFeatures.UseOldDeltaProcess = flags.UseOldDeltaProcessFV
opt.ToggleFeatures.UseDeltaTree = flags.UseDeltaTreeFV
opt.Parallelism.ItemFetch = flags.FetchParallelismFV
return opt
@ -58,7 +58,7 @@ func ParseBackupOptions() control.BackupConfig {
opt.M365.DeltaPageSize = dps
opt.M365.DisableDeltaEndpoint = flags.DisableDeltaFV
opt.M365.ExchangeImmutableIDs = flags.EnableImmutableIDFV
opt.M365.UseOldDriveDeltaProcess = flags.UseOldDeltaProcessFV
opt.M365.UseDriveDeltaTree = flags.UseDeltaTreeFV
opt.ServiceRateLimiter.DisableSlidingWindowLimiter = flags.DisableSlidingWindowLimiterFV
opt.Parallelism.ItemFetch = flags.FetchParallelismFV
opt.Incrementals.ForceFullEnumeration = flags.DisableIncrementalsFV

View File

@ -25,11 +25,8 @@ type SharePointOpts struct {
FileModifiedAfter string
FileModifiedBefore string
Lists []string
ListModifiedAfter string
ListModifiedBefore string
ListCreatedBefore string
ListCreatedAfter string
ListFolder []string
ListItem []string
PageFolder []string
Page []string
@ -40,29 +37,6 @@ type SharePointOpts struct {
Populated flags.PopulatedFlags
}
func (s SharePointOpts) GetFileTimeField(flag string) string {
switch flag {
case flags.FileCreatedAfterFN:
return s.FileCreatedAfter
case flags.FileCreatedBeforeFN:
return s.FileCreatedBefore
case flags.FileModifiedAfterFN:
return s.FileModifiedAfter
case flags.FileModifiedBeforeFN:
return s.FileModifiedBefore
case flags.ListModifiedAfterFN:
return s.ListModifiedAfter
case flags.ListModifiedBeforeFN:
return s.ListModifiedBefore
case flags.ListCreatedBeforeFN:
return s.ListCreatedBefore
case flags.ListCreatedAfterFN:
return s.ListCreatedAfter
default:
return ""
}
}
func MakeSharePointOpts(cmd *cobra.Command) SharePointOpts {
return SharePointOpts{
SiteID: flags.SiteIDFV,
@ -76,11 +50,8 @@ func MakeSharePointOpts(cmd *cobra.Command) SharePointOpts {
FileModifiedAfter: flags.FileModifiedAfterFV,
FileModifiedBefore: flags.FileModifiedBeforeFV,
Lists: flags.ListFV,
ListModifiedAfter: flags.ListModifiedAfterFV,
ListModifiedBefore: flags.ListModifiedBeforeFV,
ListCreatedAfter: flags.ListCreatedAfterFV,
ListCreatedBefore: flags.ListCreatedBeforeFV,
ListFolder: flags.ListFolderFV,
ListItem: flags.ListItemFV,
Page: flags.PageFV,
PageFolder: flags.PageFolderFV,
@ -98,21 +69,22 @@ func MakeSharePointOpts(cmd *cobra.Command) SharePointOpts {
func SharePointAllowedCategories() map[string]struct{} {
return map[string]struct{}{
flags.DataLibraries: {},
flags.DataLists: {},
// flags.DataLists: {}, [TODO]: uncomment when lists are enabled
}
}
func AddCategories(sel *selectors.SharePointBackup, cats []string) *selectors.SharePointBackup {
if len(cats) == 0 {
// [TODO](hitesh) to enable lists without being invoked explicitly via --data flag
// backup of sharepoint lists not enabled yet
// sel.Include(sel.LibraryFolders(selectors.Any()), sel.Lists(selectors.Any()))
sel.Include(sel.LibraryFolders(selectors.Any()))
}
for _, d := range cats {
switch d {
case flags.DataLists:
sel.Include(sel.Lists(selectors.Any()))
// backup of sharepoint lists not enabled yet
// case flags.DataLists:
// sel.Include(sel.Lists(selectors.Any()))
case flags.DataLibraries:
sel.Include(sel.LibraryFolders(selectors.Any()))
}
@ -136,7 +108,23 @@ func ValidateSharePointRestoreFlags(backupID string, opts SharePointOpts) error
}
}
return validateCommonTimeFlags(opts)
if _, ok := opts.Populated[flags.FileCreatedAfterFN]; ok && !IsValidTimeFormat(opts.FileCreatedAfter) {
return clues.New("invalid time format for " + flags.FileCreatedAfterFN)
}
if _, ok := opts.Populated[flags.FileCreatedBeforeFN]; ok && !IsValidTimeFormat(opts.FileCreatedBefore) {
return clues.New("invalid time format for " + flags.FileCreatedBeforeFN)
}
if _, ok := opts.Populated[flags.FileModifiedAfterFN]; ok && !IsValidTimeFormat(opts.FileModifiedAfter) {
return clues.New("invalid time format for " + flags.FileModifiedAfterFN)
}
if _, ok := opts.Populated[flags.FileModifiedBeforeFN]; ok && !IsValidTimeFormat(opts.FileModifiedBefore) {
return clues.New("invalid time format for " + flags.FileModifiedBeforeFN)
}
return nil
}
// AddSharePointInfo adds the scope of the provided values to the selector's
@ -158,24 +146,24 @@ func AddSharePointInfo(
func IncludeSharePointRestoreDataSelectors(ctx context.Context, opts SharePointOpts) *selectors.SharePointRestore {
sites := opts.SiteID
folderPaths, fileNames := len(opts.FolderPath), len(opts.FileName)
siteIDs, webUrls := len(opts.SiteID), len(opts.WebURL)
lists := len(opts.Lists)
pageFolders, pageItems := len(opts.PageFolder), len(opts.Page)
lfp, lfn := len(opts.FolderPath), len(opts.FileName)
ls, lwu := len(opts.SiteID), len(opts.WebURL)
slp, sli := len(opts.ListFolder), len(opts.ListItem)
pf, pi := len(opts.PageFolder), len(opts.Page)
if siteIDs == 0 {
if ls == 0 {
sites = selectors.Any()
}
sel := selectors.NewSharePointRestore(sites)
if folderPaths+fileNames+webUrls+lists+pageFolders+pageItems == 0 {
if lfp+lfn+lwu+slp+sli+pf+pi == 0 {
sel.Include(sel.AllData())
return sel
}
if folderPaths+fileNames > 0 {
if fileNames == 0 {
if lfp+lfn > 0 {
if lfn == 0 {
opts.FileName = selectors.Any()
}
@ -191,14 +179,25 @@ func IncludeSharePointRestoreDataSelectors(ctx context.Context, opts SharePointO
}
}
if lists > 0 {
opts.Lists = trimFolderSlash(opts.Lists)
sel.Include(sel.ListItems(opts.Lists, opts.Lists, selectors.StrictEqualMatch()))
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
if slp+sli > 0 {
if sli == 0 {
opts.ListItem = selectors.Any()
}
opts.ListFolder = trimFolderSlash(opts.ListFolder)
containsFolders, prefixFolders := splitFoldersIntoContainsAndPrefix(opts.ListFolder)
if len(containsFolders) > 0 {
sel.Include(sel.ListItems(containsFolders, opts.ListItem))
}
if len(prefixFolders) > 0 {
sel.Include(sel.ListItems(prefixFolders, opts.ListItem, selectors.PrefixMatch()))
}
}
if pageFolders+pageItems > 0 {
if pageItems == 0 {
if pf+pi > 0 {
if pi == 0 {
opts.Page = selectors.Any()
}
@ -214,7 +213,7 @@ func IncludeSharePointRestoreDataSelectors(ctx context.Context, opts SharePointO
}
}
if webUrls > 0 {
if lwu > 0 {
urls := make([]string, 0, len(opts.WebURL))
for _, wu := range opts.WebURL {
@ -253,8 +252,4 @@ func FilterSharePointRestoreInfoSelectors(
AddSharePointInfo(sel, opts.FileCreatedBefore, sel.CreatedBefore)
AddSharePointInfo(sel, opts.FileModifiedAfter, sel.ModifiedAfter)
AddSharePointInfo(sel, opts.FileModifiedBefore, sel.ModifiedBefore)
AddSharePointInfo(sel, opts.ListModifiedAfter, sel.ListModifiedAfter)
AddSharePointInfo(sel, opts.ListModifiedBefore, sel.ListModifiedBefore)
AddSharePointInfo(sel, opts.ListCreatedAfter, sel.ListCreatedAfter)
AddSharePointInfo(sel, opts.ListCreatedBefore, sel.ListCreatedBefore)
}

View File

@ -31,7 +31,6 @@ func (suite *SharePointUtilsSuite) TestIncludeSharePointRestoreDataSelectors() {
multi = []string{"more", "than", "one"}
containsOnly = []string{"contains"}
prefixOnly = []string{"/prefix"}
listNames = []string{"list-name1"}
containsAndPrefix = []string{"contains", "/prefix"}
onlySlash = []string{string(path.PathSeparator)}
)
@ -61,7 +60,8 @@ func (suite *SharePointUtilsSuite) TestIncludeSharePointRestoreDataSelectors() {
opts: utils.SharePointOpts{
FileName: single,
FolderPath: single,
Lists: single,
ListItem: single,
ListFolder: single,
SiteID: single,
WebURL: single,
},
@ -108,12 +108,31 @@ func (suite *SharePointUtilsSuite) TestIncludeSharePointRestoreDataSelectors() {
expectIncludeLen: 2,
},
{
name: "list names",
name: "list contains",
opts: utils.SharePointOpts{
Lists: listNames,
FileName: empty,
FolderPath: empty,
ListItem: empty,
ListFolder: containsOnly,
SiteID: empty,
WebURL: empty,
},
expectIncludeLen: 1,
},
{
name: "list prefixes",
opts: utils.SharePointOpts{
ListFolder: prefixOnly,
},
expectIncludeLen: 1,
},
{
name: "list prefixes and contains",
opts: utils.SharePointOpts{
ListFolder: containsAndPrefix,
},
expectIncludeLen: 2,
},
{
name: "weburl contains",
opts: utils.SharePointOpts{
@ -279,20 +298,12 @@ func (suite *SharePointUtilsSuite) TestValidateSharePointRestoreFlags() {
FileCreatedBefore: dttm.Now(),
FileModifiedAfter: dttm.Now(),
FileModifiedBefore: dttm.Now(),
ListCreatedAfter: dttm.Now(),
ListCreatedBefore: dttm.Now(),
ListModifiedAfter: dttm.Now(),
ListModifiedBefore: dttm.Now(),
Populated: flags.PopulatedFlags{
flags.SiteFN: struct{}{},
flags.FileCreatedAfterFN: struct{}{},
flags.FileCreatedBeforeFN: struct{}{},
flags.FileModifiedAfterFN: struct{}{},
flags.FileModifiedBeforeFN: struct{}{},
flags.ListCreatedAfterFN: struct{}{},
flags.ListCreatedBeforeFN: struct{}{},
flags.ListModifiedAfterFN: struct{}{},
flags.ListModifiedBeforeFN: struct{}{},
},
},
expect: assert.NoError,
@ -358,50 +369,6 @@ func (suite *SharePointUtilsSuite) TestValidateSharePointRestoreFlags() {
},
expect: assert.Error,
},
{
name: "invalid list created after",
backupID: "id",
opts: utils.SharePointOpts{
ListCreatedAfter: "1235",
Populated: flags.PopulatedFlags{
flags.ListCreatedAfterFN: struct{}{},
},
},
expect: assert.Error,
},
{
name: "invalid list created before",
backupID: "id",
opts: utils.SharePointOpts{
ListCreatedBefore: "1235",
Populated: flags.PopulatedFlags{
flags.ListCreatedBeforeFN: struct{}{},
},
},
expect: assert.Error,
},
{
name: "invalid list modified after",
backupID: "id",
opts: utils.SharePointOpts{
ListModifiedAfter: "1235",
Populated: flags.PopulatedFlags{
flags.ListModifiedAfterFN: struct{}{},
},
},
expect: assert.Error,
},
{
name: "invalid list modified before",
backupID: "id",
opts: utils.SharePointOpts{
ListModifiedBefore: "1235",
Populated: flags.PopulatedFlags{
flags.ListModifiedBeforeFN: struct{}{},
},
},
expect: assert.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
@ -411,35 +378,36 @@ func (suite *SharePointUtilsSuite) TestValidateSharePointRestoreFlags() {
}
}
// [TODO] uncomment the test cases once sharepoint list backup is enabled
func (suite *SharePointUtilsSuite) TestAddSharepointCategories() {
table := []struct {
name string
cats []string
expectScopeLen int
}{
{
name: "none",
cats: []string{},
expectScopeLen: 1,
},
// {
// name: "none",
// cats: []string{},
// expectScopeLen: 2,
// },
{
name: "libraries",
cats: []string{flags.DataLibraries},
expectScopeLen: 1,
},
{
name: "lists",
cats: []string{flags.DataLists},
expectScopeLen: 1,
},
{
name: "all allowed",
cats: []string{
flags.DataLibraries,
flags.DataLists,
},
expectScopeLen: 2,
},
// {
// name: "lists",
// cats: []string{flags.DataLists},
// expectScopeLen: 1,
// },
// {
// name: "all allowed",
// cats: []string{
// flags.DataLibraries,
// flags.DataLists,
// },
// expectScopeLen: 2,
// },
{
name: "bad inputs",
cats: []string{"foo"},

View File

@ -1,101 +0,0 @@
package utils
import (
"context"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/pkg/selectors"
)
type TeamsChatsOpts struct {
Users []string
ExportCfg ExportCfgOpts
Populated flags.PopulatedFlags
}
func TeamsChatsAllowedCategories() map[string]struct{} {
return map[string]struct{}{
flags.DataChats: {},
}
}
func AddTeamsChatsCategories(sel *selectors.TeamsChatsBackup, cats []string) *selectors.TeamsChatsBackup {
if len(cats) == 0 {
sel.Include(sel.AllData())
}
for _, d := range cats {
switch d {
case flags.DataChats:
sel.Include(sel.Chats(selectors.Any()))
}
}
return sel
}
func MakeTeamsChatsOpts(cmd *cobra.Command) TeamsChatsOpts {
return TeamsChatsOpts{
Users: flags.UserFV,
ExportCfg: makeExportCfgOpts(cmd),
// populated contains the list of flags that appear in the
// command, according to pflags. Use this to differentiate
// between an "empty" and a "missing" value.
Populated: flags.GetPopulatedFlags(cmd),
}
}
// ValidateTeamsChatsRestoreFlags checks common flags for correctness and interdependencies
func ValidateTeamsChatsRestoreFlags(backupID string, opts TeamsChatsOpts, isRestore bool) error {
if len(backupID) == 0 {
return clues.New("a backup ID is required")
}
// restore isn't currently supported
if isRestore {
return clues.New("restore not supported")
}
return nil
}
// AddTeamsChatsFilter adds the scope of the provided values to the selector's
// filter set
func AddTeamsChatsFilter(
sel *selectors.TeamsChatsRestore,
v string,
f func(string) []selectors.TeamsChatsScope,
) {
if len(v) == 0 {
return
}
sel.Filter(f(v))
}
// IncludeTeamsChatsRestoreDataSelectors builds the common data-selector
// inclusions for teamschats commands.
func IncludeTeamsChatsRestoreDataSelectors(ctx context.Context, opts TeamsChatsOpts) *selectors.TeamsChatsRestore {
users := opts.Users
if len(opts.Users) == 0 {
users = selectors.Any()
}
return selectors.NewTeamsChatsRestore(users)
}
// FilterTeamsChatsRestoreInfoSelectors builds the common info-selector filters.
func FilterTeamsChatsRestoreInfoSelectors(
sel *selectors.TeamsChatsRestore,
opts TeamsChatsOpts,
) {
// TODO: populate when adding filters
}
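The Populated map described in the removed helper above (and used throughout the utils validation code) records which flags actually appeared on the command line, which is how the CLI tells an explicitly-empty value apart from an omitted one. A small sketch of the same idea using pflag's Visit, with an illustrative demo command:

package main

import (
	"fmt"

	"github.com/spf13/cobra"
	"github.com/spf13/pflag"
)

func main() {
	cmd := &cobra.Command{
		Use: "demo",
		RunE: func(cmd *cobra.Command, args []string) error {
			// Visit only walks flags that were set on the command line,
			// regardless of whether the value they were set to is empty.
			populated := map[string]struct{}{}
			cmd.Flags().Visit(func(f *pflag.Flag) {
				populated[f.Name] = struct{}{}
			})

			_, ok := populated["user"]
			fmt.Println("--user provided:", ok)

			return nil
		},
	}
	cmd.Flags().String("user", "", "user to select")

	cmd.SetArgs([]string{"--user", ""}) // present but empty: still counts as populated
	_ = cmd.Execute()
}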

View File

@ -7,8 +7,6 @@ import (
"os"
"github.com/alcionai/corso/src/internal/converters/eml"
"github.com/alcionai/corso/src/internal/converters/ics"
"github.com/alcionai/corso/src/internal/converters/vcf"
)
func main() {
@ -29,23 +27,13 @@ func main() {
var out string
switch from {
case "json":
case "msg":
switch to {
case "eml":
out, err = eml.FromJSON(context.Background(), body)
if err != nil {
log.Fatal(err)
}
case "vcf":
out, err = vcf.FromJSON(context.Background(), body)
if err != nil {
log.Fatal(err)
}
case "ics":
out, err = ics.FromJSON(context.Background(), body)
if err != nil {
log.Fatal(err)
}
default:
log.Fatal("Unknown target format", to)
}

View File

@ -18,7 +18,6 @@ import (
"github.com/alcionai/corso/src/internal/m365"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
siteMock "github.com/alcionai/corso/src/internal/m365/service/sharepoint/mock"
m365Stub "github.com/alcionai/corso/src/internal/m365/stub"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/internal/tester"
@ -59,7 +58,7 @@ func generateAndRestoreItems(
service path.ServiceType,
cat path.CategoryType,
sel selectors.Selector,
tenantID, resourceID, destFldr string,
tenantID, userID, destFldr string,
howMany int,
dbf dataBuilderFunc,
opts control.Options,
@ -74,7 +73,7 @@ func generateAndRestoreItems(
nowLegacy = dttm.FormatToLegacy(time.Now())
id = uuid.NewString()
subject = "automated " + now[:16] + " - " + id[:8]
body = "automated " + cat.HumanString() + " generation for " + resourceID + " at " + now + " - " + id
body = "automated " + cat.HumanString() + " generation for " + userID + " at " + now + " - " + id
)
items = append(items, item{
@ -95,7 +94,7 @@ func generateAndRestoreItems(
dataColls, err := buildCollections(
service,
tenantID, resourceID,
tenantID, userID,
restoreCfg,
collections)
if err != nil {
@ -193,31 +192,29 @@ type collection struct {
func buildCollections(
service path.ServiceType,
tenant, resource string,
tenant, user string,
restoreCfg control.RestoreConfig,
colls []collection,
) ([]data.RestoreCollection, error) {
var (
collections = make([]data.RestoreCollection, 0, len(colls))
mc data.Collection
)
collections := make([]data.RestoreCollection, 0, len(colls))
for _, c := range colls {
switch {
case service == path.ExchangeService:
emc, err := generateExchangeMockColls(tenant, resource, c)
if err != nil {
return nil, err
}
pth, err := path.Build(
tenant,
user,
service,
c.category,
false,
c.PathElements...)
if err != nil {
return nil, err
}
mc = emc
case service == path.SharePointService:
smc, err := generateSharepointListsMockColls(tenant, resource, c)
if err != nil {
return nil, err
}
mc := exchMock.NewCollection(pth, pth, len(c.items))
mc = smc
for i := 0; i < len(c.items); i++ {
mc.Names[i] = c.items[i].name
mc.Data[i] = c.items[i].data
}
collections = append(collections, data.NoFetchRestoreCollection{Collection: mc})
@ -226,49 +223,6 @@ func buildCollections(
return collections, nil
}
func generateExchangeMockColls(tenant string, resource string, c collection) (*exchMock.DataCollection, error) {
pth, err := path.Build(
tenant,
resource,
path.ExchangeService,
c.category,
false,
c.PathElements...)
if err != nil {
return nil, err
}
emc := exchMock.NewCollection(pth, pth, len(c.items))
for i := 0; i < len(c.items); i++ {
emc.Names[i] = c.items[i].name
emc.Data[i] = c.items[i].data
}
return emc, nil
}
func generateSharepointListsMockColls(tenant string, resource string, c collection) (*siteMock.ListCollection, error) {
pth, err := path.BuildOrPrefix(
tenant,
resource,
path.SharePointService,
c.category,
false)
if err != nil {
return nil, err
}
smc := siteMock.NewCollection(pth, pth, len(c.items))
for i := 0; i < len(c.items); i++ {
smc.Names[i] = c.items[i].name
smc.Data[i] = c.items[i].data
}
return smc, nil
}
var (
folderAName = "folder-a"
folderBName = "b"

View File

@ -7,8 +7,6 @@ import (
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
siteMock "github.com/alcionai/corso/src/internal/m365/service/sharepoint/mock"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
@ -16,23 +14,14 @@ import (
"github.com/alcionai/corso/src/pkg/selectors"
)
var (
spFilesCmd = &cobra.Command{
Use: "files",
Short: "Generate SharePoint files",
RunE: handleSharePointLibraryFileFactory,
}
spListsCmd = &cobra.Command{
Use: "lists",
Short: "Generate SharePoint lists",
RunE: handleSharepointListsFactory,
}
)
var spFilesCmd = &cobra.Command{
Use: "files",
Short: "Generate SharePoint files",
RunE: handleSharePointLibraryFileFactory,
}
func AddSharePointCommands(cmd *cobra.Command) {
cmd.AddCommand(spFilesCmd)
cmd.AddCommand(spListsCmd)
}
func handleSharePointLibraryFileFactory(cmd *cobra.Command, args []string) error {
@ -81,52 +70,3 @@ func handleSharePointLibraryFileFactory(cmd *cobra.Command, args []string) error
return nil
}
func handleSharepointListsFactory(cmd *cobra.Command, args []string) error {
var (
ctx = cmd.Context()
service = path.SharePointService
category = path.ListsCategory
errs = fault.New(false)
)
if utils.HasNoFlagsAndShownHelp(cmd) {
return nil
}
ctrl, _, ins, err := getControllerAndVerifyResourceOwner(ctx, Site, path.SharePointService)
if err != nil {
return Only(ctx, err)
}
deets, err := generateAndRestoreItems(
ctx,
ctrl,
service,
category,
selectors.NewSharePointRestore([]string{ins.ID()}).Selector,
Tenant, ins.ID(), Destination,
Count,
func(id, now, subject, body string) []byte {
listBytes, err := siteMock.ListBytes(id)
if err != nil {
logger.CtxErr(ctx, err)
return nil
}
return listBytes
},
control.DefaultOptions(),
errs,
count.New())
if err != nil {
return Only(ctx, err)
}
for _, e := range errs.Recovered() {
logger.CtxErr(ctx, e).Error(e.Error())
}
deets.PrintEntries(ctx)
return nil
}
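
For reference, a minimal self-contained cobra sketch of the registration pattern used by AddSharePointCommands above: a parent command gets the generator subcommands attached to it. The "factory" command name and the RunE body are illustrative, not part of this change.

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

func main() {
	root := &cobra.Command{Use: "factory"}

	spFiles := &cobra.Command{
		Use:   "files",
		Short: "Generate SharePoint files",
		RunE: func(cmd *cobra.Command, args []string) error {
			fmt.Println("generating files...")
			return nil
		},
	}

	// mirrors cmd.AddCommand(spFilesCmd) above
	root.AddCommand(spFiles)

	if err := root.Execute(); err != nil {
		fmt.Println(err)
	}
}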

View File

@ -6,6 +6,12 @@ Param (
[Parameter(Mandatory = $False, HelpMessage = "Site for which to delete folders in SharePoint")]
[String]$Site,
[Parameter(Mandatory = $False, HelpMessage = "Exchange Admin email")]
[String]$AdminUser = $ENV:M365_TENANT_ADMIN_USER,
[Parameter(Mandatory = $False, HelpMessage = "Exchange Admin password")]
[String]$AdminPwd = $ENV:M365_TENANT_ADMIN_PASSWORD,
[Parameter(Mandatory = $False, HelpMessage = "Document library root. Can add multiple comma-separated values")]
[String[]]$LibraryNameList = @(),
@ -16,16 +22,7 @@ Param (
[String[]]$FolderPrefixPurgeList,
[Parameter(Mandatory = $False, HelpMessage = "Delete document libraries with this prefix")]
[String[]]$LibraryPrefixDeleteList = @(),
[Parameter(Mandatory = $False, HelpMessage = "Tenant domain")]
[String]$TenantDomain = $ENV:TENANT_DOMAIN,
[Parameter(Mandatory = $False, HelpMessage = "Azure ClientId")]
[String]$ClientId = $ENV:AZURE_CLIENT_ID,
[Parameter(Mandatory = $False, HelpMessage = "Azure AppCert")]
[String]$AppCert = $ENV:AZURE_APP_CERT
[String[]]$LibraryPrefixDeleteList = @()
)
Set-StrictMode -Version 2.0
@ -111,7 +108,6 @@ function Purge-Library {
$foldersToPurge = @()
$folders = Get-PnPFolderItem -FolderSiteRelativeUrl $LibraryName -ItemType Folder
Write-Host "`nFolders: $folders"
foreach ($f in $folders) {
$folderName = $f.Name
$createTime = Get-TimestampFromFolderName -Folder $f
@ -213,8 +209,8 @@ if (-not (Get-Module -ListAvailable -Name PnP.PowerShell)) {
}
if ([string]::IsNullOrEmpty($ClientId) -or [string]::IsNullOrEmpty($AppCert)) {
Write-Host "ClientId and AppCert required as arguments or environment variables."
if ([string]::IsNullOrEmpty($AdminUser) -or [string]::IsNullOrEmpty($AdminPwd)) {
Write-Host "Admin user name and password required as arguments or environment variables."
Exit
}
@ -255,8 +251,12 @@ else {
Exit
}
$password = convertto-securestring -String $AdminPwd -AsPlainText -Force
$cred = New-Object -TypeName System.Management.Automation.PSCredential -ArgumentList $AdminUser, $password
Write-Host "`nAuthenticating and connecting to $SiteUrl"
Connect-PnPOnline -Url $siteUrl -ClientId $ClientId -CertificateBase64Encoded $AppCert -Tenant $TenantDomain
Connect-PnPOnline -Url $siteUrl -Credential $cred
Write-Host "Connected to $siteUrl`n"
# ensure that there are no unexpanded entries in the list of parameters

View File

@ -19,40 +19,33 @@ type PermissionInfo struct {
}
const (
sanityBackupID = "SANITY_BACKUP_ID"
sanityTestSourceContainer = "SANITY_TEST_SOURCE_CONTAINER"
sanityTestRestoreContainer = "SANITY_TEST_RESTORE_CONTAINER"
sanityTestRestoreContainerPrefix = "SANITY_TEST_RESTORE_CONTAINER_PREFIX"
sanityTestUser = "SANITY_TEST_USER"
sanityTestCategory = "SANITY_TEST_CATEGORY"
sanityBackupID = "SANITY_BACKUP_ID"
sanityTestSourceContainer = "SANITY_TEST_SOURCE_CONTAINER"
sanityTestRestoreContainer = "SANITY_TEST_RESTORE_CONTAINER"
sanityTestUser = "SANITY_TEST_USER"
)
type Envs struct {
BackupID string
SourceContainer string
RestoreContainer string
// applies to SharePoint lists only
RestoreContainerPrefix string
Category string
GroupID string
SiteID string
UserID string
TeamSiteID string
GroupID string
SiteID string
UserID string
TeamSiteID string
}
func EnvVars(ctx context.Context) Envs {
folder := strings.TrimSpace(os.Getenv(sanityTestRestoreContainer))
e := Envs{
BackupID: os.Getenv(sanityBackupID),
SourceContainer: os.Getenv(sanityTestSourceContainer),
RestoreContainer: folder,
Category: os.Getenv(sanityTestCategory),
RestoreContainerPrefix: os.Getenv(sanityTestRestoreContainerPrefix),
GroupID: tconfig.GetM365TeamID(ctx),
SiteID: tconfig.GetM365SiteID(ctx),
UserID: tconfig.GetM365UserID(ctx),
TeamSiteID: tconfig.GetM365TeamSiteID(ctx),
BackupID: os.Getenv(sanityBackupID),
SourceContainer: os.Getenv(sanityTestSourceContainer),
RestoreContainer: folder,
GroupID: tconfig.GetM365TeamID(ctx),
SiteID: tconfig.GetM365SiteID(ctx),
UserID: tconfig.GetM365UserID(ctx),
TeamSiteID: tconfig.GetM365TeamSiteID(ctx),
}
if len(os.Getenv(sanityTestUser)) > 0 {

View File

@ -20,17 +20,30 @@ func BuildFilepathSanitree(
info os.FileInfo,
err error,
) error {
if root == nil {
root = CreateNewRoot(info, true)
return nil
if err != nil {
Fatal(ctx, "error passed to filepath walker", err)
}
relPath := GetRelativePath(
ctx,
rootDir,
p,
info,
err)
relPath, err := filepath.Rel(rootDir, p)
if err != nil {
Fatal(ctx, "getting relative filepath", err)
}
if info != nil {
Debugf(ctx, "adding: %s", relPath)
}
if root == nil {
root = &Sanitree[fs.FileInfo, fs.FileInfo]{
Self: info,
ID: info.Name(),
Name: info.Name(),
Leaves: map[string]*Sanileaf[fs.FileInfo, fs.FileInfo]{},
Children: map[string]*Sanitree[fs.FileInfo, fs.FileInfo]{},
}
return nil
}
elems := path.Split(relPath)
node := root.NodeAt(ctx, elems[:len(elems)-1])
@ -65,41 +78,3 @@ func BuildFilepathSanitree(
return root
}
func CreateNewRoot(info fs.FileInfo, initChildren bool) *Sanitree[fs.FileInfo, fs.FileInfo] {
root := &Sanitree[fs.FileInfo, fs.FileInfo]{
Self: info,
ID: info.Name(),
Name: info.Name(),
Leaves: map[string]*Sanileaf[fs.FileInfo, fs.FileInfo]{},
Children: map[string]*Sanitree[fs.FileInfo, fs.FileInfo]{},
}
if initChildren {
root.Children = map[string]*Sanitree[fs.FileInfo, fs.FileInfo]{}
}
return root
}
func GetRelativePath(
ctx context.Context,
rootDir, p string,
info fs.FileInfo,
fileWalkerErr error,
) string {
if fileWalkerErr != nil {
Fatal(ctx, "error passed to filepath walker", fileWalkerErr)
}
relPath, err := filepath.Rel(rootDir, p)
if err != nil {
Fatal(ctx, "getting relative filepath", err)
}
if info != nil {
Debugf(ctx, "adding: %s", relPath)
}
return relPath
}
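
A small self-contained sketch of the path handling the walker above depends on: compute each visited entry's path relative to the walk root, then split it into elements so the tree can be descended one folder at a time. Paths are illustrative; the real code uses corso's path package rather than strings.Split.

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

func main() {
	rootDir := "/tmp/export"
	p := "/tmp/export/folder-a/item.json"

	// same relative-path computation as the walker above
	relPath, err := filepath.Rel(rootDir, p) // "folder-a/item.json"
	if err != nil {
		panic(err)
	}

	elems := strings.Split(filepath.ToSlash(relPath), "/")
	fmt.Println(elems[:len(elems)-1]) // parent folders: [folder-a]
	fmt.Println(elems[len(elems)-1])  // leaf name: item.json
}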

View File

@ -3,9 +3,7 @@ package driveish
import (
"context"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cmd/sanity_test/common"
"github.com/alcionai/corso/src/internal/common/ptr"
@ -17,24 +15,10 @@ const (
owner = "owner"
)
// sanitree population will grab a superset of data in the drive.
// this increases the chance that we'll run into a race collision with
// the cleanup script. Sometimes that's okay (deleting old data that
// isn't scrutinized in the test), other times it's not. We mark whether
// that's okay to do or not by specifying the folders being
// scrutinized for the test. Any errors within those folders should cause
// a fatal exit. Errors outside of those folders get ignored.
//
// since we're using folder names, mustPopulateFolders will
// work best (ie: have the fewest collisions/side-effects) if the folder
// names are very specific. Standard sanity tests should include timestamps,
// which should help ensure that. Be warned if you try to use it with
// a more generic name: unintended effects could occur.
func populateSanitree(
ctx context.Context,
ac api.Client,
driveID string,
mustPopulateFolders []string,
) *common.Sanitree[models.DriveItemable, models.DriveItemable] {
common.Infof(ctx, "building sanitree for drive: %s", driveID)
@ -43,12 +27,10 @@ func populateSanitree(
common.Fatal(ctx, "getting drive root folder", err)
}
rootName := ptr.Val(root.GetName())
stree := &common.Sanitree[models.DriveItemable, models.DriveItemable]{
Self: root,
ID: ptr.Val(root.GetId()),
Name: rootName,
Name: ptr.Val(root.GetName()),
Leaves: map[string]*common.Sanileaf[models.DriveItemable, models.DriveItemable]{},
Children: map[string]*common.Sanitree[models.DriveItemable, models.DriveItemable]{},
}
@ -58,8 +40,6 @@ func populateSanitree(
ac,
driveID,
stree.Name+"/",
mustPopulateFolders,
slices.Contains(mustPopulateFolders, rootName),
stree)
return stree
@ -68,29 +48,14 @@ func populateSanitree(
func recursivelyBuildTree(
ctx context.Context,
ac api.Client,
driveID string,
location string,
mustPopulateFolders []string,
isChildOfFolderRequiringNoErrors bool,
driveID, location string,
stree *common.Sanitree[models.DriveItemable, models.DriveItemable],
) {
common.Debugf(ctx, "adding: %s", location)
children, err := ac.Drives().GetFolderChildren(ctx, driveID, stree.ID)
if err != nil {
if isChildOfFolderRequiringNoErrors {
common.Fatal(ctx, "getting drive children by id", err)
}
common.Infof(
ctx,
"ignoring error getting children in directory %q because it is not within directory set %v\nerror: %s\n%+v",
location,
mustPopulateFolders,
err.Error(),
clues.ToCore(err))
return
common.Fatal(ctx, "getting drive children by id", err)
}
for _, driveItem := range children {
@ -103,20 +68,17 @@ func recursivelyBuildTree(
// currently we don't restore blank folders.
// skip permission check for empty folders
if ptr.Val(driveItem.GetFolder().GetChildCount()) == 0 {
common.Infof(ctx, "skipped empty folder: %s%s", location, itemName)
common.Infof(ctx, "skipped empty folder: %s/%s", location, itemName)
continue
}
cannotAllowErrors := isChildOfFolderRequiringNoErrors ||
slices.Contains(mustPopulateFolders, itemName)
branch := &common.Sanitree[models.DriveItemable, models.DriveItemable]{
Parent: stree,
Self: driveItem,
ID: itemID,
Name: itemName,
Expand: map[string]any{
expandPermissions: permissionIn(ctx, ac, driveID, itemID, cannotAllowErrors),
expandPermissions: permissionIn(ctx, ac, driveID, itemID),
},
Leaves: map[string]*common.Sanileaf[models.DriveItemable, models.DriveItemable]{},
Children: map[string]*common.Sanitree[models.DriveItemable, models.DriveItemable]{},
@ -129,8 +91,6 @@ func recursivelyBuildTree(
ac,
driveID,
location+branch.Name+"/",
mustPopulateFolders,
cannotAllowErrors,
branch)
}
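
A minimal sketch of the error-tolerance rule described in the comment above populateSanitree: failures are fatal only inside folders the test explicitly scrutinizes (or their descendants); anywhere else they are logged and skipped. The function name is hypothetical; slices is golang.org/x/exp/slices, as imported by this file on main.

// mustNotTolerateErrors reports whether errors under folderName should be
// treated as fatal: either an ancestor already required no errors, or the
// folder itself is in the scrutinized set.
func mustNotTolerateErrors(
	parentRequiresNoErrors bool,
	mustPopulateFolders []string,
	folderName string,
) bool {
	return parentRequiresNoErrors || slices.Contains(mustPopulateFolders, folderName)
}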

View File

@ -31,8 +31,7 @@ func CheckExport(
root := populateSanitree(
ctx,
ac,
driveID,
[]string{envs.SourceContainer})
driveID)
sourceTree, ok := root.Children[envs.SourceContainer]
common.Assert(

View File

@ -45,14 +45,7 @@ func CheckRestoration(
"drive_id", driveID,
"drive_name", driveName)
root := populateSanitree(
ctx,
ac,
driveID,
[]string{
envs.SourceContainer,
envs.RestoreContainer,
})
root := populateSanitree(ctx, ac, driveID)
sourceTree, ok := root.Children[envs.SourceContainer]
common.Assert(
@ -92,24 +85,12 @@ func permissionIn(
ctx context.Context,
ac api.Client,
driveID, itemID string,
cannotAllowErrors bool,
) []common.PermissionInfo {
pi := []common.PermissionInfo{}
pcr, err := ac.Drives().GetItemPermission(ctx, driveID, itemID)
if err != nil {
if cannotAllowErrors {
common.Fatal(ctx, "getting permission", err)
}
common.Infof(
ctx,
"ignoring error getting permissions for %q\nerror: %s,%+v",
itemID,
err.Error(),
clues.ToCore(err))
return []common.PermissionInfo{}
common.Fatal(ctx, "getting permission", err)
}
for _, perm := range pcr.GetValue() {

View File

@ -2,20 +2,10 @@ package export
import (
"context"
"io"
"io/fs"
"os"
"path/filepath"
"strings"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/tidwall/gjson"
"github.com/alcionai/corso/src/cmd/sanity_test/common"
"github.com/alcionai/corso/src/cmd/sanity_test/driveish"
"github.com/alcionai/corso/src/cmd/sanity_test/restore"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
@ -24,128 +14,15 @@ func CheckSharePointExport(
ac api.Client,
envs common.Envs,
) {
if envs.Category == path.ListsCategory.String() {
CheckSharepointListsExport(ctx, ac, envs)
}
if envs.Category == path.LibrariesCategory.String() {
drive, err := ac.Sites().GetDefaultDrive(ctx, envs.SiteID)
if err != nil {
common.Fatal(ctx, "getting the drive:", err)
}
envs.RestoreContainer = filepath.Join(envs.RestoreContainer, "Libraries/Documents") // check in default loc
driveish.CheckExport(
ctx,
ac,
drive,
envs)
}
}
func CheckSharepointListsExport(
ctx context.Context,
ac api.Client,
envs common.Envs,
) {
exportFolderName := path.ListsCategory.HumanString()
sourceTree := restore.BuildListsSanitree(ctx, ac, envs.SiteID, envs.SourceContainer, exportFolderName)
listsExportDir := filepath.Join(envs.RestoreContainer, exportFolderName)
exportedTree := BuildFilepathSanitreeForSharepointLists(ctx, listsExportDir)
ctx = clues.Add(
ctx,
"export_container_id", exportedTree.ID,
"export_container_name", exportedTree.Name,
"source_container_id", sourceTree.ID,
"source_container_name", sourceTree.Name)
comparator := func(
ctx context.Context,
expect *common.Sanitree[models.Siteable, models.Listable],
result *common.Sanitree[fs.FileInfo, fs.FileInfo],
) {
modifiedResultLeaves := map[string]*common.Sanileaf[fs.FileInfo, fs.FileInfo]{}
for key, val := range result.Leaves {
fixedName := strings.TrimSuffix(key, ".json")
modifiedResultLeaves[fixedName] = val
}
common.CompareLeaves(ctx, expect.Leaves, modifiedResultLeaves, nil)
}
common.CompareDiffTrees(
ctx,
sourceTree,
exportedTree,
comparator)
common.Infof(ctx, "Success")
}
func BuildFilepathSanitreeForSharepointLists(
ctx context.Context,
rootDir string,
) *common.Sanitree[fs.FileInfo, fs.FileInfo] {
var root *common.Sanitree[fs.FileInfo, fs.FileInfo]
walker := func(
p string,
info os.FileInfo,
err error,
) error {
if root == nil {
root = common.CreateNewRoot(info, false)
return nil
}
relPath := common.GetRelativePath(
ctx,
rootDir,
p,
info,
err)
if !info.IsDir() {
file, err := os.Open(p)
if err != nil {
common.Fatal(ctx, "opening file to read", err)
}
defer file.Close()
content, err := io.ReadAll(file)
if err != nil {
common.Fatal(ctx, "reading file", err)
}
res := gjson.Get(string(content), "items.#")
itemsCount := res.Num
elems := path.Split(relPath)
node := root.NodeAt(ctx, elems[:len(elems)-2])
node.CountLeaves++
node.Leaves[info.Name()] = &common.Sanileaf[fs.FileInfo, fs.FileInfo]{
Parent: node,
Self: info,
ID: info.Name(),
Name: info.Name(),
// using list item count as size for lists
Size: int64(itemsCount),
}
}
return nil
}
err := filepath.Walk(rootDir, walker)
drive, err := ac.Sites().GetDefaultDrive(ctx, envs.SiteID)
if err != nil {
common.Fatal(ctx, "walking filepath", err)
common.Fatal(ctx, "getting the drive:", err)
}
return root
envs.RestoreContainer = filepath.Join(envs.RestoreContainer, "Libraries/Documents") // check in default loc
driveish.CheckExport(
ctx,
ac,
drive,
envs)
}
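
A small self-contained example of the gjson item counting used above: the "items.#" path returns the number of elements in the exported list's items array, which the sanity test then uses as the leaf size. The JSON payload is illustrative.

package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	content := `{"displayName": "my-list", "items": [{"id": 1}, {"id": 2}, {"id": 3}]}`

	// same call pattern as the walker above
	res := gjson.Get(content, "items.#")
	fmt.Println(int64(res.Num)) // 3
}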

View File

@ -2,15 +2,9 @@ package restore
import (
"context"
"fmt"
"strings"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/cmd/sanity_test/common"
"github.com/alcionai/corso/src/cmd/sanity_test/driveish"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
@ -19,155 +13,16 @@ func CheckSharePointRestoration(
ac api.Client,
envs common.Envs,
) {
if envs.Category == "lists" {
CheckSharePointListsRestoration(ctx, ac, envs)
drive, err := ac.Sites().GetDefaultDrive(ctx, envs.SiteID)
if err != nil {
common.Fatal(ctx, "getting site's default drive:", err)
}
if envs.Category == "libraries" {
drive, err := ac.Sites().GetDefaultDrive(ctx, envs.SiteID)
if err != nil {
common.Fatal(ctx, "getting site's default drive:", err)
}
driveish.CheckRestoration(
ctx,
ac,
drive,
envs,
// skip permissions tests
nil)
}
}
func CheckSharePointListsRestoration(
ctx context.Context,
ac api.Client,
envs common.Envs,
) {
restoredTree := BuildListsSanitree(ctx, ac, envs.SiteID, envs.RestoreContainerPrefix, "")
sourceTree := BuildListsSanitree(ctx, ac, envs.SiteID, envs.SourceContainer, "")
ctx = clues.Add(
driveish.CheckRestoration(
ctx,
"restore_container_id", restoredTree.ID,
"restore_container_name", restoredTree.Name,
"source_container_id", sourceTree.ID,
"source_container_name", sourceTree.Name)
common.CompareDiffTrees[models.Siteable, models.Listable](
ctx,
sourceTree,
restoredTree,
ac,
drive,
envs,
// skip permissions tests
nil)
common.Infof(ctx, "Success")
}
func BuildListsSanitree(
ctx context.Context,
ac api.Client,
siteID string,
restoreContainerPrefix, exportFolderName string,
) *common.Sanitree[models.Siteable, models.Listable] {
common.Infof(ctx, "building sanitree for lists of site: %s", siteID)
site, err := ac.Sites().GetByID(ctx, siteID, api.CallConfig{})
if err != nil {
common.Fatal(
ctx,
fmt.Sprintf("finding site by id %q", siteID),
err)
}
cfg := api.CallConfig{
Select: idAnd("displayName", "list", "lastModifiedDateTime"),
}
lists, err := ac.Lists().GetLists(ctx, siteID, cfg)
if err != nil {
common.Fatal(
ctx,
fmt.Sprintf("finding lists of site with id %q", siteID),
err)
}
lists = filterToSupportedLists(lists)
filteredLists := filterListsByPrefix(lists, restoreContainerPrefix)
rootTreeName := ptr.Val(site.GetDisplayName())
// lists get stored into the local dir at destination/Lists/
if len(exportFolderName) > 0 {
rootTreeName = exportFolderName
}
root := &common.Sanitree[models.Siteable, models.Listable]{
Self: site,
ID: ptr.Val(site.GetId()),
Name: rootTreeName,
CountLeaves: len(filteredLists),
Leaves: map[string]*common.Sanileaf[models.Siteable, models.Listable]{},
}
for _, list := range filteredLists {
listID := ptr.Val(list.GetId())
listItems, err := ac.Lists().GetListItems(ctx, siteID, listID, api.CallConfig{})
if err != nil {
common.Fatal(
ctx,
fmt.Sprintf("finding listItems of list with id %q", listID),
err)
}
m := &common.Sanileaf[models.Siteable, models.Listable]{
Parent: root,
Self: list,
ID: listID,
Name: ptr.Val(list.GetDisplayName()),
// using list item count as size for lists
Size: int64(len(listItems)),
}
root.Leaves[m.ID] = m
}
return root
}
func filterToSupportedLists(lists []models.Listable) []models.Listable {
filteredLists := make([]models.Listable, 0)
for _, list := range lists {
if !api.SkipListTemplates.HasKey(ptr.Val(list.GetList().GetTemplate())) {
filteredLists = append(filteredLists, list)
}
}
return filteredLists
}
func filterListsByPrefix(lists []models.Listable, prefix string) []models.Listable {
result := []models.Listable{}
for _, list := range lists {
for _, pfx := range strings.Split(prefix, ",") {
if strings.HasPrefix(ptr.Val(list.GetDisplayName()), pfx) {
result = append(result, list)
break
}
}
}
return result
}
func idAnd(ss ...string) []string {
id := []string{"id"}
if len(ss) == 0 {
return id
}
return append(id, ss...)
}
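
An illustration of the prefix filtering above: the restore container prefix may be a comma-separated list, and a SharePoint list is kept when its display name starts with any of the prefixes. The helper name and inputs are illustrative only.

package main

import (
	"fmt"
	"strings"
)

// keepByPrefix mirrors the inner loop of filterListsByPrefix above for a
// single display name.
func keepByPrefix(displayName, prefix string) bool {
	for _, pfx := range strings.Split(prefix, ",") {
		if strings.HasPrefix(displayName, pfx) {
			return true
		}
	}

	return false
}

func main() {
	fmt.Println(keepByPrefix("Corso_Restore_2024-01-01_list-a", "Corso_Restore_")) // true
	fmt.Println(keepByPrefix("Documents", "Corso_Restore_"))                       // false
}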

View File

@ -2,32 +2,35 @@ module github.com/alcionai/corso/src
go 1.21
replace github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20231205231702-863c24d6f8b1
replace (
github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4
// No tags in the alcion fork of the repo so use v7 as that's in the import path.
github.com/minio/minio-go/v7 => github.com/alcionai/minio-go/v7 v7.0.0-20231130221740-c745a3d084aa
// Alcion fork removes the validation of email addresses as we might get incomplete email addresses
github.com/xhit/go-simple-mail/v2 v2.16.0 => github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a
)
require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1
github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0
github.com/alcionai/clues v0.0.0-20231115004051-523cbddac8e8
github.com/armon/go-metrics v0.4.1
github.com/aws/aws-xray-sdk-go v1.8.3
github.com/cenkalti/backoff/v4 v4.2.1
github.com/fatih/color v1.16.0
github.com/golang-jwt/jwt/v5 v5.2.0
github.com/google/uuid v1.6.0
github.com/google/uuid v1.5.0
github.com/h2non/gock v1.2.0
github.com/jaytaylor/html2text v0.0.0-20230321000545-74c2419ad056
github.com/jhillyerd/enmime v1.1.0
github.com/kopia/kopia v0.15.0
github.com/microsoft/kiota-abstractions-go v1.5.4
github.com/microsoft/kiota-abstractions-go v1.5.3
github.com/microsoft/kiota-authentication-azure-go v1.0.1
github.com/microsoft/kiota-http-go v1.1.1
github.com/microsoft/kiota-serialization-form-go v1.0.0
github.com/microsoft/kiota-serialization-json-go v1.0.5
github.com/microsoftgraph/msgraph-sdk-go v1.30.0
github.com/microsoft/kiota-serialization-json-go v1.0.4
github.com/microsoftgraph/msgraph-sdk-go v1.28.0
github.com/microsoftgraph/msgraph-sdk-go-core v1.0.1
github.com/pkg/errors v0.9.1
github.com/puzpuzpuz/xsync/v3 v3.0.2
@ -46,18 +49,11 @@ require (
go.uber.org/zap v1.26.0
golang.org/x/exp v0.0.0-20231127185646-65229373498e
golang.org/x/time v0.5.0
golang.org/x/tools v0.17.0
golang.org/x/tools v0.16.1
gotest.tools/v3 v3.5.1
)
require (
github.com/arran4/golang-ical v0.2.4
github.com/emersion/go-vcard v0.0.0-20230815062825-8fda7d206ec9
jaytaylor.com/html2text v0.0.0-20230321000545-74c2419ad056
)
require (
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.2.1 // indirect
github.com/VividCortex/ewma v1.2.0 // indirect
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect
github.com/andybalholm/brotli v1.0.6 // indirect
@ -82,19 +78,19 @@ require (
github.com/sourcegraph/conc v0.3.0 // indirect
github.com/spf13/afero v1.11.0 // indirect
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect
github.com/std-uritemplate/std-uritemplate/go v0.0.50 // indirect
github.com/std-uritemplate/std-uritemplate/go v0.0.47 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
github.com/toorop/go-dkim v0.0.0-20201103131630-e1cd1a0a5208 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/valyala/fasthttp v1.51.0 // indirect
go.opentelemetry.io/otel/metric v1.21.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20231212172506-995d672761c0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20231127180814-3a041ad873d4 // indirect
)
require (
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
@ -110,10 +106,10 @@ require (
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.17.4 // indirect
github.com/klauspost/compress v1.17.3 // indirect
github.com/klauspost/cpuid/v2 v2.2.6 // indirect
github.com/klauspost/pgzip v1.2.6 // indirect
github.com/klauspost/reedsolomon v1.12.0 // indirect
github.com/klauspost/reedsolomon v1.11.8 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
@ -121,15 +117,15 @@ require (
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
github.com/microsoft/kiota-serialization-text-go v1.0.0
github.com/minio/md5-simd v1.1.2 // indirect
github.com/minio/minio-go/v7 v7.0.67
github.com/minio/minio-go/v7 v7.0.64
github.com/minio/sha256-simd v1.0.1 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/natefinch/atomic v1.0.1 // indirect
github.com/pierrec/lz4 v2.6.1+incompatible // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/prometheus/client_golang v1.18.0 // indirect
github.com/prometheus/client_golang v1.17.0 // indirect
github.com/prometheus/client_model v0.5.0 // indirect
github.com/prometheus/common v0.45.0 // indirect
github.com/prometheus/procfs v0.12.0 // indirect
@ -137,21 +133,21 @@ require (
github.com/rs/xid v1.5.0 // indirect
github.com/segmentio/backo-go v1.0.1 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/tidwall/gjson v1.17.0
github.com/tidwall/gjson v1.17.0 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c // indirect
github.com/zeebo/blake3 v0.2.3 // indirect
go.opentelemetry.io/otel v1.21.0 // indirect
go.opentelemetry.io/otel/trace v1.21.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
golang.org/x/crypto v0.18.0 // indirect
golang.org/x/crypto v0.17.0 // indirect
golang.org/x/mod v0.14.0 // indirect
golang.org/x/net v0.20.0
golang.org/x/sync v0.6.0 // indirect
golang.org/x/sys v0.16.0 // indirect
golang.org/x/net v0.19.0
golang.org/x/sync v0.5.0 // indirect
golang.org/x/sys v0.15.0 // indirect
golang.org/x/text v0.14.0 // indirect
google.golang.org/grpc v1.60.1 // indirect
google.golang.org/protobuf v1.32.0 // indirect
google.golang.org/grpc v1.59.0 // indirect
google.golang.org/protobuf v1.31.0 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

View File

@ -1,15 +1,11 @@
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 h1:lGlwhPtrX6EVml1hO0ivjkUxsSyl4dsiw9qcA1k/3IQ=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1/go.mod h1:RKUqNu35KJYcVG/fqTRqmuXJZYNhYkBrnC/hX7yGbTA=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 h1:sO0/P7g68FrryJzljemN+6GTssUXdANk6aJ7T1ZxnsQ=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1/go.mod h1:h8hyGFDsU5HMivxiS2iYFZsgDbU9OnnJ163x5UGVKYo=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0 h1:BMAjVKJM0U/CYF27gA0ZMmXGkOcvfFtD0oHVZ1TIPRI=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.4.0/go.mod h1:1fXstnBMas5kzG+S3q8UoJcmyU6nUeunJcMDHcRYHhs=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 h1:6oNBlSdi1QqM1PNW7FPA6xOGA5UNsXnkaYZz9vdPGhA=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1/go.mod h1:s4kgfzA0covAXNicZHDMN58jExvcng2mC/DepXiF1EI=
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.5.0 h1:AifHbc4mg0x9zW52WOpKbsHaDKuRhlI7TVl47thgQ70=
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/storage/armstorage v1.5.0/go.mod h1:T5RfihdXtBDxt1Ch2wobif3TvzTdumDy29kahv6AV9A=
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.2.1 h1:AMf7YbZOZIW5b66cXNHMWWT/zkjhz5+a+k/3x40EO7E=
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.2.1/go.mod h1:uwfk06ZBcvL/g4VHNjurPfVln9NMbsk2XIZxJ+hu81k=
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 h1:DzHpqpoJVaCgOUdVHxE8QB52S6NiVdDQvGlny1qvPqA=
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.0 h1:hVeq+yCyUi+MsoO/CU95yqCIcdzra5ovzk8Q2BBpV2M=
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.0/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/DATA-DOG/go-sqlmock v1.4.1 h1:ThlnYciV1iM/V0OSF/dtkqWb6xo5qITT1TJBG1MRDJM=
github.com/DATA-DOG/go-sqlmock v1.4.1/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
@ -19,12 +15,14 @@ github.com/VividCortex/ewma v1.2.0 h1:f58SaIzcDXrSy3kWaHNvuJgJ3Nmz59Zji6XoJR/q1o
github.com/VividCortex/ewma v1.2.0/go.mod h1:nz4BbCtbLyFDeC9SUHbtcT5644juEuWfUAUnGx7j5l4=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8=
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo=
github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c h1:QtARFaqYKtGjmEejr07KFf2iyfCAdTxYGRAAFveLjFA=
github.com/alcionai/clues v0.0.0-20240125221452-9fc7746dd20c/go.mod h1:1YJwJy3W6GGsC2UiDAEWABUjgvT8OZHjKs8OoaXeKbw=
github.com/alcionai/clues v0.0.0-20231115004051-523cbddac8e8 h1:kdpkhcKWWxnZFteGjglHvFTTPCPsvjhKDDY9J1Od6Sg=
github.com/alcionai/clues v0.0.0-20231115004051-523cbddac8e8/go.mod h1:hGnRqQtV7YoojQSNMtqFK0TvfcwAljGZhUEQwx1lw34=
github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a h1:4nhM0NM1qtUT1s55rQ+D0Xw1Re5mIU9/crjEl6KdE+k=
github.com/alcionai/go-simple-mail/v2 v2.0.0-20231220071811-c70ebcd9a41a/go.mod h1:b7P5ygho6SYE+VIqpxA6QkYfv4teeyG4MKqB3utRu98=
github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4 h1:3YZ70H3mkUgwiHLiNvukrqh2awRgfl1RAkbV0IoUqqk=
github.com/alcionai/kopia v0.12.2-0.20240322180947-41471159a0a4/go.mod h1:QFRSOUQzZfKE3hKVBHP7hxOn5WyrEmdBtfN5wkib/eA=
github.com/alcionai/kopia v0.12.2-0.20231205231702-863c24d6f8b1 h1:UM4YDqNmwRsajtoQT4BkMCRrjIeMDwTuTbJs5fPQTTA=
github.com/alcionai/kopia v0.12.2-0.20231205231702-863c24d6f8b1/go.mod h1:f4PligAuyEicX+lfTlZltc69nM0eMoXX2nE5sCBdo6Y=
github.com/alcionai/minio-go/v7 v7.0.0-20231130221740-c745a3d084aa h1:PHzp5TkXgsardwMG6O2nnyk3zBsGW8CqgsOWQCYkykQ=
github.com/alcionai/minio-go/v7 v7.0.0-20231130221740-c745a3d084aa/go.mod h1:R4WVUR6ZTedlCcGwZRauLMIKjgyaWxhs4Mqi/OMPmEc=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@ -35,8 +33,6 @@ github.com/andybalholm/brotli v1.0.6 h1:Yf9fFpf49Zrxb9NlQaluyE92/+X7UVHlhMNJN2sx
github.com/andybalholm/brotli v1.0.6/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
github.com/arran4/golang-ical v0.2.4 h1:0/rTXn2qqEekLKec3SzRRy+z7pCLtniMb0KD/dPogUo=
github.com/arran4/golang-ical v0.2.4/go.mod h1:RqMuPGmwRRwjkb07hmm+JBqcWa1vF1LvVmPtSZN2OhQ=
github.com/aws/aws-sdk-go v1.48.6 h1:hnL/TE3eRigirDLrdRE9AWE1ALZSVLAsC4wK8TGsMqk=
github.com/aws/aws-sdk-go v1.48.6/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk=
github.com/aws/aws-xray-sdk-go v1.8.3 h1:S8GdgVncBRhzbNnNUgTPwhEqhwt2alES/9rLASyhxjU=
@ -61,7 +57,6 @@ github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp
github.com/cjlapao/common-go v0.0.39 h1:bAAUrj2B9v0kMzbAOhzjSmiyDy+rd56r2sy7oEiQLlA=
github.com/cjlapao/common-go v0.0.39/go.mod h1:M3dzazLjTjEtZJbbxoA5ZDiGCiHmpwqW9l4UWaddwOA=
github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE=
github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@ -74,8 +69,6 @@ github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkp
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/edsrzf/mmap-go v1.1.0 h1:6EUwBLQ/Mcr1EYLE4Tn1VdW1A4ckqCQWZBw8Hr0kjpQ=
github.com/edsrzf/mmap-go v1.1.0/go.mod h1:19H/e8pUPLicwkyNgOykDXkJ9F0MHE+Z52B8EIth78Q=
github.com/emersion/go-vcard v0.0.0-20230815062825-8fda7d206ec9 h1:ATgqloALX6cHCranzkLb8/zjivwQ9DWWDCQRnxTPfaA=
github.com/emersion/go-vcard v0.0.0-20230815062825-8fda7d206ec9/go.mod h1:HMJKR5wlh/ziNp+sHEDV2ltblO4JD2+IdDOWtGcQBTM=
github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM=
github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
@ -117,8 +110,8 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU=
github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw=
@ -158,16 +151,16 @@ github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/u
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4=
github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
github.com/klauspost/compress v1.17.3 h1:qkRjuerhUU1EmXLYGkSH6EZL+vPSxIrYjLNAK4slzwA=
github.com/klauspost/compress v1.17.3/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.0.12/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c=
github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc=
github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU=
github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
github.com/klauspost/reedsolomon v1.12.0 h1:I5FEp3xSwVCcEh3F5A7dofEfhXdF/bWhQWPH+XwBFno=
github.com/klauspost/reedsolomon v1.12.0/go.mod h1:EPLZJeh4l27pUGC3aXOjheaoh1I9yut7xTURiW3LQ9Y=
github.com/klauspost/reedsolomon v1.11.8 h1:s8RpUW5TK4hjr+djiOpbZJB4ksx+TdYbRH7vHQpwPOY=
github.com/klauspost/reedsolomon v1.11.8/go.mod h1:4bXRN+cVzMdml6ti7qLouuYi32KHJ5MGv0Qd8a47h6A=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kopia/htmluibuild v0.0.1-0.20231019063300-75c2a788c7d0 h1:TvupyyfbUZzsO4DQJpQhKZnUa61xERcJ+ejCbHWG2NY=
github.com/kopia/htmluibuild v0.0.1-0.20231019063300-75c2a788c7d0/go.mod h1:cSImbrlwvv2phvj5RfScL2v08ghX6xli0PcK6f+t8S0=
@ -199,28 +192,26 @@ github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 h1:jWpvCLoY8Z/e3VKvls
github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0/go.mod h1:QUyp042oQthUoa9bqDv0ER0wrtXnBruoNd7aNjkbP+k=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/microsoft/kiota-abstractions-go v1.5.4 h1:ljOnE7BBT94xUIFQwzBZVJj2Udh///7/DCGBBRTvcIs=
github.com/microsoft/kiota-abstractions-go v1.5.4/go.mod h1:PcgbR/QXB3EePCbP1OM4Hhk1R9a033D4K/gC3ltHv2w=
github.com/microsoft/kiota-abstractions-go v1.5.3 h1:qUTwuXCbMi99EkHaTh5NGMK5MOKxJn7u/M2FbYcesLY=
github.com/microsoft/kiota-abstractions-go v1.5.3/go.mod h1:xyBzTVCYrp7QBW4/p+RFi44PHwp/IPn2dZepuV4nF80=
github.com/microsoft/kiota-authentication-azure-go v1.0.1 h1:F4HH+2QQHSecQg50gVEZaUcxA8/XxCaC2oOMYv2gTIM=
github.com/microsoft/kiota-authentication-azure-go v1.0.1/go.mod h1:IbifJeoi+sULI0vjnsWYSmDu5atFo/4FZ6WCoAkPjsc=
github.com/microsoft/kiota-http-go v1.1.1 h1:W4Olo7Z/MwNZCfkcvH/5eLhnn7koRBMMRhLEnf5MPKo=
github.com/microsoft/kiota-http-go v1.1.1/go.mod h1:QzhhfW5xkoUuT+/ohflpHJvumWeXIxa/Xl0GmQ2M6mY=
github.com/microsoft/kiota-serialization-form-go v1.0.0 h1:UNdrkMnLFqUCccQZerKjblsyVgifS11b3WCx+eFEsAI=
github.com/microsoft/kiota-serialization-form-go v1.0.0/go.mod h1:h4mQOO6KVTNciMF6azi1J9QB19ujSw3ULKcSNyXXOMA=
github.com/microsoft/kiota-serialization-json-go v1.0.5 h1:DKLs/zcRlY+UrcmI8bCprqYeh3UKfbgbzwy/H2elrmM=
github.com/microsoft/kiota-serialization-json-go v1.0.5/go.mod h1:SgAmhkzRPX1cjnzEWTv988IcBet7wbr4y6y014l5Y1w=
github.com/microsoft/kiota-serialization-json-go v1.0.4 h1:5TaISWwd2Me8clrK7SqNATo0tv9seOq59y4I5953egQ=
github.com/microsoft/kiota-serialization-json-go v1.0.4/go.mod h1:rM4+FsAY+9AEpBsBzkFFis+b/LZLlNKKewuLwK9Q6Mg=
github.com/microsoft/kiota-serialization-multipart-go v1.0.0 h1:3O5sb5Zj+moLBiJympbXNaeV07K0d46IfuEd5v9+pBs=
github.com/microsoft/kiota-serialization-multipart-go v1.0.0/go.mod h1:yauLeBTpANk4L03XD985akNysG24SnRJGaveZf+p4so=
github.com/microsoft/kiota-serialization-text-go v1.0.0 h1:XOaRhAXy+g8ZVpcq7x7a0jlETWnWrEum0RhmbYrTFnA=
github.com/microsoft/kiota-serialization-text-go v1.0.0/go.mod h1:sM1/C6ecnQ7IquQOGUrUldaO5wj+9+v7G2W3sQ3fy6M=
github.com/microsoftgraph/msgraph-sdk-go v1.30.0 h1:KbP7PPsLPrWT3jy377/qDPSrl0CUhZvaQUZujnsqzU4=
github.com/microsoftgraph/msgraph-sdk-go v1.30.0/go.mod h1:lhcb/pb6Ae/auQmcFYZkr0gNwSWCR5wrvN7yow1x5Yc=
github.com/microsoftgraph/msgraph-sdk-go v1.28.0 h1:WjisgYSVVx0HtHU6EckLog8i0v+i87OpUZgP+wnL2jM=
github.com/microsoftgraph/msgraph-sdk-go v1.28.0/go.mod h1:quVwiVQY6sxPiPR/O0Zli2iqXis1TPQBSEtq/uOcc+4=
github.com/microsoftgraph/msgraph-sdk-go-core v1.0.1 h1:uq4qZD8VXLiNZY0t4NoRpLDoEiNYJvAQK3hc0ZMmdxs=
github.com/microsoftgraph/msgraph-sdk-go-core v1.0.1/go.mod h1:HUITyuFN556+0QZ/IVfH5K4FyJM7kllV6ExKi2ImKhE=
github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
github.com/minio/minio-go/v7 v7.0.67 h1:BeBvZWAS+kRJm1vGTMJYVjKUNoo0FoEt/wUWdUtfmh8=
github.com/minio/minio-go/v7 v7.0.67/go.mod h1:+UXocnUeZ3wHvVh5s95gcrA4YjMIbccT6ubB+1m054A=
github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM=
github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
@ -237,7 +228,6 @@ github.com/natefinch/atomic v1.0.1 h1:ZPYKxkqQOx3KZ+RsbnP/YsgvxWQPGxjC0oBt2AhwV0
github.com/natefinch/atomic v1.0.1/go.mod h1:N/D/ELrljoqDyT3rZrsUmtsuzvHkeB/wWjHV22AZRbM=
github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4=
github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32/go.mod h1:9wM+0iRr9ahx58uYLpLIr5fm8diHn0JbqRycJi6w0Ms=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY=
@ -246,8 +236,8 @@ github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6
github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
github.com/pierrec/lz4 v2.6.1+incompatible h1:9UY3+iC23yxF0UfGaYrGplQ+79Rg+h/q9FV9ix19jjM=
github.com/pierrec/lz4 v2.6.1+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU=
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
@ -258,8 +248,8 @@ github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
github.com/prometheus/client_golang v1.18.0 h1:HzFfmkOzH5Q8L8G+kSJKUx5dtG87sewO+FoDDqP5Tbk=
github.com/prometheus/client_golang v1.18.0/go.mod h1:T+GXkCk5wSJyOqMIzVgvvjFDlkOQntgjkJWKrN5txjA=
github.com/prometheus/client_golang v1.17.0 h1:rl2sfwZMtSthVU752MqfjQozy7blglC+1SOtjMAMh+Q=
github.com/prometheus/client_golang v1.17.0/go.mod h1:VeL+gMmOAxkS2IqfCq0ZmHSL+LjWfWDUmp1mBz9JgUY=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
@ -312,8 +302,8 @@ github.com/spf13/viper v1.18.2 h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ=
github.com/spf13/viper v1.18.2/go.mod h1:EKmWIqdnk5lOcmR72yw6hS+8OPYcwD0jteitLMVB+yk=
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf h1:pvbZ0lM0XWPBqUKqFU8cmavspvIl9nulOYwdy6IFRRo=
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf/go.mod h1:RJID2RhlZKId02nZ62WenDCkgHFerpIOmW0iT7GKmXM=
github.com/std-uritemplate/std-uritemplate/go v0.0.50 h1:LAE6WYRmLlDXPtEzr152BnD/MHxGCKmcp5D2Pw0NvmU=
github.com/std-uritemplate/std-uritemplate/go v0.0.50/go.mod h1:CLZ1543WRCuUQQjK0BvPM4QrG2toY8xNZUm8Vbt7vTc=
github.com/std-uritemplate/std-uritemplate/go v0.0.47 h1:erzz/DR4sOzWr0ca2MgSTkMckpLEsDySaTZwVFQq9zw=
github.com/std-uritemplate/std-uritemplate/go v0.0.47/go.mod h1:Qov4Ay4U83j37XjgxMYevGJFLbnZ2o9cEOhGufBKgKY=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
@ -328,8 +318,8 @@ github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcU
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/tg123/go-htpasswd v1.2.2 h1:tmNccDsQ+wYsoRfiONzIhDm5OkVHQzN3w4FOBAlN6BY=
github.com/tg123/go-htpasswd v1.2.2/go.mod h1:FcIrK0J+6zptgVwK1JDlqyajW/1B4PtuJ/FLWl7nx8A=
github.com/tg123/go-htpasswd v1.2.1 h1:i4wfsX1KvvkyoMiHZzjS0VzbAPWfxzI8INcZAKtutoU=
github.com/tg123/go-htpasswd v1.2.1/go.mod h1:erHp1B86KXdwQf1X5ZrLb7erXZnWueEQezb2dql4q58=
github.com/tidwall/gjson v1.17.0 h1:/Jocvlh98kcTfpN2+JzGQWQcqrPQwDrVEMApx/M5ZwM=
github.com/tidwall/gjson v1.17.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
@ -372,21 +362,21 @@ go.uber.org/zap v1.26.0 h1:sI7k6L95XOKS281NhVKOFCUNIvv9e0w4BF8N3u+tCRo=
go.uber.org/zap v1.26.0/go.mod h1:dtElttAiwGvoJ/vj4IwHBS/gXsEu/pZ50mUIRWuG0so=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k=
golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
golang.org/x/exp v0.0.0-20231127185646-65229373498e h1:Gvh4YaCaXNs6dKTlfgismwWZKyjVZXwOPfIyUaqU3No=
golang.org/x/exp v0.0.0-20231127185646-65229373498e/go.mod h1:iRJReGqOEeBhDZGkGbynYwcHlctCvnjTYIamk7uXpHI=
golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0=
golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo=
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c=
golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE=
golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -394,34 +384,33 @@ golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU=
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc=
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.17.0 h1:FvmRgNOcs3kOa+T20R1uhfP9F6HgG2mfxDv1vrx1Htc=
golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps=
golang.org/x/tools v0.16.1 h1:TLyB3WofjdOEepBHAU20JdNC1Zbg87elYofWYAY5oZA=
golang.org/x/tools v0.16.1/go.mod h1:kYVVN6I1mBNoB1OX+noeBjbRk4IUEPa7JJ+TJMEooJ0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/genproto/googleapis/rpc v0.0.0-20231212172506-995d672761c0 h1:/jFB8jK5R3Sq3i/lmeZO0cATSzFfZaJq1J2Euan3XKU=
google.golang.org/genproto/googleapis/rpc v0.0.0-20231212172506-995d672761c0/go.mod h1:FUoWkonphQm3RhTS+kOEhF8h0iDpm4tdXolVCeZ9KKA=
google.golang.org/grpc v1.60.1 h1:26+wFr+cNqSGFcOXcabYC0lUVJVRa2Sb2ortSK7VrEU=
google.golang.org/grpc v1.60.1/go.mod h1:OlCHIeLYqSSsLi6i49B5QGdzaMZK9+M7LXN2FKz4eGM=
google.golang.org/genproto/googleapis/rpc v0.0.0-20231127180814-3a041ad873d4 h1:DC7wcm+i+P1rN3Ff07vL+OndGg5OhNddHyTA+ocPqYE=
google.golang.org/genproto/googleapis/rpc v0.0.0-20231127180814-3a041ad873d4/go.mod h1:eJVxU6o+4G1PSczBr85xmyvSNYAKvAYgkub40YGomFM=
google.golang.org/grpc v1.59.0 h1:Z5Iec2pjwb+LEOqzpB2MR12/eKFhDPhuqW91O+4bwUk=
google.golang.org/grpc v1.59.0/go.mod h1:aUPDwccQo6OTjy7Hct4AfBPD1GptF4fyUjIkQ9YtF98=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I=
google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8=
google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
@ -434,10 +423,7 @@ gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU=
gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU=
jaytaylor.com/html2text v0.0.0-20230321000545-74c2419ad056 h1:6YFJoB+0fUH6X3xU/G2tQqCYg+PkGtnZ5nMR5rpw72g=
jaytaylor.com/html2text v0.0.0-20230321000545-74c2419ad056/go.mod h1:OxvTsCwKosqQ1q7B+8FwXqg4rKZ/UG9dUW+g/VL2xH4=

View File

@ -10,7 +10,6 @@ import (
"github.com/alcionai/corso/src/pkg/dttm"
"github.com/alcionai/corso/src/pkg/export"
"github.com/alcionai/corso/src/pkg/logger"
)
const (
@ -57,22 +56,12 @@ func ZipExportCollection(
defer wr.Close()
buf := make([]byte, ZipCopyBufferSize)
counted := 0
log := logger.Ctx(ctx).
With("collection_count", len(expCollections))
for _, ec := range expCollections {
folder := ec.BasePath()
items := ec.Items(ctx)
for item := range items {
counted++
// Log every 1000 items that are processed
if counted%1000 == 0 {
log.Infow("progress zipping export items", "count_items", counted)
}
err := item.Error
if err != nil {
writer.CloseWithError(clues.Wrap(err, "getting export item").With("id", item.ID))
@ -99,12 +88,8 @@ func ZipExportCollection(
writer.CloseWithError(clues.Wrap(err, "writing zip entry").With("name", name).With("id", item.ID))
return
}
item.Body.Close()
}
}
log.Infow("completed zipping export items", "count_items", counted)
}()
return zipCollection{reader}, nil
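
A tiny self-contained illustration of the progress logging added above: emit a line every 1000 items processed, plus a final summary. The logger call is stubbed with fmt for brevity; the item count is illustrative.

package main

import "fmt"

func main() {
	counted := 0

	for i := 0; i < 2500; i++ {
		counted++

		// log every 1000 items that are processed
		if counted%1000 == 0 {
			fmt.Printf("progress zipping export items count_items=%d\n", counted)
		}
	}

	fmt.Printf("completed zipping export items count_items=%d\n", counted)
}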

View File

@ -1,13 +1,10 @@
package jwt
import (
"context"
"time"
"github.com/alcionai/clues"
jwt "github.com/golang-jwt/jwt/v5"
"github.com/alcionai/corso/src/pkg/logger"
)
// IsJWTExpired checks if the JWT token is past expiry by analyzing the
@ -40,51 +37,3 @@ func IsJWTExpired(
return expired, nil
}
// GetJWTLifetime returns the issued at(iat) and expiration time(exp) claims
// present in the JWT token. These are optional claims and may not be present
// in the token. Absence is not reported as an error.
//
// An error is returned if the supplied token is malformed. Times are returned
// in UTC to have parity with graph responses.
func GetJWTLifetime(
ctx context.Context,
rawToken string,
) (time.Time, time.Time, error) {
var (
issuedAt time.Time
expiresAt time.Time
)
p := jwt.NewParser()
token, _, err := p.ParseUnverified(rawToken, &jwt.RegisteredClaims{})
if err != nil {
logger.CtxErr(ctx, err).Debug("parsing jwt token")
return time.Time{}, time.Time{}, clues.Wrap(err, "invalid jwt")
}
exp, err := token.Claims.GetExpirationTime()
if err != nil {
logger.CtxErr(ctx, err).Debug("extracting exp claim")
return time.Time{}, time.Time{}, clues.Wrap(err, "getting token expiry time")
}
iat, err := token.Claims.GetIssuedAt()
if err != nil {
logger.CtxErr(ctx, err).Debug("extracting iat claim")
return time.Time{}, time.Time{}, clues.Wrap(err, "getting token issued at time")
}
// Absence of iat or exp claims is not reported as an error by jwt library as these
// are optional as per spec.
if iat != nil {
issuedAt = iat.UTC()
}
if exp != nil {
expiresAt = exp.UTC()
}
return issuedAt, expiresAt, nil
}
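
A hedged usage sketch of GetJWTLifetime as defined above; tokenStillValid is a hypothetical caller. A zero exp time means the claim was absent, which this sketch treats as "no expiry". It uses the context and time imports already present in this package.

// tokenStillValid reports whether the token's exp claim (if present) is still
// in the future.
func tokenStillValid(ctx context.Context, rawToken string) (bool, error) {
	_, exp, err := GetJWTLifetime(ctx, rawToken)
	if err != nil {
		return false, err
	}

	if exp.IsZero() {
		// no exp claim: treat as non-expiring
		return true, nil
	}

	return time.Now().UTC().Before(exp), nil
}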

View File

@ -113,134 +113,3 @@ func (suite *JWTUnitSuite) TestIsJWTExpired() {
})
}
}
func (suite *JWTUnitSuite) TestGetJWTLifetime() {
// Set of time values to be used in the tests.
// Truncate to seconds for comparisons since jwt tokens have second
// level precision.
idToTime := map[string]time.Time{
"T0": time.Now().UTC().Add(-time.Hour).Truncate(time.Second),
"T1": time.Now().UTC().Truncate(time.Second),
"T2": time.Now().UTC().Add(time.Hour).Truncate(time.Second),
}
table := []struct {
name string
getToken func() (string, error)
expectFunc func(t *testing.T, iat time.Time, exp time.Time)
expectErr assert.ErrorAssertionFunc
}{
{
name: "alive token",
getToken: func() (string, error) {
return createJWTToken(
jwt.RegisteredClaims{
IssuedAt: jwt.NewNumericDate(idToTime["T0"]),
ExpiresAt: jwt.NewNumericDate(idToTime["T1"]),
})
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, idToTime["T0"], iat)
assert.Equal(t, idToTime["T1"], exp)
},
expectErr: assert.NoError,
},
// Test with a token which is not generated using the go-jwt lib.
// This is a long lived token which is valid for 100 years.
{
name: "alive raw token with iat and exp claims",
getToken: func() (string, error) {
return rawToken, nil
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Less(t, iat, time.Now(), "iat should be in the past")
assert.Greater(t, exp, time.Now(), "exp should be in the future")
},
expectErr: assert.NoError,
},
// Regardless of whether the token is expired or not, we should be able to
// extract the iat and exp claims from it without error.
{
name: "expired token",
getToken: func() (string, error) {
return createJWTToken(
jwt.RegisteredClaims{
IssuedAt: jwt.NewNumericDate(idToTime["T1"]),
ExpiresAt: jwt.NewNumericDate(idToTime["T0"]),
})
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, idToTime["T1"], iat)
assert.Equal(t, idToTime["T0"], exp)
},
expectErr: assert.NoError,
},
{
name: "missing iat claim",
getToken: func() (string, error) {
return createJWTToken(
jwt.RegisteredClaims{
ExpiresAt: jwt.NewNumericDate(idToTime["T2"]),
})
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, time.Time{}, iat)
assert.Equal(t, idToTime["T2"], exp)
},
expectErr: assert.NoError,
},
{
name: "missing exp claim",
getToken: func() (string, error) {
return createJWTToken(
jwt.RegisteredClaims{
IssuedAt: jwt.NewNumericDate(idToTime["T0"]),
})
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, idToTime["T0"], iat)
assert.Equal(t, time.Time{}, exp)
},
expectErr: assert.NoError,
},
{
name: "both claims missing",
getToken: func() (string, error) {
return createJWTToken(jwt.RegisteredClaims{})
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, time.Time{}, iat)
assert.Equal(t, time.Time{}, exp)
},
expectErr: assert.NoError,
},
{
name: "malformed token",
getToken: func() (string, error) {
return "header.claims.signature", nil
},
expectFunc: func(t *testing.T, iat time.Time, exp time.Time) {
assert.Equal(t, time.Time{}, iat)
assert.Equal(t, time.Time{}, exp)
},
expectErr: assert.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
token, err := test.getToken()
require.NoError(t, err)
iat, exp, err := GetJWTLifetime(ctx, token)
test.expectErr(t, err)
test.expectFunc(t, iat, exp)
})
}
}

View File

@ -59,19 +59,6 @@ func First(vs ...string) string {
return ""
}
// FirstIn returns the value of the first key in the provided list whose
// map entry resolves to a non-empty string.
func FirstIn(m map[string]any, keys ...string) string {
for _, key := range keys {
v, err := AnyValueToString(key, m)
if err == nil && len(v) > 0 {
return v
}
}
return ""
}
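// Usage sketch (not part of the original file): hypothetical keys and values
// showing FirstIn's fall-through behavior over nil and empty entries.
func exampleFirstIn() string {
	m := map[string]any{
		"displayName": "",
		"nickname":    nil,
		"mail":        "user@example.com",
	}
	// Returns "user@example.com"; the first two keys resolve to empty values.
	return FirstIn(m, "displayName", "nickname", "mail")
}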
// Preview reduces the string to the specified size.
// If the string is longer than the size, the last three
// characters are replaced with an ellipsis. Size < 4

View File

@ -118,96 +118,3 @@ func TestGenerateHash(t *testing.T) {
}
}
}
func TestFirstIn(t *testing.T) {
table := []struct {
name string
m map[string]any
keys []string
expect string
}{
{
name: "nil map",
keys: []string{"foo", "bar"},
expect: "",
},
{
name: "empty map",
m: map[string]any{},
keys: []string{"foo", "bar"},
expect: "",
},
{
name: "no match",
m: map[string]any{
"baz": "baz",
},
keys: []string{"foo", "bar"},
expect: "",
},
{
name: "no keys",
m: map[string]any{
"baz": "baz",
},
keys: []string{},
expect: "",
},
{
name: "nil match",
m: map[string]any{
"foo": nil,
},
keys: []string{"foo", "bar"},
expect: "",
},
{
name: "empty match",
m: map[string]any{
"foo": "",
},
keys: []string{"foo", "bar"},
expect: "",
},
{
name: "matches first key",
m: map[string]any{
"foo": "fnords",
},
keys: []string{"foo", "bar"},
expect: "fnords",
},
{
name: "matches second key",
m: map[string]any{
"bar": "smarf",
},
keys: []string{"foo", "bar"},
expect: "smarf",
},
{
name: "matches second key with nil first match",
m: map[string]any{
"foo": nil,
"bar": "smarf",
},
keys: []string{"foo", "bar"},
expect: "smarf",
},
{
name: "matches second key with empty first match",
m: map[string]any{
"foo": "",
"bar": "smarf",
},
keys: []string{"foo", "bar"},
expect: "smarf",
},
}
for _, test := range table {
t.Run(test.name, func(t *testing.T) {
result := FirstIn(test.m, test.keys...)
assert.Equal(t, test.expect, result)
})
}
}

View File

@ -1,15 +1,9 @@
package syncd
import (
"sync"
"github.com/puzpuzpuz/xsync/v3"
)
// ---------------------------------------------------------------------------
// string -> V
// ---------------------------------------------------------------------------
// MapTo produces a threadsafe map[string]V
type MapTo[V any] struct {
xmo *xsync.MapOf[string, V]
@ -33,60 +27,3 @@ func (m MapTo[V]) Load(k string) (V, bool) {
func (m MapTo[V]) Size() int {
return m.xmo.Size()
}
// ---------------------------------------------------------------------------
// K -> V
// ---------------------------------------------------------------------------
// for lazy initialization
var mu sync.Mutex
// MapOf produces a threadsafe map[K]V
type MapOf[K comparable, V any] struct {
xmo *xsync.MapOf[K, V]
}
// NewMapOf produces a new threadsafe mapOf[K]V
func NewMapOf[K comparable, V any]() MapOf[K, V] {
return MapOf[K, V]{
xmo: xsync.NewMapOf[K, V](),
}
}
// LazyInit ensures the underlying map is initialized.
// No-op if already initialized.
func (m *MapOf[K, V]) LazyInit() {
mu.Lock()
defer mu.Unlock()
if m.xmo == nil {
m.xmo = xsync.NewMapOf[K, V]()
}
}
func (m MapOf[K, V]) Store(k K, v V) {
m.xmo.Store(k, v)
}
func (m MapOf[K, V]) Load(k K) (V, bool) {
return m.xmo.Load(k)
}
func (m MapOf[K, V]) Size() int {
return m.xmo.Size()
}
func (m MapOf[K, V]) Values() map[K]V {
vs := map[K]V{}
if m.xmo == nil {
return vs
}
m.xmo.Range(func(k K, v V) bool {
vs[k] = v
return true
})
return vs
}
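// Usage sketch (not part of the original file): MapOf is designed to be
// declared as a zero-value struct field and initialized lazily before first
// use. The struct and field names below are illustrative assumptions.
type exampleCounter struct {
	seen MapOf[string, int]
}

func (c *exampleCounter) bump(k string) {
	// Safe to call on every write path; only the first call allocates.
	c.seen.LazyInit()
	v, _ := c.seen.Load(k)
	c.seen.Store(k, v+1)
}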

View File

@ -22,8 +22,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/converters/ics"
"github.com/alcionai/corso/src/internal/m365/collection/groups/metadata"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
@ -52,234 +50,20 @@ func formatAddress(entry models.EmailAddressable) string {
return fmt.Sprintf(addressFormat, name, email)
}
// getICalData converts the email to an event so that an iCal payload
// can be generated from it.
func getICalData(ctx context.Context, data models.Messageable) (string, error) {
msg, ok := data.(*models.EventMessageRequest)
if !ok {
return "", clues.NewWC(ctx, "unexpected message type").
With("interface_type", fmt.Sprintf("%T", data))
}
// This method returns nil if data is not pulled using the necessary expand property
// .../messages/<message_id>/?expand=Microsoft.Graph.EventMessage/Event
// Also works for emails which are the result of someone accepting an
// invite. If we add this expand query parameter value when directly
// fetching a cancellation mail, the request fails. It does, however,
// appear to be OK when listing emails, although it gives an empty ({})
// event value for cancellations.
// TODO(meain): cancelled event details are available when pulling .eml
if mevent := msg.GetEvent(); mevent != nil {
return ics.FromEventable(ctx, mevent)
}
// Exceptions (modifications) are covered under this, although graph just sends the
// exception event and not the parent, which is what the eml obtained from graph also contains.
if ptr.Val(msg.GetMeetingMessageType()) != models.MEETINGREQUEST_MEETINGMESSAGETYPE {
// We don't have event data if it is not the "REQUEST" type.
// Neither cancellation nor acceptance returns enough
// information to recreate an event.
return "", nil
}
// If data was not fetched with an expand property, then we can
// approximate the details with the following.
event := models.NewEvent()
event.SetId(msg.GetId())
event.SetCreatedDateTime(msg.GetCreatedDateTime())
event.SetLastModifiedDateTime(msg.GetLastModifiedDateTime())
event.SetIsAllDay(msg.GetIsAllDay())
event.SetStart(msg.GetStartDateTime())
event.SetEnd(msg.GetEndDateTime())
event.SetRecurrence(msg.GetRecurrence())
// event.SetIsCancelled()
event.SetSubject(msg.GetSubject())
event.SetBodyPreview(msg.GetBodyPreview())
event.SetBody(msg.GetBody())
// https://learn.microsoft.com/en-us/graph/api/resources/eventmessage?view=graph-rest-1.0
// In addition, Outlook automatically creates an event instance in
// the invitee's calendar, with the showAs property as tentative.
event.SetShowAs(ptr.To(models.TENTATIVE_FREEBUSYSTATUS))
event.SetCategories(msg.GetCategories())
event.SetWebLink(msg.GetWebLink())
event.SetOrganizer(msg.GetFrom())
// NOTE: If an event was previously created and we added people to
// it, the original list of attendees is not available.
atts := []models.Attendeeable{}
for _, to := range msg.GetToRecipients() {
att := models.NewAttendee()
att.SetEmailAddress(to.GetEmailAddress())
att.SetTypeEscaped(ptr.To(models.REQUIRED_ATTENDEETYPE))
atts = append(atts, att)
}
for _, cc := range msg.GetCcRecipients() {
att := models.NewAttendee()
att.SetEmailAddress(cc.GetEmailAddress())
att.SetTypeEscaped(ptr.To(models.OPTIONAL_ATTENDEETYPE))
atts = append(atts, att)
}
// bcc did not show up in my tests, but adding for completeness
for _, bcc := range msg.GetBccRecipients() {
att := models.NewAttendee()
att.SetEmailAddress(bcc.GetEmailAddress())
att.SetTypeEscaped(ptr.To(models.OPTIONAL_ATTENDEETYPE))
atts = append(atts, att)
}
event.SetAttendees(atts)
event.SetLocation(msg.GetLocation())
// event.SetSensitivity() // unavailable in msg
event.SetImportance(msg.GetImportance())
// event.SetOnlineMeeting() // not available in eml either
event.SetAttachments(msg.GetAttachments())
return ics.FromEventable(ctx, event)
}
func getFileAttachment(ctx context.Context, attachment models.Attachmentable) (*mail.File, error) {
kind := ptr.Val(attachment.GetContentType())
bytes, err := attachment.GetBackingStore().Get("contentBytes")
if err != nil {
return nil, clues.WrapWC(ctx, err, "failed to get attachment bytes").
With("kind", kind)
}
if bytes == nil {
// TODO(meain): Handle non file attachments
// https://github.com/alcionai/corso/issues/4772
logger.Ctx(ctx).
With("attachment_id", ptr.Val(attachment.GetId()),
"attachment_type", ptr.Val(attachment.GetOdataType())).
Info("no contentBytes for attachment")
return nil, nil
}
bts, ok := bytes.([]byte)
if !ok {
return nil, clues.WrapWC(ctx, err, "invalid content bytes").
With("kind", kind).
With("interface_type", fmt.Sprintf("%T", bytes))
}
name := ptr.Val(attachment.GetName())
if len(name) == 0 {
// Graph as of now does not let us create any attachments
// without a name, but we have run into instances where we have
// seen attachments without a name, possibly from old
// data. This is for those cases.
name = "Unnamed"
}
contentID, err := attachment.GetBackingStore().Get("contentId")
if err != nil {
return nil, clues.WrapWC(ctx, err, "getting content id for attachment").
With("kind", kind)
}
if contentID != nil {
cids, _ := str.AnyToString(contentID)
if len(cids) > 0 {
name = cids
}
}
return &mail.File{
// cannot use filename as inline attachments will not get mapped properly
Name: name,
MimeType: kind,
Data: bts,
Inline: ptr.Val(attachment.GetIsInline()),
}, nil
}
func getItemAttachment(ctx context.Context, attachment models.Attachmentable) (*mail.File, error) {
it, err := attachment.GetBackingStore().Get("item")
if err != nil {
return nil, clues.WrapWC(ctx, err, "getting item for attachment").
With("attachment_id", ptr.Val(attachment.GetId()))
}
name := ptr.Val(attachment.GetName())
if len(name) == 0 {
// Graph as of now does not let us create any attachments
// without a name, but we have run into instances where we have
// seen attachments without a name, possibly from old
// data. This is for those cases.
name = "Unnamed"
}
switch it := it.(type) {
case *models.Message:
cb, err := FromMessageable(ctx, it)
if err != nil {
return nil, clues.WrapWC(ctx, err, "converting item attachment to eml").
With("attachment_id", ptr.Val(attachment.GetId()))
}
return &mail.File{
Name: name,
MimeType: "message/rfc822",
Data: []byte(cb),
}, nil
default:
logger.Ctx(ctx).
With("attachment_id", ptr.Val(attachment.GetId()),
"attachment_type", ptr.Val(attachment.GetOdataType())).
Info("unknown item attachment type")
}
return nil, nil
}
func getMailAttachment(ctx context.Context, att models.Attachmentable) (*mail.File, error) {
otyp := ptr.Val(att.GetOdataType())
switch otyp {
case "#microsoft.graph.fileAttachment":
return getFileAttachment(ctx, att)
case "#microsoft.graph.itemAttachment":
return getItemAttachment(ctx, att)
default:
logger.Ctx(ctx).
With("attachment_id", ptr.Val(att.GetId()),
"attachment_type", otyp).
Info("unknown attachment type")
return nil, nil
}
}
// FromJSON converts a Messageable (as json) to .eml format
func FromJSON(ctx context.Context, body []byte) (string, error) {
ctx = clues.Add(ctx, "body_len", len(body))
data, err := api.BytesToMessageable(body)
if err != nil {
return "", clues.WrapWC(ctx, err, "converting to messageble")
}
return FromMessageable(ctx, data)
}
// FromMessageable converts a Messageable to .eml format
func FromMessageable(ctx context.Context, data models.Messageable) (string, error) {
ctx = clues.Add(ctx, "item_id", ptr.Val(data.GetId()))
email := mail.NewMSG()
email.Encoding = mail.EncodingBase64 // Doing it to be safe for when we have eventMessage (newline issues)
email.AllowDuplicateAddress = true // More "correct" conversion
email.AddBccToHeader = true // Don't ignore Bcc
email.AllowEmptyAttachments = true // Don't error on empty attachments
email.UseProvidedAddress = true // Don't try to parse the email address
email.AllowDuplicateAddress = true // More "correct" conversion
email.AddBccToHeader = true // Don't ignore Bcc
email.AllowEmptyAttachments = true // Don't error on empty attachments
email.UseProvidedAddress = true // Don't try to parse the email address
if data.GetFrom() != nil {
email.SetFrom(formatAddress(data.GetFrom().GetEmailAddress()))
@ -347,155 +131,39 @@ func FromMessageable(ctx context.Context, data models.Messageable) (string, erro
}
}
if data.GetAttachments() != nil {
for _, attachment := range data.GetAttachments() {
att, err := getMailAttachment(ctx, attachment)
if err != nil {
return "", clues.WrapWC(ctx, err, "getting mail attachment")
}
// There are known cases where we just want to log and
// ignore instead of erroring out.
if att != nil {
email.Attach(att)
}
}
}
switch data.(type) {
case *models.EventMessageResponse, *models.EventMessage:
// We can't handle this as of now, not enough information
// TODO: Fetch event object from graph when fetching email
case *models.CalendarSharingMessage:
// TODO: Parse out calendar sharing message
// https://github.com/alcionai/corso/issues/5041
case *models.EventMessageRequest:
cal, err := getICalData(ctx, data)
if err != nil {
return "", clues.Wrap(err, "getting ical attachment")
}
if len(cal) > 0 {
email.AddAlternative(mail.TextCalendar, cal)
}
}
if err := email.GetError(); err != nil {
return "", clues.WrapWC(ctx, err, "converting to eml")
}
return email.GetMessage(), nil
}
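// Usage sketch (not part of the original file): a hypothetical caller that
// converts a Graph message JSON payload to .eml and persists it. Assumes an
// "os" import; the destination path is illustrative only.
func exampleWriteEML(ctx context.Context, messageJSON []byte) error {
	out, err := FromJSON(ctx, messageJSON)
	if err != nil {
		return clues.Wrap(err, "converting message to eml")
	}
	return os.WriteFile("message.eml", []byte(out), 0o644)
}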
//-------------------------------------------------------------
// Postable -> EML
//-------------------------------------------------------------
// FromJSONPostToEML converts a postable (as json) to .eml format.
// TODO(pandeyabs): This is a stripped-down copy of the messageable to
// eml conversion; it can be folded into one function by adding a post
// to messageable converter.
func FromJSONPostToEML(
ctx context.Context,
body []byte,
postMetadata metadata.ConversationPostMetadata,
) (string, error) {
ctx = clues.Add(ctx, "body_len", len(body))
data, err := api.BytesToPostable(body)
if err != nil {
return "", clues.WrapWC(ctx, err, "converting to postable")
}
ctx = clues.Add(ctx, "item_id", ptr.Val(data.GetId()))
email := mail.NewMSG()
email.Encoding = mail.EncodingBase64 // Doing it to be safe for when we have eventMessage (newline issues)
email.AllowDuplicateAddress = true // More "correct" conversion
email.AddBccToHeader = true // Don't ignore Bcc
email.AllowEmptyAttachments = true // Don't error on empty attachments
email.UseProvidedAddress = true // Don't try to parse the email address
if data.GetFrom() != nil {
email.SetFrom(formatAddress(data.GetFrom().GetEmailAddress()))
}
// We don't have the To, Cc, Bcc recipient information for posts due to a graph
// limitation. All posts carry the group email address as the only recipient
// for now.
email.AddTo(postMetadata.Recipients...)
email.SetSubject(postMetadata.Topic)
// Reply-To email address is not available for posts. Note that this is different
// from the inReplyTo field.
if data.GetCreatedDateTime() != nil {
email.SetDate(ptr.Val(data.GetCreatedDateTime()).Format(dateFormat))
}
if data.GetBody() != nil {
if data.GetBody().GetContentType() != nil {
var contentType mail.ContentType
switch data.GetBody().GetContentType().String() {
case "html":
contentType = mail.TextHTML
case "text":
contentType = mail.TextPlain
default:
// https://learn.microsoft.com/en-us/graph/api/resources/itembody?view=graph-rest-1.0#properties
// This should not be possible according to the documentation
logger.Ctx(ctx).
With("body_type", data.GetBody().GetContentType().String()).
Info("unknown body content type")
contentType = mail.TextPlain
}
email.SetBody(contentType, ptr.Val(data.GetBody().GetContent()))
}
}
if data.GetAttachments() != nil {
for _, attachment := range data.GetAttachments() {
kind := ptr.Val(attachment.GetContentType())
bytes, err := attachment.GetBackingStore().Get("contentBytes")
if err != nil {
return "", clues.WrapWC(ctx, err, "failed to get attachment bytes").
With("kind", kind)
return "", clues.WrapWC(ctx, err, "failed to get attachment bytes")
}
if bytes == nil {
// TODO(meain): Handle non file attachments
// Some attachments have an "item" field instead of
// "contentBytes". There are items like contacts, emails,
// or calendar events which will not be in a normal format
// and will have to be converted to a text format.
// TODO(meain): Handle custom attachments
// https://github.com/alcionai/corso/issues/4772
//
// TODO(pandeyabs): Above issue is for messages.
// This is not a problem for posts but leaving it here for safety.
logger.Ctx(ctx).
With("attachment_id", ptr.Val(attachment.GetId()),
"attachment_type", ptr.Val(attachment.GetOdataType())).
Info("no contentBytes for attachment")
With("attachment_id", ptr.Val(attachment.GetId())).
Info("unhandled attachment type")
continue
}
bts, ok := bytes.([]byte)
if !ok {
return "", clues.WrapWC(ctx, err, "invalid content bytes").
With("kind", kind).
With("interface_type", fmt.Sprintf("%T", bytes))
return "", clues.WrapWC(ctx, err, "invalid content bytes")
}
name := ptr.Val(attachment.GetName())
if len(name) == 0 {
name = "Unnamed"
}
contentID, err := attachment.GetBackingStore().Get("contentId")
if err != nil {
return "", clues.WrapWC(ctx, err, "getting content id for attachment").
With("kind", kind)
return "", clues.WrapWC(ctx, err, "getting content id for attachment")
}
if contentID != nil {
@ -515,8 +183,6 @@ func FromJSONPostToEML(
}
}
// Note: Posts cannot be of type EventMessageResponse, EventMessage or
// CalendarSharingMessage. So we don't need to handle those cases here.
if err = email.GetError(); err != nil {
return "", clues.WrapWC(ctx, err, "converting to eml")
}

View File

@ -1,13 +1,11 @@
package eml
import (
"bytes"
"regexp"
"strings"
"testing"
"time"
ical "github.com/arran4/golang-ical"
"github.com/jhillyerd/enmime"
kjson "github.com/microsoft/kiota-serialization-json-go"
"github.com/microsoftgraph/msgraph-sdk-go/models"
@ -17,9 +15,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/converters/eml/testdata"
"github.com/alcionai/corso/src/internal/converters/ics"
"github.com/alcionai/corso/src/internal/m365/collection/groups/metadata"
stub "github.com/alcionai/corso/src/internal/m365/service/groups/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
@ -137,11 +132,6 @@ func (suite *EMLUnitSuite) TestConvert_messageble_to_eml() {
}
func (suite *EMLUnitSuite) TestConvert_edge_cases() {
bodies := []string{
testdata.EmailWithAttachments,
testdata.EmailWithinEmail,
}
tests := []struct {
name string
transform func(models.Messageable)
@ -167,324 +157,34 @@ func (suite *EMLUnitSuite) TestConvert_edge_cases() {
require.NoError(suite.T(), err, "setting attachment content")
},
},
{
name: "attachment without name",
transform: func(msg models.Messageable) {
attachments := msg.GetAttachments()
attachments[1].SetName(ptr.To(""))
// This test has to be run on a non-inline attachment
// as inline attachments use contentID instead of name
// even when there is a name.
assert.False(suite.T(), ptr.Val(attachments[1].GetIsInline()))
},
},
{
name: "attachment with nil name",
transform: func(msg models.Messageable) {
attachments := msg.GetAttachments()
attachments[1].SetName(nil)
// This test has to be run on a non-inline attachment
// as inline attachments use contentID instead of name
// even when there is a name.
assert.False(suite.T(), ptr.Val(attachments[1].GetIsInline()))
},
},
{
name: "multiple attachments without name",
transform: func(msg models.Messageable) {
attachments := msg.GetAttachments()
attachments[1].SetName(ptr.To(""))
attachments[2].SetName(ptr.To(""))
// This test has to be run on a non-inline attachment
// as inline attachments use contentID instead of name
// even when there is a name.
assert.False(suite.T(), ptr.Val(attachments[1].GetIsInline()))
assert.False(suite.T(), ptr.Val(attachments[2].GetIsInline()))
},
},
}
for _, b := range bodies {
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
for _, test := range tests {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
ctx, flush := tester.NewContext(t)
defer flush()
body := []byte(b)
body := []byte(testdata.EmailWithAttachments)
msg, err := api.BytesToMessageable(body)
require.NoError(t, err, "creating message")
msg, err := api.BytesToMessageable(body)
require.NoError(t, err, "creating message")
test.transform(msg)
test.transform(msg)
writer := kjson.NewJsonSerializationWriter()
writer := kjson.NewJsonSerializationWriter()
defer writer.Close()
defer writer.Close()
err = writer.WriteObjectValue("", msg)
require.NoError(t, err, "serializing message")
err = writer.WriteObjectValue("", msg)
require.NoError(t, err, "serializing message")
nbody, err := writer.GetSerializedContent()
require.NoError(t, err, "getting serialized content")
nbody, err := writer.GetSerializedContent()
require.NoError(t, err, "getting serialized content")
_, err = FromJSON(ctx, nbody)
assert.NoError(t, err, "converting to eml")
})
}
_, err = FromJSON(ctx, nbody)
assert.NoError(t, err, "converting to eml")
})
}
}
func (suite *EMLUnitSuite) TestConvert_eml_ics() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
body := []byte(testdata.EmailWithEventInfo)
out, err := FromJSON(ctx, body)
assert.NoError(t, err, "converting to eml")
rmsg, err := api.BytesToMessageable(body)
require.NoError(t, err, "creating message")
msg := rmsg.(*models.EventMessageRequest)
eml, err := enmime.ReadEnvelope(strings.NewReader(out))
require.NoError(t, err, "reading created eml")
require.NotNil(t, eml, "eml should not be nil")
require.Equal(t, 1, len(eml.OtherParts), "eml should have 1 attachment")
require.Equal(t, "text/calendar", eml.OtherParts[0].ContentType, "eml attachment should be a calendar")
catt := *eml.OtherParts[0]
cal, err := ical.ParseCalendar(bytes.NewReader(catt.Content))
require.NoError(t, err, "parsing calendar")
event := cal.Events()[0]
assert.Equal(t, ptr.Val(msg.GetId()), event.Id())
assert.Equal(t, ptr.Val(msg.GetSubject()), event.GetProperty(ical.ComponentPropertySummary).Value)
assert.Equal(
t,
msg.GetCreatedDateTime().Format(ics.ICalDateTimeFormatUTC),
event.GetProperty(ical.ComponentPropertyCreated).Value)
assert.Equal(
t,
msg.GetLastModifiedDateTime().Format(ics.ICalDateTimeFormatUTC),
event.GetProperty(ical.ComponentPropertyLastModified).Value)
st, err := ics.GetUTCTime(
ptr.Val(msg.GetStartDateTime().GetDateTime()),
ptr.Val(msg.GetStartDateTime().GetTimeZone()))
require.NoError(t, err, "getting start time")
et, err := ics.GetUTCTime(
ptr.Val(msg.GetEndDateTime().GetDateTime()),
ptr.Val(msg.GetEndDateTime().GetTimeZone()))
require.NoError(t, err, "getting end time")
assert.Equal(
t,
st.Format(ics.ICalDateTimeFormatUTC),
event.GetProperty(ical.ComponentPropertyDtStart).Value)
assert.Equal(
t,
et.Format(ics.ICalDateTimeFormatUTC),
event.GetProperty(ical.ComponentPropertyDtEnd).Value)
tos := msg.GetToRecipients()
ccs := msg.GetCcRecipients()
att := event.Attendees()
assert.Equal(t, len(tos)+len(ccs), len(att))
for _, to := range tos {
found := false
for _, attendee := range att {
if "mailto:"+ptr.Val(to.GetEmailAddress().GetAddress()) == attendee.Value {
found = true
assert.Equal(t, "REQ-PARTICIPANT", attendee.ICalParameters["ROLE"][0])
break
}
}
assert.True(t, found, "to recipient not found in attendees")
}
for _, cc := range ccs {
found := false
for _, attendee := range att {
if "mailto:"+ptr.Val(cc.GetEmailAddress().GetAddress()) == attendee.Value {
found = true
assert.Equal(t, "OPT-PARTICIPANT", attendee.ICalParameters["ROLE"][0])
break
}
}
assert.True(t, found, "cc recipient not found in attendees")
}
}
func (suite *EMLUnitSuite) TestConvert_eml_ics_from_event_obj() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
body := []byte(testdata.EmailWithEventObject)
out, err := FromJSON(ctx, body)
assert.NoError(t, err, "converting to eml")
rmsg, err := api.BytesToMessageable(body)
require.NoError(t, err, "creating message")
msg := rmsg.(*models.EventMessageRequest)
evt := msg.GetEvent()
eml, err := enmime.ReadEnvelope(strings.NewReader(out))
require.NoError(t, err, "reading created eml")
require.NotNil(t, eml, "eml should not be nil")
require.Equal(t, 1, len(eml.OtherParts), "eml should have 1 attachment")
require.Equal(t, "text/calendar", eml.OtherParts[0].ContentType, "eml attachment should be a calendar")
catt := *eml.OtherParts[0]
cal, err := ical.ParseCalendar(bytes.NewReader(catt.Content))
require.NoError(t, err, "parsing calendar")
event := cal.Events()[0]
assert.Equal(t, ptr.Val(evt.GetId()), event.Id())
assert.NotEqual(t, ptr.Val(msg.GetSubject()), event.GetProperty(ical.ComponentPropertySummary).Value)
assert.Equal(t, ptr.Val(evt.GetSubject()), event.GetProperty(ical.ComponentPropertySummary).Value)
}
//-------------------------------------------------------------
// Postable -> EML tests
//-------------------------------------------------------------
func (suite *EMLUnitSuite) TestConvert_postable_to_eml() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
body := []byte(stub.PostWithAttachments)
postMetadata := metadata.ConversationPostMetadata{
Recipients: []string{"group@example.com"},
Topic: "test subject",
}
out, err := FromJSONPostToEML(ctx, body, postMetadata)
assert.NoError(t, err, "converting to eml")
post, err := api.BytesToPostable(body)
require.NoError(t, err, "creating post")
eml, err := enmime.ReadEnvelope(strings.NewReader(out))
require.NoError(t, err, "reading created eml")
assert.Equal(t, postMetadata.Topic, eml.GetHeader("Subject"))
assert.Equal(t, post.GetCreatedDateTime().Format(time.RFC1123Z), eml.GetHeader("Date"))
assert.Equal(t, formatAddress(post.GetFrom().GetEmailAddress()), eml.GetHeader("From"))
// Test recipients. The post metadata should contain the group email address.
tos := strings.Split(eml.GetHeader("To"), ", ")
for _, sourceTo := range postMetadata.Recipients {
assert.Contains(t, tos, sourceTo)
}
// Assert cc, bcc to be empty since they are not supported for posts right now.
assert.Equal(t, "", eml.GetHeader("Cc"))
assert.Equal(t, "", eml.GetHeader("Bcc"))
// Test attachments using PostWithAttachments data as a reference.
// This data has 1 direct attachment and 1 inline attachment.
assert.Equal(t, 1, len(eml.Attachments), "direct attachment count")
assert.Equal(t, 1, len(eml.Inlines), "inline attachment count")
for _, sourceAttachment := range post.GetAttachments() {
targetContent := eml.Attachments[0].Content
if ptr.Val(sourceAttachment.GetIsInline()) {
targetContent = eml.Inlines[0].Content
}
sourceContent, err := sourceAttachment.GetBackingStore().Get("contentBytes")
assert.NoError(t, err, "getting source attachment content")
assert.Equal(t, sourceContent, targetContent)
}
// Test body
source := strings.ReplaceAll(eml.HTML, "\n", "")
target := strings.ReplaceAll(ptr.Val(post.GetBody().GetContent()), "\n", "")
// replace the cid with a constant value to make the comparison deterministic
re := regexp.MustCompile(`(?:src|originalSrc)="cid:[^"]*"`)
source = re.ReplaceAllString(source, `src="cid:replaced"`)
target = re.ReplaceAllString(target, `src="cid:replaced"`)
assert.Equal(t, source, target)
}
// Tests an ics within an eml within another eml
func (suite *EMLUnitSuite) TestConvert_message_in_messageble_to_eml() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
body := []byte(testdata.EmailWithinEmail)
out, err := FromJSON(ctx, body)
assert.NoError(t, err, "converting to eml")
msg, err := api.BytesToMessageable(body)
require.NoError(t, err, "creating message")
eml, err := enmime.ReadEnvelope(strings.NewReader(out))
require.NoError(t, err, "reading created eml")
assert.Equal(t, ptr.Val(msg.GetSubject()), eml.GetHeader("Subject"))
assert.Equal(t, msg.GetSentDateTime().Format(time.RFC1123Z), eml.GetHeader("Date"))
assert.Equal(t, formatAddress(msg.GetFrom().GetEmailAddress()), eml.GetHeader("From"))
attachments := eml.Attachments
assert.Equal(t, 3, len(attachments), "attachment count in parent email")
ieml, err := enmime.ReadEnvelope(strings.NewReader(string(attachments[0].Content)))
require.NoError(t, err, "reading created eml")
itm, err := msg.GetAttachments()[0].GetBackingStore().Get("item")
require.NoError(t, err, "getting item from message")
imsg := itm.(*models.Message)
assert.Equal(t, ptr.Val(imsg.GetSubject()), ieml.GetHeader("Subject"))
assert.Equal(t, imsg.GetSentDateTime().Format(time.RFC1123Z), ieml.GetHeader("Date"))
assert.Equal(t, formatAddress(imsg.GetFrom().GetEmailAddress()), ieml.GetHeader("From"))
iattachments := ieml.Attachments
assert.Equal(t, 1, len(iattachments), "attachment count in child email")
// Known from testdata
assert.Contains(t, string(iattachments[0].Content), "X-LIC-LOCATION:Africa/Abidjan")
}

View File

@ -104,19 +104,6 @@
"contentId": null,
"contentLocation": null,
"contentBytes": "W1BhdGhzXQpQcmVmaXggPSAuLgo="
},
{
"@odata.type": "#microsoft.graph.fileAttachment",
"@odata.mediaContentType": "application/octet-stream",
"id": "ZZMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAEwbDEWAAABEgAQAD3rU0iyzCdHgz0xmOrWc9g=",
"lastModifiedDateTime": "2023-11-16T05:42:47Z",
"name": "qt2.conf",
"contentType": "application/octet-stream",
"size": 156,
"isInline": false,
"contentId": null,
"contentLocation": null,
"contentBytes": "Z1BhdGhzXQpQcmVmaXggPSAuLgo="
}
]
}

View File

@ -1,103 +0,0 @@
{
"@odata.type": "#microsoft.graph.eventMessageRequest",
"@odata.etag": "W/\"CwAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFY+7zz\"",
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFZU-vfAAA=",
"createdDateTime": "2024-01-15T11:26:41Z",
"lastModifiedDateTime": "2024-01-15T11:26:43Z",
"changeKey": "CwAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFY+7zz",
"categories": [],
"receivedDateTime": "2024-01-15T11:26:41Z",
"sentDateTime": "2024-01-15T11:26:41Z",
"hasAttachments": false,
"internetMessageId": "<PH0PR04MB7285DDED30186B1D8E1BD2AABC6C2@PH0PR04MB7285.namprd04.prod.outlook.com>",
"subject": "Invitable event",
"bodyPreview": "How come the sun is hot?\r\n\r\n________________________________________________________________________________\r\nMicrosoft Teams meeting\r\nJoin on your computer, mobile app or room device\r\nClick here to join the meeting\r\nMeeting ID: 290 273 192 285\r\nPasscode:",
"importance": "normal",
"parentFolderId": "AQMkAGJiAGZhNjRlOC00OGI5LTQyNTItYjFkMy00NTJjMTgyZGZkMjQALgAAA0V2IruiJ9ZFvgAO6qBJFycBAEEUODQkmTtNjV_awmuHu00AAAIBCQAAAA==",
"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQABoIUFzzByJMltJobtYSAJ0=",
"conversationIndex": "AdpHpbXXGghQXPMHIkyW0mhu1hIAnQ==",
"isDeliveryReceiptRequested": null,
"isReadReceiptRequested": false,
"isRead": true,
"isDraft": false,
"webLink": "https://outlook.office365.com/owa/?ItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFZU%2FvfAAA%3D&exvsurl=1&viewmodel=ReadMessageItem",
"inferenceClassification": "focused",
"meetingMessageType": "meetingRequest",
"type": "seriesMaster",
"isOutOfDate": false,
"isAllDay": false,
"isDelegated": false,
"responseRequested": true,
"allowNewTimeProposals": null,
"meetingRequestType": "newMeetingRequest",
"body": {
"contentType": "html",
"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"></head><body><div class=\"elementToProof\" style=\"font-family:Aptos,Aptos_EmbeddedFont,Aptos_MSFontService,Calibri,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0)\">How come the sun is hot?<br></div><br><div style=\"width:100%\"><span style=\"white-space:nowrap; color:#5F5F5F; opacity:.36\">________________________________________________________________________________</span> </div><div class=\"me-email-text\" lang=\"en-US\" style=\"color:#252424; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\"><div style=\"margin-top:24px; margin-bottom:20px\"><span style=\"font-size:24px; color:#252424\">Microsoft Teams meeting</span> </div><div style=\"margin-bottom:20px\"><div style=\"margin-top:0px; margin-bottom:0px; font-weight:bold\"><span style=\"font-size:14px; color:#252424\">Join on your computer, mobile app or room device</span> </div><a class=\"me-email-headline\" href=\"https://teams.microsoft.com/l/meetup-join/19%3ameeting_OGM4MWVlYjUtMjllMi00ZjY5LWE5YjgtMTc4MjJhMWI1MjRl%40thread.v2/0?context=%7b%22Tid%22%3a%22fb8afbaa-e94c-4ea5-8a8a-24aff04d7874%22%2c%22Oid%22%3a%227ceb8e03-bdc5-4509-a136-457526165ec0%22%7d\" target=\"_blank\" rel=\"noreferrer noopener\" style=\"font-size:14px; font-family:'Segoe UI Semibold','Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif; text-decoration:underline; color:#6264a7\">Click here to join the meeting</a> </div><div style=\"margin-bottom:20px; margin-top:20px\"><div style=\"margin-bottom:4px\"><span data-tid=\"meeting-code\" style=\"font-size:14px; color:#252424\">Meeting ID: <span style=\"font-size:16px; color:#252424\">290 273 192 285</span> </span><br><span style=\"font-size:14px; color:#252424\">Passcode: </span><span style=\"font-size:16px; color:#252424\">CwEBTS </span><div style=\"font-size:14px\"><a class=\"me-email-link\" target=\"_blank\" href=\"https://www.microsoft.com/en-us/microsoft-teams/download-app\" rel=\"noreferrer noopener\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Download Teams</a> | <a class=\"me-email-link\" target=\"_blank\" href=\"https://www.microsoft.com/microsoft-teams/join-a-meeting\" rel=\"noreferrer noopener\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Join on the web</a></div></div></div><div style=\"margin-bottom:24px; margin-top:20px\"><a class=\"me-email-link\" target=\"_blank\" href=\"https://aka.ms/JoinTeamsMeeting\" rel=\"noreferrer noopener\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Learn More</a> | <a class=\"me-email-link\" target=\"_blank\" href=\"https://teams.microsoft.com/meetingOptions/?organizerId=7ceb8e03-bdc5-4509-a136-457526165ec0&amp;tenantId=fb8afbaa-e94c-4ea5-8a8a-24aff04d7874&amp;threadId=19_meeting_OGM4MWVlYjUtMjllMi00ZjY5LWE5YjgtMTc4MjJhMWI1MjRl@thread.v2&amp;messageId=0&amp;language=en-US\" rel=\"noreferrer noopener\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Meeting options</a> </div></div><div style=\"font-size:14px; margin-bottom:4px; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\"></div><div style=\"font-size:12px\"></div><div></div><div style=\"width:100%\"><span 
style=\"white-space:nowrap; color:#5F5F5F; opacity:.36\">________________________________________________________________________________</span> </div></body></html>"
},
"sender": {
"emailAddress": {
"name": "Johanna Lorenz",
"address": "JohannaL@10rqc2.onmicrosoft.com"
}
},
"from": {
"emailAddress": {
"name": "Johanna Lorenz",
"address": "JohannaL@10rqc2.onmicrosoft.com"
}
},
"toRecipients": [
{
"emailAddress": {
"name": "Faker 1",
"address": "fakeid1@provider.com"
}
}
],
"ccRecipients": [
{
"emailAddress": {
"name": "Faker 2",
"address": "fakeid2@provider.com"
}
}
],
"bccRecipients": [],
"replyTo": [],
"flag": {
"flagStatus": "notFlagged"
},
"startDateTime": {
"dateTime": "2024-01-22T02:30:00.0000000",
"timeZone": "UTC"
},
"endDateTime": {
"dateTime": "2024-01-22T03:00:00.0000000",
"timeZone": "UTC"
},
"location": {
"displayName": "Microsoft Teams Meeting",
"locationType": "default",
"uniqueIdType": "unknown"
},
"recurrence": {
"pattern": {
"type": "daily",
"interval": 1,
"month": 0,
"dayOfMonth": 0,
"firstDayOfWeek": "sunday",
"index": "first"
},
"range": {
"type": "endDate",
"startDate": "2024-01-21",
"endDate": "2024-01-24",
"recurrenceTimeZone": "tzone://Microsoft/Utc",
"numberOfOccurrences": 0
}
},
"previousLocation": null,
"previousStartDateTime": null,
"previousEndDateTime": null
}

View File

@ -1,204 +0,0 @@
{
"@odata.context": "https://graph.microsoft.com/v1.0/$metadata#users('JohannaL%4010rqc2.onmicrosoft.com')/messages(microsoft.graph.eventMessage/event())/$entity",
"@odata.type": "#microsoft.graph.eventMessageRequest",
"@odata.etag": "W/\"CwAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFY+7zz\"",
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFZU-vfAAA=",
"createdDateTime": "2024-01-15T11:26:41Z",
"lastModifiedDateTime": "2024-01-15T11:26:43Z",
"changeKey": "CwAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFY+7zz",
"categories": [],
"receivedDateTime": "2024-01-15T11:26:41Z",
"sentDateTime": "2024-01-15T11:26:41Z",
"hasAttachments": false,
"internetMessageId": "<PH0PR04MB7285DDED30186B1D8E1BD2AABC6C2@PH0PR04MB7285.namprd04.prod.outlook.com>",
"subject": "Invitable event",
"bodyPreview": "How come the sun is hot?\r\n\r\n________________________________________________________________________________\r\nMicrosoft Teams meeting\r\nJoin on your computer, mobile app or room device\r\nClick here to join the meeting\r\nMeeting ID: 290 273 192 285\r\nPasscode:",
"importance": "normal",
"parentFolderId": "AQMkAGJiAGZhNjRlOC00OGI5LTQyNTItYjFkMy00NTJjMTgyZGZkMjQALgAAA0V2IruiJ9ZFvgAO6qBJFycBAEEUODQkmTtNjV_awmuHu00AAAIBCQAAAA==",
"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQABoIUFzzByJMltJobtYSAJ0=",
"conversationIndex": "AdpHpbXXGghQXPMHIkyW0mhu1hIAnQ==",
"isDeliveryReceiptRequested": null,
"isReadReceiptRequested": false,
"isRead": true,
"isDraft": false,
"webLink": "https://outlook.office365.com/owa/?ItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFZU%2FvfAAA%3D&exvsurl=1&viewmodel=ReadMessageItem",
"inferenceClassification": "focused",
"meetingMessageType": "meetingRequest",
"type": "seriesMaster",
"isOutOfDate": false,
"isAllDay": false,
"isDelegated": false,
"responseRequested": true,
"allowNewTimeProposals": null,
"meetingRequestType": "newMeetingRequest",
"body": {
"contentType": "html",
"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"></head><body><div class=\"elementToProof\" style=\"font-family:Aptos,Aptos_EmbeddedFont,Aptos_MSFontService,Calibri,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0)\">How come the sun is hot?<br></div><br><div style=\"width:100%\"><span style=\"white-space:nowrap; color:#5F5F5F; opacity:.36\">________________________________________________________________________________</span> </div><div class=\"me-email-text\" lang=\"en-US\" style=\"color:#252424; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\"><div style=\"margin-top:24px; margin-bottom:20px\"><span style=\"font-size:24px; color:#252424\">Microsoft Teams meeting</span> </div><div style=\"margin-bottom:20px\"><div style=\"margin-top:0px; margin-bottom:0px; font-weight:bold\"><span style=\"font-size:14px; color:#252424\">Join on your computer, mobile app or room device</span> </div><a class=\"me-email-headline\" href=\"https://teams.microsoft.com/l/meetup-join/19%3ameeting_OGM4MWVlYjUtMjllMi00ZjY5LWE5YjgtMTc4MjJhMWI1MjRl%40thread.v2/0?context=%7b%22Tid%22%3a%22fb8afbaa-e94c-4ea5-8a8a-24aff04d7874%22%2c%22Oid%22%3a%227ceb8e03-bdc5-4509-a136-457526165ec0%22%7d\" target=\"_blank\" rel=\"noreferrer noopener\" style=\"font-size:14px; font-family:'Segoe UI Semibold','Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif; text-decoration:underline; color:#6264a7\">Click here to join the meeting</a> </div><div style=\"margin-bottom:20px; margin-top:20px\"><div style=\"margin-bottom:4px\"><span data-tid=\"meeting-code\" style=\"font-size:14px; color:#252424\">Meeting ID: <span style=\"font-size:16px; color:#252424\">290 273 192 285</span> </span><br><span style=\"font-size:14px; color:#252424\">Passcode: </span><span style=\"font-size:16px; color:#252424\">CwEBTS </span><div style=\"font-size:14px\"><a class=\"me-email-link\" target=\"_blank\" href=\"https://www.microsoft.com/en-us/microsoft-teams/download-app\" rel=\"noreferrer noopener\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Download Teams</a> | <a class=\"me-email-link\" target=\"_blank\" href=\"https://www.microsoft.com/microsoft-teams/join-a-meeting\" rel=\"noreferrer noopener\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Join on the web</a></div></div></div><div style=\"margin-bottom:24px; margin-top:20px\"><a class=\"me-email-link\" target=\"_blank\" href=\"https://aka.ms/JoinTeamsMeeting\" rel=\"noreferrer noopener\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Learn More</a> | <a class=\"me-email-link\" target=\"_blank\" href=\"https://teams.microsoft.com/meetingOptions/?organizerId=7ceb8e03-bdc5-4509-a136-457526165ec0&amp;tenantId=fb8afbaa-e94c-4ea5-8a8a-24aff04d7874&amp;threadId=19_meeting_OGM4MWVlYjUtMjllMi00ZjY5LWE5YjgtMTc4MjJhMWI1MjRl@thread.v2&amp;messageId=0&amp;language=en-US\" rel=\"noreferrer noopener\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Meeting options</a> </div></div><div style=\"font-size:14px; margin-bottom:4px; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\"></div><div style=\"font-size:12px\"></div><div></div><div style=\"width:100%\"><span 
style=\"white-space:nowrap; color:#5F5F5F; opacity:.36\">________________________________________________________________________________</span> </div></body></html>"
},
"sender": {
"emailAddress": {
"name": "Johanna Lorenz",
"address": "JohannaL@10rqc2.onmicrosoft.com"
}
},
"from": {
"emailAddress": {
"name": "Johanna Lorenz",
"address": "JohannaL@10rqc2.onmicrosoft.com"
}
},
"toRecipients": [
{
"emailAddress": {
"name": "Pradeep Gupta",
"address": "PradeepG@10rqc2.onmicrosoft.com"
}
}
],
"ccRecipients": [],
"bccRecipients": [],
"replyTo": [],
"flag": {
"flagStatus": "notFlagged"
},
"startDateTime": {
"dateTime": "2024-01-22T02:30:00.0000000",
"timeZone": "UTC"
},
"endDateTime": {
"dateTime": "2024-01-22T03:00:00.0000000",
"timeZone": "UTC"
},
"location": {
"displayName": "Microsoft Teams Meeting",
"locationType": "default",
"uniqueIdType": "unknown"
},
"recurrence": {
"pattern": {
"type": "daily",
"interval": 1,
"month": 0,
"dayOfMonth": 0,
"firstDayOfWeek": "sunday",
"index": "first"
},
"range": {
"type": "endDate",
"startDate": "2024-01-21",
"endDate": "2024-01-24",
"recurrenceTimeZone": "tzone://Microsoft/Utc",
"numberOfOccurrences": 0
}
},
"previousLocation": null,
"previousStartDateTime": null,
"previousEndDateTime": null,
"event@odata.associationLink": "https://graph.microsoft.com/v1.0/users('JohannaL@10rqc2.onmicrosoft.com')/events('AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAENAABBFDg0JJk7TY1fmsJrh7tNAAFZU--pAAA=')/$ref",
"event@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('JohannaL@10rqc2.onmicrosoft.com')/events('AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAENAABBFDg0JJk7TY1fmsJrh7tNAAFZU--pAAA=')",
"event": {
"@odata.etag": "W/\"QRQ4NCSZO02NX5rCa4e7TQABWPwm+A==\"",
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAENAABBFDg0JJk7TY1fmsJrh7tNAAFZU--pAAA=",
"createdDateTime": "2024-01-15T11:26:39.1524133Z",
"lastModifiedDateTime": "2024-01-16T10:29:34.8704061Z",
"changeKey": "QRQ4NCSZO02NX5rCa4e7TQABWPwm+A==",
"categories": [],
"transactionId": "d19de894-1d85-dde1-ea5f-9332e850667b",
"originalStartTimeZone": "India Standard Time",
"originalEndTimeZone": "India Standard Time",
"iCalUId": "040000008200E00074C5B7101A82E008000000002757EEB4A547DA01000000000000000010000000CCC41D0213F00E489061EF756A0E6864",
"reminderMinutesBeforeStart": 15,
"isReminderOn": true,
"hasAttachments": false,
"subject": "Different title to test",
"bodyPreview": "How come the sun is hot?\r\n\r\n________________________________________________________________________________\r\nMicrosoft Teams meeting\r\nJoin on your computer, mobile app or room device\r\nClick here to join the meeting\r\nMeeting ID: 290 273 192 285\r\nPasscode:",
"importance": "normal",
"sensitivity": "normal",
"isAllDay": false,
"isCancelled": false,
"isOrganizer": true,
"responseRequested": true,
"seriesMasterId": null,
"showAs": "busy",
"type": "seriesMaster",
"webLink": "https://outlook.office365.com/owa/?itemid=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAENAABBFDg0JJk7TY1fmsJrh7tNAAFZU%2F%2FpAAA%3D&exvsurl=1&path=/calendar/item",
"onlineMeetingUrl": null,
"isOnlineMeeting": true,
"onlineMeetingProvider": "teamsForBusiness",
"allowNewTimeProposals": true,
"occurrenceId": null,
"isDraft": false,
"hideAttendees": false,
"responseStatus": {
"response": "organizer",
"time": "0001-01-01T00:00:00Z"
},
"body": {
"contentType": "html",
"content": "<html>\r\n<head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\r\n</head>\r\n<body>\r\n<div class=\"elementToProof\" style=\"font-family:Aptos,Aptos_EmbeddedFont,Aptos_MSFontService,Calibri,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0)\">\r\nHow come the sun is hot?<br>\r\n</div>\r\n<br>\r\n<div style=\"width:100%\"><span style=\"white-space:nowrap; color:#5F5F5F; opacity:.36\">________________________________________________________________________________</span>\r\n</div>\r\n<div class=\"me-email-text\" lang=\"en-US\" style=\"color:#252424; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">\r\n<div style=\"margin-top:24px; margin-bottom:20px\"><span style=\"font-size:24px; color:#252424\">Microsoft Teams meeting</span>\r\n</div>\r\n<div style=\"margin-bottom:20px\">\r\n<div style=\"margin-top:0px; margin-bottom:0px; font-weight:bold\"><span style=\"font-size:14px; color:#252424\">Join on your computer, mobile app or room device</span>\r\n</div>\r\n<a href=\"https://teams.microsoft.com/l/meetup-join/19%3ameeting_OGM4MWVlYjUtMjllMi00ZjY5LWE5YjgtMTc4MjJhMWI1MjRl%40thread.v2/0?context=%7b%22Tid%22%3a%22fb8afbaa-e94c-4ea5-8a8a-24aff04d7874%22%2c%22Oid%22%3a%227ceb8e03-bdc5-4509-a136-457526165ec0%22%7d\" class=\"me-email-headline\" style=\"font-size:14px; font-family:'Segoe UI Semibold','Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif; text-decoration:underline; color:#6264a7\">Click\r\n here to join the meeting</a> </div>\r\n<div style=\"margin-bottom:20px; margin-top:20px\">\r\n<div style=\"margin-bottom:4px\"><span data-tid=\"meeting-code\" style=\"font-size:14px; color:#252424\">Meeting ID:\r\n<span style=\"font-size:16px; color:#252424\">290 273 192 285</span> </span><br>\r\n<span style=\"font-size:14px; color:#252424\">Passcode: </span><span style=\"font-size:16px; color:#252424\">CwEBTS\r\n</span>\r\n<div style=\"font-size:14px\"><a href=\"https://www.microsoft.com/en-us/microsoft-teams/download-app\" class=\"me-email-link\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Download\r\n Teams</a> | <a href=\"https://www.microsoft.com/microsoft-teams/join-a-meeting\" class=\"me-email-link\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">\r\nJoin on the web</a></div>\r\n</div>\r\n</div>\r\n<div style=\"margin-bottom:24px; margin-top:20px\"><a href=\"https://aka.ms/JoinTeamsMeeting\" class=\"me-email-link\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">Learn More</a>\r\n | <a href=\"https://teams.microsoft.com/meetingOptions/?organizerId=7ceb8e03-bdc5-4509-a136-457526165ec0&amp;tenantId=fb8afbaa-e94c-4ea5-8a8a-24aff04d7874&amp;threadId=19_meeting_OGM4MWVlYjUtMjllMi00ZjY5LWE5YjgtMTc4MjJhMWI1MjRl@thread.v2&amp;messageId=0&amp;language=en-US\" class=\"me-email-link\" style=\"font-size:14px; text-decoration:underline; color:#6264a7; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">\r\nMeeting options</a> </div>\r\n</div>\r\n<div style=\"font-size:14px; margin-bottom:4px; font-family:'Segoe UI','Helvetica Neue',Helvetica,Arial,sans-serif\">\r\n</div>\r\n<div style=\"font-size:12px\"></div>\r\n<div></div>\r\n<div style=\"width:100%\"><span style=\"white-space:nowrap; color:#5F5F5F; 
opacity:.36\">________________________________________________________________________________</span>\r\n</div>\r\n</body>\r\n</html>\r\n"
},
"start": {
"dateTime": "2024-01-22T02:30:00.0000000",
"timeZone": "UTC"
},
"end": {
"dateTime": "2024-01-22T03:00:00.0000000",
"timeZone": "UTC"
},
"location": {
"displayName": "Microsoft Teams Meeting",
"locationType": "default",
"uniqueId": "Microsoft Teams Meeting",
"uniqueIdType": "private"
},
"locations": [
{
"displayName": "Microsoft Teams Meeting",
"locationType": "default",
"uniqueId": "Microsoft Teams Meeting",
"uniqueIdType": "private"
}
],
"recurrence": {
"pattern": {
"type": "daily",
"interval": 1,
"month": 0,
"dayOfMonth": 0,
"firstDayOfWeek": "sunday",
"index": "first"
},
"range": {
"type": "endDate",
"startDate": "2024-01-22",
"endDate": "2024-01-25",
"recurrenceTimeZone": "India Standard Time",
"numberOfOccurrences": 0
}
},
"attendees": [
{
"type": "required",
"status": {
"response": "none",
"time": "0001-01-01T00:00:00Z"
},
"emailAddress": {
"name": "Pradeep Gupta",
"address": "PradeepG@10rqc2.onmicrosoft.com"
}
}
],
"organizer": {
"emailAddress": {
"name": "Johanna Lorenz",
"address": "JohannaL@10rqc2.onmicrosoft.com"
}
},
"onlineMeeting": {
"joinUrl": "https://teams.microsoft.com/l/meetup-join/19%3ameeting_OGM4MWVlYjUtMjllMi00ZjY5LWE5YjgtMTc4MjJhMWI1MjRl%40thread.v2/0?context=%7b%22Tid%22%3a%22fb8afbaa-e94c-4ea5-8a8a-24aff04d7874%22%2c%22Oid%22%3a%227ceb8e03-bdc5-4509-a136-457526165ec0%22%7d"
},
"calendar@odata.associationLink": "https://graph.microsoft.com/v1.0/users('JohannaL@10rqc2.onmicrosoft.com')/calendars('AQMkAGJiAGZhNjRlOC00OGI5LTQyNTItYjFkMy00NTJjMTgyZGZkMjQALgAAA0V2IruiJ9ZFvgAO6qBJFycBAEEUODQkmTtNjV_awmuHu00AAAIBDQAAAA==')/$ref",
"calendar@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('JohannaL@10rqc2.onmicrosoft.com')/calendars('AQMkAGJiAGZhNjRlOC00OGI5LTQyNTItYjFkMy00NTJjMTgyZGZkMjQALgAAA0V2IruiJ9ZFvgAO6qBJFycBAEEUODQkmTtNjV_awmuHu00AAAIBDQAAAA==')"
}
}

View File

@ -1,268 +0,0 @@
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAA=",
"@odata.type": "#microsoft.graph.message",
"@odata.context": "https://graph.microsoft.com/v1.0/$metadata#users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages/$entity",
"@odata.etag": "W/\"CQAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFnDeBl\"",
"categories": [],
"changeKey": "CQAAABYAAABBFDg0JJk7TY1fmsJrh7tNAAFnDeBl",
"createdDateTime": "2024-02-05T09:33:23Z",
"lastModifiedDateTime": "2024-02-05T09:33:48Z",
"attachments": [
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAABEgAQAEUyH0VS3HJBgHDlZdWZl0k=",
"@odata.type": "#microsoft.graph.itemAttachment",
"item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')",
"item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')/$ref",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Purpose of life",
"size": 11840,
"item": {
"id": "",
"@odata.type": "#microsoft.graph.message",
"createdDateTime": "2024-02-05T09:33:24Z",
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"attachments": [
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAACEgAQAEUyH0VS3HJBgHDlZdWZl0kSABAAjBhd4-oQaUS969pTkS-gzA==",
"@odata.type": "#microsoft.graph.fileAttachment",
"@odata.mediaContentType": "text/calendar",
"contentType": "text/calendar",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Abidjan.ics",
"size": 573,
"contentBytes": "QkVHSU46VkNBTEVOREFSDQpQUk9ESUQ6LS8vdHp1cmwub3JnLy9OT05TR01MIE9sc29uIDIwMjNkLy9FTg0KVkVSU0lPTjoyLjANCkJFR0lOOlZUSU1FWk9ORQ0KVFpJRDpBZnJpY2EvQWJpZGphbg0KTEFTVC1NT0RJRklFRDoyMDIzMTIyMlQyMzMzNThaDQpUWlVSTDpodHRwczovL3d3dy50enVybC5vcmcvem9uZWluZm8vQWZyaWNhL0FiaWRqYW4NClgtTElDLUxPQ0FUSU9OOkFmcmljYS9BYmlkamFuDQpYLVBST0xFUFRJQy1UWk5BTUU6TE1UDQpCRUdJTjpTVEFOREFSRA0KVFpOQU1FOkdNVA0KVFpPRkZTRVRGUk9NOi0wMDE2MDgNClRaT0ZGU0VUVE86KzAwMDANCkRUU1RBUlQ6MTkxMjAxMDFUMDAwMDAwDQpFTkQ6U1RBTkRBUkQNCkVORDpWVElNRVpPTkUNCkVORDpWQ0FMRU5EQVINCg=="
}
],
"body": {
"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none;\"> P {margin-top:0;margin-bottom:0;} </style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family: Aptos, Aptos_EmbeddedFont, Aptos_MSFontService, Calibri, Helvetica, sans-serif; font-size: 12pt; color: rgb(0, 0, 0);\">I just realized the purpose of my life is to be a test case. Good to know.<br></div></body></html>",
"contentType": "html"
},
"bodyPreview": "I just realized the purpose of my life is to be a test case. Good to know.",
"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAFEnxDqYmbJEm8d2l3qfS6A=",
"conversationIndex": "AQHaWBYiUSfEOpiZskSbx3aXep9LoA==",
"flag": {
"flagStatus": "notFlagged"
},
"from": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"hasAttachments": true,
"importance": "normal",
"internetMessageId": "<SJ0PR04MB7294108E381BCCE5C207B6DEBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
"isDeliveryReceiptRequested": false,
"isDraft": false,
"isRead": true,
"isReadReceiptRequested": false,
"receivedDateTime": "2024-02-05T09:33:12Z",
"sender": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"sentDateTime": "2024-02-05T09:33:11Z",
"subject": "Purpose of life",
"toRecipients": [
{
"emailAddress": {
"address": "PradeepG@10rqc2.onmicrosoft.com",
"name": "Pradeep Gupta"
}
}
],
"webLink": "https://outlook.office365.com/owa/?AttachmentItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAABEgAQAEUyH0VS3HJBgHDlZdWZl0k%3D&exvsurl=1&viewmodel=ItemAttachment"
}
},
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAABEgAQAEUyH0VS3HJBgHDlZdWZl02=",
"@odata.type": "#microsoft.graph.itemAttachment",
"item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')",
"item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')/$ref",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Purpose of life part 2",
"size": 11840,
"item": {
"id": "",
"@odata.type": "#microsoft.graph.message",
"createdDateTime": "2024-02-05T09:33:24Z",
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"attachments": [
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAACEgAQAEUyH0VS3HJBgHDlZdWZl0kSABAAjBhd4-oQaUS969pTkS-gzA==",
"@odata.type": "#microsoft.graph.fileAttachment",
"@odata.mediaContentType": "text/calendar",
"contentType": "text/calendar",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Abidjan.ics",
"size": 573,
"contentBytes": "QkVHSU46VkNBTEVOREFSDQpQUk9ESUQ6LS8vdHp1cmwub3JnLy9OT05TR01MIE9sc29uIDIwMjNkLy9FTg0KVkVSU0lPTjoyLjANCkJFR0lOOlZUSU1FWk9ORQ0KVFpJRDpBZnJpY2EvQWJpZGphbg0KTEFTVC1NT0RJRklFRDoyMDIzMTIyMlQyMzMzNThaDQpUWlVSTDpodHRwczovL3d3dy50enVybC5vcmcvem9uZWluZm8vQWZyaWNhL0FiaWRqYW4NClgtTElDLUxPQ0FUSU9OOkFmcmljYS9BYmlkamFuDQpYLVBST0xFUFRJQy1UWk5BTUU6TE1UDQpCRUdJTjpTVEFOREFSRA0KVFpOQU1FOkdNVA0KVFpPRkZTRVRGUk9NOi0wMDE2MDgNClRaT0ZGU0VUVE86KzAwMDANCkRUU1RBUlQ6MTkxMjAxMDFUMDAwMDAwDQpFTkQ6U1RBTkRBUkQNCkVORDpWVElNRVpPTkUNCkVORDpWQ0FMRU5EQVINCg=="
}
],
"body": {
"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none;\"> P {margin-top:0;margin-bottom:0;} </style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family: Aptos, Aptos_EmbeddedFont, Aptos_MSFontService, Calibri, Helvetica, sans-serif; font-size: 12pt; color: rgb(0, 0, 0);\">I just realized the purpose of my life is to be a test case. Good to know.<br></div></body></html>",
"contentType": "html"
},
"bodyPreview": "I just realized the purpose of my life is to be a test case. Good to know.",
"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAFEnxDqYmbJEm8d2l3qfS6A=",
"conversationIndex": "AQHaWBYiUSfEOpiZskSbx3aXep9LoA==",
"flag": {
"flagStatus": "notFlagged"
},
"from": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"hasAttachments": true,
"importance": "normal",
"internetMessageId": "<SJ0PR04MB7294108E381BCCE5C207B6DEBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
"isDeliveryReceiptRequested": false,
"isDraft": false,
"isRead": true,
"isReadReceiptRequested": false,
"receivedDateTime": "2024-02-05T09:33:12Z",
"sender": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"sentDateTime": "2024-02-05T09:33:11Z",
"subject": "Purpose of life",
"toRecipients": [
{
"emailAddress": {
"address": "PradeepG@10rqc2.onmicrosoft.com",
"name": "Pradeep Gupta"
}
}
],
"webLink": "https://outlook.office365.com/owa/?AttachmentItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAABEgAQAEUyH0VS3HJBgHDlZdWZl02%3D&exvsurl=1&viewmodel=ItemAttachment"
}
},
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAABEgAQAEUyH0VS3HJBgHDlZdWZl03=",
"@odata.type": "#microsoft.graph.itemAttachment",
"item@odata.navigationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')",
"item@odata.associationLink": "https://graph.microsoft.com/v1.0/users('7ceb8e03-bdc5-4509-a136-457526165ec0')/messages('')/$ref",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Purpose of life part 3",
"size": 11840,
"item": {
"id": "",
"@odata.type": "#microsoft.graph.message",
"createdDateTime": "2024-02-05T09:33:24Z",
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"attachments": [
{
"id": "AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV-qAAACEgAQAEUyH0VS3HJBgHDlZdWZl0kSABAAjBhd4-oQaUS969pTkS-gzA==",
"@odata.type": "#microsoft.graph.fileAttachment",
"@odata.mediaContentType": "text/calendar",
"contentType": "text/calendar",
"isInline": false,
"lastModifiedDateTime": "2024-02-05T09:33:46Z",
"name": "Abidjan.ics",
"size": 573,
"contentBytes": "QkVHSU46VkNBTEVOREFSDQpQUk9ESUQ6LS8vdHp1cmwub3JnLy9OT05TR01MIE9sc29uIDIwMjNkLy9FTg0KVkVSU0lPTjoyLjANCkJFR0lOOlZUSU1FWk9ORQ0KVFpJRDpBZnJpY2EvQWJpZGphbg0KTEFTVC1NT0RJRklFRDoyMDIzMTIyMlQyMzMzNThaDQpUWlVSTDpodHRwczovL3d3dy50enVybC5vcmcvem9uZWluZm8vQWZyaWNhL0FiaWRqYW4NClgtTElDLUxPQ0FUSU9OOkFmcmljYS9BYmlkamFuDQpYLVBST0xFUFRJQy1UWk5BTUU6TE1UDQpCRUdJTjpTVEFOREFSRA0KVFpOQU1FOkdNVA0KVFpPRkZTRVRGUk9NOi0wMDE2MDgNClRaT0ZGU0VUVE86KzAwMDANCkRUU1RBUlQ6MTkxMjAxMDFUMDAwMDAwDQpFTkQ6U1RBTkRBUkQNCkVORDpWVElNRVpPTkUNCkVORDpWQ0FMRU5EQVINCg=="
}
],
"body": {
"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none;\"> P {margin-top:0;margin-bottom:0;} </style></head><body dir=\"ltr\"><div class=\"elementToProof\" style=\"font-family: Aptos, Aptos_EmbeddedFont, Aptos_MSFontService, Calibri, Helvetica, sans-serif; font-size: 12pt; color: rgb(0, 0, 0);\">I just realized the purpose of my life is to be a test case. Good to know.<br></div></body></html>",
"contentType": "html"
},
"bodyPreview": "I just realized the purpose of my life is to be a test case. Good to know.",
"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAFEnxDqYmbJEm8d2l3qfS6A=",
"conversationIndex": "AQHaWBYiUSfEOpiZskSbx3aXep9LoA==",
"flag": {
"flagStatus": "notFlagged"
},
"from": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"hasAttachments": true,
"importance": "normal",
"internetMessageId": "<SJ0PR04MB7294108E381BCCE5C207B6DEBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
"isDeliveryReceiptRequested": false,
"isDraft": false,
"isRead": true,
"isReadReceiptRequested": false,
"receivedDateTime": "2024-02-05T09:33:12Z",
"sender": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"sentDateTime": "2024-02-05T09:33:11Z",
"subject": "Purpose of life",
"toRecipients": [
{
"emailAddress": {
"address": "PradeepG@10rqc2.onmicrosoft.com",
"name": "Pradeep Gupta"
}
}
],
"webLink": "https://outlook.office365.com/owa/?AttachmentItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAABEgAQAEUyH0VS3HJBgHDlZdWZl03%3D&exvsurl=1&viewmodel=ItemAttachment"
}
}
],
"bccRecipients": [],
"body": {
"content": "<html><head>\r\n<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\"><style type=\"text/css\" style=\"display:none\">\r\n<!--\r\np\r\n\t{margin-top:0;\r\n\tmargin-bottom:0}\r\n-->\r\n</style></head><body dir=\"ltr\"><div><span class=\"elementToProof\" style=\"font-family:Aptos,Aptos_EmbeddedFont,Aptos_MSFontService,Calibri,Helvetica,sans-serif; font-size:12pt; color:rgb(0,0,0)\">Now, this is what we call nesting in this business.<br></span></div></body></html>",
"contentType": "html"
},
"bodyPreview": "Now, this is what we call nesting in this business.",
"ccRecipients": [],
"conversationId": "AAQkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNAAQAIv2-4RHwDhJhlqBV5PTE3Y=",
"conversationIndex": "AQHaWBZdi/b/hEfAOEmGWoFXk9MTdg==",
"flag": {
"flagStatus": "notFlagged"
},
"from": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"hasAttachments": true,
"importance": "normal",
"inferenceClassification": "focused",
"internetMessageId": "<SJ0PR04MB729409CE8C191E01151C110DBC472@SJ0PR04MB7294.namprd04.prod.outlook.com>",
"isDeliveryReceiptRequested": false,
"isDraft": false,
"isRead": true,
"isReadReceiptRequested": false,
"parentFolderId": "AQMkAGJiAGZhNjRlOC00OGI5LTQyNTItYjFkMy00NTJjMTgyZGZkMjQALgAAA0V2IruiJ9ZFvgAO6qBJFycBAEEUODQkmTtNjV_awmuHu00AAAIBCQAAAA==",
"receivedDateTime": "2024-02-05T09:33:46Z",
"replyTo": [],
"sender": {
"emailAddress": {
"address": "JohannaL@10rqc2.onmicrosoft.com",
"name": "Johanna Lorenz"
}
},
"sentDateTime": "2024-02-05T09:33:45Z",
"subject": "Fw: Purpose of life",
"toRecipients": [
{
"emailAddress": {
"address": "PradeepG@10rqc2.onmicrosoft.com",
"name": "Pradeep Gupta"
}
}
],
"webLink": "https://outlook.office365.com/owa/?ItemID=AAMkAGJiZmE2NGU4LTQ4YjktNDI1Mi1iMWQzLTQ1MmMxODJkZmQyNABGAAAAAABFdiK7oifWRb4ADuqgSRcnBwBBFDg0JJk7TY1fmsJrh7tNAAAAAAEJAABBFDg0JJk7TY1fmsJrh7tNAAFnbV%2FqAAA%3D&exvsurl=1&viewmodel=ReadMessageItem"
}

View File

@ -4,12 +4,3 @@ import _ "embed"
//go:embed email-with-attachments.json
var EmailWithAttachments string
//go:embed email-with-event-info.json
var EmailWithEventInfo string
//go:embed email-with-event-object.json
var EmailWithEventObject string
//go:embed email-within-email.json
var EmailWithinEmail string
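(Aside, not part of the diff: a minimal sketch of how one of these embedded fixtures could be consumed in a test, assuming the pre-change EmailWithinEmail variable shown above, the standard library encoding/json package, and a hypothetical package name and struct — all names below are illustrative, not taken from the repository.)

// fixtures_sketch_test.go — illustrative only; assumes package name "testdata".
package testdata

import (
	"encoding/json"
	"testing"
)

// minimalMessage captures only the fields this sketch inspects from the
// email-within-email.json fixture embedded as EmailWithinEmail.
type minimalMessage struct {
	Subject        string `json:"subject"`
	HasAttachments bool   `json:"hasAttachments"`
	Attachments    []struct {
		Name string `json:"name"`
	} `json:"attachments"`
}

func TestEmailWithinEmailFixtureParses(t *testing.T) {
	var msg minimalMessage

	// The embedded string is the full Graph message JSON shown earlier in this diff.
	if err := json.Unmarshal([]byte(EmailWithinEmail), &msg); err != nil {
		t.Fatalf("fixture should be valid JSON: %v", err)
	}

	if !msg.HasAttachments || len(msg.Attachments) == 0 {
		t.Fatalf("expected nested item attachments, got %+v", msg)
	}
}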

Some files were not shown because too many files have changed in this diff.