Compare commits
4 Commits
main
...
moveOneDri
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8938a94eec | ||
|
|
16cb69b8a9 | ||
|
|
341bfd4616 | ||
|
|
cde2ea8492 |
2
.github/ISSUE_TEMPLATE/BUG-REPORT.yaml
vendored
2
.github/ISSUE_TEMPLATE/BUG-REPORT.yaml
vendored
@ -35,6 +35,6 @@ body:
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: Please run Corso with `--log-level debug --mask-sensitive-data` and attach the log file.
|
||||
description: Please run Corso with `--log-level debug` and attach the log file.
|
||||
placeholder: This will be automatically formatted, so no need for backticks.
|
||||
render: shell
|
||||
|
||||
268
.github/actions/backup-restore-test/action.yml
vendored
268
.github/actions/backup-restore-test/action.yml
vendored
@ -1,268 +0,0 @@
|
||||
name: Backup Restore Test
|
||||
description: Run various backup/restore/export tests for a service.
|
||||
|
||||
inputs:
|
||||
service:
|
||||
description: Service to test
|
||||
required: true
|
||||
kind:
|
||||
description: Kind of test
|
||||
required: true
|
||||
backup-id:
|
||||
description: Backup to retrieve data out of
|
||||
required: false
|
||||
backup-args:
|
||||
description: Arguments to pass for backup
|
||||
required: false
|
||||
default: ""
|
||||
restore-args:
|
||||
description: Arguments to pass for restore; restore is skipped when missing.
|
||||
required: false
|
||||
default: ""
|
||||
export-args:
|
||||
description: Arguments to pass for export.
|
||||
required: false
|
||||
default: ""
|
||||
restore-container:
|
||||
description: Folder to use for testing
|
||||
required: true
|
||||
log-dir:
|
||||
description: Folder to store test log files
|
||||
required: true
|
||||
on-collision:
|
||||
description: Value for the --collisions flag
|
||||
required: false
|
||||
default: "replace"
|
||||
with-export:
|
||||
description: Runs export tests when true
|
||||
required: false
|
||||
default: false
|
||||
category:
|
||||
description: category of data for given service
|
||||
required: false
|
||||
|
||||
outputs:
|
||||
backup-id:
|
||||
value: ${{ steps.backup.outputs.result }}
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Backup ${{ inputs.service }} ${{ inputs.kind }}
|
||||
id: backup
|
||||
shell: bash
|
||||
working-directory: src
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Backup ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
set -euo pipefail
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-backup-${{inputs.kind }}.log
|
||||
./corso backup create '${{ inputs.service }}' \
|
||||
--no-stats --hide-progress --json \
|
||||
${{ inputs.backup-args }} |
|
||||
tee /dev/stderr | # for printing logs
|
||||
jq -r '.[0] | .id' |
|
||||
sed 's/^/result=/' |
|
||||
tee $GITHUB_OUTPUT
|
||||
|
||||
- name: Restore ${{ inputs.service }} ${{ inputs.kind }}
|
||||
if: inputs.restore-args
|
||||
id: restore
|
||||
shell: bash
|
||||
working-directory: src
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Restore ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
set -euo pipefail
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
|
||||
./corso restore '${{ inputs.service }}' \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--collisions ${{ inputs.on-collision }} \
|
||||
${{ inputs.restore-args }} \
|
||||
--backup '${{ steps.backup.outputs.result }}' \
|
||||
2>&1 |
|
||||
tee /tmp/corsologs |
|
||||
grep -i -e 'Restoring to folder ' |
|
||||
sed "s/Restoring to folder /result=/" |
|
||||
tee $GITHUB_OUTPUT
|
||||
|
||||
cat /tmp/corsologs
|
||||
|
||||
- name: Check restore ${{ inputs.service }} ${{ inputs.kind }}
|
||||
if: inputs.restore-args
|
||||
shell: bash
|
||||
working-directory: src
|
||||
env:
|
||||
SANITY_TEST_RESTORE_CONTAINER: ${{ steps.restore.outputs.result }}
|
||||
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
|
||||
SANITY_BACKUP_ID: ${{ inputs.backup-id }}
|
||||
# lists are not restored to a different folder. they get created adjacent to their originals
|
||||
# hence SANITY_TEST_RESTORE_CONTAINER_PREFIX is necessary to differentiate restored from original
|
||||
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
|
||||
SANITY_TEST_CATEGORY: ${{ inputs.category }}
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Sanity Test Restore ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
|
||||
./sanity-test restore ${{ inputs.service }}
|
||||
|
||||
- name: Export ${{ inputs.service }} ${{ inputs.kind }}
|
||||
if: ${{ inputs.with-export == 'true' }}
|
||||
id: export
|
||||
shell: bash
|
||||
working-directory: src
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Export ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
set -euo pipefail
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
|
||||
./corso export '${{ inputs.service }}' \
|
||||
/tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }} \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
${{ inputs.export-args }} \
|
||||
--backup '${{ steps.backup.outputs.result }}'
|
||||
|
||||
cat /tmp/corsologs
|
||||
|
||||
- name: Check export ${{ inputs.service }} ${{ inputs.kind }}
|
||||
if: ${{ inputs.with-export == 'true' }}
|
||||
shell: bash
|
||||
working-directory: src
|
||||
env:
|
||||
SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}${{ inputs.category && '-' }}${{ inputs.category }}-${{ inputs.kind }}
|
||||
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
|
||||
SANITY_BACKUP_ID: ${{ inputs.backup-id }}
|
||||
# applies only for sharepoint lists
|
||||
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
|
||||
SANITY_TEST_CATEGORY: ${{ inputs.category }}
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Sanity-Test Export ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
|
||||
./sanity-test export ${{ inputs.service }}
|
||||
|
||||
- name: Export archive ${{ inputs.service }} ${{ inputs.kind }}
|
||||
if: ${{ inputs.with-export == 'true' }}
|
||||
id: export-archive
|
||||
shell: bash
|
||||
working-directory: src
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Export Archive ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
set -euo pipefail
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
|
||||
./corso export '${{ inputs.service }}' \
|
||||
/tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-archive \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--archive \
|
||||
${{ inputs.export-args }} \
|
||||
--backup '${{ steps.backup.outputs.result }}'
|
||||
|
||||
unzip /tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-archive/*.zip \
|
||||
-d /tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-unzipped
|
||||
cat /tmp/corsologs
|
||||
|
||||
- name: Check archive export ${{ inputs.service }} ${{ inputs.kind }}
|
||||
if: ${{ inputs.with-export == 'true' }}
|
||||
shell: bash
|
||||
working-directory: src
|
||||
env:
|
||||
SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}${{ inputs.category && '-' }}${{ inputs.category }}-${{inputs.kind }}-unzipped
|
||||
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
|
||||
SANITY_BACKUP_ID: ${{ inputs.backup-id }}
|
||||
# applies only for sharepoint lists
|
||||
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
|
||||
SANITY_TEST_CATEGORY: ${{ inputs.category }}
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Sanity-Test Export Archive ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
|
||||
./sanity-test export ${{ inputs.service }}
|
||||
|
||||
- name: List ${{ inputs.service }} ${{ inputs.kind }}
|
||||
shell: bash
|
||||
working-directory: src
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Backup list ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
set -euo pipefail
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-${{ inputs.service }}${CATEGORY_SUFFIX}-list-${{inputs.kind }}.log
|
||||
./corso backup list ${{ inputs.service }} \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
2>&1 |
|
||||
tee /tmp/corso-backup-list.log
|
||||
|
||||
if ! grep -q ${{ steps.backup.outputs.result }} /tmp/corso-backup-list.log
|
||||
then
|
||||
echo "Unable to find backup from previous run in backup list"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: List item ${{ inputs.service }} ${{ inputs.kind }}
|
||||
shell: bash
|
||||
working-directory: src
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Backup List w/ Backup ${{ inputs.service }} ${{ inputs.kind }}
|
||||
echo "---------------------------"
|
||||
set -euo pipefail
|
||||
# Include category in the log file name if present
|
||||
CATEGORY_SUFFIX=""
|
||||
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
|
||||
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-${{ inputs.service }}${CATEGORY_SUFFIX}-single-${{inputs.kind }}.log
|
||||
./corso backup list ${{ inputs.service }} \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--backup "${{ steps.backup.outputs.result }}" \
|
||||
2>&1 |
|
||||
tee /tmp/corso-backup-list-item.log
|
||||
|
||||
if ! grep -q ${{ steps.backup.outputs.result }} /tmp/corso-backup-list-item.log
|
||||
then
|
||||
echo "Unable to list previous backup"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- if: always()
|
||||
shell: bash
|
||||
run: |
|
||||
echo "---------------------------"
|
||||
echo Logging Results
|
||||
echo "---------------------------"
|
||||
|
||||
- name: Upload test log
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: "${{ inputs.service }}-${{ inputs.kind }}-logs"
|
||||
path: ${{ inputs.log-dir }}/*
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
1
.github/actions/go-setup-cache/action.yml
vendored
1
.github/actions/go-setup-cache/action.yml
vendored
@ -1,5 +1,4 @@
|
||||
name: Setup and Cache Golang
|
||||
description: Build golang binaries for later use in CI.
|
||||
|
||||
# clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml
|
||||
#
|
||||
|
||||
76
.github/actions/publish-binary/action.yml
vendored
76
.github/actions/publish-binary/action.yml
vendored
@ -1,76 +0,0 @@
|
||||
name: Publish Binary
|
||||
description: Publish binary artifacts.
|
||||
|
||||
inputs:
|
||||
version:
|
||||
description: Corso version to use for publishing
|
||||
required: true
|
||||
github_token:
|
||||
description: GitHub token for publishing
|
||||
required: true
|
||||
rudderstack_write_key:
|
||||
description: Write key for RudderStack
|
||||
required: true
|
||||
rudderstack_data_plane_url:
|
||||
description: Data plane URL for RudderStack
|
||||
required: true
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0 # needed to pull changelog
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- name: Mark snapshot release
|
||||
shell: bash
|
||||
if: ${{ !startsWith(github.ref , 'refs/tags/') }}
|
||||
run: |
|
||||
echo "grflags=--snapshot" >> $GITHUB_ENV
|
||||
|
||||
- name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@v4
|
||||
with:
|
||||
version: latest
|
||||
args: release --clean --timeout 500m --parallelism 1 ${{ env.grflags }}
|
||||
workdir: src
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ inputs.github_token }}
|
||||
RUDDERSTACK_CORSO_WRITE_KEY: ${{ inputs.rudderstack_write_key }}
|
||||
RUDDERSTACK_CORSO_DATA_PLANE_URL: ${{ inputs.rudderstack_data_plane_url }}
|
||||
CORSO_VERSION: ${{ inputs.version }}
|
||||
|
||||
- name: Upload darwin arm64
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: corso_Darwin_arm64
|
||||
path: src/dist/corso_darwin_arm64/corso
|
||||
|
||||
- name: Upload linux arm64
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: corso_Linux_arm64
|
||||
path: src/dist/corso_linux_arm64/corso
|
||||
|
||||
- name: Upload darwin amd64
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: corso_Darwin_amd64
|
||||
path: src/dist/corso_darwin_amd64_v1/corso
|
||||
|
||||
- name: Upload linux amd64
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: corso_Linux_amd64
|
||||
path: src/dist/corso_linux_amd64_v1/corso
|
||||
|
||||
- name: Upload windows amd64
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: corso_Windows_amd64
|
||||
path: src/dist/corso_windows_amd64_v1/corso.exe
|
||||
1
.github/actions/publish-website/action.yml
vendored
1
.github/actions/publish-website/action.yml
vendored
@ -1,5 +1,4 @@
|
||||
name: Publish Website
|
||||
description: Publish website artifacts.
|
||||
|
||||
inputs:
|
||||
aws-iam-role:
|
||||
|
||||
64
.github/actions/purge-m365-data/action.yml
vendored
64
.github/actions/purge-m365-data/action.yml
vendored
@ -1,5 +1,4 @@
|
||||
name: Purge M365 User Data
|
||||
description: Deletes M365 data generated during CI tests.
|
||||
|
||||
# Hard deletion of an m365 user's data. Our CI processes create a lot
|
||||
# of data churn (creation and immediate deletion) of files, the likes
|
||||
@ -20,9 +19,7 @@ inputs:
|
||||
site:
|
||||
description: Sharepoint site where data is to be purged.
|
||||
libraries:
|
||||
description: List of library names within the site where data is to be purged.
|
||||
library-prefix:
|
||||
description: List of library names within the site where the library will get deleted entirely.
|
||||
description: List of library names within site where data is to be purged.
|
||||
folder-prefix:
|
||||
description: Name of the folder to be purged. If falsy, will purge the set of static, well known folders instead.
|
||||
older-than:
|
||||
@ -31,19 +28,12 @@ inputs:
|
||||
description: Secret value of for AZURE_CLIENT_ID
|
||||
azure-client-secret:
|
||||
description: Secret value of for AZURE_CLIENT_SECRET
|
||||
azure-pnp-client-id:
|
||||
description: Secret value of AZURE_PNP_CLIENT_ID
|
||||
azure-pnp-client-cert:
|
||||
description: Base64 encoded private certificate for the azure-pnp-client-id (Secret value of AZURE_PNP_CLIENT_CERT)
|
||||
azure-tenant-id:
|
||||
description: Secret value of AZURE_TENANT_ID
|
||||
description: Secret value of for AZURE_TENANT_ID
|
||||
m365-admin-user:
|
||||
description: Secret value of for M365_TENANT_ADMIN_USER
|
||||
m365-admin-password:
|
||||
description: Secret value of for M365_TENANT_ADMIN_PASSWORD
|
||||
tenant-domain:
|
||||
description: The domain of the tenant (ex. 10rqc2.onmicrosft.com)
|
||||
required: true
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
@ -62,22 +52,17 @@ runs:
|
||||
AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
|
||||
AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
|
||||
run: |
|
||||
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
|
||||
{
|
||||
if (./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
|
||||
break
|
||||
}
|
||||
}
|
||||
./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
|
||||
|
||||
# TODO(ashmrtn): Re-enable when we figure out errors we're seeing with Get-Mailbox call.
|
||||
#- name: Reset retention for all mailboxes to 0
|
||||
# if: ${{ inputs.user == '' }}
|
||||
# shell: pwsh
|
||||
# working-directory: ./src/cmd/purge/scripts
|
||||
# env:
|
||||
# M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
|
||||
# M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
|
||||
# run: ./exchangeRetention.ps1
|
||||
- name: Reset retention for all mailboxes to 0
|
||||
if: ${{ inputs.user == '' }}
|
||||
shell: pwsh
|
||||
working-directory: ./src/cmd/purge/scripts
|
||||
env:
|
||||
M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
|
||||
M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
|
||||
run: |
|
||||
./exchangeRetention.ps1
|
||||
|
||||
################################################################################################################
|
||||
# OneDrive
|
||||
@ -88,16 +73,10 @@ runs:
|
||||
shell: pwsh
|
||||
working-directory: ./src/cmd/purge/scripts
|
||||
env:
|
||||
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
|
||||
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
|
||||
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
|
||||
M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
|
||||
M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
|
||||
run: |
|
||||
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
|
||||
{
|
||||
if (./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
|
||||
break
|
||||
}
|
||||
}
|
||||
./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
|
||||
|
||||
################################################################################################################
|
||||
# Sharepoint
|
||||
@ -108,14 +87,7 @@ runs:
|
||||
shell: pwsh
|
||||
working-directory: ./src/cmd/purge/scripts
|
||||
env:
|
||||
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }}
|
||||
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }}
|
||||
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
|
||||
M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
|
||||
M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
|
||||
run: |
|
||||
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
|
||||
{
|
||||
if (./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -PurgeBeforeTimestamp ${{ inputs.older-than }}
|
||||
|
||||
92
.github/actions/teams-message/action.yml
vendored
92
.github/actions/teams-message/action.yml
vendored
@ -1,92 +0,0 @@
|
||||
name: Send a message to Teams
|
||||
description: Send messages to communication apps.
|
||||
|
||||
inputs:
|
||||
msg:
|
||||
description: The teams message text
|
||||
teams_url:
|
||||
description: passthrough for secrets.TEAMS_CORSO_CI_WEBHOOK_URL
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: set github ref
|
||||
shell: bash
|
||||
run: |
|
||||
echo "github_reference=${{ github.ref }}" >> $GITHUB_ENV
|
||||
|
||||
- name: trim github ref
|
||||
shell: bash
|
||||
run: |
|
||||
echo "trimmed_ref=${github_reference#refs/}" >> $GITHUB_ENV
|
||||
|
||||
- name: build urls
|
||||
shell: bash
|
||||
run: |
|
||||
echo "logurl=$(printf 'https://github.com/alcionai/corso/actions/runs/%s' ${{ github.run_id }})" >> $GITHUB_ENV
|
||||
echo "commiturl=$(printf 'https://github.com/alcionai/corso/commit/%s' ${{ github.sha }})" >> $GITHUB_ENV
|
||||
echo "refurl=$(printf 'https://github.com/alcionai/corso/%s' ${{ env.trimmed_ref }})" >> $GITHUB_ENV
|
||||
|
||||
- name: use url or blank val
|
||||
shell: bash
|
||||
run: |
|
||||
echo "STEP=${{ env.trimmed_ref || '' }}" >> $GITHUB_ENV
|
||||
echo "JOB=${{ github.job || '' }}" >> $GITHUB_ENV
|
||||
echo "LOGS=${{ github.run_id && env.logurl || '-' }}" >> $GITHUB_ENV
|
||||
echo "COMMIT=${{ github.sha && env.commiturl || '-' }}" >> $GITHUB_ENV
|
||||
echo "REF=${{ env.trimmed_ref && env.refurl || '-' }}" >> $GITHUB_ENV
|
||||
|
||||
- name: Send JSON payload to Teams Webhook
|
||||
shell: bash
|
||||
run: |
|
||||
curl -X POST \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{
|
||||
"type":"message",
|
||||
"attachments":[
|
||||
{
|
||||
"contentType":"application/vnd.microsoft.card.adaptive",
|
||||
"contentUrl":null,
|
||||
"content":{
|
||||
"$schema":"http://adaptivecards.io/schemas/adaptive-card.json",
|
||||
"type":"AdaptiveCard",
|
||||
"body": [
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"size": "Medium",
|
||||
"weight": "Bolder",
|
||||
"text": "${{ inputs.msg }}",
|
||||
"color": "Attention"
|
||||
},
|
||||
{
|
||||
"type": "TextBlock",
|
||||
"text": "${{ env.JOB }} :: ${{ env.STEP }}",
|
||||
"wrap": true
|
||||
}
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"type": "Action.OpenUrl",
|
||||
"title": "Action",
|
||||
"url": "${{ env.LOGS }}"
|
||||
},
|
||||
{
|
||||
"type": "Action.OpenUrl",
|
||||
"title": "Commit",
|
||||
"url": "${{ env.COMMIT }}"
|
||||
},
|
||||
{
|
||||
"type": "Action.OpenUrl",
|
||||
"title": "Ref",
|
||||
"url": "${{ env.REF }}"
|
||||
}
|
||||
],
|
||||
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
|
||||
"version": "1.5"
|
||||
}
|
||||
}
|
||||
]
|
||||
}' \
|
||||
${{ inputs.teams_url }}
|
||||
1
.github/actions/website-linting/action.yml
vendored
1
.github/actions/website-linting/action.yml
vendored
@ -1,5 +1,4 @@
|
||||
name: Lint Website
|
||||
description: Lint website content.
|
||||
|
||||
inputs:
|
||||
version:
|
||||
|
||||
15
.github/workflows/_filechange_checker.yml
vendored
15
.github/workflows/_filechange_checker.yml
vendored
@ -9,9 +9,6 @@ on:
|
||||
websitefileschanged:
|
||||
description: "'true' if websites/** or .github/workflows/** files have changed in the branch"
|
||||
value: ${{ jobs.file-change-check.outputs.websitefileschanged }}
|
||||
actionsfileschanged:
|
||||
description: "'true' if .github/actions/** or .github/workflows/** files have changed in the branch"
|
||||
value: ${{ jobs.file-change-check.outputs.actionsfileschanged }}
|
||||
|
||||
jobs:
|
||||
file-change-check:
|
||||
@ -22,13 +19,12 @@ jobs:
|
||||
outputs:
|
||||
srcfileschanged: ${{ steps.srcchecker.outputs.srcfileschanged }}
|
||||
websitefileschanged: ${{ steps.websitechecker.outputs.websitefileschanged }}
|
||||
actionsfileschanged: ${{ steps.actionschecker.outputs.actionsfileschanged }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# only run CI tests if the src folder or workflow actions have changed
|
||||
- name: Check for file changes in src/ or .github/workflows/
|
||||
uses: dorny/paths-filter@v3
|
||||
uses: dorny/paths-filter@v2
|
||||
id: dornycheck
|
||||
with:
|
||||
list-files: json
|
||||
@ -54,10 +50,3 @@ jobs:
|
||||
run: |
|
||||
echo "website or workflow file changes occurred"
|
||||
echo websitefileschanged=true >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Check dorny for changes in actions filepaths
|
||||
id: actionschecker
|
||||
if: steps.dornycheck.outputs.actions == 'true'
|
||||
run: |
|
||||
echo "actions file changes occurred"
|
||||
echo actionsfileschanged=true >> $GITHUB_OUTPUT
|
||||
|
||||
46
.github/workflows/accSelector.yaml
vendored
46
.github/workflows/accSelector.yaml
vendored
@ -1,46 +0,0 @@
|
||||
name: SetM365AppAcc
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
outputs:
|
||||
client_app_slot:
|
||||
value: ${{ jobs.GetM365App.outputs.client_app_slot }}
|
||||
client_id_env:
|
||||
value: ${{ jobs.GetM365App.outputs.client_id_env }}
|
||||
client_secret_env:
|
||||
value: ${{ jobs.GetM365App.outputs.client_secret_env }}
|
||||
|
||||
jobs:
|
||||
GetM365App:
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
client_app_slot: ${{ steps.roundrobin.outputs.CLIENT_APP_SLOT }}
|
||||
client_id_env: ${{ steps.roundrobin.outputs.CLIENT_ID_ENV }}
|
||||
client_secret_env: ${{ steps.roundrobin.outputs.CLIENT_SECRET_ENV }}
|
||||
steps:
|
||||
- name: Figure out which client id to use
|
||||
id: roundrobin
|
||||
run: |
|
||||
slot=$((GITHUB_RUN_NUMBER % 4))
|
||||
echo "CLIENT_APP_SLOT=$slot" >> $GITHUB_OUTPUT
|
||||
|
||||
case $slot in
|
||||
|
||||
0)
|
||||
echo "CLIENT_ID_ENV=CLIENT_ID" >> $GITHUB_OUTPUT
|
||||
echo "CLIENT_SECRET_ENV=CLIENT_SECRET" >> $GITHUB_OUTPUT
|
||||
;;
|
||||
1)
|
||||
echo "CLIENT_ID_ENV=CLIENT_ID_2" >> $GITHUB_OUTPUT
|
||||
echo "CLIENT_SECRET_ENV=CLIENT_SECRET_2" >> $GITHUB_OUTPUT
|
||||
;;
|
||||
2)
|
||||
echo "CLIENT_ID_ENV=CLIENT_ID_3" >> $GITHUB_OUTPUT
|
||||
echo "CLIENT_SECRET_ENV=CLIENT_SECRET_3" >> $GITHUB_OUTPUT
|
||||
;;
|
||||
3)
|
||||
echo "CLIENT_ID_ENV=CLIENT_ID_4" >> $GITHUB_OUTPUT
|
||||
echo "CLIENT_SECRET_ENV=CLIENT_SECRET_4" >> $GITHUB_OUTPUT
|
||||
;;
|
||||
esac
|
||||
22
.github/workflows/auto-merge.yml
vendored
22
.github/workflows/auto-merge.yml
vendored
@ -1,4 +1,3 @@
|
||||
# See https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#common-dependabot-automations
|
||||
name: auto-merge
|
||||
|
||||
on:
|
||||
@ -6,24 +5,11 @@ on:
|
||||
paths-ignore:
|
||||
- "src/**" # prevent auto-merge for go dependencies
|
||||
|
||||
permissions:
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
auto-approve-label:
|
||||
auto-merge:
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
steps:
|
||||
- name: Dependabot metadata
|
||||
id: metadata
|
||||
uses: dependabot/fetch-metadata@v1
|
||||
- uses: actions/checkout@v3
|
||||
- uses: ahmadnassri/action-dependabot-auto-merge@v2 # https://github.com/marketplace/actions/dependabot-auto-merge
|
||||
with:
|
||||
github-token: "${{ secrets.GITHUB_TOKEN }}"
|
||||
- name: Enable auto-merge for Dependabot PRs
|
||||
if: ${{steps.metadata.outputs.update-type == 'version-update:semver-minor'}}
|
||||
run: |
|
||||
gh pr edit "$PR_URL" --add-label "mergequeue"
|
||||
gh pr review --approve "$PR_URL"
|
||||
env:
|
||||
PR_URL: ${{github.event.pull_request.html_url}}
|
||||
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
|
||||
github-token: ${{ secrets.DEPENDABOT_TOKEN }}
|
||||
|
||||
44
.github/workflows/binary-publish.yml
vendored
44
.github/workflows/binary-publish.yml
vendored
@ -1,44 +0,0 @@
|
||||
name: Publish binary
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
SetEnv:
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Get version string
|
||||
id: version
|
||||
run: |
|
||||
if ${{ startsWith(github.ref, 'refs/tags/') }}; then
|
||||
echo "version=$(git describe --exact-match --tags $(git rev-parse HEAD))" | tee -a $GITHUB_OUTPUT
|
||||
else
|
||||
echo "version=$(echo unreleased-$(git rev-parse --short HEAD))" | tee -a $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
Publish-Binary:
|
||||
needs: [SetEnv]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Publish Binary
|
||||
uses: ./.github/actions/publish-binary
|
||||
with:
|
||||
version: ${{ needs.SetEnv.outputs.version }}
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
rudderstack_write_key: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}
|
||||
rudderstack_data_plane_url: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}
|
||||
|
||||
- name: Notify failure in teams
|
||||
if: failure()
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: "[CORSO FAILED] Publishing Binary"
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
337
.github/workflows/ci.yml
vendored
337
.github/workflows/ci.yml
vendored
@ -40,7 +40,7 @@ jobs:
|
||||
run:
|
||||
working-directory: src
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# single setup and sum cache handling here.
|
||||
# the results will cascade onto both testing and linting.
|
||||
@ -52,7 +52,38 @@ jobs:
|
||||
|
||||
# SetM365App will decide which M365 app to use for this CI run
|
||||
SetM365App:
|
||||
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
client_app_slot: ${{ steps.roundrobin.outputs.CLIENT_APP_SLOT }}
|
||||
client_id_env: ${{ steps.roundrobin.outputs.CLIENT_ID_ENV }}
|
||||
client_secret_env: ${{ steps.roundrobin.outputs.CLIENT_SECRET_ENV }}
|
||||
steps:
|
||||
- name: Figure out which client id to use
|
||||
id: roundrobin
|
||||
run: |
|
||||
slot=$((GITHUB_RUN_NUMBER % 4))
|
||||
echo "CLIENT_APP_SLOT=$slot" >> $GITHUB_OUTPUT
|
||||
|
||||
case $slot in
|
||||
|
||||
0)
|
||||
echo "CLIENT_ID_ENV=CLIENT_ID" >> $GITHUB_OUTPUT
|
||||
echo "CLIENT_SECRET_ENV=CLIENT_SECRET" >> $GITHUB_OUTPUT
|
||||
;;
|
||||
1)
|
||||
echo "CLIENT_ID_ENV=CLIENT_ID_2" >> $GITHUB_OUTPUT
|
||||
echo "CLIENT_SECRET_ENV=CLIENT_SECRET_2" >> $GITHUB_OUTPUT
|
||||
;;
|
||||
2)
|
||||
echo "CLIENT_ID_ENV=CLIENT_ID_3" >> $GITHUB_OUTPUT
|
||||
echo "CLIENT_SECRET_ENV=CLIENT_SECRET_3" >> $GITHUB_OUTPUT
|
||||
;;
|
||||
3)
|
||||
echo "CLIENT_ID_ENV=CLIENT_ID_4" >> $GITHUB_OUTPUT
|
||||
echo "CLIENT_SECRET_ENV=CLIENT_SECRET_4" >> $GITHUB_OUTPUT
|
||||
;;
|
||||
esac
|
||||
|
||||
SetEnv:
|
||||
environment: Testing
|
||||
@ -63,7 +94,7 @@ jobs:
|
||||
website-bucket: ${{ steps.website-bucket.outputs.website-bucket }}
|
||||
website-cfid: ${{ steps.website-cfid.outputs.website-cfid }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Figure out environment
|
||||
id: environment
|
||||
@ -110,11 +141,10 @@ jobs:
|
||||
needs: [Precheck, Checkout, SetEnv]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.websitefileschanged == 'true' # websitefileschanged also includes srcfileschanged
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Lint Website
|
||||
uses: ./.github/actions/website-linting
|
||||
@ -129,7 +159,6 @@ jobs:
|
||||
needs: [Precheck, Checkout, SetM365App]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 120
|
||||
if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository)
|
||||
defaults:
|
||||
run:
|
||||
@ -139,10 +168,8 @@ jobs:
|
||||
AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }}
|
||||
AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
|
||||
CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-ci.log
|
||||
LOG_GRAPH_REQUESTS: true
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
@ -157,7 +184,7 @@ jobs:
|
||||
|
||||
# AWS creds
|
||||
- name: Configure AWS credentials from Test account
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
uses: aws-actions/configure-aws-credentials@v2
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
|
||||
role-session-name: integration-testing
|
||||
@ -173,78 +200,8 @@ jobs:
|
||||
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
|
||||
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
|
||||
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
|
||||
S3_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
go test \
|
||||
-tags testing \
|
||||
-json \
|
||||
-v \
|
||||
-failfast \
|
||||
-p 1 \
|
||||
-timeout 20m \
|
||||
./... \
|
||||
2>&1 | tee ./testlog/gotest-ci.log | gotestfmt -hide successful-tests
|
||||
|
||||
# Upload the original go test output as an artifact for later review.
|
||||
- name: Upload test log
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ci-test-log
|
||||
path: src/testlog/*
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
Retention-Test-Suite-Trusted:
|
||||
needs: [Precheck, Checkout, SetM365App]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository)
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
env:
|
||||
# Resolve the name of the secret that contains the Azure client ID/secret
|
||||
AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }}
|
||||
AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
|
||||
CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-ci-retention.log
|
||||
CORSO_LOG_FILE: ./src/testlog/testlogging.log
|
||||
LOG_GRAPH_REQUESTS: true
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- run: mkdir testlog
|
||||
|
||||
# Install gotestfmt
|
||||
- name: Set up gotestfmt
|
||||
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
|
||||
|
||||
# AWS creds
|
||||
- name: Configure AWS credentials from Test account
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
|
||||
role-session-name: integration-testing
|
||||
aws-region: us-east-1
|
||||
|
||||
# run the tests
|
||||
- name: Retention Tests
|
||||
env:
|
||||
AZURE_CLIENT_ID: ${{ secrets[env.AZURE_CLIENT_ID_NAME] }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets[env.AZURE_CLIENT_SECRET_NAME] }}
|
||||
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
|
||||
CORSO_RETENTION_TESTS: true
|
||||
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
|
||||
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
|
||||
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
|
||||
S3_BUCKET: ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
go test \
|
||||
@ -253,16 +210,15 @@ jobs:
|
||||
-v \
|
||||
-failfast \
|
||||
-p 1 \
|
||||
-timeout 10m \
|
||||
./... \
|
||||
2>&1 | tee ./testlog/gotest-ci.log | gotestfmt -hide successful-tests
|
||||
-timeout 15m \
|
||||
./... 2>&1 | tee ./testlog/gotest.log | gotestfmt -hide successful-tests
|
||||
|
||||
# Upload the original go test output as an artifact for later review.
|
||||
- name: Upload test log
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ci-retention-test-log
|
||||
name: test-log
|
||||
path: src/testlog/*
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
@ -271,16 +227,12 @@ jobs:
|
||||
needs: [Precheck, Checkout]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
if: needs.precheck.outputs.srcfileschanged == 'true'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
env:
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-unit.log
|
||||
LOG_GRAPH_REQUESTS: true
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
@ -300,6 +252,8 @@ jobs:
|
||||
# something elsewhere.
|
||||
CORSO_M365_TEST_USER_ID: 'foo'
|
||||
CORSO_SECONDARY_M365_TEST_USER_ID: 'foo'
|
||||
CORSO_LOG_FILE: ./src/testlog/testlogging.log
|
||||
LOG_GRAPH_REQUESTS: true
|
||||
run: |
|
||||
set -euo pipefail
|
||||
go test \
|
||||
@ -308,14 +262,13 @@ jobs:
|
||||
-v \
|
||||
-failfast \
|
||||
-p 1 \
|
||||
-timeout 20m \
|
||||
./... \
|
||||
2>&1 | tee ./testlog/gotest-unit.log | gotestfmt -hide successful-tests
|
||||
-timeout 15m \
|
||||
./... 2>&1 | tee ./testlog/gotest-unit.log | gotestfmt -hide successful-tests
|
||||
|
||||
# Upload the original go test output as an artifact for later review.
|
||||
- name: Upload test log
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: unit-test-log
|
||||
path: src/testlog/*
|
||||
@ -330,9 +283,6 @@ jobs:
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
env:
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/testlog/run-fork.log
|
||||
LOG_GRAPH_REQUESTS: true
|
||||
steps:
|
||||
- name: Fail check if not repository_dispatch
|
||||
if: github.event_name != 'repository_dispatch'
|
||||
@ -360,7 +310,7 @@ jobs:
|
||||
|
||||
# Check out merge commit
|
||||
- name: Fork based /ok-to-test checkout
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: "refs/pull/${{ github.event.client_payload.pull_request.number }}/merge"
|
||||
|
||||
@ -377,7 +327,7 @@ jobs:
|
||||
|
||||
# AWS creds
|
||||
- name: Configure AWS credentials from Test account
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
uses: aws-actions/configure-aws-credentials@v2
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
|
||||
role-session-name: integration-testing
|
||||
@ -392,29 +342,29 @@ jobs:
|
||||
CORSO_CI_TESTS: true
|
||||
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
|
||||
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
|
||||
CORSO_LOG_FILE: ./src/testlog/testlogging.log
|
||||
run: |
|
||||
set -euo pipefail
|
||||
go test \
|
||||
-json \
|
||||
-v \
|
||||
-timeout 15m \
|
||||
./... \
|
||||
2>&1 | tee ./testlog/gotest-fork.log | gotestfmt -hide successful-tests
|
||||
./... 2>&1 | tee ./testlog/gotest.log | gotestfmt -hide successful-tests
|
||||
|
||||
# Upload the original go test log as an artifact for later review.
|
||||
- name: Upload test log
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: fork-test-log
|
||||
name: test-log
|
||||
path: src/testlog/*
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
# Update check run called "Test-Suite-Fork"
|
||||
- uses: actions/github-script@v7
|
||||
- uses: actions/github-script@v6
|
||||
id: update-check-run
|
||||
if: failure()
|
||||
if: ${{ always() }}
|
||||
env:
|
||||
number: ${{ github.event.client_payload.pull_request.number }}
|
||||
job: ${{ github.job }}
|
||||
@ -445,17 +395,16 @@ jobs:
|
||||
# --- Source Code Linting ----------------------------------------------------------------------------
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
|
||||
Source-Code-Linting:
|
||||
Linting:
|
||||
needs: [Precheck, Checkout]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.srcfileschanged == 'true'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
@ -463,11 +412,11 @@ jobs:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- name: Go Lint
|
||||
uses: golangci/golangci-lint-action@v4
|
||||
uses: golangci/golangci-lint-action@v3
|
||||
with:
|
||||
# Keep pinned to a verson as sometimes updates will add new lint
|
||||
# failures in unchanged code.
|
||||
version: v1.54.2
|
||||
version: v1.52.2
|
||||
working-directory: src
|
||||
skip-pkg-cache: true
|
||||
skip-build-cache: true
|
||||
@ -485,101 +434,83 @@ jobs:
|
||||
install-go: false
|
||||
working-directory: src
|
||||
|
||||
- name: Run allowtags
|
||||
run: |
|
||||
go install github.com/ashmrtn/allowtags@latest
|
||||
allowtags --allow-key json --allow-key uriparametername ./...
|
||||
|
||||
# I could not find a way to install tree-grepper without nix
|
||||
# https://github.com/BrianHicks/tree-grepper/issues/293
|
||||
- uses: cachix/install-nix-action@v25
|
||||
- uses: cachix/cachix-action@v14
|
||||
with:
|
||||
name: tree-grepper
|
||||
- run: nix-env -if https://github.com/BrianHicks/tree-grepper/archive/refs/heads/main.tar.gz
|
||||
- name: Run trailing comma lint rule
|
||||
run: |
|
||||
# Using `grep .` as the exit codes are always true for correct grammar
|
||||
if tree-grepper -q go '(argument_list "," @no-trailing-comma .)' | grep .; then
|
||||
echo "No trailing commas for function calls"
|
||||
exit 1
|
||||
fi
|
||||
- name: Check for empty string comparison
|
||||
run: |
|
||||
# Using `grep .` as the exit codes are always true for correct grammar
|
||||
if tree-grepper -q go '((binary_expression (identifier) ["==" "!="] (interpreted_string_literal) @_ri) @exp (#eq? @_ri "\"\""))' | grep .; then
|
||||
echo "Use len check instead of empty string comparison"
|
||||
exit 1
|
||||
fi
|
||||
- name: Check for cases where errors are not propagated
|
||||
run: |
|
||||
# Using `grep .` as the exit codes are always true for correct grammar
|
||||
if tree-grepper -q go '((if_statement (binary_expression) @_if (block (return_statement (expression_list (call_expression (selector_expression) @_fun ) @ret .)))) (#match? @_if "err != nil") (#match? @_fun "clues.NewWC"))' | grep .; then
|
||||
echo "Make sure to propagate errors with clues"
|
||||
exit 1
|
||||
fi
|
||||
- name: Check if clues without context are used when context is passed in
|
||||
run: |
|
||||
# Using `grep .` as the exit codes are always true for correct grammar
|
||||
if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#match? @clue "WC$"))' | grep .; then
|
||||
echo "Do not use clues.*WC when context is passed in"
|
||||
exit 1
|
||||
fi
|
||||
- name: Check clues with context is used when context is not passed in
|
||||
run: |
|
||||
# Using `grep .` as the exit codes are always true for correct grammar
|
||||
if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#not-eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#not-match? @clue "WC$"))' | grep .; then
|
||||
echo "Use clues.*WC when context is not passed in"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
# --- GitHub Actions Linting -------------------------------------------------------------------------
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
|
||||
Actions-Lint:
|
||||
needs: [Precheck]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
if: needs.precheck.outputs.actionsfileschanged == 'true'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: actionlint
|
||||
uses: raven-actions/actionlint@v1
|
||||
with:
|
||||
fail-on-error: true
|
||||
cache: true
|
||||
# Ignore
|
||||
# * combining commands into a subshell and using single output
|
||||
# redirect
|
||||
# * various variable quoting patterns
|
||||
# * possible ineffective echo commands
|
||||
flags: "-ignore SC2129 -ignore SC2086 -ignore SC2046 -ignore 2116"
|
||||
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
# --- Publish steps ----------------------------------------------------------------------------------
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
|
||||
Publish-Binary:
|
||||
needs: [Test-Suite-Trusted, Source-Code-Linting, Website-Linting, SetEnv]
|
||||
needs: [Test-Suite-Trusted, Linting, Website-Linting, SetEnv]
|
||||
environment: ${{ needs.SetEnv.outputs.environment }}
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Publish Binary
|
||||
uses: ./.github/actions/publish-binary
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
version: ${{ needs.SetEnv.outputs.version }}
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
rudderstack_write_key: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}
|
||||
rudderstack_data_plane_url: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}
|
||||
fetch-depth: 0 # needed to pull changelog
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- name: Decide goreleaser release mode
|
||||
shell: bash
|
||||
run: |
|
||||
if test '${{ github.ref }}' = "refs/heads/main"; then
|
||||
echo "grflags=--snapshot" >> $GITHUB_ENV
|
||||
else
|
||||
echo "grflags=" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
- name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@v4
|
||||
with:
|
||||
version: latest
|
||||
args: release --rm-dist --timeout 500m --parallelism 1 ${{ env.grflags }}
|
||||
workdir: src
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
RUDDERSTACK_CORSO_WRITE_KEY: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}
|
||||
RUDDERSTACK_CORSO_DATA_PLANE_URL: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}
|
||||
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
|
||||
|
||||
- name: Upload darwin arm64
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: corso_Darwin_arm64
|
||||
path: src/dist/corso_darwin_arm64/corso
|
||||
|
||||
- name: Upload linux arm64
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: corso_Linux_arm64
|
||||
path: src/dist/corso_linux_arm64/corso
|
||||
|
||||
- name: Upload darwin amd64
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: corso_Darwin_amd64
|
||||
path: src/dist/corso_darwin_amd64_v1/corso
|
||||
|
||||
- name: Upload linux amd64
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: corso_Linux_amd64
|
||||
path: src/dist/corso_linux_amd64_v1/corso
|
||||
|
||||
- name: Upload windows amd64
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: corso_Windows_amd64
|
||||
path: src/dist/corso_windows_amd64_v1/corso.exe
|
||||
|
||||
Publish-Image:
|
||||
needs: [Test-Suite-Trusted, Source-Code-Linting, Website-Linting, SetEnv]
|
||||
needs: [Test-Suite-Trusted, Linting, Website-Linting, SetEnv]
|
||||
environment: ${{ needs.SetEnv.outputs.environment }}
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
@ -589,17 +520,17 @@ jobs:
|
||||
env:
|
||||
PLATFORMS: linux/amd64,linux/arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# Setup buildx
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
# retrieve credentials for ghcr.io
|
||||
- name: Login to Github Packages
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@ -607,7 +538,7 @@ jobs:
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
images: ${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
@ -617,7 +548,7 @@ jobs:
|
||||
|
||||
# deploy the image
|
||||
- name: Build image and push to GitHub Container Registry
|
||||
uses: docker/build-push-action@v5
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
context: .
|
||||
file: ./build/Dockerfile
|
||||
@ -670,9 +601,9 @@ jobs:
|
||||
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
|
||||
steps:
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Validate amd64 container images
|
||||
run: |
|
||||
@ -721,13 +652,13 @@ jobs:
|
||||
./corso.exe --version 2>&1 | grep -E "version: ${{ env.CORSO_VERSION }}$"
|
||||
|
||||
Publish-Website-Test:
|
||||
needs: [Test-Suite-Trusted, Source-Code-Linting, Website-Linting, SetEnv]
|
||||
needs: [Test-Suite-Trusted, Linting, Website-Linting, SetEnv]
|
||||
environment: ${{ needs.SetEnv.outputs.environment }}
|
||||
runs-on: ubuntu-latest
|
||||
if: github.ref == 'refs/heads/main'
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4 # need to checkout to make the action available
|
||||
- uses: actions/checkout@v3 # need to checkout to make the action available
|
||||
|
||||
- name: Publish website
|
||||
uses: ./.github/actions/publish-website
|
||||
@ -743,7 +674,7 @@ jobs:
|
||||
if: startsWith(github.ref, 'refs/tags/')
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4 # need to checkout to make the action available
|
||||
- uses: actions/checkout@v3 # need to checkout to make the action available
|
||||
|
||||
- name: Publish website
|
||||
uses: ./.github/actions/publish-website
|
||||
|
||||
44
.github/workflows/ci_test_cleanup.yml
vendored
44
.github/workflows/ci_test_cleanup.yml
vendored
@ -1,6 +1,5 @@
|
||||
name: CI Test Cleanup
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
# every half hour
|
||||
- cron: "*/30 * * * *"
|
||||
@ -12,15 +11,18 @@ jobs:
|
||||
continue-on-error: true
|
||||
strategy:
|
||||
matrix:
|
||||
user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, ""]
|
||||
user: [ CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, EXT_SDK_TEST_USER_ID, '' ]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# sets the maximum time to now-30m.
|
||||
# CI test have a 20 minute timeout.
|
||||
# CI test have a 10 minute timeout.
|
||||
# At 20 minutes ago, we should be safe from conflicts.
|
||||
# The additional 10 minutes is just to be good citizens.
|
||||
- name: Set purge boundary
|
||||
run: echo "HALF_HOUR_AGO=$(date -d '30 minutes ago' -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
|
||||
run: |
|
||||
echo "HALF_HOUR_AGO=$(date -d '30 minutes ago' -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
|
||||
|
||||
- name: Purge CI-Produced Folders for Users
|
||||
uses: ./.github/actions/purge-m365-data
|
||||
@ -33,16 +35,6 @@ jobs:
|
||||
azure-tenant-id: ${{ secrets.TENANT_ID }}
|
||||
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
|
||||
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
|
||||
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
|
||||
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
|
||||
tenant-domain: ${{ vars.TENANT_DOMAIN }}
|
||||
|
||||
- name: Notify failure in teams
|
||||
if: failure()
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: "[CORSO FAILED] ${{ vars[matrix.user] }} CI Cleanup"
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
|
||||
Test-Site-Data-Cleanup:
|
||||
environment: Testing
|
||||
@ -50,15 +42,18 @@ jobs:
|
||||
continue-on-error: true
|
||||
strategy:
|
||||
matrix:
|
||||
site: [CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL]
|
||||
site: [ CORSO_M365_TEST_SITE_URL, EXT_SDK_TEST_SITE_URL ]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# sets the maximum time to now-30m.
|
||||
# CI test have a 20 minute timeout.
|
||||
# CI test have a 10 minute timeout.
|
||||
# At 20 minutes ago, we should be safe from conflicts.
|
||||
# The additional 10 minutes is just to be good citizens.
|
||||
- name: Set purge boundary
|
||||
run: echo "HALF_HOUR_AGO=$(date -d '30 minutes ago' -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
|
||||
run: |
|
||||
echo "HALF_HOUR_AGO=$(date -d '30 minutes ago' -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
|
||||
|
||||
- name: Purge CI-Produced Folders for Sites
|
||||
uses: ./.github/actions/purge-m365-data
|
||||
@ -66,20 +61,9 @@ jobs:
|
||||
site: ${{ vars[matrix.site] }}
|
||||
folder-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }}
|
||||
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
|
||||
library-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }}
|
||||
older-than: ${{ env.HALF_HOUR_AGO }}
|
||||
azure-client-id: ${{ secrets.CLIENT_ID }}
|
||||
azure-client-secret: ${{ secrets.CLIENT_SECRET }}
|
||||
azure-tenant-id: ${{ secrets.TENANT_ID }}
|
||||
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
|
||||
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
|
||||
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
|
||||
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
|
||||
tenant-domain: ${{ vars.TENANT_DOMAIN }}
|
||||
|
||||
- name: Notify failure in teams
|
||||
if: failure()
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: "[CORSO FAILED] ${{ vars[matrix.site] }} CI Cleanup"
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
|
||||
33
.github/workflows/load_test.yml
vendored
33
.github/workflows/load_test.yml
vendored
@ -1,8 +1,10 @@
|
||||
name: Nightly Load Testing
|
||||
on:
|
||||
schedule:
|
||||
# every day at 03:59 GMT (roughly 8pm PST)
|
||||
- cron: "59 3 * * *"
|
||||
# every day at 01:59 (01:59am) UTC
|
||||
# - cron: "59 1 * * *"
|
||||
# temp, for testing: every 4 hours
|
||||
- cron: "0 */4 * * *"
|
||||
|
||||
permissions:
|
||||
# required to retrieve AWS credentials
|
||||
@ -18,20 +20,16 @@ jobs:
|
||||
Load-Tests:
|
||||
environment: Load Testing
|
||||
runs-on: ubuntu-latest
|
||||
# Skipping load testing for now. They need some love to get up and
|
||||
# running properly, and it's better to not fight for resources with
|
||||
# tests that are guaranteed to fail.
|
||||
if: false
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build the otel-daemon
|
||||
run: make build-otel-daemon
|
||||
|
||||
# TODO: write logs to a file in src/testlog for archival
|
||||
# TODO: write logs to a file in src/test_results for archival
|
||||
- name: Run the otel-daemon
|
||||
run: make otel-daemon
|
||||
|
||||
@ -40,7 +38,7 @@ jobs:
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- run: mkdir ${{ github.workspace }}/testlog
|
||||
- run: mkdir test_results
|
||||
|
||||
# Install gotestfmt
|
||||
- name: Set up gotestfmt
|
||||
@ -55,7 +53,7 @@ jobs:
|
||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
|
||||
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
|
||||
CORSO_LOAD_TESTS: true
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/testlog/run-load.log
|
||||
CORSO_LOG_FILE: ./src/test_results/testlogging.log
|
||||
CORSO_M365_LOAD_TEST_USER_ID: ${{ secrets.CORSO_M365_LOAD_TEST_USER_ID }}
|
||||
CORSO_M365_LOAD_TEST_ORG_USERS: ${{ secrets.CORSO_M365_LOAD_TEST_ORG_USERS }}
|
||||
CORSO_PASSPHRASE: ${{ secrets.CORSO_PASSPHRASE }}
|
||||
@ -73,9 +71,9 @@ jobs:
|
||||
-memprofile=mem.prof \
|
||||
-mutexprofile=mutex.prof \
|
||||
-trace=trace.out \
|
||||
-outputdir=testlog \
|
||||
-outputdir=test_results \
|
||||
./pkg/repository/repository_load_test.go \
|
||||
2>&1 | tee ${{ github.workspace }}/testlog/gotest-load.log | gotestfmt -hide successful-tests
|
||||
2>&1 | tee ./test_results/goloadtest.log | gotestfmt -hide successful-tests
|
||||
|
||||
# generate new entries to roll into the next load test
|
||||
# only runs if the test was successful
|
||||
@ -107,10 +105,10 @@ jobs:
|
||||
# package all artifacts for later review
|
||||
- name: Upload Log, Profilers, Traces
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: load-test-profiling
|
||||
path: ${{ github.workspace }}/testlog/*
|
||||
path: src/test_results/*
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
@ -120,7 +118,7 @@ jobs:
|
||||
outputs:
|
||||
matrix: ${{ steps.build.outputs.matrix }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
- id: build
|
||||
run: |
|
||||
u=$(echo "${{ secrets.CORSO_M365_LOAD_TEST_ORG_USERS }}" | sed 's/\[/["/g' | sed 's/\]/"]/g' | sed 's/|/","/g')
|
||||
@ -140,7 +138,7 @@ jobs:
|
||||
matrix:
|
||||
user: [ CORSO_M365_LOAD_TEST_USER_ID, '' ]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set folder boundary datetime
|
||||
run: |
|
||||
echo "NOW=$(date -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
|
||||
@ -155,6 +153,3 @@ jobs:
|
||||
azure-tenant-id: ${{ secrets.TENANT_ID }}
|
||||
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
|
||||
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
|
||||
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
|
||||
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
|
||||
tenant-domain: ${{ vars.TENANT_DOMAIN }}
|
||||
|
||||
396
.github/workflows/longevity_test.yml
vendored
396
.github/workflows/longevity_test.yml
vendored
@ -1,396 +0,0 @@
|
||||
name: Longevity Testing
|
||||
on:
|
||||
schedule:
|
||||
# Run every day at 04:00 GMT (roughly 8pm PST)
|
||||
- cron: "0 4 * * *"
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
user:
|
||||
description: "User to run longevity test on"
|
||||
|
||||
permissions:
|
||||
# required to retrieve AWS credentials
|
||||
id-token: write
|
||||
contents: write
|
||||
|
||||
# cancel currently running jobs if a new version of the branch is pushed
|
||||
concurrency:
|
||||
group: longevity_testing-${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
SetM365App:
|
||||
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
|
||||
|
||||
Longevity-Tests:
|
||||
needs: [SetM365App]
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
# Need these in the local env so that corso can read them
|
||||
AZURE_CLIENT_ID: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
|
||||
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
|
||||
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
|
||||
# re-used values
|
||||
CORSO_LOG_DIR: ${{ github.workspace }}/src/testlog
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-longevity.log
|
||||
RESTORE_DEST_PFX: Corso_Test_Longevity_
|
||||
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
|
||||
PREFIX: "longevity"
|
||||
|
||||
# Options for retention.
|
||||
RETENTION_MODE: GOVERNANCE
|
||||
# Time to retain blobs for in hours.
|
||||
RETENTION_DURATION: 216
|
||||
defaults:
|
||||
run:
|
||||
working-directory: src
|
||||
|
||||
############################################################################
|
||||
# setup
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # needed to get latest tag
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
with:
|
||||
go-version-file: src/go.mod
|
||||
|
||||
- run: |
|
||||
go build -o longevity-test ./cmd/longevity_test
|
||||
go build -o s3checker ./cmd/s3checker
|
||||
|
||||
- name: Get version string
|
||||
id: version
|
||||
run: |
|
||||
echo version=$(git describe --tags --abbrev=0) | tee -a $GITHUB_OUTPUT
|
||||
|
||||
# Checkout the .github directory at the original branch's ref so we have a
|
||||
# stable view of the actions.
|
||||
- name: Code Checkout
|
||||
working-directory: ${{ github.workspace }}
|
||||
run: |
|
||||
git checkout ${{ steps.version.outputs.version }}
|
||||
git checkout ${{ github.ref }} -- .github
|
||||
|
||||
- run: go build -o corso
|
||||
timeout-minutes: 10
|
||||
|
||||
- run: mkdir ${CORSO_LOG_DIR}
|
||||
|
||||
# Use shorter-lived credentials obtained from assume-role since these
|
||||
# runs haven't been taking long.
|
||||
- name: Configure AWS credentials from Test account
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
timeout-minutes: 10
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
|
||||
role-session-name: integration-testing
|
||||
aws-region: us-east-1
|
||||
|
||||
##########################################################################
|
||||
# Repository commands
|
||||
|
||||
- name: Version Test
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
./corso --version | grep -c 'Corso version:'
|
||||
|
||||
- name: Repo init test
|
||||
id: repo-init
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\nRepo init test\n" >> ${{ env.CORSO_LOG_FILE }}
|
||||
./corso repo init s3 \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--retention-mode $(echo "${{ env.RETENTION_MODE }}" | tr '[:upper:]' '[:lower:]') \
|
||||
--retention-duration "${{ env.RETENTION_DURATION }}h" \
|
||||
--extend-retention \
|
||||
--prefix ${{ env.PREFIX }} \
|
||||
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
|
||||
|
||||
if grep -q 'Failed to' ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
|
||||
then
|
||||
echo "Repo could not be initialized"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Repo connect test
|
||||
timeout-minutes: 10
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\nRepo connect test\n" >> ${{ env.CORSO_LOG_FILE }}
|
||||
./corso repo connect s3 \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--prefix ${{ env.PREFIX }} \
|
||||
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-connect.log
|
||||
|
||||
if ! grep -q 'Connected to S3 bucket' ${{ env.CORSO_LOG_DIR }}/gotest-repo-connect.log
|
||||
then
|
||||
echo "Repo could not be connected"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
##########################################################################
|
||||
# Exchange
|
||||
|
||||
- name: Backup exchange test
|
||||
id: exchange-test
|
||||
timeout-minutes: 30
|
||||
run: |
|
||||
echo -e "\nBackup Exchange test\n" >> ${CORSO_LOG_FILE}
|
||||
./corso backup create exchange \
|
||||
--no-stats \
|
||||
--mailbox "${TEST_USER}" \
|
||||
--hide-progress \
|
||||
--json \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/backup_exchange.txt
|
||||
|
||||
resultjson=$(sed -e '1,/Completed Backups/d' ${{ env.CORSO_LOG_DIR }}/backup_exchange.txt )
|
||||
|
||||
if [[ $( echo $resultjson | jq -r '.[0] | .stats.errorCount') -ne 0 ]]; then
|
||||
echo "backup was not successful"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
data=$( echo $resultjson | jq -r '.[0] | .id' )
|
||||
echo result=$data >> $GITHUB_OUTPUT
|
||||
|
||||
##########################################################################
|
||||
# Onedrive
|
||||
|
||||
- name: Backup onedrive test
|
||||
id: onedrive-test
|
||||
timeout-minutes: 30
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\nBackup OneDrive test\n" >> ${CORSO_LOG_FILE}
|
||||
./corso backup create onedrive \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--user "${TEST_USER}" \
|
||||
--json \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/backup_onedrive.txt
|
||||
|
||||
resultjson=$(sed -e '1,/Completed Backups/d' ${{ env.CORSO_LOG_DIR }}/backup_onedrive.txt )
|
||||
|
||||
if [[ $( echo $resultjson | jq -r '.[0] | .stats.errorCount') -ne 0 ]]; then
|
||||
echo "backup was not successful"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
data=$( echo $resultjson | jq -r '.[0] | .id' )
|
||||
echo result=$data >> $GITHUB_OUTPUT
|
||||
|
||||
##########################################################################
|
||||
# Sharepoint test
|
||||
- name: Backup sharepoint test
|
||||
id: sharepoint-test
|
||||
timeout-minutes: 30
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\nBackup SharePoint test\n" >> ${CORSO_LOG_FILE}
|
||||
|
||||
./corso backup create sharepoint \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" \
|
||||
--json \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/backup_sharepoint.txt
|
||||
|
||||
resultjson=$(sed -e '1,/Completed Backups/d' ${{ env.CORSO_LOG_DIR }}/backup_sharepoint.txt )
|
||||
|
||||
if [[ $( echo $resultjson | jq -r '.[0] | .stats.errorCount') -ne 0 ]]; then
|
||||
echo "backup was not successful"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
data=$( echo $resultjson | jq -r '.[0] | .id' )
|
||||
echo result=$data >> $GITHUB_OUTPUT
|
||||
|
||||
##########################################################################
|
||||
# Backup Exchange Deletion test
|
||||
- name: Backup Delete exchange test
|
||||
id: delete-exchange-test
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
SERVICE: "exchange"
|
||||
DELETION_DAYS: 10
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\nDelete Backup exchange \n" >> ${CORSO_LOG_FILE}
|
||||
./longevity-test
|
||||
|
||||
##########################################################################
|
||||
# Backup Onedrive Deletion test
|
||||
- name: Backup Delete onedrive test
|
||||
id: delete-onedrive-test
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
SERVICE: "onedrive"
|
||||
DELETION_DAYS: 10
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\nDelete Backup onedrive \n" >> ${CORSO_LOG_FILE}
|
||||
./longevity-test
|
||||
|
||||
##########################################################################
|
||||
# Backup Sharepoint Deletion test
|
||||
- name: Backup Delete Sharepoint test
|
||||
id: delete-sharepoint-test
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
SERVICE: "sharepoint"
|
||||
DELETION_DAYS: 5
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\nDelete Backup sharepoint \n" >> ${CORSO_LOG_FILE}
|
||||
./longevity-test
|
||||
|
||||
##########################################################################
|
||||
# Export OneDrive Test
|
||||
- name: OneDrive Export test
|
||||
timeout-minutes: 30
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\Export OneDrive test\n" >> ${CORSO_LOG_FILE}
|
||||
|
||||
echo -e "\Export OneDrive test - first entry\n" >> ${CORSO_LOG_FILE}
|
||||
./corso backup list onedrive 2>/dev/null | tail -n+2 | head -n1 | awk '{print $1}' |
|
||||
while read -r line; do
|
||||
./corso export onedrive \
|
||||
"/tmp/corso-export--$line" \
|
||||
--no-stats \
|
||||
--backup "$line" \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_onedrive_first.txt
|
||||
done
|
||||
|
||||
echo -e "\Export OneDrive test - last entry\n" >> ${CORSO_LOG_FILE}
|
||||
./corso backup list onedrive 2>/dev/null | tail -n1 | awk '{print $1}' |
|
||||
while read -r line; do
|
||||
./corso export onedrive \
|
||||
"/tmp/corso-export--$line" \
|
||||
--no-stats \
|
||||
--backup "$line" \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_onedrive_last.txt
|
||||
done
|
||||
|
||||
##########################################################################
|
||||
# Export SharePoint Test
|
||||
- name: SharePoint Export test
|
||||
timeout-minutes: 30
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\Export SharePoint test\n" >> ${CORSO_LOG_FILE}
|
||||
|
||||
echo -e "\Export SharePoint test - first entry\n" >> ${CORSO_LOG_FILE}
|
||||
./corso backup list sharepoint 2>/dev/null | tail -n+2 | head -n1 | awk '{print $1}' |
|
||||
while read -r line; do
|
||||
./corso export sharepoint \
|
||||
"/tmp/corso-export--$line" \
|
||||
--no-stats \
|
||||
--backup "$line" \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_sharepoint_first.txt
|
||||
done
|
||||
|
||||
echo -e "\Export SharePoint test - last entry\n" >> ${CORSO_LOG_FILE}
|
||||
./corso backup list sharepoint 2>/dev/null | tail -n1 | awk '{print $1}' |
|
||||
while read -r line; do
|
||||
./corso export sharepoint \
|
||||
"/tmp/corso-export--$line" \
|
||||
--no-stats \
|
||||
--backup "$line" \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_sharepoint_last.txt
|
||||
done
|
||||
|
||||
##########################################################################
|
||||
# Maintenance test
|
||||
- name: Maintenance test Daily
|
||||
id: maintenance-test-daily
|
||||
timeout-minutes: 30
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo -e "\n Maintenance test Daily\n" >> ${CORSO_LOG_FILE}
|
||||
|
||||
# Run with the force flag so it doesn't fail if the github runner
|
||||
# hostname isn't what's expected. This is only safe because we can
|
||||
# guarantee only one runner will be executing maintenance at a time.
|
||||
./corso repo maintenance --mode metadata \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--force \
|
||||
--json \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt
|
||||
|
||||
- name: Maintenance test Weekly
|
||||
id: maintenance-test-weekly
|
||||
timeout-minutes: 30
|
||||
run: |
|
||||
if [[ $(date +%A) == "Saturday" ]]; then
|
||||
set -euo pipefail
|
||||
echo -e "\n Maintenance test Weekly\n" >> ${CORSO_LOG_FILE}
|
||||
|
||||
./corso repo maintenance --mode complete \
|
||||
--no-stats \
|
||||
--hide-progress \
|
||||
--force \
|
||||
--json \
|
||||
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_complete.txt
|
||||
|
||||
# TODO(ashmrtn): We can also check that non-current versions of
|
||||
# blobs don't have their retention extended if we want.
|
||||
#
|
||||
# Assuming no failures during full maintenance, current versions of
|
||||
# objects with the below versions should have retention times that
|
||||
# are roughly (now + RETENTION_DURATION). We can explicitly check
|
||||
# for this, but leave a little breathing room since maintenance may
|
||||
# take some time to run.
|
||||
#
|
||||
# If we pick a live-retention-duration that is too small then we'll
|
||||
# start seeing failures. The check for live objects is a lower bound
|
||||
# check.
|
||||
#
|
||||
# Blob prefixes are as follows:
|
||||
# - kopia.blobcfg - repo-wide config
|
||||
# - kopia.repository - repo-wide config
|
||||
# - p - data pack blobs (i.e. file data)
|
||||
# - q - metadata pack blobs (i.e. manifests, directory listings, etc)
|
||||
# - x - index blobs
|
||||
./s3checker \
|
||||
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
|
||||
--prefix ${{ env.PREFIX }} \
|
||||
--retention-mode ${{ env.RETENTION_MODE }} \
|
||||
--live-retention-duration "$((${{ env.RETENTION_DURATION }}-1))h" \
|
||||
--object-prefix "kopia.blobcfg" \
|
||||
--object-prefix "kopia.repository" \
|
||||
--object-prefix "p" \
|
||||
--object-prefix "q" \
|
||||
--object-prefix "x"
|
||||
fi
|
||||
|
||||
##########################################################################
|
||||
# Logging & Notifications
|
||||
# Upload the original go test output as an artifact for later review.
|
||||
- name: Upload test log
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: longevity-test-log
|
||||
path: src/testlog/*
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
- name: Notify failure in teams
|
||||
if: failure()
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: "[CORSO FAILED] Longevity Test"
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
112
.github/workflows/nightly_test.yml
vendored
112
.github/workflows/nightly_test.yml
vendored
@ -3,8 +3,8 @@ on:
|
||||
workflow_dispatch:
|
||||
|
||||
schedule:
|
||||
# Run every day at 04:00 GMT (roughly 8pm PST)
|
||||
- cron: "0 4 * * *"
|
||||
# Run every day at 0 minutes and 0 hours (midnight GMT)
|
||||
- cron: "0 0 * * *"
|
||||
|
||||
permissions:
|
||||
# required to retrieve AWS credentials
|
||||
@ -30,7 +30,7 @@ jobs:
|
||||
run:
|
||||
working-directory: src
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# single setup and sum cache handling here.
|
||||
# the results will cascade onto both testing and linting.
|
||||
@ -41,7 +41,72 @@ jobs:
|
||||
|
||||
# SetM365App will decide which M365 app to use for this CI run
|
||||
SetM365App:
|
||||
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
client_app_slot: ${{ steps.roundrobin.outputs.CLIENT_APP_SLOT }}
|
||||
client_id_env: ${{ steps.roundrobin.outputs.CLIENT_ID_ENV }}
|
||||
client_secret_env: ${{ steps.roundrobin.outputs.CLIENT_SECRET_ENV }}
|
||||
steps:
|
||||
- name: Figure out which client id to use
|
||||
id: roundrobin
|
||||
run: |
|
||||
slot=$((GITHUB_RUN_NUMBER % 4))
|
||||
echo "CLIENT_APP_SLOT=$slot" >> $GITHUB_OUTPUT
|
||||
|
||||
case $slot in
|
||||
|
||||
0)
|
||||
echo "CLIENT_ID_ENV=CLIENT_ID" >> $GITHUB_OUTPUT
|
||||
echo "CLIENT_SECRET_ENV=CLIENT_SECRET" >> $GITHUB_OUTPUT
|
||||
;;
|
||||
1)
|
||||
echo "CLIENT_ID_ENV=CLIENT_ID_2" >> $GITHUB_OUTPUT
|
||||
echo "CLIENT_SECRET_ENV=CLIENT_SECRET_2" >> $GITHUB_OUTPUT
|
||||
;;
|
||||
2)
|
||||
echo "CLIENT_ID_ENV=CLIENT_ID_3" >> $GITHUB_OUTPUT
|
||||
echo "CLIENT_SECRET_ENV=CLIENT_SECRET_3" >> $GITHUB_OUTPUT
|
||||
;;
|
||||
3)
|
||||
echo "CLIENT_ID_ENV=CLIENT_ID_4" >> $GITHUB_OUTPUT
|
||||
echo "CLIENT_SECRET_ENV=CLIENT_SECRET_4" >> $GITHUB_OUTPUT
|
||||
;;
|
||||
esac
|
||||
|
||||
SetEnv:
|
||||
environment: Testing
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
environment: ${{ steps.environment.outputs.environment }}
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
website-bucket: ${{ steps.website-bucket.outputs.website-bucket }}
|
||||
website-cfid: ${{ steps.website-cfid.outputs.website-cfid }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Figure out environment
|
||||
id: environment
|
||||
run: |
|
||||
echo "environment=Testing" | tee -a $GITHUB_OUTPUT
|
||||
|
||||
- name: Get version string
|
||||
id: version
|
||||
run: |
|
||||
if ${{ startsWith(github.ref, 'refs/tags/') }}; then
|
||||
echo "version=$(git describe --exact-match --tags $(git rev-parse HEAD))" | tee -a $GITHUB_OUTPUT
|
||||
else
|
||||
echo "version=$(echo unreleased-$(git rev-parse --short HEAD))" | tee -a $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Get bucket name for website
|
||||
id: website-bucket
|
||||
run: |
|
||||
if ${{ startsWith(github.ref, 'refs/tags/') }}; then
|
||||
echo "website-bucket=corsobackup.io" | tee -a $GITHUB_OUTPUT
|
||||
else
|
||||
echo "website-bucket=test-corso-docs" | tee -a $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
# --- Nightly Testing -------------------------------------------------------------------
|
||||
@ -60,7 +125,7 @@ jobs:
|
||||
AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
|
||||
CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Setup Golang with cache
|
||||
uses: magnetikonline/action-golang-cache@v4
|
||||
@ -73,50 +138,43 @@ jobs:
|
||||
- name: Set up gotestfmt
|
||||
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
|
||||
|
||||
# AWS creds
|
||||
- name: Configure AWS credentials from Test account
|
||||
uses: aws-actions/configure-aws-credentials@v2
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
|
||||
role-session-name: integration-testing
|
||||
aws-region: us-east-1
|
||||
|
||||
# run the tests
|
||||
- name: Integration Tests
|
||||
env:
|
||||
# Use long-lived AWS credentials.
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_SECRET }}
|
||||
AZURE_CLIENT_ID: ${{ secrets[env.AZURE_CLIENT_ID_NAME] }}
|
||||
AZURE_CLIENT_SECRET: ${{ secrets[env.AZURE_CLIENT_SECRET_NAME] }}
|
||||
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
|
||||
CORSO_NIGHTLY_TESTS: true
|
||||
CORSO_E2E_TESTS: true
|
||||
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
|
||||
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
|
||||
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
|
||||
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-nightly.log
|
||||
CORSO_LOG_FILE: ./src/testlog/testlogging.log
|
||||
LOG_GRAPH_REQUESTS: true
|
||||
S3_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
go test \
|
||||
-tags testing \
|
||||
-json \
|
||||
-v \
|
||||
-failfast \
|
||||
-p 1 \
|
||||
-timeout 2h \
|
||||
./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests
|
||||
|
||||
##########################################################################################################################################
|
||||
|
||||
# Logging & Notifications
|
||||
-timeout 15m \
|
||||
./... 2>&1 | tee ./testlog/gotest.log | gotestfmt -hide successful-tests
|
||||
|
||||
# Upload the original go test output as an artifact for later review.
|
||||
- name: Upload test log
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: nightly-test-log
|
||||
name: test-log
|
||||
path: src/testlog/*
|
||||
if-no-files-found: error
|
||||
retention-days: 14
|
||||
|
||||
- name: Notify failure in teams
|
||||
if: failure()
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: "[COROS FAILED] Nightly Checks"
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
|
||||
4
.github/workflows/ok-to-test.yml
vendored
4
.github/workflows/ok-to-test.yml
vendored
@ -13,13 +13,13 @@ jobs:
|
||||
steps:
|
||||
- name: Generate token
|
||||
id: generate_token
|
||||
uses: tibdex/github-app-token@v2
|
||||
uses: tibdex/github-app-token@v1
|
||||
with:
|
||||
app_id: ${{ secrets.APP_ID }}
|
||||
private_key: ${{ secrets.PRIVATE_KEY }}
|
||||
|
||||
- name: Slash Command Dispatch
|
||||
uses: peter-evans/slash-command-dispatch@v4
|
||||
uses: peter-evans/slash-command-dispatch@v3
|
||||
env:
|
||||
TOKEN: ${{ steps.generate_token.outputs.token }}
|
||||
with:
|
||||
|
||||
889
.github/workflows/sanity-test.yaml
vendored
889
.github/workflows/sanity-test.yaml
vendored
File diff suppressed because it is too large
Load Diff
23
.github/workflows/testnotification.yml
vendored
23
.github/workflows/testnotification.yml
vendored
@ -1,23 +0,0 @@
|
||||
name: Manually Test Teams Action
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
msg:
|
||||
description: 'Message to send:'
|
||||
required: true
|
||||
default: 'This is a test message'
|
||||
|
||||
jobs:
|
||||
notify:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Send notification
|
||||
uses: ./.github/actions/teams-message
|
||||
with:
|
||||
msg: ${{ github.event.inputs.msg }}
|
||||
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
|
||||
9
.github/workflows/website-publish.yml
vendored
9
.github/workflows/website-publish.yml
vendored
@ -21,14 +21,15 @@ jobs:
|
||||
outputs:
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0 # needed to get latest tag
|
||||
|
||||
- name: Get version string
|
||||
id: version
|
||||
run: |
|
||||
echo version=$(git describe --tags --abbrev=0) | tee -a $GITHUB_OUTPUT
|
||||
echo "set-output name=version::$(git describe --tags --abbrev=0)"
|
||||
echo "::set-output name=version::$(git describe --tags --abbrev=0)"
|
||||
|
||||
# ----------------------------------------------------------------------------------------------------
|
||||
# --- Website Linting -----------------------------------------------------------------------------------
|
||||
@ -40,7 +41,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Lint Website
|
||||
uses: ./.github/actions/website-linting
|
||||
@ -56,7 +57,7 @@ jobs:
|
||||
working-directory: website
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4 # need to checkout to make the action available
|
||||
- uses: actions/checkout@v3 # need to checkout to make the action available
|
||||
|
||||
- name: Publish website
|
||||
uses: ./.github/actions/publish-website
|
||||
|
||||
26
.github/workflows/weekly_cleanup.yml
vendored
Normal file
26
.github/workflows/weekly_cleanup.yml
vendored
Normal file
@ -0,0 +1,26 @@
|
||||
name: Weekly S3 Test Bucket Cleanup
|
||||
on:
|
||||
schedule:
|
||||
# every saturday at 23:59 (11:59pm)
|
||||
- cron: "59 23 * * 6"
|
||||
|
||||
permissions:
|
||||
# required to retrieve AWS credentials
|
||||
id-token: write
|
||||
|
||||
jobs:
|
||||
S3-Test-Cleanup:
|
||||
runs-on: ubuntu-latest
|
||||
environment: Testing
|
||||
|
||||
steps:
|
||||
- name: Configure AWS credentials from Test account
|
||||
uses: aws-actions/configure-aws-credentials@v2
|
||||
with:
|
||||
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
|
||||
role-session-name: integration-testing
|
||||
aws-region: us-east-1
|
||||
|
||||
- name: Delete all files in the test bucket
|
||||
run: |
|
||||
aws s3 rm s3://${{ secrets.CI_TESTS_S3_BUCKET }} --recursive
|
||||
6
.gitignore
vendored
6
.gitignore
vendored
@ -8,8 +8,6 @@
|
||||
|
||||
# Test binary, built with `go test -c`
|
||||
*.test
|
||||
test_results/
|
||||
testlog/
|
||||
|
||||
# Output of the go coverage tool, specifically when used with LiteIDE
|
||||
*.out
|
||||
@ -23,9 +21,11 @@ testlog/
|
||||
.corso.toml
|
||||
|
||||
# Logging
|
||||
*.log
|
||||
.corso.log
|
||||
|
||||
# Build directories
|
||||
/bin
|
||||
/docker/bin
|
||||
/website/dist
|
||||
|
||||
*/test_results/**
|
||||
269
CHANGELOG.md
269
CHANGELOG.md
@ -6,259 +6,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [Unreleased] (beta)
|
||||
### Fixed
|
||||
- Handle the case where an email or event cannot be retrieved from Exchange due to an `ErrorCorruptData` error. Corso will skip over the item but report it in the backup summary.
|
||||
- Emails attached within other emails are now correctly exported
|
||||
- Gracefully handle email and post attachments without name when exporting to eml
|
||||
- Use correct timezone for event start and end times in Exchange exports (helps fix issues in relative recurrence patterns)
|
||||
- Fixed an issue causing exports dealing with calendar data to have high memory usage
|
||||
|
||||
## [v0.19.0] (beta) - 2024-02-06
|
||||
|
||||
### Added
|
||||
- Events can now be exported from Exchange backups as .ics files.
|
||||
- Update repo init configuration to reduce the total number of GET requests sent
|
||||
to the object store when using corso. This affects repos that have many
|
||||
backups created in them per day the most.
|
||||
- Feature Preview: Corso now supports backup, export & restore of SharePoint lists. Lists backup can be initiated using `corso backup create sharepoint --site <site-url> --data lists`.
|
||||
- Group mailbox(aka conversations) backup and export support is now officially available. Group mailbox posts can be exported as `.eml` files.
|
||||
|
||||
### Fixed
|
||||
- Retry transient 400 "invalidRequest" errors during onedrive & sharepoint backup.
|
||||
- Backup attachments associated with group mailbox items.
|
||||
- Groups and Teams backups no longer fail when a resource has no display name.
|
||||
- Contacts in-place restore failed if the restore destination was empty.
|
||||
- Link shares with external users are now backed up and restored as expected
|
||||
- Ensure persistent repo config is populated on repo init if repo init failed partway through during the previous init attempt.
|
||||
|
||||
### Changed
|
||||
- When running `backup details` on an empty backup returns a more helpful error message.
|
||||
- Backup List additionally shows the data category for each backup.
|
||||
- Remove hidden `--succeed-if-exists` flag for repo init. Repo init will now succeed without error if run on an existing repo with the same passphrase.
|
||||
|
||||
### Known issues
|
||||
- Backing up a group mailbox item may fail if it has a very large number of attachments (500+).
|
||||
- Event description for exchange exports might look slightly different for certain events.
|
||||
- Exchange in-place restore may restore items in well-known folders to different folders if the user has well-known folder names change based on locale and has updated the locale since the backup was created.
|
||||
- In-place Exchange contacts restore will merge items in folders named "Contacts" or "contacts" into the default folder.
|
||||
- External users with access through shared links will not receive these links as they are not sent via email during restore.
|
||||
- Graph API has limited support for certain column types such as `location`, `hyperlink/picture`, and `metadata`. Restoring SharePoint list items containing these columns will result in differences compared to the original items.
|
||||
- SharePoint list item attachments are not available due to graph API limitations.
|
||||
- Group mailbox restore is not supported due to limited Graph API support for creating mailbox items.
|
||||
- Due to Graph API limitations, any group mailbox items present in subfolders other than Inbox aren't backed up.
|
||||
|
||||
## [v0.18.0] (beta) - 2024-01-02
|
||||
|
||||
### Fixed
|
||||
- Handle the case where an email cannot be retrieved from Exchange due to an `ErrorInvalidRecipients` error. In
|
||||
this case, Corso will skip over the item but report this in the backup summary.
|
||||
- Fix `ErrorItemNotFound` errors when restoring emails with multiple attachments.
|
||||
- Avoid Graph SDK `Requests must contain extension changes exclusively.` errors by removing server-populated field from restored event items.
|
||||
- Improve Group mailbox(conversations) backup performance by only downloading new items or items with modified content.
|
||||
- Handle cases where Exchange backup stored invalid JSON blobs if there were special characters in the user content. These would result in errors during restore.
|
||||
|
||||
### Known issues
|
||||
- Restoring OneDrive, SharePoint, or Teams & Groups items shared with external users while the tenant or site is configured to not allow sharing with external users will not restore permissions.
|
||||
|
||||
### Added
|
||||
- Contacts can now be exported from Exchange backups as .vcf files
|
||||
|
||||
## [v0.17.0] (beta) - 2023-12-11
|
||||
|
||||
### Changed
|
||||
- Memory optimizations for large scale OneDrive and Sharepoint backups.
|
||||
|
||||
### Fixed
|
||||
- Resolved a possible deadlock when backing up Teams Channel Messages.
|
||||
- Fixed an attachment download failure(ErrorTooManyObjectsOpened) during exchange backup.
|
||||
|
||||
## [v0.16.0] (beta) - 2023-11-28
|
||||
|
||||
### Added
|
||||
- Export support for emails in exchange backups as `.eml` files.
|
||||
- More colorful and informational cli display.
|
||||
|
||||
### Changed
|
||||
- The file extension in Teams messages exports has switched to json to match the content type.
|
||||
- SDK consumption of the /services/m365 package has shifted from independent functions to a client-based api.
|
||||
- SDK consumers can now configure the /services/m365 graph api client configuration when constructing a new m365 client.
|
||||
- Dynamic api rate limiting allows small-scale Exchange backups to complete more quickly.
|
||||
- Kopia's local config files now uses unique filenames that match Corso configurations. This can protect concurrent Corso operations from mistakenly clobbering storage configs during runtime.
|
||||
|
||||
### Fixed
|
||||
- Handle OneDrive folders being deleted and recreated midway through a backup.
|
||||
- Automatically re-run a full delta query on incremental if the prior backup is found to have malformed prior-state information.
|
||||
- Retry drive item permission downloads during long-running backups after the jwt token expires and refreshes.
|
||||
- Retry item downloads during connection timeouts.
|
||||
|
||||
## [v0.15.0] (beta) - 2023-10-31
|
||||
|
||||
### Added
|
||||
- Added `corso repo update-passphrase` command to update the passphrase of an existing Corso repository
|
||||
- Added Subject and Message preview to channel messages detail entries
|
||||
|
||||
### Fixed
|
||||
- SharePoint backup would fail if any site had an empty display name
|
||||
- Fix a bug with exports hanging post completion
|
||||
- Handle 503 errors in nested OneDrive packages
|
||||
|
||||
### Changed
|
||||
- Item Details formatting in Groups and Teams backups
|
||||
|
||||
## [v0.14.2] (beta) - 2023-10-17
|
||||
|
||||
### Added
|
||||
- Skips graph calls for expired item download URLs.
|
||||
- Export operation now shows the stats at the end of the run
|
||||
|
||||
### Fixed
|
||||
- Catch and report cases where a protected resource is locked out of access. SDK consumers have a new errs sentinel that allows them to check for this case.
|
||||
- Fix a case where missing item LastModifiedTimes could cause incremental backups to fail.
|
||||
- Email size metadata was incorrectly set to the size of the last attachment. Emails will now correctly report the size of the mail content plus the size of all attachments.
|
||||
- Improves the filtering capabilities for Groups restore and backup
|
||||
- Improve check to skip OneNote files that cannot be downloaded.
|
||||
- Fix Groups backup for non Team groups
|
||||
|
||||
### Changed
|
||||
- Groups restore now expects the site whose backup we should restore
|
||||
|
||||
## [v0.14.0] (beta) - 2023-10-09
|
||||
|
||||
### Added
|
||||
- Enables local or network-attached storage for Corso repositories.
|
||||
- Reduce backup runtime for OneDrive and SharePoint incremental backups that have no file changes.
|
||||
- Increase Exchange backup performance by lazily fetching data only for items whose content changed.
|
||||
- Added `--backups` flag to delete multiple backups in `corso backup delete` command.
|
||||
- Backup now includes all sites that belongs to a team, not just the root site.
|
||||
|
||||
### Fixed
|
||||
- Teams Channels that cannot support delta tokens (those without messages) fall back to non-delta enumeration and no longer fail a backup.
|
||||
|
||||
### Known issues
|
||||
- Restoring the data into a different Group from the one it was backed up from is not currently supported
|
||||
|
||||
### Other
|
||||
- Groups and Teams service support is still in feature preview
|
||||
|
||||
## [v0.13.0] (beta) - 2023-09-18
|
||||
|
||||
### Added
|
||||
- Groups and Teams service support available as a feature preview! Channel messages and Files are now available for backup and restore in the CLI: `corso backup create groups --group '*'`
|
||||
- The cli commands for "groups" and "teams" can be used interchangeably, and will operate on the same backup data.
|
||||
- New permissions are required to backup Channel messages. See the [Corso Documentation](https://corsobackup.io/docs/setup/m365-access/#configure-required-permissions) for complete details.
|
||||
Even though Channel message restoration is not available, message write permissions are included to cover future integration.
|
||||
- This is a feature preview, and may be subject to breaking changes based on feedback and testing.
|
||||
|
||||
### Changed
|
||||
- Switched to Go 1.21
|
||||
- SharePoint exported libraries are now exported with a `Libraries` prefix.
|
||||
|
||||
### Fixed
|
||||
- Contacts backups no longer slices root-folder data if outlook is set to languages other than english.
|
||||
- Failed backups if the --disable-incrementals flag was passed when there was a valid merge base under some conditions.
|
||||
|
||||
## [v0.12.0] (beta) - 2023-08-29
|
||||
|
||||
### Added
|
||||
- Added `export` command to export data from OneDrive and SharePoint backups as individual files or as a single zip file.
|
||||
- Restore commands now accept an optional resource override with the `--to-resource` flag. This allows restores to recreate backup data within different mailboxes, sites, and users.
|
||||
- Improve `--mask-sensitive-data` logging mode.
|
||||
- Reliability: Handle connection cancellation and resets observed when backing up or restoring large data sets.
|
||||
- Reliability: Recover from Graph SDK panics when the Graph API returns incomplete responses.
|
||||
- Performance: Improve backup delete performance by batching multiple storage operations into a single operation.
|
||||
|
||||
### Fixed
|
||||
- SharePoint document libraries deleted after the last backup can now be restored.
|
||||
- Restore requires the protected resource to have access to the service being restored.
|
||||
- SharePoint data from multiple document libraries are not merged in exports
|
||||
- `corso backup delete` was not removing the backup details data associated with that snapshot
|
||||
- Fix OneDrive restores could fail with a concurrent map write error
|
||||
- Fix backup list displaying backups that had errors
|
||||
- Fix OneDrive backup could fail if item was deleted during backup
|
||||
- Exchange backups would fail attempting to use delta tokens even if the user was over quota
|
||||
|
||||
|
||||
## [v0.11.1] (beta) - 2023-07-20
|
||||
|
||||
### Fixed
|
||||
- Allow repo connect to succeed when a `corso.toml` file was not provided but configuration is specified using environment variables and flags.
|
||||
|
||||
## [v0.11.0] (beta) - 2023-07-18
|
||||
|
||||
### Added
|
||||
- Drive items backup and restore link shares
|
||||
- Restore commands now accept an optional top-level restore destination with the `--destination` flag. Setting the destination to '/' will restore items back into their original location.
|
||||
- Restore commands can specify item collision behavior. Options are Skip (default), Replace, and Copy.
|
||||
- Introduced repository maintenance commands to help optimize the repository as well as unreferenced data.
|
||||
|
||||
### Fixed
|
||||
- Return a ServiceNotEnabled error when a tenant has no active SharePoint license.
|
||||
- Added retries for http/2 stream connection failures when downloading large item content.
|
||||
- SharePoint document libraries that were deleted after the last backup can now be restored.
|
||||
|
||||
### Known issues
|
||||
- If a link share is created for an item with inheritance disabled
|
||||
(via the Graph API), the link shares restored in that item will
|
||||
not be inheritable by children
|
||||
- Link shares with password protection can't be restored
|
||||
|
||||
## [v0.10.0] (beta) - 2023-06-26
|
||||
|
||||
### Added
|
||||
- Exceptions and cancellations for recurring events are now backed up and restored
|
||||
- Introduced a URL cache for OneDrive that helps reduce Graph API calls for long running (>1hr) backups
|
||||
- Improve incremental backup behavior by leveraging information from incomplete backups
|
||||
- Improve restore performance and memory use for Exchange and OneDrive
|
||||
|
||||
### Fixed
|
||||
- Handle OLE conversion errors when trying to fetch attachments
|
||||
- Fix uploading large attachments for emails and calendar
|
||||
- Fixed high memory use in OneDrive backup related to logging
|
||||
- Return a ServiceNotEnabled error when a tenant has no active SharePoint license.
|
||||
|
||||
### Changed
|
||||
- Switched to Go 1.20
|
||||
|
||||
## [v0.9.0] (beta) - 2023-06-05
|
||||
|
||||
### Added
|
||||
- Added ProtectedResourceName to the backup list json output. ProtectedResourceName holds either a UPN or a WebURL, depending on the resource type.
|
||||
- Rework base selection logic for incremental backups so it's more likely to find a valid base.
|
||||
- Improve OneDrive restore performance by paralleling item restores
|
||||
|
||||
### Fixed
|
||||
- Fix Exchange folder cache population error when parent folder isn't found.
|
||||
- Fix Exchange backup issue caused by incorrect json serialization
|
||||
- Fix issues with details model containing duplicate entry for api consumers
|
||||
|
||||
### Changed
|
||||
- Do not display all the items that we restored at the end if there are more than 15. You can override this with `--verbose`.
|
||||
|
||||
## [v0.8.0] (beta) - 2023-05-15
|
||||
|
||||
### Added
|
||||
- Released the --mask-sensitive-data flag, which will automatically obscure private data in logs.
|
||||
- Added `--disable-delta` flag to disable delta based backups for Exchange
|
||||
- Permission support for SharePoint libraries.
|
||||
|
||||
### Fixed
|
||||
- Graph requests now automatically retry in case of a Bad Gateway or Gateway Timeout.
|
||||
- POST Retries following certain status codes (500, 502, 504) will re-use the post body instead of retrying with a no-content request.
|
||||
- Fix nil pointer exception when running an incremental backup on SharePoint where the base backup used an older index data format.
|
||||
- --user and --mailbox flags have been removed from CLI examples for details and restore commands (they were already not supported, this only updates the docs).
|
||||
- Improve restore time on large restores by optimizing how items are loaded from the remote repository.
|
||||
- Remove exchange item filtering based on m365 item ID via the CLI.
|
||||
- OneDrive backups no longer include a user's non-default drives.
|
||||
- OneDrive and SharePoint file downloads will properly redirect from 3xx responses.
|
||||
- Refined oneDrive rate limiter controls to reduce throttling errors.
|
||||
- Fix handling of duplicate folders at the same hierarchy level in Exchange. Duplicate folders will be merged during restore operations.
|
||||
- Fix backup for mailboxes that has used up all their storage quota
|
||||
- Restored folders no longer appear in the Restore results. Only restored items will be displayed.
|
||||
|
||||
### Known Issues
|
||||
- Restore operations will merge duplicate Exchange folders at the same hierarchy level into a single folder.
|
||||
- Sharepoint SiteGroup permissions are not restored.
|
||||
- SharePoint document library data can't be restored after the library has been deleted.
|
||||
|
||||
## [v0.7.0] (beta) - 2023-05-02
|
||||
|
||||
@ -287,7 +34,6 @@ this case, Corso will skip over the item but report this in the backup summary.
|
||||
- The CORSO_LOG_FILE env is appropriately utilized if no --log-file flag is provided.
|
||||
- Fixed Exchange events progress output to show calendar names instead of IDs.
|
||||
- Fixed reporting no items match if restoring or listing details on an older Exchange backup and filtering by folder.
|
||||
- Fix backup for mailboxes that has used up all their storage quota
|
||||
|
||||
### Known Issues
|
||||
- Restoring a OneDrive or SharePoint file with the same name as a file with that name as its M365 ID may restore both items.
|
||||
@ -502,20 +248,7 @@ this case, Corso will skip over the item but report this in the backup summary.
|
||||
- Miscellaneous
|
||||
- Optional usage statistics reporting ([RM-35](https://github.com/alcionai/corso-roadmap/issues/35))
|
||||
|
||||
[Unreleased]: https://github.com/alcionai/corso/compare/v0.19.0...HEAD
|
||||
[v0.19.0]: https://github.com/alcionai/corso/compare/v0.18.0...v0.19.0
|
||||
[v0.18.0]: https://github.com/alcionai/corso/compare/v0.17.0...v0.18.0
|
||||
[v0.17.0]: https://github.com/alcionai/corso/compare/v0.16.0...v0.17.0
|
||||
[v0.16.0]: https://github.com/alcionai/corso/compare/v0.15.0...v0.16.0
|
||||
[v0.15.0]: https://github.com/alcionai/corso/compare/v0.14.0...v0.15.0
|
||||
[v0.14.0]: https://github.com/alcionai/corso/compare/v0.13.0...v0.14.0
|
||||
[v0.13.0]: https://github.com/alcionai/corso/compare/v0.12.0...v0.13.0
|
||||
[v0.12.0]: https://github.com/alcionai/corso/compare/v0.11.1...v0.12.0
|
||||
[v0.11.1]: https://github.com/alcionai/corso/compare/v0.11.0...v0.11.1
|
||||
[v0.11.0]: https://github.com/alcionai/corso/compare/v0.10.0...v0.11.0
|
||||
[v0.10.0]: https://github.com/alcionai/corso/compare/v0.9.0...v0.10.0
|
||||
[v0.9.0]: https://github.com/alcionai/corso/compare/v0.8.1...v0.9.0
|
||||
[v0.8.0]: https://github.com/alcionai/corso/compare/v0.7.1...v0.8.0
|
||||
[Unreleased]: https://github.com/alcionai/corso/compare/v0.7.0...HEAD
|
||||
[v0.7.0]: https://github.com/alcionai/corso/compare/v0.6.1...v0.7.0
|
||||
[v0.6.1]: https://github.com/alcionai/corso/compare/v0.5.0...v0.6.1
|
||||
[v0.5.0]: https://github.com/alcionai/corso/compare/v0.4.0...v0.5.0
|
||||
|
||||
@ -1,6 +1,3 @@
|
||||
> [!NOTE]
|
||||
> **The Corso project is no longer actively maintained and has been archived**.
|
||||
|
||||
<p align="center">
|
||||
<img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
|
||||
</p>
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
FROM golang:1.21-alpine as builder
|
||||
FROM golang:1.19-alpine as builder
|
||||
|
||||
WORKDIR /go/src/app
|
||||
COPY src .
|
||||
@ -6,7 +6,7 @@ COPY src .
|
||||
ARG CORSO_BUILD_LDFLAGS=""
|
||||
RUN go build -o corso -ldflags "$CORSO_BUILD_LDFLAGS"
|
||||
|
||||
FROM alpine:3
|
||||
FROM alpine:3.17
|
||||
|
||||
LABEL org.opencontainers.image.title="Corso"
|
||||
LABEL org.opencontainers.image.description="Free, Secure, and Open-Source Backup for Microsoft 365"
|
||||
|
||||
@ -12,7 +12,7 @@ usage() {
|
||||
}
|
||||
|
||||
ROOT=$(dirname $(dirname $(readlink -f $0)))
|
||||
GOVER=1.21 # go version
|
||||
GOVER=1.19 # go version
|
||||
CORSO_BUILD_CACHE="/tmp/.corsobuild" # shared persistent cache
|
||||
|
||||
# Figure out os and architecture
|
||||
|
||||
24
src/.gitignore
vendored
24
src/.gitignore
vendored
@ -1,26 +1,2 @@
|
||||
dist/
|
||||
corso
|
||||
|
||||
# Test binary, built with `go test -c`
|
||||
*.test
|
||||
test_results/
|
||||
testlog/
|
||||
|
||||
# Output of the go coverage tool, specifically when used with LiteIDE
|
||||
*.out
|
||||
|
||||
# IDE
|
||||
.vscode
|
||||
*.swp
|
||||
|
||||
# Standard configuration file names
|
||||
.corso_test.toml
|
||||
.corso.toml
|
||||
|
||||
# Logging
|
||||
*.log
|
||||
|
||||
# Build directories
|
||||
/bin
|
||||
/docker/bin
|
||||
/website/dist
|
||||
|
||||
@ -3,21 +3,14 @@ run:
|
||||
|
||||
linters:
|
||||
enable:
|
||||
- errcheck
|
||||
- exhaustive
|
||||
- forbidigo
|
||||
- gci
|
||||
- gofmt
|
||||
- gofumpt
|
||||
- gosimple
|
||||
- govet
|
||||
- ineffassign
|
||||
- errcheck
|
||||
- forbidigo
|
||||
- lll
|
||||
- loggercheck
|
||||
- misspell
|
||||
- revive
|
||||
- unused
|
||||
- usestdlibvars
|
||||
- wsl
|
||||
|
||||
disable:
|
||||
@ -26,11 +19,6 @@ linters:
|
||||
- staticcheck
|
||||
|
||||
linters-settings:
|
||||
exhaustive:
|
||||
check:
|
||||
- switch
|
||||
default-signifies-exhaustive: false
|
||||
explicit-exhaustive-switch: true
|
||||
gci:
|
||||
sections:
|
||||
- standard
|
||||
@ -55,13 +43,10 @@ linters-settings:
|
||||
# String formatting should be avoided in favor of structured errors (ie: err.With(k, v)).
|
||||
- '(errors|fmt)\.(New|Stack|Wrap|Error)f?\((# error handling should use clues pkg)?'
|
||||
# Avoid Warn-level logging in favor of Info or Error.
|
||||
- 'Warnw?f?\((# logging should use Info or Error)?'
|
||||
- 'Warn[wf]?\((# logging should use Info or Error)?'
|
||||
# Prefer suite.Run(name, func() {}) for subtests as testify has it instead
|
||||
# of suite.T().Run(name, func(t *testing.T) {}).
|
||||
- '(T\(\)|\st[a-zA-Z0-9]*)\.Run(# prefer testify suite.Run(name, func()) )?'
|
||||
# Prefer packing ctx values into the error using NewWC, WrapWC, or StackWC
|
||||
# instead of New|Stack|Wrap().WithClues(ctx)
|
||||
- 'WithClues(# prefer the builderWC variant - ex: StackWC(ctx, ...))?'
|
||||
lll:
|
||||
line-length: 120
|
||||
revive:
|
||||
@ -96,7 +81,6 @@ linters-settings:
|
||||
- name: time-equal
|
||||
- name: time-naming
|
||||
- name: unreachable-code
|
||||
- name: use-any
|
||||
- name: useless-break
|
||||
- name: var-declaration
|
||||
- name: var-naming
|
||||
@ -134,13 +118,7 @@ issues:
|
||||
linters:
|
||||
- forbidigo
|
||||
text: "context.(Background|TODO)"
|
||||
- path: internal/m365/collection/drive/collections_test.go
|
||||
linters:
|
||||
- lll
|
||||
- path: internal/m365/collection/drive/collections_tree_test.go
|
||||
linters:
|
||||
- lll
|
||||
- path: pkg/services/m365/api/graph/betasdk
|
||||
- path: internal/connector/graph/betasdk
|
||||
linters:
|
||||
- wsl
|
||||
- revive
|
||||
|
||||
@ -18,15 +18,13 @@ builds:
|
||||
- -X 'github.com/alcionai/corso/src/internal/events.RudderStackDataPlaneURL={{.Env.RUDDERSTACK_CORSO_DATA_PLANE_URL}}'
|
||||
|
||||
archives:
|
||||
# this name template makes the OS and Arch compatible with the results of uname.
|
||||
- name_template: >-
|
||||
{{ .ProjectName }}_
|
||||
{{- .Tag }}_
|
||||
{{- title .Os }}_
|
||||
{{- if eq .Arch "amd64" }}x86_64
|
||||
{{- else if eq .Arch "386" }}i386
|
||||
{{- else }}{{ .Arch }}{{ end }}
|
||||
{{- if .Arm }}v{{ .Arm }}{{ end }}
|
||||
- name_template: "{{ .ProjectName }}_{{ .Tag }}_{{ .Os }}_{{ .Arch }}"
|
||||
replacements:
|
||||
darwin: Darwin
|
||||
linux: Linux
|
||||
windows: Windows
|
||||
386: i386
|
||||
amd64: x86_64
|
||||
format: tar.gz
|
||||
format_overrides:
|
||||
- goos: windows
|
||||
|
||||
@ -1,56 +0,0 @@
|
||||
{{- /*gotype: github.com/gotesttools/gotestfmt/v2/parser.Package*/ -}}
|
||||
{{- /*
|
||||
This template contains the format for an individual package. GitHub actions does not currently support nested groups so
|
||||
we are creating a stylized header for each package.
|
||||
*/ -}}
|
||||
{{- $settings := .Settings -}}
|
||||
{{- if and (or (not $settings.HideSuccessfulPackages) (ne .Result "PASS")) (or (not $settings.HideEmptyPackages) (ne .Result "SKIP") (ne (len .TestCases) 0)) -}}
|
||||
{{- if eq .Result "PASS" -}}
|
||||
{{ "\033" }}[0;32m
|
||||
{{- else if eq .Result "SKIP" -}}
|
||||
{{ "\033" }}[0;33m
|
||||
{{- else -}}
|
||||
{{ "\033" }}[0;31m
|
||||
{{- end -}}
|
||||
📦 {{ .Name }}{{- "\033" }}[0m
|
||||
{{- with .Coverage -}}
|
||||
{{- "\033" -}}[0;37m ({{ . }}% coverage){{- "\033" -}}[0m
|
||||
{{- end -}}
|
||||
{{- " " -}}({{- .Duration -}})
|
||||
{{- "\n" -}}
|
||||
{{- with .Reason -}}
|
||||
{{- " " -}}🛑 {{ . -}}{{- "\n" -}}
|
||||
{{- end -}}
|
||||
{{- with .Output -}}
|
||||
{{- . -}}{{- "\n" -}}
|
||||
{{- end -}}
|
||||
{{- with .TestCases -}}
|
||||
{{- range . -}}
|
||||
{{- if or (not $settings.HideSuccessfulTests) (ne .Result "PASS") -}}
|
||||
::group::
|
||||
{{- if eq .Result "PASS" -}}
|
||||
{{ "\033" }}[0;32m✅
|
||||
{{- else if eq .Result "SKIP" -}}
|
||||
{{ "\033" }}[0;33m🚧
|
||||
{{- else -}}
|
||||
{{ "\033" }}[0;31m❌
|
||||
{{- end -}}
|
||||
{{ " " }}{{- .Name -}}
|
||||
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
|
||||
{{- with .Coverage -}}
|
||||
, coverage: {{ . }}%
|
||||
{{- end -}})
|
||||
{{- "\033" -}}[0m
|
||||
{{- "\n" -}}
|
||||
|
||||
{{- with .Output -}}
|
||||
{{- formatTestOutput . $settings -}}
|
||||
{{- "\n" -}}
|
||||
{{- end -}}
|
||||
|
||||
::endgroup::{{- "\n" -}}
|
||||
{{- end -}}
|
||||
{{- end -}}
|
||||
{{- end -}}
|
||||
{{- "\n" -}}
|
||||
{{- end -}}
|
||||
10
src/Makefile
10
src/Makefile
@ -1,5 +1,5 @@
|
||||
# This must match the version defined in .github/workflows/lint.yaml.
|
||||
WANTED_LINT_VERSION := 1.54.2
|
||||
WANTED_LINT_VERSION := 1.52.2
|
||||
LINT_VERSION := $(shell golangci-lint version | cut -d' ' -f4)
|
||||
HAS_LINT := $(shell which golangci-lint)
|
||||
|
||||
@ -18,7 +18,7 @@ lint: check-lint-version
|
||||
fmt:
|
||||
gofumpt -w .
|
||||
goimports -w .
|
||||
gci write --skip-generated -s 'standard' -s 'default' -s 'prefix(github.com/alcionai/corso)' .
|
||||
gci write --skip-generated -s 'standard,default,prefix(github.com/alcionai/corso)' .
|
||||
|
||||
check-lint-version: check-lint
|
||||
@if [ "$(LINT_VERSION)" != "$(WANTED_LINT_VERSION)" ]; then \
|
||||
@ -37,7 +37,7 @@ build-otel-daemon:
|
||||
docker build -t xray-daemon .
|
||||
|
||||
otel-daemon:
|
||||
results_dir=$$PWD/testlog; \
|
||||
results_dir=$$PWD/test_results; \
|
||||
cd ./testfiles/otel_daemon; \
|
||||
docker run \
|
||||
-d \
|
||||
@ -51,7 +51,7 @@ otel-daemon:
|
||||
--log-level debug
|
||||
|
||||
local-daemon:
|
||||
results_dir=$$PWD/testlog; \
|
||||
results_dir=$$PWD/test_results; \
|
||||
cd ./testfiles/otel_daemon; \
|
||||
docker run \
|
||||
--attach STDOUT \
|
||||
@ -78,7 +78,7 @@ load-test:
|
||||
-memprofile=mem.prof \
|
||||
-mutexprofile=mutex.prof \
|
||||
-trace=trace.out \
|
||||
-outputdir=testlog \
|
||||
-outputdir=test_results \
|
||||
./pkg/repository/loadtest/repository_load_test.go
|
||||
|
||||
getM365:
|
||||
|
||||
@ -9,17 +9,11 @@ import (
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
. "github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/internal/common/color"
|
||||
"github.com/alcionai/corso/src/internal/common/idname"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/internal/observe"
|
||||
"github.com/alcionai/corso/src/pkg/backup"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
"github.com/alcionai/corso/src/pkg/errs/core"
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/repository"
|
||||
@ -27,8 +21,6 @@ import (
|
||||
"github.com/alcionai/corso/src/pkg/store"
|
||||
)
|
||||
|
||||
var ErrEmptyBackup = clues.New("no items in backup")
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// adding commands to cobra
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -44,8 +36,6 @@ var serviceCommands = []func(cmd *cobra.Command) *cobra.Command{
|
||||
addExchangeCommands,
|
||||
addOneDriveCommands,
|
||||
addSharePointCommands,
|
||||
addGroupsCommands,
|
||||
addTeamsChatsCommands,
|
||||
}
|
||||
|
||||
// AddCommands attaches all `corso backup * *` commands to the parent.
|
||||
@ -58,13 +48,43 @@ func AddCommands(cmd *cobra.Command) {
|
||||
backupC.AddCommand(subCommand)
|
||||
|
||||
for _, addBackupTo := range serviceCommands {
|
||||
sc := addBackupTo(subCommand)
|
||||
flags.AddAllProviderFlags(sc)
|
||||
flags.AddAllStorageFlags(sc)
|
||||
addBackupTo(subCommand)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// common flags and flag attachers for commands
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// list output filter flags
|
||||
var (
|
||||
failedItemsFN = "failed-items"
|
||||
listFailedItems string
|
||||
skippedItemsFN = "skipped-items"
|
||||
listSkippedItems string
|
||||
recoveredErrorsFN = "recovered-errors"
|
||||
listRecoveredErrors string
|
||||
)
|
||||
|
||||
func addFailedItemsFN(cmd *cobra.Command) {
|
||||
cmd.Flags().StringVar(
|
||||
&listFailedItems, failedItemsFN, "show",
|
||||
"Toggles showing or hiding the list of items that failed.")
|
||||
}
|
||||
|
||||
func addSkippedItemsFN(cmd *cobra.Command) {
|
||||
cmd.Flags().StringVar(
|
||||
&listSkippedItems, skippedItemsFN, "show",
|
||||
"Toggles showing or hiding the list of items that were skipped.")
|
||||
}
|
||||
|
||||
func addRecoveredErrorsFN(cmd *cobra.Command) {
|
||||
cmd.Flags().StringVar(
|
||||
&listRecoveredErrors, recoveredErrorsFN, "show",
|
||||
"Toggles showing or hiding the list of errors which corso recovered from.")
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// commands
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -170,10 +190,10 @@ func handleDeleteCmd(cmd *cobra.Command, args []string) error {
|
||||
// standard set of selector behavior that we want used in the cli
|
||||
var defaultSelectorConfig = selectors.Config{OnlyMatchItemNames: true}
|
||||
|
||||
func genericCreateCommand(
|
||||
func runBackups(
|
||||
ctx context.Context,
|
||||
r repository.Repositoryer,
|
||||
serviceName string,
|
||||
r repository.Repository,
|
||||
serviceName, resourceOwnerType string,
|
||||
selectorSet []selectors.Selector,
|
||||
ins idname.Cacher,
|
||||
) error {
|
||||
@ -187,50 +207,21 @@ func genericCreateCommand(
|
||||
|
||||
var (
|
||||
owner = discSel.DiscreteOwner
|
||||
ictx = clues.Add(ctx, "resource_owner_selected", owner)
|
||||
ictx = clues.Add(ctx, "resource_owner", owner)
|
||||
)
|
||||
|
||||
logger.Ctx(ictx).Infof("setting up backup")
|
||||
|
||||
bo, err := r.NewBackupWithLookup(ictx, discSel, ins)
|
||||
if err != nil {
|
||||
cerr := clues.WrapWC(ictx, err, owner)
|
||||
errs = append(errs, cerr)
|
||||
|
||||
Errf(
|
||||
ictx,
|
||||
"%s\nCause: %s",
|
||||
"Unable to initiate backup",
|
||||
err.Error())
|
||||
errs = append(errs, clues.Wrap(err, owner).WithClues(ictx))
|
||||
Errf(ictx, "%v\n", err)
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
ictx = clues.Add(
|
||||
ictx,
|
||||
"resource_owner_id", bo.ResourceOwner.ID(),
|
||||
"resource_owner_name", clues.Hide(bo.ResourceOwner.Name()))
|
||||
|
||||
logger.Ctx(ictx).Infof("running backup")
|
||||
|
||||
err = bo.Run(ictx)
|
||||
if err != nil {
|
||||
if errors.Is(err, core.ErrServiceNotEnabled) {
|
||||
logger.Ctx(ictx).Infow("service not enabled",
|
||||
"resource_owner_id", bo.ResourceOwner.ID(),
|
||||
"service", serviceName)
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
cerr := clues.Wrap(err, owner)
|
||||
errs = append(errs, cerr)
|
||||
|
||||
Errf(
|
||||
ictx,
|
||||
"%s\nCause: %s",
|
||||
"Unable to complete backup",
|
||||
err.Error())
|
||||
errs = append(errs, clues.Wrap(err, owner).WithClues(ictx))
|
||||
Errf(ictx, "%v\n", err)
|
||||
|
||||
continue
|
||||
}
|
||||
@ -238,10 +229,10 @@ func genericCreateCommand(
|
||||
bIDs = append(bIDs, string(bo.Results.BackupID))
|
||||
|
||||
if !DisplayJSONFormat() {
|
||||
Infof(ictx, fmt.Sprintf("Backup complete %s %s", observe.Bullet, color.BlueOutput(bo.Results.BackupID)))
|
||||
printBackupStats(ictx, r, string(bo.Results.BackupID))
|
||||
Infof(ctx, "Done\n")
|
||||
printBackupStats(ctx, r, string(bo.Results.BackupID))
|
||||
} else {
|
||||
Infof(ictx, "Backup complete - ID: %v\n", bo.Results.BackupID)
|
||||
Infof(ctx, "Done - ID: %v\n", bo.Results.BackupID)
|
||||
}
|
||||
}
|
||||
|
||||
@ -250,10 +241,8 @@ func genericCreateCommand(
|
||||
return Only(ctx, clues.Wrap(berrs.Failure(), "Unable to retrieve backup results from storage"))
|
||||
}
|
||||
|
||||
if len(bups) > 0 {
|
||||
Info(ctx, "\nCompleted Backups:")
|
||||
Info(ctx, "Completed Backups:")
|
||||
backup.PrintAll(ctx, bups)
|
||||
}
|
||||
|
||||
if len(errs) > 0 {
|
||||
sb := fmt.Sprintf("%d of %d backups failed:\n", len(errs), len(selectorSet))
|
||||
@ -271,53 +260,35 @@ func genericCreateCommand(
|
||||
|
||||
// genericDeleteCommand is a helper function that all services can use
|
||||
// for the removal of an entry from the repository
|
||||
func genericDeleteCommand(
|
||||
cmd *cobra.Command,
|
||||
pst path.ServiceType,
|
||||
designation string,
|
||||
bID, args []string,
|
||||
) error {
|
||||
func genericDeleteCommand(cmd *cobra.Command, bID, designation string, args []string) error {
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
ctx := clues.Add(cmd.Context(), "delete_backup_id", bID)
|
||||
|
||||
r, _, err := utils.GetAccountAndConnect(ctx, cmd, pst)
|
||||
r, _, err := utils.GetAccountAndConnect(ctx)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
defer utils.CloseRepo(ctx, r)
|
||||
|
||||
if err := r.DeleteBackups(ctx, true, bID...); err != nil {
|
||||
return Only(ctx, clues.Wrap(err, fmt.Sprintf("Deleting backup %v", bID)))
|
||||
if err := r.DeleteBackup(ctx, bID); err != nil {
|
||||
return Only(ctx, clues.Wrap(err, "Deleting backup "+bID))
|
||||
}
|
||||
|
||||
Infof(ctx, "Deleted %s backup %v", designation, bID)
|
||||
Infof(ctx, "Deleted %s backup %s", designation, bID)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// genericListCommand is a helper function that all services can use
|
||||
// to display the backup IDs saved within the repository
|
||||
func genericListCommand(
|
||||
cmd *cobra.Command,
|
||||
bID string,
|
||||
service path.ServiceType,
|
||||
args []string,
|
||||
) error {
|
||||
func genericListCommand(cmd *cobra.Command, bID string, service path.ServiceType, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
r, _, err := utils.GetAccountAndConnect(ctx, cmd, service)
|
||||
r, _, err := utils.GetAccountAndConnect(ctx)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
@ -335,12 +306,7 @@ func genericListCommand(
|
||||
}
|
||||
|
||||
b.Print(ctx)
|
||||
fe.PrintItems(
|
||||
ctx,
|
||||
!ifShow(flags.ListAlertsFV),
|
||||
!ifShow(flags.FailedItemsFV),
|
||||
!ifShow(flags.ListSkippedItemsFV),
|
||||
!ifShow(flags.ListRecoveredErrorsFV))
|
||||
fe.PrintItems(ctx, !ifShow(listFailedItems), !ifShow(listSkippedItems), !ifShow(listRecoveredErrors))
|
||||
|
||||
return nil
|
||||
}
|
||||
@ -355,78 +321,16 @@ func genericListCommand(
|
||||
return nil
|
||||
}
|
||||
|
||||
func genericDetailsCommand(
|
||||
cmd *cobra.Command,
|
||||
backupID string,
|
||||
sel selectors.Selector,
|
||||
) (*details.Details, error) {
|
||||
ctx := cmd.Context()
|
||||
|
||||
r, rdao, err := utils.GetAccountAndConnect(ctx, cmd, path.OneDriveService)
|
||||
if err != nil {
|
||||
return nil, clues.Stack(err)
|
||||
}
|
||||
|
||||
defer utils.CloseRepo(ctx, r)
|
||||
|
||||
return genericDetailsCore(
|
||||
ctx,
|
||||
r,
|
||||
backupID,
|
||||
sel,
|
||||
rdao.Opts)
|
||||
}
|
||||
|
||||
func genericDetailsCore(
|
||||
ctx context.Context,
|
||||
bg repository.BackupGetter,
|
||||
backupID string,
|
||||
sel selectors.Selector,
|
||||
opts control.Options,
|
||||
) (*details.Details, error) {
|
||||
ctx = clues.Add(ctx, "backup_id", backupID)
|
||||
|
||||
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
|
||||
|
||||
d, _, errs := bg.GetBackupDetails(ctx, backupID)
|
||||
// TODO: log/track recoverable errors
|
||||
if errs.Failure() != nil {
|
||||
if errors.Is(errs.Failure(), data.ErrNotFound) {
|
||||
return nil, clues.New("no backup exists with the id " + backupID)
|
||||
}
|
||||
|
||||
return nil, clues.Wrap(errs.Failure(), "Failed to get backup details in the repository")
|
||||
}
|
||||
|
||||
if len(d.Entries) == 0 {
|
||||
return nil, ErrEmptyBackup
|
||||
}
|
||||
|
||||
if opts.SkipReduce {
|
||||
return d, nil
|
||||
}
|
||||
|
||||
d, err := sel.Reduce(ctx, d, errs)
|
||||
if err != nil {
|
||||
return nil, clues.Wrap(err, "filtering backup details to selection")
|
||||
}
|
||||
|
||||
return d, nil
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// helper funcs
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func ifShow(flag string) bool {
|
||||
return strings.ToLower(strings.TrimSpace(flag)) == "show"
|
||||
}
|
||||
|
||||
func printBackupStats(ctx context.Context, r repository.Repositoryer, bid string) {
|
||||
func printBackupStats(ctx context.Context, r repository.Repository, bid string) {
|
||||
b, err := r.Backup(ctx, bid)
|
||||
if err != nil {
|
||||
logger.CtxErr(ctx, err).Error("finding backup immediately after backup operation completion")
|
||||
}
|
||||
|
||||
b.ToPrintable().Stats.PrintProperties(ctx)
|
||||
b.ToPrintable().Stats.Print(ctx)
|
||||
Info(ctx, " ")
|
||||
}
|
||||
|
||||
@ -1,97 +0,0 @@
|
||||
package backup
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/utils/testdata"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
dtd "github.com/alcionai/corso/src/pkg/backup/details/testdata"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
)
|
||||
|
||||
type BackupUnitSuite struct {
|
||||
tester.Suite
|
||||
}
|
||||
|
||||
func TestBackupUnitSuite(t *testing.T) {
|
||||
suite.Run(t, &BackupUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||
}
|
||||
|
||||
func (suite *BackupUnitSuite) TestGenericDetailsCore() {
|
||||
t := suite.T()
|
||||
|
||||
expected := append(
|
||||
append(
|
||||
dtd.GetItemsForVersion(
|
||||
t,
|
||||
path.ExchangeService,
|
||||
path.EmailCategory,
|
||||
0,
|
||||
-1),
|
||||
dtd.GetItemsForVersion(
|
||||
t,
|
||||
path.ExchangeService,
|
||||
path.EventsCategory,
|
||||
0,
|
||||
-1)...),
|
||||
dtd.GetItemsForVersion(
|
||||
t,
|
||||
path.ExchangeService,
|
||||
path.ContactsCategory,
|
||||
0,
|
||||
-1)...)
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
bg := testdata.VersionedBackupGetter{
|
||||
Details: dtd.GetDetailsSetForVersion(t, 0),
|
||||
}
|
||||
|
||||
sel := selectors.NewExchangeBackup([]string{"user-id"})
|
||||
sel.Include(sel.AllData())
|
||||
|
||||
output, err := genericDetailsCore(
|
||||
ctx,
|
||||
bg,
|
||||
"backup-ID",
|
||||
sel.Selector,
|
||||
control.DefaultOptions())
|
||||
assert.NoError(t, err, clues.ToCore(err))
|
||||
assert.ElementsMatch(t, expected, output.Entries)
|
||||
}
|
||||
|
||||
func (suite *BackupUnitSuite) TestGenericDetailsCore_empty() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
bg := testdata.VersionedBackupGetter{
|
||||
Details: &details.Details{
|
||||
DetailsModel: details.DetailsModel{
|
||||
Entries: []details.Entry{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
sel := selectors.NewExchangeBackup([]string{"user-id"})
|
||||
sel.Include(sel.AllData())
|
||||
|
||||
_, err := genericDetailsCore(
|
||||
ctx,
|
||||
bg,
|
||||
"backup-ID",
|
||||
sel.Selector,
|
||||
control.DefaultOptions())
|
||||
require.Error(t, err, "has error")
|
||||
assert.ErrorIs(t, err, ErrEmptyBackup, clues.ToCore(err))
|
||||
}
|
||||
@ -1,14 +1,21 @@
|
||||
package backup
|
||||
|
||||
import (
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/cobra"
|
||||
"context"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/options"
|
||||
. "github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/repository"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
)
|
||||
|
||||
@ -24,8 +31,8 @@ const (
|
||||
|
||||
const (
|
||||
exchangeServiceCommand = "exchange"
|
||||
exchangeServiceCommandCreateUseSuffix = "--mailbox <email> | '" + flags.Wildcard + "'"
|
||||
exchangeServiceCommandDeleteUseSuffix = "--backups <backupId>"
|
||||
exchangeServiceCommandCreateUseSuffix = "--mailbox <email> | '" + utils.Wildcard + "'"
|
||||
exchangeServiceCommandDeleteUseSuffix = "--backup <backupId>"
|
||||
exchangeServiceCommandDetailsUseSuffix = "--backup <backupId>"
|
||||
)
|
||||
|
||||
@ -39,75 +46,81 @@ corso backup create exchange --mailbox alice@example.com,bob@example.com --data
|
||||
# Backup all Exchange data for all M365 users
|
||||
corso backup create exchange --mailbox '*'`
|
||||
|
||||
exchangeServiceCommandDeleteExamples = `# Delete Exchange backup with IDs 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
and 1234abcd-12ab-cd34-56de-1234abce
|
||||
corso backup delete exchange --backups 1234abcd-12ab-cd34-56de-1234abcd,1234abcd-12ab-cd34-56de-1234abce`
|
||||
exchangeServiceCommandDeleteExamples = `# Delete Exchange backup with ID 1234abcd-12ab-cd34-56de-1234abcd
|
||||
corso backup delete exchange --backup 1234abcd-12ab-cd34-56de-1234abcd`
|
||||
|
||||
exchangeServiceCommandDetailsExamples = `# Explore items in Alice's latest backup (1234abcd...)
|
||||
corso backup details exchange --backup 1234abcd-12ab-cd34-56de-1234abcd
|
||||
exchangeServiceCommandDetailsExamples = `# Explore Alice's items in backup 1234abcd-12ab-cd34-56de-1234abcd
|
||||
corso backup details exchange --backup 1234abcd-12ab-cd34-56de-1234abcd --mailbox alice@example.com
|
||||
|
||||
# Explore emails in the folder "Inbox" with subject containing "Hello world"
|
||||
# Explore Alice's emails with subject containing "Hello world" in folder "Inbox" from a specific backup
|
||||
corso backup details exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--email-subject "Hello world" --email-folder Inbox
|
||||
--mailbox alice@example.com --email-subject "Hello world" --email-folder Inbox
|
||||
|
||||
# Explore calendar events occurring after start of 2022
|
||||
# Explore Bobs's events occurring after start of 2022 from a specific backup
|
||||
corso backup details exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--event-starts-after 2022-01-01T00:00:00
|
||||
--mailbox bob@example.com --event-starts-after 2022-01-01T00:00:00
|
||||
|
||||
# Explore contacts named Andy
|
||||
# Explore Alice's contacts with name containing Andy from a specific backup
|
||||
corso backup details exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--contact-name Andy`
|
||||
--mailbox alice@example.com --contact-name Andy`
|
||||
)
|
||||
|
||||
// called by backup.go to map subcommands to provider-specific handling.
|
||||
func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var c *cobra.Command
|
||||
var (
|
||||
c *cobra.Command
|
||||
fs *pflag.FlagSet
|
||||
)
|
||||
|
||||
switch cmd.Use {
|
||||
case createCommand:
|
||||
c, _ = utils.AddCommand(cmd, exchangeCreateCmd())
|
||||
c, fs = utils.AddCommand(cmd, exchangeCreateCmd())
|
||||
fs.SortFlags = false
|
||||
|
||||
c.Use = c.Use + " " + exchangeServiceCommandCreateUseSuffix
|
||||
c.Example = exchangeServiceCommandCreateExamples
|
||||
|
||||
// Flags addition ordering should follow the order we want them to appear in help and docs:
|
||||
// More generic (ex: --user) and more frequently used flags take precedence.
|
||||
flags.AddMailBoxFlag(c)
|
||||
flags.AddDataFlag(c, []string{dataEmail, dataContacts, dataEvents}, false)
|
||||
flags.AddFetchParallelismFlag(c)
|
||||
flags.AddDisableDeltaFlag(c)
|
||||
flags.AddEnableImmutableIDFlag(c)
|
||||
flags.AddDeltaPageSizeFlag(c)
|
||||
flags.AddGenericBackupFlags(c)
|
||||
flags.AddDisableSlidingWindowLimiterFlag(c)
|
||||
utils.AddMailBoxFlag(c)
|
||||
utils.AddDataFlag(c, []string{dataEmail, dataContacts, dataEvents}, false)
|
||||
options.AddFetchParallelismFlag(c)
|
||||
options.AddFailFastFlag(c)
|
||||
options.AddDisableIncrementalsFlag(c)
|
||||
options.AddEnableImmutableIDFlag(c)
|
||||
options.AddDisableConcurrencyLimiterFlag(c)
|
||||
|
||||
case listCommand:
|
||||
c, _ = utils.AddCommand(cmd, exchangeListCmd())
|
||||
c, fs = utils.AddCommand(cmd, exchangeListCmd())
|
||||
fs.SortFlags = false
|
||||
|
||||
flags.AddBackupIDFlag(c, false)
|
||||
flags.AddAllBackupListFlags(c)
|
||||
utils.AddBackupIDFlag(c, false)
|
||||
addFailedItemsFN(c)
|
||||
addSkippedItemsFN(c)
|
||||
addRecoveredErrorsFN(c)
|
||||
|
||||
case detailsCommand:
|
||||
c, _ = utils.AddCommand(cmd, exchangeDetailsCmd())
|
||||
c, fs = utils.AddCommand(cmd, exchangeDetailsCmd())
|
||||
fs.SortFlags = false
|
||||
|
||||
c.Use = c.Use + " " + exchangeServiceCommandDetailsUseSuffix
|
||||
c.Example = exchangeServiceCommandDetailsExamples
|
||||
|
||||
flags.AddSkipReduceFlag(c)
|
||||
options.AddSkipReduceFlag(c)
|
||||
|
||||
// Flags addition ordering should follow the order we want them to appear in help and docs:
|
||||
// More generic (ex: --user) and more frequently used flags take precedence.
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddExchangeDetailsAndRestoreFlags(c, false)
|
||||
utils.AddBackupIDFlag(c, true)
|
||||
utils.AddExchangeDetailsAndRestoreFlags(c)
|
||||
|
||||
case deleteCommand:
|
||||
c, _ = utils.AddCommand(cmd, exchangeDeleteCmd())
|
||||
c, fs = utils.AddCommand(cmd, exchangeDeleteCmd())
|
||||
fs.SortFlags = false
|
||||
|
||||
c.Use = c.Use + " " + exchangeServiceCommandDeleteUseSuffix
|
||||
c.Example = exchangeServiceCommandDeleteExamples
|
||||
|
||||
flags.AddMultipleBackupIDsFlag(c, false)
|
||||
flags.AddBackupIDFlag(c, false)
|
||||
utils.AddBackupIDFlag(c, true)
|
||||
}
|
||||
|
||||
return c
|
||||
@ -135,32 +148,20 @@ func createExchangeCmd(cmd *cobra.Command, args []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := validateExchangeBackupCreateFlags(flags.UserFV, flags.CategoryDataFV); err != nil {
|
||||
if err := validateExchangeBackupCreateFlags(utils.UserFV, utils.CategoryDataFV); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r, acct, err := utils.AccountConnectAndWriteRepoConfig(
|
||||
ctx,
|
||||
cmd,
|
||||
path.ExchangeService)
|
||||
r, acct, err := utils.GetAccountAndConnect(ctx)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
defer utils.CloseRepo(ctx, r)
|
||||
|
||||
sel := exchangeBackupCreateSelectors(flags.UserFV, flags.CategoryDataFV)
|
||||
sel := exchangeBackupCreateSelectors(utils.UserFV, utils.CategoryDataFV)
|
||||
|
||||
ins, err := utils.UsersMap(
|
||||
ctx,
|
||||
*acct,
|
||||
utils.Control(),
|
||||
r.Counter(),
|
||||
fault.New(true))
|
||||
ins, err := utils.UsersMap(ctx, *acct, fault.New(true))
|
||||
if err != nil {
|
||||
return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 users"))
|
||||
}
|
||||
@ -171,10 +172,10 @@ func createExchangeCmd(cmd *cobra.Command, args []string) error {
|
||||
selectorSet = append(selectorSet, discSel.Selector)
|
||||
}
|
||||
|
||||
return genericCreateCommand(
|
||||
return runBackups(
|
||||
ctx,
|
||||
r,
|
||||
"Exchange",
|
||||
"Exchange", "user",
|
||||
selectorSet,
|
||||
ins)
|
||||
}
|
||||
@ -233,7 +234,7 @@ func exchangeListCmd() *cobra.Command {
|
||||
|
||||
// lists the history of backup operations
|
||||
func listExchangeCmd(cmd *cobra.Command, args []string) error {
|
||||
return genericListCommand(cmd, flags.BackupIDFV, path.ExchangeService, args)
|
||||
return genericListCommand(cmd, utils.BackupIDFV, path.ExchangeService, args)
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
@ -257,35 +258,71 @@ func detailsExchangeCmd(cmd *cobra.Command, args []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
return runDetailsExchangeCmd(cmd)
|
||||
}
|
||||
|
||||
func runDetailsExchangeCmd(cmd *cobra.Command) error {
|
||||
ctx := cmd.Context()
|
||||
opts := utils.MakeExchangeOpts(cmd)
|
||||
|
||||
sel := utils.IncludeExchangeRestoreDataSelectors(opts)
|
||||
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
|
||||
utils.FilterExchangeRestoreInfoSelectors(sel, opts)
|
||||
|
||||
ds, err := genericDetailsCommand(cmd, flags.BackupIDFV, sel.Selector)
|
||||
r, _, err := utils.GetAccountAndConnect(ctx)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
if len(ds.Entries) > 0 {
|
||||
ds.PrintEntries(ctx)
|
||||
} else {
|
||||
Info(ctx, selectors.ErrorNoMatchingItems)
|
||||
defer utils.CloseRepo(ctx, r)
|
||||
|
||||
ctrlOpts := options.Control()
|
||||
|
||||
ds, err := runDetailsExchangeCmd(ctx, r, utils.BackupIDFV, opts, ctrlOpts.SkipReduce)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
if len(ds.Entries) == 0 {
|
||||
Info(ctx, selectors.ErrorNoMatchingItems)
|
||||
return nil
|
||||
}
|
||||
|
||||
ds.PrintEntries(ctx)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// runDetailsExchangeCmd actually performs the lookup in backup details.
|
||||
// the fault.Errors return is always non-nil. Callers should check if
|
||||
// errs.Failure() == nil.
|
||||
func runDetailsExchangeCmd(
|
||||
ctx context.Context,
|
||||
r repository.BackupGetter,
|
||||
backupID string,
|
||||
opts utils.ExchangeOpts,
|
||||
skipReduce bool,
|
||||
) (*details.Details, error) {
|
||||
if err := utils.ValidateExchangeRestoreFlags(backupID, opts); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ctx = clues.Add(ctx, "backup_id", backupID)
|
||||
|
||||
d, _, errs := r.GetBackupDetails(ctx, backupID)
|
||||
// TODO: log/track recoverable errors
|
||||
if errs.Failure() != nil {
|
||||
if errors.Is(errs.Failure(), data.ErrNotFound) {
|
||||
return nil, clues.New("No backup exists with the id " + backupID)
|
||||
}
|
||||
|
||||
return nil, clues.Wrap(errs.Failure(), "Failed to get backup details in the repository")
|
||||
}
|
||||
|
||||
ctx = clues.Add(ctx, "details_entries", len(d.Entries))
|
||||
|
||||
if !skipReduce {
|
||||
sel := utils.IncludeExchangeRestoreDataSelectors(opts)
|
||||
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
|
||||
utils.FilterExchangeRestoreInfoSelectors(sel, opts)
|
||||
d = sel.Reduce(ctx, d, errs)
|
||||
}
|
||||
|
||||
return d, nil
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup delete
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
@ -302,15 +339,5 @@ func exchangeDeleteCmd() *cobra.Command {
|
||||
|
||||
// deletes an exchange service backup.
|
||||
func deleteExchangeCmd(cmd *cobra.Command, args []string) error {
|
||||
var backupIDValue []string
|
||||
|
||||
if len(flags.BackupIDsFV) > 0 {
|
||||
backupIDValue = flags.BackupIDsFV
|
||||
} else if len(flags.BackupIDFV) > 0 {
|
||||
backupIDValue = append(backupIDValue, flags.BackupIDFV)
|
||||
} else {
|
||||
return clues.New("either --backup or --backups flag is required")
|
||||
}
|
||||
|
||||
return genericDeleteCommand(cmd, path.ExchangeService, "Exchange", backupIDValue, args)
|
||||
return genericDeleteCommand(cmd, utils.BackupIDFV, "Exchange", args)
|
||||
}
|
||||
|
||||
@ -1,30 +1,32 @@
|
||||
package backup_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/google/uuid"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/viper"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli"
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/config"
|
||||
"github.com/alcionai/corso/src/cli/print"
|
||||
cliTD "github.com/alcionai/corso/src/cli/testdata"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/internal/common/idname"
|
||||
"github.com/alcionai/corso/src/internal/connector/exchange"
|
||||
"github.com/alcionai/corso/src/internal/operations"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/internal/tester/its"
|
||||
"github.com/alcionai/corso/src/internal/tester/tconfig"
|
||||
"github.com/alcionai/corso/src/pkg/config"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/repository"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
|
||||
"github.com/alcionai/corso/src/pkg/storage"
|
||||
)
|
||||
|
||||
var (
|
||||
@ -34,46 +36,59 @@ var (
|
||||
)
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests that depend on no backups existing
|
||||
// tests with no backups
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type NoBackupExchangeE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
m365 its.M365IntgTestSetup
|
||||
acct account.Account
|
||||
st storage.Storage
|
||||
vpr *viper.Viper
|
||||
cfgFP string
|
||||
repo repository.Repository
|
||||
m365UserID string
|
||||
recorder strings.Builder
|
||||
}
|
||||
|
||||
func TestNoBackupExchangeE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &BackupExchangeE2ESuite{Suite: tester.NewE2ESuite(
|
||||
suite.Run(t, &NoBackupExchangeE2ESuite{Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
|
||||
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
|
||||
tester.CorsoCITests,
|
||||
)})
|
||||
}
|
||||
|
||||
func (suite *NoBackupExchangeE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
suite.m365 = its.GetM365(t)
|
||||
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
|
||||
t := suite.T()
|
||||
acct, st, repo, vpr, recorder, cfgFilePath := prepM365Test(t, ctx)
|
||||
|
||||
suite.acct = acct
|
||||
suite.st = st
|
||||
suite.repo = repo
|
||||
suite.vpr = vpr
|
||||
suite.recorder = recorder
|
||||
suite.cfgFP = cfgFilePath
|
||||
suite.m365UserID = tester.M365UserID(t)
|
||||
}
|
||||
|
||||
func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_noBackups() {
|
||||
func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_empty() {
|
||||
t := suite.T()
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
suite.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
cmd := tester.StubRootCmd(
|
||||
"backup", "list", "exchange",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
|
||||
"--config-file", suite.cfgFP)
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetErr(&suite.dpnd.recorder)
|
||||
cmd.SetErr(&suite.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
@ -81,10 +96,10 @@ func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_noBackups() {
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
result := suite.dpnd.recorder.String()
|
||||
result := suite.recorder.String()
|
||||
|
||||
// as an offhand check: the result should contain the m365 user id
|
||||
assert.True(t, strings.HasSuffix(result, "No backups available\n"))
|
||||
assert.Equal(t, "No backups available\n", result)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -93,55 +108,61 @@ func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_noBackups() {
|
||||
|
||||
type BackupExchangeE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
m365 its.M365IntgTestSetup
|
||||
acct account.Account
|
||||
st storage.Storage
|
||||
vpr *viper.Viper
|
||||
cfgFP string
|
||||
repo repository.Repository
|
||||
m365UserID string
|
||||
}
|
||||
|
||||
func TestBackupExchangeE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &BackupExchangeE2ESuite{Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
|
||||
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
|
||||
tester.CorsoCITests,
|
||||
)})
|
||||
}
|
||||
|
||||
func (suite *BackupExchangeE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
suite.m365 = its.GetM365(t)
|
||||
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
|
||||
t := suite.T()
|
||||
acct, st, repo, vpr, _, cfgFilePath := prepM365Test(t, ctx)
|
||||
|
||||
suite.acct = acct
|
||||
suite.st = st
|
||||
suite.repo = repo
|
||||
suite.vpr = vpr
|
||||
suite.cfgFP = cfgFilePath
|
||||
suite.m365UserID = tester.M365UserID(t)
|
||||
}
|
||||
|
||||
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_email() {
|
||||
runExchangeBackupCategoryTest(suite, email)
|
||||
runExchangeBackupCategoryTest(suite, "email")
|
||||
}
|
||||
|
||||
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_contacts() {
|
||||
runExchangeBackupCategoryTest(suite, contacts)
|
||||
runExchangeBackupCategoryTest(suite, "contacts")
|
||||
}
|
||||
|
||||
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_events() {
|
||||
runExchangeBackupCategoryTest(suite, events)
|
||||
runExchangeBackupCategoryTest(suite, "events")
|
||||
}
|
||||
|
||||
func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.CategoryType) {
|
||||
func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category string) {
|
||||
recorder := strings.Builder{}
|
||||
recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd, ctx := buildExchangeBackupCmd(
|
||||
ctx,
|
||||
suite.dpnd.configFilePath,
|
||||
suite.m365.User.ID,
|
||||
category.String(),
|
||||
&recorder)
|
||||
cmd, ctx := buildExchangeBackupCmd(ctx, suite.cfgFP, suite.m365UserID, category, &recorder)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
@ -150,78 +171,34 @@ func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.
|
||||
result := recorder.String()
|
||||
t.Log("backup results", result)
|
||||
|
||||
// As an offhand check: the result should contain the m365 user's email.
|
||||
assert.Contains(
|
||||
t,
|
||||
strings.ToLower(result),
|
||||
strings.ToLower(suite.m365.User.Provider.Name()))
|
||||
}
|
||||
|
||||
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_ServiceNotEnabled_email() {
|
||||
runExchangeBackupServiceNotEnabledTest(suite, email)
|
||||
}
|
||||
|
||||
func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, category path.CategoryType) {
|
||||
recorder := strings.Builder{}
|
||||
recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
// run the command
|
||||
|
||||
cmd, ctx := buildExchangeBackupCmd(
|
||||
ctx,
|
||||
suite.dpnd.configFilePath,
|
||||
fmt.Sprintf("%s,%s", tconfig.UnlicensedM365UserID(suite.T()), suite.m365.User.ID),
|
||||
category.String(),
|
||||
&recorder)
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
result := recorder.String()
|
||||
t.Log("backup results", result)
|
||||
|
||||
// As an offhand check: the result should contain the m365 user's email.
|
||||
assert.Contains(
|
||||
t,
|
||||
strings.ToLower(result),
|
||||
strings.ToLower(suite.m365.User.Provider.Name()))
|
||||
// as an offhand check: the result should contain the m365 user id
|
||||
assert.Contains(t, result, suite.m365UserID)
|
||||
}
|
||||
|
||||
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_email() {
|
||||
runExchangeBackupUserNotFoundTest(suite, email)
|
||||
runExchangeBackupUserNotFoundTest(suite, "email")
|
||||
}
|
||||
|
||||
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_contacts() {
|
||||
runExchangeBackupUserNotFoundTest(suite, contacts)
|
||||
runExchangeBackupUserNotFoundTest(suite, "contacts")
|
||||
}
|
||||
|
||||
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_events() {
|
||||
runExchangeBackupUserNotFoundTest(suite, events)
|
||||
runExchangeBackupUserNotFoundTest(suite, "events")
|
||||
}
|
||||
|
||||
func runExchangeBackupUserNotFoundTest(suite *BackupExchangeE2ESuite, category path.CategoryType) {
|
||||
func runExchangeBackupUserNotFoundTest(suite *BackupExchangeE2ESuite, category string) {
|
||||
recorder := strings.Builder{}
|
||||
recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd, ctx := buildExchangeBackupCmd(
|
||||
ctx,
|
||||
suite.dpnd.configFilePath,
|
||||
"foo@not-there.com",
|
||||
category.String(),
|
||||
&recorder)
|
||||
cmd, ctx := buildExchangeBackupCmd(ctx, suite.cfgFP, "foo@not-there.com", category, &recorder)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
@ -229,8 +206,7 @@ func runExchangeBackupUserNotFoundTest(suite *BackupExchangeE2ESuite, category p
|
||||
assert.Contains(
|
||||
t,
|
||||
err.Error(),
|
||||
"not found",
|
||||
"error missing user not found")
|
||||
"not found in tenant", "error missing user not found")
|
||||
assert.NotContains(t, err.Error(), "runtime error", "panic happened")
|
||||
|
||||
t.Logf("backup error message: %s", err.Error())
|
||||
@ -239,120 +215,49 @@ func runExchangeBackupUserNotFoundTest(suite *BackupExchangeE2ESuite, category p
|
||||
t.Log("backup results", result)
|
||||
}
|
||||
|
||||
func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAzureClientIDFlag() {
|
||||
t := suite.T()
|
||||
ctx, flush := tester.NewContext(t)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "create", "exchange",
|
||||
"--user", suite.m365.User.ID,
|
||||
"--azure-client-id", "invalid-value")
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetErr(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_fromConfigFile() {
|
||||
t := suite.T()
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "create", "exchange",
|
||||
"--user", suite.m365.User.ID,
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
result := suite.dpnd.recorder.String()
|
||||
t.Log("backup results", result)
|
||||
|
||||
// As an offhand check: the result should contain the m365 user's email.
|
||||
assert.Contains(
|
||||
t,
|
||||
strings.ToLower(result),
|
||||
strings.ToLower(suite.m365.User.Provider.Name()))
|
||||
}
|
||||
|
||||
// AWS flags
|
||||
func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAWSFlags() {
|
||||
t := suite.T()
|
||||
ctx, flush := tester.NewContext(t)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "create", "exchange",
|
||||
"--user", suite.m365.User.ID,
|
||||
"--aws-access-key", "invalid-value",
|
||||
"--aws-secret-access-key", "some-invalid-value")
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
// since invalid aws creds are explicitly set, should see a failure
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests prepared with a previous backup
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type PreparedBackupExchangeE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
acct account.Account
|
||||
st storage.Storage
|
||||
vpr *viper.Viper
|
||||
cfgFP string
|
||||
repo repository.Repository
|
||||
m365UserID string
|
||||
backupOps map[path.CategoryType]string
|
||||
m365 its.M365IntgTestSetup
|
||||
recorder strings.Builder
|
||||
}
|
||||
|
||||
func TestPreparedBackupExchangeE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &PreparedBackupExchangeE2ESuite{
|
||||
Suite: tester.NewE2ESuite(
|
||||
suite.Run(t, &PreparedBackupExchangeE2ESuite{Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
|
||||
})
|
||||
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
|
||||
tester.CorsoCITests,
|
||||
)})
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
suite.m365 = its.GetM365(t)
|
||||
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
|
||||
t := suite.T()
|
||||
acct, st, repo, vpr, recorder, cfgFilePath := prepM365Test(t, ctx)
|
||||
|
||||
suite.acct = acct
|
||||
suite.st = st
|
||||
suite.repo = repo
|
||||
suite.vpr = vpr
|
||||
suite.recorder = recorder
|
||||
suite.cfgFP = cfgFilePath
|
||||
suite.m365UserID = tester.M365UserID(t)
|
||||
suite.backupOps = make(map[path.CategoryType]string)
|
||||
|
||||
var (
|
||||
users = []string{suite.m365.User.ID}
|
||||
ins = idname.NewCache(map[string]string{suite.m365.User.ID: suite.m365.User.ID})
|
||||
users = []string{suite.m365UserID}
|
||||
ins = idname.NewCache(map[string]string{suite.m365UserID: suite.m365UserID})
|
||||
)
|
||||
|
||||
for _, set := range []path.CategoryType{email, contacts, events} {
|
||||
@ -363,18 +268,18 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
|
||||
|
||||
switch set {
|
||||
case email:
|
||||
scopes = sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch())
|
||||
scopes = sel.MailFolders([]string{exchange.DefaultMailFolder}, selectors.PrefixMatch())
|
||||
|
||||
case contacts:
|
||||
scopes = sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch())
|
||||
scopes = sel.ContactFolders([]string{exchange.DefaultContactFolder}, selectors.PrefixMatch())
|
||||
|
||||
case events:
|
||||
scopes = sel.EventCalendars([]string{api.DefaultCalendar}, selectors.PrefixMatch())
|
||||
scopes = sel.EventCalendars([]string{exchange.DefaultCalendar}, selectors.PrefixMatch())
|
||||
}
|
||||
|
||||
sel.Include(scopes)
|
||||
|
||||
bop, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
|
||||
bop, err := suite.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
err = bop.Run(ctx)
|
||||
@ -383,11 +288,11 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
|
||||
bIDs := string(bop.Results.BackupID)
|
||||
|
||||
// sanity check, ensure we can find the backup and its details immediately
|
||||
b, err := suite.dpnd.repo.Backup(ctx, string(bop.Results.BackupID))
|
||||
b, err := suite.repo.Backup(ctx, string(bop.Results.BackupID))
|
||||
require.NoError(t, err, "retrieving recent backup by ID")
|
||||
require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")
|
||||
|
||||
_, b, errs := suite.dpnd.repo.GetBackupDetails(ctx, bIDs)
|
||||
_, b, errs := suite.repo.GetBackupDetails(ctx, bIDs)
|
||||
require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
|
||||
require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
|
||||
require.Equal(t, bIDs, string(b.ID), "repo details matches results id")
|
||||
@ -409,20 +314,20 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeListCmd_events() {
|
||||
}
|
||||
|
||||
func runExchangeListCmdTest(suite *PreparedBackupExchangeE2ESuite, category path.CategoryType) {
|
||||
suite.dpnd.recorder.Reset()
|
||||
suite.recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
cmd := tester.StubRootCmd(
|
||||
"backup", "list", "exchange",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
|
||||
"--config-file", suite.cfgFP)
|
||||
cli.BuildCommandTree(cmd)
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
cmd.SetOut(&suite.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
@ -431,7 +336,7 @@ func runExchangeListCmdTest(suite *PreparedBackupExchangeE2ESuite, category path
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// compare the output
|
||||
result := suite.dpnd.recorder.String()
|
||||
result := suite.recorder.String()
|
||||
assert.Contains(t, result, suite.backupOps[category])
|
||||
}
|
||||
|
||||
@ -448,24 +353,24 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeListCmd_singleID_events
|
||||
}
|
||||
|
||||
func runExchangeListSingleCmdTest(suite *PreparedBackupExchangeE2ESuite, category path.CategoryType) {
|
||||
suite.dpnd.recorder.Reset()
|
||||
suite.recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
bID := suite.backupOps[category]
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
cmd := tester.StubRootCmd(
|
||||
"backup", "list", "exchange",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--config-file", suite.cfgFP,
|
||||
"--backup", string(bID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
cmd.SetOut(&suite.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
@ -474,21 +379,21 @@ func runExchangeListSingleCmdTest(suite *PreparedBackupExchangeE2ESuite, categor
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// compare the output
|
||||
result := suite.dpnd.recorder.String()
|
||||
result := suite.recorder.String()
|
||||
assert.Contains(t, result, bID)
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupExchangeE2ESuite) TestExchangeListCmd_badID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
cmd := tester.StubRootCmd(
|
||||
"backup", "list", "exchange",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--config-file", suite.cfgFP,
|
||||
"--backup", "smarfs")
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
@ -512,28 +417,28 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeDetailsCmd_events() {
|
||||
}
|
||||
|
||||
func runExchangeDetailsCmdTest(suite *PreparedBackupExchangeE2ESuite, category path.CategoryType) {
|
||||
suite.dpnd.recorder.Reset()
|
||||
suite.recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
bID := suite.backupOps[category]
|
||||
|
||||
// fetch the details from the repo first
|
||||
deets, _, errs := suite.dpnd.repo.GetBackupDetails(ctx, string(bID))
|
||||
deets, _, errs := suite.repo.GetBackupDetails(ctx, string(bID))
|
||||
require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
|
||||
require.Empty(t, errs.Recovered())
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
cmd := tester.StubRootCmd(
|
||||
"backup", "details", "exchange",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupFN, string(bID))
|
||||
"--config-file", suite.cfgFP,
|
||||
"--"+utils.BackupFN, string(bID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
cmd.SetOut(&suite.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
@ -542,7 +447,7 @@ func runExchangeDetailsCmdTest(suite *PreparedBackupExchangeE2ESuite, category p
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// compare the output
|
||||
result := suite.dpnd.recorder.String()
|
||||
result := suite.recorder.String()
|
||||
|
||||
i := 0
|
||||
foundFolders := 0
|
||||
@ -572,59 +477,64 @@ func runExchangeDetailsCmdTest(suite *PreparedBackupExchangeE2ESuite, category p
|
||||
|
||||
type BackupDeleteExchangeE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
backupOps [3]operations.BackupOperation
|
||||
acct account.Account
|
||||
st storage.Storage
|
||||
vpr *viper.Viper
|
||||
cfgFP string
|
||||
repo repository.Repository
|
||||
backupOp operations.BackupOperation
|
||||
}
|
||||
|
||||
func TestBackupDeleteExchangeE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &BackupDeleteExchangeE2ESuite{
|
||||
Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
|
||||
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
|
||||
tester.CorsoCITests,
|
||||
),
|
||||
})
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteExchangeE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
|
||||
t := suite.T()
|
||||
acct, st, repo, vpr, _, cfgFilePath := prepM365Test(t, ctx)
|
||||
|
||||
m365UserID := tconfig.M365UserID(t)
|
||||
suite.acct = acct
|
||||
suite.st = st
|
||||
suite.repo = repo
|
||||
suite.vpr = vpr
|
||||
suite.cfgFP = cfgFilePath
|
||||
|
||||
m365UserID := tester.M365UserID(t)
|
||||
users := []string{m365UserID}
|
||||
|
||||
// some tests require an existing backup
|
||||
sel := selectors.NewExchangeBackup(users)
|
||||
sel.Include(sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
|
||||
sel.Include(sel.MailFolders([]string{exchange.DefaultMailFolder}, selectors.PrefixMatch()))
|
||||
|
||||
for i := 0; i < cap(suite.backupOps); i++ {
|
||||
backupOp, err := suite.dpnd.repo.NewBackup(ctx, sel.Selector)
|
||||
backupOp, err := suite.repo.NewBackup(ctx, sel.Selector)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
suite.backupOps[i] = backupOp
|
||||
suite.backupOp = backupOp
|
||||
|
||||
err = suite.backupOps[i].Run(ctx)
|
||||
err = suite.backupOp.Run(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
cmd := tester.StubRootCmd(
|
||||
"backup", "delete", "exchange",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupIDsFN,
|
||||
fmt.Sprintf("%s,%s",
|
||||
string(suite.backupOps[0].Results.BackupID),
|
||||
string(suite.backupOps[1].Results.BackupID)))
|
||||
"--config-file", suite.cfgFP,
|
||||
"--"+utils.BackupFN, string(suite.backupOp.Results.BackupID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
// run the command
|
||||
@ -632,50 +542,10 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd() {
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// a follow-up details call should fail, due to the backup ID being deleted
|
||||
cmd = cliTD.StubRootCmd(
|
||||
cmd = tester.StubRootCmd(
|
||||
"backup", "details", "exchange",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--backup", string(suite.backupOps[0].Results.BackupID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
err = cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
|
||||
// a follow-up details call should fail, due to the backup ID being deleted
|
||||
cmd = cliTD.StubRootCmd(
|
||||
"backup", "details", "exchange",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--backup", string(suite.backupOps[1].Results.BackupID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
err = cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_SingleID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "delete", "exchange",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupFN,
|
||||
string(suite.backupOps[2].Results.BackupID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// a follow-up details call should fail, due to the backup ID being deleted
|
||||
cmd = cliTD.StubRootCmd(
|
||||
"backup", "details", "exchange",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--backup", string(suite.backupOps[2].Results.BackupID))
|
||||
"--config-file", suite.cfgFP,
|
||||
"--backup", string(suite.backupOp.Results.BackupID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
err = cmd.ExecuteContext(ctx)
|
||||
@ -684,16 +554,15 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_SingleID(
|
||||
|
||||
func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_UnknownID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
cmd := tester.StubRootCmd(
|
||||
"backup", "delete", "exchange",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupIDsFN, uuid.NewString())
|
||||
"--config-file", suite.cfgFP,
|
||||
"--"+utils.BackupFN, uuid.NewString())
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
// unknown backupIDs should error since the modelStore can't find the backup
|
||||
@ -701,20 +570,22 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_UnknownID
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_NoBackupID() {
|
||||
t := suite.T()
|
||||
// ---------------------------------------------------------------------------
|
||||
// helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "delete", "exchange",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
|
||||
func buildExchangeBackupCmd(
|
||||
ctx context.Context,
|
||||
configFile, user, category string,
|
||||
recorder *strings.Builder,
|
||||
) (*cobra.Command, context.Context) {
|
||||
cmd := tester.StubRootCmd(
|
||||
"backup", "create", "exchange",
|
||||
"--config-file", configFile,
|
||||
"--"+utils.UserFN, user,
|
||||
"--"+utils.CategoryDataFN, category)
|
||||
cli.BuildCommandTree(cmd)
|
||||
cmd.SetOut(recorder)
|
||||
|
||||
// empty backupIDs should error since no data provided
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
return cmd, print.SetRootCmd(ctx, cmd)
|
||||
}
|
||||
|
||||
@ -1,7 +1,6 @@
|
||||
package backup
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
@ -10,12 +9,10 @@ import (
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
|
||||
cliTD "github.com/alcionai/corso/src/cli/testdata"
|
||||
"github.com/alcionai/corso/src/cli/options"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/cli/utils/testdata"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
)
|
||||
|
||||
type ExchangeUnitSuite struct {
|
||||
@ -34,35 +31,71 @@ func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
|
||||
use string
|
||||
expectUse string
|
||||
expectShort string
|
||||
flags []string
|
||||
expectRunE func(*cobra.Command, []string) error
|
||||
}{
|
||||
{
|
||||
name: "create exchange",
|
||||
use: createCommand,
|
||||
expectUse: expectUse + " " + exchangeServiceCommandCreateUseSuffix,
|
||||
expectShort: exchangeCreateCmd().Short,
|
||||
expectRunE: createExchangeCmd,
|
||||
"create exchange",
|
||||
createCommand,
|
||||
expectUse + " " + exchangeServiceCommandCreateUseSuffix,
|
||||
exchangeCreateCmd().Short,
|
||||
[]string{
|
||||
utils.UserFN,
|
||||
utils.CategoryDataFN,
|
||||
options.DisableIncrementalsFN,
|
||||
options.FailFastFN,
|
||||
options.FetchParallelismFN,
|
||||
options.SkipReduceFN,
|
||||
options.NoStatsFN,
|
||||
},
|
||||
createExchangeCmd,
|
||||
},
|
||||
{
|
||||
name: "list exchange",
|
||||
use: listCommand,
|
||||
expectUse: expectUse,
|
||||
expectShort: exchangeListCmd().Short,
|
||||
expectRunE: listExchangeCmd,
|
||||
"list exchange",
|
||||
listCommand,
|
||||
expectUse,
|
||||
exchangeListCmd().Short,
|
||||
[]string{
|
||||
utils.BackupFN,
|
||||
failedItemsFN,
|
||||
skippedItemsFN,
|
||||
recoveredErrorsFN,
|
||||
},
|
||||
listExchangeCmd,
|
||||
},
|
||||
{
|
||||
name: "details exchange",
|
||||
use: detailsCommand,
|
||||
expectUse: expectUse + " " + exchangeServiceCommandDetailsUseSuffix,
|
||||
expectShort: exchangeDetailsCmd().Short,
|
||||
expectRunE: detailsExchangeCmd,
|
||||
"details exchange",
|
||||
detailsCommand,
|
||||
expectUse + " " + exchangeServiceCommandDetailsUseSuffix,
|
||||
exchangeDetailsCmd().Short,
|
||||
[]string{
|
||||
utils.BackupFN,
|
||||
utils.ContactFN,
|
||||
utils.ContactFolderFN,
|
||||
utils.ContactNameFN,
|
||||
utils.EmailFN,
|
||||
utils.EmailFolderFN,
|
||||
utils.EmailReceivedAfterFN,
|
||||
utils.EmailReceivedBeforeFN,
|
||||
utils.EmailSenderFN,
|
||||
utils.EmailSubjectFN,
|
||||
utils.EventFN,
|
||||
utils.EventCalendarFN,
|
||||
utils.EventOrganizerFN,
|
||||
utils.EventRecursFN,
|
||||
utils.EventStartsAfterFN,
|
||||
utils.EventStartsBeforeFN,
|
||||
utils.EventSubjectFN,
|
||||
},
|
||||
detailsExchangeCmd,
|
||||
},
|
||||
{
|
||||
name: "delete exchange",
|
||||
use: deleteCommand,
|
||||
expectUse: expectUse + " " + exchangeServiceCommandDeleteUseSuffix,
|
||||
expectShort: exchangeDeleteCmd().Short,
|
||||
expectRunE: deleteExchangeCmd,
|
||||
"delete exchange",
|
||||
deleteCommand,
|
||||
expectUse + " " + exchangeServiceCommandDeleteUseSuffix,
|
||||
exchangeDeleteCmd().Short,
|
||||
[]string{utils.BackupFN},
|
||||
deleteExchangeCmd,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
@ -85,147 +118,6 @@ func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *ExchangeUnitSuite) TestBackupCreateFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: createCommand},
|
||||
addExchangeCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
exchangeServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.MailBoxFN, flagsTD.FlgInputs(flagsTD.MailboxInput),
|
||||
"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.ExchangeCategoryDataInput),
|
||||
"--" + flags.FetchParallelismFN, flagsTD.FetchParallelism,
|
||||
"--" + flags.DeltaPageSizeFN, flagsTD.DeltaPageSize,
|
||||
|
||||
// bool flags
|
||||
"--" + flags.DisableDeltaFN,
|
||||
"--" + flags.EnableImmutableIDFN,
|
||||
"--" + flags.DisableSlidingWindowLimiterFN,
|
||||
},
|
||||
flagsTD.PreparedGenericBackupFlags(),
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
opts := utils.MakeExchangeOpts(cmd)
|
||||
co := utils.Control()
|
||||
backupOpts := utils.ParseBackupOptions()
|
||||
|
||||
// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
|
||||
// restore flags are switched over too and we no longer parse flags beyond
|
||||
// connection info into control.Options.
|
||||
assert.Equal(t, flagsTD.FetchParallelism, strconv.Itoa(backupOpts.Parallelism.ItemFetch))
|
||||
assert.Equal(t, flagsTD.DeltaPageSize, strconv.Itoa(int(backupOpts.M365.DeltaPageSize)))
|
||||
assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
|
||||
assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
|
||||
assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)
|
||||
assert.True(t, backupOpts.M365.DisableDeltaEndpoint)
|
||||
assert.True(t, backupOpts.M365.ExchangeImmutableIDs)
|
||||
assert.True(t, backupOpts.ServiceRateLimiter.DisableSlidingWindowLimiter)
|
||||
|
||||
assert.Equal(t, flagsTD.FetchParallelism, strconv.Itoa(co.Parallelism.ItemFetch))
|
||||
assert.Equal(t, flagsTD.DeltaPageSize, strconv.Itoa(int(co.DeltaPageSize)))
|
||||
assert.Equal(t, control.FailFast, co.FailureHandling)
|
||||
assert.True(t, co.ToggleFeatures.DisableIncrementals)
|
||||
assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
|
||||
assert.True(t, co.ToggleFeatures.DisableDelta)
|
||||
assert.True(t, co.ToggleFeatures.ExchangeImmutableIDs)
|
||||
assert.True(t, co.ToggleFeatures.DisableSlidingWindowLimiter)
|
||||
|
||||
assert.ElementsMatch(t, flagsTD.MailboxInput, opts.Users)
|
||||
flagsTD.AssertGenericBackupFlags(t, cmd)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *ExchangeUnitSuite) TestBackupListFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: listCommand},
|
||||
addExchangeCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
exchangeServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
},
|
||||
flagsTD.PreparedBackupListFlags(),
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
flagsTD.AssertBackupListFlags(t, cmd)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *ExchangeUnitSuite) TestBackupDetailsFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: detailsCommand},
|
||||
addExchangeCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
exchangeServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
"--" + flags.SkipReduceFN,
|
||||
},
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
co := utils.Control()
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
assert.True(t, co.SkipReduce)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *ExchangeUnitSuite) TestBackupDeleteFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: deleteCommand},
|
||||
addExchangeCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
exchangeServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
},
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *ExchangeUnitSuite) TestValidateBackupCreateFlags() {
|
||||
table := []struct {
|
||||
name string
|
||||
@ -275,7 +167,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
|
||||
},
|
||||
{
|
||||
name: "any users, no data",
|
||||
user: []string{flags.Wildcard},
|
||||
user: []string{utils.Wildcard},
|
||||
expectIncludeLen: 3,
|
||||
},
|
||||
{
|
||||
@ -285,7 +177,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
|
||||
},
|
||||
{
|
||||
name: "any users, contacts",
|
||||
user: []string{flags.Wildcard},
|
||||
user: []string{utils.Wildcard},
|
||||
data: []string{dataContacts},
|
||||
expectIncludeLen: 1,
|
||||
},
|
||||
@ -297,7 +189,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
|
||||
},
|
||||
{
|
||||
name: "any users, email",
|
||||
user: []string{flags.Wildcard},
|
||||
user: []string{utils.Wildcard},
|
||||
data: []string{dataEmail},
|
||||
expectIncludeLen: 1,
|
||||
},
|
||||
@ -309,7 +201,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
|
||||
},
|
||||
{
|
||||
name: "any users, events",
|
||||
user: []string{flags.Wildcard},
|
||||
user: []string{utils.Wildcard},
|
||||
data: []string{dataEvents},
|
||||
expectIncludeLen: 1,
|
||||
},
|
||||
@ -321,7 +213,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
|
||||
},
|
||||
{
|
||||
name: "any users, contacts + email",
|
||||
user: []string{flags.Wildcard},
|
||||
user: []string{utils.Wildcard},
|
||||
data: []string{dataContacts, dataEmail},
|
||||
expectIncludeLen: 2,
|
||||
},
|
||||
@ -333,7 +225,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
|
||||
},
|
||||
{
|
||||
name: "any users, email + events",
|
||||
user: []string{flags.Wildcard},
|
||||
user: []string{utils.Wildcard},
|
||||
data: []string{dataEmail, dataEvents},
|
||||
expectIncludeLen: 2,
|
||||
},
|
||||
@ -345,7 +237,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
|
||||
},
|
||||
{
|
||||
name: "any users, events + contacts",
|
||||
user: []string{flags.Wildcard},
|
||||
user: []string{utils.Wildcard},
|
||||
data: []string{dataEvents, dataContacts},
|
||||
expectIncludeLen: 2,
|
||||
},
|
||||
@ -377,3 +269,43 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *ExchangeUnitSuite) TestExchangeBackupDetailsSelectors() {
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
for _, test := range testdata.ExchangeOptionDetailLookups {
|
||||
suite.Run(test.Name, func() {
|
||||
t := suite.T()
|
||||
|
||||
output, err := runDetailsExchangeCmd(
|
||||
ctx,
|
||||
test.BackupGetter,
|
||||
"backup-ID",
|
||||
test.Opts,
|
||||
false)
|
||||
assert.NoError(t, err, clues.ToCore(err))
|
||||
assert.ElementsMatch(t, test.Expected, output.Entries)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *ExchangeUnitSuite) TestExchangeBackupDetailsSelectorsBadFormats() {
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
for _, test := range testdata.BadExchangeOptionsFormats {
|
||||
suite.Run(test.Name, func() {
|
||||
t := suite.T()
|
||||
|
||||
output, err := runDetailsExchangeCmd(
|
||||
ctx,
|
||||
test.BackupGetter,
|
||||
"backup-ID",
|
||||
test.Opts,
|
||||
false)
|
||||
assert.Error(t, err, clues.ToCore(err))
|
||||
assert.Empty(t, output)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,329 +0,0 @@
|
||||
package backup
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/cobra"
|
||||
"golang.org/x/exp/slices"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
. "github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/internal/common/idname"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/filters"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365"
|
||||
)
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// setup and globals
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
const (
|
||||
groupsServiceCommand = "groups"
|
||||
teamsServiceCommand = "teams"
|
||||
groupsServiceCommandCreateUseSuffix = "--group <groupName> | '" + flags.Wildcard + "'"
|
||||
groupsServiceCommandDeleteUseSuffix = "--backups <backupId>"
|
||||
groupsServiceCommandDetailsUseSuffix = "--backup <backupId>"
|
||||
)
|
||||
|
||||
const (
|
||||
groupsServiceCommandCreateExamples = `# Backup all Groups and Teams data for the Marketing group
|
||||
corso backup create groups --group Marketing
|
||||
|
||||
# Backup only Teams channel messages
|
||||
corso backup create groups --group Marketing --data messages
|
||||
|
||||
# Backup only group mailbox posts
|
||||
corso backup create groups --group Marketing --data conversations
|
||||
|
||||
# Backup all Groups and Teams data for all groups
|
||||
corso backup create groups --group '*'`
|
||||
|
||||
groupsServiceCommandDeleteExamples = `# Delete Groups backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
and 1234abcd-12ab-cd34-56de-1234abce
|
||||
corso backup delete groups --backups 1234abcd-12ab-cd34-56de-1234abcd,1234abcd-12ab-cd34-56de-1234abce`
|
||||
|
||||
groupsServiceCommandDetailsExamples = `# Explore items in Marketing's latest backup (1234abcd...)
|
||||
corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd
|
||||
|
||||
# Explore Marketing messages posted after the start of 2022
|
||||
corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--last-message-reply-after 2022-01-01T00:00:00
|
||||
|
||||
# Explore group mailbox posts with conversation subject "hello world"
|
||||
corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world"`
|
||||
)
|
||||
|
||||
// called by backup.go to map subcommands to provider-specific handling.
|
||||
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var c *cobra.Command
|
||||
|
||||
switch cmd.Use {
|
||||
case createCommand:
|
||||
c, _ = utils.AddCommand(cmd, groupsCreateCmd(), utils.MarkPreviewCommand())
|
||||
|
||||
c.Use = c.Use + " " + groupsServiceCommandCreateUseSuffix
|
||||
c.Example = groupsServiceCommandCreateExamples
|
||||
|
||||
// Flags addition ordering should follow the order we want them to appear in help and docs:
|
||||
flags.AddGroupFlag(c)
|
||||
flags.AddDataFlag(c, []string{flags.DataLibraries, flags.DataMessages, flags.DataConversations}, false)
|
||||
flags.AddFetchParallelismFlag(c)
|
||||
flags.AddDisableDeltaFlag(c)
|
||||
flags.AddGenericBackupFlags(c)
|
||||
flags.AddDisableLazyItemReader(c)
|
||||
|
||||
case listCommand:
|
||||
c, _ = utils.AddCommand(cmd, groupsListCmd(), utils.MarkPreviewCommand())
|
||||
|
||||
flags.AddBackupIDFlag(c, false)
|
||||
flags.AddAllBackupListFlags(c)
|
||||
|
||||
case detailsCommand:
|
||||
c, _ = utils.AddCommand(cmd, groupsDetailsCmd(), utils.MarkPreviewCommand())
|
||||
|
||||
c.Use = c.Use + " " + groupsServiceCommandDetailsUseSuffix
|
||||
c.Example = groupsServiceCommandDetailsExamples
|
||||
|
||||
flags.AddSkipReduceFlag(c)
|
||||
|
||||
// Flags addition ordering should follow the order we want them to appear in help and docs:
|
||||
// More generic (ex: --user) and more frequently used flags take precedence.
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddGroupDetailsAndRestoreFlags(c)
|
||||
flags.AddSharePointDetailsAndRestoreFlags(c)
|
||||
|
||||
case deleteCommand:
|
||||
c, _ = utils.AddCommand(cmd, groupsDeleteCmd(), utils.MarkPreviewCommand())
|
||||
|
||||
c.Use = c.Use + " " + groupsServiceCommandDeleteUseSuffix
|
||||
c.Example = groupsServiceCommandDeleteExamples
|
||||
|
||||
flags.AddMultipleBackupIDsFlag(c, false)
|
||||
flags.AddBackupIDFlag(c, false)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup create
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup create groups [<flag>...]`
|
||||
func groupsCreateCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: groupsServiceCommand,
|
||||
Aliases: []string{teamsServiceCommand},
|
||||
Short: "Backup M365 Groups & Teams service data",
|
||||
RunE: createGroupsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// processes a groups service backup.
|
||||
func createGroupsCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := validateGroupsBackupCreateFlags(flags.GroupFV, flags.CategoryDataFV); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r, acct, err := utils.AccountConnectAndWriteRepoConfig(
|
||||
ctx,
|
||||
cmd,
|
||||
path.GroupsService)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
defer utils.CloseRepo(ctx, r)
|
||||
|
||||
// TODO: log/print recoverable errors
|
||||
errs := fault.New(false)
|
||||
|
||||
svcCli, err := m365.NewM365Client(ctx, *acct)
|
||||
if err != nil {
|
||||
return Only(ctx, clues.Stack(err))
|
||||
}
|
||||
|
||||
ins, err := svcCli.AC.Groups().GetAllIDsAndNames(ctx, errs)
|
||||
if err != nil {
|
||||
return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 groups"))
|
||||
}
|
||||
|
||||
sel := groupsBackupCreateSelectors(ctx, ins, flags.GroupFV, flags.CategoryDataFV)
|
||||
selectorSet := []selectors.Selector{}
|
||||
|
||||
for _, discSel := range sel.SplitByResourceOwner(ins.IDs()) {
|
||||
selectorSet = append(selectorSet, discSel.Selector)
|
||||
}
|
||||
|
||||
return genericCreateCommand(
|
||||
ctx,
|
||||
r,
|
||||
"Group",
|
||||
selectorSet,
|
||||
ins)
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup list
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup list groups [<flag>...]`
|
||||
func groupsListCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: groupsServiceCommand,
|
||||
Short: "List the history of M365 Groups service backups",
|
||||
RunE: listGroupsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// lists the history of backup operations
|
||||
func listGroupsCmd(cmd *cobra.Command, args []string) error {
|
||||
return genericListCommand(cmd, flags.BackupIDFV, path.GroupsService, args)
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup details
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup details groups [<flag>...]`
|
||||
func groupsDetailsCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: groupsServiceCommand,
|
||||
Short: "Shows the details of a M365 Groups service backup",
|
||||
RunE: detailsGroupsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// processes a groups service backup.
|
||||
func detailsGroupsCmd(cmd *cobra.Command, args []string) error {
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
return runDetailsGroupsCmd(cmd)
|
||||
}
|
||||
|
||||
func runDetailsGroupsCmd(cmd *cobra.Command) error {
|
||||
ctx := cmd.Context()
|
||||
opts := utils.MakeGroupsOpts(cmd)
|
||||
|
||||
sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)
|
||||
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
|
||||
utils.FilterGroupsRestoreInfoSelectors(sel, opts)
|
||||
|
||||
ds, err := genericDetailsCommand(cmd, flags.BackupIDFV, sel.Selector)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
if len(ds.Entries) > 0 {
|
||||
ds.PrintEntries(ctx)
|
||||
} else {
|
||||
Info(ctx, selectors.ErrorNoMatchingItems)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup delete
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup delete groups [<flag>...]`
|
||||
func groupsDeleteCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: groupsServiceCommand,
|
||||
Short: "Delete backed-up M365 Groups service data",
|
||||
RunE: deleteGroupsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// deletes an groups service backup.
|
||||
func deleteGroupsCmd(cmd *cobra.Command, args []string) error {
|
||||
backupIDValue := []string{}
|
||||
|
||||
if len(flags.BackupIDsFV) > 0 {
|
||||
backupIDValue = flags.BackupIDsFV
|
||||
} else if len(flags.BackupIDFV) > 0 {
|
||||
backupIDValue = append(backupIDValue, flags.BackupIDFV)
|
||||
} else {
|
||||
return clues.New("either --backup or --backups flag is required")
|
||||
}
|
||||
|
||||
return genericDeleteCommand(cmd, path.GroupsService, "Groups", backupIDValue, args)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func validateGroupsBackupCreateFlags(groups, cats []string) error {
|
||||
if len(groups) == 0 {
|
||||
return clues.New(
|
||||
"requires one or more --" +
|
||||
flags.GroupFN + " ids, or the wildcard --" +
|
||||
flags.GroupFN + " *")
|
||||
}
|
||||
|
||||
// TODO(keepers): release conversations support
|
||||
|
||||
msg := fmt.Sprintf(
|
||||
" is an unrecognized data type; only %s and %s are supported",
|
||||
flags.DataLibraries, flags.DataMessages)
|
||||
|
||||
// msg := fmt.Sprintf(
|
||||
// " is an unrecognized data type; only %s, %s and %s are supported",
|
||||
// flags.DataLibraries, flags.DataMessages, flags.DataConversations)
|
||||
|
||||
allowedCats := utils.GroupsAllowedCategories()
|
||||
|
||||
for _, d := range cats {
|
||||
if _, ok := allowedCats[d]; !ok {
|
||||
return clues.New(d + msg)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func groupsBackupCreateSelectors(
|
||||
ctx context.Context,
|
||||
ins idname.Cacher,
|
||||
group, cats []string,
|
||||
) *selectors.GroupsBackup {
|
||||
if filters.PathContains(group).Compare(flags.Wildcard) {
|
||||
return includeAllGroupsWithCategories(ins, cats)
|
||||
}
|
||||
|
||||
sel := selectors.NewGroupsBackup(slices.Clone(group))
|
||||
|
||||
return utils.AddGroupsCategories(sel, cats)
|
||||
}
|
||||
|
||||
func includeAllGroupsWithCategories(ins idname.Cacher, categories []string) *selectors.GroupsBackup {
|
||||
return utils.AddGroupsCategories(selectors.NewGroupsBackup(ins.IDs()), categories)
|
||||
}
|
||||
@ -1,690 +0,0 @@
|
||||
package backup_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/google/uuid"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli"
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/print"
|
||||
cliTD "github.com/alcionai/corso/src/cli/testdata"
|
||||
"github.com/alcionai/corso/src/internal/common/idname"
|
||||
"github.com/alcionai/corso/src/internal/operations"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/internal/tester/its"
|
||||
"github.com/alcionai/corso/src/internal/tester/tconfig"
|
||||
"github.com/alcionai/corso/src/pkg/config"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
|
||||
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
|
||||
)
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests that require no existing backups
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type NoBackupGroupsE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
m365 its.M365IntgTestSetup
|
||||
}
|
||||
|
||||
func TestNoBackupGroupsE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &BackupGroupsE2ESuite{Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
|
||||
}
|
||||
|
||||
func (suite *NoBackupGroupsE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
suite.m365 = its.GetM365(t)
|
||||
suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
|
||||
}
|
||||
|
||||
func (suite *NoBackupGroupsE2ESuite) TestGroupsBackupListCmd_noBackups() {
|
||||
t := suite.T()
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "list", "groups",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetErr(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
result := suite.dpnd.recorder.String()
|
||||
|
||||
// as an offhand check: the result should contain the m365 group id
|
||||
assert.True(t, strings.HasSuffix(result, "No backups available\n"))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests with no prior backup
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type BackupGroupsE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
m365 its.M365IntgTestSetup
|
||||
}
|
||||
|
||||
func TestBackupGroupsE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &BackupGroupsE2ESuite{Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
suite.m365 = its.GetM365(t)
|
||||
suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() {
|
||||
runGroupsBackupCategoryTest(suite, flags.DataMessages)
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_conversations() {
|
||||
// skip
|
||||
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
|
||||
runGroupsBackupCategoryTest(suite, flags.DataConversations)
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_libraries() {
|
||||
runGroupsBackupCategoryTest(suite, flags.DataLibraries)
|
||||
}
|
||||
|
||||
func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category string) {
|
||||
recorder := strings.Builder{}
|
||||
recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd, ctx := buildGroupsBackupCmd(
|
||||
ctx,
|
||||
suite.dpnd.configFilePath,
|
||||
suite.m365.Group.ID,
|
||||
category,
|
||||
&recorder)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
result := recorder.String()
|
||||
t.Log("backup results", result)
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_channelMessages() {
|
||||
runGroupsBackupGroupNotFoundTest(suite, flags.DataMessages)
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_conversations() {
|
||||
runGroupsBackupGroupNotFoundTest(suite, flags.DataConversations)
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_libraries() {
|
||||
runGroupsBackupGroupNotFoundTest(suite, flags.DataLibraries)
|
||||
}
|
||||
|
||||
func runGroupsBackupGroupNotFoundTest(suite *BackupGroupsE2ESuite, category string) {
|
||||
recorder := strings.Builder{}
|
||||
recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd, ctx := buildGroupsBackupCmd(
|
||||
ctx,
|
||||
suite.dpnd.configFilePath,
|
||||
"foo@not-there.com",
|
||||
category,
|
||||
&recorder)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
assert.Contains(
|
||||
t,
|
||||
err.Error(),
|
||||
"not found",
|
||||
"error missing user not found")
|
||||
assert.NotContains(t, err.Error(), "runtime error", "panic happened")
|
||||
|
||||
t.Logf("backup error message: %s", err.Error())
|
||||
|
||||
result := recorder.String()
|
||||
t.Log("backup results", result)
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag() {
|
||||
t := suite.T()
|
||||
ctx, flush := tester.NewContext(t)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "create", "groups",
|
||||
"--group", suite.m365.Group.ID,
|
||||
"--azure-client-id", "invalid-value")
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetErr(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
|
||||
// Skip
|
||||
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
|
||||
|
||||
t := suite.T()
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "create", "groups",
|
||||
"--group", suite.m365.Group.ID,
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
// AWS flags
|
||||
func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAWSFlags() {
|
||||
t := suite.T()
|
||||
ctx, flush := tester.NewContext(t)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "create", "groups",
|
||||
"--group", suite.m365.Group.ID,
|
||||
"--aws-access-key", "invalid-value",
|
||||
"--aws-secret-access-key", "some-invalid-value")
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
// since invalid aws creds are explicitly set, should see a failure
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests prepared with a previous backup
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type PreparedBackupGroupsE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
backupOps map[path.CategoryType]string
|
||||
m365 its.M365IntgTestSetup
|
||||
}
|
||||
|
||||
func TestPreparedBackupGroupsE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &PreparedBackupGroupsE2ESuite{
|
||||
Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
|
||||
})
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
suite.m365 = its.GetM365(t)
|
||||
suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
|
||||
suite.backupOps = make(map[path.CategoryType]string)
|
||||
|
||||
var (
|
||||
groups = []string{suite.m365.Group.ID}
|
||||
ins = idname.NewCache(map[string]string{suite.m365.Group.ID: suite.m365.Group.ID})
|
||||
cats = []path.CategoryType{
|
||||
path.ChannelMessagesCategory,
|
||||
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
|
||||
// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
|
||||
// we go fix the group mailbox.
|
||||
// path.ConversationPostsCategory,
|
||||
path.LibrariesCategory,
|
||||
}
|
||||
)
|
||||
|
||||
for _, set := range cats {
|
||||
var (
|
||||
sel = selectors.NewGroupsBackup(groups)
|
||||
scopes []selectors.GroupsScope
|
||||
)
|
||||
|
||||
switch set {
|
||||
case path.ChannelMessagesCategory:
|
||||
scopes = selTD.GroupsBackupChannelScope(sel)
|
||||
|
||||
case path.ConversationPostsCategory:
|
||||
scopes = selTD.GroupsBackupConversationScope(sel)
|
||||
|
||||
case path.LibrariesCategory:
|
||||
scopes = selTD.GroupsBackupLibraryFolderScope(sel)
|
||||
}
|
||||
|
||||
sel.Include(scopes)
|
||||
|
||||
bop, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
err = bop.Run(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
bIDs := string(bop.Results.BackupID)
|
||||
|
||||
// sanity check, ensure we can find the backup and its details immediately
|
||||
b, err := suite.dpnd.repo.Backup(ctx, string(bop.Results.BackupID))
|
||||
require.NoError(t, err, "retrieving recent backup by ID")
|
||||
require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")
|
||||
|
||||
_, b, errs := suite.dpnd.repo.GetBackupDetails(ctx, bIDs)
|
||||
require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
|
||||
require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
|
||||
require.Equal(t, bIDs, string(b.ID), "repo details matches results id")
|
||||
|
||||
suite.backupOps[set] = string(b.ID)
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_channelMessages() {
|
||||
runGroupsListCmdTest(suite, path.ChannelMessagesCategory)
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_conversations() {
|
||||
runGroupsListCmdTest(suite, path.ConversationPostsCategory)
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_libraries() {
|
||||
runGroupsListCmdTest(suite, path.LibrariesCategory)
|
||||
}
|
||||
|
||||
func runGroupsListCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "list", "groups",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
|
||||
cli.BuildCommandTree(cmd)
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// compare the output
|
||||
result := suite.dpnd.recorder.String()
|
||||
assert.Contains(t, result, suite.backupOps[category])
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_channelMessages() {
|
||||
runGroupsListSingleCmdTest(suite, path.ChannelMessagesCategory)
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_conversations() {
|
||||
runGroupsListSingleCmdTest(suite, path.ConversationPostsCategory)
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_libraries() {
|
||||
runGroupsListSingleCmdTest(suite, path.LibrariesCategory)
|
||||
}
|
||||
|
||||
func runGroupsListSingleCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
bID := suite.backupOps[category]
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "list", "groups",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--backup", string(bID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// compare the output
|
||||
result := suite.dpnd.recorder.String()
|
||||
assert.Contains(t, result, bID)
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_badID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "list", "groups",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--backup", "smarfs")
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages() {
|
||||
runGroupsDetailsCmdTest(suite, path.ChannelMessagesCategory)
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_conversations() {
|
||||
// skip
|
||||
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
|
||||
runGroupsDetailsCmdTest(suite, path.ConversationPostsCategory)
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_libraries() {
|
||||
runGroupsDetailsCmdTest(suite, path.LibrariesCategory)
|
||||
}
|
||||
|
||||
func runGroupsDetailsCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
bID := suite.backupOps[category]
|
||||
|
||||
// fetch the details from the repo first
|
||||
deets, _, errs := suite.dpnd.repo.GetBackupDetails(ctx, string(bID))
|
||||
require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
|
||||
require.Empty(t, errs.Recovered())
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "details", "groups",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupFN, string(bID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// compare the output
|
||||
result := suite.dpnd.recorder.String()
|
||||
|
||||
i := 0
|
||||
foundFolders := 0
|
||||
|
||||
for _, ent := range deets.Entries {
|
||||
// Skip folders as they don't mean anything to the end group.
|
||||
if ent.Folder != nil {
|
||||
foundFolders++
|
||||
continue
|
||||
}
|
||||
|
||||
suite.Run(fmt.Sprintf("detail %d", i), func() {
|
||||
assert.Contains(suite.T(), result, ent.ShortRef)
|
||||
})
|
||||
|
||||
i++
|
||||
}
|
||||
|
||||
// We only backup the default folder for each category so there should be at
|
||||
// least that folder (we don't make details entries for prefix folders).
|
||||
assert.GreaterOrEqual(t, foundFolders, 1)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests for deleting backups
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type BackupDeleteGroupsE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
backupOps [3]operations.BackupOperation
|
||||
}
|
||||
|
||||
func TestBackupDeleteGroupsE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &BackupDeleteGroupsE2ESuite{
|
||||
Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
|
||||
})
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteGroupsE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
|
||||
|
||||
m365GroupID := tconfig.M365TeamID(t)
|
||||
groups := []string{m365GroupID}
|
||||
|
||||
// some tests require an existing backup
|
||||
sel := selectors.NewGroupsBackup(groups)
|
||||
sel.Include(selTD.GroupsBackupChannelScope(sel))
|
||||
|
||||
for i := 0; i < cap(suite.backupOps); i++ {
|
||||
backupOp, err := suite.dpnd.repo.NewBackup(ctx, sel.Selector)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
suite.backupOps[i] = backupOp
|
||||
|
||||
err = suite.backupOps[i].Run(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteGroupsE2ESuite) TestGroupsBackupDeleteCmd() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "delete", "groups",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupIDsFN,
|
||||
fmt.Sprintf("%s,%s",
|
||||
string(suite.backupOps[0].Results.BackupID),
|
||||
string(suite.backupOps[1].Results.BackupID)))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// a follow-up details call should fail, due to the backup ID being deleted
|
||||
cmd = cliTD.StubRootCmd(
|
||||
"backup", "details", "groups",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--backups", string(suite.backupOps[0].Results.BackupID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
err = cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteGroupsE2ESuite) TestGroupsBackupDeleteCmd_SingleID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "delete", "groups",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupFN,
|
||||
string(suite.backupOps[2].Results.BackupID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// a follow-up details call should fail, due to the backup ID being deleted
|
||||
cmd = cliTD.StubRootCmd(
|
||||
"backup", "details", "groups",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--backup", string(suite.backupOps[2].Results.BackupID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
err = cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteGroupsE2ESuite) TestGroupsBackupDeleteCmd_UnknownID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "delete", "groups",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupIDsFN, uuid.NewString())
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
// unknown backupIDs should error since the modelStore can't find the backup
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteGroupsE2ESuite) TestGroupsBackupDeleteCmd_NoBackupID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "delete", "groups",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
// empty backupIDs should error since no data provided
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func buildGroupsBackupCmd(
|
||||
ctx context.Context,
|
||||
configFile, group, category string,
|
||||
recorder *strings.Builder,
|
||||
) (*cobra.Command, context.Context) {
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "create", "groups",
|
||||
"--"+flags.ConfigFileFN, configFile,
|
||||
"--"+flags.GroupFN, group,
|
||||
"--"+flags.CategoryDataFN, category)
|
||||
cli.BuildCommandTree(cmd)
|
||||
cmd.SetOut(recorder)
|
||||
|
||||
return cmd, print.SetRootCmd(ctx, cmd)
|
||||
}
|
||||
@ -1,273 +0,0 @@
|
||||
package backup
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
|
||||
cliTD "github.com/alcionai/corso/src/cli/testdata"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
)
|
||||
|
||||
type GroupsUnitSuite struct {
|
||||
tester.Suite
|
||||
}
|
||||
|
||||
func TestGroupsUnitSuite(t *testing.T) {
|
||||
suite.Run(t, &GroupsUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||
}
|
||||
|
||||
func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
|
||||
expectUse := groupsServiceCommand
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
use string
|
||||
expectUse string
|
||||
expectShort string
|
||||
expectRunE func(*cobra.Command, []string) error
|
||||
}{
|
||||
{
|
||||
name: "create groups",
|
||||
use: createCommand,
|
||||
expectUse: expectUse + " " + groupsServiceCommandCreateUseSuffix,
|
||||
expectShort: groupsCreateCmd().Short,
|
||||
expectRunE: createGroupsCmd,
|
||||
},
|
||||
{
|
||||
name: "list groups",
|
||||
use: listCommand,
|
||||
expectUse: expectUse,
|
||||
expectShort: groupsListCmd().Short,
|
||||
expectRunE: listGroupsCmd,
|
||||
},
|
||||
{
|
||||
name: "details groups",
|
||||
use: detailsCommand,
|
||||
expectUse: expectUse + " " + groupsServiceCommandDetailsUseSuffix,
|
||||
expectShort: groupsDetailsCmd().Short,
|
||||
expectRunE: detailsGroupsCmd,
|
||||
},
|
||||
{
|
||||
name: "delete groups",
|
||||
use: deleteCommand,
|
||||
expectUse: expectUse + " " + groupsServiceCommandDeleteUseSuffix,
|
||||
expectShort: groupsDeleteCmd().Short,
|
||||
expectRunE: deleteGroupsCmd,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := &cobra.Command{Use: test.use}
|
||||
|
||||
c := addGroupsCommands(cmd)
|
||||
require.NotNil(t, c)
|
||||
|
||||
cmds := cmd.Commands()
|
||||
require.Len(t, cmds, 1)
|
||||
|
||||
child := cmds[0]
|
||||
assert.Equal(t, test.expectUse, child.Use)
|
||||
assert.Equal(t, test.expectShort, child.Short)
|
||||
tester.AreSameFunc(t, test.expectRunE, child.RunE)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *GroupsUnitSuite) TestValidateGroupsBackupCreateFlags() {
|
||||
table := []struct {
|
||||
name string
|
||||
cats []string
|
||||
expect assert.ErrorAssertionFunc
|
||||
}{
|
||||
{
|
||||
name: "none",
|
||||
cats: []string{},
|
||||
expect: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "libraries",
|
||||
cats: []string{flags.DataLibraries},
|
||||
expect: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "messages",
|
||||
cats: []string{flags.DataMessages},
|
||||
expect: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "conversations",
|
||||
cats: []string{flags.DataConversations},
|
||||
expect: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "all allowed",
|
||||
cats: []string{
|
||||
flags.DataLibraries,
|
||||
flags.DataMessages,
|
||||
flags.DataConversations,
|
||||
},
|
||||
expect: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "bad inputs",
|
||||
cats: []string{"foo"},
|
||||
expect: assert.Error,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
err := validateGroupsBackupCreateFlags([]string{"*"}, test.cats)
|
||||
test.expect(suite.T(), err, clues.ToCore(err))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *GroupsUnitSuite) TestBackupCreateFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: createCommand},
|
||||
addGroupsCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
groupsServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.GroupFN, flagsTD.FlgInputs(flagsTD.GroupsInput),
|
||||
"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.GroupsCategoryDataInput),
|
||||
"--" + flags.FetchParallelismFN, flagsTD.FetchParallelism,
|
||||
"--" + flags.DisableDeltaFN,
|
||||
"--" + flags.DisableLazyItemReaderFN,
|
||||
},
|
||||
flagsTD.PreparedGenericBackupFlags(),
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
opts := utils.MakeGroupsOpts(cmd)
|
||||
co := utils.Control()
|
||||
backupOpts := utils.ParseBackupOptions()
|
||||
|
||||
// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
|
||||
// restore flags are switched over too and we no longer parse flags beyond
|
||||
// connection info into control.Options.
|
||||
assert.Equal(t, flagsTD.FetchParallelism, strconv.Itoa(backupOpts.Parallelism.ItemFetch))
|
||||
assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
|
||||
assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
|
||||
assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)
|
||||
assert.True(t, backupOpts.M365.DisableDeltaEndpoint)
|
||||
|
||||
assert.Equal(t, flagsTD.FetchParallelism, strconv.Itoa(co.Parallelism.ItemFetch))
|
||||
assert.Equal(t, control.FailFast, co.FailureHandling)
|
||||
assert.True(t, co.ToggleFeatures.DisableIncrementals)
|
||||
assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
|
||||
assert.True(t, co.ToggleFeatures.DisableDelta)
|
||||
assert.True(t, co.ToggleFeatures.DisableLazyItemReader)
|
||||
|
||||
assert.ElementsMatch(t, flagsTD.GroupsInput, opts.Groups)
|
||||
flagsTD.AssertGenericBackupFlags(t, cmd)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *GroupsUnitSuite) TestBackupListFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: listCommand},
|
||||
addGroupsCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
groupsServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
},
|
||||
flagsTD.PreparedBackupListFlags(),
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
flagsTD.AssertBackupListFlags(t, cmd)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *GroupsUnitSuite) TestBackupDetailsFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: detailsCommand},
|
||||
addGroupsCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
groupsServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
"--" + flags.SkipReduceFN,
|
||||
},
|
||||
flagsTD.PreparedChannelFlags(),
|
||||
flagsTD.PreparedConversationFlags(),
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags(),
|
||||
flagsTD.PreparedLibraryFlags()))
|
||||
|
||||
co := utils.Control()
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
assert.True(t, co.SkipReduce)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
flagsTD.AssertChannelFlags(t, cmd)
|
||||
flagsTD.AssertConversationFlags(t, cmd)
|
||||
flagsTD.AssertLibraryFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *GroupsUnitSuite) TestBackupDeleteFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: deleteCommand},
|
||||
addGroupsCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
groupsServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
},
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
53
src/cli/backup/help_e2e_test.go
Normal file
53
src/cli/backup/help_e2e_test.go
Normal file
@ -0,0 +1,53 @@
|
||||
package backup_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/viper"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/config"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
"github.com/alcionai/corso/src/pkg/repository"
|
||||
"github.com/alcionai/corso/src/pkg/storage"
|
||||
)
|
||||
|
||||
func prepM365Test(
|
||||
t *testing.T,
|
||||
ctx context.Context, //revive:disable-line:context-as-argument
|
||||
) (
|
||||
account.Account,
|
||||
storage.Storage,
|
||||
repository.Repository,
|
||||
*viper.Viper,
|
||||
strings.Builder,
|
||||
string,
|
||||
) {
|
||||
var (
|
||||
acct = tester.NewM365Account(t)
|
||||
st = tester.NewPrefixedS3Storage(t)
|
||||
recorder = strings.Builder{}
|
||||
)
|
||||
|
||||
cfg, err := st.S3Config()
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
force := map[string]string{
|
||||
tester.TestCfgAccountProvider: "M365",
|
||||
tester.TestCfgStorageProvider: "S3",
|
||||
tester.TestCfgPrefix: cfg.Prefix,
|
||||
}
|
||||
|
||||
vpr, cfgFP := tester.MakeTempTestConfigClone(t, force)
|
||||
ctx = config.SetViper(ctx, vpr)
|
||||
|
||||
repo, err := repository.Initialize(ctx, acct, st, control.Defaults())
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
return acct, st, repo, vpr, recorder, cfgFP
|
||||
}
|
||||
@ -1,98 +0,0 @@
|
||||
package backup_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/viper"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/alcionai/corso/src/cli"
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/print"
|
||||
cliTD "github.com/alcionai/corso/src/cli/testdata"
|
||||
"github.com/alcionai/corso/src/internal/tester/tconfig"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
"github.com/alcionai/corso/src/pkg/config"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/repository"
|
||||
"github.com/alcionai/corso/src/pkg/storage"
|
||||
"github.com/alcionai/corso/src/pkg/storage/testdata"
|
||||
)
|
||||
|
||||
type dependencies struct {
|
||||
st storage.Storage
|
||||
repo repository.Repositoryer
|
||||
vpr *viper.Viper
|
||||
recorder strings.Builder
|
||||
configFilePath string
|
||||
}
|
||||
|
||||
func prepM365Test(
|
||||
t *testing.T,
|
||||
ctx context.Context, //revive:disable-line:context-as-argument
|
||||
pst path.ServiceType,
|
||||
) dependencies {
|
||||
var (
|
||||
acct = tconfig.NewM365Account(t)
|
||||
st = testdata.NewPrefixedS3Storage(t)
|
||||
recorder = strings.Builder{}
|
||||
)
|
||||
|
||||
cfg, err := st.ToS3Config()
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
force := map[string]string{
|
||||
tconfig.TestCfgAccountProvider: account.ProviderM365.String(),
|
||||
tconfig.TestCfgStorageProvider: storage.ProviderS3.String(),
|
||||
tconfig.TestCfgPrefix: cfg.Prefix,
|
||||
}
|
||||
|
||||
vpr, cfgFP := tconfig.MakeTempTestConfigClone(t, force)
|
||||
ctx = config.SetViper(ctx, vpr)
|
||||
|
||||
repo, err := repository.New(
|
||||
ctx,
|
||||
acct,
|
||||
st,
|
||||
control.DefaultOptions(),
|
||||
repository.NewRepoID)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
err = repo.Initialize(ctx, repository.InitConfig{
|
||||
Service: pst,
|
||||
})
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
return dependencies{
|
||||
st: st,
|
||||
repo: repo,
|
||||
vpr: vpr,
|
||||
recorder: recorder,
|
||||
configFilePath: cfgFP,
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// funcs
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func buildExchangeBackupCmd(
|
||||
ctx context.Context,
|
||||
configFile, user, category string,
|
||||
recorder *strings.Builder,
|
||||
) (*cobra.Command, context.Context) {
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "create", "exchange",
|
||||
"--"+flags.ConfigFileFN, configFile,
|
||||
"--"+flags.UserFN, user,
|
||||
"--"+flags.CategoryDataFN, category)
|
||||
cli.BuildCommandTree(cmd)
|
||||
cmd.SetOut(recorder)
|
||||
|
||||
return cmd, print.SetRootCmd(ctx, cmd)
|
||||
}
|
||||
@ -1,15 +1,21 @@
|
||||
package backup
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/options"
|
||||
. "github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/repository"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
)
|
||||
|
||||
@ -19,8 +25,8 @@ import (
|
||||
|
||||
const (
|
||||
oneDriveServiceCommand = "onedrive"
|
||||
oneDriveServiceCommandCreateUseSuffix = "--user <email> | '" + flags.Wildcard + "'"
|
||||
oneDriveServiceCommandDeleteUseSuffix = "--backups <backupId>"
|
||||
oneDriveServiceCommandCreateUseSuffix = "--user <email> | '" + utils.Wildcard + "'"
|
||||
oneDriveServiceCommandDeleteUseSuffix = "--backup <backupId>"
|
||||
oneDriveServiceCommandDetailsUseSuffix = "--backup <backupId>"
|
||||
)
|
||||
|
||||
@ -34,20 +40,19 @@ corso backup create onedrive --user alice@example.com,bob@example.com
|
||||
# Backup all OneDrive data for all M365 users
|
||||
corso backup create onedrive --user '*'`
|
||||
|
||||
oneDriveServiceCommandDeleteExamples = `# Delete OneDrive backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
and 1234abcd-12ab-cd34-56de-1234abce
|
||||
corso backup delete onedrive --backups 1234abcd-12ab-cd34-56de-1234abcd,1234abcd-12ab-cd34-56de-1234abce`
|
||||
oneDriveServiceCommandDeleteExamples = `# Delete OneDrive backup with ID 1234abcd-12ab-cd34-56de-1234abcd
|
||||
corso backup delete onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd`
|
||||
|
||||
oneDriveServiceCommandDetailsExamples = `# Explore items in Bob's latest backup (1234abcd...)
|
||||
corso backup details onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd
|
||||
oneDriveServiceCommandDetailsExamples = `# Explore Alice's files from backup 1234abcd-12ab-cd34-56de-1234abcd
|
||||
corso backup details onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd --user alice@example.com
|
||||
|
||||
# Explore files in the folder "Reports" named "Fiscal 22"
|
||||
# Explore Alice or Bob's files with name containing "Fiscal 22" in folder "Reports"
|
||||
corso backup details onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--file-name "Fiscal 22" --folder "Reports"
|
||||
--user alice@example.com,bob@example.com --file-name "Fiscal 22" --folder "Reports"
|
||||
|
||||
# Explore files created before the end of 2015
|
||||
# Explore Alice's files created before end of 2015 from a specific backup
|
||||
corso backup details onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--file-created-before 2015-01-01T00:00:00`
|
||||
--user alice@example.com --file-created-before 2015-01-01T00:00:00`
|
||||
)
|
||||
|
||||
// called by backup.go to map subcommands to provider-specific handling.
|
||||
@ -60,43 +65,43 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
|
||||
switch cmd.Use {
|
||||
case createCommand:
|
||||
c, fs = utils.AddCommand(cmd, oneDriveCreateCmd())
|
||||
fs.SortFlags = false
|
||||
|
||||
c.Use = c.Use + " " + oneDriveServiceCommandCreateUseSuffix
|
||||
c.Example = oneDriveServiceCommandCreateExamples
|
||||
|
||||
flags.AddUserFlag(c)
|
||||
flags.AddGenericBackupFlags(c)
|
||||
fs.BoolVar(
|
||||
&flags.UseOldDeltaProcessFV,
|
||||
flags.UseOldDeltaProcessFN,
|
||||
false,
|
||||
"process backups using the old delta processor instead of tree-based enumeration")
|
||||
cobra.CheckErr(fs.MarkHidden(flags.UseOldDeltaProcessFN))
|
||||
utils.AddUserFlag(c)
|
||||
options.AddFailFastFlag(c)
|
||||
options.AddDisableIncrementalsFlag(c)
|
||||
|
||||
case listCommand:
|
||||
c, _ = utils.AddCommand(cmd, oneDriveListCmd())
|
||||
c, fs = utils.AddCommand(cmd, oneDriveListCmd())
|
||||
fs.SortFlags = false
|
||||
|
||||
flags.AddBackupIDFlag(c, false)
|
||||
flags.AddAllBackupListFlags(c)
|
||||
utils.AddBackupIDFlag(c, false)
|
||||
addFailedItemsFN(c)
|
||||
addSkippedItemsFN(c)
|
||||
addRecoveredErrorsFN(c)
|
||||
|
||||
case detailsCommand:
|
||||
c, _ = utils.AddCommand(cmd, oneDriveDetailsCmd())
|
||||
c, fs = utils.AddCommand(cmd, oneDriveDetailsCmd())
|
||||
fs.SortFlags = false
|
||||
|
||||
c.Use = c.Use + " " + oneDriveServiceCommandDetailsUseSuffix
|
||||
c.Example = oneDriveServiceCommandDetailsExamples
|
||||
|
||||
flags.AddSkipReduceFlag(c)
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddOneDriveDetailsAndRestoreFlags(c)
|
||||
options.AddSkipReduceFlag(c)
|
||||
utils.AddBackupIDFlag(c, true)
|
||||
utils.AddOneDriveDetailsAndRestoreFlags(c)
|
||||
|
||||
case deleteCommand:
|
||||
c, _ = utils.AddCommand(cmd, oneDriveDeleteCmd())
|
||||
c, fs = utils.AddCommand(cmd, oneDriveDeleteCmd())
|
||||
fs.SortFlags = false
|
||||
|
||||
c.Use = c.Use + " " + oneDriveServiceCommandDeleteUseSuffix
|
||||
c.Example = oneDriveServiceCommandDeleteExamples
|
||||
|
||||
flags.AddMultipleBackupIDsFlag(c, false)
|
||||
flags.AddBackupIDFlag(c, false)
|
||||
utils.AddBackupIDFlag(c, true)
|
||||
}
|
||||
|
||||
return c
|
||||
@ -125,32 +130,20 @@ func createOneDriveCmd(cmd *cobra.Command, args []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := validateOneDriveBackupCreateFlags(flags.UserFV); err != nil {
|
||||
if err := validateOneDriveBackupCreateFlags(utils.UserFV); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r, acct, err := utils.AccountConnectAndWriteRepoConfig(
|
||||
ctx,
|
||||
cmd,
|
||||
path.OneDriveService)
|
||||
r, acct, err := utils.GetAccountAndConnect(ctx)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
defer utils.CloseRepo(ctx, r)
|
||||
|
||||
sel := oneDriveBackupCreateSelectors(flags.UserFV)
|
||||
sel := oneDriveBackupCreateSelectors(utils.UserFV)
|
||||
|
||||
ins, err := utils.UsersMap(
|
||||
ctx,
|
||||
*acct,
|
||||
utils.Control(),
|
||||
r.Counter(),
|
||||
fault.New(true))
|
||||
ins, err := utils.UsersMap(ctx, *acct, fault.New(true))
|
||||
if err != nil {
|
||||
return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 users"))
|
||||
}
|
||||
@ -161,10 +154,10 @@ func createOneDriveCmd(cmd *cobra.Command, args []string) error {
|
||||
selectorSet = append(selectorSet, discSel.Selector)
|
||||
}
|
||||
|
||||
return genericCreateCommand(
|
||||
return runBackups(
|
||||
ctx,
|
||||
r,
|
||||
"OneDrive",
|
||||
"OneDrive", "user",
|
||||
selectorSet,
|
||||
ins)
|
||||
}
|
||||
@ -200,7 +193,7 @@ func oneDriveListCmd() *cobra.Command {
|
||||
|
||||
// lists the history of backup operations
|
||||
func listOneDriveCmd(cmd *cobra.Command, args []string) error {
|
||||
return genericListCommand(cmd, flags.BackupIDFV, path.OneDriveService, args)
|
||||
return genericListCommand(cmd, utils.BackupIDFV, path.OneDriveService, args)
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
@ -224,35 +217,71 @@ func detailsOneDriveCmd(cmd *cobra.Command, args []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
return runDetailsOneDriveCmd(cmd)
|
||||
}
|
||||
|
||||
func runDetailsOneDriveCmd(cmd *cobra.Command) error {
|
||||
ctx := cmd.Context()
|
||||
opts := utils.MakeOneDriveOpts(cmd)
|
||||
|
||||
sel := utils.IncludeOneDriveRestoreDataSelectors(opts)
|
||||
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
|
||||
utils.FilterOneDriveRestoreInfoSelectors(sel, opts)
|
||||
|
||||
ds, err := genericDetailsCommand(cmd, flags.BackupIDFV, sel.Selector)
|
||||
r, _, err := utils.GetAccountAndConnect(ctx)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
if len(ds.Entries) > 0 {
|
||||
ds.PrintEntries(ctx)
|
||||
} else {
|
||||
Info(ctx, selectors.ErrorNoMatchingItems)
|
||||
defer utils.CloseRepo(ctx, r)
|
||||
|
||||
ctrlOpts := options.Control()
|
||||
|
||||
ds, err := runDetailsOneDriveCmd(ctx, r, utils.BackupIDFV, opts, ctrlOpts.SkipReduce)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
if len(ds.Entries) == 0 {
|
||||
Info(ctx, selectors.ErrorNoMatchingItems)
|
||||
return nil
|
||||
}
|
||||
|
||||
ds.PrintEntries(ctx)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// runDetailsOneDriveCmd actually performs the lookup in backup details.
|
||||
// the fault.Errors return is always non-nil. Callers should check if
|
||||
// errs.Failure() == nil.
|
||||
func runDetailsOneDriveCmd(
|
||||
ctx context.Context,
|
||||
r repository.BackupGetter,
|
||||
backupID string,
|
||||
opts utils.OneDriveOpts,
|
||||
skipReduce bool,
|
||||
) (*details.Details, error) {
|
||||
if err := utils.ValidateOneDriveRestoreFlags(backupID, opts); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ctx = clues.Add(ctx, "backup_id", backupID)
|
||||
|
||||
d, _, errs := r.GetBackupDetails(ctx, backupID)
|
||||
// TODO: log/track recoverable errors
|
||||
if errs.Failure() != nil {
|
||||
if errors.Is(errs.Failure(), data.ErrNotFound) {
|
||||
return nil, clues.New("no backup exists with the id " + backupID)
|
||||
}
|
||||
|
||||
return nil, clues.Wrap(errs.Failure(), "Failed to get backup details in the repository")
|
||||
}
|
||||
|
||||
ctx = clues.Add(ctx, "details_entries", len(d.Entries))
|
||||
|
||||
if !skipReduce {
|
||||
sel := utils.IncludeOneDriveRestoreDataSelectors(opts)
|
||||
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
|
||||
utils.FilterOneDriveRestoreInfoSelectors(sel, opts)
|
||||
d = sel.Reduce(ctx, d, errs)
|
||||
}
|
||||
|
||||
return d, nil
|
||||
}
|
||||
|
||||
// `corso backup delete onedrive [<flag>...]`
|
||||
func oneDriveDeleteCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
@ -266,15 +295,5 @@ func oneDriveDeleteCmd() *cobra.Command {
|
||||
|
||||
// deletes a oneDrive service backup.
|
||||
func deleteOneDriveCmd(cmd *cobra.Command, args []string) error {
|
||||
backupIDValue := []string{}
|
||||
|
||||
if len(flags.BackupIDsFV) > 0 {
|
||||
backupIDValue = flags.BackupIDsFV
|
||||
} else if len(flags.BackupIDFV) > 0 {
|
||||
backupIDValue = append(backupIDValue, flags.BackupIDFV)
|
||||
} else {
|
||||
return clues.New("either --backup or --backups flag is required")
|
||||
}
|
||||
|
||||
return genericDeleteCommand(cmd, path.OneDriveService, "OneDrive", backupIDValue, args)
|
||||
return genericDeleteCommand(cmd, utils.BackupIDFV, "OneDrive", args)
|
||||
}
|
||||
|
||||
@ -7,23 +7,22 @@ import (
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/google/uuid"
|
||||
"github.com/spf13/viper"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli"
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/config"
|
||||
"github.com/alcionai/corso/src/cli/print"
|
||||
cliTD "github.com/alcionai/corso/src/cli/testdata"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/internal/common/idname"
|
||||
"github.com/alcionai/corso/src/internal/operations"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/internal/tester/tconfig"
|
||||
"github.com/alcionai/corso/src/pkg/config"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
"github.com/alcionai/corso/src/pkg/repository"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
|
||||
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
|
||||
"github.com/alcionai/corso/src/pkg/storage"
|
||||
)
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -32,42 +31,56 @@ import (
|
||||
|
||||
type NoBackupOneDriveE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
acct account.Account
|
||||
st storage.Storage
|
||||
vpr *viper.Viper
|
||||
cfgFP string
|
||||
repo repository.Repository
|
||||
m365UserID string
|
||||
recorder strings.Builder
|
||||
}
|
||||
|
||||
func TestNoBackupOneDriveE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &NoBackupOneDriveE2ESuite{
|
||||
Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
|
||||
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
|
||||
tester.CorsoCITests,
|
||||
),
|
||||
})
|
||||
}
|
||||
|
||||
func (suite *NoBackupOneDriveE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
suite.dpnd = prepM365Test(t, ctx, path.OneDriveService)
|
||||
t := suite.T()
|
||||
acct, st, repo, vpr, recorder, cfgFilePath := prepM365Test(t, ctx)
|
||||
|
||||
suite.acct = acct
|
||||
suite.st = st
|
||||
suite.repo = repo
|
||||
suite.recorder = recorder
|
||||
suite.vpr = vpr
|
||||
suite.cfgFP = cfgFilePath
|
||||
suite.m365UserID = tester.M365UserID(t)
|
||||
}
|
||||
|
||||
func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupListCmd_empty() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
suite.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
cmd := tester.StubRootCmd(
|
||||
"backup", "list", "onedrive",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
|
||||
"--config-file", suite.cfgFP)
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetErr(&suite.dpnd.recorder)
|
||||
cmd.SetErr(&suite.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
@ -75,26 +88,26 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupListCmd_empty() {
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
result := suite.dpnd.recorder.String()
|
||||
result := suite.recorder.String()
|
||||
|
||||
// as an offhand check: the result should contain the m365 user id
|
||||
assert.True(t, strings.HasSuffix(result, "No backups available\n"))
|
||||
assert.Equal(t, "No backups available\n", result)
|
||||
}
|
||||
|
||||
func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupCmd_userNotInTenant() {
|
||||
func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupCmd_UserNotInTenant() {
|
||||
recorder := strings.Builder{}
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
cmd := tester.StubRootCmd(
|
||||
"backup", "create", "onedrive",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.UserFN, "foo@not-there.com")
|
||||
"--config-file", suite.cfgFP,
|
||||
"--"+utils.UserFN, "foo@nothere.com")
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetOut(&recorder)
|
||||
@ -107,8 +120,7 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupCmd_userNotInTenant() {
|
||||
assert.Contains(
|
||||
t,
|
||||
err.Error(),
|
||||
"not found",
|
||||
"error missing user not found")
|
||||
"not found in tenant", "error missing user not found")
|
||||
assert.NotContains(t, err.Error(), "runtime error", "panic happened")
|
||||
|
||||
t.Logf("backup error message: %s", err.Error())
|
||||
@ -123,66 +135,73 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupCmd_userNotInTenant() {
|
||||
|
||||
type BackupDeleteOneDriveE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
backupOps [3]operations.BackupOperation
|
||||
acct account.Account
|
||||
st storage.Storage
|
||||
vpr *viper.Viper
|
||||
cfgFP string
|
||||
repo repository.Repository
|
||||
backupOp operations.BackupOperation
|
||||
recorder strings.Builder
|
||||
}
|
||||
|
||||
func TestBackupDeleteOneDriveE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &BackupDeleteOneDriveE2ESuite{
|
||||
Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
|
||||
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
|
||||
tester.CorsoCITests,
|
||||
),
|
||||
})
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteOneDriveE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
suite.dpnd = prepM365Test(t, ctx, path.OneDriveService)
|
||||
t := suite.T()
|
||||
acct, st, repo, vpr, recorder, cfgFilePath := prepM365Test(t, ctx)
|
||||
|
||||
suite.acct = acct
|
||||
suite.st = st
|
||||
suite.repo = repo
|
||||
suite.recorder = recorder
|
||||
suite.vpr = vpr
|
||||
suite.cfgFP = cfgFilePath
|
||||
|
||||
var (
|
||||
m365UserID = tconfig.M365UserID(t)
|
||||
m365UserID = tester.M365UserID(t)
|
||||
users = []string{m365UserID}
|
||||
ins = idname.NewCache(map[string]string{m365UserID: m365UserID})
|
||||
)
|
||||
|
||||
// some tests require an existing backup
|
||||
sel := selectors.NewOneDriveBackup(users)
|
||||
sel.Include(selTD.OneDriveBackupFolderScope(sel))
|
||||
sel.Include(sel.Folders(selectors.Any()))
|
||||
|
||||
for i := 0; i < cap(suite.backupOps); i++ {
|
||||
backupOp, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
|
||||
backupOp, err := suite.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
suite.backupOps[i] = backupOp
|
||||
suite.backupOp = backupOp
|
||||
|
||||
err = suite.backupOps[i].Run(ctx)
|
||||
err = suite.backupOp.Run(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
suite.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
cmd := tester.StubRootCmd(
|
||||
"backup", "delete", "onedrive",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupIDsFN,
|
||||
fmt.Sprintf("%s,%s",
|
||||
string(suite.backupOps[0].Results.BackupID),
|
||||
string(suite.backupOps[1].Results.BackupID)))
|
||||
"--config-file", suite.cfgFP,
|
||||
"--"+utils.BackupFN, string(suite.backupOp.Results.BackupID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
cmd.SetErr(&suite.dpnd.recorder)
|
||||
cmd.SetErr(&suite.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
@ -190,61 +209,15 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd() {
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
result := suite.dpnd.recorder.String()
|
||||
assert.True(t,
|
||||
strings.HasSuffix(
|
||||
result,
|
||||
fmt.Sprintf("Deleted OneDrive backup [%s %s]\n",
|
||||
string(suite.backupOps[0].Results.BackupID),
|
||||
string(suite.backupOps[1].Results.BackupID))))
|
||||
result := suite.recorder.String()
|
||||
|
||||
assert.Equal(t, fmt.Sprintf("Deleted OneDrive backup %s\n", string(suite.backupOp.Results.BackupID)), result)
|
||||
|
||||
// a follow-up details call should fail, due to the backup ID being deleted
|
||||
cmd = cliTD.StubRootCmd(
|
||||
cmd = tester.StubRootCmd(
|
||||
"backup", "details", "onedrive",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--backups", string(suite.backupOps[0].Results.BackupID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
err = cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_SingleID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "delete", "onedrive",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupFN,
|
||||
string(suite.backupOps[2].Results.BackupID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
cmd.SetErr(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
result := suite.dpnd.recorder.String()
|
||||
assert.True(t,
|
||||
strings.HasSuffix(
|
||||
result,
|
||||
fmt.Sprintf("Deleted OneDrive backup [%s]\n",
|
||||
string(suite.backupOps[2].Results.BackupID))))
|
||||
|
||||
// a follow-up details call should fail, due to the backup ID being deleted
|
||||
cmd = cliTD.StubRootCmd(
|
||||
"backup", "details", "onedrive",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--backup", string(suite.backupOps[0].Results.BackupID))
|
||||
"--config-file", suite.cfgFP,
|
||||
"--backup", string(suite.backupOp.Results.BackupID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
err = cmd.ExecuteContext(ctx)
|
||||
@ -253,37 +226,18 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_SingleID(
|
||||
|
||||
func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_unknownID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
cmd := tester.StubRootCmd(
|
||||
"backup", "delete", "onedrive",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupIDsFN, uuid.NewString())
|
||||
"--config-file", suite.cfgFP,
|
||||
"--"+utils.BackupFN, uuid.NewString())
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
// unknown backupIDs should error since the modelStore can't find the backup
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_NoBackupID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "delete", "onedrive",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
// empty backupIDs should error since no data provided
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
@ -9,12 +9,10 @@ import (
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
|
||||
cliTD "github.com/alcionai/corso/src/cli/testdata"
|
||||
"github.com/alcionai/corso/src/cli/options"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/cli/utils/testdata"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
)
|
||||
|
||||
type OneDriveUnitSuite struct {
|
||||
@ -33,35 +31,57 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
|
||||
use string
|
||||
expectUse string
|
||||
expectShort string
|
||||
flags []string
|
||||
expectRunE func(*cobra.Command, []string) error
|
||||
}{
|
||||
{
|
||||
name: "create onedrive",
|
||||
use: createCommand,
|
||||
expectUse: expectUse + " " + oneDriveServiceCommandCreateUseSuffix,
|
||||
expectShort: oneDriveCreateCmd().Short,
|
||||
expectRunE: createOneDriveCmd,
|
||||
"create onedrive",
|
||||
createCommand,
|
||||
expectUse + " " + oneDriveServiceCommandCreateUseSuffix,
|
||||
oneDriveCreateCmd().Short,
|
||||
[]string{
|
||||
utils.UserFN,
|
||||
options.DisableIncrementalsFN,
|
||||
options.FailFastFN,
|
||||
},
|
||||
createOneDriveCmd,
|
||||
},
|
||||
{
|
||||
name: "list onedrive",
|
||||
use: listCommand,
|
||||
expectUse: expectUse,
|
||||
expectShort: oneDriveListCmd().Short,
|
||||
expectRunE: listOneDriveCmd,
|
||||
"list onedrive",
|
||||
listCommand,
|
||||
expectUse,
|
||||
oneDriveListCmd().Short,
|
||||
[]string{
|
||||
utils.BackupFN,
|
||||
failedItemsFN,
|
||||
skippedItemsFN,
|
||||
recoveredErrorsFN,
|
||||
},
|
||||
listOneDriveCmd,
|
||||
},
|
||||
{
|
||||
name: "details onedrive",
|
||||
use: detailsCommand,
|
||||
expectUse: expectUse + " " + oneDriveServiceCommandDetailsUseSuffix,
|
||||
expectShort: oneDriveDetailsCmd().Short,
|
||||
expectRunE: detailsOneDriveCmd,
|
||||
"details onedrive",
|
||||
detailsCommand,
|
||||
expectUse + " " + oneDriveServiceCommandDetailsUseSuffix,
|
||||
oneDriveDetailsCmd().Short,
|
||||
[]string{
|
||||
utils.BackupFN,
|
||||
utils.FolderFN,
|
||||
utils.FileFN,
|
||||
utils.FileCreatedAfterFN,
|
||||
utils.FileCreatedBeforeFN,
|
||||
utils.FileModifiedAfterFN,
|
||||
utils.FileModifiedBeforeFN,
|
||||
},
|
||||
detailsOneDriveCmd,
|
||||
},
|
||||
{
|
||||
name: "delete onedrive",
|
||||
use: deleteCommand,
|
||||
expectUse: expectUse + " " + oneDriveServiceCommandDeleteUseSuffix,
|
||||
expectShort: oneDriveDeleteCmd().Short,
|
||||
expectRunE: deleteOneDriveCmd,
|
||||
"delete onedrive",
|
||||
deleteCommand,
|
||||
expectUse + " " + oneDriveServiceCommandDeleteUseSuffix,
|
||||
oneDriveDeleteCmd().Short,
|
||||
[]string{utils.BackupFN},
|
||||
deleteOneDriveCmd,
|
||||
},
|
||||
}
|
||||
|
||||
@ -81,133 +101,14 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
|
||||
assert.Equal(t, test.expectUse, child.Use)
|
||||
assert.Equal(t, test.expectShort, child.Short)
|
||||
tester.AreSameFunc(t, test.expectRunE, child.RunE)
|
||||
|
||||
for _, f := range test.flags {
|
||||
assert.NotNil(t, c.Flag(f), f+" flag")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *OneDriveUnitSuite) TestBackupCreateFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: createCommand},
|
||||
addOneDriveCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
oneDriveServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.UserFN, flagsTD.FlgInputs(flagsTD.UsersInput),
|
||||
},
|
||||
flagsTD.PreparedGenericBackupFlags(),
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
opts := utils.MakeOneDriveOpts(cmd)
|
||||
co := utils.Control()
|
||||
backupOpts := utils.ParseBackupOptions()
|
||||
|
||||
// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
|
||||
// restore flags are switched over too and we no longer parse flags beyond
|
||||
// connection info into control.Options.
|
||||
assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
|
||||
assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
|
||||
assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)
|
||||
|
||||
assert.Equal(t, control.FailFast, co.FailureHandling)
|
||||
assert.True(t, co.ToggleFeatures.DisableIncrementals)
|
||||
assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
|
||||
|
||||
assert.ElementsMatch(t, flagsTD.UsersInput, opts.Users)
|
||||
flagsTD.AssertGenericBackupFlags(t, cmd)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *OneDriveUnitSuite) TestBackupListFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: listCommand},
|
||||
addOneDriveCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
oneDriveServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
},
|
||||
flagsTD.PreparedBackupListFlags(),
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
flagsTD.AssertBackupListFlags(t, cmd)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *OneDriveUnitSuite) TestBackupDetailsFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: detailsCommand},
|
||||
addOneDriveCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
oneDriveServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
"--" + flags.SkipReduceFN,
|
||||
},
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
co := utils.Control()
|
||||
|
||||
assert.True(t, co.SkipReduce)
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *OneDriveUnitSuite) TestBackupDeleteFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: deleteCommand},
|
||||
addOneDriveCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
oneDriveServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
},
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *OneDriveUnitSuite) TestValidateOneDriveBackupCreateFlags() {
|
||||
table := []struct {
|
||||
name string
|
||||
@ -231,3 +132,43 @@ func (suite *OneDriveUnitSuite) TestValidateOneDriveBackupCreateFlags() {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *OneDriveUnitSuite) TestOneDriveBackupDetailsSelectors() {
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
for _, test := range testdata.OneDriveOptionDetailLookups {
|
||||
suite.Run(test.Name, func() {
|
||||
t := suite.T()
|
||||
|
||||
output, err := runDetailsOneDriveCmd(
|
||||
ctx,
|
||||
test.BackupGetter,
|
||||
"backup-ID",
|
||||
test.Opts,
|
||||
false)
|
||||
assert.NoError(t, err, clues.ToCore(err))
|
||||
assert.ElementsMatch(t, test.Expected, output.Entries)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *OneDriveUnitSuite) TestOneDriveBackupDetailsSelectorsBadFormats() {
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
for _, test := range testdata.BadOneDriveOptionsFormats {
|
||||
suite.Run(test.Name, func() {
|
||||
t := suite.T()
|
||||
|
||||
output, err := runDetailsOneDriveCmd(
|
||||
ctx,
|
||||
test.BackupGetter,
|
||||
"backup-ID",
|
||||
test.Opts,
|
||||
false)
|
||||
assert.Error(t, err, clues.ToCore(err))
|
||||
assert.Empty(t, output)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -4,16 +4,21 @@ import (
|
||||
"context"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
"golang.org/x/exp/slices"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/options"
|
||||
. "github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/internal/common/idname"
|
||||
"github.com/alcionai/corso/src/internal/data"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/filters"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/repository"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365"
|
||||
)
|
||||
@ -22,109 +27,93 @@ import (
|
||||
// setup and globals
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
const (
|
||||
dataLibraries = "libraries"
|
||||
dataPages = "pages"
|
||||
)
|
||||
|
||||
const (
|
||||
sharePointServiceCommand = "sharepoint"
|
||||
sharePointServiceCommandCreateUseSuffix = "--site <siteURL> | '" + flags.Wildcard + "'"
|
||||
sharePointServiceCommandDeleteUseSuffix = "--backups <backupId>"
|
||||
sharePointServiceCommandCreateUseSuffix = "--site <siteURL> | '" + utils.Wildcard + "'"
|
||||
sharePointServiceCommandDeleteUseSuffix = "--backup <backupId>"
|
||||
sharePointServiceCommandDetailsUseSuffix = "--backup <backupId>"
|
||||
)
|
||||
|
||||
const (
|
||||
sharePointServiceCommandCreateExamples = `# Backup SharePoint data in the HR Site
|
||||
corso backup create sharepoint --site https://example.com/hr
|
||||
sharePointServiceCommandCreateExamples = `# Backup SharePoint data for a Site
|
||||
corso backup create sharepoint --site <siteURL>
|
||||
|
||||
# Backup SharePoint for the HR and Team sites
|
||||
# Backup SharePoint for two sites: HR and Team
|
||||
corso backup create sharepoint --site https://example.com/hr,https://example.com/team
|
||||
|
||||
# Backup all SharePoint data for all Sites
|
||||
corso backup create sharepoint --site '*'
|
||||
corso backup create sharepoint --site '*'`
|
||||
|
||||
# Backup all SharePoint list data for a Site
|
||||
corso backup create sharepoint --site https://example.com/hr --data lists
|
||||
`
|
||||
sharePointServiceCommandDeleteExamples = `# Delete SharePoint backup with ID 1234abcd-12ab-cd34-56de-1234abcd
|
||||
corso backup delete sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd`
|
||||
|
||||
sharePointServiceCommandDeleteExamples = `# Delete SharePoint backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
and 1234abcd-12ab-cd34-56de-1234abce
|
||||
corso backup delete sharepoint --backups 1234abcd-12ab-cd34-56de-1234abcd,1234abcd-12ab-cd34-56de-1234abce`
|
||||
|
||||
sharePointServiceCommandDetailsExamples = `# Explore items in the HR site's latest backup (1234abcd...)
|
||||
sharePointServiceCommandDetailsExamples = `# Explore a site's files from backup 1234abcd-12ab-cd34-56de-1234abcd
|
||||
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd
|
||||
|
||||
# Explore files in the folder "Reports" named "Fiscal 22"
|
||||
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--file-name "Fiscal 22" --folder "Reports"
|
||||
|
||||
# Explore files in the folder ""Display Templates/Style Sheets"" created before the end of 2015.
|
||||
# Find all files that were created before a certain date.
|
||||
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--file-created-before 2015-01-01T00:00:00 --folder "Display Templates/Style Sheets"
|
||||
|
||||
# Explore all files within the document library "Work Documents"
|
||||
# Find all files within a specific library.
|
||||
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--library "Work Documents"
|
||||
|
||||
# Explore lists by their name(s)
|
||||
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--list "list-name-1,list-name-2"
|
||||
|
||||
# Explore lists created after a given time
|
||||
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--list-created-after 2024-01-01T12:23:34
|
||||
|
||||
# Explore lists created before a given time
|
||||
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--list-created-before 2024-01-01T12:23:34
|
||||
|
||||
# Explore lists modified before a given time
|
||||
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--list-modified-before 2024-01-01T12:23:34
|
||||
|
||||
# Explore lists modified after a given time
|
||||
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--list-modified-after 2024-01-01T12:23:34`
|
||||
--library documents --folder "Display Templates/Style Sheets"
|
||||
`
|
||||
)
|
||||
|
||||
// called by backup.go to map subcommands to provider-specific handling.
|
||||
func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var c *cobra.Command
|
||||
var (
|
||||
c *cobra.Command
|
||||
fs *pflag.FlagSet
|
||||
)
|
||||
|
||||
switch cmd.Use {
|
||||
case createCommand:
|
||||
c, _ = utils.AddCommand(cmd, sharePointCreateCmd())
|
||||
c, fs = utils.AddCommand(cmd, sharePointCreateCmd())
|
||||
fs.SortFlags = false
|
||||
|
||||
c.Use = c.Use + " " + sharePointServiceCommandCreateUseSuffix
|
||||
c.Example = sharePointServiceCommandCreateExamples
|
||||
|
||||
flags.AddSiteFlag(c, true)
|
||||
flags.AddSiteIDFlag(c, true)
|
||||
// [TODO](hitesh) to add lists flag to invoke backup for lists
|
||||
// when explicit invoke is not required anymore
|
||||
flags.AddDataFlag(c, []string{flags.DataLibraries}, true)
|
||||
flags.AddGenericBackupFlags(c)
|
||||
utils.AddSiteFlag(c)
|
||||
utils.AddSiteIDFlag(c)
|
||||
utils.AddDataFlag(c, []string{dataLibraries}, true)
|
||||
options.AddFailFastFlag(c)
|
||||
options.AddDisableIncrementalsFlag(c)
|
||||
|
||||
case listCommand:
|
||||
c, _ = utils.AddCommand(cmd, sharePointListCmd())
|
||||
c, fs = utils.AddCommand(cmd, sharePointListCmd())
|
||||
fs.SortFlags = false
|
||||
|
||||
flags.AddBackupIDFlag(c, false)
|
||||
flags.AddAllBackupListFlags(c)
|
||||
utils.AddBackupIDFlag(c, false)
|
||||
addFailedItemsFN(c)
|
||||
addSkippedItemsFN(c)
|
||||
addRecoveredErrorsFN(c)
|
||||
|
||||
case detailsCommand:
|
||||
c, _ = utils.AddCommand(cmd, sharePointDetailsCmd())
|
||||
c, fs = utils.AddCommand(cmd, sharePointDetailsCmd())
|
||||
fs.SortFlags = false
|
||||
|
||||
c.Use = c.Use + " " + sharePointServiceCommandDetailsUseSuffix
|
||||
c.Example = sharePointServiceCommandDetailsExamples
|
||||
|
||||
flags.AddSkipReduceFlag(c)
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddSharePointDetailsAndRestoreFlags(c)
|
||||
options.AddSkipReduceFlag(c)
|
||||
utils.AddBackupIDFlag(c, true)
|
||||
utils.AddSharePointDetailsAndRestoreFlags(c)
|
||||
|
||||
case deleteCommand:
|
||||
c, _ = utils.AddCommand(cmd, sharePointDeleteCmd())
|
||||
c, fs = utils.AddCommand(cmd, sharePointDeleteCmd())
|
||||
fs.SortFlags = false
|
||||
|
||||
c.Use = c.Use + " " + sharePointServiceCommandDeleteUseSuffix
|
||||
c.Example = sharePointServiceCommandDeleteExamples
|
||||
|
||||
flags.AddMultipleBackupIDsFlag(c, false)
|
||||
flags.AddBackupIDFlag(c, false)
|
||||
utils.AddBackupIDFlag(c, true)
|
||||
}
|
||||
|
||||
return c
|
||||
@ -153,18 +142,11 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := validateSharePointBackupCreateFlags(flags.SiteIDFV, flags.WebURLFV, flags.CategoryDataFV); err != nil {
|
||||
if err := validateSharePointBackupCreateFlags(utils.SiteIDFV, utils.WebURLFV, utils.CategoryDataFV); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r, acct, err := utils.AccountConnectAndWriteRepoConfig(
|
||||
ctx,
|
||||
cmd,
|
||||
path.SharePointService)
|
||||
r, acct, err := utils.GetAccountAndConnect(ctx)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
@ -174,17 +156,12 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
|
||||
// TODO: log/print recoverable errors
|
||||
errs := fault.New(false)
|
||||
|
||||
svcCli, err := m365.NewM365Client(ctx, *acct)
|
||||
if err != nil {
|
||||
return Only(ctx, clues.Stack(err))
|
||||
}
|
||||
|
||||
ins, err := svcCli.SitesMap(ctx, errs)
|
||||
ins, err := m365.SitesMap(ctx, *acct, errs)
|
||||
if err != nil {
|
||||
return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 sites"))
|
||||
}
|
||||
|
||||
sel, err := sharePointBackupCreateSelectors(ctx, ins, flags.SiteIDFV, flags.WebURLFV, flags.CategoryDataFV)
|
||||
sel, err := sharePointBackupCreateSelectors(ctx, ins, utils.SiteIDFV, utils.WebURLFV, utils.CategoryDataFV)
|
||||
if err != nil {
|
||||
return Only(ctx, clues.Wrap(err, "Retrieving up sharepoint sites by ID and URL"))
|
||||
}
|
||||
@ -195,10 +172,10 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
|
||||
selectorSet = append(selectorSet, discSel.Selector)
|
||||
}
|
||||
|
||||
return genericCreateCommand(
|
||||
return runBackups(
|
||||
ctx,
|
||||
r,
|
||||
"SharePoint",
|
||||
"SharePoint", "site",
|
||||
selectorSet,
|
||||
ins)
|
||||
}
|
||||
@ -207,16 +184,16 @@ func validateSharePointBackupCreateFlags(sites, weburls, cats []string) error {
|
||||
if len(sites) == 0 && len(weburls) == 0 {
|
||||
return clues.New(
|
||||
"requires one or more --" +
|
||||
flags.SiteFN + " urls, or the wildcard --" +
|
||||
flags.SiteFN + " *")
|
||||
utils.SiteFN + " urls, or the wildcard --" +
|
||||
utils.SiteFN + " *",
|
||||
)
|
||||
}
|
||||
|
||||
allowedCats := utils.SharePointAllowedCategories()
|
||||
|
||||
for _, d := range cats {
|
||||
if _, ok := allowedCats[d]; !ok {
|
||||
if d != dataLibraries && d != dataPages {
|
||||
return clues.New(
|
||||
d + " is an unrecognized data type; only " + flags.DataLibraries + " supported")
|
||||
d + " is an unrecognized data type; either " + dataLibraries + "or " + dataPages,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@ -233,21 +210,39 @@ func sharePointBackupCreateSelectors(
|
||||
return selectors.NewSharePointBackup(selectors.None()), nil
|
||||
}
|
||||
|
||||
if filters.PathContains(sites).Compare(flags.Wildcard) {
|
||||
if filters.PathContains(sites).Compare(utils.Wildcard) {
|
||||
return includeAllSitesWithCategories(ins, cats), nil
|
||||
}
|
||||
|
||||
if filters.PathContains(weburls).Compare(flags.Wildcard) {
|
||||
if filters.PathContains(weburls).Compare(utils.Wildcard) {
|
||||
return includeAllSitesWithCategories(ins, cats), nil
|
||||
}
|
||||
|
||||
sel := selectors.NewSharePointBackup(append(slices.Clone(sites), weburls...))
|
||||
|
||||
return utils.AddCategories(sel, cats), nil
|
||||
return addCategories(sel, cats), nil
|
||||
}
|
||||
|
||||
func includeAllSitesWithCategories(ins idname.Cacher, categories []string) *selectors.SharePointBackup {
|
||||
return utils.AddCategories(selectors.NewSharePointBackup(ins.IDs()), categories)
|
||||
return addCategories(selectors.NewSharePointBackup(ins.IDs()), categories)
|
||||
}
|
||||
|
||||
func addCategories(sel *selectors.SharePointBackup, cats []string) *selectors.SharePointBackup {
|
||||
// Issue #2631: Libraries are the only supported feature for SharePoint at this time.
|
||||
if len(cats) == 0 {
|
||||
sel.Include(sel.LibraryFolders(selectors.Any()))
|
||||
}
|
||||
|
||||
for _, d := range cats {
|
||||
switch d {
|
||||
case dataLibraries:
|
||||
sel.Include(sel.LibraryFolders(selectors.Any()))
|
||||
case dataPages:
|
||||
sel.Include(sel.Pages(selectors.Any()))
|
||||
}
|
||||
}
|
||||
|
||||
return sel
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
@ -266,7 +261,7 @@ func sharePointListCmd() *cobra.Command {
|
||||
|
||||
// lists the history of backup operations
|
||||
func listSharePointCmd(cmd *cobra.Command, args []string) error {
|
||||
return genericListCommand(cmd, flags.BackupIDFV, path.SharePointService, args)
|
||||
return genericListCommand(cmd, utils.BackupIDFV, path.SharePointService, args)
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
@ -286,24 +281,14 @@ func sharePointDeleteCmd() *cobra.Command {
|
||||
|
||||
// deletes a sharePoint service backup.
|
||||
func deleteSharePointCmd(cmd *cobra.Command, args []string) error {
|
||||
backupIDValue := []string{}
|
||||
|
||||
if len(flags.BackupIDsFV) > 0 {
|
||||
backupIDValue = flags.BackupIDsFV
|
||||
} else if len(flags.BackupIDFV) > 0 {
|
||||
backupIDValue = append(backupIDValue, flags.BackupIDFV)
|
||||
} else {
|
||||
return clues.New("either --backup or --backups flag is required")
|
||||
}
|
||||
|
||||
return genericDeleteCommand(cmd, path.SharePointService, "SharePoint", backupIDValue, args)
|
||||
return genericDeleteCommand(cmd, utils.BackupIDFV, "SharePoint", args)
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup details
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup details SharePoint [<flag>...]`
|
||||
// `corso backup details onedrive [<flag>...]`
|
||||
func sharePointDetailsCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: sharePointServiceCommand,
|
||||
@ -320,31 +305,67 @@ func detailsSharePointCmd(cmd *cobra.Command, args []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
return runDetailsSharePointCmd(cmd)
|
||||
}
|
||||
|
||||
func runDetailsSharePointCmd(cmd *cobra.Command) error {
|
||||
ctx := cmd.Context()
|
||||
opts := utils.MakeSharePointOpts(cmd)
|
||||
|
||||
sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
|
||||
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
|
||||
utils.FilterSharePointRestoreInfoSelectors(sel, opts)
|
||||
|
||||
ds, err := genericDetailsCommand(cmd, flags.BackupIDFV, sel.Selector)
|
||||
r, _, err := utils.GetAccountAndConnect(ctx)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
if len(ds.Entries) > 0 {
|
||||
ds.PrintEntries(ctx)
|
||||
} else {
|
||||
Info(ctx, selectors.ErrorNoMatchingItems)
|
||||
defer utils.CloseRepo(ctx, r)
|
||||
|
||||
ctrlOpts := options.Control()
|
||||
|
||||
ds, err := runDetailsSharePointCmd(ctx, r, utils.BackupIDFV, opts, ctrlOpts.SkipReduce)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
if len(ds.Entries) == 0 {
|
||||
Info(ctx, selectors.ErrorNoMatchingItems)
|
||||
return nil
|
||||
}
|
||||
|
||||
ds.PrintEntries(ctx)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// runDetailsSharePointCmd actually performs the lookup in backup details.
|
||||
// the fault.Errors return is always non-nil. Callers should check if
|
||||
// errs.Failure() == nil.
|
||||
func runDetailsSharePointCmd(
|
||||
ctx context.Context,
|
||||
r repository.BackupGetter,
|
||||
backupID string,
|
||||
opts utils.SharePointOpts,
|
||||
skipReduce bool,
|
||||
) (*details.Details, error) {
|
||||
if err := utils.ValidateSharePointRestoreFlags(backupID, opts); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ctx = clues.Add(ctx, "backup_id", backupID)
|
||||
|
||||
d, _, errs := r.GetBackupDetails(ctx, backupID)
|
||||
// TODO: log/track recoverable errors
|
||||
if errs.Failure() != nil {
|
||||
if errors.Is(errs.Failure(), data.ErrNotFound) {
|
||||
return nil, clues.New("no backup exists with the id " + backupID)
|
||||
}
|
||||
|
||||
return nil, clues.Wrap(errs.Failure(), "Failed to get backup details in the repository")
|
||||
}
|
||||
|
||||
ctx = clues.Add(ctx, "details_entries", len(d.Entries))
|
||||
|
||||
if !skipReduce {
|
||||
sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
|
||||
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
|
||||
utils.FilterSharePointRestoreInfoSelectors(sel, opts)
|
||||
d = sel.Reduce(ctx, d, errs)
|
||||
}
|
||||
|
||||
return d, nil
|
||||
}
|
||||
|
||||
@ -1,276 +1,85 @@
|
||||
package backup_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/google/uuid"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/viper"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli"
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/config"
|
||||
"github.com/alcionai/corso/src/cli/print"
|
||||
cliTD "github.com/alcionai/corso/src/cli/testdata"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/internal/common/idname"
|
||||
"github.com/alcionai/corso/src/internal/operations"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/internal/tester/its"
|
||||
"github.com/alcionai/corso/src/internal/tester/tconfig"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
"github.com/alcionai/corso/src/pkg/config"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
"github.com/alcionai/corso/src/pkg/repository"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/selectors/testdata"
|
||||
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
|
||||
"github.com/alcionai/corso/src/pkg/storage"
|
||||
)
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests that require no existing backups
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type NoBackupSharePointE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
}
|
||||
|
||||
func TestNoBackupSharePointE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &NoBackupSharePointE2ESuite{Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
|
||||
}
|
||||
|
||||
func (suite *NoBackupSharePointE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
|
||||
}
|
||||
|
||||
func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "list", "sharepoint",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetErr(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
result := suite.dpnd.recorder.String()
|
||||
|
||||
// as an offhand check: the result should contain the m365 sitet id
|
||||
assert.True(t, strings.HasSuffix(result, "No backups available\n"))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests with no prior backup
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type BackupSharepointE2ESuite struct {
|
||||
type NoBackupSharePointE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
m365 its.M365IntgTestSetup
|
||||
acct account.Account
|
||||
st storage.Storage
|
||||
vpr *viper.Viper
|
||||
cfgFP string
|
||||
repo repository.Repository
|
||||
m365SiteID string
|
||||
recorder strings.Builder
|
||||
}
|
||||
|
||||
func TestBackupSharepointE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &BackupSharepointE2ESuite{Suite: tester.NewE2ESuite(
|
||||
func TestNoBackupSharePointE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &NoBackupSharePointE2ESuite{Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
|
||||
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
|
||||
tester.CorsoCITests,
|
||||
)})
|
||||
}
|
||||
|
||||
func (suite *BackupSharepointE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
func (suite *NoBackupSharePointE2ESuite) SetupSuite() {
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
suite.m365 = its.GetM365(t)
|
||||
suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
|
||||
}
|
||||
|
||||
func (suite *BackupSharepointE2ESuite) TestSharepointBackupCmd_lists() {
|
||||
// Issue: https://github.com/alcionai/corso/issues/4754
|
||||
suite.T().Skip("unskip when sharepoint lists support is enabled")
|
||||
runSharepointBackupCategoryTest(suite, flags.DataLists)
|
||||
}
|
||||
|
||||
func runSharepointBackupCategoryTest(suite *BackupSharepointE2ESuite, category string) {
|
||||
recorder := strings.Builder{}
|
||||
recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
acct, st, repo, vpr, recorder, cfgFilePath := prepM365Test(t, ctx)
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
suite.acct = acct
|
||||
suite.st = st
|
||||
suite.repo = repo
|
||||
suite.vpr = vpr
|
||||
suite.recorder = recorder
|
||||
suite.cfgFP = cfgFilePath
|
||||
suite.m365SiteID = tester.M365SiteID(t)
|
||||
}
|
||||
|
||||
func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() {
|
||||
t := suite.T()
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd, ctx := buildSharepointBackupCmd(
|
||||
ctx,
|
||||
suite.dpnd.configFilePath,
|
||||
suite.m365.Site.ID,
|
||||
category,
|
||||
&recorder)
|
||||
suite.recorder.Reset()
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
result := recorder.String()
|
||||
t.Log("backup results", result)
|
||||
}
|
||||
|
||||
func (suite *BackupSharepointE2ESuite) TestSharepointBackupCmd_siteNotFound_lists() {
|
||||
// Issue: https://github.com/alcionai/corso/issues/4754
|
||||
suite.T().Skip("un-skip test when lists support is enabled")
|
||||
runSharepointBackupSiteNotFoundTest(suite, flags.DataLists)
|
||||
}
|
||||
|
||||
func runSharepointBackupSiteNotFoundTest(suite *BackupSharepointE2ESuite, category string) {
|
||||
recorder := strings.Builder{}
|
||||
recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd, ctx := buildSharepointBackupCmd(
|
||||
ctx,
|
||||
suite.dpnd.configFilePath,
|
||||
uuid.NewString(),
|
||||
category,
|
||||
&recorder)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
assert.Contains(
|
||||
t,
|
||||
err.Error(),
|
||||
"Invalid hostname for this tenancy", "error missing site not found")
|
||||
assert.NotContains(t, err.Error(), "runtime error", "panic happened")
|
||||
|
||||
t.Logf("backup error message: %s", err.Error())
|
||||
|
||||
result := recorder.String()
|
||||
t.Log("backup results", result)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests prepared with a previous backup
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type PreparedBackupSharepointE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
backupOps map[path.CategoryType]string
|
||||
m365 its.M365IntgTestSetup
|
||||
}
|
||||
|
||||
func TestPreparedBackupSharepointE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &PreparedBackupSharepointE2ESuite{
|
||||
Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
|
||||
})
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupSharepointE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
suite.m365 = its.GetM365(t)
|
||||
suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
|
||||
suite.backupOps = make(map[path.CategoryType]string)
|
||||
|
||||
var (
|
||||
sites = []string{suite.m365.Site.ID}
|
||||
ins = idname.NewCache(map[string]string{suite.m365.Site.ID: suite.m365.Site.ID})
|
||||
cats = []path.CategoryType{
|
||||
path.ListsCategory,
|
||||
}
|
||||
)
|
||||
|
||||
for _, set := range cats {
|
||||
var (
|
||||
sel = selectors.NewSharePointBackup(sites)
|
||||
scopes []selectors.SharePointScope
|
||||
)
|
||||
|
||||
switch set {
|
||||
case path.ListsCategory:
|
||||
scopes = testdata.SharePointBackupListsScope(sel)
|
||||
}
|
||||
|
||||
sel.Include(scopes)
|
||||
|
||||
bop, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
err = bop.Run(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
bIDs := string(bop.Results.BackupID)
|
||||
|
||||
// sanity check, ensure we can find the backup and its details immediately
|
||||
b, err := suite.dpnd.repo.Backup(ctx, string(bop.Results.BackupID))
|
||||
require.NoError(t, err, "retrieving recent backup by ID")
|
||||
require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")
|
||||
|
||||
_, b, errs := suite.dpnd.repo.GetBackupDetails(ctx, bIDs)
|
||||
require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
|
||||
require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
|
||||
require.Equal(t, bIDs, string(b.ID), "repo details matches results id")
|
||||
|
||||
suite.backupOps[set] = string(b.ID)
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupSharepointE2ESuite) TestSharepointListCmd_lists() {
|
||||
runSharepointListCmdTest(suite, path.ListsCategory)
|
||||
}
|
||||
|
||||
func runSharepointListCmdTest(suite *PreparedBackupSharepointE2ESuite, category path.CategoryType) {
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
cmd := tester.StubRootCmd(
|
||||
"backup", "list", "sharepoint",
|
||||
"--config-file", suite.dpnd.configFilePath)
|
||||
"--config-file", suite.cfgFP)
|
||||
cli.BuildCommandTree(cmd)
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
|
||||
cmd.SetErr(&suite.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
@ -278,100 +87,10 @@ func runSharepointListCmdTest(suite *PreparedBackupSharepointE2ESuite, category
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// compare the output
|
||||
result := suite.dpnd.recorder.String()
|
||||
assert.Contains(t, result, suite.backupOps[category])
|
||||
result := suite.recorder.String()
|
||||
|
||||
t.Log("backup results", result)
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupSharepointE2ESuite) TestSharepointListCmd_badID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "list", "sharepoint",
|
||||
"--config-file", suite.dpnd.configFilePath,
|
||||
"--backup", uuid.NewString())
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupSharepointE2ESuite) TestSharepointDetailsCmd_lists() {
|
||||
runSharepointDetailsCmdTest(suite, path.ListsCategory)
|
||||
}
|
||||
|
||||
func runSharepointDetailsCmdTest(suite *PreparedBackupSharepointE2ESuite, category path.CategoryType) {
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
bID := suite.backupOps[category]
|
||||
|
||||
// fetch the details from the repo first
|
||||
deets, _, errs := suite.dpnd.repo.GetBackupDetails(ctx, string(bID))
|
||||
require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
|
||||
require.Empty(t, errs.Recovered())
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "details", "sharepoint",
|
||||
"--config-file", suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupFN, string(bID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// compare the output
|
||||
result := suite.dpnd.recorder.String()
|
||||
|
||||
i := 0
|
||||
findings := make(map[path.CategoryType]int)
|
||||
|
||||
incrementor := func(cond bool, cat path.CategoryType) {
|
||||
if cond {
|
||||
findings[cat]++
|
||||
}
|
||||
}
|
||||
|
||||
for _, ent := range deets.Entries {
|
||||
if ent.SharePoint == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
isSharePointList := ent.SharePoint.ItemType == details.SharePointList
|
||||
hasListName := isSharePointList && len(ent.SharePoint.List.Name) > 0
|
||||
hasItemName := !isSharePointList && len(ent.SharePoint.ItemName) > 0
|
||||
|
||||
incrementor(hasListName, category)
|
||||
incrementor(hasItemName, category)
|
||||
|
||||
suite.Run(fmt.Sprintf("detail %d", i), func() {
|
||||
assert.Contains(suite.T(), result, ent.ShortRef)
|
||||
})
|
||||
|
||||
i++
|
||||
}
|
||||
|
||||
assert.GreaterOrEqual(t, findings[category], 1)
|
||||
// as an offhand check: the result should contain the m365 sitet id
|
||||
assert.Equal(t, "No backups available\n", result)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -380,29 +99,41 @@ func runSharepointDetailsCmdTest(suite *PreparedBackupSharepointE2ESuite, catego
|
||||
|
||||
type BackupDeleteSharePointE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
acct account.Account
|
||||
st storage.Storage
|
||||
vpr *viper.Viper
|
||||
cfgFP string
|
||||
repo repository.Repository
|
||||
backupOp operations.BackupOperation
|
||||
secondaryBackupOp operations.BackupOperation
|
||||
recorder strings.Builder
|
||||
}
|
||||
|
||||
func TestBackupDeleteSharePointE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &BackupDeleteSharePointE2ESuite{
|
||||
Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
|
||||
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
|
||||
tester.CorsoCITests,
|
||||
),
|
||||
})
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteSharePointE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
|
||||
t := suite.T()
|
||||
acct, st, repo, vpr, recorder, cfgFilePath := prepM365Test(t, ctx)
|
||||
|
||||
suite.acct = acct
|
||||
suite.st = st
|
||||
suite.repo = repo
|
||||
suite.vpr = vpr
|
||||
suite.recorder = recorder
|
||||
suite.cfgFP = cfgFilePath
|
||||
|
||||
var (
|
||||
m365SiteID = tconfig.M365SiteID(t)
|
||||
m365SiteID = tester.M365SiteID(t)
|
||||
sites = []string{m365SiteID}
|
||||
ins = idname.NewCache(map[string]string{m365SiteID: m365SiteID})
|
||||
)
|
||||
@ -411,43 +142,30 @@ func (suite *BackupDeleteSharePointE2ESuite) SetupSuite() {
|
||||
sel := selectors.NewSharePointBackup(sites)
|
||||
sel.Include(testdata.SharePointBackupFolderScope(sel))
|
||||
|
||||
backupOp, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
|
||||
backupOp, err := suite.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
suite.backupOp = backupOp
|
||||
|
||||
err = suite.backupOp.Run(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// secondary backup
|
||||
secondaryBackupOp, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
suite.secondaryBackupOp = secondaryBackupOp
|
||||
|
||||
err = suite.secondaryBackupOp.Run(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
suite.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
cmd := tester.StubRootCmd(
|
||||
"backup", "delete", "sharepoint",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupIDsFN,
|
||||
fmt.Sprintf("%s,%s",
|
||||
string(suite.backupOp.Results.BackupID),
|
||||
string(suite.secondaryBackupOp.Results.BackupID)))
|
||||
"--config-file", suite.cfgFP,
|
||||
"--"+utils.BackupFN, string(suite.backupOp.Results.BackupID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
cmd.SetErr(&suite.dpnd.recorder)
|
||||
cmd.SetErr(&suite.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
@ -455,20 +173,16 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() {
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
result := suite.dpnd.recorder.String()
|
||||
assert.True(t,
|
||||
strings.HasSuffix(
|
||||
result,
|
||||
fmt.Sprintf("Deleted SharePoint backup [%s %s]\n",
|
||||
string(suite.backupOp.Results.BackupID),
|
||||
string(suite.secondaryBackupOp.Results.BackupID))))
|
||||
result := suite.recorder.String()
|
||||
expect := fmt.Sprintf("Deleted SharePoint backup %s\n", string(suite.backupOp.Results.BackupID))
|
||||
assert.Equal(t, expect, result)
|
||||
}
|
||||
|
||||
// moved out of the func above to make the linter happy
|
||||
// // a follow-up details call should fail, due to the backup ID being deleted
|
||||
// cmd = cliTD.StubRootCmd(
|
||||
// cmd = tester.StubRootCmd(
|
||||
// "backup", "details", "sharepoint",
|
||||
// "--"+flags.ConfigFileFN, suite.cfgFP,
|
||||
// "--config-file", suite.cfgFP,
|
||||
// "--backup", string(suite.backupOp.Results.BackupID))
|
||||
// cli.BuildCommandTree(cmd)
|
||||
|
||||
@ -477,57 +191,18 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() {
|
||||
|
||||
func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd_unknownID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
ctx, flush := tester.NewContext()
|
||||
ctx = config.SetViper(ctx, suite.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
cmd := tester.StubRootCmd(
|
||||
"backup", "delete", "sharepoint",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupIDsFN, uuid.NewString())
|
||||
"--config-file", suite.cfgFP,
|
||||
"--"+utils.BackupFN, uuid.NewString())
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
// unknown backupIDs should error since the modelStore can't find the backup
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd_NoBackupID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "delete", "groups",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
// empty backupIDs should error since no data provided
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func buildSharepointBackupCmd(
|
||||
ctx context.Context,
|
||||
configFile, site, category string,
|
||||
recorder *strings.Builder,
|
||||
) (*cobra.Command, context.Context) {
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "create", "sharepoint",
|
||||
"--config-file", configFile,
|
||||
"--"+flags.SiteIDFN, site,
|
||||
"--"+flags.CategoryDataFN, category)
|
||||
cli.BuildCommandTree(cmd)
|
||||
cmd.SetOut(recorder)
|
||||
|
||||
return cmd, print.SetRootCmd(ctx, cmd)
|
||||
}
|
||||
|
||||
@ -1,7 +1,6 @@
|
||||
package backup
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
@ -10,13 +9,11 @@ import (
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
|
||||
cliTD "github.com/alcionai/corso/src/cli/testdata"
|
||||
"github.com/alcionai/corso/src/cli/options"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/cli/utils/testdata"
|
||||
"github.com/alcionai/corso/src/internal/common/idname"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
)
|
||||
|
||||
@ -36,35 +33,58 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
|
||||
use string
|
||||
expectUse string
|
||||
expectShort string
|
||||
flags []string
|
||||
expectRunE func(*cobra.Command, []string) error
|
||||
}{
|
||||
{
|
||||
name: "create sharepoint",
|
||||
use: createCommand,
|
||||
expectUse: expectUse + " " + sharePointServiceCommandCreateUseSuffix,
|
||||
expectShort: sharePointCreateCmd().Short,
|
||||
expectRunE: createSharePointCmd,
|
||||
"create sharepoint",
|
||||
createCommand,
|
||||
expectUse + " " + sharePointServiceCommandCreateUseSuffix,
|
||||
sharePointCreateCmd().Short,
|
||||
[]string{
|
||||
utils.SiteFN,
|
||||
options.DisableIncrementalsFN,
|
||||
options.FailFastFN,
|
||||
},
|
||||
createSharePointCmd,
|
||||
},
|
||||
{
|
||||
name: "list sharepoint",
|
||||
use: listCommand,
|
||||
expectUse: expectUse,
|
||||
expectShort: sharePointListCmd().Short,
|
||||
expectRunE: listSharePointCmd,
|
||||
"list sharepoint",
|
||||
listCommand,
|
||||
expectUse,
|
||||
sharePointListCmd().Short,
|
||||
[]string{
|
||||
utils.BackupFN,
|
||||
failedItemsFN,
|
||||
skippedItemsFN,
|
||||
recoveredErrorsFN,
|
||||
},
|
||||
listSharePointCmd,
|
||||
},
|
||||
{
|
||||
name: "details sharepoint",
|
||||
use: detailsCommand,
|
||||
expectUse: expectUse + " " + sharePointServiceCommandDetailsUseSuffix,
|
||||
expectShort: sharePointDetailsCmd().Short,
|
||||
expectRunE: detailsSharePointCmd,
|
||||
"details sharepoint",
|
||||
detailsCommand,
|
||||
expectUse + " " + sharePointServiceCommandDetailsUseSuffix,
|
||||
sharePointDetailsCmd().Short,
|
||||
[]string{
|
||||
utils.BackupFN,
|
||||
utils.LibraryFN,
|
||||
utils.FolderFN,
|
||||
utils.FileFN,
|
||||
utils.FileCreatedAfterFN,
|
||||
utils.FileCreatedBeforeFN,
|
||||
utils.FileModifiedAfterFN,
|
||||
utils.FileModifiedBeforeFN,
|
||||
},
|
||||
detailsSharePointCmd,
|
||||
},
|
||||
{
|
||||
name: "delete sharepoint",
|
||||
use: deleteCommand,
|
||||
expectUse: expectUse + " " + sharePointServiceCommandDeleteUseSuffix,
|
||||
expectShort: sharePointDeleteCmd().Short,
|
||||
expectRunE: deleteSharePointCmd,
|
||||
"delete sharepoint",
|
||||
deleteCommand,
|
||||
expectUse + " " + sharePointServiceCommandDeleteUseSuffix,
|
||||
sharePointDeleteCmd().Short,
|
||||
[]string{utils.BackupFN},
|
||||
deleteSharePointCmd,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
@ -83,142 +103,19 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
|
||||
assert.Equal(t, test.expectUse, child.Use)
|
||||
assert.Equal(t, test.expectShort, child.Short)
|
||||
tester.AreSameFunc(t, test.expectRunE, child.RunE)
|
||||
|
||||
for _, f := range test.flags {
|
||||
assert.NotNil(t, c.Flag(f), f+" flag")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *SharePointUnitSuite) TestBackupCreateFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: createCommand},
|
||||
addSharePointCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
sharePointServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.SiteIDFN, flagsTD.FlgInputs(flagsTD.SiteIDInput),
|
||||
"--" + flags.SiteFN, flagsTD.FlgInputs(flagsTD.WebURLInput),
|
||||
"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.SharepointCategoryDataInput),
|
||||
},
|
||||
flagsTD.PreparedGenericBackupFlags(),
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
opts := utils.MakeSharePointOpts(cmd)
|
||||
co := utils.Control()
|
||||
backupOpts := utils.ParseBackupOptions()
|
||||
|
||||
// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
|
||||
// restore flags are switched over too and we no longer parse flags beyond
|
||||
// connection info into control.Options.
|
||||
assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
|
||||
assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
|
||||
assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)
|
||||
|
||||
assert.Equal(t, control.FailFast, co.FailureHandling)
|
||||
assert.True(t, co.ToggleFeatures.DisableIncrementals)
|
||||
assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
|
||||
|
||||
assert.ElementsMatch(t, []string{strings.Join(flagsTD.SiteIDInput, ",")}, opts.SiteID)
|
||||
assert.ElementsMatch(t, flagsTD.WebURLInput, opts.WebURL)
|
||||
flagsTD.AssertGenericBackupFlags(t, cmd)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *SharePointUnitSuite) TestBackupListFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: listCommand},
|
||||
addSharePointCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
sharePointServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
},
|
||||
flagsTD.PreparedBackupListFlags(),
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
flagsTD.AssertBackupListFlags(t, cmd)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *SharePointUnitSuite) TestBackupDetailsFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: detailsCommand},
|
||||
addSharePointCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
sharePointServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
"--" + flags.SkipReduceFN,
|
||||
},
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
co := utils.Control()
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
assert.True(t, co.SkipReduce)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *SharePointUnitSuite) TestBackupDeleteFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: deleteCommand},
|
||||
addSharePointCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
sharePointServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
},
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *SharePointUnitSuite) TestValidateSharePointBackupCreateFlags() {
|
||||
table := []struct {
|
||||
name string
|
||||
site []string
|
||||
weburl []string
|
||||
cats []string
|
||||
expect assert.ErrorAssertionFunc
|
||||
}{
|
||||
{
|
||||
@ -226,61 +123,25 @@ func (suite *SharePointUnitSuite) TestValidateSharePointBackupCreateFlags() {
|
||||
expect: assert.Error,
|
||||
},
|
||||
{
|
||||
name: "sites but no category",
|
||||
name: "sites",
|
||||
site: []string{"smarf"},
|
||||
expect: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "web urls but no category",
|
||||
name: "urls",
|
||||
weburl: []string{"fnord"},
|
||||
expect: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "both web urls and sites but no category",
|
||||
name: "both",
|
||||
site: []string{"smarf"},
|
||||
weburl: []string{"fnord"},
|
||||
expect: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "site with libraries category",
|
||||
site: []string{"smarf"},
|
||||
cats: []string{flags.DataLibraries},
|
||||
expect: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "site with invalid category",
|
||||
site: []string{"smarf"},
|
||||
cats: []string{"invalid category"},
|
||||
expect: assert.Error,
|
||||
},
|
||||
{
|
||||
name: "site with lists category",
|
||||
site: []string{"smarf"},
|
||||
cats: []string{flags.DataLists},
|
||||
expect: assert.NoError,
|
||||
},
|
||||
|
||||
// [TODO]: Uncomment when pages are enabled
|
||||
|
||||
// {
|
||||
// name: "site with pages category",
|
||||
// site: []string{"smarf"},
|
||||
// cats: []string{flags.DataPages},
|
||||
// expect: assert.NoError,
|
||||
// },
|
||||
|
||||
// [TODO]: Uncomment when pages & lists are enabled
|
||||
|
||||
// {
|
||||
// name: "site with all categories",
|
||||
// site: []string{"smarf"},
|
||||
// cats: []string{flags.DataLists, flags.DataPages, flags.DataLibraries},
|
||||
// expect: assert.NoError,
|
||||
// },
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
err := validateSharePointBackupCreateFlags(test.site, test.weburl, test.cats)
|
||||
err := validateSharePointBackupCreateFlags(test.site, test.weburl, nil)
|
||||
test.expect(suite.T(), err, clues.ToCore(err))
|
||||
})
|
||||
}
|
||||
@ -305,6 +166,7 @@ func (suite *SharePointUnitSuite) TestSharePointBackupCreateSelectors() {
|
||||
weburl []string
|
||||
data []string
|
||||
expect []string
|
||||
expectScopesLen int
|
||||
}{
|
||||
{
|
||||
name: "no sites or urls",
|
||||
@ -318,71 +180,114 @@ func (suite *SharePointUnitSuite) TestSharePointBackupCreateSelectors() {
|
||||
},
|
||||
{
|
||||
name: "site wildcard",
|
||||
site: []string{flags.Wildcard},
|
||||
site: []string{utils.Wildcard},
|
||||
expect: bothIDs,
|
||||
expectScopesLen: 2,
|
||||
},
|
||||
{
|
||||
name: "url wildcard",
|
||||
weburl: []string{flags.Wildcard},
|
||||
weburl: []string{utils.Wildcard},
|
||||
expect: bothIDs,
|
||||
expectScopesLen: 2,
|
||||
},
|
||||
{
|
||||
name: "sites",
|
||||
site: []string{id1, id2},
|
||||
expect: []string{id1, id2},
|
||||
expectScopesLen: 2,
|
||||
},
|
||||
{
|
||||
name: "urls",
|
||||
weburl: []string{url1, url2},
|
||||
expect: []string{url1, url2},
|
||||
expectScopesLen: 2,
|
||||
},
|
||||
{
|
||||
name: "mix sites and urls",
|
||||
site: []string{id1},
|
||||
weburl: []string{url2},
|
||||
expect: []string{id1, url2},
|
||||
expectScopesLen: 2,
|
||||
},
|
||||
{
|
||||
name: "duplicate sites and urls",
|
||||
site: []string{id1, id2},
|
||||
weburl: []string{url1, url2},
|
||||
expect: []string{id1, id2, url1, url2},
|
||||
expectScopesLen: 2,
|
||||
},
|
||||
{
|
||||
name: "unnecessary site wildcard",
|
||||
site: []string{id1, flags.Wildcard},
|
||||
site: []string{id1, utils.Wildcard},
|
||||
weburl: []string{url1, url2},
|
||||
expect: bothIDs,
|
||||
expectScopesLen: 2,
|
||||
},
|
||||
{
|
||||
name: "unnecessary url wildcard",
|
||||
site: []string{id1},
|
||||
weburl: []string{url1, flags.Wildcard},
|
||||
weburl: []string{url1, utils.Wildcard},
|
||||
expect: bothIDs,
|
||||
expectScopesLen: 2,
|
||||
},
|
||||
{
|
||||
name: "Pages",
|
||||
site: bothIDs,
|
||||
data: []string{flags.DataPages},
|
||||
expect: bothIDs,
|
||||
},
|
||||
{
|
||||
name: "Lists",
|
||||
site: bothIDs,
|
||||
data: []string{flags.DataLists},
|
||||
data: []string{dataPages},
|
||||
expect: bothIDs,
|
||||
expectScopesLen: 1,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
sel, err := sharePointBackupCreateSelectors(ctx, ins, test.site, test.weburl, test.data)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
assert.ElementsMatch(t, test.expect, sel.ResourceOwners.Targets)
|
||||
assert.ElementsMatch(t, test.expect, sel.DiscreteResourceOwners())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *SharePointUnitSuite) TestSharePointBackupDetailsSelectors() {
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
for _, test := range testdata.SharePointOptionDetailLookups {
|
||||
suite.Run(test.Name, func() {
|
||||
t := suite.T()
|
||||
|
||||
output, err := runDetailsSharePointCmd(
|
||||
ctx,
|
||||
test.BackupGetter,
|
||||
"backup-ID",
|
||||
test.Opts,
|
||||
false)
|
||||
assert.NoError(t, err, clues.ToCore(err))
|
||||
assert.ElementsMatch(t, test.Expected, output.Entries)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *SharePointUnitSuite) TestSharePointBackupDetailsSelectorsBadFormats() {
|
||||
ctx, flush := tester.NewContext()
|
||||
defer flush()
|
||||
|
||||
for _, test := range testdata.BadSharePointOptionsFormats {
|
||||
suite.Run(test.Name, func() {
|
||||
t := suite.T()
|
||||
|
||||
output, err := runDetailsSharePointCmd(
|
||||
ctx,
|
||||
test.BackupGetter,
|
||||
"backup-ID",
|
||||
test.Opts,
|
||||
false)
|
||||
assert.Error(t, err, clues.ToCore(err))
|
||||
assert.Empty(t, output)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,305 +0,0 @@
|
||||
package backup
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/cobra"
|
||||
"golang.org/x/exp/slices"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
. "github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/internal/common/idname"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/filters"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365"
|
||||
)
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// setup and globals
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
const (
|
||||
teamschatsServiceCommand = "chats"
|
||||
teamschatsServiceCommandCreateUseSuffix = "--user <userEmail> | '" + flags.Wildcard + "'"
|
||||
teamschatsServiceCommandDeleteUseSuffix = "--backups <backupId>"
|
||||
teamschatsServiceCommandDetailsUseSuffix = "--backup <backupId>"
|
||||
)
|
||||
|
||||
const (
|
||||
teamschatsServiceCommandCreateExamples = `# Backup all chats with bob@company.hr
|
||||
corso backup create chats --user bob@company.hr
|
||||
|
||||
# Backup all chats for all users
|
||||
corso backup create chats --user '*'`
|
||||
|
||||
teamschatsServiceCommandDeleteExamples = `# Delete chats backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
and 1234abcd-12ab-cd34-56de-1234abce
|
||||
corso backup delete chats --backups 1234abcd-12ab-cd34-56de-1234abcd,1234abcd-12ab-cd34-56de-1234abce`
|
||||
|
||||
teamschatsServiceCommandDetailsExamples = `# Explore chats in Bob's latest backup (1234abcd...)
|
||||
corso backup details chats --backup 1234abcd-12ab-cd34-56de-1234abcd`
|
||||
)
|
||||
|
||||
// called by backup.go to map subcommands to provider-specific handling.
|
||||
func addTeamsChatsCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var c *cobra.Command
|
||||
|
||||
switch cmd.Use {
|
||||
case createCommand:
|
||||
c, _ = utils.AddCommand(cmd, teamschatsCreateCmd(), utils.MarkPreReleaseCommand())
|
||||
|
||||
c.Use = c.Use + " " + teamschatsServiceCommandCreateUseSuffix
|
||||
c.Example = teamschatsServiceCommandCreateExamples
|
||||
|
||||
// Flags addition ordering should follow the order we want them to appear in help and docs:
|
||||
flags.AddUserFlag(c)
|
||||
flags.AddDataFlag(c, []string{flags.DataChats}, false)
|
||||
flags.AddGenericBackupFlags(c)
|
||||
|
||||
case listCommand:
|
||||
c, _ = utils.AddCommand(cmd, teamschatsListCmd(), utils.MarkPreReleaseCommand())
|
||||
|
||||
flags.AddBackupIDFlag(c, false)
|
||||
flags.AddAllBackupListFlags(c)
|
||||
|
||||
case detailsCommand:
|
||||
c, _ = utils.AddCommand(cmd, teamschatsDetailsCmd(), utils.MarkPreReleaseCommand())
|
||||
|
||||
c.Use = c.Use + " " + teamschatsServiceCommandDetailsUseSuffix
|
||||
c.Example = teamschatsServiceCommandDetailsExamples
|
||||
|
||||
flags.AddSkipReduceFlag(c)
|
||||
|
||||
// Flags addition ordering should follow the order we want them to appear in help and docs:
|
||||
// More generic (ex: --user) and more frequently used flags take precedence.
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddTeamsChatsDetailsAndRestoreFlags(c)
|
||||
|
||||
case deleteCommand:
|
||||
c, _ = utils.AddCommand(cmd, teamschatsDeleteCmd(), utils.MarkPreReleaseCommand())
|
||||
|
||||
c.Use = c.Use + " " + teamschatsServiceCommandDeleteUseSuffix
|
||||
c.Example = teamschatsServiceCommandDeleteExamples
|
||||
|
||||
flags.AddMultipleBackupIDsFlag(c, false)
|
||||
flags.AddBackupIDFlag(c, false)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup create
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup create chats [<flag>...]`
|
||||
func teamschatsCreateCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: teamschatsServiceCommand,
|
||||
Aliases: []string{teamsServiceCommand},
|
||||
Short: "Backup M365 Chats data",
|
||||
RunE: createTeamsChatsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// processes a teamschats backup.
|
||||
func createTeamsChatsCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := validateTeamsChatsBackupCreateFlags(flags.UserFV, flags.CategoryDataFV); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r, acct, err := utils.AccountConnectAndWriteRepoConfig(
|
||||
ctx,
|
||||
cmd,
|
||||
path.TeamsChatsService)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
defer utils.CloseRepo(ctx, r)
|
||||
|
||||
// TODO: log/print recoverable errors
|
||||
errs := fault.New(false)
|
||||
|
||||
svcCli, err := m365.NewM365Client(ctx, *acct)
|
||||
if err != nil {
|
||||
return Only(ctx, clues.Stack(err))
|
||||
}
|
||||
|
||||
ins, err := svcCli.AC.Users().GetAllIDsAndNames(ctx, errs)
|
||||
if err != nil {
|
||||
return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 teamschats"))
|
||||
}
|
||||
|
||||
sel := teamschatsBackupCreateSelectors(ctx, ins, flags.UserFV, flags.CategoryDataFV)
|
||||
selectorSet := []selectors.Selector{}
|
||||
|
||||
for _, discSel := range sel.SplitByResourceOwner(ins.IDs()) {
|
||||
selectorSet = append(selectorSet, discSel.Selector)
|
||||
}
|
||||
|
||||
return genericCreateCommand(
|
||||
ctx,
|
||||
r,
|
||||
"Chats",
|
||||
selectorSet,
|
||||
ins)
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup list
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup list teamschats [<flag>...]`
|
||||
func teamschatsListCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: teamschatsServiceCommand,
|
||||
Short: "List the history of M365 Chats backups",
|
||||
RunE: listTeamsChatsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// lists the history of backup operations
|
||||
func listTeamsChatsCmd(cmd *cobra.Command, args []string) error {
|
||||
return genericListCommand(cmd, flags.BackupIDFV, path.TeamsChatsService, args)
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup details
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup details teamschats [<flag>...]`
|
||||
func teamschatsDetailsCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: teamschatsServiceCommand,
|
||||
Short: "Shows the details of a M365 Chats backup",
|
||||
RunE: detailsTeamsChatsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// processes a teamschats backup.
|
||||
func detailsTeamsChatsCmd(cmd *cobra.Command, args []string) error {
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
return runDetailsTeamsChatsCmd(cmd)
|
||||
}
|
||||
|
||||
func runDetailsTeamsChatsCmd(cmd *cobra.Command) error {
|
||||
ctx := cmd.Context()
|
||||
opts := utils.MakeTeamsChatsOpts(cmd)
|
||||
|
||||
sel := utils.IncludeTeamsChatsRestoreDataSelectors(ctx, opts)
|
||||
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
|
||||
utils.FilterTeamsChatsRestoreInfoSelectors(sel, opts)
|
||||
|
||||
ds, err := genericDetailsCommand(cmd, flags.BackupIDFV, sel.Selector)
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
if len(ds.Entries) > 0 {
|
||||
ds.PrintEntries(ctx)
|
||||
} else {
|
||||
Info(ctx, selectors.ErrorNoMatchingItems)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
// backup delete
|
||||
// ------------------------------------------------------------------------------------------------
|
||||
|
||||
// `corso backup delete teamschats [<flag>...]`
|
||||
func teamschatsDeleteCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: teamschatsServiceCommand,
|
||||
Short: "Delete backed-up M365 Chats data",
|
||||
RunE: deleteTeamsChatsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// deletes an teamschats backup.
|
||||
func deleteTeamsChatsCmd(cmd *cobra.Command, args []string) error {
|
||||
backupIDValue := []string{}
|
||||
|
||||
if len(flags.BackupIDsFV) > 0 {
|
||||
backupIDValue = flags.BackupIDsFV
|
||||
} else if len(flags.BackupIDFV) > 0 {
|
||||
backupIDValue = append(backupIDValue, flags.BackupIDFV)
|
||||
} else {
|
||||
return clues.New("either --backup or --backups flag is required")
|
||||
}
|
||||
|
||||
return genericDeleteCommand(cmd, path.TeamsChatsService, "TeamsChats", backupIDValue, args)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func validateTeamsChatsBackupCreateFlags(teamschats, cats []string) error {
|
||||
if len(teamschats) == 0 {
|
||||
return clues.New(
|
||||
"requires one or more --" +
|
||||
flags.UserFN + " ids, or the wildcard --" +
|
||||
flags.UserFN + " *")
|
||||
}
|
||||
|
||||
msg := fmt.Sprintf(
|
||||
" is an unrecognized data type; only %s is supported",
|
||||
flags.DataChats)
|
||||
|
||||
allowedCats := utils.TeamsChatsAllowedCategories()
|
||||
|
||||
for _, d := range cats {
|
||||
if _, ok := allowedCats[d]; !ok {
|
||||
return clues.New(d + msg)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func teamschatsBackupCreateSelectors(
|
||||
ctx context.Context,
|
||||
ins idname.Cacher,
|
||||
users, cats []string,
|
||||
) *selectors.TeamsChatsBackup {
|
||||
if filters.PathContains(users).Compare(flags.Wildcard) {
|
||||
return includeAllTeamsChatsWithCategories(ins, cats)
|
||||
}
|
||||
|
||||
sel := selectors.NewTeamsChatsBackup(slices.Clone(users))
|
||||
|
||||
return utils.AddTeamsChatsCategories(sel, cats)
|
||||
}
|
||||
|
||||
func includeAllTeamsChatsWithCategories(ins idname.Cacher, categories []string) *selectors.TeamsChatsBackup {
|
||||
return utils.AddTeamsChatsCategories(selectors.NewTeamsChatsBackup(ins.IDs()), categories)
|
||||
}
|
||||
@ -1,636 +0,0 @@
|
||||
package backup_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/google/uuid"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli"
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/print"
|
||||
cliTD "github.com/alcionai/corso/src/cli/testdata"
|
||||
"github.com/alcionai/corso/src/internal/common/idname"
|
||||
"github.com/alcionai/corso/src/internal/operations"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/internal/tester/its"
|
||||
"github.com/alcionai/corso/src/internal/tester/tconfig"
|
||||
"github.com/alcionai/corso/src/pkg/config"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
|
||||
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
|
||||
)
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests that require no existing backups
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type NoBackupTeamsChatsE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
m365 its.M365IntgTestSetup
|
||||
}
|
||||
|
||||
func TestNoBackupTeamsChatsE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &BackupTeamsChatsE2ESuite{Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
|
||||
}
|
||||
|
||||
func (suite *NoBackupTeamsChatsE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
t.Skip("not fully implemented")
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
suite.m365 = its.GetM365(t)
|
||||
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
|
||||
}
|
||||
|
||||
func (suite *NoBackupTeamsChatsE2ESuite) TestTeamsChatsBackupListCmd_noBackups() {
|
||||
t := suite.T()
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "list", "chats",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetErr(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
result := suite.dpnd.recorder.String()
|
||||
|
||||
// as an offhand check: the result should contain the m365 teamschat id
|
||||
assert.True(t, strings.HasSuffix(result, "No backups available\n"))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests with no prior backup
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type BackupTeamsChatsE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
m365 its.M365IntgTestSetup
|
||||
}
|
||||
|
||||
func TestBackupTeamsChatsE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &BackupTeamsChatsE2ESuite{Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
|
||||
}
|
||||
|
||||
func (suite *BackupTeamsChatsE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
t.Skip("not fully implemented")
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
suite.m365 = its.GetM365(t)
|
||||
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
|
||||
}
|
||||
|
||||
func (suite *BackupTeamsChatsE2ESuite) TestTeamsChatsBackupCmd_chats() {
|
||||
runTeamsChatsBackupCategoryTest(suite, flags.DataChats)
|
||||
}
|
||||
|
||||
func runTeamsChatsBackupCategoryTest(suite *BackupTeamsChatsE2ESuite, category string) {
|
||||
recorder := strings.Builder{}
|
||||
recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd, ctx := buildTeamsChatsBackupCmd(
|
||||
ctx,
|
||||
suite.dpnd.configFilePath,
|
||||
suite.m365.User.ID,
|
||||
category,
|
||||
&recorder)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
result := recorder.String()
|
||||
t.Log("backup results", result)
|
||||
}
|
||||
|
||||
func (suite *BackupTeamsChatsE2ESuite) TestTeamsChatsBackupCmd_teamschatNotFound_chats() {
|
||||
runTeamsChatsBackupTeamsChatNotFoundTest(suite, flags.DataChats)
|
||||
}
|
||||
|
||||
func runTeamsChatsBackupTeamsChatNotFoundTest(suite *BackupTeamsChatsE2ESuite, category string) {
|
||||
recorder := strings.Builder{}
|
||||
recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd, ctx := buildTeamsChatsBackupCmd(
|
||||
ctx,
|
||||
suite.dpnd.configFilePath,
|
||||
"foo@not-there.com",
|
||||
category,
|
||||
&recorder)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
assert.Contains(
|
||||
t,
|
||||
err.Error(),
|
||||
"not found",
|
||||
"error missing user not found")
|
||||
assert.NotContains(t, err.Error(), "runtime error", "panic happened")
|
||||
|
||||
t.Logf("backup error message: %s", err.Error())
|
||||
|
||||
result := recorder.String()
|
||||
t.Log("backup results", result)
|
||||
}
|
||||
|
||||
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_badAzureClientIDFlag() {
|
||||
t := suite.T()
|
||||
ctx, flush := tester.NewContext(t)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "create", "chats",
|
||||
"--teamschat", suite.m365.User.ID,
|
||||
"--azure-client-id", "invalid-value")
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetErr(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_fromConfigFile() {
|
||||
t := suite.T()
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "create", "chats",
|
||||
"--teamschat", suite.m365.User.ID,
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
// AWS flags
|
||||
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_badAWSFlags() {
|
||||
t := suite.T()
|
||||
ctx, flush := tester.NewContext(t)
|
||||
|
||||
defer flush()
|
||||
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "create", "chats",
|
||||
"--teamschat", suite.m365.User.ID,
|
||||
"--aws-access-key", "invalid-value",
|
||||
"--aws-secret-access-key", "some-invalid-value")
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
// since invalid aws creds are explicitly set, should see a failure
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests prepared with a previous backup
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type PreparedBackupTeamsChatsE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
backupOps map[path.CategoryType]string
|
||||
m365 its.M365IntgTestSetup
|
||||
}
|
||||
|
||||
func TestPreparedBackupTeamsChatsE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &PreparedBackupTeamsChatsE2ESuite{
|
||||
Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
|
||||
})
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupTeamsChatsE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
t.Skip("not fully implemented")
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
suite.m365 = its.GetM365(t)
|
||||
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
|
||||
suite.backupOps = make(map[path.CategoryType]string)
|
||||
|
||||
var (
|
||||
teamschats = []string{suite.m365.User.ID}
|
||||
ins = idname.NewCache(map[string]string{suite.m365.User.ID: suite.m365.User.ID})
|
||||
cats = []path.CategoryType{
|
||||
path.ChatsCategory,
|
||||
}
|
||||
)
|
||||
|
||||
for _, set := range cats {
|
||||
var (
|
||||
sel = selectors.NewTeamsChatsBackup(teamschats)
|
||||
scopes []selectors.TeamsChatsScope
|
||||
)
|
||||
|
||||
switch set {
|
||||
case path.ChatsCategory:
|
||||
scopes = selTD.TeamsChatsBackupChatScope(sel)
|
||||
}
|
||||
|
||||
sel.Include(scopes)
|
||||
|
||||
bop, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
err = bop.Run(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
bIDs := string(bop.Results.BackupID)
|
||||
|
||||
// sanity check, ensure we can find the backup and its details immediately
|
||||
b, err := suite.dpnd.repo.Backup(ctx, string(bop.Results.BackupID))
|
||||
require.NoError(t, err, "retrieving recent backup by ID")
|
||||
require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")
|
||||
|
||||
_, b, errs := suite.dpnd.repo.GetBackupDetails(ctx, bIDs)
|
||||
require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
|
||||
require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
|
||||
require.Equal(t, bIDs, string(b.ID), "repo details matches results id")
|
||||
|
||||
suite.backupOps[set] = string(b.ID)
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_chats() {
|
||||
runTeamsChatsListCmdTest(suite, path.ChatsCategory)
|
||||
}
|
||||
|
||||
func runTeamsChatsListCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "list", "chats",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
|
||||
cli.BuildCommandTree(cmd)
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// compare the output
|
||||
result := suite.dpnd.recorder.String()
|
||||
assert.Contains(t, result, suite.backupOps[category])
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_singleID_chats() {
|
||||
runTeamsChatsListSingleCmdTest(suite, path.ChatsCategory)
|
||||
}
|
||||
|
||||
func runTeamsChatsListSingleCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
bID := suite.backupOps[category]
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "list", "chats",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--backup", string(bID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// compare the output
|
||||
result := suite.dpnd.recorder.String()
|
||||
assert.Contains(t, result, bID)
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_badID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "list", "chats",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--backup", "smarfs")
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsDetailsCmd_chats() {
|
||||
runTeamsChatsDetailsCmdTest(suite, path.ChatsCategory)
|
||||
}
|
||||
|
||||
func runTeamsChatsDetailsCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
|
||||
suite.dpnd.recorder.Reset()
|
||||
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
bID := suite.backupOps[category]
|
||||
|
||||
// fetch the details from the repo first
|
||||
deets, _, errs := suite.dpnd.repo.GetBackupDetails(ctx, string(bID))
|
||||
require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
|
||||
require.Empty(t, errs.Recovered())
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "details", "chats",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupFN, string(bID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
cmd.SetOut(&suite.dpnd.recorder)
|
||||
|
||||
ctx = print.SetRootCmd(ctx, cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// compare the output
|
||||
result := suite.dpnd.recorder.String()
|
||||
|
||||
i := 0
|
||||
foundFolders := 0
|
||||
|
||||
for _, ent := range deets.Entries {
|
||||
// Skip folders as they don't mean anything to the end teamschat.
|
||||
if ent.Folder != nil {
|
||||
foundFolders++
|
||||
continue
|
||||
}
|
||||
|
||||
suite.Run(fmt.Sprintf("detail %d", i), func() {
|
||||
assert.Contains(suite.T(), result, ent.ShortRef)
|
||||
})
|
||||
|
||||
i++
|
||||
}
|
||||
|
||||
// We only backup the default folder for each category so there should be at
|
||||
// least that folder (we don't make details entries for prefix folders).
|
||||
assert.GreaterOrEqual(t, foundFolders, 1)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// tests for deleting backups
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
type BackupDeleteTeamsChatsE2ESuite struct {
|
||||
tester.Suite
|
||||
dpnd dependencies
|
||||
backupOps [3]operations.BackupOperation
|
||||
}
|
||||
|
||||
func TestBackupDeleteTeamsChatsE2ESuite(t *testing.T) {
|
||||
suite.Run(t, &BackupDeleteTeamsChatsE2ESuite{
|
||||
Suite: tester.NewE2ESuite(
|
||||
t,
|
||||
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
|
||||
})
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteTeamsChatsE2ESuite) SetupSuite() {
|
||||
t := suite.T()
|
||||
t.Skip("not fully implemented")
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
defer flush()
|
||||
|
||||
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
|
||||
|
||||
m365TeamsChatID := tconfig.M365TeamID(t)
|
||||
teamschats := []string{m365TeamsChatID}
|
||||
|
||||
// some tests require an existing backup
|
||||
sel := selectors.NewTeamsChatsBackup(teamschats)
|
||||
sel.Include(selTD.TeamsChatsBackupChatScope(sel))
|
||||
|
||||
for i := 0; i < cap(suite.backupOps); i++ {
|
||||
backupOp, err := suite.dpnd.repo.NewBackup(ctx, sel.Selector)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
suite.backupOps[i] = backupOp
|
||||
|
||||
err = suite.backupOps[i].Run(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "delete", "chats",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupIDsFN,
|
||||
fmt.Sprintf("%s,%s",
|
||||
string(suite.backupOps[0].Results.BackupID),
|
||||
string(suite.backupOps[1].Results.BackupID)))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// a follow-up details call should fail, due to the backup ID being deleted
|
||||
cmd = cliTD.StubRootCmd(
|
||||
"backup", "details", "chats",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--backups", string(suite.backupOps[0].Results.BackupID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
err = cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_SingleID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "delete", "chats",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupFN,
|
||||
string(suite.backupOps[2].Results.BackupID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
// run the command
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// a follow-up details call should fail, due to the backup ID being deleted
|
||||
cmd = cliTD.StubRootCmd(
|
||||
"backup", "details", "chats",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--backup", string(suite.backupOps[2].Results.BackupID))
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
err = cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_UnknownID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "delete", "chats",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
|
||||
"--"+flags.BackupIDsFN, uuid.NewString())
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
// unknown backupIDs should error since the modelStore can't find the backup
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_NoBackupID() {
|
||||
t := suite.T()
|
||||
|
||||
ctx, flush := tester.NewContext(t)
|
||||
ctx = config.SetViper(ctx, suite.dpnd.vpr)
|
||||
|
||||
defer flush()
|
||||
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "delete", "chats",
|
||||
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
|
||||
cli.BuildCommandTree(cmd)
|
||||
|
||||
// empty backupIDs should error since no data provided
|
||||
err := cmd.ExecuteContext(ctx)
|
||||
require.Error(t, err, clues.ToCore(err))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func buildTeamsChatsBackupCmd(
|
||||
ctx context.Context,
|
||||
configFile, resource, category string,
|
||||
recorder *strings.Builder,
|
||||
) (*cobra.Command, context.Context) {
|
||||
cmd := cliTD.StubRootCmd(
|
||||
"backup", "create", "chats",
|
||||
"--"+flags.ConfigFileFN, configFile,
|
||||
"--"+flags.UserFN, resource,
|
||||
"--"+flags.CategoryDataFN, category)
|
||||
cli.BuildCommandTree(cmd)
|
||||
cmd.SetOut(recorder)
|
||||
|
||||
return cmd, print.SetRootCmd(ctx, cmd)
|
||||
}
|
||||
@ -1,248 +0,0 @@
|
||||
package backup
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
|
||||
cliTD "github.com/alcionai/corso/src/cli/testdata"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
)
|
||||
|
||||
type TeamsChatsUnitSuite struct {
|
||||
tester.Suite
|
||||
}
|
||||
|
||||
func TestTeamsChatsUnitSuite(t *testing.T) {
|
||||
suite.Run(t, &TeamsChatsUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||
}
|
||||
|
||||
func (suite *TeamsChatsUnitSuite) TestAddTeamsChatsCommands() {
|
||||
expectUse := teamschatsServiceCommand
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
use string
|
||||
expectUse string
|
||||
expectShort string
|
||||
expectRunE func(*cobra.Command, []string) error
|
||||
}{
|
||||
{
|
||||
name: "create teamschats",
|
||||
use: createCommand,
|
||||
expectUse: expectUse + " " + teamschatsServiceCommandCreateUseSuffix,
|
||||
expectShort: teamschatsCreateCmd().Short,
|
||||
expectRunE: createTeamsChatsCmd,
|
||||
},
|
||||
{
|
||||
name: "list teamschats",
|
||||
use: listCommand,
|
||||
expectUse: expectUse,
|
||||
expectShort: teamschatsListCmd().Short,
|
||||
expectRunE: listTeamsChatsCmd,
|
||||
},
|
||||
{
|
||||
name: "details teamschats",
|
||||
use: detailsCommand,
|
||||
expectUse: expectUse + " " + teamschatsServiceCommandDetailsUseSuffix,
|
||||
expectShort: teamschatsDetailsCmd().Short,
|
||||
expectRunE: detailsTeamsChatsCmd,
|
||||
},
|
||||
{
|
||||
name: "delete teamschats",
|
||||
use: deleteCommand,
|
||||
expectUse: expectUse + " " + teamschatsServiceCommandDeleteUseSuffix,
|
||||
expectShort: teamschatsDeleteCmd().Short,
|
||||
expectRunE: deleteTeamsChatsCmd,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := &cobra.Command{Use: test.use}
|
||||
|
||||
c := addTeamsChatsCommands(cmd)
|
||||
require.NotNil(t, c)
|
||||
|
||||
cmds := cmd.Commands()
|
||||
require.Len(t, cmds, 1)
|
||||
|
||||
child := cmds[0]
|
||||
assert.Equal(t, test.expectUse, child.Use)
|
||||
assert.Equal(t, test.expectShort, child.Short)
|
||||
tester.AreSameFunc(t, test.expectRunE, child.RunE)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *TeamsChatsUnitSuite) TestValidateTeamsChatsBackupCreateFlags() {
|
||||
table := []struct {
|
||||
name string
|
||||
cats []string
|
||||
expect assert.ErrorAssertionFunc
|
||||
}{
|
||||
{
|
||||
name: "none",
|
||||
cats: []string{},
|
||||
expect: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "chats",
|
||||
cats: []string{flags.DataChats},
|
||||
expect: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "all allowed",
|
||||
cats: []string{
|
||||
flags.DataChats,
|
||||
},
|
||||
expect: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "bad inputs",
|
||||
cats: []string{"foo"},
|
||||
expect: assert.Error,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
err := validateTeamsChatsBackupCreateFlags([]string{"*"}, test.cats)
|
||||
test.expect(suite.T(), err, clues.ToCore(err))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *TeamsChatsUnitSuite) TestBackupCreateFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: createCommand},
|
||||
addTeamsChatsCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
teamschatsServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.UserFN, flagsTD.FlgInputs(flagsTD.UsersInput),
|
||||
"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.TeamsChatsCategoryDataInput),
|
||||
},
|
||||
flagsTD.PreparedGenericBackupFlags(),
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
opts := utils.MakeTeamsChatsOpts(cmd)
|
||||
co := utils.Control()
|
||||
backupOpts := utils.ParseBackupOptions()
|
||||
|
||||
// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
|
||||
// restore flags are switched over too and we no longer parse flags beyond
|
||||
// connection info into control.Options.
|
||||
assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
|
||||
assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
|
||||
assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)
|
||||
|
||||
assert.Equal(t, control.FailFast, co.FailureHandling)
|
||||
assert.True(t, co.ToggleFeatures.DisableIncrementals)
|
||||
assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
|
||||
|
||||
assert.ElementsMatch(t, flagsTD.UsersInput, opts.Users)
|
||||
flagsTD.AssertGenericBackupFlags(t, cmd)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *TeamsChatsUnitSuite) TestBackupListFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: listCommand},
|
||||
addTeamsChatsCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
teamschatsServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
},
|
||||
flagsTD.PreparedBackupListFlags(),
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
flagsTD.AssertBackupListFlags(t, cmd)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *TeamsChatsUnitSuite) TestBackupDetailsFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: detailsCommand},
|
||||
addTeamsChatsCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
teamschatsServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
"--" + flags.SkipReduceFN,
|
||||
},
|
||||
flagsTD.PreparedTeamsChatsFlags(),
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
co := utils.Control()
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
assert.True(t, co.SkipReduce)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
flagsTD.AssertTeamsChatsFlags(t, cmd)
|
||||
}
|
||||
|
||||
func (suite *TeamsChatsUnitSuite) TestBackupDeleteFlags() {
|
||||
t := suite.T()
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
&cobra.Command{Use: deleteCommand},
|
||||
addTeamsChatsCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
teamschatsServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
},
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
}
|
||||
@ -10,16 +10,15 @@ import (
|
||||
"golang.org/x/exp/slices"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/backup"
|
||||
"github.com/alcionai/corso/src/cli/debug"
|
||||
"github.com/alcionai/corso/src/cli/export"
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/config"
|
||||
"github.com/alcionai/corso/src/cli/help"
|
||||
"github.com/alcionai/corso/src/cli/options"
|
||||
"github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/cli/repo"
|
||||
"github.com/alcionai/corso/src/cli/restore"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/internal/observe"
|
||||
"github.com/alcionai/corso/src/internal/version"
|
||||
"github.com/alcionai/corso/src/pkg/config"
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
)
|
||||
|
||||
@ -38,27 +37,57 @@ var corsoCmd = &cobra.Command{
|
||||
}
|
||||
|
||||
func preRun(cc *cobra.Command, args []string) error {
|
||||
if err := config.InitCmd(cc, args); err != nil {
|
||||
if err := config.InitFunc(cc, args); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ctx := cc.Context()
|
||||
log := logger.Ctx(ctx)
|
||||
|
||||
fs := flags.GetPopulatedFlags(cc)
|
||||
flagSl := make([]string, 0, len(fs))
|
||||
flags := utils.GetPopulatedFlags(cc)
|
||||
flagSl := make([]string, 0, len(flags))
|
||||
|
||||
// currently only tracking flag names to avoid pii leakage.
|
||||
for f := range fs {
|
||||
for f := range flags {
|
||||
flagSl = append(flagSl, f)
|
||||
}
|
||||
|
||||
avoidTheseCommands := []string{
|
||||
"corso", "env", "help", "backup", "details", "list", "restore", "export", "delete", "repo", "init", "connect",
|
||||
"corso", "env", "help", "backup", "details", "list", "restore", "delete", "repo", "init", "connect",
|
||||
}
|
||||
|
||||
if len(logger.ResolvedLogFile) > 0 && !slices.Contains(avoidTheseCommands, cc.Use) {
|
||||
print.Infof(ctx, "Logging to file: %s", logger.ResolvedLogFile)
|
||||
if len(logger.LogFile) > 0 && !slices.Contains(avoidTheseCommands, cc.Use) {
|
||||
print.Info(ctx, "Logging to file: "+logger.LogFile)
|
||||
}
|
||||
|
||||
avoidTheseDescription := []string{
|
||||
"Initialize a repository.",
|
||||
"Initialize a S3 repository",
|
||||
"Help about any command",
|
||||
"Free, Secure, Open-Source Backup for M365.",
|
||||
}
|
||||
|
||||
if !slices.Contains(avoidTheseDescription, cc.Short) {
|
||||
overrides := map[string]string{}
|
||||
if cc.Short == "Connect to a S3 repository" {
|
||||
// Get s3 overrides for connect. Ideally we also need this
|
||||
// for init, but we don't reach this block for init.
|
||||
overrides = repo.S3Overrides()
|
||||
}
|
||||
|
||||
cfg, err := config.GetConfigRepoDetails(ctx, true, overrides)
|
||||
if err != nil {
|
||||
log.Error("Error while getting config info to run command: ", cc.Use)
|
||||
return err
|
||||
}
|
||||
|
||||
utils.SendStartCorsoEvent(
|
||||
ctx,
|
||||
cfg.Storage,
|
||||
cfg.Account.ID(),
|
||||
map[string]any{"command": cc.CommandPath()},
|
||||
cfg.RepoID,
|
||||
options.Control())
|
||||
}
|
||||
|
||||
// handle deprecated user flag in Backup exchange command
|
||||
@ -73,7 +102,7 @@ func preRun(cc *cobra.Command, args []string) error {
|
||||
|
||||
func handleMailBoxFlag(ctx context.Context, c *cobra.Command, flagNames []string) {
|
||||
if !slices.Contains(flagNames, "user") && !slices.Contains(flagNames, "mailbox") {
|
||||
print.Err(ctx, "either --user or --mailbox flag is required")
|
||||
print.Errf(ctx, "either --user or --mailbox flag is required")
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
@ -109,7 +138,7 @@ func CorsoCommand() *cobra.Command {
|
||||
func BuildCommandTree(cmd *cobra.Command) {
|
||||
// want to order flags explicitly
|
||||
cmd.PersistentFlags().SortFlags = false
|
||||
flags.AddRunModeFlag(cmd, true)
|
||||
utils.AddRunModeFlag(cmd, true)
|
||||
|
||||
cmd.Flags().BoolP("version", "v", false, "current version info")
|
||||
cmd.PersistentPreRunE = preRun
|
||||
@ -117,7 +146,7 @@ func BuildCommandTree(cmd *cobra.Command) {
|
||||
logger.AddLoggingFlags(cmd)
|
||||
observe.AddProgressBarFlags(cmd)
|
||||
print.AddOutputFlag(cmd)
|
||||
flags.AddGlobalOperationFlags(cmd)
|
||||
options.AddGlobalOperationFlags(cmd)
|
||||
cmd.SetUsageTemplate(indentExamplesTemplate(corsoCmd.UsageTemplate()))
|
||||
|
||||
cmd.CompletionOptions.DisableDefaultCmd = true
|
||||
@ -125,8 +154,6 @@ func BuildCommandTree(cmd *cobra.Command) {
|
||||
repo.AddCommands(cmd)
|
||||
backup.AddCommands(cmd)
|
||||
restore.AddCommands(cmd)
|
||||
export.AddCommands(cmd)
|
||||
debug.AddCommands(cmd)
|
||||
help.AddCommands(cmd)
|
||||
}
|
||||
|
||||
@ -138,15 +165,15 @@ func BuildCommandTree(cmd *cobra.Command) {
|
||||
func Handle() {
|
||||
//nolint:forbidigo
|
||||
ctx := config.Seed(context.Background())
|
||||
ctx, log := logger.Seed(ctx, logger.PreloadLoggingFlags(os.Args[1:]))
|
||||
ctx = print.SetRootCmd(ctx, corsoCmd)
|
||||
ctx = observe.SeedObserver(ctx, print.StderrWriter(ctx), observe.PreloadFlags())
|
||||
|
||||
observe.SeedWriter(ctx, print.StderrWriter(ctx), observe.PreloadFlags())
|
||||
|
||||
BuildCommandTree(corsoCmd)
|
||||
|
||||
defer func() {
|
||||
observe.Flush(ctx) // flush the progress bars
|
||||
ctx, log := logger.Seed(ctx, logger.PreloadLoggingFlags(os.Args[1:]))
|
||||
|
||||
defer func() {
|
||||
_ = log.Sync() // flush all logs in the buffer
|
||||
}()
|
||||
|
||||
|
||||
89
src/cli/config/account.go
Normal file
89
src/cli/config/account.go
Normal file
@ -0,0 +1,89 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/viper"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
"github.com/alcionai/corso/src/pkg/credentials"
|
||||
)
|
||||
|
||||
// prerequisite: readRepoConfig must have been run prior to this to populate the global viper values.
|
||||
func m365ConfigsFromViper(vpr *viper.Viper) (account.M365Config, error) {
|
||||
var m365 account.M365Config
|
||||
|
||||
providerType := vpr.GetString(AccountProviderTypeKey)
|
||||
if providerType != account.ProviderM365.String() {
|
||||
return m365, clues.New("unsupported account provider: " + providerType)
|
||||
}
|
||||
|
||||
m365.AzureTenantID = vpr.GetString(AzureTenantIDKey)
|
||||
|
||||
return m365, nil
|
||||
}
|
||||
|
||||
func m365Overrides(in map[string]string) map[string]string {
|
||||
return map[string]string{
|
||||
account.AzureTenantID: in[account.AzureTenantID],
|
||||
AccountProviderTypeKey: in[AccountProviderTypeKey],
|
||||
}
|
||||
}
|
||||
|
||||
// configureAccount builds a complete account configuration from a mix of
|
||||
// viper properties and manual overrides.
|
||||
func configureAccount(
|
||||
vpr *viper.Viper,
|
||||
readConfigFromViper bool,
|
||||
overrides map[string]string,
|
||||
) (account.Account, error) {
|
||||
var (
|
||||
m365Cfg account.M365Config
|
||||
acct account.Account
|
||||
err error
|
||||
)
|
||||
|
||||
if readConfigFromViper {
|
||||
m365Cfg, err = m365ConfigsFromViper(vpr)
|
||||
if err != nil {
|
||||
return acct, clues.Wrap(err, "reading m365 configs from corso config file")
|
||||
}
|
||||
|
||||
if err := mustMatchConfig(vpr, m365Overrides(overrides)); err != nil {
|
||||
return acct, clues.Wrap(err, "verifying m365 configs in corso config file")
|
||||
}
|
||||
}
|
||||
|
||||
// compose the m365 config and credentials
|
||||
m365 := credentials.GetM365()
|
||||
if err := m365.Validate(); err != nil {
|
||||
return acct, clues.Wrap(err, "validating m365 credentials")
|
||||
}
|
||||
|
||||
m365Cfg = account.M365Config{
|
||||
M365: m365,
|
||||
AzureTenantID: common.First(
|
||||
overrides[account.AzureTenantID],
|
||||
m365Cfg.AzureTenantID,
|
||||
os.Getenv(account.AzureTenantID)),
|
||||
}
|
||||
|
||||
// ensure required properties are present
|
||||
if err := requireProps(map[string]string{
|
||||
credentials.AzureClientID: m365Cfg.AzureClientID,
|
||||
credentials.AzureClientSecret: m365Cfg.AzureClientSecret,
|
||||
account.AzureTenantID: m365Cfg.AzureTenantID,
|
||||
}); err != nil {
|
||||
return acct, err
|
||||
}
|
||||
|
||||
// build the account
|
||||
acct, err = account.NewAccount(account.ProviderM365, m365Cfg)
|
||||
if err != nil {
|
||||
return acct, clues.Wrap(err, "retrieving m365 account configuration")
|
||||
}
|
||||
|
||||
return acct, nil
|
||||
}
|
||||
@ -2,38 +2,38 @@ package config
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/viper"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
. "github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/internal/common/str"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
"github.com/alcionai/corso/src/pkg/control/repository"
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
"github.com/alcionai/corso/src/pkg/path"
|
||||
"github.com/alcionai/corso/src/pkg/storage"
|
||||
)
|
||||
|
||||
const (
|
||||
// S3 config
|
||||
StorageProviderTypeKey = "provider"
|
||||
BucketNameKey = "bucket"
|
||||
EndpointKey = "endpoint"
|
||||
PrefixKey = "prefix"
|
||||
DisableTLSKey = "disable_tls"
|
||||
DisableTLSVerificationKey = "disable_tls_verification"
|
||||
RepoID = "repo_id"
|
||||
|
||||
// Corso passphrase in config
|
||||
CorsoPassphrase = "passphrase"
|
||||
CorsoUser = "corso_user"
|
||||
CorsoHost = "corso_host"
|
||||
// M365 config
|
||||
AccountProviderTypeKey = "account_provider"
|
||||
AzureTenantIDKey = "azure_tenantid"
|
||||
)
|
||||
|
||||
var (
|
||||
defaultConfigFilePath string
|
||||
configFilePath string
|
||||
configFilePathFlag string
|
||||
configDir string
|
||||
displayDefaultFP = filepath.Join("$HOME", ".corso.toml")
|
||||
)
|
||||
@ -44,8 +44,6 @@ type RepoDetails struct {
|
||||
Storage storage.Storage
|
||||
Account account.Account
|
||||
RepoID string
|
||||
RepoUser string
|
||||
RepoHost string
|
||||
}
|
||||
|
||||
// Attempts to set the default dir and config file path.
|
||||
@ -57,7 +55,7 @@ func init() {
|
||||
Infof(context.Background(), "cannot stat CORSO_CONFIG_DIR [%s]: %v", envDir, err)
|
||||
} else {
|
||||
configDir = envDir
|
||||
defaultConfigFilePath = filepath.Join(configDir, ".corso.toml")
|
||||
configFilePath = filepath.Join(configDir, ".corso.toml")
|
||||
}
|
||||
}
|
||||
|
||||
@ -68,71 +66,43 @@ func init() {
|
||||
|
||||
if len(configDir) == 0 {
|
||||
configDir = homeDir
|
||||
defaultConfigFilePath = filepath.Join(configDir, ".corso.toml")
|
||||
configFilePath = filepath.Join(configDir, ".corso.toml")
|
||||
}
|
||||
}
|
||||
|
||||
// adds the persistent flag --config-file to the provided command.
|
||||
func AddConfigFlags(cmd *cobra.Command) {
|
||||
pf := cmd.PersistentFlags()
|
||||
pf.StringVar(
|
||||
&flags.ConfigFileFV,
|
||||
flags.ConfigFileFN, displayDefaultFP, "config file location")
|
||||
fs := cmd.PersistentFlags()
|
||||
fs.StringVar(
|
||||
&configFilePathFlag,
|
||||
"config-file", displayDefaultFP, "config file location")
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------------------------------------
|
||||
// Initialization & Storage
|
||||
// ---------------------------------------------------------------------------------------------------------
|
||||
|
||||
// InitCmd provides a func that lazily initializes viper and
|
||||
// InitFunc provides a func that lazily initializes viper and
|
||||
// verifies that the configuration was able to read a file.
|
||||
func InitCmd(cmd *cobra.Command, args []string) error {
|
||||
_, err := commonInit(cmd.Context(), flags.ConfigFileFV)
|
||||
return clues.Stack(err).OrNil()
|
||||
}
|
||||
|
||||
// InitConfig allows sdk consumers to initialize viper.
|
||||
func InitConfig(
|
||||
ctx context.Context,
|
||||
userDefinedConfigFile string,
|
||||
) (context.Context, error) {
|
||||
return commonInit(ctx, userDefinedConfigFile)
|
||||
}
|
||||
|
||||
func commonInit(
|
||||
ctx context.Context,
|
||||
userDefinedConfigFile string,
|
||||
) (context.Context, error) {
|
||||
fp := userDefinedConfigFile
|
||||
func InitFunc(cmd *cobra.Command, args []string) error {
|
||||
fp := configFilePathFlag
|
||||
if len(fp) == 0 || fp == displayDefaultFP {
|
||||
fp = defaultConfigFilePath
|
||||
fp = configFilePath
|
||||
}
|
||||
|
||||
vpr := GetViper(ctx)
|
||||
if err := initWithViper(ctx, vpr, fp); err != nil {
|
||||
return ctx, err
|
||||
err := initWithViper(GetViper(cmd.Context()), fp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return SetViper(ctx, vpr), clues.Stack(Read(ctx)).OrNil()
|
||||
return Read(cmd.Context())
|
||||
}
|
||||
|
||||
// initWithViper implements InitConfig, but takes in a viper
|
||||
// struct for testing.
|
||||
func initWithViper(
|
||||
ctx context.Context,
|
||||
vpr *viper.Viper,
|
||||
configFP string,
|
||||
) error {
|
||||
logger.Ctx(ctx).Debugw("initializing viper", "config_file_path", configFP)
|
||||
|
||||
defer func() {
|
||||
logger.Ctx(ctx).Debugw("initialized config", "config_file_path", configFP)
|
||||
}()
|
||||
|
||||
func initWithViper(vpr *viper.Viper, configFP string) error {
|
||||
// Configure default config file location
|
||||
if len(configFP) == 0 || configFP == displayDefaultFP {
|
||||
configFP = defaultConfigFilePath
|
||||
|
||||
if configFP == "" || configFP == displayDefaultFP {
|
||||
// Find home directory.
|
||||
_, err := os.Stat(configDir)
|
||||
if err != nil {
|
||||
@ -143,7 +113,16 @@ func initWithViper(
|
||||
vpr.AddConfigPath(configDir)
|
||||
vpr.SetConfigType("toml")
|
||||
vpr.SetConfigName(".corso")
|
||||
} else {
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
vpr.SetConfigFile(configFP)
|
||||
// We also configure the path, type and filename
|
||||
// because `vpr.SafeWriteConfig` needs these set to
|
||||
// work correctly (it does not use the configured file)
|
||||
vpr.AddConfigPath(filepath.Dir(configFP))
|
||||
|
||||
ext := filepath.Ext(configFP)
|
||||
if len(ext) == 0 {
|
||||
return clues.New("config file requires an extension e.g. `toml`")
|
||||
@ -153,12 +132,6 @@ func initWithViper(
|
||||
fileName = strings.TrimSuffix(fileName, ext)
|
||||
vpr.SetConfigType(strings.TrimPrefix(ext, "."))
|
||||
vpr.SetConfigName(fileName)
|
||||
vpr.SetConfigFile(configFP)
|
||||
// We also configure the path, type and filename
|
||||
// because `vpr.SafeWriteConfig` needs these set to
|
||||
// work correctly (it does not use the configured file)
|
||||
vpr.AddConfigPath(filepath.Dir(configFP))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@ -214,44 +187,34 @@ func Read(ctx context.Context) error {
|
||||
// It does not check for conflicts or existing data.
|
||||
func WriteRepoConfig(
|
||||
ctx context.Context,
|
||||
wcs storage.WriteConfigToStorer,
|
||||
s3Config storage.S3Config,
|
||||
m365Config account.M365Config,
|
||||
repoOpts repository.Options,
|
||||
repoID string,
|
||||
) error {
|
||||
return writeRepoConfigWithViper(
|
||||
GetViper(ctx),
|
||||
wcs,
|
||||
m365Config,
|
||||
repoOpts,
|
||||
repoID)
|
||||
return writeRepoConfigWithViper(GetViper(ctx), s3Config, m365Config, repoID)
|
||||
}
|
||||
|
||||
// writeRepoConfigWithViper implements WriteRepoConfig, but takes in a viper
|
||||
// struct for testing.
|
||||
func writeRepoConfigWithViper(
|
||||
vpr *viper.Viper,
|
||||
wcs storage.WriteConfigToStorer,
|
||||
s3Config storage.S3Config,
|
||||
m365Config account.M365Config,
|
||||
repoOpts repository.Options,
|
||||
repoID string,
|
||||
) error {
|
||||
// Write storage configuration to viper
|
||||
wcs.WriteConfigToStore(vpr)
|
||||
|
||||
s3Config = s3Config.Normalize()
|
||||
// Rudimentary support for persisting repo config
|
||||
// TODO: Handle conflicts, support other config types
|
||||
vpr.Set(StorageProviderTypeKey, storage.ProviderS3.String())
|
||||
vpr.Set(BucketNameKey, s3Config.Bucket)
|
||||
vpr.Set(EndpointKey, s3Config.Endpoint)
|
||||
vpr.Set(PrefixKey, s3Config.Prefix)
|
||||
vpr.Set(DisableTLSKey, s3Config.DoNotUseTLS)
|
||||
vpr.Set(DisableTLSVerificationKey, s3Config.DoNotVerifyTLS)
|
||||
vpr.Set(RepoID, repoID)
|
||||
|
||||
// Need if-checks as Viper will write empty values otherwise.
|
||||
if len(repoOpts.User) > 0 {
|
||||
vpr.Set(CorsoUser, repoOpts.User)
|
||||
}
|
||||
|
||||
if len(repoOpts.Host) > 0 {
|
||||
vpr.Set(CorsoHost, repoOpts.Host)
|
||||
}
|
||||
|
||||
vpr.Set(account.AccountProviderTypeKey, account.ProviderM365.String())
|
||||
vpr.Set(account.AzureTenantIDKey, m365Config.AzureTenantID)
|
||||
vpr.Set(AccountProviderTypeKey, account.ProviderM365.String())
|
||||
vpr.Set(AzureTenantIDKey, m365Config.AzureTenantID)
|
||||
|
||||
if err := vpr.SafeWriteConfig(); err != nil {
|
||||
if _, ok := err.(viper.ConfigFileAlreadyExistsError); ok {
|
||||
@ -264,36 +227,30 @@ func writeRepoConfigWithViper(
|
||||
return nil
|
||||
}
|
||||
|
||||
// ReadCorsoConfig creates a storage and account instance by mediating all the possible
|
||||
// GetStorageAndAccount creates a storage and account instance by mediating all the possible
|
||||
// data sources (config file, env vars, flag overrides) and the config file.
|
||||
func ReadCorsoConfig(
|
||||
func GetConfigRepoDetails(
|
||||
ctx context.Context,
|
||||
provider storage.ProviderType,
|
||||
readFromFile bool,
|
||||
mustMatchFromConfig bool,
|
||||
overrides map[string]string,
|
||||
) (RepoDetails, error) {
|
||||
config, err := getStorageAndAccountWithViper(
|
||||
ctx,
|
||||
GetViper(ctx),
|
||||
provider,
|
||||
readFromFile,
|
||||
mustMatchFromConfig,
|
||||
overrides)
|
||||
|
||||
) (
|
||||
RepoDetails,
|
||||
error,
|
||||
) {
|
||||
config, err := getStorageAndAccountWithViper(GetViper(ctx), readFromFile, overrides)
|
||||
return config, err
|
||||
}
|
||||
|
||||
// getSorageAndAccountWithViper implements GetSorageAndAccount, but takes in a viper
|
||||
// struct for testing.
|
||||
func getStorageAndAccountWithViper(
|
||||
ctx context.Context,
|
||||
vpr *viper.Viper,
|
||||
provider storage.ProviderType,
|
||||
readFromFile bool,
|
||||
mustMatchFromConfig bool,
|
||||
overrides map[string]string,
|
||||
) (RepoDetails, error) {
|
||||
) (
|
||||
RepoDetails,
|
||||
error,
|
||||
) {
|
||||
var (
|
||||
config RepoDetails
|
||||
err error
|
||||
@ -303,19 +260,12 @@ func getStorageAndAccountWithViper(
|
||||
|
||||
// possibly read the prior config from a .corso file
|
||||
if readFromFile {
|
||||
ctx = clues.Add(ctx, "viper_config_file", vpr.ConfigFileUsed())
|
||||
logger.Ctx(ctx).Debug("reading config from file")
|
||||
|
||||
if err := vpr.ReadInConfig(); err != nil {
|
||||
configNotSet := errors.As(err, &viper.ConfigFileNotFoundError{})
|
||||
configNotFound := errors.Is(err, fs.ErrNotExist)
|
||||
|
||||
if !configNotSet && !configNotFound {
|
||||
err = vpr.ReadInConfig()
|
||||
if err != nil {
|
||||
if _, ok := err.(viper.ConfigFileNotFoundError); !ok {
|
||||
return config, clues.Wrap(err, "reading corso config file: "+vpr.ConfigFileUsed())
|
||||
}
|
||||
|
||||
logger.Ctx(ctx).Info("config file not found")
|
||||
|
||||
readConfigFromViper = false
|
||||
}
|
||||
|
||||
@ -323,49 +273,36 @@ func getStorageAndAccountWithViper(
|
||||
config.RepoID = vpr.GetString(RepoID)
|
||||
}
|
||||
|
||||
config.Account, err = configureAccount(vpr, readConfigFromViper, mustMatchFromConfig, overrides)
|
||||
config.Account, err = configureAccount(vpr, readConfigFromViper, overrides)
|
||||
if err != nil {
|
||||
return config, clues.Wrap(err, "retrieving account configuration details")
|
||||
}
|
||||
|
||||
config.Storage, err = configureStorage(
|
||||
vpr,
|
||||
provider,
|
||||
readConfigFromViper,
|
||||
mustMatchFromConfig,
|
||||
overrides)
|
||||
config.Storage, err = configureStorage(vpr, readConfigFromViper, overrides)
|
||||
if err != nil {
|
||||
return config, clues.Wrap(err, "retrieving storage provider details")
|
||||
}
|
||||
|
||||
config.RepoUser, config.RepoHost = getUserHost(vpr, readConfigFromViper)
|
||||
|
||||
return config, nil
|
||||
}
|
||||
|
||||
func getUserHost(vpr *viper.Viper, readConfigFromViper bool) (string, string) {
|
||||
user := str.First(flags.UserMaintenanceFV, vpr.GetString(CorsoUser))
|
||||
host := str.First(flags.HostnameMaintenanceFV, vpr.GetString(CorsoHost))
|
||||
|
||||
// Fine if these are empty; later code will assign a meaningful default if
|
||||
// needed.
|
||||
return user, host
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helper funcs
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
var constToTomlKeyMap = map[string]string{
|
||||
account.AzureTenantID: account.AzureTenantIDKey,
|
||||
account.AccountProviderTypeKey: account.AccountProviderTypeKey,
|
||||
account.AzureTenantID: AzureTenantIDKey,
|
||||
AccountProviderTypeKey: AccountProviderTypeKey,
|
||||
storage.Bucket: BucketNameKey,
|
||||
storage.Endpoint: EndpointKey,
|
||||
storage.Prefix: PrefixKey,
|
||||
StorageProviderTypeKey: StorageProviderTypeKey,
|
||||
}
|
||||
|
||||
// mustMatchConfig compares the values of each key to their config file value in viper.
|
||||
// If any value differs from the viper value, an error is returned.
|
||||
// values in m that aren't stored in the config are ignored.
|
||||
// TODO(pandeyabs): This code is currently duplicated in 2 places.
|
||||
func mustMatchConfig(vpr *viper.Viper, m map[string]string, pathKeys []string) error {
|
||||
func mustMatchConfig(vpr *viper.Viper, m map[string]string) error {
|
||||
for k, v := range m {
|
||||
if len(v) == 0 {
|
||||
continue // empty variables will get caught by configuration validators, if necessary
|
||||
@ -377,16 +314,7 @@ func mustMatchConfig(vpr *viper.Viper, m map[string]string, pathKeys []string) e
|
||||
}
|
||||
|
||||
vv := vpr.GetString(tomlK)
|
||||
areEqual := false
|
||||
|
||||
// some of the values maybe paths, hence they require more than just string equality
|
||||
if len(pathKeys) > 0 && slices.Contains(pathKeys, k) {
|
||||
areEqual = path.ArePathsEquivalent(v, vv)
|
||||
} else {
|
||||
areEqual = v == vv
|
||||
}
|
||||
|
||||
if !areEqual {
|
||||
if v != vv {
|
||||
return clues.New("value of " + k + " (" + v + ") does not match corso configuration value (" + vv + ")")
|
||||
}
|
||||
}
|
||||
327
src/cli/config/config_test.go
Normal file
327
src/cli/config/config_test.go
Normal file
@ -0,0 +1,327 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/viper"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
"github.com/alcionai/corso/src/pkg/credentials"
|
||||
"github.com/alcionai/corso/src/pkg/storage"
|
||||
)
|
||||
|
||||
const (
|
||||
configFileTemplate = `
|
||||
` + BucketNameKey + ` = '%s'
|
||||
` + EndpointKey + ` = 's3.amazonaws.com'
|
||||
` + PrefixKey + ` = 'test-prefix/'
|
||||
` + StorageProviderTypeKey + ` = 'S3'
|
||||
` + AccountProviderTypeKey + ` = 'M365'
|
||||
` + AzureTenantIDKey + ` = '%s'
|
||||
` + DisableTLSKey + ` = 'false'
|
||||
` + DisableTLSVerificationKey + ` = 'false'
|
||||
`
|
||||
)
|
||||
|
||||
type ConfigSuite struct {
|
||||
tester.Suite
|
||||
}
|
||||
|
||||
func TestConfigSuite(t *testing.T) {
|
||||
suite.Run(t, &ConfigSuite{Suite: tester.NewUnitSuite(t)})
|
||||
}
|
||||
|
||||
func (suite *ConfigSuite) TestRequireProps() {
|
||||
table := []struct {
|
||||
name string
|
||||
props map[string]string
|
||||
errCheck assert.ErrorAssertionFunc
|
||||
}{
|
||||
{
|
||||
props: map[string]string{"exists": "I have seen the fnords!"},
|
||||
errCheck: assert.NoError,
|
||||
},
|
||||
{
|
||||
props: map[string]string{"not-exists": ""},
|
||||
errCheck: assert.Error,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
err := requireProps(test.props)
|
||||
test.errCheck(suite.T(), err, clues.ToCore(err))
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *ConfigSuite) TestReadRepoConfigBasic() {
|
||||
var (
|
||||
t = suite.T()
|
||||
vpr = viper.New()
|
||||
)
|
||||
|
||||
const (
|
||||
b = "read-repo-config-basic-bucket"
|
||||
tID = "6f34ac30-8196-469b-bf8f-d83deadbbbba"
|
||||
)
|
||||
|
||||
// Generate test config file
|
||||
testConfigData := fmt.Sprintf(configFileTemplate, b, tID)
|
||||
testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml")
|
||||
err := os.WriteFile(testConfigFilePath, []byte(testConfigData), 0o700)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
// Configure viper to read test config file
|
||||
vpr.SetConfigFile(testConfigFilePath)
|
||||
|
||||
// Read and validate config
|
||||
err = vpr.ReadInConfig()
|
||||
require.NoError(t, err, "reading repo config", clues.ToCore(err))
|
||||
|
||||
s3Cfg, err := s3ConfigsFromViper(vpr)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
assert.Equal(t, b, s3Cfg.Bucket)
|
||||
|
||||
m365, err := m365ConfigsFromViper(vpr)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
assert.Equal(t, tID, m365.AzureTenantID)
|
||||
}
|
||||
|
||||
func (suite *ConfigSuite) TestWriteReadConfig() {
|
||||
var (
|
||||
t = suite.T()
|
||||
vpr = viper.New()
|
||||
// Configure viper to read test config file
|
||||
testConfigFilePath = filepath.Join(t.TempDir(), "corso.toml")
|
||||
)
|
||||
|
||||
const (
|
||||
bkt = "write-read-config-bucket"
|
||||
tid = "3c0748d2-470e-444c-9064-1268e52609d5"
|
||||
)
|
||||
|
||||
err := initWithViper(vpr, testConfigFilePath)
|
||||
require.NoError(t, err, "initializing repo config", clues.ToCore(err))
|
||||
|
||||
s3Cfg := storage.S3Config{Bucket: bkt, DoNotUseTLS: true, DoNotVerifyTLS: true}
|
||||
m365 := account.M365Config{AzureTenantID: tid}
|
||||
|
||||
err = writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid")
|
||||
require.NoError(t, err, "writing repo config", clues.ToCore(err))
|
||||
|
||||
err = vpr.ReadInConfig()
|
||||
require.NoError(t, err, "reading repo config", clues.ToCore(err))
|
||||
|
||||
readS3Cfg, err := s3ConfigsFromViper(vpr)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
assert.Equal(t, readS3Cfg.Bucket, s3Cfg.Bucket)
|
||||
assert.Equal(t, readS3Cfg.DoNotUseTLS, s3Cfg.DoNotUseTLS)
|
||||
assert.Equal(t, readS3Cfg.DoNotVerifyTLS, s3Cfg.DoNotVerifyTLS)
|
||||
|
||||
readM365, err := m365ConfigsFromViper(vpr)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID)
|
||||
}
|
||||
|
||||
func (suite *ConfigSuite) TestMustMatchConfig() {
|
||||
var (
|
||||
t = suite.T()
|
||||
vpr = viper.New()
|
||||
// Configure viper to read test config file
|
||||
testConfigFilePath = filepath.Join(t.TempDir(), "corso.toml")
|
||||
)
|
||||
|
||||
const (
|
||||
bkt = "must-match-config-bucket"
|
||||
tid = "dfb12063-7598-458b-85ab-42352c5c25e2"
|
||||
)
|
||||
|
||||
err := initWithViper(vpr, testConfigFilePath)
|
||||
require.NoError(t, err, "initializing repo config")
|
||||
|
||||
s3Cfg := storage.S3Config{Bucket: bkt}
|
||||
m365 := account.M365Config{AzureTenantID: tid}
|
||||
|
||||
err = writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid")
|
||||
require.NoError(t, err, "writing repo config", clues.ToCore(err))
|
||||
|
||||
err = vpr.ReadInConfig()
|
||||
require.NoError(t, err, "reading repo config", clues.ToCore(err))
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
input map[string]string
|
||||
errCheck assert.ErrorAssertionFunc
|
||||
}{
|
||||
{
|
||||
name: "full match",
|
||||
input: map[string]string{
|
||||
storage.Bucket: bkt,
|
||||
account.AzureTenantID: tid,
|
||||
},
|
||||
errCheck: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "empty values",
|
||||
input: map[string]string{
|
||||
storage.Bucket: "",
|
||||
account.AzureTenantID: "",
|
||||
},
|
||||
errCheck: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "no overrides",
|
||||
input: map[string]string{},
|
||||
errCheck: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "nil map",
|
||||
input: nil,
|
||||
errCheck: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "no recognized keys",
|
||||
input: map[string]string{
|
||||
"fnords": "smurfs",
|
||||
"nonsense": "",
|
||||
},
|
||||
errCheck: assert.NoError,
|
||||
},
|
||||
{
|
||||
name: "mismatch",
|
||||
input: map[string]string{
|
||||
storage.Bucket: tid,
|
||||
account.AzureTenantID: bkt,
|
||||
},
|
||||
errCheck: assert.Error,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
test.errCheck(suite.T(), mustMatchConfig(vpr, test.input), clues.ToCore(err))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// ------------------------------------------------------------
|
||||
// integration tests
|
||||
// ------------------------------------------------------------
|
||||
|
||||
type ConfigIntegrationSuite struct {
|
||||
tester.Suite
|
||||
}
|
||||
|
||||
func TestConfigIntegrationSuite(t *testing.T) {
|
||||
suite.Run(t, &ConfigIntegrationSuite{Suite: tester.NewIntegrationSuite(
|
||||
t,
|
||||
[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
|
||||
)})
|
||||
}
|
||||
|
||||
func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
|
||||
t := suite.T()
|
||||
vpr := viper.New()
|
||||
|
||||
const (
|
||||
bkt = "get-storage-and-account-bucket"
|
||||
end = "https://get-storage-and-account.com"
|
||||
pfx = "get-storage-and-account-prefix/"
|
||||
tid = "3a2faa4e-a882-445c-9d27-f552ef189381"
|
||||
)
|
||||
|
||||
// Configure viper to read test config file
|
||||
testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml")
|
||||
|
||||
err := initWithViper(vpr, testConfigFilePath)
|
||||
require.NoError(t, err, "initializing repo config", clues.ToCore(err))
|
||||
|
||||
s3Cfg := storage.S3Config{
|
||||
Bucket: bkt,
|
||||
Endpoint: end,
|
||||
Prefix: pfx,
|
||||
DoNotVerifyTLS: true,
|
||||
DoNotUseTLS: true,
|
||||
}
|
||||
m365 := account.M365Config{AzureTenantID: tid}
|
||||
|
||||
err = writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid")
|
||||
require.NoError(t, err, "writing repo config", clues.ToCore(err))
|
||||
|
||||
err = vpr.ReadInConfig()
|
||||
require.NoError(t, err, "reading repo config", clues.ToCore(err))
|
||||
|
||||
config, err := getStorageAndAccountWithViper(vpr, true, nil)
|
||||
require.NoError(t, err, "getting storage and account from config", clues.ToCore(err))
|
||||
|
||||
readS3Cfg, err := config.Storage.S3Config()
|
||||
require.NoError(t, err, "reading s3 config from storage", clues.ToCore(err))
|
||||
assert.Equal(t, readS3Cfg.Bucket, s3Cfg.Bucket)
|
||||
assert.Equal(t, readS3Cfg.Endpoint, s3Cfg.Endpoint)
|
||||
assert.Equal(t, readS3Cfg.Prefix, s3Cfg.Prefix)
|
||||
assert.Equal(t, readS3Cfg.DoNotUseTLS, s3Cfg.DoNotUseTLS)
|
||||
assert.Equal(t, readS3Cfg.DoNotVerifyTLS, s3Cfg.DoNotVerifyTLS)
|
||||
assert.Equal(t, config.RepoID, "repoid")
|
||||
|
||||
common, err := config.Storage.CommonConfig()
|
||||
require.NoError(t, err, "reading common config from storage", clues.ToCore(err))
|
||||
assert.Equal(t, common.CorsoPassphrase, os.Getenv(credentials.CorsoPassphrase))
|
||||
|
||||
readM365, err := config.Account.M365Config()
|
||||
require.NoError(t, err, "reading m365 config from account", clues.ToCore(err))
|
||||
assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID)
|
||||
assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID))
|
||||
assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret))
|
||||
}
|
||||
|
||||
func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount_noFileOnlyOverrides() {
|
||||
t := suite.T()
|
||||
vpr := viper.New()
|
||||
|
||||
const (
|
||||
bkt = "get-storage-and-account-no-file-bucket"
|
||||
end = "https://get-storage-and-account.com/no-file"
|
||||
pfx = "get-storage-and-account-no-file-prefix/"
|
||||
tid = "88f8522b-18e4-4d0f-b514-2d7b34d4c5a1"
|
||||
)
|
||||
|
||||
m365 := account.M365Config{AzureTenantID: tid}
|
||||
|
||||
overrides := map[string]string{
|
||||
account.AzureTenantID: tid,
|
||||
AccountProviderTypeKey: account.ProviderM365.String(),
|
||||
storage.Bucket: bkt,
|
||||
storage.Endpoint: end,
|
||||
storage.Prefix: pfx,
|
||||
storage.DoNotUseTLS: "true",
|
||||
storage.DoNotVerifyTLS: "true",
|
||||
StorageProviderTypeKey: storage.ProviderS3.String(),
|
||||
}
|
||||
|
||||
config, err := getStorageAndAccountWithViper(vpr, false, overrides)
|
||||
require.NoError(t, err, "getting storage and account from config", clues.ToCore(err))
|
||||
|
||||
readS3Cfg, err := config.Storage.S3Config()
|
||||
require.NoError(t, err, "reading s3 config from storage", clues.ToCore(err))
|
||||
assert.Equal(t, readS3Cfg.Bucket, bkt)
|
||||
assert.Equal(t, config.RepoID, "")
|
||||
assert.Equal(t, readS3Cfg.Endpoint, end)
|
||||
assert.Equal(t, readS3Cfg.Prefix, pfx)
|
||||
assert.True(t, readS3Cfg.DoNotUseTLS)
|
||||
assert.True(t, readS3Cfg.DoNotVerifyTLS)
|
||||
|
||||
common, err := config.Storage.CommonConfig()
|
||||
require.NoError(t, err, "reading common config from storage", clues.ToCore(err))
|
||||
assert.Equal(t, common.CorsoPassphrase, os.Getenv(credentials.CorsoPassphrase))
|
||||
|
||||
readM365, err := config.Account.M365Config()
|
||||
require.NoError(t, err, "reading m365 config from account", clues.ToCore(err))
|
||||
assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID)
|
||||
assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID))
|
||||
assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret))
|
||||
}
|
||||
128
src/cli/config/storage.go
Normal file
128
src/cli/config/storage.go
Normal file
@ -0,0 +1,128 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/aws/aws-sdk-go/aws/defaults"
|
||||
"github.com/spf13/viper"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common"
|
||||
"github.com/alcionai/corso/src/pkg/credentials"
|
||||
"github.com/alcionai/corso/src/pkg/storage"
|
||||
)
|
||||
|
||||
// prerequisite: readRepoConfig must have been run prior to this to populate the global viper values.
|
||||
func s3ConfigsFromViper(vpr *viper.Viper) (storage.S3Config, error) {
|
||||
var s3Config storage.S3Config
|
||||
|
||||
providerType := vpr.GetString(StorageProviderTypeKey)
|
||||
if providerType != storage.ProviderS3.String() {
|
||||
return s3Config, clues.New("unsupported storage provider: " + providerType)
|
||||
}
|
||||
|
||||
s3Config.Bucket = vpr.GetString(BucketNameKey)
|
||||
s3Config.Endpoint = vpr.GetString(EndpointKey)
|
||||
s3Config.Prefix = vpr.GetString(PrefixKey)
|
||||
s3Config.DoNotUseTLS = vpr.GetBool(DisableTLSKey)
|
||||
s3Config.DoNotVerifyTLS = vpr.GetBool(DisableTLSVerificationKey)
|
||||
|
||||
return s3Config, nil
|
||||
}
|
||||
|
||||
func s3Overrides(in map[string]string) map[string]string {
|
||||
return map[string]string{
|
||||
storage.Bucket: in[storage.Bucket],
|
||||
storage.Endpoint: in[storage.Endpoint],
|
||||
storage.Prefix: in[storage.Prefix],
|
||||
storage.DoNotUseTLS: in[storage.DoNotUseTLS],
|
||||
storage.DoNotVerifyTLS: in[storage.DoNotVerifyTLS],
|
||||
StorageProviderTypeKey: in[StorageProviderTypeKey],
|
||||
}
|
||||
}
|
||||
|
||||
// configureStorage builds a complete storage configuration from a mix of
|
||||
// viper properties and manual overrides.
|
||||
func configureStorage(
|
||||
vpr *viper.Viper,
|
||||
readConfigFromViper bool,
|
||||
overrides map[string]string,
|
||||
) (storage.Storage, error) {
|
||||
var (
|
||||
s3Cfg storage.S3Config
|
||||
store storage.Storage
|
||||
err error
|
||||
)
|
||||
|
||||
if readConfigFromViper {
|
||||
if s3Cfg, err = s3ConfigsFromViper(vpr); err != nil {
|
||||
return store, clues.Wrap(err, "reading s3 configs from corso config file")
|
||||
}
|
||||
|
||||
if b, ok := overrides[storage.Bucket]; ok {
|
||||
overrides[storage.Bucket] = common.NormalizeBucket(b)
|
||||
}
|
||||
|
||||
if p, ok := overrides[storage.Prefix]; ok {
|
||||
overrides[storage.Prefix] = common.NormalizePrefix(p)
|
||||
}
|
||||
|
||||
if err := mustMatchConfig(vpr, s3Overrides(overrides)); err != nil {
|
||||
return store, clues.Wrap(err, "verifying s3 configs in corso config file")
|
||||
}
|
||||
}
|
||||
|
||||
_, err = defaults.CredChain(defaults.Config().WithCredentialsChainVerboseErrors(true), defaults.Handlers()).Get()
|
||||
if err != nil {
|
||||
return store, clues.Wrap(err, "validating aws credentials")
|
||||
}
|
||||
|
||||
s3Cfg = storage.S3Config{
|
||||
Bucket: common.First(overrides[storage.Bucket], s3Cfg.Bucket, os.Getenv(storage.BucketKey)),
|
||||
Endpoint: common.First(overrides[storage.Endpoint], s3Cfg.Endpoint, os.Getenv(storage.EndpointKey)),
|
||||
Prefix: common.First(overrides[storage.Prefix], s3Cfg.Prefix, os.Getenv(storage.PrefixKey)),
|
||||
DoNotUseTLS: common.ParseBool(common.First(
|
||||
overrides[storage.DoNotUseTLS],
|
||||
strconv.FormatBool(s3Cfg.DoNotUseTLS),
|
||||
os.Getenv(storage.PrefixKey))),
|
||||
DoNotVerifyTLS: common.ParseBool(common.First(
|
||||
overrides[storage.DoNotVerifyTLS],
|
||||
strconv.FormatBool(s3Cfg.DoNotVerifyTLS),
|
||||
os.Getenv(storage.PrefixKey))),
|
||||
}
|
||||
|
||||
// compose the common config and credentials
|
||||
corso := credentials.GetCorso()
|
||||
if err := corso.Validate(); err != nil {
|
||||
return store, clues.Wrap(err, "validating corso credentials")
|
||||
}
|
||||
|
||||
cCfg := storage.CommonConfig{
|
||||
Corso: corso,
|
||||
}
|
||||
// the following is a hack purely for integration testing.
|
||||
// the value is not required, and if empty, kopia will default
|
||||
// to its routine behavior
|
||||
if t, ok := vpr.Get("corso-testing").(bool); t && ok {
|
||||
dir, _ := filepath.Split(vpr.ConfigFileUsed())
|
||||
cCfg.KopiaCfgDir = dir
|
||||
}
|
||||
|
||||
// ensure required properties are present
|
||||
if err := requireProps(map[string]string{
|
||||
storage.Bucket: s3Cfg.Bucket,
|
||||
credentials.CorsoPassphrase: corso.CorsoPassphrase,
|
||||
}); err != nil {
|
||||
return storage.Storage{}, err
|
||||
}
|
||||
|
||||
// build the storage
|
||||
store, err = storage.NewStorage(storage.ProviderS3, s3Cfg, cCfg)
|
||||
if err != nil {
|
||||
return store, clues.Wrap(err, "configuring repository storage")
|
||||
}
|
||||
|
||||
return store, nil
|
||||
}
|
||||
@ -1,120 +0,0 @@
|
||||
package debug
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
. "github.com/alcionai/corso/src/cli/print"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
)
|
||||
|
||||
var subCommandFuncs = []func() *cobra.Command{
|
||||
metadataFilesCmd,
|
||||
}
|
||||
|
||||
var debugCommands = []func(cmd *cobra.Command) *cobra.Command{
|
||||
addOneDriveCommands,
|
||||
addSharePointCommands,
|
||||
addGroupsCommands,
|
||||
addExchangeCommands,
|
||||
}
|
||||
|
||||
// AddCommands attaches all `corso debug * *` commands to the parent.
|
||||
func AddCommands(cmd *cobra.Command) {
|
||||
debugC, _ := utils.AddCommand(cmd, debugCmd(), utils.MarkDebugCommand())
|
||||
|
||||
for _, sc := range subCommandFuncs {
|
||||
subCommand := sc()
|
||||
utils.AddCommand(debugC, subCommand, utils.MarkDebugCommand())
|
||||
|
||||
for _, addTo := range debugCommands {
|
||||
servCmd := addTo(subCommand)
|
||||
flags.AddAllProviderFlags(servCmd)
|
||||
flags.AddAllStorageFlags(servCmd)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Commands
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const debugCommand = "debug"
|
||||
|
||||
// The debug category of commands.
|
||||
// `corso debug [<subcommand>] [<flag>...]`
|
||||
func debugCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: debugCommand,
|
||||
Short: "debugging & troubleshooting utilities",
|
||||
Long: `debug the data stored in corso.`,
|
||||
RunE: handledebugCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// Handler for flat calls to `corso debug`.
|
||||
// Produces the same output as `corso debug --help`.
|
||||
func handledebugCmd(cmd *cobra.Command, args []string) error {
|
||||
return cmd.Help()
|
||||
}
|
||||
|
||||
// The debug metadataFiles subcommand.
|
||||
// `corso debug metadata-files <service> [<flag>...]`
|
||||
var metadataFilesCommand = "metadata-files"
|
||||
|
||||
func metadataFilesCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: metadataFilesCommand,
|
||||
Short: "display all the metadata file contents stored by the service",
|
||||
RunE: handleMetadataFilesCmd,
|
||||
Args: cobra.NoArgs,
|
||||
}
|
||||
}
|
||||
|
||||
// Handler for calls to `corso debug metadata-files`.
|
||||
// Produces the same output as `corso debug metadata-files --help`.
|
||||
func handleMetadataFilesCmd(cmd *cobra.Command, args []string) error {
|
||||
return cmd.Help()
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// runners
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func genericMetadataFiles(
|
||||
ctx context.Context,
|
||||
cmd *cobra.Command,
|
||||
args []string,
|
||||
sel selectors.Selector,
|
||||
backupID string,
|
||||
) error {
|
||||
ctx = clues.Add(ctx, "backup_id", backupID)
|
||||
|
||||
r, _, err := utils.GetAccountAndConnect(ctx, cmd, sel.PathService())
|
||||
if err != nil {
|
||||
return Only(ctx, err)
|
||||
}
|
||||
|
||||
defer utils.CloseRepo(ctx, r)
|
||||
|
||||
// read metadata
|
||||
files, err := r.GetBackupMetadata(ctx, sel, backupID, fault.New(true))
|
||||
if err != nil {
|
||||
return Only(ctx, clues.Wrap(err, "retrieving metadata files"))
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
Infof(ctx, "\n------------------------------")
|
||||
Info(ctx, file.Name)
|
||||
Info(ctx, file.Path)
|
||||
Pretty(ctx, file.Data)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@ -1,66 +0,0 @@
|
||||
package debug
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
)
|
||||
|
||||
// called by debug.go to map subcommands to provider-specific handling.
|
||||
func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var c *cobra.Command
|
||||
|
||||
switch cmd.Use {
|
||||
case metadataFilesCommand:
|
||||
c, _ = utils.AddCommand(cmd, exchangeMetadataFilesCmd(), utils.MarkDebugCommand())
|
||||
c.Use = c.Use + " " + exchangeServiceCommandUseSuffix
|
||||
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
const (
|
||||
exchangeServiceCommand = "exchange"
|
||||
exchangeServiceCommandUseSuffix = "--backup <backupId>"
|
||||
|
||||
//nolint:lll
|
||||
exchangeServiceCommandDebugExamples = `# Display file contents for backup 1234abcd
|
||||
corso debug metadata-files exchange --backup 1234abcd-12ab-cd34-56de-1234abcd`
|
||||
)
|
||||
|
||||
// `corso debug metadata-files exchange [<flag>...] <destination>`
|
||||
func exchangeMetadataFilesCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: exchangeServiceCommand,
|
||||
Short: "Display exchange metadata file content",
|
||||
RunE: metadataFilesExchangeCmd,
|
||||
Args: cobra.NoArgs,
|
||||
Example: exchangeServiceCommandDebugExamples,
|
||||
}
|
||||
}
|
||||
|
||||
func metadataFilesExchangeCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
sel := selectors.NewExchangeBackup([]string{"unused-placeholder"})
|
||||
sel.Include(sel.AllData())
|
||||
|
||||
return genericMetadataFiles(
|
||||
ctx,
|
||||
cmd,
|
||||
args,
|
||||
sel.Selector,
|
||||
flags.BackupIDFV)
|
||||
}
|
||||
@ -1,77 +0,0 @@
|
||||
package debug
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
|
||||
cliTD "github.com/alcionai/corso/src/cli/testdata"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
)
|
||||
|
||||
type ExchangeUnitSuite struct {
|
||||
tester.Suite
|
||||
}
|
||||
|
||||
func TestExchangeUnitSuite(t *testing.T) {
|
||||
suite.Run(t, &ExchangeUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||
}
|
||||
|
||||
func (suite *ExchangeUnitSuite) TestExchangeCommands() {
|
||||
expectUse := exchangeServiceCommand + " " + exchangeServiceCommandUseSuffix
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
use string
|
||||
expectUse string
|
||||
expectShort string
|
||||
expectRunE func(*cobra.Command, []string) error
|
||||
}{
|
||||
{
|
||||
name: "metdata-files exchange",
|
||||
use: metadataFilesCommand,
|
||||
expectUse: expectUse,
|
||||
expectShort: exchangeMetadataFilesCmd().Short,
|
||||
expectRunE: metadataFilesExchangeCmd,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
parent := &cobra.Command{Use: metadataFilesCommand}
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
parent,
|
||||
addExchangeCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
exchangeServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
},
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
cliTD.CheckCmdChild(
|
||||
t,
|
||||
parent,
|
||||
3,
|
||||
test.expectUse,
|
||||
test.expectShort,
|
||||
test.expectRunE)
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
flagsTD.AssertProviderFlags(t, cmd)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -1,68 +0,0 @@
|
||||
package debug
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
)
|
||||
|
||||
// called by debug.go to map subcommands to provider-specific handling.
|
||||
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var c *cobra.Command
|
||||
|
||||
switch cmd.Use {
|
||||
case metadataFilesCommand:
|
||||
c, _ = utils.AddCommand(cmd, groupsMetadataFilesCmd(), utils.MarkDebugCommand())
|
||||
|
||||
c.Use = c.Use + " " + groupsServiceCommandUseSuffix
|
||||
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
// TODO: correct examples
|
||||
const (
|
||||
groupsServiceCommand = "groups"
|
||||
groupsServiceCommandUseSuffix = "--backup <backupId>"
|
||||
|
||||
//nolint:lll
|
||||
groupsServiceCommandDebugExamples = `# Display file contents for backup 1234abcd
|
||||
corso debug metadata-files groups --backup 1234abcd-12ab-cd34-56de-1234abcd`
|
||||
)
|
||||
|
||||
// `corso debug metadata-files groups [<flag>...] <destination>`
|
||||
func groupsMetadataFilesCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: groupsServiceCommand,
|
||||
Short: "Display groups metadata file content",
|
||||
RunE: metadataFilesGroupsCmd,
|
||||
Args: cobra.NoArgs,
|
||||
Example: groupsServiceCommandDebugExamples,
|
||||
}
|
||||
}
|
||||
|
||||
func metadataFilesGroupsCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
sel := selectors.NewGroupsBackup([]string{"unused-placeholder"})
|
||||
sel.Include(sel.AllData())
|
||||
|
||||
return genericMetadataFiles(
|
||||
ctx,
|
||||
cmd,
|
||||
args,
|
||||
sel.Selector,
|
||||
flags.BackupIDFV)
|
||||
}
|
||||
@ -1,76 +0,0 @@
|
||||
package debug
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
|
||||
cliTD "github.com/alcionai/corso/src/cli/testdata"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
)
|
||||
|
||||
type GroupsUnitSuite struct {
|
||||
tester.Suite
|
||||
}
|
||||
|
||||
func TestGroupsUnitSuite(t *testing.T) {
|
||||
suite.Run(t, &GroupsUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||
}
|
||||
|
||||
func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
|
||||
expectUse := groupsServiceCommand + " " + groupsServiceCommandUseSuffix
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
use string
|
||||
expectUse string
|
||||
expectShort string
|
||||
expectRunE func(*cobra.Command, []string) error
|
||||
}{
|
||||
{
|
||||
name: "metdata-files groups",
|
||||
use: metadataFilesCommand,
|
||||
expectUse: expectUse,
|
||||
expectShort: groupsMetadataFilesCmd().Short,
|
||||
expectRunE: metadataFilesGroupsCmd,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
parent := &cobra.Command{Use: metadataFilesCommand}
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
parent,
|
||||
addGroupsCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
groupsServiceCommand,
|
||||
[]string{
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
},
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
cliTD.CheckCmdChild(
|
||||
t,
|
||||
parent,
|
||||
3,
|
||||
test.expectUse,
|
||||
test.expectShort,
|
||||
test.expectRunE)
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -1,66 +0,0 @@
|
||||
package debug
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
)
|
||||
|
||||
// called by debug.go to map subcommands to provider-specific handling.
|
||||
func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var c *cobra.Command
|
||||
|
||||
switch cmd.Use {
|
||||
case metadataFilesCommand:
|
||||
c, _ = utils.AddCommand(cmd, oneDriveMetadataFilesCmd(), utils.MarkDebugCommand())
|
||||
c.Use = c.Use + " " + oneDriveServiceCommandUseSuffix
|
||||
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
const (
|
||||
oneDriveServiceCommand = "onedrive"
|
||||
oneDriveServiceCommandUseSuffix = "--backup <backupId>"
|
||||
|
||||
//nolint:lll
|
||||
oneDriveServiceCommandDebugExamples = `# Display file contents for backup 1234abcd
|
||||
corso debug metadata-files onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd`
|
||||
)
|
||||
|
||||
// `corso debug metadata-files onedrive [<flag>...] <destination>`
|
||||
func oneDriveMetadataFilesCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: oneDriveServiceCommand,
|
||||
Short: "Display onedrive metadata file content",
|
||||
RunE: metadataFilesOneDriveCmd,
|
||||
Args: cobra.NoArgs,
|
||||
Example: oneDriveServiceCommandDebugExamples,
|
||||
}
|
||||
}
|
||||
|
||||
func metadataFilesOneDriveCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
sel := selectors.NewOneDriveBackup([]string{"unused-placeholder"})
|
||||
sel.Include(sel.AllData())
|
||||
|
||||
return genericMetadataFiles(
|
||||
ctx,
|
||||
cmd,
|
||||
args,
|
||||
sel.Selector,
|
||||
flags.BackupIDFV)
|
||||
}
|
||||
@ -1,76 +0,0 @@
|
||||
package debug

import (
	"testing"

	"github.com/spf13/cobra"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/suite"

	"github.com/alcionai/corso/src/cli/flags"
	flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
	cliTD "github.com/alcionai/corso/src/cli/testdata"
	"github.com/alcionai/corso/src/internal/tester"
)

// OneDriveUnitSuite exercises the onedrive debug command wiring.
type OneDriveUnitSuite struct {
	tester.Suite
}

func TestOneDriveUnitSuite(t *testing.T) {
	suite.Run(t, &OneDriveUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
	expectUse := oneDriveServiceCommand + " " + oneDriveServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			name:        "metadata-files onedrive",
			use:         metadataFilesCommand,
			expectUse:   expectUse,
			expectShort: oneDriveMetadataFilesCmd().Short,
			expectRunE:  metadataFilesOneDriveCmd,
		},
	}
	for _, tc := range table {
		suite.Run(tc.name, func() {
			t := suite.T()
			parentCmd := &cobra.Command{Use: metadataFilesCommand}

			// Build the command tree with provider/storage flags and
			// parse a canned set of CLI arguments.
			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parentCmd,
				addOneDriveCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					oneDriveServiceCommand,
					[]string{
						"--" + flags.RunModeFN, flags.RunModeFlagTest,
						"--" + flags.BackupFN, flagsTD.BackupInput,
					},
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))

			// Confirm the child command was attached with the expected
			// use string, short description, and run handler.
			cliTD.CheckCmdChild(
				t,
				parentCmd,
				3,
				tc.expectUse,
				tc.expectShort,
				tc.expectRunE)

			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}
|
||||
@ -1,66 +0,0 @@
|
||||
package debug
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/pkg/selectors"
|
||||
)
|
||||
|
||||
// called by debug.go to map subcommands to provider-specific handling.
|
||||
func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var c *cobra.Command
|
||||
|
||||
switch cmd.Use {
|
||||
case metadataFilesCommand:
|
||||
c, _ = utils.AddCommand(cmd, sharePointMetadataFilesCmd(), utils.MarkDebugCommand())
|
||||
c.Use = c.Use + " " + sharePointServiceCommandUseSuffix
|
||||
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
const (
|
||||
sharePointServiceCommand = "sharepoint"
|
||||
sharePointServiceCommandUseSuffix = "--backup <backupId>"
|
||||
|
||||
//nolint:lll
|
||||
sharePointServiceCommandDebugExamples = `# Display file contents for backup 1234abcd
|
||||
corso debug metadata-files sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd`
|
||||
)
|
||||
|
||||
// `corso debug metadata-files sharepoint [<flag>...] <destination>`
|
||||
func sharePointMetadataFilesCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: sharePointServiceCommand,
|
||||
Short: "Display sharepoint metadata file content",
|
||||
RunE: metadataFilesSharePointCmd,
|
||||
Args: cobra.NoArgs,
|
||||
Example: sharePointServiceCommandDebugExamples,
|
||||
}
|
||||
}
|
||||
|
||||
func metadataFilesSharePointCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
sel := selectors.NewSharePointBackup([]string{"unused-placeholder"})
|
||||
sel.Include(sel.LibraryFolders(selectors.Any()))
|
||||
|
||||
return genericMetadataFiles(
|
||||
ctx,
|
||||
cmd,
|
||||
args,
|
||||
sel.Selector,
|
||||
flags.BackupIDFV)
|
||||
}
|
||||
@ -1,76 +0,0 @@
|
||||
package debug

import (
	"testing"

	"github.com/spf13/cobra"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/suite"

	"github.com/alcionai/corso/src/cli/flags"
	flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
	cliTD "github.com/alcionai/corso/src/cli/testdata"
	"github.com/alcionai/corso/src/internal/tester"
)

// SharePointUnitSuite exercises the sharepoint debug command wiring.
type SharePointUnitSuite struct {
	tester.Suite
}

func TestSharePointUnitSuite(t *testing.T) {
	suite.Run(t, &SharePointUnitSuite{Suite: tester.NewUnitSuite(t)})
}

// TestAddSharePointCommands verifies that addSharePointCommands attaches
// the sharepoint child with the expected use string, short description,
// and run handler, and that backup/storage flags parse through.
func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
	expectUse := sharePointServiceCommand + " " + sharePointServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			// fix: test name previously read "metdata-files sharepoint".
			name:        "metadata-files sharepoint",
			use:         metadataFilesCommand,
			expectUse:   expectUse,
			expectShort: sharePointMetadataFilesCmd().Short,
			expectRunE:  metadataFilesSharePointCmd,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()
			parent := &cobra.Command{Use: metadataFilesCommand}

			// Build the command tree with provider/storage flags and
			// parse a canned set of CLI arguments.
			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parent,
				addSharePointCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					sharePointServiceCommand,
					[]string{
						"--" + flags.RunModeFN, flags.RunModeFlagTest,
						"--" + flags.BackupFN, flagsTD.BackupInput,
					},
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))

			cliTD.CheckCmdChild(
				t,
				parent,
				3,
				test.expectUse,
				test.expectShort,
				test.expectRunE)

			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}
|
||||
@ -1,101 +0,0 @@
|
||||
package export
|
||||
|
||||
import (
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
)
|
||||
|
||||
// called by export.go to map subcommands to provider-specific handling.
|
||||
func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var c *cobra.Command
|
||||
|
||||
switch cmd.Use {
|
||||
case exportCommand:
|
||||
c, _ = utils.AddCommand(cmd, exchangeExportCmd())
|
||||
|
||||
c.Use = c.Use + " " + exchangeServiceCommandUseSuffix
|
||||
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddExchangeDetailsAndRestoreFlags(c, true)
|
||||
flags.AddExportConfigFlags(c)
|
||||
flags.AddFailFastFlag(c)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
const (
|
||||
exchangeServiceCommand = "exchange"
|
||||
exchangeServiceCommandUseSuffix = "<destination> --backup <backupId>"
|
||||
|
||||
// TODO(meain): remove message about only supporting email exports once others are added
|
||||
//nolint:lll
|
||||
exchangeServiceCommandExportExamples = `> Only email exports are supported as of now.
|
||||
|
||||
# Export emails with ID 98765abcdef and 12345abcdef from Alice's last backup (1234abcd...) to my-folder
|
||||
corso export exchange my-folder --backup 1234abcd-12ab-cd34-56de-1234abcd --email 98765abcdef,12345abcdef
|
||||
|
||||
# Export emails with subject containing "Hello world" in the "Inbox" to my-folder
|
||||
corso export exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--email-subject "Hello world" --email-folder Inbox my-folder`
|
||||
|
||||
// TODO(meain): Uncomment once support for these are added
|
||||
// `# Export an entire calendar to my-folder
|
||||
// corso export exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
// --event-calendar Calendar my-folder
|
||||
|
||||
// # Export the contact with ID abdef0101 to my-folder
|
||||
// corso export exchange --backup 1234abcd-12ab-cd34-56de-1234abcd --contact abdef0101 my-folder`
|
||||
)
|
||||
|
||||
// `corso export exchange [<flag>...] <destination>`
|
||||
func exchangeExportCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: exchangeServiceCommand,
|
||||
Short: "Export M365 Exchange service data",
|
||||
RunE: exportExchangeCmd,
|
||||
Args: func(cmd *cobra.Command, args []string) error {
|
||||
if len(args) != 1 {
|
||||
return errors.New("missing export destination")
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
Example: exchangeServiceCommandExportExamples,
|
||||
}
|
||||
}
|
||||
|
||||
// processes an exchange service export.
|
||||
func exportExchangeCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
opts := utils.MakeExchangeOpts(cmd)
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := utils.ValidateExchangeRestoreFlags(flags.BackupIDFV, opts); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
sel := utils.IncludeExchangeRestoreDataSelectors(opts)
|
||||
utils.FilterExchangeRestoreInfoSelectors(sel, opts)
|
||||
|
||||
return runExport(
|
||||
ctx,
|
||||
cmd,
|
||||
args,
|
||||
opts.ExportCfg,
|
||||
sel.Selector,
|
||||
flags.BackupIDFV,
|
||||
"Exchange",
|
||||
defaultAcceptedFormatTypes)
|
||||
}
|
||||
@ -1,78 +0,0 @@
|
||||
package export

import (
	"testing"

	"github.com/spf13/cobra"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/suite"

	"github.com/alcionai/corso/src/cli/flags"
	flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
	cliTD "github.com/alcionai/corso/src/cli/testdata"
	"github.com/alcionai/corso/src/cli/utils"
	"github.com/alcionai/corso/src/internal/tester"
)

// ExchangeUnitSuite exercises the exchange export command wiring.
type ExchangeUnitSuite struct {
	tester.Suite
}

func TestExchangeUnitSuite(t *testing.T) {
	suite.Run(t, &ExchangeUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
	expectUse := exchangeServiceCommand + " " + exchangeServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			name:        "export exchange",
			use:         exportCommand,
			expectUse:   expectUse,
			expectShort: exchangeExportCmd().Short,
			expectRunE:  exportExchangeCmd,
		},
	}
	for _, tc := range table {
		suite.Run(tc.name, func() {
			t := suite.T()
			parentCmd := &cobra.Command{Use: exportCommand}

			// Build the command tree with provider/storage flags and
			// parse a canned set of CLI arguments.
			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parentCmd,
				addExchangeCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					exchangeServiceCommand,
					[]string{
						flagsTD.RestoreDestination,
						"--" + flags.RunModeFN, flags.RunModeFlagTest,
						"--" + flags.BackupFN, flagsTD.BackupInput,
						"--" + flags.FormatFN, flagsTD.FormatType,
						"--" + flags.ArchiveFN,
					},
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))

			cliTD.CheckCmdChild(
				t,
				parentCmd,
				3,
				tc.expectUse,
				tc.expectShort,
				tc.expectRunE)

			// Parsed flag values should land in both the package-level
			// flag vars and the derived exchange opts.
			opts := utils.MakeExchangeOpts(cmd)

			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			assert.Equal(t, flagsTD.Archive, opts.ExportCfg.Archive)
			assert.Equal(t, flagsTD.FormatType, opts.ExportCfg.Format)
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}
|
||||
@ -1,153 +0,0 @@
|
||||
package export

import (
	"context"
	"errors"

	"github.com/alcionai/clues"
	"github.com/dustin/go-humanize"
	"github.com/spf13/cobra"

	"github.com/alcionai/corso/src/cli/flags"
	. "github.com/alcionai/corso/src/cli/print"
	"github.com/alcionai/corso/src/cli/utils"
	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/internal/observe"
	"github.com/alcionai/corso/src/internal/operations"
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/dttm"
	"github.com/alcionai/corso/src/pkg/export"
	"github.com/alcionai/corso/src/pkg/selectors"
)

// exportCommands lists the per-service hooks that attach their
// subcommands beneath `corso export`.
var exportCommands = []func(cmd *cobra.Command) *cobra.Command{
	addOneDriveCommands,
	addSharePointCommands,
	addGroupsCommands,
	addExchangeCommands,
}

// defaultAcceptedFormatTypes holds the --format values accepted by
// services that do not declare their own list.
var defaultAcceptedFormatTypes = []string{string(control.DefaultFormat)}

// AddCommands attaches all `corso export * *` commands to the parent.
func AddCommands(cmd *cobra.Command) {
	subCommand := exportCmd()
	cmd.AddCommand(subCommand)

	for _, addExportTo := range exportCommands {
		sc := addExportTo(subCommand)
		flags.AddAllStorageFlags(sc)
	}
}

const exportCommand = "export"

// The export category of commands.
// `corso export [<subcommand>] [<flag>...]`
func exportCmd() *cobra.Command {
	return &cobra.Command{
		Use:   exportCommand,
		Short: "Export your service data",
		Long:  `Export the data stored in one of your M365 services.`,
		RunE:  handleExportCmd,
		Args:  cobra.NoArgs,
	}
}

// Handler for flat calls to `corso export`.
// Produces the same output as `corso export --help`.
func handleExportCmd(cmd *cobra.Command, args []string) error {
	return cmd.Help()
}

// runExport drives a complete export operation: it validates the export
// config, connects to the repository, runs the export of the identified
// backup, streams the results to disk, and reports stats or recovered
// failures.  args[0] is expected to be the export destination.
func runExport(
	ctx context.Context,
	cmd *cobra.Command,
	args []string,
	ueco utils.ExportCfgOpts,
	sel selectors.Selector,
	backupID, serviceName string,
	acceptedFormatTypes []string,
) error {
	if err := utils.ValidateExportConfigFlags(&ueco, acceptedFormatTypes); err != nil {
		return Only(ctx, err)
	}

	r, _, err := utils.GetAccountAndConnect(ctx, cmd, sel.PathService())
	if err != nil {
		return Only(ctx, err)
	}

	defer utils.CloseRepo(ctx, r)

	exportLocation := args[0]
	if len(exportLocation) == 0 {
		// This should not be possible, but adding it just in case.
		exportLocation = control.DefaultRestoreLocation + dttm.FormatNow(dttm.HumanReadableDriveItem)
	}

	Infof(ctx, "Exporting to folder %s", exportLocation)

	eo, err := r.NewExport(
		ctx,
		backupID,
		sel,
		utils.MakeExportConfig(ctx, ueco))
	if err != nil {
		return Only(ctx, clues.Wrap(err, "Failed to initialize "+serviceName+" export"))
	}

	collections, err := eo.Run(ctx)
	if err != nil {
		if errors.Is(err, data.ErrNotFound) {
			return Only(ctx, clues.New("Backup or backup details missing for id "+backupID))
		}

		return Only(ctx, clues.Wrap(err, "Failed to run "+serviceName+" export"))
	}

	if err = showExportProgress(ctx, eo, collections, exportLocation); err != nil {
		return err
	}

	// Recovered (non-fatal) errors still mark the export incomplete.
	if len(eo.Errors.Recovered()) > 0 {
		Infof(ctx, "\nExport failures")

		for _, i := range eo.Errors.Recovered() {
			Err(ctx, i.Error())
		}

		return Only(ctx, clues.New("Incomplete export of "+serviceName+" data"))
	}

	stats := eo.GetStats()
	if len(stats) > 0 {
		Infof(ctx, "\nExport details")
	}

	for k, s := range stats {
		Infof(ctx, "%s: %d items (%s)", k.HumanString(), s.ResourceCount, humanize.Bytes(uint64(s.BytesRead)))
	}

	return nil
}

// showExportProgress is a slim wrapper that allows us to defer the
// progress bar closure with the expected scope.
func showExportProgress(
	ctx context.Context,
	op operations.ExportOperation,
	collections []export.Collectioner,
	exportLocation string,
) error {
	// It would be better to give a progressbar than a spinner, but we
	// don't have any way of knowing how many files are available as of now.
	// (fixed comment: the original dropped the word "don't".)
	progressMessage := observe.MessageWithCompletion(ctx, observe.DefaultCfg(), "Writing data to disk")
	defer close(progressMessage)

	err := export.ConsumeExportCollections(ctx, exportLocation, collections, op.Errors)
	if err != nil {
		return Only(ctx, err)
	}

	return nil
}
|
||||
@ -1,115 +0,0 @@
|
||||
package export
|
||||
|
||||
import (
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
)
|
||||
|
||||
// called by export.go to map subcommands to provider-specific handling.
|
||||
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var c *cobra.Command
|
||||
|
||||
switch cmd.Use {
|
||||
case exportCommand:
|
||||
c, _ = utils.AddCommand(cmd, groupsExportCmd(), utils.MarkPreviewCommand())
|
||||
|
||||
c.Use = c.Use + " " + groupsServiceCommandUseSuffix
|
||||
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddSiteFlag(c, false)
|
||||
flags.AddSiteIDFlag(c, false)
|
||||
flags.AddSharePointDetailsAndRestoreFlags(c)
|
||||
flags.AddGroupDetailsAndRestoreFlags(c)
|
||||
flags.AddExportConfigFlags(c)
|
||||
flags.AddFailFastFlag(c)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
const (
|
||||
groupsServiceCommand = "groups"
|
||||
teamsServiceCommand = "teams"
|
||||
groupsServiceCommandUseSuffix = "<destination> --backup <backupId>"
|
||||
|
||||
//nolint:lll
|
||||
groupsServiceCommandExportExamples = `# Export a message in Marketing's last backup (1234abcd...) to /my-exports
|
||||
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --message 98765abcdef
|
||||
|
||||
# Export all messages named in channel "Finance Reports" to the current directory
|
||||
corso export groups . --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--message '*' --channel "Finance Reports"
|
||||
|
||||
# Export all messages in channel "Finance Reports" that were created before 2020 to /my-exports
|
||||
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd
|
||||
--channel "Finance Reports" --message-created-before 2020-01-01T00:00:00
|
||||
|
||||
# Export all files and folders in folder "Documents/Finance Reports" that were created before 2020 to /my-exports
|
||||
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00
|
||||
|
||||
# Export all posts from a conversation with topic "hello world" from group mailbox's last backup to /my-exports
|
||||
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world"
|
||||
|
||||
# Export post with ID 98765abcdef from a conversation from group mailbox's last backup to /my-exports
|
||||
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world" --post 98765abcdef`
|
||||
)
|
||||
|
||||
// `corso export groups [<flag>...] <destination>`
|
||||
func groupsExportCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: groupsServiceCommand,
|
||||
Aliases: []string{teamsServiceCommand},
|
||||
Short: "Export M365 Groups service data",
|
||||
RunE: exportGroupsCmd,
|
||||
Args: func(cmd *cobra.Command, args []string) error {
|
||||
if len(args) != 1 {
|
||||
return errors.New("missing export destination")
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
Example: groupsServiceCommandExportExamples,
|
||||
}
|
||||
}
|
||||
|
||||
// processes an groups service export.
|
||||
func exportGroupsCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
opts := utils.MakeGroupsOpts(cmd)
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := utils.ValidateGroupsRestoreFlags(flags.BackupIDFV, opts, false); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)
|
||||
utils.FilterGroupsRestoreInfoSelectors(sel, opts)
|
||||
|
||||
acceptedGroupsFormatTypes := []string{
|
||||
string(control.DefaultFormat),
|
||||
string(control.JSONFormat),
|
||||
}
|
||||
|
||||
return runExport(
|
||||
ctx,
|
||||
cmd,
|
||||
args,
|
||||
opts.ExportCfg,
|
||||
sel.Selector,
|
||||
flags.BackupIDFV,
|
||||
"Groups",
|
||||
acceptedGroupsFormatTypes)
|
||||
}
|
||||
@ -1,78 +0,0 @@
|
||||
package export

import (
	"testing"

	"github.com/spf13/cobra"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/suite"

	"github.com/alcionai/corso/src/cli/flags"
	flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
	cliTD "github.com/alcionai/corso/src/cli/testdata"
	"github.com/alcionai/corso/src/cli/utils"
	"github.com/alcionai/corso/src/internal/tester"
)

// GroupsUnitSuite exercises the groups export command wiring.
type GroupsUnitSuite struct {
	tester.Suite
}

func TestGroupsUnitSuite(t *testing.T) {
	suite.Run(t, &GroupsUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
	expectUse := groupsServiceCommand + " " + groupsServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			name:        "export groups",
			use:         exportCommand,
			expectUse:   expectUse,
			expectShort: groupsExportCmd().Short,
			expectRunE:  exportGroupsCmd,
		},
	}
	for _, tc := range table {
		suite.Run(tc.name, func() {
			t := suite.T()
			parentCmd := &cobra.Command{Use: exportCommand}

			// Build the command tree with provider/storage flags and
			// parse a canned set of CLI arguments.
			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parentCmd,
				addGroupsCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					groupsServiceCommand,
					[]string{
						flagsTD.RestoreDestination,
						"--" + flags.RunModeFN, flags.RunModeFlagTest,
						"--" + flags.BackupFN, flagsTD.BackupInput,
						"--" + flags.FormatFN, flagsTD.FormatType,
						"--" + flags.ArchiveFN,
					},
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))

			cliTD.CheckCmdChild(
				t,
				parentCmd,
				3,
				tc.expectUse,
				tc.expectShort,
				tc.expectRunE)

			// Parsed flag values should land in both the package-level
			// flag vars and the derived groups opts.
			opts := utils.MakeGroupsOpts(cmd)

			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			assert.Equal(t, flagsTD.Archive, opts.ExportCfg.Archive)
			assert.Equal(t, flagsTD.FormatType, opts.ExportCfg.Format)
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}
|
||||
@ -1,94 +0,0 @@
|
||||
package export
|
||||
|
||||
import (
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
)
|
||||
|
||||
// called by export.go to map subcommands to provider-specific handling.
|
||||
func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var c *cobra.Command
|
||||
|
||||
switch cmd.Use {
|
||||
case exportCommand:
|
||||
c, _ = utils.AddCommand(cmd, oneDriveExportCmd())
|
||||
|
||||
c.Use = c.Use + " " + oneDriveServiceCommandUseSuffix
|
||||
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddOneDriveDetailsAndRestoreFlags(c)
|
||||
flags.AddExportConfigFlags(c)
|
||||
flags.AddFailFastFlag(c)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
const (
|
||||
oneDriveServiceCommand = "onedrive"
|
||||
oneDriveServiceCommandUseSuffix = "<destination> --backup <backupId>"
|
||||
|
||||
//nolint:lll
|
||||
oneDriveServiceCommandExportExamples = `# Export file with ID 98765abcdef in Bob's last backup (1234abcd...) to /my-exports
|
||||
corso export onedrive my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef
|
||||
|
||||
# Export files named "FY2021 Planning.xlsx" in "Documents/Finance Reports" to he current directory
|
||||
corso export onedrive . --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--file "FY2021 Planning.xlsx" --folder "Documents/Finance Reports"
|
||||
|
||||
# Export all files and folders in folder "Documents/Finance Reports" that were created before 2020 to /my-exports
|
||||
corso export onedrive my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
|
||||
)
|
||||
|
||||
// `corso export onedrive [<flag>...] <destination>`
|
||||
func oneDriveExportCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: oneDriveServiceCommand,
|
||||
Short: "Export M365 OneDrive service data",
|
||||
RunE: exportOneDriveCmd,
|
||||
Args: func(cmd *cobra.Command, args []string) error {
|
||||
if len(args) != 1 {
|
||||
return errors.New("missing export destination")
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
Example: oneDriveServiceCommandExportExamples,
|
||||
}
|
||||
}
|
||||
|
||||
// processes an onedrive service export.
|
||||
func exportOneDriveCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
opts := utils.MakeOneDriveOpts(cmd)
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := utils.ValidateOneDriveRestoreFlags(flags.BackupIDFV, opts); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
sel := utils.IncludeOneDriveRestoreDataSelectors(opts)
|
||||
utils.FilterOneDriveRestoreInfoSelectors(sel, opts)
|
||||
|
||||
return runExport(
|
||||
ctx,
|
||||
cmd,
|
||||
args,
|
||||
opts.ExportCfg,
|
||||
sel.Selector,
|
||||
flags.BackupIDFV,
|
||||
"OneDrive",
|
||||
defaultAcceptedFormatTypes)
|
||||
}
|
||||
@ -1,92 +0,0 @@
|
||||
package export

import (
	"testing"

	"github.com/spf13/cobra"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/suite"

	"github.com/alcionai/corso/src/cli/flags"
	flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
	cliTD "github.com/alcionai/corso/src/cli/testdata"
	"github.com/alcionai/corso/src/cli/utils"
	"github.com/alcionai/corso/src/internal/tester"
)

// OneDriveUnitSuite exercises the onedrive export command wiring.
type OneDriveUnitSuite struct {
	tester.Suite
}

func TestOneDriveUnitSuite(t *testing.T) {
	suite.Run(t, &OneDriveUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
	expectUse := oneDriveServiceCommand + " " + oneDriveServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			name:        "export onedrive",
			use:         exportCommand,
			expectUse:   expectUse,
			expectShort: oneDriveExportCmd().Short,
			expectRunE:  exportOneDriveCmd,
		},
	}
	for _, tc := range table {
		suite.Run(tc.name, func() {
			t := suite.T()
			parentCmd := &cobra.Command{Use: exportCommand}

			// Build the command tree with provider/storage flags and
			// parse a canned set of CLI arguments covering every
			// onedrive details/restore flag plus the export config.
			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parentCmd,
				addOneDriveCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					oneDriveServiceCommand,
					[]string{
						flagsTD.RestoreDestination,
						"--" + flags.RunModeFN, flags.RunModeFlagTest,
						"--" + flags.BackupFN, flagsTD.BackupInput,
						"--" + flags.FileFN, flagsTD.FlgInputs(flagsTD.FileNameInput),
						"--" + flags.FolderFN, flagsTD.FlgInputs(flagsTD.FolderPathInput),
						"--" + flags.FileCreatedAfterFN, flagsTD.FileCreatedAfterInput,
						"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
						"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
						"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,

						"--" + flags.FormatFN, flagsTD.FormatType,

						// bool flags
						"--" + flags.ArchiveFN,
					},
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))

			cliTD.CheckCmdChild(
				t,
				parentCmd,
				3,
				tc.expectUse,
				tc.expectShort,
				tc.expectRunE)

			// Parsed flag values should land in both the package-level
			// flag vars and the derived onedrive opts.
			opts := utils.MakeOneDriveOpts(cmd)

			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			assert.ElementsMatch(t, flagsTD.FileNameInput, opts.FileName)
			assert.ElementsMatch(t, flagsTD.FolderPathInput, opts.FolderPath)
			assert.Equal(t, flagsTD.FileCreatedAfterInput, opts.FileCreatedAfter)
			assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore)
			assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter)
			assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
			assert.Equal(t, flagsTD.CorsoPassphrase, flags.PassphraseFV)
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}
|
||||
@ -1,118 +0,0 @@
|
||||
package export
|
||||
|
||||
import (
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
)
|
||||
|
||||
// called by export.go to map subcommands to provider-specific handling.
|
||||
func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
|
||||
var c *cobra.Command
|
||||
|
||||
switch cmd.Use {
|
||||
case exportCommand:
|
||||
c, _ = utils.AddCommand(cmd, sharePointExportCmd())
|
||||
|
||||
c.Use = c.Use + " " + sharePointServiceCommandUseSuffix
|
||||
|
||||
flags.AddBackupIDFlag(c, true)
|
||||
flags.AddSharePointDetailsAndRestoreFlags(c)
|
||||
flags.AddExportConfigFlags(c)
|
||||
flags.AddFailFastFlag(c)
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
const (
|
||||
sharePointServiceCommand = "sharepoint"
|
||||
sharePointServiceCommandUseSuffix = "<destination> --backup <backupId>"
|
||||
|
||||
//nolint:lll
|
||||
sharePointServiceCommandExportExamples = `# Export file with ID 98765abcdef in Bob's latest backup (1234abcd...) to /my-exports
|
||||
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef my-exports
|
||||
|
||||
# Export file "ServerRenderTemplate.xsl" in "Display Templates/Style Sheets" as archive to the current directory
|
||||
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--file "ServerRenderTemplate.xsl" --folder "Display Templates/Style Sheets" --archive .
|
||||
|
||||
# Export all files in the folder "Display Templates/Style Sheets" that were created before 2020 to /my-exports
|
||||
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--file-created-before 2020-01-01T00:00:00 --folder "Display Templates/Style Sheets" my-exports
|
||||
|
||||
# Export all files in the "Documents" library to the current directory.
|
||||
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--library Documents --folder "Display Templates/Style Sheets" .
|
||||
|
||||
# Export lists by their name(s)
|
||||
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--list "list-name-1,list-name-2" .
|
||||
|
||||
# Export lists created after a given time
|
||||
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--list-created-after 2024-01-01T12:23:34 .
|
||||
|
||||
# Export lists created before a given time
|
||||
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--list-created-before 2024-01-01T12:23:34 .
|
||||
|
||||
# Export lists modified before a given time
|
||||
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--list-modified-before 2024-01-01T12:23:34 .
|
||||
|
||||
# Export lists modified after a given time
|
||||
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
|
||||
--list-modified-after 2024-01-01T12:23:34 .`
|
||||
)
|
||||
|
||||
// `corso export sharepoint [<flag>...] <destination>`
|
||||
func sharePointExportCmd() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: sharePointServiceCommand,
|
||||
Short: "Export M365 SharePoint service data",
|
||||
RunE: exportSharePointCmd,
|
||||
Args: func(cmd *cobra.Command, args []string) error {
|
||||
if len(args) != 1 {
|
||||
return errors.New("missing export destination")
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
Example: sharePointServiceCommandExportExamples,
|
||||
}
|
||||
}
|
||||
|
||||
// processes an sharepoint service export.
|
||||
func exportSharePointCmd(cmd *cobra.Command, args []string) error {
|
||||
ctx := cmd.Context()
|
||||
|
||||
if utils.HasNoFlagsAndShownHelp(cmd) {
|
||||
return nil
|
||||
}
|
||||
|
||||
opts := utils.MakeSharePointOpts(cmd)
|
||||
|
||||
if flags.RunModeFV == flags.RunModeFlagTest {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := utils.ValidateSharePointRestoreFlags(flags.BackupIDFV, opts); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
|
||||
utils.FilterSharePointRestoreInfoSelectors(sel, opts)
|
||||
|
||||
return runExport(
|
||||
ctx,
|
||||
cmd,
|
||||
args,
|
||||
opts.ExportCfg,
|
||||
sel.Selector,
|
||||
flags.BackupIDFV,
|
||||
"SharePoint",
|
||||
defaultAcceptedFormatTypes)
|
||||
}
|
||||
@ -1,106 +0,0 @@
|
||||
package export
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
|
||||
cliTD "github.com/alcionai/corso/src/cli/testdata"
|
||||
"github.com/alcionai/corso/src/cli/utils"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
)
|
||||
|
||||
type SharePointUnitSuite struct {
|
||||
tester.Suite
|
||||
}
|
||||
|
||||
func TestSharePointUnitSuite(t *testing.T) {
|
||||
suite.Run(t, &SharePointUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||
}
|
||||
|
||||
func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
|
||||
expectUse := sharePointServiceCommand + " " + sharePointServiceCommandUseSuffix
|
||||
|
||||
table := []struct {
|
||||
name string
|
||||
use string
|
||||
expectUse string
|
||||
expectShort string
|
||||
expectRunE func(*cobra.Command, []string) error
|
||||
}{
|
||||
{"export sharepoint", exportCommand, expectUse, sharePointExportCmd().Short, exportSharePointCmd},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
parent := &cobra.Command{Use: exportCommand}
|
||||
|
||||
cmd := cliTD.SetUpCmdHasFlags(
|
||||
t,
|
||||
parent,
|
||||
addSharePointCommands,
|
||||
[]cliTD.UseCobraCommandFn{
|
||||
flags.AddAllProviderFlags,
|
||||
flags.AddAllStorageFlags,
|
||||
},
|
||||
flagsTD.WithFlags(
|
||||
sharePointServiceCommand,
|
||||
[]string{
|
||||
flagsTD.RestoreDestination,
|
||||
"--" + flags.RunModeFN, flags.RunModeFlagTest,
|
||||
"--" + flags.BackupFN, flagsTD.BackupInput,
|
||||
"--" + flags.LibraryFN, flagsTD.LibraryInput,
|
||||
"--" + flags.FileFN, flagsTD.FlgInputs(flagsTD.FileNameInput),
|
||||
"--" + flags.FolderFN, flagsTD.FlgInputs(flagsTD.FolderPathInput),
|
||||
"--" + flags.FileCreatedAfterFN, flagsTD.FileCreatedAfterInput,
|
||||
"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
|
||||
"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
|
||||
"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
|
||||
"--" + flags.ListFN, flagsTD.FlgInputs(flagsTD.ListsInput),
|
||||
"--" + flags.ListCreatedAfterFN, flagsTD.ListCreatedAfterInput,
|
||||
"--" + flags.ListCreatedBeforeFN, flagsTD.ListCreatedBeforeInput,
|
||||
"--" + flags.ListModifiedAfterFN, flagsTD.ListModifiedAfterInput,
|
||||
"--" + flags.ListModifiedBeforeFN, flagsTD.ListModifiedBeforeInput,
|
||||
"--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput),
|
||||
"--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
|
||||
"--" + flags.FormatFN, flagsTD.FormatType,
|
||||
"--" + flags.ArchiveFN,
|
||||
},
|
||||
flagsTD.PreparedProviderFlags(),
|
||||
flagsTD.PreparedStorageFlags()))
|
||||
|
||||
cliTD.CheckCmdChild(
|
||||
t,
|
||||
parent,
|
||||
3,
|
||||
test.expectUse,
|
||||
test.expectShort,
|
||||
test.expectRunE)
|
||||
|
||||
opts := utils.MakeSharePointOpts(cmd)
|
||||
|
||||
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
|
||||
assert.Equal(t, flagsTD.LibraryInput, opts.Library)
|
||||
assert.ElementsMatch(t, flagsTD.FileNameInput, opts.FileName)
|
||||
assert.ElementsMatch(t, flagsTD.FolderPathInput, opts.FolderPath)
|
||||
assert.Equal(t, flagsTD.FileCreatedAfterInput, opts.FileCreatedAfter)
|
||||
assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore)
|
||||
assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter)
|
||||
assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
|
||||
assert.ElementsMatch(t, flagsTD.ListsInput, opts.Lists)
|
||||
assert.Equal(t, flagsTD.ListCreatedAfterInput, opts.ListCreatedAfter)
|
||||
assert.Equal(t, flagsTD.ListCreatedBeforeInput, opts.ListCreatedBefore)
|
||||
assert.Equal(t, flagsTD.ListModifiedAfterInput, opts.ListModifiedAfter)
|
||||
assert.Equal(t, flagsTD.ListModifiedBeforeInput, opts.ListModifiedBefore)
|
||||
assert.ElementsMatch(t, flagsTD.PageInput, opts.Page)
|
||||
assert.ElementsMatch(t, flagsTD.PageFolderInput, opts.PageFolder)
|
||||
assert.Equal(t, flagsTD.Archive, opts.ExportCfg.Archive)
|
||||
assert.Equal(t, flagsTD.FormatType, opts.ExportCfg.Format)
|
||||
flagsTD.AssertStorageFlags(t, cmd)
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -1,11 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func AddGenericBackupFlags(cmd *cobra.Command) {
|
||||
AddFailFastFlag(cmd)
|
||||
AddDisableIncrementalsFlag(cmd)
|
||||
AddForceItemDataDownloadFlag(cmd)
|
||||
}
|
||||
@ -1,38 +0,0 @@
|
||||
package flags
|
||||
|
||||
import "github.com/spf13/cobra"
|
||||
|
||||
const Show = "show"
|
||||
|
||||
func AddAllBackupListFlags(cmd *cobra.Command) {
|
||||
AddFailedItemsFN(cmd)
|
||||
AddSkippedItemsFN(cmd)
|
||||
AddRecoveredErrorsFN(cmd)
|
||||
AddAlertsFN(cmd)
|
||||
}
|
||||
|
||||
func AddFailedItemsFN(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.StringVar(
|
||||
&FailedItemsFV, FailedItemsFN, Show,
|
||||
"Toggles showing or hiding the list of items that failed.")
|
||||
cobra.CheckErr(fs.MarkHidden(FailedItemsFN))
|
||||
}
|
||||
|
||||
func AddSkippedItemsFN(cmd *cobra.Command) {
|
||||
cmd.Flags().StringVar(
|
||||
&ListSkippedItemsFV, SkippedItemsFN, Show,
|
||||
"Toggles showing or hiding the list of items that were skipped.")
|
||||
}
|
||||
|
||||
func AddRecoveredErrorsFN(cmd *cobra.Command) {
|
||||
cmd.Flags().StringVar(
|
||||
&ListRecoveredErrorsFV, RecoveredErrorsFN, Show,
|
||||
"Toggles showing or hiding the list of errors which Corso recovered from.")
|
||||
}
|
||||
|
||||
func AddAlertsFN(cmd *cobra.Command) {
|
||||
cmd.Flags().StringVar(
|
||||
&ListAlertsFV, AlertsFN, Show,
|
||||
"Toggles showing or hiding the list of alerts produced during the operation.")
|
||||
}
|
||||
@ -1,130 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const (
|
||||
ContactFN = "contact"
|
||||
ContactFolderFN = "contact-folder"
|
||||
ContactNameFN = "contact-name"
|
||||
|
||||
EmailFN = "email"
|
||||
EmailFolderFN = "email-folder"
|
||||
EmailReceivedAfterFN = "email-received-after"
|
||||
EmailReceivedBeforeFN = "email-received-before"
|
||||
EmailSenderFN = "email-sender"
|
||||
EmailSubjectFN = "email-subject"
|
||||
|
||||
EventFN = "event"
|
||||
EventCalendarFN = "event-calendar"
|
||||
EventOrganizerFN = "event-organizer"
|
||||
EventRecursFN = "event-recurs"
|
||||
EventStartsAfterFN = "event-starts-after"
|
||||
EventStartsBeforeFN = "event-starts-before"
|
||||
EventSubjectFN = "event-subject"
|
||||
)
|
||||
|
||||
// flag values (ie: FV)
|
||||
var (
|
||||
ContactFV []string
|
||||
ContactFolderFV []string
|
||||
ContactNameFV string
|
||||
|
||||
EmailFV []string
|
||||
EmailFolderFV []string
|
||||
EmailReceivedAfterFV string
|
||||
EmailReceivedBeforeFV string
|
||||
EmailSenderFV string
|
||||
EmailSubjectFV string
|
||||
|
||||
EventFV []string
|
||||
EventCalendarFV []string
|
||||
EventOrganizerFV string
|
||||
EventRecursFV string
|
||||
EventStartsAfterFV string
|
||||
EventStartsBeforeFV string
|
||||
EventSubjectFV string
|
||||
)
|
||||
|
||||
// AddExchangeDetailsAndRestoreFlags adds flags that are common to both the
|
||||
// details and restore commands.
|
||||
func AddExchangeDetailsAndRestoreFlags(cmd *cobra.Command, emailOnly bool) {
|
||||
fs := cmd.Flags()
|
||||
|
||||
// email flags
|
||||
fs.StringSliceVar(
|
||||
&EmailFV,
|
||||
EmailFN, nil,
|
||||
"Select email messages by ID; accepts '"+Wildcard+"' to select all emails.")
|
||||
fs.StringSliceVar(
|
||||
&EmailFolderFV,
|
||||
EmailFolderFN, nil,
|
||||
"Select emails within a folder; accepts '"+Wildcard+"' to select all email folders.")
|
||||
fs.StringVar(
|
||||
&EmailSubjectFV,
|
||||
EmailSubjectFN, "",
|
||||
"Select emails with a subject containing this value.")
|
||||
fs.StringVar(
|
||||
&EmailSenderFV,
|
||||
EmailSenderFN, "",
|
||||
"Select emails from a specific sender.")
|
||||
fs.StringVar(
|
||||
&EmailReceivedAfterFV,
|
||||
EmailReceivedAfterFN, "",
|
||||
"Select emails received after this datetime.")
|
||||
fs.StringVar(
|
||||
&EmailReceivedBeforeFV,
|
||||
EmailReceivedBeforeFN, "",
|
||||
"Select emails received before this datetime.")
|
||||
|
||||
// NOTE: Only temporary until we add support for exporting the
|
||||
// others as well in exchange.
|
||||
if emailOnly {
|
||||
return
|
||||
}
|
||||
|
||||
// event flags
|
||||
fs.StringSliceVar(
|
||||
&EventFV,
|
||||
EventFN, nil,
|
||||
"Select events by event ID; accepts '"+Wildcard+"' to select all events.")
|
||||
fs.StringSliceVar(
|
||||
&EventCalendarFV,
|
||||
EventCalendarFN, nil,
|
||||
"Select events under a calendar; accepts '"+Wildcard+"' to select all events.")
|
||||
fs.StringVar(
|
||||
&EventSubjectFV,
|
||||
EventSubjectFN, "",
|
||||
"Select events with a subject containing this value.")
|
||||
fs.StringVar(
|
||||
&EventOrganizerFV,
|
||||
EventOrganizerFN, "",
|
||||
"Select events from a specific organizer.")
|
||||
fs.StringVar(
|
||||
&EventRecursFV,
|
||||
EventRecursFN, "",
|
||||
"Select recurring events. Use `--event-recurs false` to select non-recurring events.")
|
||||
fs.StringVar(
|
||||
&EventStartsAfterFV,
|
||||
EventStartsAfterFN, "",
|
||||
"Select events starting after this datetime.")
|
||||
fs.StringVar(
|
||||
&EventStartsBeforeFV,
|
||||
EventStartsBeforeFN, "",
|
||||
"Select events starting before this datetime.")
|
||||
|
||||
// contact flags
|
||||
fs.StringSliceVar(
|
||||
&ContactFV,
|
||||
ContactFN, nil,
|
||||
"Select contacts by contact ID; accepts '"+Wildcard+"' to select all contacts.")
|
||||
fs.StringSliceVar(
|
||||
&ContactFolderFV,
|
||||
ContactFolderFN, nil,
|
||||
"Select contacts within a folder; accepts '"+Wildcard+"' to select all contact folders.")
|
||||
fs.StringVar(
|
||||
&ContactNameFV,
|
||||
ContactNameFN, "",
|
||||
"Select contacts whose contact name contains this value.")
|
||||
}
|
||||
@ -1,23 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const (
|
||||
ArchiveFN = "archive"
|
||||
FormatFN = "format"
|
||||
)
|
||||
|
||||
var (
|
||||
ArchiveFV bool
|
||||
FormatFV string
|
||||
)
|
||||
|
||||
// AddExportConfigFlags adds the restore config flag set.
|
||||
func AddExportConfigFlags(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.BoolVar(&ArchiveFV, ArchiveFN, false, "Export data as an archive instead of individual files")
|
||||
fs.StringVar(&FormatFV, FormatFN, "", "Specify the export file format")
|
||||
cobra.CheckErr(fs.MarkHidden(FormatFN))
|
||||
}
|
||||
@ -1,48 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/pkg/storage"
|
||||
)
|
||||
|
||||
// filesystem flag names
|
||||
const (
|
||||
FilesystemPathFN = "path"
|
||||
)
|
||||
|
||||
// filesystem flag values
|
||||
var (
|
||||
FilesystemPathFV string
|
||||
)
|
||||
|
||||
func AddFilesystemFlags(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
|
||||
AddAzureCredsFlags(cmd)
|
||||
AddCorsoPassphaseFlags(cmd)
|
||||
|
||||
fs.StringVar(
|
||||
&FilesystemPathFV,
|
||||
FilesystemPathFN,
|
||||
"",
|
||||
"path to local or network storage")
|
||||
cobra.CheckErr(cmd.MarkFlagRequired(FilesystemPathFN))
|
||||
}
|
||||
|
||||
func FilesystemFlagOverrides(cmd *cobra.Command) map[string]string {
|
||||
fs := GetPopulatedFlags(cmd)
|
||||
return PopulateFilesystemFlags(fs)
|
||||
}
|
||||
|
||||
func PopulateFilesystemFlags(flagset PopulatedFlags) map[string]string {
|
||||
fsOverrides := map[string]string{
|
||||
storage.StorageProviderTypeKey: storage.ProviderFilesystem.String(),
|
||||
}
|
||||
|
||||
if _, ok := flagset[FilesystemPathFN]; ok {
|
||||
fsOverrides[FilesystemPathFN] = FilesystemPathFV
|
||||
}
|
||||
|
||||
return fsOverrides
|
||||
}
|
||||
@ -1,36 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
)
|
||||
|
||||
const Wildcard = "*"
|
||||
|
||||
type PopulatedFlags map[string]struct{}
|
||||
|
||||
func (fs PopulatedFlags) populate(pf *pflag.Flag) {
|
||||
if pf == nil {
|
||||
return
|
||||
}
|
||||
|
||||
if pf.Changed {
|
||||
fs[pf.Name] = struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
// GetPopulatedFlags returns a map of flags that have been
|
||||
// populated by the user. Entry keys match the flag's long
|
||||
// name. Values are empty.
|
||||
func GetPopulatedFlags(cmd *cobra.Command) PopulatedFlags {
|
||||
pop := PopulatedFlags{}
|
||||
|
||||
fs := cmd.Flags()
|
||||
if fs == nil {
|
||||
return pop
|
||||
}
|
||||
|
||||
fs.VisitAll(pop.populate)
|
||||
|
||||
return pop
|
||||
}
|
||||
@ -1,93 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const (
|
||||
DataMessages = "messages"
|
||||
DataConversations = "conversations"
|
||||
)
|
||||
|
||||
const (
|
||||
ChannelFN = "channel"
|
||||
ConversationFN = "conversation"
|
||||
GroupFN = "group"
|
||||
MessageFN = "message"
|
||||
PostFN = "post"
|
||||
|
||||
MessageCreatedAfterFN = "message-created-after"
|
||||
MessageCreatedBeforeFN = "message-created-before"
|
||||
MessageLastReplyAfterFN = "message-last-reply-after"
|
||||
MessageLastReplyBeforeFN = "message-last-reply-before"
|
||||
)
|
||||
|
||||
var (
|
||||
ChannelFV []string
|
||||
ConversationFV []string
|
||||
GroupFV []string
|
||||
MessageFV []string
|
||||
PostFV []string
|
||||
|
||||
MessageCreatedAfterFV string
|
||||
MessageCreatedBeforeFV string
|
||||
MessageLastReplyAfterFV string
|
||||
MessageLastReplyBeforeFV string
|
||||
)
|
||||
|
||||
func AddGroupDetailsAndRestoreFlags(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
|
||||
fs.StringSliceVar(
|
||||
&ChannelFV,
|
||||
ChannelFN, nil,
|
||||
"Select data within a Team's Channel.")
|
||||
|
||||
fs.StringSliceVar(
|
||||
&MessageFV,
|
||||
MessageFN, nil,
|
||||
"Select messages by reference.")
|
||||
|
||||
fs.StringVar(
|
||||
&MessageCreatedAfterFV,
|
||||
MessageCreatedAfterFN, "",
|
||||
"Select messages created after this datetime.")
|
||||
|
||||
fs.StringVar(
|
||||
&MessageCreatedBeforeFV,
|
||||
MessageCreatedBeforeFN, "",
|
||||
"Select messages created before this datetime.")
|
||||
|
||||
fs.StringVar(
|
||||
&MessageLastReplyAfterFV,
|
||||
MessageLastReplyAfterFN, "",
|
||||
"Select messages with replies after this datetime.")
|
||||
|
||||
fs.StringVar(
|
||||
&MessageLastReplyBeforeFV,
|
||||
MessageLastReplyBeforeFN, "",
|
||||
"Select messages with replies before this datetime.")
|
||||
|
||||
fs.StringSliceVar(
|
||||
&ConversationFV,
|
||||
ConversationFN, nil,
|
||||
"Select data within a Group's Conversation.")
|
||||
|
||||
fs.StringSliceVar(
|
||||
&PostFV,
|
||||
PostFN, nil,
|
||||
"Select Conversation Posts by reference.")
|
||||
}
|
||||
|
||||
// AddGroupFlag adds the --group flag, which accepts either the id,
|
||||
// the display name, or the mailbox address as its values. Users are
|
||||
// expected to supply the display name. The ID is supported becase, well,
|
||||
// IDs. The mailbox address is supported as a lookup fallback for certain
|
||||
// SDK cases, therefore it's also supported here, though that support
|
||||
// isn't exposed to end users.
|
||||
func AddGroupFlag(cmd *cobra.Command) {
|
||||
cmd.Flags().StringSliceVar(
|
||||
&GroupFV,
|
||||
GroupFN, nil,
|
||||
"Backup data by group; accepts '"+Wildcard+"' to select all groups.")
|
||||
}
|
||||
@ -1,42 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var CategoryDataFV []string
|
||||
|
||||
const CategoryDataFN = "data"
|
||||
|
||||
func AddDataFlag(cmd *cobra.Command, allowed []string, hide bool) {
|
||||
var (
|
||||
allowedMsg string
|
||||
fs = cmd.Flags()
|
||||
)
|
||||
|
||||
switch len(allowed) {
|
||||
case 0:
|
||||
return
|
||||
case 1:
|
||||
allowedMsg = allowed[0]
|
||||
case 2:
|
||||
allowedMsg = fmt.Sprintf("%s or %s", allowed[0], allowed[1])
|
||||
default:
|
||||
allowedMsg = fmt.Sprintf(
|
||||
"%s or %s",
|
||||
strings.Join(allowed[:len(allowed)-1], ", "),
|
||||
allowed[len(allowed)-1])
|
||||
}
|
||||
|
||||
fs.StringSliceVar(
|
||||
&CategoryDataFV,
|
||||
CategoryDataFN, nil,
|
||||
"Select one or more types of data to backup: "+allowedMsg+".")
|
||||
|
||||
if hide {
|
||||
cobra.CheckErr(fs.MarkHidden(CategoryDataFN))
|
||||
}
|
||||
}
|
||||
@ -1,56 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const (
|
||||
UserFN = "user"
|
||||
MailBoxFN = "mailbox"
|
||||
AzureClientTenantFN = "azure-tenant-id"
|
||||
AzureClientIDFN = "azure-client-id"
|
||||
AzureClientSecretFN = "azure-client-secret"
|
||||
)
|
||||
|
||||
var (
|
||||
UserFV []string
|
||||
AzureClientTenantFV string
|
||||
AzureClientIDFV string
|
||||
AzureClientSecretFV string
|
||||
)
|
||||
|
||||
// AddUserFlag adds the --user flag.
|
||||
func AddUserFlag(cmd *cobra.Command) {
|
||||
cmd.Flags().StringSliceVar(
|
||||
&UserFV,
|
||||
UserFN, nil,
|
||||
"Backup a specific user's data; accepts '"+Wildcard+"' to select all users.")
|
||||
cobra.CheckErr(cmd.MarkFlagRequired(UserFN))
|
||||
}
|
||||
|
||||
// AddMailBoxFlag adds the --user and --mailbox flag.
|
||||
func AddMailBoxFlag(cmd *cobra.Command) {
|
||||
flags := cmd.Flags()
|
||||
|
||||
flags.StringSliceVar(
|
||||
&UserFV,
|
||||
UserFN, nil,
|
||||
"Backup a specific user's data; accepts '"+Wildcard+"' to select all users.")
|
||||
|
||||
cobra.CheckErr(flags.MarkDeprecated(UserFN, fmt.Sprintf("use --%s instead", MailBoxFN)))
|
||||
|
||||
flags.StringSliceVar(
|
||||
&UserFV,
|
||||
MailBoxFN, nil,
|
||||
"Backup a specific mailbox's data; accepts '"+Wildcard+"' to select all mailbox.")
|
||||
}
|
||||
|
||||
// AddAzureCredsFlags adds M365 cred flags
|
||||
func AddAzureCredsFlags(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.StringVar(&AzureClientTenantFV, AzureClientTenantFN, "", "Azure tenant ID")
|
||||
fs.StringVar(&AzureClientIDFV, AzureClientIDFN, "", "Azure app client ID")
|
||||
fs.StringVar(&AzureClientSecretFV, AzureClientSecretFN, "", "Azure app client secret")
|
||||
}
|
||||
@ -1,60 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/pkg/control/repository"
|
||||
)
|
||||
|
||||
const (
|
||||
MaintenanceModeFN = "mode"
|
||||
ForceMaintenanceFN = "force"
|
||||
UserMaintenanceFN = "user"
|
||||
HostnameMaintenanceFN = "host"
|
||||
)
|
||||
|
||||
var (
|
||||
MaintenanceModeFV string
|
||||
ForceMaintenanceFV bool
|
||||
UserMaintenanceFV string
|
||||
HostnameMaintenanceFV string
|
||||
)
|
||||
|
||||
func AddMaintenanceModeFlag(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.StringVar(
|
||||
&MaintenanceModeFV,
|
||||
MaintenanceModeFN,
|
||||
repository.CompleteMaintenance.String(),
|
||||
"Type of maintenance operation to run ('"+
|
||||
repository.MetadataMaintenance.String()+"' | '"+
|
||||
repository.CompleteMaintenance.String()+"' )")
|
||||
}
|
||||
|
||||
func AddForceMaintenanceFlag(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.BoolVar(
|
||||
&ForceMaintenanceFV,
|
||||
ForceMaintenanceFN,
|
||||
false,
|
||||
"Force maintenance. Caution: user must ensure this is not run concurrently on a single repo")
|
||||
cobra.CheckErr(fs.MarkHidden(ForceMaintenanceFN))
|
||||
}
|
||||
|
||||
func AddMaintenanceUserFlag(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.StringVar(
|
||||
&UserMaintenanceFV,
|
||||
UserMaintenanceFN,
|
||||
"",
|
||||
"Attempt to run maintenance as the specified user for the repo owner user")
|
||||
}
|
||||
|
||||
func AddMaintenanceHostnameFlag(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.StringVar(
|
||||
&HostnameMaintenanceFV,
|
||||
HostnameMaintenanceFN,
|
||||
"",
|
||||
"Attempt to run maintenance with the specified hostname for the repo owner hostname")
|
||||
}
|
||||
@ -1,65 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const (
|
||||
FileFN = "file"
|
||||
FolderFN = "folder"
|
||||
|
||||
FileCreatedAfterFN = "file-created-after"
|
||||
FileCreatedBeforeFN = "file-created-before"
|
||||
FileModifiedAfterFN = "file-modified-after"
|
||||
FileModifiedBeforeFN = "file-modified-before"
|
||||
|
||||
UseOldDeltaProcessFN = "use-old-delta-process"
|
||||
)
|
||||
|
||||
var (
|
||||
FolderPathFV []string
|
||||
FileNameFV []string
|
||||
|
||||
FileCreatedAfterFV string
|
||||
FileCreatedBeforeFV string
|
||||
FileModifiedAfterFV string
|
||||
FileModifiedBeforeFV string
|
||||
|
||||
UseOldDeltaProcessFV bool
|
||||
)
|
||||
|
||||
// AddOneDriveDetailsAndRestoreFlags adds flags that are common to both the
|
||||
// details and restore commands.
|
||||
func AddOneDriveDetailsAndRestoreFlags(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
|
||||
fs.StringSliceVar(
|
||||
&FolderPathFV,
|
||||
FolderFN, nil,
|
||||
"Select files by OneDrive folder; defaults to root.")
|
||||
|
||||
fs.StringSliceVar(
|
||||
&FileNameFV,
|
||||
FileFN, nil,
|
||||
"Select files by name.")
|
||||
|
||||
fs.StringVar(
|
||||
&FileCreatedAfterFV,
|
||||
FileCreatedAfterFN, "",
|
||||
"Select files created after this datetime.")
|
||||
|
||||
fs.StringVar(
|
||||
&FileCreatedBeforeFV,
|
||||
FileCreatedBeforeFN, "",
|
||||
"Select files created before this datetime.")
|
||||
|
||||
fs.StringVar(
|
||||
&FileModifiedAfterFV,
|
||||
FileModifiedAfterFN, "",
|
||||
"Select files modified after this datetime.")
|
||||
|
||||
fs.StringVar(
|
||||
&FileModifiedBeforeFV,
|
||||
FileModifiedBeforeFN, "",
|
||||
"Select files modified before this datetime.")
|
||||
}
|
||||
@ -1,198 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const (
|
||||
AlertsFN = "alerts"
|
||||
ConfigFileFN = "config-file"
|
||||
DeltaPageSizeFN = "delta-page-size"
|
||||
DisableDeltaFN = "disable-delta"
|
||||
DisableIncrementalsFN = "disable-incrementals"
|
||||
DisableLazyItemReaderFN = "disable-lazy-item-reader"
|
||||
DisableSlidingWindowLimiterFN = "disable-sliding-window-limiter"
|
||||
ForceItemDataDownloadFN = "force-item-data-download"
|
||||
EnableImmutableIDFN = "enable-immutable-id"
|
||||
FailFastFN = "fail-fast"
|
||||
FailedItemsFN = "failed-items"
|
||||
FetchParallelismFN = "fetch-parallelism"
|
||||
NoPermissionsFN = "no-permissions"
|
||||
NoStatsFN = "no-stats"
|
||||
RecoveredErrorsFN = "recovered-errors"
|
||||
RunModeFN = "run-mode"
|
||||
SkippedItemsFN = "skipped-items"
|
||||
SkipReduceFN = "skip-reduce"
|
||||
)
|
||||
|
||||
var (
|
||||
ConfigFileFV string
|
||||
DeltaPageSizeFV int
|
||||
DisableDeltaFV bool
|
||||
DisableIncrementalsFV bool
|
||||
DisableLazyItemReaderFV bool
|
||||
DisableSlidingWindowLimiterFV bool
|
||||
ForceItemDataDownloadFV bool
|
||||
EnableImmutableIDFV bool
|
||||
FailFastFV bool
|
||||
FailedItemsFV string
|
||||
FetchParallelismFV int
|
||||
ListAlertsFV string
|
||||
ListSkippedItemsFV string
|
||||
ListRecoveredErrorsFV string
|
||||
NoPermissionsFV bool
|
||||
NoStatsFV bool
|
||||
// RunMode describes the type of run, such as:
|
||||
// flagtest, dry, run. Should default to 'run'.
|
||||
RunModeFV string
|
||||
SkipReduceFV bool
|
||||
)
|
||||
|
||||
// well-known flag values
|
||||
const (
|
||||
RunModeFlagTest = "flag-test"
|
||||
RunModeRun = "run"
|
||||
)
|
||||
|
||||
// AddGlobalOperationFlags adds the global operations flag set.
|
||||
func AddGlobalOperationFlags(cmd *cobra.Command) {
|
||||
fs := cmd.PersistentFlags()
|
||||
fs.BoolVar(&NoStatsFV, NoStatsFN, false, "disable anonymous usage statistics gathering")
|
||||
}
|
||||
|
||||
// AddFailFastFlag adds a flag to toggle fail-fast error handling behavior.
|
||||
func AddFailFastFlag(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.BoolVar(&FailFastFV, FailFastFN, false, "stop processing immediately if any error occurs")
|
||||
// TODO: reveal this flag when fail-fast support is implemented
|
||||
cobra.CheckErr(fs.MarkHidden(FailFastFN))
|
||||
}
|
||||
|
||||
// AddNoPermissionsFlag adds OneDrive flag for skipping restoring permissions
|
||||
func AddNoPermissionsFlag(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.BoolVar(&NoPermissionsFV, NoPermissionsFN, false, "don't restore file and folder permissions")
|
||||
}
|
||||
|
||||
// AddSkipReduceFlag adds a hidden flag that allows callers to skip the selector
|
||||
// reduction step. Currently only intended for details commands, not restore.
|
||||
func AddSkipReduceFlag(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.BoolVar(&SkipReduceFV, SkipReduceFN, false, "Skip the selector reduce filtering")
|
||||
cobra.CheckErr(fs.MarkHidden(SkipReduceFN))
|
||||
}
|
||||
|
||||
// AddDeltaPageSizeFlag adds a hidden flag that allows callers to reduce delta
|
||||
// query page sizes below 500.
|
||||
func AddDeltaPageSizeFlag(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.IntVar(
|
||||
&DeltaPageSizeFV,
|
||||
DeltaPageSizeFN,
|
||||
500,
|
||||
"Control quantity of items returned in paged queries. Valid range is [1-500]. Default: 500")
|
||||
cobra.CheckErr(fs.MarkHidden(DeltaPageSizeFN))
|
||||
}
|
||||
|
||||
// AddFetchParallelismFlag adds a hidden flag that allows callers to reduce call
|
||||
// paralellism (ie, the corso worker pool size) from 4 to as low as 1.
|
||||
func AddFetchParallelismFlag(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.IntVar(
|
||||
&FetchParallelismFV,
|
||||
FetchParallelismFN,
|
||||
4,
|
||||
"Control the number of concurrent data fetches for Exchange. Valid range is [1-4]. Default: 4")
|
||||
cobra.CheckErr(fs.MarkHidden(FetchParallelismFN))
|
||||
}
|
||||
|
||||
// Adds the hidden '--disable-incrementals' cli flag which, when set, disables
|
||||
// incremental backups.
|
||||
func AddDisableIncrementalsFlag(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.BoolVar(
|
||||
&DisableIncrementalsFV,
|
||||
DisableIncrementalsFN,
|
||||
false,
|
||||
"Disable incremental data retrieval in backups.")
|
||||
cobra.CheckErr(fs.MarkHidden(DisableIncrementalsFN))
|
||||
}
|
||||
|
||||
// Adds the hidden '--force-item-data-download' cli flag which, when set,
|
||||
// disables kopia-assisted incremental backups.
|
||||
func AddForceItemDataDownloadFlag(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.BoolVar(
|
||||
&ForceItemDataDownloadFV,
|
||||
ForceItemDataDownloadFN,
|
||||
false,
|
||||
"Disable cached data checks in backups to force item redownloads for "+
|
||||
"items changed since the last successful backup.")
|
||||
cobra.CheckErr(fs.MarkHidden(ForceItemDataDownloadFN))
|
||||
}
|
||||
|
||||
// Adds the hidden '--disable-delta' cli flag which, when set, disables
|
||||
// delta based backups.
|
||||
func AddDisableDeltaFlag(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.BoolVar(
|
||||
&DisableDeltaFV,
|
||||
DisableDeltaFN,
|
||||
false,
|
||||
"Disable delta based data retrieval in backups.")
|
||||
cobra.CheckErr(fs.MarkHidden(DisableDeltaFN))
|
||||
}
|
||||
|
||||
// Adds the hidden '--enable-immutable-id' cli flag which, when set, enables
|
||||
// immutable IDs for Exchange
|
||||
func AddEnableImmutableIDFlag(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.BoolVar(
|
||||
&EnableImmutableIDFV,
|
||||
EnableImmutableIDFN,
|
||||
false,
|
||||
"Enable exchange immutable ID.")
|
||||
cobra.CheckErr(fs.MarkHidden(EnableImmutableIDFN))
|
||||
}
|
||||
|
||||
// AddRunModeFlag adds the hidden --run-mode flag.
|
||||
func AddRunModeFlag(cmd *cobra.Command, persistent bool) {
|
||||
fs := cmd.Flags()
|
||||
if persistent {
|
||||
fs = cmd.PersistentFlags()
|
||||
}
|
||||
|
||||
fs.StringVar(&RunModeFV, RunModeFN, "run", "What mode to run: dry, test, run. Defaults to run.")
|
||||
cobra.CheckErr(fs.MarkHidden(RunModeFN))
|
||||
}
|
||||
|
||||
// AddDisableSlidingWindowLimiterFN disables the experimental sliding window rate
|
||||
// limiter for graph API requests. This is only relevant for exchange backups.
|
||||
// Exchange restores continue to use the default token bucket rate limiter.
|
||||
// Setting this flag switches exchange backups to use the default token bucket
|
||||
// rate limiter.
|
||||
func AddDisableSlidingWindowLimiterFlag(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.BoolVar(
|
||||
&DisableSlidingWindowLimiterFV,
|
||||
DisableSlidingWindowLimiterFN,
|
||||
false,
|
||||
"Disable sliding window rate limiter.")
|
||||
cobra.CheckErr(fs.MarkHidden(DisableSlidingWindowLimiterFN))
|
||||
}
|
||||
|
||||
// AddDisableLazyItemReader disables lazy item reader, such that we fall back to
|
||||
// prefetch reader. This flag is currently only meant for groups conversations
|
||||
// backup. Although it can be utilized for other services in future.
|
||||
//
|
||||
// This flag should only be used if lazy item reader is the default choice and
|
||||
// we want to fallback to prefetch reader.
|
||||
func AddDisableLazyItemReader(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.BoolVar(
|
||||
&DisableLazyItemReaderFV,
|
||||
DisableLazyItemReaderFN,
|
||||
false,
|
||||
"Disable lazy item reader.")
|
||||
cobra.CheckErr(fs.MarkHidden(DisableLazyItemReaderFN))
|
||||
}
|
||||
@ -1,97 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const (
|
||||
BackupFN = "backup"
|
||||
BackupIDsFN = "backups"
|
||||
AWSAccessKeyFN = "aws-access-key"
|
||||
AWSSecretAccessKeyFN = "aws-secret-access-key"
|
||||
AWSSessionTokenFN = "aws-session-token"
|
||||
|
||||
// Corso Flags
|
||||
PassphraseFN = "passphrase"
|
||||
NewPassphraseFN = "new-passphrase"
|
||||
)
|
||||
|
||||
var (
|
||||
BackupIDFV string
|
||||
BackupIDsFV []string
|
||||
AWSAccessKeyFV string
|
||||
AWSSecretAccessKeyFV string
|
||||
AWSSessionTokenFV string
|
||||
PassphraseFV string
|
||||
NewPhasephraseFV string
|
||||
)
|
||||
|
||||
// AddMultipleBackupIDsFlag adds the --backups flag.
|
||||
func AddMultipleBackupIDsFlag(cmd *cobra.Command, require bool) {
|
||||
cmd.Flags().StringSliceVar(
|
||||
&BackupIDsFV,
|
||||
BackupIDsFN, nil,
|
||||
"',' separated IDs of the backup to retrieve")
|
||||
|
||||
if require {
|
||||
cobra.CheckErr(cmd.MarkFlagRequired(BackupIDsFN))
|
||||
}
|
||||
}
|
||||
|
||||
// AddBackupIDFlag adds the --backup flag.
|
||||
func AddBackupIDFlag(cmd *cobra.Command, require bool) {
|
||||
cmd.Flags().StringVar(&BackupIDFV, BackupFN, "", "ID of the backup to retrieve.")
|
||||
|
||||
if require {
|
||||
cobra.CheckErr(cmd.MarkFlagRequired(BackupFN))
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// storage
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func AddAllStorageFlags(cmd *cobra.Command) {
|
||||
AddCorsoPassphaseFlags(cmd)
|
||||
// AddAzureCredsFlags is added by ProviderFlags
|
||||
AddAWSCredsFlags(cmd)
|
||||
}
|
||||
|
||||
func AddAWSCredsFlags(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.StringVar(&AWSAccessKeyFV, AWSAccessKeyFN, "", "S3 access key")
|
||||
fs.StringVar(&AWSSecretAccessKeyFV, AWSSecretAccessKeyFN, "", "S3 access secret")
|
||||
fs.StringVar(&AWSSessionTokenFV, AWSSessionTokenFN, "", "S3 session token")
|
||||
}
|
||||
|
||||
// M365 flags
|
||||
func AddCorsoPassphaseFlags(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.StringVar(
|
||||
&PassphraseFV,
|
||||
PassphraseFN,
|
||||
"",
|
||||
"Passphrase to protect encrypted repository contents")
|
||||
}
|
||||
|
||||
// M365 flags
|
||||
func AddUpdatePassphraseFlags(cmd *cobra.Command, require bool) {
|
||||
fs := cmd.Flags()
|
||||
fs.StringVar(
|
||||
&NewPhasephraseFV,
|
||||
NewPassphraseFN,
|
||||
"",
|
||||
"update Corso passphrase for repo")
|
||||
|
||||
if require {
|
||||
cobra.CheckErr(cmd.MarkFlagRequired(NewPassphraseFN))
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Provider
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func AddAllProviderFlags(cmd *cobra.Command) {
|
||||
AddAzureCredsFlags(cmd)
|
||||
}
|
||||
@ -1,37 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/pkg/control"
|
||||
)
|
||||
|
||||
const (
|
||||
CollisionsFN = "collisions"
|
||||
DestinationFN = "destination"
|
||||
ToResourceFN = "to-resource"
|
||||
)
|
||||
|
||||
var (
|
||||
CollisionsFV string
|
||||
DestinationFV string
|
||||
ToResourceFV string
|
||||
)
|
||||
|
||||
// AddRestoreConfigFlags adds the restore config flag set.
|
||||
func AddRestoreConfigFlags(cmd *cobra.Command, canRestoreToAlternate bool) {
|
||||
fs := cmd.Flags()
|
||||
fs.StringVar(
|
||||
&CollisionsFV, CollisionsFN, string(control.Skip),
|
||||
//nolint:lll
|
||||
"Sets the behavior for existing item collisions: "+string(control.Skip)+", "+string(control.Copy)+", or "+string(control.Replace))
|
||||
fs.StringVar(
|
||||
&DestinationFV, DestinationFN, "",
|
||||
"Overrides the folder where items get restored; '/' places items into their original location")
|
||||
|
||||
if canRestoreToAlternate {
|
||||
fs.StringVar(
|
||||
&ToResourceFV, ToResourceFN, "",
|
||||
"Overrides the protected resource (mailbox, site, user, etc) where data gets restored")
|
||||
}
|
||||
}
|
||||
@ -1,50 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/pkg/control/repository"
|
||||
)
|
||||
|
||||
const (
|
||||
RetentionModeFN = "retention-mode"
|
||||
RetentionDurationFN = "retention-duration"
|
||||
ExtendRetentionFN = "extend-retention"
|
||||
)
|
||||
|
||||
var (
|
||||
RetentionModeFV string
|
||||
RetentionDurationFV time.Duration
|
||||
ExtendRetentionFV bool
|
||||
)
|
||||
|
||||
// AddRetentionConfigFlags adds the retention config flag set.
|
||||
func AddRetentionConfigFlags(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
fs.StringVar(
|
||||
&RetentionModeFV,
|
||||
RetentionModeFN,
|
||||
repository.NoRetention.String(),
|
||||
"Sets object locking mode (if any) to use in remote storage: "+
|
||||
repository.NoRetention.String()+", "+
|
||||
repository.GovernanceRetention.String()+", or "+
|
||||
repository.ComplianceRetention.String())
|
||||
cobra.CheckErr(fs.MarkHidden(RetentionModeFN))
|
||||
|
||||
fs.DurationVar(
|
||||
&RetentionDurationFV,
|
||||
RetentionDurationFN,
|
||||
time.Duration(0),
|
||||
"Set the amount of time to lock individual objects in remote storage")
|
||||
cobra.CheckErr(fs.MarkHidden(RetentionDurationFN))
|
||||
|
||||
fs.BoolVar(
|
||||
&ExtendRetentionFV,
|
||||
ExtendRetentionFN,
|
||||
false,
|
||||
"Extends object locks during maintenance. "+
|
||||
"Extends locks by the most recently set value of "+RetentionDurationFN)
|
||||
cobra.CheckErr(fs.MarkHidden(ExtendRetentionFN))
|
||||
}
|
||||
@ -1,86 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/alcionai/corso/src/pkg/credentials"
|
||||
"github.com/alcionai/corso/src/pkg/storage"
|
||||
)
|
||||
|
||||
// S3 bucket flags
|
||||
const (
|
||||
BucketFN = "bucket"
|
||||
EndpointFN = "endpoint"
|
||||
PrefixFN = "prefix"
|
||||
DoNotUseTLSFN = "disable-tls"
|
||||
DoNotVerifyTLSFN = "disable-tls-verification"
|
||||
)
|
||||
|
||||
// S3 bucket flag values
|
||||
var (
|
||||
BucketFV string
|
||||
EndpointFV string
|
||||
PrefixFV string
|
||||
DoNotUseTLSFV bool
|
||||
DoNotVerifyTLSFV bool
|
||||
)
|
||||
|
||||
// S3 bucket flags
|
||||
func AddS3BucketFlags(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
|
||||
// Flags addition ordering should follow the order we want them to appear in help and docs:
|
||||
// More generic and more frequently used flags take precedence.
|
||||
fs.StringVar(&BucketFV, BucketFN, "", "Name of S3 bucket for repo. (required)")
|
||||
fs.StringVar(&PrefixFV, PrefixFN, "", "Repo prefix within bucket.")
|
||||
fs.StringVar(&EndpointFV, EndpointFN, "", "S3 service endpoint.")
|
||||
fs.BoolVar(&DoNotUseTLSFV, DoNotUseTLSFN, false, "Disable TLS (HTTPS)")
|
||||
fs.BoolVar(&DoNotVerifyTLSFV, DoNotVerifyTLSFN, false, "Disable TLS (HTTPS) certificate verification.")
|
||||
}
|
||||
|
||||
func S3FlagOverrides(cmd *cobra.Command) map[string]string {
|
||||
fs := GetPopulatedFlags(cmd)
|
||||
return PopulateS3Flags(fs)
|
||||
}
|
||||
|
||||
func PopulateS3Flags(flagset PopulatedFlags) map[string]string {
|
||||
s3Overrides := map[string]string{
|
||||
storage.StorageProviderTypeKey: storage.ProviderS3.String(),
|
||||
}
|
||||
|
||||
if _, ok := flagset[AWSAccessKeyFN]; ok {
|
||||
s3Overrides[credentials.AWSAccessKeyID] = AWSAccessKeyFV
|
||||
}
|
||||
|
||||
if _, ok := flagset[AWSSecretAccessKeyFN]; ok {
|
||||
s3Overrides[credentials.AWSSecretAccessKey] = AWSSecretAccessKeyFV
|
||||
}
|
||||
|
||||
if _, ok := flagset[AWSSessionTokenFN]; ok {
|
||||
s3Overrides[credentials.AWSSessionToken] = AWSSessionTokenFV
|
||||
}
|
||||
|
||||
if _, ok := flagset[BucketFN]; ok {
|
||||
s3Overrides[storage.Bucket] = BucketFV
|
||||
}
|
||||
|
||||
if _, ok := flagset[PrefixFN]; ok {
|
||||
s3Overrides[storage.Prefix] = PrefixFV
|
||||
}
|
||||
|
||||
if _, ok := flagset[DoNotUseTLSFN]; ok {
|
||||
s3Overrides[storage.DoNotUseTLS] = strconv.FormatBool(DoNotUseTLSFV)
|
||||
}
|
||||
|
||||
if _, ok := flagset[DoNotVerifyTLSFN]; ok {
|
||||
s3Overrides[storage.DoNotVerifyTLS] = strconv.FormatBool(DoNotVerifyTLSFV)
|
||||
}
|
||||
|
||||
if _, ok := flagset[EndpointFN]; ok {
|
||||
s3Overrides[storage.Endpoint] = EndpointFV
|
||||
}
|
||||
|
||||
return s3Overrides
|
||||
}
|
||||
@ -1,144 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const (
|
||||
DataLibraries = "libraries"
|
||||
DataPages = "pages"
|
||||
DataLists = "lists"
|
||||
)
|
||||
|
||||
const (
|
||||
LibraryFN = "library"
|
||||
|
||||
ListFN = "list"
|
||||
ListModifiedAfterFN = "list-modified-after"
|
||||
ListModifiedBeforeFN = "list-modified-before"
|
||||
ListCreatedAfterFN = "list-created-after"
|
||||
ListCreatedBeforeFN = "list-created-before"
|
||||
|
||||
PageFolderFN = "page-folder"
|
||||
PageFN = "page"
|
||||
|
||||
SiteFN = "site" // site only accepts WebURL values
|
||||
SiteIDFN = "site-id" // site-id accepts actual site ids
|
||||
)
|
||||
|
||||
var (
|
||||
LibraryFV string
|
||||
|
||||
ListFV []string
|
||||
ListModifiedAfterFV string
|
||||
ListModifiedBeforeFV string
|
||||
ListCreatedAfterFV string
|
||||
ListCreatedBeforeFV string
|
||||
|
||||
PageFolderFV []string
|
||||
PageFV []string
|
||||
|
||||
SiteIDFV []string
|
||||
WebURLFV []string
|
||||
)
|
||||
|
||||
// AddSharePointDetailsAndRestoreFlags adds flags that are common to both the
|
||||
// details and restore commands.
|
||||
func AddSharePointDetailsAndRestoreFlags(cmd *cobra.Command) {
|
||||
fs := cmd.Flags()
|
||||
|
||||
// libraries
|
||||
|
||||
fs.StringVar(
|
||||
&LibraryFV,
|
||||
LibraryFN, "",
|
||||
"Select only this library; defaults to all libraries.")
|
||||
fs.StringSliceVar(
|
||||
&FolderPathFV,
|
||||
FolderFN, nil,
|
||||
"Select by folder; defaults to root.")
|
||||
fs.StringSliceVar(
|
||||
&FileNameFV,
|
||||
FileFN, nil,
|
||||
"Select by file name.")
|
||||
fs.StringVar(
|
||||
&FileCreatedAfterFV,
|
||||
FileCreatedAfterFN, "",
|
||||
"Select files created after this datetime.")
|
||||
fs.StringVar(
|
||||
&FileCreatedBeforeFV,
|
||||
FileCreatedBeforeFN, "",
|
||||
"Select files created before this datetime.")
|
||||
fs.StringVar(
|
||||
&FileModifiedAfterFV,
|
||||
FileModifiedAfterFN, "",
|
||||
"Select files modified after this datetime.")
|
||||
fs.StringVar(
|
||||
&FileModifiedBeforeFV,
|
||||
FileModifiedBeforeFN, "",
|
||||
"Select files modified before this datetime.")
|
||||
|
||||
// lists
|
||||
fs.StringSliceVar(
|
||||
&ListFV,
|
||||
ListFN, nil,
|
||||
"Select lists by name.")
|
||||
fs.StringVar(
|
||||
&ListModifiedAfterFV,
|
||||
ListModifiedAfterFN, "",
|
||||
"Select lists modified after this datetime.")
|
||||
fs.StringVar(
|
||||
&ListModifiedBeforeFV,
|
||||
ListModifiedBeforeFN, "",
|
||||
"Select lists modified before this datetime.")
|
||||
fs.StringVar(
|
||||
&ListCreatedAfterFV,
|
||||
ListCreatedAfterFN, "",
|
||||
"Select lists created after this datetime.")
|
||||
fs.StringVar(
|
||||
&ListCreatedBeforeFV,
|
||||
ListCreatedBeforeFN, "",
|
||||
"Select lists created before this datetime.")
|
||||
|
||||
// pages
|
||||
|
||||
fs.StringSliceVar(
|
||||
&PageFolderFV,
|
||||
PageFolderFN, nil,
|
||||
"Select pages by folder name; accepts '"+Wildcard+"' to select all pages.")
|
||||
cobra.CheckErr(fs.MarkHidden(PageFolderFN))
|
||||
fs.StringSliceVar(
|
||||
&PageFV,
|
||||
PageFN, nil,
|
||||
"Select pages by item name; accepts '"+Wildcard+"' to select all pages.")
|
||||
cobra.CheckErr(fs.MarkHidden(PageFN))
|
||||
}
|
||||
|
||||
// AddSiteIDFlag adds the --site-id flag, which accepts site ID values.
|
||||
// This flag is hidden, since we expect users to prefer the --site url
|
||||
// and do not want to encourage confusion.
|
||||
func AddSiteIDFlag(cmd *cobra.Command, multiple bool) {
|
||||
fs := cmd.Flags()
|
||||
|
||||
message := "ID of the site to operate on"
|
||||
if multiple {
|
||||
//nolint:lll
|
||||
message += "; accepts '" + Wildcard + "' to select all sites. Args cannot be comma-delimited and must use multiple flags."
|
||||
}
|
||||
|
||||
// note string ARRAY var. IDs naturally contain commas, so we cannot accept
|
||||
// duplicate values within a flag declaration. ie: --site-id a,b,c does not
|
||||
// work. Users must call --site-id a --site-id b --site-id c.
|
||||
fs.StringArrayVar(&SiteIDFV, SiteIDFN, nil, message)
|
||||
cobra.CheckErr(fs.MarkHidden(SiteIDFN))
|
||||
}
|
||||
|
||||
// AddSiteFlag adds the --site flag, which accepts webURL values.
|
||||
func AddSiteFlag(cmd *cobra.Command, multiple bool) {
|
||||
message := "Web URL of the site to operate on"
|
||||
if multiple {
|
||||
message += "; accepts '" + Wildcard + "' to select all sites."
|
||||
}
|
||||
|
||||
cmd.Flags().StringSliceVar(&WebURLFV, SiteFN, nil, message)
|
||||
}
|
||||
@ -1,28 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const (
|
||||
TeamFN = "team"
|
||||
)
|
||||
|
||||
var TeamFV []string
|
||||
|
||||
func AddTeamDetailsAndRestoreFlags(cmd *cobra.Command) {
|
||||
// TODO: implement flags
|
||||
}
|
||||
|
||||
// AddTeamFlag adds the --team flag, which accepts id or name values.
|
||||
// TODO: need to decide what the appropriate "name" to accept here is.
|
||||
// keepers thinks its either DisplayName or MailNickname or Mail
|
||||
// Mail is most accurate, MailNickame is accurate and shorter, but the end user
|
||||
// may not see either one visibly.
|
||||
// https://learn.microsoft.com/en-us/graph/api/team-list?view=graph-rest-1.0&tabs=http
|
||||
func AddTeamFlag(cmd *cobra.Command) {
|
||||
cmd.Flags().StringSliceVar(
|
||||
&TeamFV,
|
||||
TeamFN, nil,
|
||||
"Backup data by team; accepts '"+Wildcard+"' to select all teams.")
|
||||
}
|
||||
@ -1,13 +0,0 @@
|
||||
package flags
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const (
|
||||
DataChats = "chats"
|
||||
)
|
||||
|
||||
func AddTeamsChatsDetailsAndRestoreFlags(cmd *cobra.Command) {
|
||||
// TODO: add details flags
|
||||
}
|
||||
26
src/cli/flags/testdata/backup_list.go
vendored
26
src/cli/flags/testdata/backup_list.go
vendored
@ -1,26 +0,0 @@
|
||||
package testdata
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"gotest.tools/v3/assert"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
)
|
||||
|
||||
func PreparedBackupListFlags() []string {
|
||||
return []string{
|
||||
"--" + flags.AlertsFN, flags.Show,
|
||||
"--" + flags.FailedItemsFN, flags.Show,
|
||||
"--" + flags.SkippedItemsFN, flags.Show,
|
||||
"--" + flags.RecoveredErrorsFN, flags.Show,
|
||||
}
|
||||
}
|
||||
|
||||
func AssertBackupListFlags(t *testing.T, cmd *cobra.Command) {
|
||||
assert.Equal(t, flags.Show, flags.ListAlertsFV)
|
||||
assert.Equal(t, flags.Show, flags.FailedItemsFV)
|
||||
assert.Equal(t, flags.Show, flags.ListSkippedItemsFV)
|
||||
assert.Equal(t, flags.Show, flags.ListRecoveredErrorsFV)
|
||||
}
|
||||
130
src/cli/flags/testdata/flags.go
vendored
130
src/cli/flags/testdata/flags.go
vendored
@ -1,130 +0,0 @@
|
||||
package testdata
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func FlgInputs(in []string) string { return strings.Join(in, ",") }
|
||||
|
||||
var (
|
||||
BackupInput = "backup-id"
|
||||
SiteInput = "site-id"
|
||||
|
||||
GroupsInput = []string{"team1", "group2"}
|
||||
MailboxInput = []string{"mailbox1", "mailbox2"}
|
||||
UsersInput = []string{"users1", "users2"}
|
||||
SiteIDInput = []string{"siteID1", "siteID2"}
|
||||
WebURLInput = []string{"webURL1", "webURL2"}
|
||||
|
||||
ExchangeCategoryDataInput = []string{"email", "events", "contacts"}
|
||||
SharepointCategoryDataInput = []string{"files", "lists", "pages"}
|
||||
GroupsCategoryDataInput = []string{"files", "lists", "pages", "messages"}
|
||||
TeamsChatsCategoryDataInput = []string{"chats"}
|
||||
|
||||
ChannelInput = []string{"channel1", "channel2"}
|
||||
MessageInput = []string{"message1", "message2"}
|
||||
MessageCreatedAfterInput = "messageCreatedAfter"
|
||||
MessageCreatedBeforeInput = "messageCreatedBefore"
|
||||
MessageLastReplyAfterInput = "messageLastReplyAfter"
|
||||
MessageLastReplyBeforeInput = "messageLastReplyBefore"
|
||||
|
||||
ContactInput = []string{"contact1", "contact2"}
|
||||
ContactFldInput = []string{"contactFld1", "contactFld2"}
|
||||
ContactNameInput = "contactName"
|
||||
|
||||
ConversationInput = []string{"conversation1", "conversation2"}
|
||||
PostInput = []string{"post1", "post2"}
|
||||
|
||||
EmailInput = []string{"mail1", "mail2"}
|
||||
EmailFldInput = []string{"mailFld1", "mailFld2"}
|
||||
EmailReceivedAfterInput = "mailReceivedAfter"
|
||||
EmailReceivedBeforeInput = "mailReceivedBefore"
|
||||
EmailSenderInput = "mailSender"
|
||||
EmailSubjectInput = "mailSubject"
|
||||
|
||||
EventInput = []string{"event1", "event2"}
|
||||
EventCalInput = []string{"eventCal1", "eventCal2"}
|
||||
EventOrganizerInput = "eventOrganizer"
|
||||
EventRecursInput = "eventRecurs"
|
||||
EventStartsAfterInput = "eventStartsAfter"
|
||||
EventStartsBeforeInput = "eventStartsBefore"
|
||||
EventSubjectInput = "eventSubject"
|
||||
|
||||
LibraryInput = "library"
|
||||
FileNameInput = []string{"fileName1", "fileName2"}
|
||||
FolderPathInput = []string{"folderPath1", "folderPath2"}
|
||||
FileCreatedAfterInput = "fileCreatedAfter"
|
||||
FileCreatedBeforeInput = "fileCreatedBefore"
|
||||
FileModifiedAfterInput = "fileModifiedAfter"
|
||||
FileModifiedBeforeInput = "fileModifiedBefore"
|
||||
|
||||
ListsInput = []string{"listName1", "listName2"}
|
||||
ListCreatedAfterInput = "listCreatedAfter"
|
||||
ListCreatedBeforeInput = "listCreatedBefore"
|
||||
ListModifiedAfterInput = "listModifiedAfter"
|
||||
ListModifiedBeforeInput = "listModifiedBefore"
|
||||
|
||||
PageFolderInput = []string{"pageFolder1", "pageFolder2"}
|
||||
PageInput = []string{"page1", "page2"}
|
||||
|
||||
Collisions = "collisions"
|
||||
Destination = "destination"
|
||||
ToResource = "toResource"
|
||||
SkipPermissions = false
|
||||
|
||||
DeltaPageSize = "7"
|
||||
|
||||
Archive = true
|
||||
FormatType = "json"
|
||||
|
||||
AzureClientID = "testAzureClientId"
|
||||
AzureTenantID = "testAzureTenantId"
|
||||
AzureClientSecret = "testAzureClientSecret"
|
||||
|
||||
AWSAccessKeyID = "testAWSAccessKeyID"
|
||||
AWSSecretAccessKey = "testAWSSecretAccessKey"
|
||||
AWSSessionToken = "testAWSSessionToken"
|
||||
|
||||
CorsoPassphrase = "testCorsoPassphrase"
|
||||
|
||||
RestoreDestination = "test-restore-destination"
|
||||
|
||||
FetchParallelism = "3"
|
||||
|
||||
FailFast = true
|
||||
DisableIncrementals = true
|
||||
ForceItemDataDownload = true
|
||||
DisableDelta = true
|
||||
EnableImmutableID = true
|
||||
)
|
||||
|
||||
func WithFlags2(
|
||||
cc *cobra.Command,
|
||||
command string,
|
||||
flagSets ...[]string,
|
||||
) {
|
||||
args := []string{command}
|
||||
|
||||
for _, sl := range flagSets {
|
||||
args = append(args, sl...)
|
||||
}
|
||||
|
||||
cc.SetArgs(args)
|
||||
}
|
||||
|
||||
func WithFlags(
|
||||
command string,
|
||||
flagSets ...[]string,
|
||||
) func(*cobra.Command) {
|
||||
return func(cc *cobra.Command) {
|
||||
args := []string{command}
|
||||
|
||||
for _, sl := range flagSets {
|
||||
args = append(args, sl...)
|
||||
}
|
||||
|
||||
cc.SetArgs(args)
|
||||
}
|
||||
}
|
||||
42
src/cli/flags/testdata/groups.go
vendored
42
src/cli/flags/testdata/groups.go
vendored
@ -1,42 +0,0 @@
|
||||
package testdata
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
)
|
||||
|
||||
func PreparedChannelFlags() []string {
|
||||
return []string{
|
||||
"--" + flags.ChannelFN, FlgInputs(ChannelInput),
|
||||
"--" + flags.MessageFN, FlgInputs(MessageInput),
|
||||
"--" + flags.MessageCreatedAfterFN, MessageCreatedAfterInput,
|
||||
"--" + flags.MessageCreatedBeforeFN, MessageCreatedBeforeInput,
|
||||
"--" + flags.MessageLastReplyAfterFN, MessageLastReplyAfterInput,
|
||||
"--" + flags.MessageLastReplyBeforeFN, MessageLastReplyBeforeInput,
|
||||
}
|
||||
}
|
||||
|
||||
func AssertChannelFlags(t *testing.T, cmd *cobra.Command) {
|
||||
assert.ElementsMatch(t, ChannelInput, flags.ChannelFV)
|
||||
assert.ElementsMatch(t, MessageInput, flags.MessageFV)
|
||||
assert.Equal(t, MessageCreatedAfterInput, flags.MessageCreatedAfterFV)
|
||||
assert.Equal(t, MessageCreatedBeforeInput, flags.MessageCreatedBeforeFV)
|
||||
assert.Equal(t, MessageLastReplyAfterInput, flags.MessageLastReplyAfterFV)
|
||||
assert.Equal(t, MessageLastReplyBeforeInput, flags.MessageLastReplyBeforeFV)
|
||||
}
|
||||
|
||||
func PreparedConversationFlags() []string {
|
||||
return []string{
|
||||
"--" + flags.ConversationFN, FlgInputs(ConversationInput),
|
||||
"--" + flags.PostFN, FlgInputs(PostInput),
|
||||
}
|
||||
}
|
||||
|
||||
func AssertConversationFlags(t *testing.T, cmd *cobra.Command) {
|
||||
assert.Equal(t, ConversationInput, flags.ConversationFV)
|
||||
assert.Equal(t, PostInput, flags.PostFV)
|
||||
}
|
||||
56
src/cli/flags/testdata/repo.go
vendored
56
src/cli/flags/testdata/repo.go
vendored
@ -1,56 +0,0 @@
|
||||
package testdata
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
)
|
||||
|
||||
func PreparedStorageFlags() []string {
|
||||
return []string{
|
||||
"--" + flags.AWSAccessKeyFN, AWSAccessKeyID,
|
||||
"--" + flags.AWSSecretAccessKeyFN, AWSSecretAccessKey,
|
||||
"--" + flags.AWSSessionTokenFN, AWSSessionToken,
|
||||
|
||||
"--" + flags.PassphraseFN, CorsoPassphrase,
|
||||
}
|
||||
}
|
||||
|
||||
func AssertStorageFlags(t *testing.T, cmd *cobra.Command) {
|
||||
assert.Equal(t, AWSAccessKeyID, flags.AWSAccessKeyFV)
|
||||
assert.Equal(t, AWSSecretAccessKey, flags.AWSSecretAccessKeyFV)
|
||||
assert.Equal(t, AWSSessionToken, flags.AWSSessionTokenFV)
|
||||
|
||||
assert.Equal(t, CorsoPassphrase, flags.PassphraseFV)
|
||||
}
|
||||
|
||||
func PreparedProviderFlags() []string {
|
||||
return []string{
|
||||
"--" + flags.AzureClientIDFN, AzureClientID,
|
||||
"--" + flags.AzureClientTenantFN, AzureTenantID,
|
||||
"--" + flags.AzureClientSecretFN, AzureClientSecret,
|
||||
}
|
||||
}
|
||||
|
||||
func AssertProviderFlags(t *testing.T, cmd *cobra.Command) {
|
||||
assert.Equal(t, AzureClientID, flags.AzureClientIDFV)
|
||||
assert.Equal(t, AzureTenantID, flags.AzureClientTenantFV)
|
||||
assert.Equal(t, AzureClientSecret, flags.AzureClientSecretFV)
|
||||
}
|
||||
|
||||
func PreparedGenericBackupFlags() []string {
|
||||
return []string{
|
||||
"--" + flags.FailFastFN,
|
||||
"--" + flags.DisableIncrementalsFN,
|
||||
"--" + flags.ForceItemDataDownloadFN,
|
||||
}
|
||||
}
|
||||
|
||||
func AssertGenericBackupFlags(t *testing.T, cmd *cobra.Command) {
|
||||
assert.True(t, flags.FailFastFV, "fail fast flag")
|
||||
assert.True(t, flags.DisableIncrementalsFV, "disable incrementals flag")
|
||||
assert.True(t, flags.ForceItemDataDownloadFV, "force item data download flag")
|
||||
}
|
||||
32
src/cli/flags/testdata/sharepoint.go
vendored
32
src/cli/flags/testdata/sharepoint.go
vendored
@ -1,32 +0,0 @@
|
||||
package testdata
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/alcionai/corso/src/cli/flags"
|
||||
)
|
||||
|
||||
func PreparedLibraryFlags() []string {
|
||||
return []string{
|
||||
"--" + flags.LibraryFN, LibraryInput,
|
||||
"--" + flags.FolderFN, FlgInputs(FolderPathInput),
|
||||
"--" + flags.FileFN, FlgInputs(FileNameInput),
|
||||
"--" + flags.FileCreatedAfterFN, FileCreatedAfterInput,
|
||||
"--" + flags.FileCreatedBeforeFN, FileCreatedBeforeInput,
|
||||
"--" + flags.FileModifiedAfterFN, FileModifiedAfterInput,
|
||||
"--" + flags.FileModifiedBeforeFN, FileModifiedBeforeInput,
|
||||
}
|
||||
}
|
||||
|
||||
func AssertLibraryFlags(t *testing.T, cmd *cobra.Command) {
|
||||
assert.Equal(t, LibraryInput, flags.LibraryFV)
|
||||
assert.Equal(t, FolderPathInput, flags.FolderPathFV)
|
||||
assert.Equal(t, FileNameInput, flags.FileNameFV)
|
||||
assert.Equal(t, FileCreatedAfterInput, flags.FileCreatedAfterFV)
|
||||
assert.Equal(t, FileCreatedBeforeInput, flags.FileCreatedBeforeFV)
|
||||
assert.Equal(t, FileModifiedAfterInput, flags.FileModifiedAfterFV)
|
||||
assert.Equal(t, FileModifiedBeforeInput, flags.FileModifiedBeforeFV)
|
||||
}
|
||||
25
src/cli/flags/testdata/teamschats.go
vendored
25
src/cli/flags/testdata/teamschats.go
vendored
@ -1,25 +0,0 @@
|
||||
package testdata
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func PreparedTeamsChatsFlags() []string {
|
||||
return []string{
|
||||
// FIXME: populate when adding filters
|
||||
// "--" + flags.ChatCreatedAfterFN, ChatCreatedAfterInput,
|
||||
// "--" + flags.ChatCreatedBeforeFN, ChatCreatedBeforeInput,
|
||||
// "--" + flags.ChatLastMessageAfterFN, ChatLastMessageAfterInput,
|
||||
// "--" + flags.ChatLastMessageBeforeFN, ChatLastMessageBeforeInput,
|
||||
}
|
||||
}
|
||||
|
||||
func AssertTeamsChatsFlags(t *testing.T, cmd *cobra.Command) {
|
||||
// FIXME: populate when adding filters
|
||||
// assert.Equal(t, ChatCreatedAfterInput, flags.ChatCreatedAfterFV)
|
||||
// assert.Equal(t, ChatCreatedBeforeInput, flags.ChatCreatedBeforeFV)
|
||||
// assert.Equal(t, ChatLastMessageAfterInput, flags.ChatLastMessageAfterFV)
|
||||
// assert.Equal(t, ChatLastMessageBeforeInput, flags.ChatLastMessageBeforeFV)
|
||||
}
|
||||
@ -46,12 +46,11 @@ func (ev envVar) MinimumPrintable() any {
|
||||
return ev
|
||||
}
|
||||
|
||||
func (ev envVar) Headers(bool) []string {
|
||||
// NOTE: skipID does not make sense in this context
|
||||
func (ev envVar) Headers() []string {
|
||||
return []string{ev.category, " "}
|
||||
}
|
||||
|
||||
func (ev envVar) Values(bool) []string {
|
||||
func (ev envVar) Values() []string {
|
||||
return []string{ev.name, ev.description}
|
||||
}
|
||||
|
||||
@ -94,7 +93,8 @@ func envGuide(cmd *cobra.Command, args []string) {
|
||||
Info(ctx,
|
||||
"\n--- Environment Variable Guide ---\n",
|
||||
"As a best practice, Corso retrieves credentials and sensitive information from environment variables.\n ",
|
||||
"\n")
|
||||
"\n",
|
||||
)
|
||||
Table(ctx, toPrintable(corsoEVs))
|
||||
Info(ctx, "\n")
|
||||
Table(ctx, toPrintable(azureEVs))
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user