Compare commits

..

12 Commits

Author SHA1 Message Date
Abhishek Pandey
ed6d1066ed Add reset 2023-06-20 14:25:20 -07:00
Abhishek Pandey
1d1603e0ad Add prev delta 2023-06-20 14:25:01 -07:00
Abhishek Pandey
671fd5150d Cherry pick cache integration changes with dummy url cache 2023-06-20 14:00:52 -07:00
Abhishek Pandey
b68336a466 Address review feedback 2023-06-20 13:53:15 -07:00
Abhishek Pandey
099651491d Fix rebase issues 2023-06-20 13:53:15 -07:00
Abhishek Pandey
397a0a6aa3 Remove dummy url cache 2023-06-20 13:53:15 -07:00
Abhishek Pandey
34e1f393cb Add soem more tests 2023-06-20 13:52:57 -07:00
Abhishek Pandey
30e9439287 Utilize mock backup handler 2023-06-20 13:52:40 -07:00
Abhishek Pandey
c13f70ecc7 Add more tests 2023-06-20 13:52:40 -07:00
Abhishek Pandey
01bb562bb5 Misc changes - add urlCacher interface, add some tests for cache integration 2023-06-20 13:52:40 -07:00
Abhishek Pandey
c62bcc6d95 Remove drive ID to url cache map from Collections. Unused 2023-06-20 13:52:03 -07:00
Abhishek Pandey
9bd1b5588a Cherry pick cache integration changes with dummy url cache 2023-06-20 13:52:03 -07:00
893 changed files with 52955 additions and 142492 deletions

View File

@ -35,6 +35,6 @@ body:
id: logs id: logs
attributes: attributes:
label: Relevant log output label: Relevant log output
description: Please run Corso with `--log-level debug --mask-sensitive-data` and attach the log file. description: Please run Corso with `--log-level debug` and attach the log file.
placeholder: This will be automatically formatted, so no need for backticks. placeholder: This will be automatically formatted, so no need for backticks.
render: shell render: shell

View File

@ -1,5 +1,4 @@
name: Backup Restore Test name: Backup Restore Test
description: Run various backup/restore/export tests for a service.
inputs: inputs:
service: service:
@ -8,38 +7,23 @@ inputs:
kind: kind:
description: Kind of test description: Kind of test
required: true required: true
backup-id:
description: Backup to retrieve data out of
required: false
backup-args: backup-args:
description: Arguments to pass for backup description: Arguments to pass for backup
required: false required: false
default: "" default: ""
restore-args: restore-args:
description: Arguments to pass for restore; restore is skipped when missing. description: Arguments to pass for restore
required: false required: false
default: "" default: ""
export-args: test-folder:
description: Arguments to pass for export.
required: false
default: ""
restore-container:
description: Folder to use for testing description: Folder to use for testing
required: true required: true
base-backup:
description: Base backup to use for testing
required: false
log-dir: log-dir:
description: Folder to store test log files description: Folder to store test log files
required: true required: true
on-collision:
description: Value for the --collisions flag
required: false
default: "replace"
with-export:
description: Runs export tests when true
required: false
default: false
category:
description: category of data for given service
required: false
outputs: outputs:
backup-id: backup-id:
@ -53,13 +37,8 @@ runs:
shell: bash shell: bash
working-directory: src working-directory: src
run: | run: |
echo "---------------------------"
echo Backup ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail set -euo pipefail
CATEGORY_SUFFIX="" CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-${{ inputs.service }}-${{inputs.kind }}.log
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-backup-${{inputs.kind }}.log
./corso backup create '${{ inputs.service }}' \ ./corso backup create '${{ inputs.service }}' \
--no-stats --hide-progress --json \ --no-stats --hide-progress --json \
${{ inputs.backup-args }} | ${{ inputs.backup-args }} |
@ -69,22 +48,15 @@ runs:
tee $GITHUB_OUTPUT tee $GITHUB_OUTPUT
- name: Restore ${{ inputs.service }} ${{ inputs.kind }} - name: Restore ${{ inputs.service }} ${{ inputs.kind }}
if: inputs.restore-args
id: restore id: restore
shell: bash shell: bash
working-directory: src working-directory: src
run: | run: |
echo "---------------------------"
echo Restore ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail set -euo pipefail
CATEGORY_SUFFIX="" CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-restore-${{ inputs.service }}-${{inputs.kind }}.log
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
./corso restore '${{ inputs.service }}' \ ./corso restore '${{ inputs.service }}' \
--no-stats \ --no-stats \
--hide-progress \ --hide-progress \
--collisions ${{ inputs.on-collision }} \
${{ inputs.restore-args }} \ ${{ inputs.restore-args }} \
--backup '${{ steps.backup.outputs.result }}' \ --backup '${{ steps.backup.outputs.result }}' \
2>&1 | 2>&1 |
@ -95,125 +67,24 @@ runs:
cat /tmp/corsologs cat /tmp/corsologs
- name: Check restore ${{ inputs.service }} ${{ inputs.kind }} - name: Check ${{ inputs.service }} ${{ inputs.kind }}
if: inputs.restore-args
shell: bash shell: bash
working-directory: src working-directory: src
env: env:
SANITY_TEST_RESTORE_CONTAINER: ${{ steps.restore.outputs.result }} SANITY_RESTORE_FOLDER: ${{ steps.restore.outputs.result }}
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }} SANITY_RESTORE_SERVICE: ${{ inputs.service }}
SANITY_BACKUP_ID: ${{ inputs.backup-id }} TEST_DATA: ${{ inputs.test-folder }}
# lists are not restored to a different folder. they get created adjacent to their originals BASE_BACKUP: ${{ inputs.base-backup }}
# hence SANITY_TEST_RESTORE_CONTAINER_PREFIX is necessary to differentiate restored from original
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
SANITY_TEST_CATEGORY: ${{ inputs.category }}
run: | run: |
echo "---------------------------" CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-validate-${{ inputs.service }}-${{inputs.kind }}.log
echo Sanity Test Restore ${{ inputs.service }} ${{ inputs.kind }} ./sanity-test
echo "---------------------------"
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
./sanity-test restore ${{ inputs.service }}
- name: Export ${{ inputs.service }} ${{ inputs.kind }}
if: ${{ inputs.with-export == 'true' }}
id: export
shell: bash
working-directory: src
run: |
echo "---------------------------"
echo Export ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
./corso export '${{ inputs.service }}' \
/tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }} \
--no-stats \
--hide-progress \
${{ inputs.export-args }} \
--backup '${{ steps.backup.outputs.result }}'
cat /tmp/corsologs
- name: Check export ${{ inputs.service }} ${{ inputs.kind }}
if: ${{ inputs.with-export == 'true' }}
shell: bash
working-directory: src
env:
SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}${{ inputs.category && '-' }}${{ inputs.category }}-${{ inputs.kind }}
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
SANITY_BACKUP_ID: ${{ inputs.backup-id }}
# applies only for sharepoint lists
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
SANITY_TEST_CATEGORY: ${{ inputs.category }}
run: |
echo "---------------------------"
echo Sanity-Test Export ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
./sanity-test export ${{ inputs.service }}
- name: Export archive ${{ inputs.service }} ${{ inputs.kind }}
if: ${{ inputs.with-export == 'true' }}
id: export-archive
shell: bash
working-directory: src
run: |
echo "---------------------------"
echo Export Archive ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
./corso export '${{ inputs.service }}' \
/tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-archive \
--no-stats \
--hide-progress \
--archive \
${{ inputs.export-args }} \
--backup '${{ steps.backup.outputs.result }}'
unzip /tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-archive/*.zip \
-d /tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-unzipped
cat /tmp/corsologs
- name: Check archive export ${{ inputs.service }} ${{ inputs.kind }}
if: ${{ inputs.with-export == 'true' }}
shell: bash
working-directory: src
env:
SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}${{ inputs.category && '-' }}${{ inputs.category }}-${{inputs.kind }}-unzipped
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }}
SANITY_BACKUP_ID: ${{ inputs.backup-id }}
# applies only for sharepoint lists
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
SANITY_TEST_CATEGORY: ${{ inputs.category }}
run: |
echo "---------------------------"
echo Sanity-Test Export Archive ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
./sanity-test export ${{ inputs.service }}
- name: List ${{ inputs.service }} ${{ inputs.kind }} - name: List ${{ inputs.service }} ${{ inputs.kind }}
shell: bash shell: bash
working-directory: src working-directory: src
run: | run: |
echo "---------------------------"
echo Backup list ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail set -euo pipefail
CATEGORY_SUFFIX="" CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-${{ inputs.service }}-${{inputs.kind }}.log
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-${{ inputs.service }}${CATEGORY_SUFFIX}-list-${{inputs.kind }}.log
./corso backup list ${{ inputs.service }} \ ./corso backup list ${{ inputs.service }} \
--no-stats \ --no-stats \
--hide-progress \ --hide-progress \
@ -230,14 +101,8 @@ runs:
shell: bash shell: bash
working-directory: src working-directory: src
run: | run: |
echo "---------------------------"
echo Backup List w/ Backup ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------"
set -euo pipefail set -euo pipefail
# Include category in the log file name if present CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-single-${{ inputs.service }}-${{inputs.kind }}.log
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-${{ inputs.service }}${CATEGORY_SUFFIX}-single-${{inputs.kind }}.log
./corso backup list ${{ inputs.service }} \ ./corso backup list ${{ inputs.service }} \
--no-stats \ --no-stats \
--hide-progress \ --hide-progress \
@ -251,13 +116,7 @@ runs:
exit 1 exit 1
fi fi
- if: always() # Upload the original go test output as an artifact for later review.
shell: bash
run: |
echo "---------------------------"
echo Logging Results
echo "---------------------------"
- name: Upload test log - name: Upload test log
if: always() if: always()
uses: actions/upload-artifact@v3 uses: actions/upload-artifact@v3
@ -265,4 +124,4 @@ runs:
name: "${{ inputs.service }}-${{ inputs.kind }}-logs" name: "${{ inputs.service }}-${{ inputs.kind }}-logs"
path: ${{ inputs.log-dir }}/* path: ${{ inputs.log-dir }}/*
if-no-files-found: error if-no-files-found: error
retention-days: 14 retention-days: 14

View File

@ -1,5 +1,4 @@
name: Setup and Cache Golang name: Setup and Cache Golang
description: Build golang binaries for later use in CI.
# clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml # clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml
# #

View File

@ -1,5 +1,4 @@
name: Publish Binary name: Publish Binary
description: Publish binary artifacts.
inputs: inputs:
version: version:
@ -37,7 +36,7 @@ runs:
uses: goreleaser/goreleaser-action@v4 uses: goreleaser/goreleaser-action@v4
with: with:
version: latest version: latest
args: release --clean --timeout 500m --parallelism 1 ${{ env.grflags }} args: release --rm-dist --timeout 500m --parallelism 1 ${{ env.grflags }}
workdir: src workdir: src
env: env:
GITHUB_TOKEN: ${{ inputs.github_token }} GITHUB_TOKEN: ${{ inputs.github_token }}

View File

@ -1,5 +1,4 @@
name: Publish Website name: Publish Website
description: Publish website artifacts.
inputs: inputs:
aws-iam-role: aws-iam-role:

View File

@ -1,5 +1,4 @@
name: Purge M365 User Data name: Purge M365 User Data
description: Deletes M365 data generated during CI tests.
# Hard deletion of an m365 user's data. Our CI processes create a lot # Hard deletion of an m365 user's data. Our CI processes create a lot
# of data churn (creation and immediate deletion) of files, the likes # of data churn (creation and immediate deletion) of files, the likes
@ -20,9 +19,7 @@ inputs:
site: site:
description: Sharepoint site where data is to be purged. description: Sharepoint site where data is to be purged.
libraries: libraries:
description: List of library names within the site where data is to be purged. description: List of library names within site where data is to be purged.
library-prefix:
description: List of library names within the site where the library will get deleted entirely.
folder-prefix: folder-prefix:
description: Name of the folder to be purged. If falsy, will purge the set of static, well known folders instead. description: Name of the folder to be purged. If falsy, will purge the set of static, well known folders instead.
older-than: older-than:
@ -31,19 +28,12 @@ inputs:
description: Secret value of for AZURE_CLIENT_ID description: Secret value of for AZURE_CLIENT_ID
azure-client-secret: azure-client-secret:
description: Secret value of for AZURE_CLIENT_SECRET description: Secret value of for AZURE_CLIENT_SECRET
azure-pnp-client-id:
description: Secret value of AZURE_PNP_CLIENT_ID
azure-pnp-client-cert:
description: Base64 encoded private certificate for the azure-pnp-client-id (Secret value of AZURE_PNP_CLIENT_CERT)
azure-tenant-id: azure-tenant-id:
description: Secret value of AZURE_TENANT_ID description: Secret value of for AZURE_TENANT_ID
m365-admin-user: m365-admin-user:
description: Secret value of for M365_TENANT_ADMIN_USER description: Secret value of for M365_TENANT_ADMIN_USER
m365-admin-password: m365-admin-password:
description: Secret value of for M365_TENANT_ADMIN_PASSWORD description: Secret value of for M365_TENANT_ADMIN_PASSWORD
tenant-domain:
description: The domain of the tenant (ex. 10rqc2.onmicrosft.com)
required: true
runs: runs:
using: composite using: composite
@ -62,22 +52,17 @@ runs:
AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }} AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }} AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
run: | run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++) ./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
{
if (./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
# TODO(ashmrtn): Re-enable when we figure out errors we're seeing with Get-Mailbox call. - name: Reset retention for all mailboxes to 0
#- name: Reset retention for all mailboxes to 0 if: ${{ inputs.user == '' }}
# if: ${{ inputs.user == '' }} shell: pwsh
# shell: pwsh working-directory: ./src/cmd/purge/scripts
# working-directory: ./src/cmd/purge/scripts env:
# env: M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
# M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }} M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
# M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }} run: |
# run: ./exchangeRetention.ps1 ./exchangeRetention.ps1
################################################################################################################ ################################################################################################################
# OneDrive # OneDrive
@ -88,16 +73,10 @@ runs:
shell: pwsh shell: pwsh
working-directory: ./src/cmd/purge/scripts working-directory: ./src/cmd/purge/scripts
env: env:
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }} M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }} M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
run: | run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++) ./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
{
if (./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
################################################################################################################ ################################################################################################################
# Sharepoint # Sharepoint
@ -108,14 +87,7 @@ runs:
shell: pwsh shell: pwsh
working-directory: ./src/cmd/purge/scripts working-directory: ./src/cmd/purge/scripts
env: env:
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }} M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }} M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
run: | run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++) ./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -PurgeBeforeTimestamp ${{ inputs.older-than }}
{
if (./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}

View File

@ -1,92 +0,0 @@
name: Send a message to Teams
description: Send messages to communication apps.
inputs:
msg:
description: The teams message text
teams_url:
description: passthrough for secrets.TEAMS_CORSO_CI_WEBHOOK_URL
runs:
using: composite
steps:
- uses: actions/checkout@v3
- name: set github ref
shell: bash
run: |
echo "github_reference=${{ github.ref }}" >> $GITHUB_ENV
- name: trim github ref
shell: bash
run: |
echo "trimmed_ref=${github_reference#refs/}" >> $GITHUB_ENV
- name: build urls
shell: bash
run: |
echo "logurl=$(printf 'https://github.com/alcionai/corso/actions/runs/%s' ${{ github.run_id }})" >> $GITHUB_ENV
echo "commiturl=$(printf 'https://github.com/alcionai/corso/commit/%s' ${{ github.sha }})" >> $GITHUB_ENV
echo "refurl=$(printf 'https://github.com/alcionai/corso/%s' ${{ env.trimmed_ref }})" >> $GITHUB_ENV
- name: use url or blank val
shell: bash
run: |
echo "STEP=${{ env.trimmed_ref || '' }}" >> $GITHUB_ENV
echo "JOB=${{ github.job || '' }}" >> $GITHUB_ENV
echo "LOGS=${{ github.run_id && env.logurl || '-' }}" >> $GITHUB_ENV
echo "COMMIT=${{ github.sha && env.commiturl || '-' }}" >> $GITHUB_ENV
echo "REF=${{ env.trimmed_ref && env.refurl || '-' }}" >> $GITHUB_ENV
- name: Send JSON payload to Teams Webhook
shell: bash
run: |
curl -X POST \
-H "Content-Type: application/json" \
-d '{
"type":"message",
"attachments":[
{
"contentType":"application/vnd.microsoft.card.adaptive",
"contentUrl":null,
"content":{
"$schema":"http://adaptivecards.io/schemas/adaptive-card.json",
"type":"AdaptiveCard",
"body": [
{
"type": "TextBlock",
"size": "Medium",
"weight": "Bolder",
"text": "${{ inputs.msg }}",
"color": "Attention"
},
{
"type": "TextBlock",
"text": "${{ env.JOB }} :: ${{ env.STEP }}",
"wrap": true
}
],
"actions": [
{
"type": "Action.OpenUrl",
"title": "Action",
"url": "${{ env.LOGS }}"
},
{
"type": "Action.OpenUrl",
"title": "Commit",
"url": "${{ env.COMMIT }}"
},
{
"type": "Action.OpenUrl",
"title": "Ref",
"url": "${{ env.REF }}"
}
],
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"version": "1.5"
}
}
]
}' \
${{ inputs.teams_url }}

View File

@ -1,5 +1,4 @@
name: Lint Website name: Lint Website
description: Lint website content.
inputs: inputs:
version: version:

View File

@ -24,11 +24,11 @@ jobs:
websitefileschanged: ${{ steps.websitechecker.outputs.websitefileschanged }} websitefileschanged: ${{ steps.websitechecker.outputs.websitefileschanged }}
actionsfileschanged: ${{ steps.actionschecker.outputs.actionsfileschanged }} actionsfileschanged: ${{ steps.actionschecker.outputs.actionsfileschanged }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
# only run CI tests if the src folder or workflow actions have changed # only run CI tests if the src folder or workflow actions have changed
- name: Check for file changes in src/ or .github/workflows/ - name: Check for file changes in src/ or .github/workflows/
uses: dorny/paths-filter@v3 uses: dorny/paths-filter@v2
id: dornycheck id: dornycheck
with: with:
list-files: json list-files: json

View File

@ -9,7 +9,7 @@ jobs:
outputs: outputs:
version: ${{ steps.version.outputs.version }} version: ${{ steps.version.outputs.version }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: Get version string - name: Get version string
id: version id: version
@ -26,7 +26,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: Publish Binary - name: Publish Binary
uses: ./.github/actions/publish-binary uses: ./.github/actions/publish-binary
@ -34,11 +34,4 @@ jobs:
version: ${{ needs.SetEnv.outputs.version }} version: ${{ needs.SetEnv.outputs.version }}
github_token: ${{ secrets.GITHUB_TOKEN }} github_token: ${{ secrets.GITHUB_TOKEN }}
rudderstack_write_key: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }} rudderstack_write_key: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}
rudderstack_data_plane_url: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }} rudderstack_data_plane_url: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Publishing Binary"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -40,7 +40,7 @@ jobs:
run: run:
working-directory: src working-directory: src
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
# single setup and sum cache handling here. # single setup and sum cache handling here.
# the results will cascade onto both testing and linting. # the results will cascade onto both testing and linting.
@ -63,7 +63,7 @@ jobs:
website-bucket: ${{ steps.website-bucket.outputs.website-bucket }} website-bucket: ${{ steps.website-bucket.outputs.website-bucket }}
website-cfid: ${{ steps.website-cfid.outputs.website-cfid }} website-cfid: ${{ steps.website-cfid.outputs.website-cfid }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: Figure out environment - name: Figure out environment
id: environment id: environment
@ -110,11 +110,10 @@ jobs:
needs: [Precheck, Checkout, SetEnv] needs: [Precheck, Checkout, SetEnv]
environment: Testing environment: Testing
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 30
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.websitefileschanged == 'true' # websitefileschanged also includes srcfileschanged if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.websitefileschanged == 'true' # websitefileschanged also includes srcfileschanged
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: Lint Website - name: Lint Website
uses: ./.github/actions/website-linting uses: ./.github/actions/website-linting
@ -129,7 +128,6 @@ jobs:
needs: [Precheck, Checkout, SetM365App] needs: [Precheck, Checkout, SetM365App]
environment: Testing environment: Testing
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 120
if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository) if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository)
defaults: defaults:
run: run:
@ -142,7 +140,7 @@ jobs:
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-ci.log CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-ci.log
LOG_GRAPH_REQUESTS: true LOG_GRAPH_REQUESTS: true
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: Setup Golang with cache - name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4 uses: magnetikonline/action-golang-cache@v4
@ -157,7 +155,7 @@ jobs:
# AWS creds # AWS creds
- name: Configure AWS credentials from Test account - name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v4 uses: aws-actions/configure-aws-credentials@v2
with: with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }} role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing role-session-name: integration-testing
@ -173,7 +171,6 @@ jobs:
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }} CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }} CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }} CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
S3_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }}
run: | run: |
set -euo pipefail set -euo pipefail
go test \ go test \
@ -182,96 +179,24 @@ jobs:
-v \ -v \
-failfast \ -failfast \
-p 1 \ -p 1 \
-timeout 20m \ -timeout 15m \
./... \ ./... \
2>&1 | tee ./testlog/gotest-ci.log | gotestfmt -hide successful-tests 2>&1 | tee ./testlog/gotest-ci.log | gotestfmt -hide successful-tests
# Upload the original go test output as an artifact for later review. # Upload the original go test output as an artifact for later review.
- name: Upload test log - name: Upload test log
if: failure() if: failure()
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
with: with:
name: ci-test-log name: ci-test-log
path: src/testlog/* path: src/testlog/*
if-no-files-found: error if-no-files-found: error
retention-days: 14 retention-days: 14
Retention-Test-Suite-Trusted:
needs: [Precheck, Checkout, SetM365App]
environment: Testing
runs-on: ubuntu-latest
timeout-minutes: 30
if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository)
defaults:
run:
working-directory: src
env:
# Resolve the name of the secret that contains the Azure client ID/secret
AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }}
AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-ci-retention.log
LOG_GRAPH_REQUESTS: true
steps:
- uses: actions/checkout@v4
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4
with:
go-version-file: src/go.mod
- run: mkdir testlog
# Install gotestfmt
- name: Set up gotestfmt
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
# AWS creds
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v4
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing
aws-region: us-east-1
# run the tests
- name: Retention Tests
env:
AZURE_CLIENT_ID: ${{ secrets[env.AZURE_CLIENT_ID_NAME] }}
AZURE_CLIENT_SECRET: ${{ secrets[env.AZURE_CLIENT_SECRET_NAME] }}
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
CORSO_RETENTION_TESTS: true
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
S3_BUCKET: ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }}
run: |
set -euo pipefail
go test \
-tags testing \
-json \
-v \
-failfast \
-p 1 \
-timeout 10m \
./... \
2>&1 | tee ./testlog/gotest-ci.log | gotestfmt -hide successful-tests
# Upload the original go test output as an artifact for later review.
- name: Upload test log
if: failure()
uses: actions/upload-artifact@v4
with:
name: ci-retention-test-log
path: src/testlog/*
if-no-files-found: error
retention-days: 14
Unit-Test-Suite: Unit-Test-Suite:
needs: [Precheck, Checkout] needs: [Precheck, Checkout]
environment: Testing environment: Testing
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 30
if: needs.precheck.outputs.srcfileschanged == 'true' if: needs.precheck.outputs.srcfileschanged == 'true'
defaults: defaults:
run: run:
@ -280,7 +205,7 @@ jobs:
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-unit.log CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-unit.log
LOG_GRAPH_REQUESTS: true LOG_GRAPH_REQUESTS: true
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: Setup Golang with cache - name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4 uses: magnetikonline/action-golang-cache@v4
@ -308,14 +233,14 @@ jobs:
-v \ -v \
-failfast \ -failfast \
-p 1 \ -p 1 \
-timeout 20m \ -timeout 15m \
./... \ ./... \
2>&1 | tee ./testlog/gotest-unit.log | gotestfmt -hide successful-tests 2>&1 | tee ./testlog/gotest-unit.log | gotestfmt -hide successful-tests
# Upload the original go test output as an artifact for later review. # Upload the original go test output as an artifact for later review.
- name: Upload test log - name: Upload test log
if: failure() if: failure()
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
with: with:
name: unit-test-log name: unit-test-log
path: src/testlog/* path: src/testlog/*
@ -360,7 +285,7 @@ jobs:
# Check out merge commit # Check out merge commit
- name: Fork based /ok-to-test checkout - name: Fork based /ok-to-test checkout
uses: actions/checkout@v4 uses: actions/checkout@v3
with: with:
ref: "refs/pull/${{ github.event.client_payload.pull_request.number }}/merge" ref: "refs/pull/${{ github.event.client_payload.pull_request.number }}/merge"
@ -377,7 +302,7 @@ jobs:
# AWS creds # AWS creds
- name: Configure AWS credentials from Test account - name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v4 uses: aws-actions/configure-aws-credentials@v2
with: with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }} role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing role-session-name: integration-testing
@ -404,7 +329,7 @@ jobs:
# Upload the original go test log as an artifact for later review. # Upload the original go test log as an artifact for later review.
- name: Upload test log - name: Upload test log
if: failure() if: failure()
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
with: with:
name: fork-test-log name: fork-test-log
path: src/testlog/* path: src/testlog/*
@ -412,7 +337,7 @@ jobs:
retention-days: 14 retention-days: 14
# Update check run called "Test-Suite-Fork" # Update check run called "Test-Suite-Fork"
- uses: actions/github-script@v7 - uses: actions/github-script@v6
id: update-check-run id: update-check-run
if: failure() if: failure()
env: env:
@ -449,13 +374,12 @@ jobs:
needs: [Precheck, Checkout] needs: [Precheck, Checkout]
environment: Testing environment: Testing
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 30
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.srcfileschanged == 'true' if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.srcfileschanged == 'true'
defaults: defaults:
run: run:
working-directory: src working-directory: src
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: Setup Golang with cache - name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4 uses: magnetikonline/action-golang-cache@v4
@ -463,11 +387,11 @@ jobs:
go-version-file: src/go.mod go-version-file: src/go.mod
- name: Go Lint - name: Go Lint
uses: golangci/golangci-lint-action@v4 uses: golangci/golangci-lint-action@v3
with: with:
# Keep pinned to a verson as sometimes updates will add new lint # Keep pinned to a verson as sometimes updates will add new lint
# failures in unchanged code. # failures in unchanged code.
version: v1.54.2 version: v1.52.2
working-directory: src working-directory: src
skip-pkg-cache: true skip-pkg-cache: true
skip-build-cache: true skip-build-cache: true
@ -485,53 +409,6 @@ jobs:
install-go: false install-go: false
working-directory: src working-directory: src
- name: Run allowtags
run: |
go install github.com/ashmrtn/allowtags@latest
allowtags --allow-key json --allow-key uriparametername ./...
# I could not find a way to install tree-grepper without nix
# https://github.com/BrianHicks/tree-grepper/issues/293
- uses: cachix/install-nix-action@v25
- uses: cachix/cachix-action@v14
with:
name: tree-grepper
- run: nix-env -if https://github.com/BrianHicks/tree-grepper/archive/refs/heads/main.tar.gz
- name: Run trailing comma lint rule
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '(argument_list "," @no-trailing-comma .)' | grep .; then
echo "No trailing commas for function calls"
exit 1
fi
- name: Check for empty string comparison
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '((binary_expression (identifier) ["==" "!="] (interpreted_string_literal) @_ri) @exp (#eq? @_ri "\"\""))' | grep .; then
echo "Use len check instead of empty string comparison"
exit 1
fi
- name: Check for cases where errors are not propagated
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '((if_statement (binary_expression) @_if (block (return_statement (expression_list (call_expression (selector_expression) @_fun ) @ret .)))) (#match? @_if "err != nil") (#match? @_fun "clues.NewWC"))' | grep .; then
echo "Make sure to propagate errors with clues"
exit 1
fi
- name: Check if clues without context are used when context is passed in
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#match? @clue "WC$"))' | grep .; then
echo "Do not use clues.*WC when context is passed in"
exit 1
fi
- name: Check clues with context is used when context is not passed in
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#not-eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#not-match? @clue "WC$"))' | grep .; then
echo "Use clues.*WC when context is not passed in"
exit 1
fi
# ---------------------------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------------------------
# --- GitHub Actions Linting ------------------------------------------------------------------------- # --- GitHub Actions Linting -------------------------------------------------------------------------
@ -543,7 +420,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: needs.precheck.outputs.actionsfileschanged == 'true' if: needs.precheck.outputs.actionsfileschanged == 'true'
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: actionlint - name: actionlint
uses: raven-actions/actionlint@v1 uses: raven-actions/actionlint@v1
@ -568,7 +445,7 @@ jobs:
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: Publish Binary - name: Publish Binary
uses: ./.github/actions/publish-binary uses: ./.github/actions/publish-binary
@ -589,17 +466,17 @@ jobs:
env: env:
PLATFORMS: linux/amd64,linux/arm64 PLATFORMS: linux/amd64,linux/arm64
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
# Setup buildx # Setup buildx
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v3 uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v2
# retrieve credentials for ghcr.io # retrieve credentials for ghcr.io
- name: Login to Github Packages - name: Login to Github Packages
uses: docker/login-action@v3 uses: docker/login-action@v2
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.actor }} username: ${{ github.actor }}
@ -607,7 +484,7 @@ jobs:
- name: Extract metadata (tags, labels) for Docker - name: Extract metadata (tags, labels) for Docker
id: meta id: meta
uses: docker/metadata-action@v5 uses: docker/metadata-action@v4
with: with:
images: ${{ env.IMAGE_NAME }} images: ${{ env.IMAGE_NAME }}
tags: | tags: |
@ -617,7 +494,7 @@ jobs:
# deploy the image # deploy the image
- name: Build image and push to GitHub Container Registry - name: Build image and push to GitHub Container Registry
uses: docker/build-push-action@v5 uses: docker/build-push-action@v3
with: with:
context: . context: .
file: ./build/Dockerfile file: ./build/Dockerfile
@ -670,9 +547,9 @@ jobs:
CORSO_VERSION: ${{ needs.SetEnv.outputs.version }} CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
steps: steps:
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v3 uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v2
- name: Validate amd64 container images - name: Validate amd64 container images
run: | run: |
@ -727,7 +604,7 @@ jobs:
if: github.ref == 'refs/heads/main' if: github.ref == 'refs/heads/main'
steps: steps:
- uses: actions/checkout@v4 # need to checkout to make the action available - uses: actions/checkout@v3 # need to checkout to make the action available
- name: Publish website - name: Publish website
uses: ./.github/actions/publish-website uses: ./.github/actions/publish-website
@ -743,7 +620,7 @@ jobs:
if: startsWith(github.ref, 'refs/tags/') if: startsWith(github.ref, 'refs/tags/')
steps: steps:
- uses: actions/checkout@v4 # need to checkout to make the action available - uses: actions/checkout@v3 # need to checkout to make the action available
- name: Publish website - name: Publish website
uses: ./.github/actions/publish-website uses: ./.github/actions/publish-website

View File

@ -12,15 +12,18 @@ jobs:
continue-on-error: true continue-on-error: true
strategy: strategy:
matrix: matrix:
user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, ""] user: [ CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, EXT_SDK_TEST_USER_ID, '' ]
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
# sets the maximum time to now-30m. # sets the maximum time to now-30m.
# CI test have a 20 minute timeout. # CI test have a 10 minute timeout.
# At 20 minutes ago, we should be safe from conflicts.
# The additional 10 minutes is just to be good citizens.
- name: Set purge boundary - name: Set purge boundary
run: echo "HALF_HOUR_AGO=$(date -d '30 minutes ago' -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV run: |
echo "HALF_HOUR_AGO=$(date -d '30 minutes ago' -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
- name: Purge CI-Produced Folders for Users - name: Purge CI-Produced Folders for Users
uses: ./.github/actions/purge-m365-data uses: ./.github/actions/purge-m365-data
@ -33,16 +36,6 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }} azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }} m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }} m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] ${{ vars[matrix.user] }} CI Cleanup"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}
Test-Site-Data-Cleanup: Test-Site-Data-Cleanup:
environment: Testing environment: Testing
@ -50,36 +43,28 @@ jobs:
continue-on-error: true continue-on-error: true
strategy: strategy:
matrix: matrix:
site: [CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL] site: [ CORSO_M365_TEST_SITE_URL, EXT_SDK_TEST_SITE_URL ]
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
# sets the maximum time to now-30m. # sets the maximum time to now-30m.
# CI test have a 20 minute timeout. # CI test have a 10 minute timeout.
# At 20 minutes ago, we should be safe from conflicts.
# The additional 10 minutes is just to be good citizens.
- name: Set purge boundary - name: Set purge boundary
run: echo "HALF_HOUR_AGO=$(date -d '30 minutes ago' -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV run: |
echo "HALF_HOUR_AGO=$(date -d '30 minutes ago' -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
- name: Purge CI-Produced Folders for Sites - name: Purge CI-Produced Folders for Sites
uses: ./.github/actions/purge-m365-data uses: ./.github/actions/purge-m365-data
with: with:
site: ${{ vars[matrix.site] }} site: ${{ vars[matrix.site] }}
folder-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }} folder-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }}
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }} libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
library-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }}
older-than: ${{ env.HALF_HOUR_AGO }} older-than: ${{ env.HALF_HOUR_AGO }}
azure-client-id: ${{ secrets.CLIENT_ID }} azure-client-id: ${{ secrets.CLIENT_ID }}
azure-client-secret: ${{ secrets.CLIENT_SECRET }} azure-client-secret: ${{ secrets.CLIENT_SECRET }}
azure-tenant-id: ${{ secrets.TENANT_ID }} azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }} m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }} m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] ${{ vars[matrix.site] }} CI Cleanup"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -26,7 +26,7 @@ jobs:
run: run:
working-directory: src working-directory: src
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: Build the otel-daemon - name: Build the otel-daemon
run: make build-otel-daemon run: make build-otel-daemon
@ -107,7 +107,7 @@ jobs:
# package all artifacts for later review # package all artifacts for later review
- name: Upload Log, Profilers, Traces - name: Upload Log, Profilers, Traces
if: always() if: always()
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
with: with:
name: load-test-profiling name: load-test-profiling
path: ${{ github.workspace }}/testlog/* path: ${{ github.workspace }}/testlog/*
@ -120,7 +120,7 @@ jobs:
outputs: outputs:
matrix: ${{ steps.build.outputs.matrix }} matrix: ${{ steps.build.outputs.matrix }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- id: build - id: build
run: | run: |
u=$(echo "${{ secrets.CORSO_M365_LOAD_TEST_ORG_USERS }}" | sed 's/\[/["/g' | sed 's/\]/"]/g' | sed 's/|/","/g') u=$(echo "${{ secrets.CORSO_M365_LOAD_TEST_ORG_USERS }}" | sed 's/\[/["/g' | sed 's/\]/"]/g' | sed 's/|/","/g')
@ -140,7 +140,7 @@ jobs:
matrix: matrix:
user: [ CORSO_M365_LOAD_TEST_USER_ID, '' ] user: [ CORSO_M365_LOAD_TEST_USER_ID, '' ]
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: Set folder boundary datetime - name: Set folder boundary datetime
run: | run: |
echo "NOW=$(date -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV echo "NOW=$(date -u +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
@ -155,6 +155,3 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }} azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }} m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }} m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}

View File

@ -1,396 +0,0 @@
name: Longevity Testing
on:
schedule:
# Run every day at 04:00 GMT (roughly 8pm PST)
- cron: "0 4 * * *"
workflow_dispatch:
inputs:
user:
description: "User to run longevity test on"
permissions:
# required to retrieve AWS credentials
id-token: write
contents: write
# cancel currently running jobs if a new version of the branch is pushed
concurrency:
group: longevity_testing-${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
SetM365App:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main
Longevity-Tests:
needs: [SetM365App]
environment: Testing
runs-on: ubuntu-latest
env:
# Need these in the local env so that corso can read them
AZURE_CLIENT_ID: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
AZURE_CLIENT_SECRET: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
# re-used values
CORSO_LOG_DIR: ${{ github.workspace }}/src/testlog
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-longevity.log
RESTORE_DEST_PFX: Corso_Test_Longevity_
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }}
PREFIX: "longevity"
# Options for retention.
RETENTION_MODE: GOVERNANCE
# Time to retain blobs for in hours.
RETENTION_DURATION: 216
defaults:
run:
working-directory: src
############################################################################
# setup
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0 # needed to get latest tag
- name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4
with:
go-version-file: src/go.mod
- run: |
go build -o longevity-test ./cmd/longevity_test
go build -o s3checker ./cmd/s3checker
- name: Get version string
id: version
run: |
echo version=$(git describe --tags --abbrev=0) | tee -a $GITHUB_OUTPUT
# Checkout the .github directory at the original branch's ref so we have a
# stable view of the actions.
- name: Code Checkout
working-directory: ${{ github.workspace }}
run: |
git checkout ${{ steps.version.outputs.version }}
git checkout ${{ github.ref }} -- .github
- run: go build -o corso
timeout-minutes: 10
- run: mkdir ${CORSO_LOG_DIR}
# Use shorter-lived credentials obtained from assume-role since these
# runs haven't been taking long.
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v4
timeout-minutes: 10
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing
aws-region: us-east-1
##########################################################################
# Repository commands
- name: Version Test
timeout-minutes: 10
run: |
./corso --version | grep -c 'Corso version:'
- name: Repo init test
id: repo-init
timeout-minutes: 10
run: |
set -euo pipefail
echo -e "\nRepo init test\n" >> ${{ env.CORSO_LOG_FILE }}
./corso repo init s3 \
--no-stats \
--hide-progress \
--retention-mode $(echo "${{ env.RETENTION_MODE }}" | tr '[:upper:]' '[:lower:]') \
--retention-duration "${{ env.RETENTION_DURATION }}h" \
--extend-retention \
--prefix ${{ env.PREFIX }} \
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
if grep -q 'Failed to' ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
then
echo "Repo could not be initialized"
exit 1
fi
- name: Repo connect test
timeout-minutes: 10
run: |
set -euo pipefail
echo -e "\nRepo connect test\n" >> ${{ env.CORSO_LOG_FILE }}
./corso repo connect s3 \
--no-stats \
--hide-progress \
--prefix ${{ env.PREFIX }} \
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-connect.log
if ! grep -q 'Connected to S3 bucket' ${{ env.CORSO_LOG_DIR }}/gotest-repo-connect.log
then
echo "Repo could not be connected"
exit 1
fi
##########################################################################
# Exchange
- name: Backup exchange test
id: exchange-test
timeout-minutes: 30
run: |
echo -e "\nBackup Exchange test\n" >> ${CORSO_LOG_FILE}
./corso backup create exchange \
--no-stats \
--mailbox "${TEST_USER}" \
--hide-progress \
--json \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/backup_exchange.txt
resultjson=$(sed -e '1,/Completed Backups/d' ${{ env.CORSO_LOG_DIR }}/backup_exchange.txt )
if [[ $( echo $resultjson | jq -r '.[0] | .stats.errorCount') -ne 0 ]]; then
echo "backup was not successful"
exit 1
fi
data=$( echo $resultjson | jq -r '.[0] | .id' )
echo result=$data >> $GITHUB_OUTPUT
##########################################################################
# Onedrive
- name: Backup onedrive test
id: onedrive-test
timeout-minutes: 30
run: |
set -euo pipefail
echo -e "\nBackup OneDrive test\n" >> ${CORSO_LOG_FILE}
./corso backup create onedrive \
--no-stats \
--hide-progress \
--user "${TEST_USER}" \
--json \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/backup_onedrive.txt
resultjson=$(sed -e '1,/Completed Backups/d' ${{ env.CORSO_LOG_DIR }}/backup_onedrive.txt )
if [[ $( echo $resultjson | jq -r '.[0] | .stats.errorCount') -ne 0 ]]; then
echo "backup was not successful"
exit 1
fi
data=$( echo $resultjson | jq -r '.[0] | .id' )
echo result=$data >> $GITHUB_OUTPUT
##########################################################################
# Sharepoint test
- name: Backup sharepoint test
id: sharepoint-test
timeout-minutes: 30
run: |
set -euo pipefail
echo -e "\nBackup SharePoint test\n" >> ${CORSO_LOG_FILE}
./corso backup create sharepoint \
--no-stats \
--hide-progress \
--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" \
--json \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/backup_sharepoint.txt
resultjson=$(sed -e '1,/Completed Backups/d' ${{ env.CORSO_LOG_DIR }}/backup_sharepoint.txt )
if [[ $( echo $resultjson | jq -r '.[0] | .stats.errorCount') -ne 0 ]]; then
echo "backup was not successful"
exit 1
fi
data=$( echo $resultjson | jq -r '.[0] | .id' )
echo result=$data >> $GITHUB_OUTPUT
##########################################################################
# Backup Exchange Deletion test
- name: Backup Delete exchange test
id: delete-exchange-test
timeout-minutes: 30
env:
SERVICE: "exchange"
DELETION_DAYS: 10
run: |
set -euo pipefail
echo -e "\nDelete Backup exchange \n" >> ${CORSO_LOG_FILE}
./longevity-test
##########################################################################
# Backup Onedrive Deletion test
- name: Backup Delete onedrive test
id: delete-onedrive-test
timeout-minutes: 30
env:
SERVICE: "onedrive"
DELETION_DAYS: 10
run: |
set -euo pipefail
echo -e "\nDelete Backup onedrive \n" >> ${CORSO_LOG_FILE}
./longevity-test
##########################################################################
# Backup Sharepoint Deletion test
- name: Backup Delete Sharepoint test
id: delete-sharepoint-test
timeout-minutes: 30
env:
SERVICE: "sharepoint"
DELETION_DAYS: 5
run: |
set -euo pipefail
echo -e "\nDelete Backup sharepoint \n" >> ${CORSO_LOG_FILE}
./longevity-test
##########################################################################
# Export OneDrive Test
- name: OneDrive Export test
timeout-minutes: 30
run: |
set -euo pipefail
echo -e "\Export OneDrive test\n" >> ${CORSO_LOG_FILE}
echo -e "\Export OneDrive test - first entry\n" >> ${CORSO_LOG_FILE}
./corso backup list onedrive 2>/dev/null | tail -n+2 | head -n1 | awk '{print $1}' |
while read -r line; do
./corso export onedrive \
"/tmp/corso-export--$line" \
--no-stats \
--backup "$line" \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_onedrive_first.txt
done
echo -e "\Export OneDrive test - last entry\n" >> ${CORSO_LOG_FILE}
./corso backup list onedrive 2>/dev/null | tail -n1 | awk '{print $1}' |
while read -r line; do
./corso export onedrive \
"/tmp/corso-export--$line" \
--no-stats \
--backup "$line" \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_onedrive_last.txt
done
##########################################################################
# Export SharePoint Test
- name: SharePoint Export test
timeout-minutes: 30
run: |
set -euo pipefail
echo -e "\Export SharePoint test\n" >> ${CORSO_LOG_FILE}
echo -e "\Export SharePoint test - first entry\n" >> ${CORSO_LOG_FILE}
./corso backup list sharepoint 2>/dev/null | tail -n+2 | head -n1 | awk '{print $1}' |
while read -r line; do
./corso export sharepoint \
"/tmp/corso-export--$line" \
--no-stats \
--backup "$line" \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_sharepoint_first.txt
done
echo -e "\Export SharePoint test - last entry\n" >> ${CORSO_LOG_FILE}
./corso backup list sharepoint 2>/dev/null | tail -n1 | awk '{print $1}' |
while read -r line; do
./corso export sharepoint \
"/tmp/corso-export--$line" \
--no-stats \
--backup "$line" \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_sharepoint_last.txt
done
##########################################################################
# Maintenance test
- name: Maintenance test Daily
id: maintenance-test-daily
timeout-minutes: 30
run: |
set -euo pipefail
echo -e "\n Maintenance test Daily\n" >> ${CORSO_LOG_FILE}
# Run with the force flag so it doesn't fail if the github runner
# hostname isn't what's expected. This is only safe because we can
# guarantee only one runner will be executing maintenance at a time.
./corso repo maintenance --mode metadata \
--no-stats \
--hide-progress \
--force \
--json \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt
- name: Maintenance test Weekly
id: maintenance-test-weekly
timeout-minutes: 30
run: |
if [[ $(date +%A) == "Saturday" ]]; then
set -euo pipefail
echo -e "\n Maintenance test Weekly\n" >> ${CORSO_LOG_FILE}
./corso repo maintenance --mode complete \
--no-stats \
--hide-progress \
--force \
--json \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_complete.txt
# TODO(ashmrtn): We can also check that non-current versions of
# blobs don't have their retention extended if we want.
#
# Assuming no failures during full maintenance, current versions of
# objects with the below versions should have retention times that
# are roughly (now + RETENTION_DURATION). We can explicitly check
# for this, but leave a little breathing room since maintenance may
# take some time to run.
#
# If we pick a live-retention-duration that is too small then we'll
# start seeing failures. The check for live objects is a lower bound
# check.
#
# Blob prefixes are as follows:
# - kopia.blobcfg - repo-wide config
# - kopia.repository - repo-wide config
# - p - data pack blobs (i.e. file data)
# - q - metadata pack blobs (i.e. manifests, directory listings, etc)
# - x - index blobs
./s3checker \
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
--prefix ${{ env.PREFIX }} \
--retention-mode ${{ env.RETENTION_MODE }} \
--live-retention-duration "$((${{ env.RETENTION_DURATION }}-1))h" \
--object-prefix "kopia.blobcfg" \
--object-prefix "kopia.repository" \
--object-prefix "p" \
--object-prefix "q" \
--object-prefix "x"
fi
##########################################################################
# Logging & Notifications
# Upload the original go test output as an artifact for later review.
- name: Upload test log
if: always()
uses: actions/upload-artifact@v4
with:
name: longevity-test-log
path: src/testlog/*
if-no-files-found: error
retention-days: 14
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Longevity Test"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -30,7 +30,7 @@ jobs:
run: run:
working-directory: src working-directory: src
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
# single setup and sum cache handling here. # single setup and sum cache handling here.
# the results will cascade onto both testing and linting. # the results will cascade onto both testing and linting.
@ -48,7 +48,7 @@ jobs:
# ---------------------------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------------------------
Test-Suite-Trusted: Test-Suite-Trusted:
needs: [Checkout, SetM365App] needs: [ Checkout, SetM365App]
environment: Testing environment: Testing
runs-on: ubuntu-latest runs-on: ubuntu-latest
defaults: defaults:
@ -60,7 +60,7 @@ jobs:
AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }} AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }} CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: Setup Golang with cache - name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4 uses: magnetikonline/action-golang-cache@v4
@ -73,12 +73,17 @@ jobs:
- name: Set up gotestfmt - name: Set up gotestfmt
run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest run: go install github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
# AWS creds
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v2
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing
aws-region: us-east-1
# run the tests # run the tests
- name: Integration Tests - name: Integration Tests
env: env:
# Use long-lived AWS credentials.
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_SECRET }}
AZURE_CLIENT_ID: ${{ secrets[env.AZURE_CLIENT_ID_NAME] }} AZURE_CLIENT_ID: ${{ secrets[env.AZURE_CLIENT_ID_NAME] }}
AZURE_CLIENT_SECRET: ${{ secrets[env.AZURE_CLIENT_SECRET_NAME] }} AZURE_CLIENT_SECRET: ${{ secrets[env.AZURE_CLIENT_SECRET_NAME] }}
AZURE_TENANT_ID: ${{ secrets.TENANT_ID }} AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
@ -87,36 +92,60 @@ jobs:
CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }} CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }} CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }} CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-nightly.log CORSO_LOG_FILE: ${{ github.workspace }}/testlog/run-nightly.log
LOG_GRAPH_REQUESTS: true LOG_GRAPH_REQUESTS: true
S3_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }}
run: | run: |
set -euo pipefail set -euo pipefail
go test \ go test \
-tags testing \ -tags testing \
-json \ -json \
-v \ -v \
-failfast \
-p 1 \ -p 1 \
-timeout 2h \ -timeout 15m \
./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests ./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests
########################################################################################################################################## ##########################################################################################################################################
# Logging & Notifications # Logging & Notifications
# Upload the original go test output as an artifact for later review. # Upload the original go test output as an artifact for later review.
- name: Upload test log - name: Upload test log
if: always() if: always()
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
with: with:
name: nightly-test-log name: nightly-test-log
path: src/testlog/* path: src/testlog/*
if-no-files-found: error if-no-files-found: error
retention-days: 14 retention-days: 14
- name: Notify failure in teams - name: SHA info
id: sha-info
if: failure() if: failure()
uses: ./.github/actions/teams-message run: |
echo ${GITHUB_REF#refs/heads/}-${GITHUB_SHA}
echo SHA=${GITHUB_REF#refs/heads/}-${GITHUB_SHA} >> $GITHUB_OUTPUT
echo RUN_URL=${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} >> $GITHUB_OUTPUT
echo COMMIT_URL=${{ github.server_url }}/${{ github.repository }}/commit/${GITHUB_SHA} >> $GITHUB_OUTPUT
- name: Send Github Action failure to Slack
id: slack-notification
if: failure()
uses: slackapi/slack-github-action@v1.24.0
with: with:
msg: "[COROS FAILED] Nightly Checks" payload: |
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }} {
"text": "Nightly test failure - build: ${{ job.status }} - SHA: ${{ steps.sha-info.outputs.SHA }}",
"blocks": [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "[FAILED] Nightly Checks :: <${{ steps.sha-info.outputs.RUN_URL }}|[Logs]> <${{ steps.sha-info.outputs.COMMIT_URL }}|[Base]>\nCommit: <${{ steps.sha-info.outputs.COMMIT_URL }}|${{ steps.sha-info.outputs.SHA }}>"
}
}
]
}
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK

View File

@ -13,13 +13,13 @@ jobs:
steps: steps:
- name: Generate token - name: Generate token
id: generate_token id: generate_token
uses: tibdex/github-app-token@v2 uses: tibdex/github-app-token@v1
with: with:
app_id: ${{ secrets.APP_ID }} app_id: ${{ secrets.APP_ID }}
private_key: ${{ secrets.PRIVATE_KEY }} private_key: ${{ secrets.PRIVATE_KEY }}
- name: Slash Command Dispatch - name: Slash Command Dispatch
uses: peter-evans/slash-command-dispatch@v4 uses: peter-evans/slash-command-dispatch@v3
env: env:
TOKEN: ${{ steps.generate_token.outputs.token }} TOKEN: ${{ steps.generate_token.outputs.token }}
with: with:

View File

@ -6,7 +6,7 @@ on:
workflow_dispatch: workflow_dispatch:
inputs: inputs:
user: user:
description: "User to run sanity test on" description: 'User to run sanity test on'
permissions: permissions:
# required to retrieve AWS credentials # required to retrieve AWS credentials
@ -23,7 +23,7 @@ jobs:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main uses: alcionai/corso/.github/workflows/accSelector.yaml@main
Sanity-Tests: Sanity-Tests:
needs: [SetM365App] needs: [ SetM365App ]
environment: Testing environment: Testing
runs-on: ubuntu-latest runs-on: ubuntu-latest
env: env:
@ -38,17 +38,18 @@ jobs:
CORSO_LOG_DIR: ${{ github.workspace }}/src/testlog CORSO_LOG_DIR: ${{ github.workspace }}/src/testlog
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-sanity.log CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-sanity.log
RESTORE_DEST_PFX: Corso_Test_Sanity_ RESTORE_DEST_PFX: Corso_Test_Sanity_
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }} TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || secrets.CORSO_M365_TEST_USER_ID }}
defaults: defaults:
run: run:
working-directory: src working-directory: src
##########################################################################################################################################
########################################################################################################################################## # setup
# setup
steps: steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Setup Golang with cache - name: Setup Golang with cache
uses: magnetikonline/action-golang-cache@v4 uses: magnetikonline/action-golang-cache@v4
@ -56,16 +57,13 @@ jobs:
go-version-file: src/go.mod go-version-file: src/go.mod
- run: go build -o corso - run: go build -o corso
timeout-minutes: 10
- run: go build -o sanity-test ./cmd/sanity_test - run: go build -o sanity-test ./cmd/sanity_test
timeout-minutes: 10
- run: mkdir ${CORSO_LOG_DIR} - run: mkdir ${CORSO_LOG_DIR}
########################################################################################################################################## ##########################################################################################################################################
# Pre-Run cleanup # Pre-Run cleanup
# unlike CI tests, sanity tests are not expected to run concurrently. # unlike CI tests, sanity tests are not expected to run concurrently.
# however, the sanity yaml concurrency is set to a maximum of 1 run, preferring # however, the sanity yaml concurrency is set to a maximum of 1 run, preferring
@ -79,7 +77,6 @@ jobs:
echo "NOW=$(date +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV echo "NOW=$(date +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
- name: Purge CI-Produced Folders for Users - name: Purge CI-Produced Folders for Users
timeout-minutes: 30
uses: ./.github/actions/purge-m365-data uses: ./.github/actions/purge-m365-data
with: with:
user: ${{ env.TEST_USER }} user: ${{ env.TEST_USER }}
@ -90,39 +87,30 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }} azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }} m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }} m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Purge CI-Produced Folders for Sites - name: Purge CI-Produced Folders for Sites
timeout-minutes: 30
if: always() if: always()
uses: ./.github/actions/purge-m365-data uses: ./.github/actions/purge-m365-data
with: with:
site: ${{ vars.CORSO_M365_TEST_SITE_URL }} site: ${{ secrets.CORSO_M365_TEST_SITE_URL }}
folder-prefix: ${{ env.RESTORE_DEST_PFX }} folder-prefix: ${{ env.RESTORE_DEST_PFX }}
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }} libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
older-than: ${{ env.NOW }} older-than: ${{ env.NOW }}
azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }} azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }} azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
azure-tenant-id: ${{ secrets.TENANT_ID }} azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }} m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }} m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
########################################################################################################################################## ##########################################################################################################################################
# Repository commands # Repository commands
- name: Version Test - name: Version Test
timeout-minutes: 10
run: | run: |
./corso --version | grep -c 'Corso version:' ./corso --version | grep -c 'Corso version:'
- name: Repo init test - name: Repo init test
timeout-minutes: 10
id: repo-init id: repo-init
run: | run: |
set -euo pipefail set -euo pipefail
@ -144,7 +132,6 @@ jobs:
echo result="$prefix" >> $GITHUB_OUTPUT echo result="$prefix" >> $GITHUB_OUTPUT
- name: Repo connect test - name: Repo connect test
timeout-minutes: 10
run: | run: |
set -euo pipefail set -euo pipefail
echo -e "\nRepo connect test\n" >> ${{ env.CORSO_LOG_FILE }} echo -e "\nRepo connect test\n" >> ${{ env.CORSO_LOG_FILE }}
@ -161,27 +148,13 @@ jobs:
exit 1 exit 1
fi fi
# Run maintenance on an empty repo just to make sure the command still ##########################################################################################################################################
# works.
- name: Repo maintenance test
timeout-minutes: 30
run: |
set -euo pipefail
echo -e "\nRepo maintenance test\n" >> ${{ env.CORSO_LOG_FILE }}
./corso repo maintenance \
--no-stats \
--hide-progress \
--mode complete \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-maintenance.log
########################################################################################################################################## # Exchange
# Exchange
# generate new entries to roll into the next load test # generate new entries to roll into the next load test
# only runs if the test was successful # only runs if the test was successful
- name: Exchange - Create new data - name: Exchange - Create new data
timeout-minutes: 30
working-directory: ./src/cmd/factory working-directory: ./src/cmd/factory
run: | run: |
go run . exchange emails \ go run . exchange emails \
@ -191,68 +164,60 @@ jobs:
--count 4 --count 4
- name: Exchange - Backup - name: Exchange - Backup
timeout-minutes: 30
id: exchange-backup id: exchange-backup
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: exchange service: exchange
kind: first-backup kind: initial
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"' backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}" restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
- name: Exchange - Incremental backup - name: Exchange - Incremental backup
timeout-minutes: 30
id: exchange-backup-incremental id: exchange-backup-incremental
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: exchange service: exchange
kind: incremental kind: incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"' backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}" restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }} base-backup: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
- name: Exchange - Non delta backup - name: Exchange - Non delta backup
timeout-minutes: 30
id: exchange-backup-non-delta id: exchange-backup-non-delta
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: exchange service: exchange
kind: non-delta kind: non-delta
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta' backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}" restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }} base-backup: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
- name: Exchange - Incremental backup after non-delta - name: Exchange - Incremental backup after non-delta
timeout-minutes: 30
id: exchange-backup-incremental-after-non-delta id: exchange-backup-incremental-after-non-delta
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: exchange service: exchange
kind: non-delta-incremental kind: non-delta-incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"' backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}" restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }} base-backup: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
# Onedrive ##########################################################################################################################################
# Onedrive
# generate new entries for test # generate new entries for test
- name: OneDrive - Create new data - name: OneDrive - Create new data
id: new-data-creation-onedrive id: new-data-creation-onedrive
timeout-minutes: 30
working-directory: ./src/cmd/factory working-directory: ./src/cmd/factory
run: | run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S") suffix=$(date +"%Y-%m-%d_%H-%M-%S")
@ -268,20 +233,17 @@ jobs:
- name: OneDrive - Backup - name: OneDrive - Backup
id: onedrive-backup id: onedrive-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: onedrive service: onedrive
kind: first-backup kind: initial
backup-args: '--user "${{ env.TEST_USER }}"' backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}" restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }} --restore-permissions'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
# generate some more enteries for incremental check # generate some more enteries for incremental check
- name: OneDrive - Create new data (for incremental) - name: OneDrive - Create new data (for incremental)
timeout-minutes: 30
working-directory: ./src/cmd/factory working-directory: ./src/cmd/factory
run: | run: |
go run . onedrive files \ go run . onedrive files \
@ -293,31 +255,28 @@ jobs:
- name: OneDrive - Incremental backup - name: OneDrive - Incremental backup
id: onedrive-incremental id: onedrive-incremental
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: onedrive service: onedrive
kind: incremental kind: incremental
backup-args: '--user "${{ env.TEST_USER }}"' backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}" restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }} --restore-permissions'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
########################################################################################################################################## ##########################################################################################################################################
# Sharepoint Library # Sharepoint
# generate new entries for test # generate new entries for test
- name: SharePoint - Create new data - name: SharePoint - Create new data
id: new-data-creation-sharepoint id: new-data-creation-sharepoint
timeout-minutes: 30
working-directory: ./src/cmd/factory working-directory: ./src/cmd/factory
run: | run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S") suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint files \ go run . sharepoint files \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \ --site ${{ secrets.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \ --user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \ --secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \ --tenant ${{ secrets.TENANT_ID }} \
@ -328,25 +287,21 @@ jobs:
- name: SharePoint - Backup - name: SharePoint - Backup
id: sharepoint-backup id: sharepoint-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: sharepoint service: sharepoint
kind: first-backup kind: initial
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries' backup-args: '--site "${{ secrets.CORSO_M365_TEST_SITE_URL }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}" restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }} --restore-permissions'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: libraries
# generate some more enteries for incremental check # generate some more enteries for incremental check
- name: SharePoint - Create new data (for incremental) - name: SharePoint - Create new data (for incremental)
timeout-minutes: 30
working-directory: ./src/cmd/factory working-directory: ./src/cmd/factory
run: | run: |
go run . sharepoint files \ go run . sharepoint files \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \ --site ${{ secrets.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \ --user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \ --secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \ --tenant ${{ secrets.TENANT_ID }} \
@ -355,186 +310,56 @@ jobs:
- name: SharePoint - Incremental backup - name: SharePoint - Incremental backup
id: sharepoint-incremental id: sharepoint-incremental
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: sharepoint service: sharepoint
kind: incremental kind: incremental
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries' backup-args: '--site "${{ secrets.CORSO_M365_TEST_SITE_URL }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}" restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }} --restore-permissions'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: libraries
########################################################################################################################################## ##########################################################################################################################################
# Sharepoint Lists # Logging & Notifications
# generate new entries for test
# The `awk | tr | sed` command chain is used to get a comma separated list of SharePoint list names.
- name: SharePoint Lists - Create new data
id: new-data-creation-sharepoint-lists
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint lists \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4 |
awk 'NR > 1 {print $2}' | tr '\n' ',' | sed -e 's/,$//' -e 's/^/result=/' |
tee $GITHUB_OUTPUT
# Extracts the common prefix for the Sharepoint list names.
- name: SharePoint Lists - Store restore container
id: sharepoint-lists-store-restore-container
run: |
echo ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} |
cut -d',' -f1 |
cut -d'_' -f1,2,3,4,5 |
sed -e 's/^/result=/' |
tee $GITHUB_OUTPUT
- name: SharePoint Lists - Backup
id: sharepoint-lists-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: sharepoint
kind: first-backup-lists
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
restore-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
export-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
restore-container: "${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: lists
on-collision: copy
# generate some more enteries for incremental check
- name: SharePoint Lists - Create new data (for incremental)
id: inc-data-creation-sharepoint-lists
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint lists \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4 |
awk 'NR > 1 {print $2}' | tr '\n' ',' | sed -e 's/,$//' -e 's/^/result=/' |
tee $GITHUB_OUTPUT
- name: SharePoint Lists - Store restore container (for incremental)
id: sharepoint-lists-store-restore-container-inc
run: |
echo ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }} |
cut -d',' -f1 |
cut -d'_' -f1,2,3,4,5 |
sed -e 's/^/result=/' |
tee $GITHUB_OUTPUT
- name: SharePoint Lists - Incremental backup
id: sharepoint-lists-incremental
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: sharepoint
kind: incremental-lists
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
restore-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
export-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
restore-container: "${{ steps.sharepoint-lists-store-restore-container-inc.outputs.result }},${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: lists
on-collision: copy
##########################################################################################################################################
# Groups and Teams
# generate new entries for test
- name: Groups - Create new data
id: new-data-creation-groups
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint files \
--site ${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4
echo result="${suffix}" >> $GITHUB_OUTPUT
- name: Groups - Backup
id: groups-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: groups
kind: first-backup
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
# generate some more entries for incremental check
- name: Groups - Create new data (for incremental)
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
go run . sharepoint files \
--site ${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }} \
--count 4
- name: Groups - Incremental backup
id: groups-incremental
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: groups
kind: incremental
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries'
restore-args: '--site "${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }}" --folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
# Logging & Notifications
# Upload the original go test output as an artifact for later review. # Upload the original go test output as an artifact for later review.
- name: Upload test log - name: Upload test log
if: always() if: always()
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
with: with:
name: sanity-test-log name: sanity-test-log
path: ${{ env.CORSO_LOG_DIR }}/* path: ${{ env.CORSO_LOG_DIR }}/*
if-no-files-found: error if-no-files-found: error
retention-days: 14 retention-days: 14
- name: Notify failure in teams - name: SHA info
id: sha-info
if: failure() if: failure()
uses: ./.github/actions/teams-message run: |
echo ${GITHUB_REF#refs/heads/}-${GITHUB_SHA}
echo SHA=${GITHUB_REF#refs/heads/}-${GITHUB_SHA} >> $GITHUB_OUTPUT
echo RUN_URL=${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} >> $GITHUB_OUTPUT
echo COMMIT_URL=${{ github.server_url }}/${{ github.repository }}/commit/${GITHUB_SHA} >> $GITHUB_OUTPUT
- name: Send Github Action failure to Slack
id: slack-notification
if: failure()
uses: slackapi/slack-github-action@v1.24.0
with: with:
msg: "[CORSO FAILED] Sanity Tests" payload: |
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }} {
"text": "Sanity test failure - build: ${{ job.status }} - SHA: ${{ steps.sha-info.outputs.SHA }}",
"blocks": [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "[FAILED] Sanity Checks :: <${{ steps.sha-info.outputs.RUN_URL }}|[Logs]> <${{ github.event.pull_request.html_url || github.event.head_commit.url }}|[Base]>\nCommit: <${{ steps.sha-info.outputs.COMMIT_URL }}|${{ steps.sha-info.outputs.SHA }}>"
}
}
]
}
env:
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK

View File

@ -1,23 +0,0 @@
name: Manually Test Teams Action
on:
workflow_dispatch:
inputs:
msg:
description: 'Message to send:'
required: true
default: 'This is a test message'
jobs:
notify:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Send notification
uses: ./.github/actions/teams-message
with:
msg: ${{ github.event.inputs.msg }}
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -21,7 +21,7 @@ jobs:
outputs: outputs:
version: ${{ steps.version.outputs.version }} version: ${{ steps.version.outputs.version }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
with: with:
fetch-depth: 0 # needed to get latest tag fetch-depth: 0 # needed to get latest tag
@ -40,7 +40,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v3
- name: Lint Website - name: Lint Website
uses: ./.github/actions/website-linting uses: ./.github/actions/website-linting
@ -56,7 +56,7 @@ jobs:
working-directory: website working-directory: website
steps: steps:
- uses: actions/checkout@v4 # need to checkout to make the action available - uses: actions/checkout@v3 # need to checkout to make the action available
- name: Publish website - name: Publish website
uses: ./.github/actions/publish-website uses: ./.github/actions/publish-website

26
.github/workflows/weekly_cleanup.yml vendored Normal file
View File

@ -0,0 +1,26 @@
name: Weekly S3 Test Bucket Cleanup
on:
schedule:
# every saturday at 23:59 (11:59pm)
- cron: "59 23 * * 6"
permissions:
# required to retrieve AWS credentials
id-token: write
jobs:
S3-Test-Cleanup:
runs-on: ubuntu-latest
environment: Testing
steps:
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v2
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing
aws-region: us-east-1
- name: Delete all files in the test bucket
run: |
aws s3 rm s3://${{ secrets.CI_TESTS_S3_BUCKET }} --recursive

View File

@ -6,234 +6,27 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased] (beta) ## [Unreleased] (beta)
### Fixed
- Handle the case where an email or event cannot be retrieved from Exchange due to an `ErrorCorruptData` error. Corso will skip over the item but report it in the backup summary.
- Emails attached within other emails are now correctly exported
- Gracefully handle email and post attachments without name when exporting to eml
- Use correct timezone for event start and end times in Exchange exports (helps fix issues in relative recurrence patterns)
- Fixed an issue causing exports dealing with calendar data to have high memory usage
## [v0.19.0] (beta) - 2024-02-06
### Added
- Events can now be exported from Exchange backups as .ics files.
- Update repo init configuration to reduce the total number of GET requests sent
to the object store when using corso. This affects repos that have many
backups created in them per day the most.
- Feature Preview: Corso now supports backup, export & restore of SharePoint lists. Lists backup can be initiated using `corso backup create sharepoint --site <site-url> --data lists`.
- Group mailbox(aka conversations) backup and export support is now officially available. Group mailbox posts can be exported as `.eml` files.
### Fixed
- Retry transient 400 "invalidRequest" errors during onedrive & sharepoint backup.
- Backup attachments associated with group mailbox items.
- Groups and Teams backups no longer fail when a resource has no display name.
- Contacts in-place restore failed if the restore destination was empty.
- Link shares with external users are now backed up and restored as expected
- Ensure persistent repo config is populated on repo init if repo init failed partway through during the previous init attempt.
### Changed
- When running `backup details` on an empty backup returns a more helpful error message.
- Backup List additionally shows the data category for each backup.
- Remove hidden `--succeed-if-exists` flag for repo init. Repo init will now succeed without error if run on an existing repo with the same passphrase.
### Known issues
- Backing up a group mailbox item may fail if it has a very large number of attachments (500+).
- Event description for exchange exports might look slightly different for certain events.
- Exchange in-place restore may restore items in well-known folders to different folders if the user has well-known folder names change based on locale and has updated the locale since the backup was created.
- In-place Exchange contacts restore will merge items in folders named "Contacts" or "contacts" into the default folder.
- External users with access through shared links will not receive these links as they are not sent via email during restore.
- Graph API has limited support for certain column types such as `location`, `hyperlink/picture`, and `metadata`. Restoring SharePoint list items containing these columns will result in differences compared to the original items.
- SharePoint list item attachments are not available due to graph API limitations.
- Group mailbox restore is not supported due to limited Graph API support for creating mailbox items.
- Due to Graph API limitations, any group mailbox items present in subfolders other than Inbox aren't backed up.
## [v0.18.0] (beta) - 2024-01-02
### Fixed
- Handle the case where an email cannot be retrieved from Exchange due to an `ErrorInvalidRecipients` error. In
this case, Corso will skip over the item but report this in the backup summary.
- Fix `ErrorItemNotFound` errors when restoring emails with multiple attachments.
- Avoid Graph SDK `Requests must contain extension changes exclusively.` errors by removing server-populated field from restored event items.
- Improve Group mailbox(conversations) backup performance by only downloading new items or items with modified content.
- Handle cases where Exchange backup stored invalid JSON blobs if there were special characters in the user content. These would result in errors during restore.
### Known issues
- Restoring OneDrive, SharePoint, or Teams & Groups items shared with external users while the tenant or site is configured to not allow sharing with external users will not restore permissions.
### Added
- Contacts can now be exported from Exchange backups as .vcf files
## [v0.17.0] (beta) - 2023-12-11
### Changed
- Memory optimizations for large scale OneDrive and Sharepoint backups.
### Fixed
- Resolved a possible deadlock when backing up Teams Channel Messages.
- Fixed an attachment download failure (ErrorTooManyObjectsOpened) during Exchange backup.
## [v0.16.0] (beta) - 2023-11-28
### Added
- Export support for emails in exchange backups as `.eml` files.
- More colorful and informational cli display.
### Changed
- The file extension in Teams messages exports has switched to json to match the content type.
- SDK consumption of the /services/m365 package has shifted from independent functions to a client-based api.
- SDK consumers can now configure the /services/m365 graph api client configuration when constructing a new m365 client.
- Dynamic API rate limiting allows small-scale Exchange backups to complete more quickly.
- Kopia's local config files now uses unique filenames that match Corso configurations. This can protect concurrent Corso operations from mistakenly clobbering storage configs during runtime.
### Fixed
- Handle OneDrive folders being deleted and recreated midway through a backup.
- Automatically re-run a full delta query on incremental if the prior backup is found to have malformed prior-state information.
- Retry drive item permission downloads during long-running backups after the jwt token expires and refreshes.
- Retry item downloads during connection timeouts.
## [v0.15.0] (beta) - 2023-10-31
### Added
- Added `corso repo update-passphrase` command to update the passphrase of an existing Corso repository
- Added Subject and Message preview to channel messages detail entries
### Fixed
- SharePoint backup would fail if any site had an empty display name
- Fix a bug with exports hanging post completion
- Handle 503 errors in nested OneDrive packages
### Changed
- Item Details formatting in Groups and Teams backups
## [v0.14.2] (beta) - 2023-10-17
### Added
- Skips graph calls for expired item download URLs.
- Export operation now shows the stats at the end of the run
### Fixed
- Catch and report cases where a protected resource is locked out of access. SDK consumers have a new errs sentinel that allows them to check for this case.
- Fix a case where missing item LastModifiedTimes could cause incremental backups to fail.
- Email size metadata was incorrectly set to the size of the last attachment. Emails will now correctly report the size of the mail content plus the size of all attachments.
- Improves the filtering capabilities for Groups restore and backup
- Improve check to skip OneNote files that cannot be downloaded.
- Fix Groups backup for non-Team groups
### Changed
- Groups restore now expects the site whose backup we should restore
## [v0.14.0] (beta) - 2023-10-09
### Added
- Enables local or network-attached storage for Corso repositories.
- Reduce backup runtime for OneDrive and SharePoint incremental backups that have no file changes.
- Increase Exchange backup performance by lazily fetching data only for items whose content changed.
- Added `--backups` flag to delete multiple backups in `corso backup delete` command.
- Backup now includes all sites that belongs to a team, not just the root site.
### Fixed
- Teams Channels that cannot support delta tokens (those without messages) fall back to non-delta enumeration and no longer fail a backup.
### Known issues
- Restoring the data into a different Group from the one it was backed up from is not currently supported
### Other
- Groups and Teams service support is still in feature preview
## [v0.13.0] (beta) - 2023-09-18
### Added
- Groups and Teams service support available as a feature preview! Channel messages and Files are now available for backup and restore in the CLI: `corso backup create groups --group '*'`
- The cli commands for "groups" and "teams" can be used interchangeably, and will operate on the same backup data.
- New permissions are required to backup Channel messages. See the [Corso Documentation](https://corsobackup.io/docs/setup/m365-access/#configure-required-permissions) for complete details.
Even though Channel message restoration is not available, message write permissions are included to cover future integration.
- This is a feature preview, and may be subject to breaking changes based on feedback and testing.
### Changed
- Switched to Go 1.21
- SharePoint exported libraries are now exported with a `Libraries` prefix.
### Fixed
- Contacts backups no longer slice root-folder data if Outlook is set to languages other than English.
- Failed backups if the --disable-incrementals flag was passed when there was a valid merge base under some conditions.
## [v0.12.0] (beta) - 2023-08-29
### Added
- Added `export` command to export data from OneDrive and SharePoint backups as individual files or as a single zip file.
- Restore commands now accept an optional resource override with the `--to-resource` flag. This allows restores to recreate backup data within different mailboxes, sites, and users.
- Improve `--mask-sensitive-data` logging mode.
- Reliability: Handle connection cancellation and resets observed when backing up or restoring large data sets.
- Reliability: Recover from Graph SDK panics when the Graph API returns incomplete responses.
- Performance: Improve backup delete performance by batching multiple storage operations into a single operation.
### Fixed
- SharePoint document libraries deleted after the last backup can now be restored.
- Restore requires the protected resource to have access to the service being restored.
- SharePoint data from multiple document libraries are not merged in exports
- `corso backup delete` was not removing the backup details data associated with that snapshot
- Fix OneDrive restores could fail with a concurrent map write error
- Fix backup list displaying backups that had errors
- Fix OneDrive backup could fail if item was deleted during backup
- Exchange backups would fail attempting to use delta tokens even if the user was over quota
## [v0.11.1] (beta) - 2023-07-20
### Fixed
- Allow repo connect to succeed when a `corso.toml` file was not provided but configuration is specified using environment variables and flags.
## [v0.11.0] (beta) - 2023-07-18
### Added
- Drive items backup and restore link shares
- Restore commands now accept an optional top-level restore destination with the `--destination` flag. Setting the destination to '/' will restore items back into their original location.
- Restore commands can specify item collision behavior. Options are Skip (default), Replace, and Copy.
- Introduced repository maintenance commands to help optimize the repository as well as unreferenced data.
### Fixed
- Return a ServiceNotEnabled error when a tenant has no active SharePoint license.
- Added retries for http/2 stream connection failures when downloading large item content.
- SharePoint document libraries that were deleted after the last backup can now be restored.
### Known issues
- If a link share is created for an item with inheritance disabled
(via the Graph API), the link shares restored in that item will
not be inheritable by children
- Link shares with password protection can't be restored
## [v0.10.0] (beta) - 2023-06-26
### Added
- Exceptions and cancellations for recurring events are now backed up and restored
- Introduced a URL cache for OneDrive that helps reduce Graph API calls for long running (>1hr) backups
- Improve incremental backup behavior by leveraging information from incomplete backups
- Improve restore performance and memory use for Exchange and OneDrive
### Fixed
- Handle OLE conversion errors when trying to fetch attachments
- Fix uploading large attachments for emails and calendar
- Fixed high memory use in OneDrive backup related to logging
- Return a ServiceNotEnabled error when a tenant has no active SharePoint license.
### Changed
- Switched to Go 1.20
## [v0.9.0] (beta) - 2023-06-05 ## [v0.9.0] (beta) - 2023-06-05
### Added ### Added
- Added ProtectedResourceName to the backup list json output. ProtectedResourceName holds either a UPN or a WebURL, depending on the resource type. - Added ProtectedResourceName to the backup list json output. ProtectedResourceName holds either a UPN or a WebURL, depending on the resource type.
- Rework base selection logic for incremental backups so it's more likely to find a valid base. - Rework base selection logic for incremental backups so it's more likely to find a valid base.
- Improve OneDrive restore performance by paralleling item restores - Improve OneDrive restore performance by paralleling item restores
- Exceptions and cancellations for recurring events are now backed up and restored
### Fixed ### Fixed
- Fix Exchange folder cache population error when parent folder isn't found. - Fix Exchange folder cache population error when parent folder isn't found.
- Fix Exchange backup issue caused by incorrect json serialization - Fix Exchange backup issue caused by incorrect json serialization
- Fix issues with details model containing duplicate entry for api consumers - Fix issues with details model containing duplicate entry for api consumers
- Handle OLE conversion errors when trying to fetch attachments
### Changed ### Changed
- Do not display all the items that we restored at the end if there are more than 15. You can override this with `--verbose`. - Do not display all the items that we restored at the end if there are more than 15. You can override this with `--verbose`.
### Known Issues
- Changes to attachments in instances of recurring events compared to the series master aren't restored
## [v0.8.0] (beta) - 2023-05-15 ## [v0.8.0] (beta) - 2023-05-15
### Added ### Added
@ -502,18 +295,7 @@ this case, Corso will skip over the item but report this in the backup summary.
- Miscellaneous - Miscellaneous
- Optional usage statistics reporting ([RM-35](https://github.com/alcionai/corso-roadmap/issues/35)) - Optional usage statistics reporting ([RM-35](https://github.com/alcionai/corso-roadmap/issues/35))
[Unreleased]: https://github.com/alcionai/corso/compare/v0.19.0...HEAD [Unreleased]: https://github.com/alcionai/corso/compare/v0.9.0...HEAD
[v0.19.0]: https://github.com/alcionai/corso/compare/v0.18.0...v0.19.0
[v0.18.0]: https://github.com/alcionai/corso/compare/v0.17.0...v0.18.0
[v0.17.0]: https://github.com/alcionai/corso/compare/v0.16.0...v0.17.0
[v0.16.0]: https://github.com/alcionai/corso/compare/v0.15.0...v0.16.0
[v0.15.0]: https://github.com/alcionai/corso/compare/v0.14.0...v0.15.0
[v0.14.0]: https://github.com/alcionai/corso/compare/v0.13.0...v0.14.0
[v0.13.0]: https://github.com/alcionai/corso/compare/v0.12.0...v0.13.0
[v0.12.0]: https://github.com/alcionai/corso/compare/v0.11.1...v0.12.0
[v0.11.1]: https://github.com/alcionai/corso/compare/v0.11.0...v0.11.1
[v0.11.0]: https://github.com/alcionai/corso/compare/v0.10.0...v0.11.0
[v0.10.0]: https://github.com/alcionai/corso/compare/v0.9.0...v0.10.0
[v0.9.0]: https://github.com/alcionai/corso/compare/v0.8.1...v0.9.0 [v0.9.0]: https://github.com/alcionai/corso/compare/v0.8.1...v0.9.0
[v0.8.0]: https://github.com/alcionai/corso/compare/v0.7.1...v0.8.0 [v0.8.0]: https://github.com/alcionai/corso/compare/v0.7.1...v0.8.0
[v0.7.0]: https://github.com/alcionai/corso/compare/v0.6.1...v0.7.0 [v0.7.0]: https://github.com/alcionai/corso/compare/v0.6.1...v0.7.0

View File

@ -1,6 +1,3 @@
> [!NOTE]
> **The Corso project is no longer actively maintained and has been archived**.
<p align="center"> <p align="center">
<img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" /> <img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
</p> </p>

View File

@ -1,4 +1,4 @@
FROM golang:1.21-alpine as builder FROM golang:1.19-alpine as builder
WORKDIR /go/src/app WORKDIR /go/src/app
COPY src . COPY src .

View File

@ -12,7 +12,7 @@ usage() {
} }
ROOT=$(dirname $(dirname $(readlink -f $0))) ROOT=$(dirname $(dirname $(readlink -f $0)))
GOVER=1.21 # go version GOVER=1.19 # go version
CORSO_BUILD_CACHE="/tmp/.corsobuild" # shared persistent cache CORSO_BUILD_CACHE="/tmp/.corsobuild" # shared persistent cache
# Figure out os and architecture # Figure out os and architecture

View File

@ -3,21 +3,14 @@ run:
linters: linters:
enable: enable:
- errcheck
- exhaustive
- forbidigo
- gci - gci
- gofmt - gofmt
- gofumpt - gofumpt
- gosimple - errcheck
- govet - forbidigo
- ineffassign
- lll - lll
- loggercheck
- misspell - misspell
- revive - revive
- unused
- usestdlibvars
- wsl - wsl
disable: disable:
@ -26,11 +19,6 @@ linters:
- staticcheck - staticcheck
linters-settings: linters-settings:
exhaustive:
check:
- switch
default-signifies-exhaustive: false
explicit-exhaustive-switch: true
gci: gci:
sections: sections:
- standard - standard
@ -55,13 +43,10 @@ linters-settings:
# String formatting should be avoided in favor of structured errors (ie: err.With(k, v)). # String formatting should be avoided in favor of structured errors (ie: err.With(k, v)).
- '(errors|fmt)\.(New|Stack|Wrap|Error)f?\((# error handling should use clues pkg)?' - '(errors|fmt)\.(New|Stack|Wrap|Error)f?\((# error handling should use clues pkg)?'
# Avoid Warn-level logging in favor of Info or Error. # Avoid Warn-level logging in favor of Info or Error.
- 'Warnw?f?\((# logging should use Info or Error)?' - 'Warn[wf]?\((# logging should use Info or Error)?'
# Prefer suite.Run(name, func() {}) for subtests as testify has it instead # Prefer suite.Run(name, func() {}) for subtests as testify has it instead
# of suite.T().Run(name, func(t *testing.T) {}). # of suite.T().Run(name, func(t *testing.T) {}).
- '(T\(\)|\st[a-zA-Z0-9]*)\.Run(# prefer testify suite.Run(name, func()) )?' - '(T\(\)|\st[a-zA-Z0-9]*)\.Run(# prefer testify suite.Run(name, func()) )?'
# Prefer packing ctx values into the error using NewWC, WrapWC, or StackWC
# instead of New|Stack|Wrap().WithClues(ctx)
- 'WithClues(# prefer the builderWC variant - ex: StackWC(ctx, ...))?'
lll: lll:
line-length: 120 line-length: 120
revive: revive:
@ -96,7 +81,6 @@ linters-settings:
- name: time-equal - name: time-equal
- name: time-naming - name: time-naming
- name: unreachable-code - name: unreachable-code
- name: use-any
- name: useless-break - name: useless-break
- name: var-declaration - name: var-declaration
- name: var-naming - name: var-naming
@ -134,13 +118,7 @@ issues:
linters: linters:
- forbidigo - forbidigo
text: "context.(Background|TODO)" text: "context.(Background|TODO)"
- path: internal/m365/collection/drive/collections_test.go - path: internal/m365/graph/betasdk
linters:
- lll
- path: internal/m365/collection/drive/collections_tree_test.go
linters:
- lll
- path: pkg/services/m365/api/graph/betasdk
linters: linters:
- wsl - wsl
- revive - revive

View File

@ -18,15 +18,13 @@ builds:
- -X 'github.com/alcionai/corso/src/internal/events.RudderStackDataPlaneURL={{.Env.RUDDERSTACK_CORSO_DATA_PLANE_URL}}' - -X 'github.com/alcionai/corso/src/internal/events.RudderStackDataPlaneURL={{.Env.RUDDERSTACK_CORSO_DATA_PLANE_URL}}'
archives: archives:
# this name template makes the OS and Arch compatible with the results of uname. - name_template: "{{ .ProjectName }}_{{ .Tag }}_{{ .Os }}_{{ .Arch }}"
- name_template: >- replacements:
{{ .ProjectName }}_ darwin: Darwin
{{- .Tag }}_ linux: Linux
{{- title .Os }}_ windows: Windows
{{- if eq .Arch "amd64" }}x86_64 386: i386
{{- else if eq .Arch "386" }}i386 amd64: x86_64
{{- else }}{{ .Arch }}{{ end }}
{{- if .Arm }}v{{ .Arm }}{{ end }}
format: tar.gz format: tar.gz
format_overrides: format_overrides:
- goos: windows - goos: windows

View File

@ -1,56 +0,0 @@
{{- /*gotype: github.com/gotesttools/gotestfmt/v2/parser.Package*/ -}}
{{- /*
This template contains the format for an individual package. GitHub actions does not currently support nested groups so
we are creating a stylized header for each package.
*/ -}}
{{- $settings := .Settings -}}
{{- if and (or (not $settings.HideSuccessfulPackages) (ne .Result "PASS")) (or (not $settings.HideEmptyPackages) (ne .Result "SKIP") (ne (len .TestCases) 0)) -}}
{{- if eq .Result "PASS" -}}
{{ "\033" }}[0;32m
{{- else if eq .Result "SKIP" -}}
{{ "\033" }}[0;33m
{{- else -}}
{{ "\033" }}[0;31m
{{- end -}}
📦 {{ .Name }}{{- "\033" }}[0m
{{- with .Coverage -}}
{{- "\033" -}}[0;37m ({{ . }}% coverage){{- "\033" -}}[0m
{{- end -}}
{{- " " -}}({{- .Duration -}})
{{- "\n" -}}
{{- with .Reason -}}
{{- " " -}}🛑 {{ . -}}{{- "\n" -}}
{{- end -}}
{{- with .Output -}}
{{- . -}}{{- "\n" -}}
{{- end -}}
{{- with .TestCases -}}
{{- range . -}}
{{- if or (not $settings.HideSuccessfulTests) (ne .Result "PASS") -}}
::group::
{{- if eq .Result "PASS" -}}
{{ "\033" }}[0;32m✅
{{- else if eq .Result "SKIP" -}}
{{ "\033" }}[0;33m🚧
{{- else -}}
{{ "\033" }}[0;31m❌
{{- end -}}
{{ " " }}{{- .Name -}}
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
{{- with .Coverage -}}
, coverage: {{ . }}%
{{- end -}})
{{- "\033" -}}[0m
{{- "\n" -}}
{{- with .Output -}}
{{- formatTestOutput . $settings -}}
{{- "\n" -}}
{{- end -}}
::endgroup::{{- "\n" -}}
{{- end -}}
{{- end -}}
{{- end -}}
{{- "\n" -}}
{{- end -}}

View File

@ -1,5 +1,5 @@
# This must match the version defined in .github/workflows/lint.yaml. # This must match the version defined in .github/workflows/lint.yaml.
WANTED_LINT_VERSION := 1.54.2 WANTED_LINT_VERSION := 1.52.2
LINT_VERSION := $(shell golangci-lint version | cut -d' ' -f4) LINT_VERSION := $(shell golangci-lint version | cut -d' ' -f4)
HAS_LINT := $(shell which golangci-lint) HAS_LINT := $(shell which golangci-lint)
@ -18,7 +18,7 @@ lint: check-lint-version
fmt: fmt:
gofumpt -w . gofumpt -w .
goimports -w . goimports -w .
gci write --skip-generated -s 'standard' -s 'default' -s 'prefix(github.com/alcionai/corso)' . gci write --skip-generated -s 'standard,default,prefix(github.com/alcionai/corso)' .
check-lint-version: check-lint check-lint-version: check-lint
@if [ "$(LINT_VERSION)" != "$(WANTED_LINT_VERSION)" ]; then \ @if [ "$(LINT_VERSION)" != "$(WANTED_LINT_VERSION)" ]; then \

View File

@ -9,17 +9,12 @@ import (
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/color"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/errs/core"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository" "github.com/alcionai/corso/src/pkg/repository"
@ -27,8 +22,6 @@ import (
"github.com/alcionai/corso/src/pkg/store" "github.com/alcionai/corso/src/pkg/store"
) )
var ErrEmptyBackup = clues.New("no items in backup")
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// adding commands to cobra // adding commands to cobra
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -44,8 +37,6 @@ var serviceCommands = []func(cmd *cobra.Command) *cobra.Command{
addExchangeCommands, addExchangeCommands,
addOneDriveCommands, addOneDriveCommands,
addSharePointCommands, addSharePointCommands,
addGroupsCommands,
addTeamsChatsCommands,
} }
// AddCommands attaches all `corso backup * *` commands to the parent. // AddCommands attaches all `corso backup * *` commands to the parent.
@ -58,13 +49,43 @@ func AddCommands(cmd *cobra.Command) {
backupC.AddCommand(subCommand) backupC.AddCommand(subCommand)
for _, addBackupTo := range serviceCommands { for _, addBackupTo := range serviceCommands {
sc := addBackupTo(subCommand) addBackupTo(subCommand)
flags.AddAllProviderFlags(sc)
flags.AddAllStorageFlags(sc)
} }
} }
} }
// ---------------------------------------------------------------------------
// common flags and flag attachers for commands
// ---------------------------------------------------------------------------
// list output filter flags
var (
failedItemsFN = "failed-items"
listFailedItems string
skippedItemsFN = "skipped-items"
listSkippedItems string
recoveredErrorsFN = "recovered-errors"
listRecoveredErrors string
)
func addFailedItemsFN(cmd *cobra.Command) {
cmd.Flags().StringVar(
&listFailedItems, failedItemsFN, "show",
"Toggles showing or hiding the list of items that failed.")
}
func addSkippedItemsFN(cmd *cobra.Command) {
cmd.Flags().StringVar(
&listSkippedItems, skippedItemsFN, "show",
"Toggles showing or hiding the list of items that were skipped.")
}
func addRecoveredErrorsFN(cmd *cobra.Command) {
cmd.Flags().StringVar(
&listRecoveredErrors, recoveredErrorsFN, "show",
"Toggles showing or hiding the list of errors which corso recovered from.")
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// commands // commands
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -170,10 +191,10 @@ func handleDeleteCmd(cmd *cobra.Command, args []string) error {
// standard set of selector behavior that we want used in the cli // standard set of selector behavior that we want used in the cli
var defaultSelectorConfig = selectors.Config{OnlyMatchItemNames: true} var defaultSelectorConfig = selectors.Config{OnlyMatchItemNames: true}
func genericCreateCommand( func runBackups(
ctx context.Context, ctx context.Context,
r repository.Repositoryer, r repository.Repository,
serviceName string, serviceName, resourceOwnerType string,
selectorSet []selectors.Selector, selectorSet []selectors.Selector,
ins idname.Cacher, ins idname.Cacher,
) error { ) error {
@ -190,47 +211,29 @@ func genericCreateCommand(
ictx = clues.Add(ctx, "resource_owner_selected", owner) ictx = clues.Add(ctx, "resource_owner_selected", owner)
) )
logger.Ctx(ictx).Infof("setting up backup")
bo, err := r.NewBackupWithLookup(ictx, discSel, ins) bo, err := r.NewBackupWithLookup(ictx, discSel, ins)
if err != nil { if err != nil {
cerr := clues.WrapWC(ictx, err, owner) errs = append(errs, clues.Wrap(err, owner).WithClues(ictx))
errs = append(errs, cerr) Errf(ictx, "%v\n", err)
Errf(
ictx,
"%s\nCause: %s",
"Unable to initiate backup",
err.Error())
continue continue
} }
ictx = clues.Add( ictx = clues.Add(
ictx, ctx,
"resource_owner_id", bo.ResourceOwner.ID(), "resource_owner_id", bo.ResourceOwner.ID(),
"resource_owner_name", clues.Hide(bo.ResourceOwner.Name())) "resource_owner_name", bo.ResourceOwner.Name())
logger.Ctx(ictx).Infof("running backup")
err = bo.Run(ictx) err = bo.Run(ictx)
if err != nil { if err != nil {
if errors.Is(err, core.ErrServiceNotEnabled) { if errors.Is(err, graph.ErrServiceNotEnabled) {
logger.Ctx(ictx).Infow("service not enabled", logger.Ctx(ctx).Infow("service not enabled", "resource_owner_name", bo.ResourceOwner.Name())
"resource_owner_id", bo.ResourceOwner.ID(),
"service", serviceName)
continue continue
} }
cerr := clues.Wrap(err, owner) errs = append(errs, clues.Wrap(err, owner).WithClues(ictx))
errs = append(errs, cerr) Errf(ictx, "%v\n", err)
Errf(
ictx,
"%s\nCause: %s",
"Unable to complete backup",
err.Error())
continue continue
} }
@ -238,10 +241,10 @@ func genericCreateCommand(
bIDs = append(bIDs, string(bo.Results.BackupID)) bIDs = append(bIDs, string(bo.Results.BackupID))
if !DisplayJSONFormat() { if !DisplayJSONFormat() {
Infof(ictx, fmt.Sprintf("Backup complete %s %s", observe.Bullet, color.BlueOutput(bo.Results.BackupID))) Infof(ctx, "Done\n")
printBackupStats(ictx, r, string(bo.Results.BackupID)) printBackupStats(ctx, r, string(bo.Results.BackupID))
} else { } else {
Infof(ictx, "Backup complete - ID: %v\n", bo.Results.BackupID) Infof(ctx, "Done - ID: %v\n", bo.Results.BackupID)
} }
} }
@ -250,10 +253,8 @@ func genericCreateCommand(
return Only(ctx, clues.Wrap(berrs.Failure(), "Unable to retrieve backup results from storage")) return Only(ctx, clues.Wrap(berrs.Failure(), "Unable to retrieve backup results from storage"))
} }
if len(bups) > 0 { Info(ctx, "Completed Backups:")
Info(ctx, "\nCompleted Backups:") backup.PrintAll(ctx, bups)
backup.PrintAll(ctx, bups)
}
if len(errs) > 0 { if len(errs) > 0 {
sb := fmt.Sprintf("%d of %d backups failed:\n", len(errs), len(selectorSet)) sb := fmt.Sprintf("%d of %d backups failed:\n", len(errs), len(selectorSet))
@ -271,53 +272,35 @@ func genericCreateCommand(
// genericDeleteCommand is a helper function that all services can use // genericDeleteCommand is a helper function that all services can use
// for the removal of an entry from the repository // for the removal of an entry from the repository
func genericDeleteCommand( func genericDeleteCommand(cmd *cobra.Command, bID, designation string, args []string) error {
cmd *cobra.Command,
pst path.ServiceType,
designation string,
bID, args []string,
) error {
if utils.HasNoFlagsAndShownHelp(cmd) { if utils.HasNoFlagsAndShownHelp(cmd) {
return nil return nil
} }
if flags.RunModeFV == flags.RunModeFlagTest {
return nil
}
ctx := clues.Add(cmd.Context(), "delete_backup_id", bID) ctx := clues.Add(cmd.Context(), "delete_backup_id", bID)
r, _, err := utils.GetAccountAndConnect(ctx, cmd, pst) r, _, _, err := utils.GetAccountAndConnect(ctx)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
} }
defer utils.CloseRepo(ctx, r) defer utils.CloseRepo(ctx, r)
if err := r.DeleteBackups(ctx, true, bID...); err != nil { if err := r.DeleteBackup(ctx, bID); err != nil {
return Only(ctx, clues.Wrap(err, fmt.Sprintf("Deleting backup %v", bID))) return Only(ctx, clues.Wrap(err, "Deleting backup "+bID))
} }
Infof(ctx, "Deleted %s backup %v", designation, bID) Infof(ctx, "Deleted %s backup %s", designation, bID)
return nil return nil
} }
// genericListCommand is a helper function that all services can use // genericListCommand is a helper function that all services can use
// to display the backup IDs saved within the repository // to display the backup IDs saved within the repository
func genericListCommand( func genericListCommand(cmd *cobra.Command, bID string, service path.ServiceType, args []string) error {
cmd *cobra.Command,
bID string,
service path.ServiceType,
args []string,
) error {
ctx := cmd.Context() ctx := cmd.Context()
if flags.RunModeFV == flags.RunModeFlagTest { r, _, _, err := utils.GetAccountAndConnect(ctx)
return nil
}
r, _, err := utils.GetAccountAndConnect(ctx, cmd, service)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
} }
@ -335,12 +318,7 @@ func genericListCommand(
} }
b.Print(ctx) b.Print(ctx)
fe.PrintItems( fe.PrintItems(ctx, !ifShow(listFailedItems), !ifShow(listSkippedItems), !ifShow(listRecoveredErrors))
ctx,
!ifShow(flags.ListAlertsFV),
!ifShow(flags.FailedItemsFV),
!ifShow(flags.ListSkippedItemsFV),
!ifShow(flags.ListRecoveredErrorsFV))
return nil return nil
} }
@ -355,78 +333,16 @@ func genericListCommand(
return nil return nil
} }
func genericDetailsCommand(
cmd *cobra.Command,
backupID string,
sel selectors.Selector,
) (*details.Details, error) {
ctx := cmd.Context()
r, rdao, err := utils.GetAccountAndConnect(ctx, cmd, path.OneDriveService)
if err != nil {
return nil, clues.Stack(err)
}
defer utils.CloseRepo(ctx, r)
return genericDetailsCore(
ctx,
r,
backupID,
sel,
rdao.Opts)
}
func genericDetailsCore(
ctx context.Context,
bg repository.BackupGetter,
backupID string,
sel selectors.Selector,
opts control.Options,
) (*details.Details, error) {
ctx = clues.Add(ctx, "backup_id", backupID)
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
d, _, errs := bg.GetBackupDetails(ctx, backupID)
// TODO: log/track recoverable errors
if errs.Failure() != nil {
if errors.Is(errs.Failure(), data.ErrNotFound) {
return nil, clues.New("no backup exists with the id " + backupID)
}
return nil, clues.Wrap(errs.Failure(), "Failed to get backup details in the repository")
}
if len(d.Entries) == 0 {
return nil, ErrEmptyBackup
}
if opts.SkipReduce {
return d, nil
}
d, err := sel.Reduce(ctx, d, errs)
if err != nil {
return nil, clues.Wrap(err, "filtering backup details to selection")
}
return d, nil
}
// ---------------------------------------------------------------------------
// helper funcs
// ---------------------------------------------------------------------------
func ifShow(flag string) bool { func ifShow(flag string) bool {
return strings.ToLower(strings.TrimSpace(flag)) == "show" return strings.ToLower(strings.TrimSpace(flag)) == "show"
} }
func printBackupStats(ctx context.Context, r repository.Repositoryer, bid string) { func printBackupStats(ctx context.Context, r repository.Repository, bid string) {
b, err := r.Backup(ctx, bid) b, err := r.Backup(ctx, bid)
if err != nil { if err != nil {
logger.CtxErr(ctx, err).Error("finding backup immediately after backup operation completion") logger.CtxErr(ctx, err).Error("finding backup immediately after backup operation completion")
} }
b.ToPrintable().Stats.PrintProperties(ctx) b.ToPrintable().Stats.Print(ctx)
Info(ctx, " ")
} }

View File

@ -1,97 +0,0 @@
package backup
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
dtd "github.com/alcionai/corso/src/pkg/backup/details/testdata"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
)
type BackupUnitSuite struct {
tester.Suite
}
func TestBackupUnitSuite(t *testing.T) {
suite.Run(t, &BackupUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *BackupUnitSuite) TestGenericDetailsCore() {
t := suite.T()
expected := append(
append(
dtd.GetItemsForVersion(
t,
path.ExchangeService,
path.EmailCategory,
0,
-1),
dtd.GetItemsForVersion(
t,
path.ExchangeService,
path.EventsCategory,
0,
-1)...),
dtd.GetItemsForVersion(
t,
path.ExchangeService,
path.ContactsCategory,
0,
-1)...)
ctx, flush := tester.NewContext(t)
defer flush()
bg := testdata.VersionedBackupGetter{
Details: dtd.GetDetailsSetForVersion(t, 0),
}
sel := selectors.NewExchangeBackup([]string{"user-id"})
sel.Include(sel.AllData())
output, err := genericDetailsCore(
ctx,
bg,
"backup-ID",
sel.Selector,
control.DefaultOptions())
assert.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, expected, output.Entries)
}
func (suite *BackupUnitSuite) TestGenericDetailsCore_empty() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
bg := testdata.VersionedBackupGetter{
Details: &details.Details{
DetailsModel: details.DetailsModel{
Entries: []details.Entry{},
},
},
}
sel := selectors.NewExchangeBackup([]string{"user-id"})
sel.Include(sel.AllData())
_, err := genericDetailsCore(
ctx,
bg,
"backup-ID",
sel.Selector,
control.DefaultOptions())
require.Error(t, err, "has error")
assert.ErrorIs(t, err, ErrEmptyBackup, clues.ToCore(err))
}

View File

@ -1,14 +1,21 @@
package backup package backup
import ( import (
"github.com/alcionai/clues" "context"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/clues"
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/options"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@ -24,8 +31,8 @@ const (
const ( const (
exchangeServiceCommand = "exchange" exchangeServiceCommand = "exchange"
exchangeServiceCommandCreateUseSuffix = "--mailbox <email> | '" + flags.Wildcard + "'" exchangeServiceCommandCreateUseSuffix = "--mailbox <email> | '" + utils.Wildcard + "'"
exchangeServiceCommandDeleteUseSuffix = "--backups <backupId>" exchangeServiceCommandDeleteUseSuffix = "--backup <backupId>"
exchangeServiceCommandDetailsUseSuffix = "--backup <backupId>" exchangeServiceCommandDetailsUseSuffix = "--backup <backupId>"
) )
@ -39,9 +46,8 @@ corso backup create exchange --mailbox alice@example.com,bob@example.com --data
# Backup all Exchange data for all M365 users # Backup all Exchange data for all M365 users
corso backup create exchange --mailbox '*'` corso backup create exchange --mailbox '*'`
exchangeServiceCommandDeleteExamples = `# Delete Exchange backup with IDs 1234abcd-12ab-cd34-56de-1234abcd \ exchangeServiceCommandDeleteExamples = `# Delete Exchange backup with ID 1234abcd-12ab-cd34-56de-1234abcd
and 1234abcd-12ab-cd34-56de-1234abce corso backup delete exchange --backup 1234abcd-12ab-cd34-56de-1234abcd`
corso backup delete exchange --backups 1234abcd-12ab-cd34-56de-1234abcd,1234abcd-12ab-cd34-56de-1234abce`
exchangeServiceCommandDetailsExamples = `# Explore items in Alice's latest backup (1234abcd...) exchangeServiceCommandDetailsExamples = `# Explore items in Alice's latest backup (1234abcd...)
corso backup details exchange --backup 1234abcd-12ab-cd34-56de-1234abcd corso backup details exchange --backup 1234abcd-12ab-cd34-56de-1234abcd
@ -61,53 +67,61 @@ corso backup details exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
// called by backup.go to map subcommands to provider-specific handling. // called by backup.go to map subcommands to provider-specific handling.
func addExchangeCommands(cmd *cobra.Command) *cobra.Command { func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use { switch cmd.Use {
case createCommand: case createCommand:
c, _ = utils.AddCommand(cmd, exchangeCreateCmd()) c, fs = utils.AddCommand(cmd, exchangeCreateCmd())
fs.SortFlags = false
c.Use = c.Use + " " + exchangeServiceCommandCreateUseSuffix c.Use = c.Use + " " + exchangeServiceCommandCreateUseSuffix
c.Example = exchangeServiceCommandCreateExamples c.Example = exchangeServiceCommandCreateExamples
// Flags addition ordering should follow the order we want them to appear in help and docs: // Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence. // More generic (ex: --user) and more frequently used flags take precedence.
flags.AddMailBoxFlag(c) utils.AddMailBoxFlag(c)
flags.AddDataFlag(c, []string{dataEmail, dataContacts, dataEvents}, false) utils.AddDataFlag(c, []string{dataEmail, dataContacts, dataEvents}, false)
flags.AddFetchParallelismFlag(c) options.AddFetchParallelismFlag(c)
flags.AddDisableDeltaFlag(c) options.AddFailFastFlag(c)
flags.AddEnableImmutableIDFlag(c) options.AddDisableIncrementalsFlag(c)
flags.AddDeltaPageSizeFlag(c) options.AddDisableDeltaFlag(c)
flags.AddGenericBackupFlags(c) options.AddEnableImmutableIDFlag(c)
flags.AddDisableSlidingWindowLimiterFlag(c) options.AddDisableConcurrencyLimiterFlag(c)
case listCommand: case listCommand:
c, _ = utils.AddCommand(cmd, exchangeListCmd()) c, fs = utils.AddCommand(cmd, exchangeListCmd())
fs.SortFlags = false
flags.AddBackupIDFlag(c, false) utils.AddBackupIDFlag(c, false)
flags.AddAllBackupListFlags(c) addFailedItemsFN(c)
addSkippedItemsFN(c)
addRecoveredErrorsFN(c)
case detailsCommand: case detailsCommand:
c, _ = utils.AddCommand(cmd, exchangeDetailsCmd()) c, fs = utils.AddCommand(cmd, exchangeDetailsCmd())
fs.SortFlags = false
c.Use = c.Use + " " + exchangeServiceCommandDetailsUseSuffix c.Use = c.Use + " " + exchangeServiceCommandDetailsUseSuffix
c.Example = exchangeServiceCommandDetailsExamples c.Example = exchangeServiceCommandDetailsExamples
flags.AddSkipReduceFlag(c) options.AddSkipReduceFlag(c)
// Flags addition ordering should follow the order we want them to appear in help and docs: // Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence. // More generic (ex: --user) and more frequently used flags take precedence.
flags.AddBackupIDFlag(c, true) utils.AddBackupIDFlag(c, true)
flags.AddExchangeDetailsAndRestoreFlags(c, false) utils.AddExchangeDetailsAndRestoreFlags(c)
case deleteCommand: case deleteCommand:
c, _ = utils.AddCommand(cmd, exchangeDeleteCmd()) c, fs = utils.AddCommand(cmd, exchangeDeleteCmd())
fs.SortFlags = false
c.Use = c.Use + " " + exchangeServiceCommandDeleteUseSuffix c.Use = c.Use + " " + exchangeServiceCommandDeleteUseSuffix
c.Example = exchangeServiceCommandDeleteExamples c.Example = exchangeServiceCommandDeleteExamples
flags.AddMultipleBackupIDsFlag(c, false) utils.AddBackupIDFlag(c, true)
flags.AddBackupIDFlag(c, false)
} }
return c return c
@ -135,32 +149,20 @@ func createExchangeCmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
if flags.RunModeFV == flags.RunModeFlagTest { if err := validateExchangeBackupCreateFlags(utils.UserFV, utils.CategoryDataFV); err != nil {
return nil
}
if err := validateExchangeBackupCreateFlags(flags.UserFV, flags.CategoryDataFV); err != nil {
return err return err
} }
r, acct, err := utils.AccountConnectAndWriteRepoConfig( r, acct, err := utils.AccountConnectAndWriteRepoConfig(ctx)
ctx,
cmd,
path.ExchangeService)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
} }
defer utils.CloseRepo(ctx, r) defer utils.CloseRepo(ctx, r)
sel := exchangeBackupCreateSelectors(flags.UserFV, flags.CategoryDataFV) sel := exchangeBackupCreateSelectors(utils.UserFV, utils.CategoryDataFV)
ins, err := utils.UsersMap( ins, err := utils.UsersMap(ctx, *acct, fault.New(true))
ctx,
*acct,
utils.Control(),
r.Counter(),
fault.New(true))
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 users")) return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 users"))
} }
@ -171,10 +173,10 @@ func createExchangeCmd(cmd *cobra.Command, args []string) error {
selectorSet = append(selectorSet, discSel.Selector) selectorSet = append(selectorSet, discSel.Selector)
} }
return genericCreateCommand( return runBackups(
ctx, ctx,
r, r,
"Exchange", "Exchange", "user",
selectorSet, selectorSet,
ins) ins)
} }
@ -233,7 +235,7 @@ func exchangeListCmd() *cobra.Command {
// lists the history of backup operations // lists the history of backup operations
func listExchangeCmd(cmd *cobra.Command, args []string) error { func listExchangeCmd(cmd *cobra.Command, args []string) error {
return genericListCommand(cmd, flags.BackupIDFV, path.ExchangeService, args) return genericListCommand(cmd, utils.BackupIDFV, path.ExchangeService, args)
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
@ -257,35 +259,71 @@ func detailsExchangeCmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
if flags.RunModeFV == flags.RunModeFlagTest {
return nil
}
return runDetailsExchangeCmd(cmd)
}
func runDetailsExchangeCmd(cmd *cobra.Command) error {
ctx := cmd.Context() ctx := cmd.Context()
opts := utils.MakeExchangeOpts(cmd) opts := utils.MakeExchangeOpts(cmd)
sel := utils.IncludeExchangeRestoreDataSelectors(opts) r, _, _, err := utils.GetAccountAndConnect(ctx)
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
utils.FilterExchangeRestoreInfoSelectors(sel, opts)
ds, err := genericDetailsCommand(cmd, flags.BackupIDFV, sel.Selector)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
} }
if len(ds.Entries) > 0 { defer utils.CloseRepo(ctx, r)
ds.PrintEntries(ctx)
} else { ctrlOpts := options.Control()
Info(ctx, selectors.ErrorNoMatchingItems)
ds, err := runDetailsExchangeCmd(ctx, r, utils.BackupIDFV, opts, ctrlOpts.SkipReduce)
if err != nil {
return Only(ctx, err)
} }
if len(ds.Entries) == 0 {
Info(ctx, selectors.ErrorNoMatchingItems)
return nil
}
ds.PrintEntries(ctx)
return nil return nil
} }
// runDetailsExchangeCmd actually performs the lookup in backup details.
// the fault.Errors return is always non-nil. Callers should check if
// errs.Failure() == nil.
func runDetailsExchangeCmd(
ctx context.Context,
r repository.BackupGetter,
backupID string,
opts utils.ExchangeOpts,
skipReduce bool,
) (*details.Details, error) {
if err := utils.ValidateExchangeRestoreFlags(backupID, opts); err != nil {
return nil, err
}
ctx = clues.Add(ctx, "backup_id", backupID)
d, _, errs := r.GetBackupDetails(ctx, backupID)
// TODO: log/track recoverable errors
if errs.Failure() != nil {
if errors.Is(errs.Failure(), data.ErrNotFound) {
return nil, clues.New("No backup exists with the id " + backupID)
}
return nil, clues.Wrap(errs.Failure(), "Failed to get backup details in the repository")
}
ctx = clues.Add(ctx, "details_entries", len(d.Entries))
if !skipReduce {
sel := utils.IncludeExchangeRestoreDataSelectors(opts)
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
utils.FilterExchangeRestoreInfoSelectors(sel, opts)
d = sel.Reduce(ctx, d, errs)
}
return d, nil
}
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
// backup delete // backup delete
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
@ -302,15 +340,5 @@ func exchangeDeleteCmd() *cobra.Command {
// deletes an exchange service backup. // deletes an exchange service backup.
func deleteExchangeCmd(cmd *cobra.Command, args []string) error { func deleteExchangeCmd(cmd *cobra.Command, args []string) error {
var backupIDValue []string return genericDeleteCommand(cmd, utils.BackupIDFV, "Exchange", args)
if len(flags.BackupIDsFV) > 0 {
backupIDValue = flags.BackupIDsFV
} else if len(flags.BackupIDFV) > 0 {
backupIDValue = append(backupIDValue, flags.BackupIDFV)
} else {
return clues.New("either --backup or --backups flag is required")
}
return genericDeleteCommand(cmd, path.ExchangeService, "Exchange", backupIDValue, args)
} }

View File

@ -1,30 +1,32 @@
package backup_test package backup_test
import ( import (
"context"
"fmt" "fmt"
"strings" "strings"
"testing" "testing"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/m365/exchange"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/storage"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
) )
var ( var (
@ -34,19 +36,25 @@ var (
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// tests that depend on no backups existing // tests with no backups
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
type NoBackupExchangeE2ESuite struct { type NoBackupExchangeE2ESuite struct {
tester.Suite tester.Suite
dpnd dependencies acct account.Account
m365 its.M365IntgTestSetup st storage.Storage
vpr *viper.Viper
cfgFP string
repo repository.Repository
m365UserID string
recorder strings.Builder
} }
func TestNoBackupExchangeE2ESuite(t *testing.T) { func TestNoBackupExchangeE2ESuite(t *testing.T) {
suite.Run(t, &BackupExchangeE2ESuite{Suite: tester.NewE2ESuite( suite.Run(t, &NoBackupExchangeE2ESuite{Suite: tester.NewE2ESuite(
t, t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})}) [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
)})
} }
func (suite *NoBackupExchangeE2ESuite) SetupSuite() { func (suite *NoBackupExchangeE2ESuite) SetupSuite() {
@ -55,25 +63,32 @@ func (suite *NoBackupExchangeE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
suite.m365 = its.GetM365(t) acct, st, repo, vpr, recorder, cfgFilePath := prepM365Test(t, ctx)
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
suite.acct = acct
suite.st = st
suite.repo = repo
suite.vpr = vpr
suite.recorder = recorder
suite.cfgFP = cfgFilePath
suite.m365UserID = tester.M365UserID(t)
} }
func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_noBackups() { func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_empty() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
defer flush() defer flush()
suite.dpnd.recorder.Reset() suite.recorder.Reset()
cmd := cliTD.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "list", "exchange", "backup", "list", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", suite.cfgFP)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder) cmd.SetErr(&suite.recorder)
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
@ -81,7 +96,7 @@ func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_noBackups() {
err := cmd.ExecuteContext(ctx) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
result := suite.dpnd.recorder.String() result := suite.recorder.String()
// as an offhand check: the result should contain the m365 user id // as an offhand check: the result should contain the m365 user id
assert.True(t, strings.HasSuffix(result, "No backups available\n")) assert.True(t, strings.HasSuffix(result, "No backups available\n"))
@ -93,14 +108,19 @@ func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_noBackups() {
type BackupExchangeE2ESuite struct { type BackupExchangeE2ESuite struct {
tester.Suite tester.Suite
dpnd dependencies acct account.Account
m365 its.M365IntgTestSetup st storage.Storage
vpr *viper.Viper
cfgFP string
repo repository.Repository
m365UserID string
} }
func TestBackupExchangeE2ESuite(t *testing.T) { func TestBackupExchangeE2ESuite(t *testing.T) {
suite.Run(t, &BackupExchangeE2ESuite{Suite: tester.NewE2ESuite( suite.Run(t, &BackupExchangeE2ESuite{Suite: tester.NewE2ESuite(
t, t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})}) [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
)})
} }
func (suite *BackupExchangeE2ESuite) SetupSuite() { func (suite *BackupExchangeE2ESuite) SetupSuite() {
@ -109,39 +129,40 @@ func (suite *BackupExchangeE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
suite.m365 = its.GetM365(t) acct, st, repo, vpr, _, cfgFilePath := prepM365Test(t, ctx)
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
suite.acct = acct
suite.st = st
suite.repo = repo
suite.vpr = vpr
suite.cfgFP = cfgFilePath
suite.m365UserID = tester.M365UserID(t)
} }
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_email() { func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_email() {
runExchangeBackupCategoryTest(suite, email) runExchangeBackupCategoryTest(suite, "email")
} }
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_contacts() { func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_contacts() {
runExchangeBackupCategoryTest(suite, contacts) runExchangeBackupCategoryTest(suite, "contacts")
} }
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_events() { func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_events() {
runExchangeBackupCategoryTest(suite, events) runExchangeBackupCategoryTest(suite, "events")
} }
func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.CategoryType) { func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category string) {
recorder := strings.Builder{} recorder := strings.Builder{}
recorder.Reset() recorder.Reset()
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
defer flush() defer flush()
cmd, ctx := buildExchangeBackupCmd( cmd, ctx := buildExchangeBackupCmd(ctx, suite.cfgFP, suite.m365UserID, category, &recorder)
ctx,
suite.dpnd.configFilePath,
suite.m365.User.ID,
category.String(),
&recorder)
// run the command // run the command
err := cmd.ExecuteContext(ctx) err := cmd.ExecuteContext(ctx)
@ -150,25 +171,22 @@ func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.
result := recorder.String() result := recorder.String()
t.Log("backup results", result) t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email. // as an offhand check: the result should contain the m365 user id
assert.Contains( assert.Contains(t, result, suite.m365UserID)
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
} }
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_ServiceNotEnabled_email() { func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_ServiceNotEnabled_email() {
runExchangeBackupServiceNotEnabledTest(suite, email) runExchangeBackupServiceNotEnabledTest(suite, "email")
} }
func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, category path.CategoryType) { func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, category string) {
recorder := strings.Builder{} recorder := strings.Builder{}
recorder.Reset() recorder.Reset()
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
defer flush() defer flush()
@ -176,9 +194,9 @@ func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, categ
cmd, ctx := buildExchangeBackupCmd( cmd, ctx := buildExchangeBackupCmd(
ctx, ctx,
suite.dpnd.configFilePath, suite.cfgFP,
fmt.Sprintf("%s,%s", tconfig.UnlicensedM365UserID(suite.T()), suite.m365.User.ID), fmt.Sprintf("%s,%s", tester.UnlicensedM365UserID(suite.T()), suite.m365UserID),
category.String(), category,
&recorder) &recorder)
err := cmd.ExecuteContext(ctx) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -186,42 +204,34 @@ func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, categ
result := recorder.String() result := recorder.String()
t.Log("backup results", result) t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email. // as an offhand check: the result should contain the m365 user id
assert.Contains( assert.Contains(t, result, suite.m365UserID)
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
} }
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_email() { func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_email() {
runExchangeBackupUserNotFoundTest(suite, email) runExchangeBackupUserNotFoundTest(suite, "email")
} }
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_contacts() { func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_contacts() {
runExchangeBackupUserNotFoundTest(suite, contacts) runExchangeBackupUserNotFoundTest(suite, "contacts")
} }
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_events() { func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_events() {
runExchangeBackupUserNotFoundTest(suite, events) runExchangeBackupUserNotFoundTest(suite, "events")
} }
func runExchangeBackupUserNotFoundTest(suite *BackupExchangeE2ESuite, category path.CategoryType) { func runExchangeBackupUserNotFoundTest(suite *BackupExchangeE2ESuite, category string) {
recorder := strings.Builder{} recorder := strings.Builder{}
recorder.Reset() recorder.Reset()
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
defer flush() defer flush()
cmd, ctx := buildExchangeBackupCmd( cmd, ctx := buildExchangeBackupCmd(ctx, suite.cfgFP, "foo@not-there.com", category, &recorder)
ctx,
suite.dpnd.configFilePath,
"foo@not-there.com",
category.String(),
&recorder)
// run the command // run the command
err := cmd.ExecuteContext(ctx) err := cmd.ExecuteContext(ctx)
@ -229,8 +239,7 @@ func runExchangeBackupUserNotFoundTest(suite *BackupExchangeE2ESuite, category p
assert.Contains( assert.Contains(
t, t,
err.Error(), err.Error(),
"not found", "not found in tenant", "error missing user not found")
"error missing user not found")
assert.NotContains(t, err.Error(), "runtime error", "panic happened") assert.NotContains(t, err.Error(), "runtime error", "panic happened")
t.Logf("backup error message: %s", err.Error()) t.Logf("backup error message: %s", err.Error())
@ -239,105 +248,27 @@ func runExchangeBackupUserNotFoundTest(suite *BackupExchangeE2ESuite, category p
t.Log("backup results", result) t.Log("backup results", result)
} }
func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAzureClientIDFlag() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "exchange",
"--user", suite.m365.User.ID,
"--azure-client-id", "invalid-value")
cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_fromConfigFile() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "exchange",
"--user", suite.m365.User.ID,
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.dpnd.recorder.String()
t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email.
assert.Contains(
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
}
// AWS flags
func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAWSFlags() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "exchange",
"--user", suite.m365.User.ID,
"--aws-access-key", "invalid-value",
"--aws-secret-access-key", "some-invalid-value")
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
// since invalid aws creds are explicitly set, should see a failure
require.Error(t, err, clues.ToCore(err))
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// tests prepared with a previous backup // tests prepared with a previous backup
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
type PreparedBackupExchangeE2ESuite struct { type PreparedBackupExchangeE2ESuite struct {
tester.Suite tester.Suite
dpnd dependencies acct account.Account
backupOps map[path.CategoryType]string st storage.Storage
m365 its.M365IntgTestSetup vpr *viper.Viper
cfgFP string
repo repository.Repository
m365UserID string
backupOps map[path.CategoryType]string
recorder strings.Builder
} }
func TestPreparedBackupExchangeE2ESuite(t *testing.T) { func TestPreparedBackupExchangeE2ESuite(t *testing.T) {
suite.Run(t, &PreparedBackupExchangeE2ESuite{ suite.Run(t, &PreparedBackupExchangeE2ESuite{Suite: tester.NewE2ESuite(
Suite: tester.NewE2ESuite( t,
t, [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}), )})
})
} }
func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() { func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
@ -346,13 +277,20 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
suite.m365 = its.GetM365(t) acct, st, repo, vpr, recorder, cfgFilePath := prepM365Test(t, ctx)
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
suite.acct = acct
suite.st = st
suite.repo = repo
suite.vpr = vpr
suite.recorder = recorder
suite.cfgFP = cfgFilePath
suite.m365UserID = tester.M365UserID(t)
suite.backupOps = make(map[path.CategoryType]string) suite.backupOps = make(map[path.CategoryType]string)
var ( var (
users = []string{suite.m365.User.ID} users = []string{suite.m365UserID}
ins = idname.NewCache(map[string]string{suite.m365.User.ID: suite.m365.User.ID}) ins = idname.NewCache(map[string]string{suite.m365UserID: suite.m365UserID})
) )
for _, set := range []path.CategoryType{email, contacts, events} { for _, set := range []path.CategoryType{email, contacts, events} {
@ -363,18 +301,18 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
switch set { switch set {
case email: case email:
scopes = sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()) scopes = sel.MailFolders([]string{exchange.DefaultMailFolder}, selectors.PrefixMatch())
case contacts: case contacts:
scopes = sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()) scopes = sel.ContactFolders([]string{exchange.DefaultContactFolder}, selectors.PrefixMatch())
case events: case events:
scopes = sel.EventCalendars([]string{api.DefaultCalendar}, selectors.PrefixMatch()) scopes = sel.EventCalendars([]string{exchange.DefaultCalendar}, selectors.PrefixMatch())
} }
sel.Include(scopes) sel.Include(scopes)
bop, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins) bop, err := suite.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
err = bop.Run(ctx) err = bop.Run(ctx)
@ -383,11 +321,11 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
bIDs := string(bop.Results.BackupID) bIDs := string(bop.Results.BackupID)
// sanity check, ensure we can find the backup and its details immediately // sanity check, ensure we can find the backup and its details immediately
b, err := suite.dpnd.repo.Backup(ctx, string(bop.Results.BackupID)) b, err := suite.repo.Backup(ctx, string(bop.Results.BackupID))
require.NoError(t, err, "retrieving recent backup by ID") require.NoError(t, err, "retrieving recent backup by ID")
require.Equal(t, bIDs, string(b.ID), "repo backup matches results id") require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")
_, b, errs := suite.dpnd.repo.GetBackupDetails(ctx, bIDs) _, b, errs := suite.repo.GetBackupDetails(ctx, bIDs)
require.NoError(t, errs.Failure(), "retrieving recent backup details by ID") require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID") require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
require.Equal(t, bIDs, string(b.ID), "repo details matches results id") require.Equal(t, bIDs, string(b.ID), "repo details matches results id")
@ -409,20 +347,20 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeListCmd_events() {
} }
func runExchangeListCmdTest(suite *PreparedBackupExchangeE2ESuite, category path.CategoryType) { func runExchangeListCmdTest(suite *PreparedBackupExchangeE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset() suite.recorder.Reset()
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
defer flush() defer flush()
cmd := cliTD.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "list", "exchange", "backup", "list", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", suite.cfgFP)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder) cmd.SetOut(&suite.recorder)
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
@ -431,7 +369,7 @@ func runExchangeListCmdTest(suite *PreparedBackupExchangeE2ESuite, category path
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
// compare the output // compare the output
result := suite.dpnd.recorder.String() result := suite.recorder.String()
assert.Contains(t, result, suite.backupOps[category]) assert.Contains(t, result, suite.backupOps[category])
} }
@ -448,24 +386,24 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeListCmd_singleID_events
} }
func runExchangeListSingleCmdTest(suite *PreparedBackupExchangeE2ESuite, category path.CategoryType) { func runExchangeListSingleCmdTest(suite *PreparedBackupExchangeE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset() suite.recorder.Reset()
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
defer flush() defer flush()
bID := suite.backupOps[category] bID := suite.backupOps[category]
cmd := cliTD.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "list", "exchange", "backup", "list", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.cfgFP,
"--backup", string(bID)) "--backup", string(bID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder) cmd.SetOut(&suite.recorder)
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
@ -474,7 +412,7 @@ func runExchangeListSingleCmdTest(suite *PreparedBackupExchangeE2ESuite, categor
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
// compare the output // compare the output
result := suite.dpnd.recorder.String() result := suite.recorder.String()
assert.Contains(t, result, bID) assert.Contains(t, result, bID)
} }
@ -482,13 +420,13 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeListCmd_badID() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
defer flush() defer flush()
cmd := cliTD.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "list", "exchange", "backup", "list", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.cfgFP,
"--backup", "smarfs") "--backup", "smarfs")
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -512,28 +450,28 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeDetailsCmd_events() {
} }
func runExchangeDetailsCmdTest(suite *PreparedBackupExchangeE2ESuite, category path.CategoryType) { func runExchangeDetailsCmdTest(suite *PreparedBackupExchangeE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset() suite.recorder.Reset()
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
defer flush() defer flush()
bID := suite.backupOps[category] bID := suite.backupOps[category]
// fetch the details from the repo first // fetch the details from the repo first
deets, _, errs := suite.dpnd.repo.GetBackupDetails(ctx, string(bID)) deets, _, errs := suite.repo.GetBackupDetails(ctx, string(bID))
require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure())) require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
require.Empty(t, errs.Recovered()) require.Empty(t, errs.Recovered())
cmd := cliTD.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "details", "exchange", "backup", "details", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.cfgFP,
"--"+flags.BackupFN, string(bID)) "--"+utils.BackupFN, string(bID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder) cmd.SetOut(&suite.recorder)
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
@ -542,7 +480,7 @@ func runExchangeDetailsCmdTest(suite *PreparedBackupExchangeE2ESuite, category p
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
// compare the output // compare the output
result := suite.dpnd.recorder.String() result := suite.recorder.String()
i := 0 i := 0
foundFolders := 0 foundFolders := 0
@ -572,15 +510,20 @@ func runExchangeDetailsCmdTest(suite *PreparedBackupExchangeE2ESuite, category p
type BackupDeleteExchangeE2ESuite struct { type BackupDeleteExchangeE2ESuite struct {
tester.Suite tester.Suite
dpnd dependencies acct account.Account
backupOps [3]operations.BackupOperation st storage.Storage
vpr *viper.Viper
cfgFP string
repo repository.Repository
backupOp operations.BackupOperation
} }
func TestBackupDeleteExchangeE2ESuite(t *testing.T) { func TestBackupDeleteExchangeE2ESuite(t *testing.T) {
suite.Run(t, &BackupDeleteExchangeE2ESuite{ suite.Run(t, &BackupDeleteExchangeE2ESuite{
Suite: tester.NewE2ESuite( Suite: tester.NewE2ESuite(
t, t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}), [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
),
}) })
} }
@ -590,41 +533,42 @@ func (suite *BackupDeleteExchangeE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService) acct, st, repo, vpr, _, cfgFilePath := prepM365Test(t, ctx)
m365UserID := tconfig.M365UserID(t) suite.acct = acct
suite.st = st
suite.repo = repo
suite.vpr = vpr
suite.cfgFP = cfgFilePath
m365UserID := tester.M365UserID(t)
users := []string{m365UserID} users := []string{m365UserID}
// some tests require an existing backup // some tests require an existing backup
sel := selectors.NewExchangeBackup(users) sel := selectors.NewExchangeBackup(users)
sel.Include(sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch())) sel.Include(sel.MailFolders([]string{exchange.DefaultMailFolder}, selectors.PrefixMatch()))
for i := 0; i < cap(suite.backupOps); i++ { backupOp, err := suite.repo.NewBackup(ctx, sel.Selector)
backupOp, err := suite.dpnd.repo.NewBackup(ctx, sel.Selector) require.NoError(t, err, clues.ToCore(err))
require.NoError(t, err, clues.ToCore(err))
suite.backupOps[i] = backupOp suite.backupOp = backupOp
err = suite.backupOps[i].Run(ctx) err = suite.backupOp.Run(ctx)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
}
} }
func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd() { func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
defer flush() defer flush()
cmd := cliTD.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "delete", "exchange", "backup", "delete", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.cfgFP,
"--"+flags.BackupIDsFN, "--"+utils.BackupFN, string(suite.backupOp.Results.BackupID))
fmt.Sprintf("%s,%s",
string(suite.backupOps[0].Results.BackupID),
string(suite.backupOps[1].Results.BackupID)))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
@ -632,50 +576,10 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd() {
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
// a follow-up details call should fail, due to the backup ID being deleted // a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd( cmd = tester.StubRootCmd(
"backup", "details", "exchange", "backup", "details", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.cfgFP,
"--backup", string(suite.backupOps[0].Results.BackupID)) "--backup", string(suite.backupOp.Results.BackupID))
cli.BuildCommandTree(cmd)
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
// a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd(
"backup", "details", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--backup", string(suite.backupOps[1].Results.BackupID))
cli.BuildCommandTree(cmd)
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_SingleID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--"+flags.BackupFN,
string(suite.backupOps[2].Results.BackupID))
cli.BuildCommandTree(cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd(
"backup", "details", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--backup", string(suite.backupOps[2].Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
err = cmd.ExecuteContext(ctx) err = cmd.ExecuteContext(ctx)
@ -686,14 +590,14 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_UnknownID
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
defer flush() defer flush()
cmd := cliTD.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "delete", "exchange", "backup", "delete", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.cfgFP,
"--"+flags.BackupIDsFN, uuid.NewString()) "--"+utils.BackupFN, uuid.NewString())
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup // unknown backupIDs should error since the modelStore can't find the backup
@ -701,20 +605,22 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_UnknownID
require.Error(t, err, clues.ToCore(err)) require.Error(t, err, clues.ToCore(err))
} }
func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_NoBackupID() { // ---------------------------------------------------------------------------
t := suite.T() // helpers
// ---------------------------------------------------------------------------
ctx, flush := tester.NewContext(t) func buildExchangeBackupCmd(
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx context.Context,
configFile, user, category string,
defer flush() recorder *strings.Builder,
) (*cobra.Command, context.Context) {
cmd := cliTD.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "delete", "exchange", "backup", "create", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", configFile,
"--"+utils.UserFN, user,
"--"+utils.CategoryDataFN, category)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetOut(recorder)
// empty backupIDs should error since no data provided return cmd, print.SetRootCmd(ctx, cmd)
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
} }

View File

@ -1,7 +1,7 @@
package backup package backup
import ( import (
"strconv" "fmt"
"testing" "testing"
"github.com/alcionai/clues" "github.com/alcionai/clues"
@ -10,12 +10,12 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/options"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/internal/version"
dtd "github.com/alcionai/corso/src/pkg/backup/details/testdata"
) )
type ExchangeUnitSuite struct { type ExchangeUnitSuite struct {
@ -34,35 +34,72 @@ func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
use string use string
expectUse string expectUse string
expectShort string expectShort string
flags []string
expectRunE func(*cobra.Command, []string) error expectRunE func(*cobra.Command, []string) error
}{ }{
{ {
name: "create exchange", "create exchange",
use: createCommand, createCommand,
expectUse: expectUse + " " + exchangeServiceCommandCreateUseSuffix, expectUse + " " + exchangeServiceCommandCreateUseSuffix,
expectShort: exchangeCreateCmd().Short, exchangeCreateCmd().Short,
expectRunE: createExchangeCmd, []string{
utils.UserFN,
utils.CategoryDataFN,
options.DisableIncrementalsFN,
options.DisableDeltaFN,
options.FailFastFN,
options.FetchParallelismFN,
options.SkipReduceFN,
options.NoStatsFN,
},
createExchangeCmd,
}, },
{ {
name: "list exchange", "list exchange",
use: listCommand, listCommand,
expectUse: expectUse, expectUse,
expectShort: exchangeListCmd().Short, exchangeListCmd().Short,
expectRunE: listExchangeCmd, []string{
utils.BackupFN,
failedItemsFN,
skippedItemsFN,
recoveredErrorsFN,
},
listExchangeCmd,
}, },
{ {
name: "details exchange", "details exchange",
use: detailsCommand, detailsCommand,
expectUse: expectUse + " " + exchangeServiceCommandDetailsUseSuffix, expectUse + " " + exchangeServiceCommandDetailsUseSuffix,
expectShort: exchangeDetailsCmd().Short, exchangeDetailsCmd().Short,
expectRunE: detailsExchangeCmd, []string{
utils.BackupFN,
utils.ContactFN,
utils.ContactFolderFN,
utils.ContactNameFN,
utils.EmailFN,
utils.EmailFolderFN,
utils.EmailReceivedAfterFN,
utils.EmailReceivedBeforeFN,
utils.EmailSenderFN,
utils.EmailSubjectFN,
utils.EventFN,
utils.EventCalendarFN,
utils.EventOrganizerFN,
utils.EventRecursFN,
utils.EventStartsAfterFN,
utils.EventStartsBeforeFN,
utils.EventSubjectFN,
},
detailsExchangeCmd,
}, },
{ {
name: "delete exchange", "delete exchange",
use: deleteCommand, deleteCommand,
expectUse: expectUse + " " + exchangeServiceCommandDeleteUseSuffix, expectUse + " " + exchangeServiceCommandDeleteUseSuffix,
expectShort: exchangeDeleteCmd().Short, exchangeDeleteCmd().Short,
expectRunE: deleteExchangeCmd, []string{utils.BackupFN},
deleteExchangeCmd,
}, },
} }
for _, test := range table { for _, test := range table {
@ -85,147 +122,6 @@ func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
} }
} }
// TestBackupCreateFlags verifies that `backup create exchange` registers and
// parses its complete flag set: mailbox and category selectors, fetch
// parallelism, delta page size, the boolean toggles (disable-delta, immutable
// IDs, sliding-window limiter), plus the shared generic-backup, provider, and
// storage flags. It then checks that those flag values surface correctly in
// the exchange opts, control.Options, and the parsed backup options.
func (suite *ExchangeUnitSuite) TestBackupCreateFlags() {
	t := suite.T()

	// Build the create-command tree with every flag attached, then execute it
	// in test run-mode so parsing happens without performing a real backup.
	cmd := cliTD.SetUpCmdHasFlags(
		t,
		&cobra.Command{Use: createCommand},
		addExchangeCommands,
		[]cliTD.UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		flagsTD.WithFlags(
			exchangeServiceCommand,
			[]string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.MailBoxFN, flagsTD.FlgInputs(flagsTD.MailboxInput),
				"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.ExchangeCategoryDataInput),
				"--" + flags.FetchParallelismFN, flagsTD.FetchParallelism,
				"--" + flags.DeltaPageSizeFN, flagsTD.DeltaPageSize,

				// bool flags
				"--" + flags.DisableDeltaFN,
				"--" + flags.EnableImmutableIDFN,
				"--" + flags.DisableSlidingWindowLimiterFN,
			},
			flagsTD.PreparedGenericBackupFlags(),
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags()))

	opts := utils.MakeExchangeOpts(cmd)
	co := utils.Control()
	backupOpts := utils.ParseBackupOptions()

	// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
	// restore flags are switched over too and we no longer parse flags beyond
	// connection info into control.Options.
	assert.Equal(t, flagsTD.FetchParallelism, strconv.Itoa(backupOpts.Parallelism.ItemFetch))
	assert.Equal(t, flagsTD.DeltaPageSize, strconv.Itoa(int(backupOpts.M365.DeltaPageSize)))
	assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
	assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
	assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)
	assert.True(t, backupOpts.M365.DisableDeltaEndpoint)
	assert.True(t, backupOpts.M365.ExchangeImmutableIDs)
	assert.True(t, backupOpts.ServiceRateLimiter.DisableSlidingWindowLimiter)

	// The same values must also appear in the legacy control.Options view.
	assert.Equal(t, flagsTD.FetchParallelism, strconv.Itoa(co.Parallelism.ItemFetch))
	assert.Equal(t, flagsTD.DeltaPageSize, strconv.Itoa(int(co.DeltaPageSize)))
	assert.Equal(t, control.FailFast, co.FailureHandling)
	assert.True(t, co.ToggleFeatures.DisableIncrementals)
	assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
	assert.True(t, co.ToggleFeatures.DisableDelta)
	assert.True(t, co.ToggleFeatures.ExchangeImmutableIDs)
	assert.True(t, co.ToggleFeatures.DisableSlidingWindowLimiter)
	assert.ElementsMatch(t, flagsTD.MailboxInput, opts.Users)
	flagsTD.AssertGenericBackupFlags(t, cmd)
	flagsTD.AssertProviderFlags(t, cmd)
	flagsTD.AssertStorageFlags(t, cmd)
}
// TestBackupListFlags verifies that `backup list exchange` accepts the
// backup-ID flag together with the shared backup-list, provider, and storage
// flags, and that the parsed backup ID lands in flags.BackupIDFV.
func (suite *ExchangeUnitSuite) TestBackupListFlags() {
	t := suite.T()

	// Build and run the list command in test run-mode so only flag parsing
	// is exercised.
	cmd := cliTD.SetUpCmdHasFlags(
		t,
		&cobra.Command{Use: listCommand},
		addExchangeCommands,
		[]cliTD.UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		flagsTD.WithFlags(
			exchangeServiceCommand,
			[]string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, flagsTD.BackupInput,
			},
			flagsTD.PreparedBackupListFlags(),
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags()))

	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
	flagsTD.AssertBackupListFlags(t, cmd)
	flagsTD.AssertProviderFlags(t, cmd)
	flagsTD.AssertStorageFlags(t, cmd)
}
// TestBackupDetailsFlags verifies that `backup details exchange` accepts the
// backup-ID and skip-reduce flags (plus the shared provider/storage flags),
// and that both values are reflected in the parsed flag state and
// control.Options.
func (suite *ExchangeUnitSuite) TestBackupDetailsFlags() {
	t := suite.T()

	// Build and run the details command in test run-mode so only flag parsing
	// is exercised.
	cmd := cliTD.SetUpCmdHasFlags(
		t,
		&cobra.Command{Use: detailsCommand},
		addExchangeCommands,
		[]cliTD.UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		flagsTD.WithFlags(
			exchangeServiceCommand,
			[]string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, flagsTD.BackupInput,
				"--" + flags.SkipReduceFN,
			},
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags()))

	co := utils.Control()

	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
	assert.True(t, co.SkipReduce)
	flagsTD.AssertProviderFlags(t, cmd)
	flagsTD.AssertStorageFlags(t, cmd)
}
// TestBackupDeleteFlags verifies that `backup delete exchange` accepts the
// backup-ID flag together with the shared provider and storage flags, and
// that the parsed backup ID lands in flags.BackupIDFV.
func (suite *ExchangeUnitSuite) TestBackupDeleteFlags() {
	t := suite.T()

	// Build and run the delete command in test run-mode so only flag parsing
	// is exercised.
	cmd := cliTD.SetUpCmdHasFlags(
		t,
		&cobra.Command{Use: deleteCommand},
		addExchangeCommands,
		[]cliTD.UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		flagsTD.WithFlags(
			exchangeServiceCommand,
			[]string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, flagsTD.BackupInput,
			},
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags()))

	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
	flagsTD.AssertProviderFlags(t, cmd)
	flagsTD.AssertStorageFlags(t, cmd)
}
func (suite *ExchangeUnitSuite) TestValidateBackupCreateFlags() { func (suite *ExchangeUnitSuite) TestValidateBackupCreateFlags() {
table := []struct { table := []struct {
name string name string
@ -275,7 +171,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
}, },
{ {
name: "any users, no data", name: "any users, no data",
user: []string{flags.Wildcard}, user: []string{utils.Wildcard},
expectIncludeLen: 3, expectIncludeLen: 3,
}, },
{ {
@ -285,7 +181,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
}, },
{ {
name: "any users, contacts", name: "any users, contacts",
user: []string{flags.Wildcard}, user: []string{utils.Wildcard},
data: []string{dataContacts}, data: []string{dataContacts},
expectIncludeLen: 1, expectIncludeLen: 1,
}, },
@ -297,7 +193,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
}, },
{ {
name: "any users, email", name: "any users, email",
user: []string{flags.Wildcard}, user: []string{utils.Wildcard},
data: []string{dataEmail}, data: []string{dataEmail},
expectIncludeLen: 1, expectIncludeLen: 1,
}, },
@ -309,7 +205,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
}, },
{ {
name: "any users, events", name: "any users, events",
user: []string{flags.Wildcard}, user: []string{utils.Wildcard},
data: []string{dataEvents}, data: []string{dataEvents},
expectIncludeLen: 1, expectIncludeLen: 1,
}, },
@ -321,7 +217,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
}, },
{ {
name: "any users, contacts + email", name: "any users, contacts + email",
user: []string{flags.Wildcard}, user: []string{utils.Wildcard},
data: []string{dataContacts, dataEmail}, data: []string{dataContacts, dataEmail},
expectIncludeLen: 2, expectIncludeLen: 2,
}, },
@ -333,7 +229,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
}, },
{ {
name: "any users, email + events", name: "any users, email + events",
user: []string{flags.Wildcard}, user: []string{utils.Wildcard},
data: []string{dataEmail, dataEvents}, data: []string{dataEmail, dataEvents},
expectIncludeLen: 2, expectIncludeLen: 2,
}, },
@ -345,7 +241,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
}, },
{ {
name: "any users, events + contacts", name: "any users, events + contacts",
user: []string{flags.Wildcard}, user: []string{utils.Wildcard},
data: []string{dataEvents, dataContacts}, data: []string{dataEvents, dataContacts},
expectIncludeLen: 2, expectIncludeLen: 2,
}, },
@ -377,3 +273,51 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupCreateSelectors() {
}) })
} }
} }
// TestExchangeBackupDetailsSelectors runs every prepared exchange
// option-to-details lookup against each supported backup version (0 through
// version.Backup), asserting that runDetailsExchangeCmd returns exactly the
// expected detail entries for that version.
func (suite *ExchangeUnitSuite) TestExchangeBackupDetailsSelectors() {
	for v := 0; v <= version.Backup; v++ {
		suite.Run(fmt.Sprintf("version%d", v), func() {
			for _, test := range testdata.ExchangeOptionDetailLookups {
				suite.Run(test.Name, func() {
					t := suite.T()

					ctx, flush := tester.NewContext(t)
					defer flush()

					// backup getter stubbed with a details set generated for
					// this specific backup version.
					bg := testdata.VersionedBackupGetter{
						Details: dtd.GetDetailsSetForVersion(t, v),
					}

					output, err := runDetailsExchangeCmd(
						ctx,
						bg,
						"backup-ID",
						test.Opts(t, v),
						false)
					assert.NoError(t, err, clues.ToCore(err))
					assert.ElementsMatch(t, test.Expected(t, v), output.Entries)
				})
			}
		})
	}
}
// TestExchangeBackupDetailsSelectorsBadFormats runs each malformed-option
// fixture through runDetailsExchangeCmd at the current backup version and
// asserts the command errors out with no detail output.
func (suite *ExchangeUnitSuite) TestExchangeBackupDetailsSelectorsBadFormats() {
	for _, test := range testdata.BadExchangeOptionsFormats {
		suite.Run(test.Name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			output, err := runDetailsExchangeCmd(
				ctx,
				test.BackupGetter,
				"backup-ID",
				test.Opts(t, version.Backup),
				false)
			assert.Error(t, err, clues.ToCore(err))
			assert.Empty(t, output)
		})
	}
}

View File

@ -1,329 +0,0 @@
package backup
import (
"context"
"fmt"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365"
)
// ------------------------------------------------------------------------------------------------
// setup and globals
// ------------------------------------------------------------------------------------------------
// Subcommand names and usage-line suffixes for the Groups/Teams backup
// commands. The suffixes are appended to cobra's generated Use string so
// help output shows the flags each subcommand requires.
const (
	groupsServiceCommand = "groups"
	teamsServiceCommand  = "teams"

	groupsServiceCommandCreateUseSuffix  = "--group <groupName> | '" + flags.Wildcard + "'"
	groupsServiceCommandDeleteUseSuffix  = "--backups <backupId>"
	groupsServiceCommandDetailsUseSuffix = "--backup <backupId>"
)
// Example text shown in `--help` for the groups create/delete/details
// subcommands. The raw strings are rendered verbatim by cobra.
const (
	groupsServiceCommandCreateExamples = `# Backup all Groups and Teams data for the Marketing group
corso backup create groups --group Marketing

# Backup only Teams channel messages
corso backup create groups --group Marketing --data messages

# Backup only group mailbox posts
corso backup create groups --group Marketing --data conversations

# Backup all Groups and Teams data for all groups
corso backup create groups --group '*'`

	groupsServiceCommandDeleteExamples = `# Delete Groups backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
and 1234abcd-12ab-cd34-56de-1234abce
corso backup delete groups --backups 1234abcd-12ab-cd34-56de-1234abcd,1234abcd-12ab-cd34-56de-1234abce`

	groupsServiceCommandDetailsExamples = `# Explore items in Marketing's latest backup (1234abcd...)
corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd

# Explore Marketing messages posted after the start of 2022
corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd \
--last-message-reply-after 2022-01-01T00:00:00

# Explore group mailbox posts with conversation subject "hello world"
corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world"`
)
// called by backup.go to map subcommands to provider-specific handling.
// addGroupsCommands attaches the groups-service subcommand matching
// cmd.Use (create/list/details/delete), sets its usage suffix and example
// text, and registers its flags. Returns the attached subcommand, or nil
// if cmd.Use matched none of the known subcommands.
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
	var c *cobra.Command

	switch cmd.Use {
	case createCommand:
		c, _ = utils.AddCommand(cmd, groupsCreateCmd(), utils.MarkPreviewCommand())

		c.Use = c.Use + " " + groupsServiceCommandCreateUseSuffix
		c.Example = groupsServiceCommandCreateExamples

		// Flags addition ordering should follow the order we want them to appear in help and docs:
		flags.AddGroupFlag(c)
		flags.AddDataFlag(c, []string{flags.DataLibraries, flags.DataMessages, flags.DataConversations}, false)
		flags.AddFetchParallelismFlag(c)
		flags.AddDisableDeltaFlag(c)
		flags.AddGenericBackupFlags(c)
		flags.AddDisableLazyItemReader(c)

	case listCommand:
		c, _ = utils.AddCommand(cmd, groupsListCmd(), utils.MarkPreviewCommand())

		flags.AddBackupIDFlag(c, false)
		flags.AddAllBackupListFlags(c)

	case detailsCommand:
		c, _ = utils.AddCommand(cmd, groupsDetailsCmd(), utils.MarkPreviewCommand())

		c.Use = c.Use + " " + groupsServiceCommandDetailsUseSuffix
		c.Example = groupsServiceCommandDetailsExamples

		flags.AddSkipReduceFlag(c)

		// Flags addition ordering should follow the order we want them to appear in help and docs:
		// More generic (ex: --user) and more frequently used flags take precedence.
		flags.AddBackupIDFlag(c, true)
		flags.AddGroupDetailsAndRestoreFlags(c)
		flags.AddSharePointDetailsAndRestoreFlags(c)

	case deleteCommand:
		c, _ = utils.AddCommand(cmd, groupsDeleteCmd(), utils.MarkPreviewCommand())

		c.Use = c.Use + " " + groupsServiceCommandDeleteUseSuffix
		c.Example = groupsServiceCommandDeleteExamples

		flags.AddMultipleBackupIDsFlag(c, false)
		flags.AddBackupIDFlag(c, false)
	}

	return c
}
// ------------------------------------------------------------------------------------------------
// backup create
// ------------------------------------------------------------------------------------------------
// `corso backup create groups [<flag>...]`
// groupsCreateCmd constructs the groups-service backup-create subcommand.
// The "teams" alias lets callers address the same service under either name.
func groupsCreateCmd() *cobra.Command {
	c := cobra.Command{
		Use:     groupsServiceCommand,
		Aliases: []string{teamsServiceCommand},
		Short:   "Backup M365 Groups & Teams service data",
		RunE:    createGroupsCmd,
		Args:    cobra.NoArgs,
	}

	return &c
}
// processes a groups service backup.
// createGroupsCmd is the RunE for `backup create groups`. It validates the
// group/category flags, connects to the repo, enumerates all M365 groups,
// builds per-owner backup selectors, and runs the generic create command.
// Returns nil without doing work when invoked with no flags (help shown)
// or in test run-mode.
func createGroupsCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	if flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	if err := validateGroupsBackupCreateFlags(flags.GroupFV, flags.CategoryDataFV); err != nil {
		return err
	}

	r, acct, err := utils.AccountConnectAndWriteRepoConfig(
		ctx,
		cmd,
		path.GroupsService)
	if err != nil {
		return Only(ctx, err)
	}

	defer utils.CloseRepo(ctx, r)

	// TODO: log/print recoverable errors
	errs := fault.New(false)

	svcCli, err := m365.NewM365Client(ctx, *acct)
	if err != nil {
		return Only(ctx, clues.Stack(err))
	}

	// map of all group IDs and display names in the tenant; also used to
	// resolve the --group flag values into concrete resource owners.
	ins, err := svcCli.AC.Groups().GetAllIDsAndNames(ctx, errs)
	if err != nil {
		return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 groups"))
	}

	sel := groupsBackupCreateSelectors(ctx, ins, flags.GroupFV, flags.CategoryDataFV)

	// split the combined selector into one selector per resource owner so
	// each group gets its own backup operation.
	selectorSet := []selectors.Selector{}

	for _, discSel := range sel.SplitByResourceOwner(ins.IDs()) {
		selectorSet = append(selectorSet, discSel.Selector)
	}

	return genericCreateCommand(
		ctx,
		r,
		"Group",
		selectorSet,
		ins)
}
// ------------------------------------------------------------------------------------------------
// backup list
// ------------------------------------------------------------------------------------------------
// `corso backup list groups [<flag>...]`
// groupsListCmd constructs the groups-service backup-list subcommand.
func groupsListCmd() *cobra.Command {
	c := cobra.Command{
		Use:   groupsServiceCommand,
		Short: "List the history of M365 Groups service backups",
		RunE:  listGroupsCmd,
		Args:  cobra.NoArgs,
	}

	return &c
}
// lists the history of backup operations
// listGroupsCmd is the RunE for `backup list groups`; it delegates to the
// generic list command, scoped to the groups service and the optional
// --backup ID flag.
func listGroupsCmd(cmd *cobra.Command, args []string) error {
	return genericListCommand(cmd, flags.BackupIDFV, path.GroupsService, args)
}
// ------------------------------------------------------------------------------------------------
// backup details
// ------------------------------------------------------------------------------------------------
// `corso backup details groups [<flag>...]`
// groupsDetailsCmd constructs the `corso backup details groups` subcommand.
func groupsDetailsCmd() *cobra.Command {
	detailsCmd := &cobra.Command{
		Use:   groupsServiceCommand,
		Short: "Shows the details of a M365 Groups service backup",
		RunE:  detailsGroupsCmd,
		Args:  cobra.NoArgs,
	}

	return detailsCmd
}
// detailsGroupsCmd handles `corso backup details groups`.
func detailsGroupsCmd(cmd *cobra.Command, args []string) error {
	// Bail out early when help was shown (no flags given) or when running in
	// flag-test mode; order matters because the first check displays help.
	if utils.HasNoFlagsAndShownHelp(cmd) || flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	return runDetailsGroupsCmd(cmd)
}
// runDetailsGroupsCmd fetches the details of the backup identified by
// flags.BackupIDFV, filtered to entries matching the restore selectors built
// from the command's flags, and prints any matches.
func runDetailsGroupsCmd(cmd *cobra.Command) error {
	ctx := cmd.Context()
	opts := utils.MakeGroupsOpts(cmd)

	sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)
	// match user-provided values against item names rather than item IDs.
	sel.Configure(selectors.Config{OnlyMatchItemNames: true})
	utils.FilterGroupsRestoreInfoSelectors(sel, opts)

	ds, err := genericDetailsCommand(cmd, flags.BackupIDFV, sel.Selector)
	if err != nil {
		return Only(ctx, err)
	}

	if len(ds.Entries) > 0 {
		ds.PrintEntries(ctx)
	} else {
		// no entries matched the selectors; inform rather than error.
		Info(ctx, selectors.ErrorNoMatchingItems)
	}

	return nil
}
// ------------------------------------------------------------------------------------------------
// backup delete
// ------------------------------------------------------------------------------------------------
// `corso backup delete groups [<flag>...]`
// groupsDeleteCmd constructs the `corso backup delete groups` subcommand.
func groupsDeleteCmd() *cobra.Command {
	deleteCmd := &cobra.Command{
		Use:   groupsServiceCommand,
		Short: "Delete backed-up M365 Groups service data",
		RunE:  deleteGroupsCmd,
		Args:  cobra.NoArgs,
	}

	return deleteCmd
}
// deleteGroupsCmd deletes one or more groups service backups. Accepts either
// the plural --backups flag or the singular --backup flag; --backups wins
// when both are set.
func deleteGroupsCmd(cmd *cobra.Command, args []string) error {
	var backupIDs []string

	switch {
	case len(flags.BackupIDsFV) > 0:
		backupIDs = flags.BackupIDsFV
	case len(flags.BackupIDFV) > 0:
		backupIDs = []string{flags.BackupIDFV}
	default:
		return clues.New("either --backup or --backups flag is required")
	}

	return genericDeleteCommand(cmd, path.GroupsService, "Groups", backupIDs, args)
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
// validateGroupsBackupCreateFlags checks that at least one group (or the
// wildcard) was requested and that every --data category is recognized.
func validateGroupsBackupCreateFlags(groups, cats []string) error {
	if len(groups) == 0 {
		return clues.New(
			"requires one or more --" +
				flags.GroupFN + " ids, or the wildcard --" +
				flags.GroupFN + " *")
	}

	// TODO(keepers): release conversations support; once it ships, the error
	// below should also mention flags.DataConversations.
	allowedCats := utils.GroupsAllowedCategories()

	for _, cat := range cats {
		if _, ok := allowedCats[cat]; ok {
			continue
		}

		return clues.New(fmt.Sprintf(
			"%s is an unrecognized data type; only %s and %s are supported",
			cat, flags.DataLibraries, flags.DataMessages))
	}

	return nil
}
// groupsBackupCreateSelectors builds the backup selector for the requested
// groups, expanding the wildcard flag to cover every known group.
func groupsBackupCreateSelectors(
	ctx context.Context,
	ins idname.Cacher,
	group, cats []string,
) *selectors.GroupsBackup {
	if filters.PathContains(group).Compare(flags.Wildcard) {
		return includeAllGroupsWithCategories(ins, cats)
	}

	return utils.AddGroupsCategories(
		selectors.NewGroupsBackup(slices.Clone(group)),
		cats)
}
// includeAllGroupsWithCategories builds a backup selector covering every
// group id known to the cache, scoped to the given data categories.
func includeAllGroupsWithCategories(ins idname.Cacher, categories []string) *selectors.GroupsBackup {
	return utils.AddGroupsCategories(selectors.NewGroupsBackup(ins.IDs()), categories)
}

View File

@ -1,690 +0,0 @@
package backup_test
import (
"context"
"fmt"
"strings"
"testing"
"github.com/alcionai/clues"
"github.com/google/uuid"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)
// ---------------------------------------------------------------------------
// tests that require no existing backups
// ---------------------------------------------------------------------------

// NoBackupGroupsE2ESuite exercises commands against a freshly initialized
// repo that contains no backups.
type NoBackupGroupsE2ESuite struct {
	tester.Suite
	dpnd dependencies          // repo, storage, viper, recorder, config path
	m365 its.M365IntgTestSetup // live M365 tenant handles
}
// TestNoBackupGroupsE2ESuite registers NoBackupGroupsE2ESuite with the test
// runner.
//
// Fix: this previously instantiated &BackupGroupsE2ESuite{}, so the
// NoBackupGroupsE2ESuite tests (including TestGroupsBackupListCmd_noBackups)
// never ran, while BackupGroupsE2ESuite's tests ran a second time.
func TestNoBackupGroupsE2ESuite(t *testing.T) {
	suite.Run(t, &NoBackupGroupsE2ESuite{Suite: tester.NewE2ESuite(
		t,
		[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
// SetupSuite connects to M365 and initializes a fresh, empty test repo.
func (suite *NoBackupGroupsE2ESuite) SetupSuite() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	suite.m365 = its.GetM365(t)
	suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
}

// TestGroupsBackupListCmd_noBackups verifies that listing backups against an
// empty repo reports that none are available.
func (suite *NoBackupGroupsE2ESuite) TestGroupsBackupListCmd_noBackups() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)
	defer flush()

	suite.dpnd.recorder.Reset()

	cmd := cliTD.StubRootCmd(
		"backup", "list", "groups",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
	cli.BuildCommandTree(cmd)
	// capture stderr: the "No backups available" notice is apparently written
	// there rather than stdout. NOTE(review): confirm against the list handler.
	cmd.SetErr(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	result := suite.dpnd.recorder.String()

	// an empty repo should end its output with the "no backups" notice.
	assert.True(t, strings.HasSuffix(result, "No backups available\n"))
}
// ---------------------------------------------------------------------------
// tests with no prior backup
// ---------------------------------------------------------------------------

// BackupGroupsE2ESuite runs `backup create groups` against a live tenant,
// starting from a repo with no pre-existing backups.
type BackupGroupsE2ESuite struct {
	tester.Suite
	dpnd dependencies
	m365 its.M365IntgTestSetup
}

// TestBackupGroupsE2ESuite registers the suite with the test runner.
func TestBackupGroupsE2ESuite(t *testing.T) {
	suite.Run(t, &BackupGroupsE2ESuite{Suite: tester.NewE2ESuite(
		t,
		[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}

// SetupSuite connects to M365 and initializes a fresh test repo.
func (suite *BackupGroupsE2ESuite) SetupSuite() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	suite.m365 = its.GetM365(t)
	suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
}
// Per-category happy-path backup creation tests.
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() {
	runGroupsBackupCategoryTest(suite, flags.DataMessages)
}

func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_conversations() {
	// skip
	suite.T().Skip("CorsoCITeam group mailbox backup is broken")
	runGroupsBackupCategoryTest(suite, flags.DataConversations)
}

func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_libraries() {
	runGroupsBackupCategoryTest(suite, flags.DataLibraries)
}

// runGroupsBackupCategoryTest creates a backup of the suite's test group
// scoped to a single data category and requires the command to succeed.
func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category string) {
	recorder := strings.Builder{}
	recorder.Reset()

	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)
	defer flush()

	cmd, ctx := buildGroupsBackupCmd(
		ctx,
		suite.dpnd.configFilePath,
		suite.m365.Group.ID,
		category,
		&recorder)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	result := recorder.String()
	t.Log("backup results", result)
}
// Per-category tests asserting a clean "not found" error for unknown groups.
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_channelMessages() {
	runGroupsBackupGroupNotFoundTest(suite, flags.DataMessages)
}

func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_conversations() {
	runGroupsBackupGroupNotFoundTest(suite, flags.DataConversations)
}

func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_libraries() {
	runGroupsBackupGroupNotFoundTest(suite, flags.DataLibraries)
}

// runGroupsBackupGroupNotFoundTest backs up a nonexistent group and asserts
// the command fails with a "not found" error rather than panicking.
func runGroupsBackupGroupNotFoundTest(suite *BackupGroupsE2ESuite, category string) {
	recorder := strings.Builder{}
	recorder.Reset()

	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)
	defer flush()

	cmd, ctx := buildGroupsBackupCmd(
		ctx,
		suite.dpnd.configFilePath,
		"foo@not-there.com",
		category,
		&recorder)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
	assert.Contains(
		t,
		err.Error(),
		"not found",
		"error missing user not found")
	// a panic would surface as a "runtime error" in the message.
	assert.NotContains(t, err.Error(), "runtime error", "panic happened")

	t.Logf("backup error message: %s", err.Error())

	result := recorder.String()
	t.Log("backup results", result)
}
// TestBackupCreateGroups_badAzureClientIDFlag ensures that an invalid azure
// client id supplied on the command line fails the backup.
func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	suite.dpnd.recorder.Reset()

	cmd := cliTD.StubRootCmd(
		"backup", "create", "groups",
		"--group", suite.m365.Group.ID,
		"--azure-client-id", "invalid-value")
	cli.BuildCommandTree(cmd)
	cmd.SetErr(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}

// TestBackupCreateGroups_fromConfigFile verifies a backup succeeds when
// credentials/storage come entirely from the config file.
func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
	// Skip
	suite.T().Skip("CorsoCITeam group mailbox backup is broken")
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)
	defer flush()

	suite.dpnd.recorder.Reset()

	cmd := cliTD.StubRootCmd(
		"backup", "create", "groups",
		"--group", suite.m365.Group.ID,
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
	cli.BuildCommandTree(cmd)
	cmd.SetOut(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))
}

// AWS flags

// TestBackupCreateGroups_badAWSFlags ensures explicitly-set invalid AWS
// credentials fail the backup.
func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAWSFlags() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	suite.dpnd.recorder.Reset()

	cmd := cliTD.StubRootCmd(
		"backup", "create", "groups",
		"--group", suite.m365.Group.ID,
		"--aws-access-key", "invalid-value",
		"--aws-secret-access-key", "some-invalid-value")
	cli.BuildCommandTree(cmd)
	cmd.SetOut(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	// since invalid aws creds are explicitly set, should see a failure
	require.Error(t, err, clues.ToCore(err))
}
// ---------------------------------------------------------------------------
// tests prepared with a previous backup
// ---------------------------------------------------------------------------

// PreparedBackupGroupsE2ESuite runs list/details commands against backups
// created once in SetupSuite (one per data category).
type PreparedBackupGroupsE2ESuite struct {
	tester.Suite
	dpnd      dependencies
	backupOps map[path.CategoryType]string // category -> backup ID created in SetupSuite
	m365      its.M365IntgTestSetup
}

// TestPreparedBackupGroupsE2ESuite registers the suite with the test runner.
func TestPreparedBackupGroupsE2ESuite(t *testing.T) {
	suite.Run(t, &PreparedBackupGroupsE2ESuite{
		Suite: tester.NewE2ESuite(
			t,
			[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
	})
}

// SetupSuite initializes a repo and creates one backup per tested category,
// recording each resulting backup ID for the list/details tests below.
func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	suite.m365 = its.GetM365(t)
	suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
	suite.backupOps = make(map[path.CategoryType]string)

	var (
		groups = []string{suite.m365.Group.ID}
		ins    = idname.NewCache(map[string]string{suite.m365.Group.ID: suite.m365.Group.ID})
		cats   = []path.CategoryType{
			path.ChannelMessagesCategory,
			// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
			// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
			// we go fix the group mailbox.
			// path.ConversationPostsCategory,
			path.LibrariesCategory,
		}
	)

	for _, set := range cats {
		var (
			sel    = selectors.NewGroupsBackup(groups)
			scopes []selectors.GroupsScope
		)

		// scope the selector to the category under test.
		switch set {
		case path.ChannelMessagesCategory:
			scopes = selTD.GroupsBackupChannelScope(sel)
		case path.ConversationPostsCategory:
			scopes = selTD.GroupsBackupConversationScope(sel)
		case path.LibrariesCategory:
			scopes = selTD.GroupsBackupLibraryFolderScope(sel)
		}

		sel.Include(scopes)

		bop, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
		require.NoError(t, err, clues.ToCore(err))

		err = bop.Run(ctx)
		require.NoError(t, err, clues.ToCore(err))

		bIDs := string(bop.Results.BackupID)

		// sanity check, ensure we can find the backup and its details immediately
		b, err := suite.dpnd.repo.Backup(ctx, string(bop.Results.BackupID))
		require.NoError(t, err, "retrieving recent backup by ID")
		require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")

		_, b, errs := suite.dpnd.repo.GetBackupDetails(ctx, bIDs)
		require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
		require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
		require.Equal(t, bIDs, string(b.ID), "repo details matches results id")

		suite.backupOps[set] = string(b.ID)
	}
}
// Per-category tests for listing all backups.
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_channelMessages() {
	runGroupsListCmdTest(suite, path.ChannelMessagesCategory)
}

func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_conversations() {
	runGroupsListCmdTest(suite, path.ConversationPostsCategory)
}

func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_libraries() {
	runGroupsListCmdTest(suite, path.LibrariesCategory)
}

// runGroupsListCmdTest lists every backup and asserts that the backup created
// for the given category in SetupSuite appears in the output.
func runGroupsListCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
	suite.dpnd.recorder.Reset()

	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)
	defer flush()

	cmd := cliTD.StubRootCmd(
		"backup", "list", "groups",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
	cli.BuildCommandTree(cmd)
	cmd.SetOut(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	// compare the output
	result := suite.dpnd.recorder.String()
	assert.Contains(t, result, suite.backupOps[category])
}

// Per-category tests for listing a single backup by ID.
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_channelMessages() {
	runGroupsListSingleCmdTest(suite, path.ChannelMessagesCategory)
}

func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_conversations() {
	runGroupsListSingleCmdTest(suite, path.ConversationPostsCategory)
}

func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_libraries() {
	runGroupsListSingleCmdTest(suite, path.LibrariesCategory)
}

// runGroupsListSingleCmdTest lists a single backup via --backup and asserts
// that it appears in the output.
func runGroupsListSingleCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
	suite.dpnd.recorder.Reset()

	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)
	defer flush()

	bID := suite.backupOps[category]

	cmd := cliTD.StubRootCmd(
		"backup", "list", "groups",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--backup", string(bID))
	cli.BuildCommandTree(cmd)
	cmd.SetOut(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	// compare the output
	result := suite.dpnd.recorder.String()
	assert.Contains(t, result, bID)
}

// TestGroupsListCmd_badID expects listing an unknown backup ID to error.
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_badID() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)
	defer flush()

	cmd := cliTD.StubRootCmd(
		"backup", "list", "groups",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--backup", "smarfs")
	cli.BuildCommandTree(cmd)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}
// Per-category tests for the details command.
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages() {
	runGroupsDetailsCmdTest(suite, path.ChannelMessagesCategory)
}

func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_conversations() {
	// skip
	suite.T().Skip("CorsoCITeam group mailbox backup is broken")
	runGroupsDetailsCmdTest(suite, path.ConversationPostsCategory)
}

func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_libraries() {
	runGroupsDetailsCmdTest(suite, path.LibrariesCategory)
}

// runGroupsDetailsCmdTest prints the details of the category's prepared
// backup and asserts that every non-folder entry from the repo's details
// appears in the command output.
func runGroupsDetailsCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
	suite.dpnd.recorder.Reset()

	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)
	defer flush()

	bID := suite.backupOps[category]

	// fetch the details from the repo first
	deets, _, errs := suite.dpnd.repo.GetBackupDetails(ctx, string(bID))
	require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
	require.Empty(t, errs.Recovered())

	cmd := cliTD.StubRootCmd(
		"backup", "details", "groups",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--"+flags.BackupFN, string(bID))
	cli.BuildCommandTree(cmd)
	cmd.SetOut(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	// compare the output
	result := suite.dpnd.recorder.String()

	i := 0
	foundFolders := 0

	for _, ent := range deets.Entries {
		// Skip folders as they don't mean anything to the end group.
		if ent.Folder != nil {
			foundFolders++
			continue
		}

		suite.Run(fmt.Sprintf("detail %d", i), func() {
			assert.Contains(suite.T(), result, ent.ShortRef)
		})

		i++
	}

	// We only backup the default folder for each category so there should be at
	// least that folder (we don't make details entries for prefix folders).
	assert.GreaterOrEqual(t, foundFolders, 1)
}
// ---------------------------------------------------------------------------
// tests for deleting backups
// ---------------------------------------------------------------------------

// BackupDeleteGroupsE2ESuite exercises `backup delete groups`; SetupSuite
// creates three backups so tests can delete by multiple, single, and bad IDs.
type BackupDeleteGroupsE2ESuite struct {
	tester.Suite
	dpnd      dependencies
	backupOps [3]operations.BackupOperation // backups created in SetupSuite
}

// TestBackupDeleteGroupsE2ESuite registers the suite with the test runner.
func TestBackupDeleteGroupsE2ESuite(t *testing.T) {
	suite.Run(t, &BackupDeleteGroupsE2ESuite{
		Suite: tester.NewE2ESuite(
			t,
			[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
	})
}

// SetupSuite initializes a repo and runs three channel-message backups of the
// configured test team.
func (suite *BackupDeleteGroupsE2ESuite) SetupSuite() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	suite.dpnd = prepM365Test(t, ctx, path.GroupsService)

	m365GroupID := tconfig.M365TeamID(t)
	groups := []string{m365GroupID}

	// some tests require an existing backup
	sel := selectors.NewGroupsBackup(groups)
	sel.Include(selTD.GroupsBackupChannelScope(sel))

	for i := 0; i < cap(suite.backupOps); i++ {
		backupOp, err := suite.dpnd.repo.NewBackup(ctx, sel.Selector)
		require.NoError(t, err, clues.ToCore(err))

		suite.backupOps[i] = backupOp

		err = suite.backupOps[i].Run(ctx)
		require.NoError(t, err, clues.ToCore(err))
	}
}
// TestGroupsBackupDeleteCmd deletes two backups at once via --backups, then
// verifies a details lookup on a deleted ID fails.
func (suite *BackupDeleteGroupsE2ESuite) TestGroupsBackupDeleteCmd() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)
	defer flush()

	cmd := cliTD.StubRootCmd(
		"backup", "delete", "groups",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--"+flags.BackupIDsFN,
		fmt.Sprintf("%s,%s",
			string(suite.backupOps[0].Results.BackupID),
			string(suite.backupOps[1].Results.BackupID)))
	cli.BuildCommandTree(cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	// a follow-up details call should fail, due to the backup ID being deleted
	cmd = cliTD.StubRootCmd(
		"backup", "details", "groups",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--backups", string(suite.backupOps[0].Results.BackupID))
	cli.BuildCommandTree(cmd)

	err = cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}

// TestGroupsBackupDeleteCmd_SingleID deletes one backup via the singular
// --backup flag, then verifies a details lookup on it fails.
func (suite *BackupDeleteGroupsE2ESuite) TestGroupsBackupDeleteCmd_SingleID() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)
	defer flush()

	cmd := cliTD.StubRootCmd(
		"backup", "delete", "groups",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--"+flags.BackupFN,
		string(suite.backupOps[2].Results.BackupID))
	cli.BuildCommandTree(cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	// a follow-up details call should fail, due to the backup ID being deleted
	cmd = cliTD.StubRootCmd(
		"backup", "details", "groups",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--backup", string(suite.backupOps[2].Results.BackupID))
	cli.BuildCommandTree(cmd)

	err = cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}

// TestGroupsBackupDeleteCmd_UnknownID expects deleting a random ID to error.
func (suite *BackupDeleteGroupsE2ESuite) TestGroupsBackupDeleteCmd_UnknownID() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)
	defer flush()

	cmd := cliTD.StubRootCmd(
		"backup", "delete", "groups",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--"+flags.BackupIDsFN, uuid.NewString())
	cli.BuildCommandTree(cmd)

	// unknown backupIDs should error since the modelStore can't find the backup
	err := cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}

// TestGroupsBackupDeleteCmd_NoBackupID expects deletion without any backup ID
// flag to error.
func (suite *BackupDeleteGroupsE2ESuite) TestGroupsBackupDeleteCmd_NoBackupID() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)
	defer flush()

	cmd := cliTD.StubRootCmd(
		"backup", "delete", "groups",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
	cli.BuildCommandTree(cmd)

	// empty backupIDs should error since no data provided
	err := cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------

// buildGroupsBackupCmd assembles a `corso backup create groups` command for
// the given config file, group, and category, capturing stdout in recorder.
// It returns the command and a context carrying the root command for print.
func buildGroupsBackupCmd(
	ctx context.Context,
	configFile, group, category string,
	recorder *strings.Builder,
) (*cobra.Command, context.Context) {
	cmd := cliTD.StubRootCmd(
		"backup", "create", "groups",
		"--"+flags.ConfigFileFN, configFile,
		"--"+flags.GroupFN, group,
		"--"+flags.CategoryDataFN, category)

	cli.BuildCommandTree(cmd)
	cmd.SetOut(recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	return cmd, ctx
}

View File

@ -1,273 +0,0 @@
package backup
import (
"strconv"
"testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control"
)
// GroupsUnitSuite holds unit tests for the groups backup CLI wiring.
type GroupsUnitSuite struct {
	tester.Suite
}

func TestGroupsUnitSuite(t *testing.T) {
	suite.Run(t, &GroupsUnitSuite{Suite: tester.NewUnitSuite(t)})
}

// TestAddGroupsCommands checks that each backup subcommand (create, list,
// details, delete) is registered with the expected use, short text, and RunE.
func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
	expectUse := groupsServiceCommand

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			name:        "create groups",
			use:         createCommand,
			expectUse:   expectUse + " " + groupsServiceCommandCreateUseSuffix,
			expectShort: groupsCreateCmd().Short,
			expectRunE:  createGroupsCmd,
		},
		{
			name:        "list groups",
			use:         listCommand,
			expectUse:   expectUse,
			expectShort: groupsListCmd().Short,
			expectRunE:  listGroupsCmd,
		},
		{
			name:        "details groups",
			use:         detailsCommand,
			expectUse:   expectUse + " " + groupsServiceCommandDetailsUseSuffix,
			expectShort: groupsDetailsCmd().Short,
			expectRunE:  detailsGroupsCmd,
		},
		{
			name:        "delete groups",
			use:         deleteCommand,
			expectUse:   expectUse + " " + groupsServiceCommandDeleteUseSuffix,
			expectShort: groupsDeleteCmd().Short,
			expectRunE:  deleteGroupsCmd,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			cmd := &cobra.Command{Use: test.use}

			// registers the groups child command under cmd.
			c := addGroupsCommands(cmd)
			require.NotNil(t, c)

			cmds := cmd.Commands()
			require.Len(t, cmds, 1)

			child := cmds[0]
			assert.Equal(t, test.expectUse, child.Use)
			assert.Equal(t, test.expectShort, child.Short)
			tester.AreSameFunc(t, test.expectRunE, child.RunE)
		})
	}
}
// TestValidateGroupsBackupCreateFlags table-tests category validation with a
// wildcard group input; only unrecognized categories should error.
func (suite *GroupsUnitSuite) TestValidateGroupsBackupCreateFlags() {
	table := []struct {
		name   string
		cats   []string
		expect assert.ErrorAssertionFunc
	}{
		{
			name:   "none",
			cats:   []string{},
			expect: assert.NoError,
		},
		{
			name:   "libraries",
			cats:   []string{flags.DataLibraries},
			expect: assert.NoError,
		},
		{
			name:   "messages",
			cats:   []string{flags.DataMessages},
			expect: assert.NoError,
		},
		{
			name:   "conversations",
			cats:   []string{flags.DataConversations},
			expect: assert.NoError,
		},
		{
			name: "all allowed",
			cats: []string{
				flags.DataLibraries,
				flags.DataMessages,
				flags.DataConversations,
			},
			expect: assert.NoError,
		},
		{
			name:   "bad inputs",
			cats:   []string{"foo"},
			expect: assert.Error,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			err := validateGroupsBackupCreateFlags([]string{"*"}, test.cats)
			test.expect(suite.T(), err, clues.ToCore(err))
		})
	}
}
// TestBackupCreateFlags sets every create-command flag and verifies the
// parsed control/backup options and group selection reflect them.
func (suite *GroupsUnitSuite) TestBackupCreateFlags() {
	t := suite.T()

	cmd := cliTD.SetUpCmdHasFlags(
		t,
		&cobra.Command{Use: createCommand},
		addGroupsCommands,
		[]cliTD.UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		flagsTD.WithFlags(
			groupsServiceCommand,
			[]string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.GroupFN, flagsTD.FlgInputs(flagsTD.GroupsInput),
				"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.GroupsCategoryDataInput),
				"--" + flags.FetchParallelismFN, flagsTD.FetchParallelism,
				"--" + flags.DisableDeltaFN,
				"--" + flags.DisableLazyItemReaderFN,
			},
			flagsTD.PreparedGenericBackupFlags(),
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags()))

	opts := utils.MakeGroupsOpts(cmd)
	co := utils.Control()
	backupOpts := utils.ParseBackupOptions()

	// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
	// restore flags are switched over too and we no longer parse flags beyond
	// connection info into control.Options.
	assert.Equal(t, flagsTD.FetchParallelism, strconv.Itoa(backupOpts.Parallelism.ItemFetch))
	assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
	assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
	assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)
	assert.True(t, backupOpts.M365.DisableDeltaEndpoint)

	assert.Equal(t, flagsTD.FetchParallelism, strconv.Itoa(co.Parallelism.ItemFetch))
	assert.Equal(t, control.FailFast, co.FailureHandling)
	assert.True(t, co.ToggleFeatures.DisableIncrementals)
	assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
	assert.True(t, co.ToggleFeatures.DisableDelta)
	assert.True(t, co.ToggleFeatures.DisableLazyItemReader)
	assert.ElementsMatch(t, flagsTD.GroupsInput, opts.Groups)
	flagsTD.AssertGenericBackupFlags(t, cmd)
	flagsTD.AssertProviderFlags(t, cmd)
	flagsTD.AssertStorageFlags(t, cmd)
}
// TestBackupListFlags verifies list-command flag parsing (backup ID plus the
// shared provider/storage/list flag sets).
func (suite *GroupsUnitSuite) TestBackupListFlags() {
	t := suite.T()

	cmd := cliTD.SetUpCmdHasFlags(
		t,
		&cobra.Command{Use: listCommand},
		addGroupsCommands,
		[]cliTD.UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		flagsTD.WithFlags(
			groupsServiceCommand,
			[]string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, flagsTD.BackupInput,
			},
			flagsTD.PreparedBackupListFlags(),
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags()))

	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
	flagsTD.AssertBackupListFlags(t, cmd)
	flagsTD.AssertProviderFlags(t, cmd)
	flagsTD.AssertStorageFlags(t, cmd)
}

// TestBackupDetailsFlags verifies details-command flag parsing, including the
// skip-reduce toggle and channel/conversation/library selectors.
func (suite *GroupsUnitSuite) TestBackupDetailsFlags() {
	t := suite.T()

	cmd := cliTD.SetUpCmdHasFlags(
		t,
		&cobra.Command{Use: detailsCommand},
		addGroupsCommands,
		[]cliTD.UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		flagsTD.WithFlags(
			groupsServiceCommand,
			[]string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, flagsTD.BackupInput,
				"--" + flags.SkipReduceFN,
			},
			flagsTD.PreparedChannelFlags(),
			flagsTD.PreparedConversationFlags(),
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags(),
			flagsTD.PreparedLibraryFlags()))

	co := utils.Control()

	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
	assert.True(t, co.SkipReduce)
	flagsTD.AssertProviderFlags(t, cmd)
	flagsTD.AssertStorageFlags(t, cmd)
	flagsTD.AssertChannelFlags(t, cmd)
	flagsTD.AssertConversationFlags(t, cmd)
	flagsTD.AssertLibraryFlags(t, cmd)
}

// TestBackupDeleteFlags verifies delete-command flag parsing.
func (suite *GroupsUnitSuite) TestBackupDeleteFlags() {
	t := suite.T()

	cmd := cliTD.SetUpCmdHasFlags(
		t,
		&cobra.Command{Use: deleteCommand},
		addGroupsCommands,
		[]cliTD.UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		flagsTD.WithFlags(
			groupsServiceCommand,
			[]string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, flagsTD.BackupInput,
			},
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags()))

	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
	flagsTD.AssertProviderFlags(t, cmd)
	flagsTD.AssertStorageFlags(t, cmd)
}

View File

@ -0,0 +1,53 @@
package backup_test
import (
"context"
"strings"
"testing"
"github.com/alcionai/clues"
"github.com/spf13/viper"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/storage"
)
// prepM365Test initializes the shared state an M365 e2e test needs: an M365
// account, prefixed S3 storage, an initialized repository, a viper instance
// bound to a temp config clone, an output recorder, and the config file path.
func prepM365Test(
	t *testing.T,
	ctx context.Context, //revive:disable-line:context-as-argument
) (
	account.Account,
	storage.Storage,
	repository.Repository,
	*viper.Viper,
	strings.Builder,
	string,
) {
	var (
		acct     = tester.NewM365Account(t)
		st       = tester.NewPrefixedS3Storage(t)
		recorder = strings.Builder{}
	)

	cfg, err := st.S3Config()
	require.NoError(t, err, clues.ToCore(err))

	// force the temp config clone to point at this test's providers and the
	// per-test storage prefix.
	force := map[string]string{
		tester.TestCfgAccountProvider: "M365",
		tester.TestCfgStorageProvider: "S3",
		tester.TestCfgPrefix:          cfg.Prefix,
	}

	vpr, cfgFP := tester.MakeTempTestConfigClone(t, force)
	ctx = config.SetViper(ctx, vpr)

	repo, err := repository.Initialize(ctx, acct, st, control.Defaults())
	require.NoError(t, err, clues.ToCore(err))

	return acct, st, repo, vpr, recorder, cfgFP
}

View File

@ -1,98 +0,0 @@
package backup_test
import (
"context"
"strings"
"testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/storage"
"github.com/alcionai/corso/src/pkg/storage/testdata"
)
// dependencies bundles the per-suite state shared by the CLI e2e tests.
type dependencies struct {
	st             storage.Storage         // test-prefixed S3 storage
	repo           repository.Repositoryer // connected, initialized repo
	vpr            *viper.Viper            // viper bound to the temp config clone
	recorder       strings.Builder         // captures command output
	configFilePath string                  // path to the temp config clone
}

// prepM365Test creates a prefixed S3 storage, clones the test config to a
// temp file pointed at it, then constructs and initializes a repository for
// the given service, returning everything bundled as dependencies.
func prepM365Test(
	t *testing.T,
	ctx context.Context, //revive:disable-line:context-as-argument
	pst path.ServiceType,
) dependencies {
	var (
		acct     = tconfig.NewM365Account(t)
		st       = testdata.NewPrefixedS3Storage(t)
		recorder = strings.Builder{}
	)

	cfg, err := st.ToS3Config()
	require.NoError(t, err, clues.ToCore(err))

	// force the temp config clone to point at this test's providers/prefix.
	force := map[string]string{
		tconfig.TestCfgAccountProvider: account.ProviderM365.String(),
		tconfig.TestCfgStorageProvider: storage.ProviderS3.String(),
		tconfig.TestCfgPrefix:          cfg.Prefix,
	}

	vpr, cfgFP := tconfig.MakeTempTestConfigClone(t, force)
	ctx = config.SetViper(ctx, vpr)

	repo, err := repository.New(
		ctx,
		acct,
		st,
		control.DefaultOptions(),
		repository.NewRepoID)
	require.NoError(t, err, clues.ToCore(err))

	err = repo.Initialize(ctx, repository.InitConfig{
		Service: pst,
	})
	require.NoError(t, err, clues.ToCore(err))

	return dependencies{
		st:             st,
		repo:           repo,
		vpr:            vpr,
		recorder:       recorder,
		configFilePath: cfgFP,
	}
}
// ---------------------------------------------------------------------------
// funcs
// ---------------------------------------------------------------------------

// buildExchangeBackupCmd assembles a `corso backup create exchange` command
// for the given config file, user, and category, capturing stdout in
// recorder. Returns the command and a context carrying the root command.
func buildExchangeBackupCmd(
	ctx context.Context,
	configFile, user, category string,
	recorder *strings.Builder,
) (*cobra.Command, context.Context) {
	cmd := cliTD.StubRootCmd(
		"backup", "create", "exchange",
		"--"+flags.ConfigFileFN, configFile,
		"--"+flags.UserFN, user,
		"--"+flags.CategoryDataFN, category)

	cli.BuildCommandTree(cmd)
	cmd.SetOut(recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	return cmd, ctx
}

View File

@ -1,15 +1,21 @@
package backup package backup
import ( import (
"context"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/pkg/errors"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag" "github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/options"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@ -19,8 +25,8 @@ import (
const ( const (
oneDriveServiceCommand = "onedrive" oneDriveServiceCommand = "onedrive"
oneDriveServiceCommandCreateUseSuffix = "--user <email> | '" + flags.Wildcard + "'" oneDriveServiceCommandCreateUseSuffix = "--user <email> | '" + utils.Wildcard + "'"
oneDriveServiceCommandDeleteUseSuffix = "--backups <backupId>" oneDriveServiceCommandDeleteUseSuffix = "--backup <backupId>"
oneDriveServiceCommandDetailsUseSuffix = "--backup <backupId>" oneDriveServiceCommandDetailsUseSuffix = "--backup <backupId>"
) )
@ -34,9 +40,8 @@ corso backup create onedrive --user alice@example.com,bob@example.com
# Backup all OneDrive data for all M365 users # Backup all OneDrive data for all M365 users
corso backup create onedrive --user '*'` corso backup create onedrive --user '*'`
oneDriveServiceCommandDeleteExamples = `# Delete OneDrive backup with ID 1234abcd-12ab-cd34-56de-1234abcd \ oneDriveServiceCommandDeleteExamples = `# Delete OneDrive backup with ID 1234abcd-12ab-cd34-56de-1234abcd
and 1234abcd-12ab-cd34-56de-1234abce corso backup delete onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd`
corso backup delete onedrive --backups 1234abcd-12ab-cd34-56de-1234abcd,1234abcd-12ab-cd34-56de-1234abce`
oneDriveServiceCommandDetailsExamples = `# Explore items in Bob's latest backup (1234abcd...) oneDriveServiceCommandDetailsExamples = `# Explore items in Bob's latest backup (1234abcd...)
corso backup details onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd corso backup details onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd
@ -60,43 +65,43 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
switch cmd.Use { switch cmd.Use {
case createCommand: case createCommand:
c, fs = utils.AddCommand(cmd, oneDriveCreateCmd()) c, fs = utils.AddCommand(cmd, oneDriveCreateCmd())
fs.SortFlags = false
c.Use = c.Use + " " + oneDriveServiceCommandCreateUseSuffix c.Use = c.Use + " " + oneDriveServiceCommandCreateUseSuffix
c.Example = oneDriveServiceCommandCreateExamples c.Example = oneDriveServiceCommandCreateExamples
flags.AddUserFlag(c) utils.AddUserFlag(c)
flags.AddGenericBackupFlags(c) options.AddFailFastFlag(c)
fs.BoolVar( options.AddDisableIncrementalsFlag(c)
&flags.UseOldDeltaProcessFV,
flags.UseOldDeltaProcessFN,
false,
"process backups using the old delta processor instead of tree-based enumeration")
cobra.CheckErr(fs.MarkHidden(flags.UseOldDeltaProcessFN))
case listCommand: case listCommand:
c, _ = utils.AddCommand(cmd, oneDriveListCmd()) c, fs = utils.AddCommand(cmd, oneDriveListCmd())
fs.SortFlags = false
flags.AddBackupIDFlag(c, false) utils.AddBackupIDFlag(c, false)
flags.AddAllBackupListFlags(c) addFailedItemsFN(c)
addSkippedItemsFN(c)
addRecoveredErrorsFN(c)
case detailsCommand: case detailsCommand:
c, _ = utils.AddCommand(cmd, oneDriveDetailsCmd()) c, fs = utils.AddCommand(cmd, oneDriveDetailsCmd())
fs.SortFlags = false
c.Use = c.Use + " " + oneDriveServiceCommandDetailsUseSuffix c.Use = c.Use + " " + oneDriveServiceCommandDetailsUseSuffix
c.Example = oneDriveServiceCommandDetailsExamples c.Example = oneDriveServiceCommandDetailsExamples
flags.AddSkipReduceFlag(c) options.AddSkipReduceFlag(c)
flags.AddBackupIDFlag(c, true) utils.AddBackupIDFlag(c, true)
flags.AddOneDriveDetailsAndRestoreFlags(c) utils.AddOneDriveDetailsAndRestoreFlags(c)
case deleteCommand: case deleteCommand:
c, _ = utils.AddCommand(cmd, oneDriveDeleteCmd()) c, fs = utils.AddCommand(cmd, oneDriveDeleteCmd())
fs.SortFlags = false
c.Use = c.Use + " " + oneDriveServiceCommandDeleteUseSuffix c.Use = c.Use + " " + oneDriveServiceCommandDeleteUseSuffix
c.Example = oneDriveServiceCommandDeleteExamples c.Example = oneDriveServiceCommandDeleteExamples
flags.AddMultipleBackupIDsFlag(c, false) utils.AddBackupIDFlag(c, true)
flags.AddBackupIDFlag(c, false)
} }
return c return c
@ -125,32 +130,20 @@ func createOneDriveCmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
if flags.RunModeFV == flags.RunModeFlagTest { if err := validateOneDriveBackupCreateFlags(utils.UserFV); err != nil {
return nil
}
if err := validateOneDriveBackupCreateFlags(flags.UserFV); err != nil {
return err return err
} }
r, acct, err := utils.AccountConnectAndWriteRepoConfig( r, acct, err := utils.AccountConnectAndWriteRepoConfig(ctx)
ctx,
cmd,
path.OneDriveService)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
} }
defer utils.CloseRepo(ctx, r) defer utils.CloseRepo(ctx, r)
sel := oneDriveBackupCreateSelectors(flags.UserFV) sel := oneDriveBackupCreateSelectors(utils.UserFV)
ins, err := utils.UsersMap( ins, err := utils.UsersMap(ctx, *acct, fault.New(true))
ctx,
*acct,
utils.Control(),
r.Counter(),
fault.New(true))
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 users")) return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 users"))
} }
@ -161,10 +154,10 @@ func createOneDriveCmd(cmd *cobra.Command, args []string) error {
selectorSet = append(selectorSet, discSel.Selector) selectorSet = append(selectorSet, discSel.Selector)
} }
return genericCreateCommand( return runBackups(
ctx, ctx,
r, r,
"OneDrive", "OneDrive", "user",
selectorSet, selectorSet,
ins) ins)
} }
@ -200,7 +193,7 @@ func oneDriveListCmd() *cobra.Command {
// lists the history of backup operations // lists the history of backup operations
func listOneDriveCmd(cmd *cobra.Command, args []string) error { func listOneDriveCmd(cmd *cobra.Command, args []string) error {
return genericListCommand(cmd, flags.BackupIDFV, path.OneDriveService, args) return genericListCommand(cmd, utils.BackupIDFV, path.OneDriveService, args)
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
@ -224,35 +217,71 @@ func detailsOneDriveCmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
if flags.RunModeFV == flags.RunModeFlagTest {
return nil
}
return runDetailsOneDriveCmd(cmd)
}
func runDetailsOneDriveCmd(cmd *cobra.Command) error {
ctx := cmd.Context() ctx := cmd.Context()
opts := utils.MakeOneDriveOpts(cmd) opts := utils.MakeOneDriveOpts(cmd)
sel := utils.IncludeOneDriveRestoreDataSelectors(opts) r, _, _, err := utils.GetAccountAndConnect(ctx)
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
utils.FilterOneDriveRestoreInfoSelectors(sel, opts)
ds, err := genericDetailsCommand(cmd, flags.BackupIDFV, sel.Selector)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
} }
if len(ds.Entries) > 0 { defer utils.CloseRepo(ctx, r)
ds.PrintEntries(ctx)
} else { ctrlOpts := options.Control()
Info(ctx, selectors.ErrorNoMatchingItems)
ds, err := runDetailsOneDriveCmd(ctx, r, utils.BackupIDFV, opts, ctrlOpts.SkipReduce)
if err != nil {
return Only(ctx, err)
} }
if len(ds.Entries) == 0 {
Info(ctx, selectors.ErrorNoMatchingItems)
return nil
}
ds.PrintEntries(ctx)
return nil return nil
} }
// runDetailsOneDriveCmd actually performs the lookup in backup details.
// the fault.Errors return is always non-nil. Callers should check if
// errs.Failure() == nil.
func runDetailsOneDriveCmd(
ctx context.Context,
r repository.BackupGetter,
backupID string,
opts utils.OneDriveOpts,
skipReduce bool,
) (*details.Details, error) {
if err := utils.ValidateOneDriveRestoreFlags(backupID, opts); err != nil {
return nil, err
}
ctx = clues.Add(ctx, "backup_id", backupID)
d, _, errs := r.GetBackupDetails(ctx, backupID)
// TODO: log/track recoverable errors
if errs.Failure() != nil {
if errors.Is(errs.Failure(), data.ErrNotFound) {
return nil, clues.New("no backup exists with the id " + backupID)
}
return nil, clues.Wrap(errs.Failure(), "Failed to get backup details in the repository")
}
ctx = clues.Add(ctx, "details_entries", len(d.Entries))
if !skipReduce {
sel := utils.IncludeOneDriveRestoreDataSelectors(opts)
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
utils.FilterOneDriveRestoreInfoSelectors(sel, opts)
d = sel.Reduce(ctx, d, errs)
}
return d, nil
}
// `corso backup delete onedrive [<flag>...]` // `corso backup delete onedrive [<flag>...]`
func oneDriveDeleteCmd() *cobra.Command { func oneDriveDeleteCmd() *cobra.Command {
return &cobra.Command{ return &cobra.Command{
@ -266,15 +295,5 @@ func oneDriveDeleteCmd() *cobra.Command {
// deletes a oneDrive service backup. // deletes a oneDrive service backup.
func deleteOneDriveCmd(cmd *cobra.Command, args []string) error { func deleteOneDriveCmd(cmd *cobra.Command, args []string) error {
backupIDValue := []string{} return genericDeleteCommand(cmd, utils.BackupIDFV, "OneDrive", args)
if len(flags.BackupIDsFV) > 0 {
backupIDValue = flags.BackupIDsFV
} else if len(flags.BackupIDFV) > 0 {
backupIDValue = append(backupIDValue, flags.BackupIDFV)
} else {
return clues.New("either --backup or --backups flag is required")
}
return genericDeleteCommand(cmd, path.OneDriveService, "OneDrive", backupIDValue, args)
} }

View File

@ -7,23 +7,23 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/config" "github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata" selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata" "github.com/alcionai/corso/src/pkg/storage"
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -32,14 +32,20 @@ import (
type NoBackupOneDriveE2ESuite struct { type NoBackupOneDriveE2ESuite struct {
tester.Suite tester.Suite
dpnd dependencies acct account.Account
st storage.Storage
vpr *viper.Viper
cfgFP string
repo repository.Repository
m365UserID string
recorder strings.Builder
} }
func TestNoBackupOneDriveE2ESuite(t *testing.T) { func TestNoBackupOneDriveE2ESuite(t *testing.T) {
suite.Run(t, &NoBackupOneDriveE2ESuite{ suite.Run(t, &NoBackupOneDriveE2ESuite{
Suite: tester.NewE2ESuite( Suite: tester.NewE2ESuite(
t, t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}), [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs}),
}) })
} }
@ -49,25 +55,33 @@ func (suite *NoBackupOneDriveE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
suite.dpnd = prepM365Test(t, ctx, path.OneDriveService) acct, st, repo, vpr, recorder, cfgFilePath := prepM365Test(t, ctx)
suite.acct = acct
suite.st = st
suite.repo = repo
suite.recorder = recorder
suite.vpr = vpr
suite.cfgFP = cfgFilePath
suite.m365UserID = tester.M365UserID(t)
} }
func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupListCmd_empty() { func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupListCmd_empty() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
defer flush() defer flush()
suite.dpnd.recorder.Reset() suite.recorder.Reset()
cmd := cliTD.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "list", "onedrive", "backup", "list", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", suite.cfgFP)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder) cmd.SetErr(&suite.recorder)
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
@ -75,13 +89,13 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupListCmd_empty() {
err := cmd.ExecuteContext(ctx) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
result := suite.dpnd.recorder.String() result := suite.recorder.String()
// as an offhand check: the result should contain the m365 user id // as an offhand check: the result should contain the m365 user id
assert.True(t, strings.HasSuffix(result, "No backups available\n")) assert.True(t, strings.HasSuffix(result, "No backups available\n"))
} }
func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupCmd_userNotInTenant() { func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupCmd_UserNotInTenant() {
recorder := strings.Builder{} recorder := strings.Builder{}
t := suite.T() t := suite.T()
@ -89,12 +103,12 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupCmd_userNotInTenant() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
cmd := cliTD.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "create", "onedrive", "backup", "create", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.cfgFP,
"--"+flags.UserFN, "foo@not-there.com") "--"+utils.UserFN, "foo@nothere.com")
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetOut(&recorder) cmd.SetOut(&recorder)
@ -107,8 +121,7 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupCmd_userNotInTenant() {
assert.Contains( assert.Contains(
t, t,
err.Error(), err.Error(),
"not found", "not found in tenant", "error missing user not found")
"error missing user not found")
assert.NotContains(t, err.Error(), "runtime error", "panic happened") assert.NotContains(t, err.Error(), "runtime error", "panic happened")
t.Logf("backup error message: %s", err.Error()) t.Logf("backup error message: %s", err.Error())
@ -123,15 +136,20 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupCmd_userNotInTenant() {
type BackupDeleteOneDriveE2ESuite struct { type BackupDeleteOneDriveE2ESuite struct {
tester.Suite tester.Suite
dpnd dependencies acct account.Account
backupOps [3]operations.BackupOperation st storage.Storage
vpr *viper.Viper
cfgFP string
repo repository.Repository
backupOp operations.BackupOperation
recorder strings.Builder
} }
func TestBackupDeleteOneDriveE2ESuite(t *testing.T) { func TestBackupDeleteOneDriveE2ESuite(t *testing.T) {
suite.Run(t, &BackupDeleteOneDriveE2ESuite{ suite.Run(t, &BackupDeleteOneDriveE2ESuite{
Suite: tester.NewE2ESuite( Suite: tester.NewE2ESuite(
t, t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}), [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs}),
}) })
} }
@ -141,10 +159,17 @@ func (suite *BackupDeleteOneDriveE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
suite.dpnd = prepM365Test(t, ctx, path.OneDriveService) acct, st, repo, vpr, recorder, cfgFilePath := prepM365Test(t, ctx)
suite.acct = acct
suite.st = st
suite.repo = repo
suite.recorder = recorder
suite.vpr = vpr
suite.cfgFP = cfgFilePath
var ( var (
m365UserID = tconfig.M365UserID(t) m365UserID = tester.M365UserID(t)
users = []string{m365UserID} users = []string{m365UserID}
ins = idname.NewCache(map[string]string{m365UserID: m365UserID}) ins = idname.NewCache(map[string]string{m365UserID: m365UserID})
) )
@ -153,36 +178,31 @@ func (suite *BackupDeleteOneDriveE2ESuite) SetupSuite() {
sel := selectors.NewOneDriveBackup(users) sel := selectors.NewOneDriveBackup(users)
sel.Include(selTD.OneDriveBackupFolderScope(sel)) sel.Include(selTD.OneDriveBackupFolderScope(sel))
for i := 0; i < cap(suite.backupOps); i++ { backupOp, err := suite.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
backupOp, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins) require.NoError(t, err, clues.ToCore(err))
require.NoError(t, err, clues.ToCore(err))
suite.backupOps[i] = backupOp suite.backupOp = backupOp
err = suite.backupOps[i].Run(ctx) err = suite.backupOp.Run(ctx)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
}
} }
func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd() { func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
defer flush() defer flush()
suite.dpnd.recorder.Reset() suite.recorder.Reset()
cmd := cliTD.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "delete", "onedrive", "backup", "delete", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.cfgFP,
"--"+flags.BackupIDsFN, "--"+utils.BackupFN, string(suite.backupOp.Results.BackupID))
fmt.Sprintf("%s,%s",
string(suite.backupOps[0].Results.BackupID),
string(suite.backupOps[1].Results.BackupID)))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder) cmd.SetErr(&suite.recorder)
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
@ -190,61 +210,19 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd() {
err := cmd.ExecuteContext(ctx) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
result := suite.dpnd.recorder.String() result := suite.recorder.String()
assert.True(t, assert.True(t,
strings.HasSuffix( strings.HasSuffix(
result, result,
fmt.Sprintf("Deleted OneDrive backup [%s %s]\n", fmt.Sprintf("Deleted OneDrive backup %s\n", string(suite.backupOp.Results.BackupID)),
string(suite.backupOps[0].Results.BackupID), ),
string(suite.backupOps[1].Results.BackupID)))) )
// a follow-up details call should fail, due to the backup ID being deleted // a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd( cmd = tester.StubRootCmd(
"backup", "details", "onedrive", "backup", "details", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.cfgFP,
"--backups", string(suite.backupOps[0].Results.BackupID)) "--backup", string(suite.backupOp.Results.BackupID))
cli.BuildCommandTree(cmd)
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_SingleID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "delete", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--"+flags.BackupFN,
string(suite.backupOps[2].Results.BackupID))
cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := suite.dpnd.recorder.String()
assert.True(t,
strings.HasSuffix(
result,
fmt.Sprintf("Deleted OneDrive backup [%s]\n",
string(suite.backupOps[2].Results.BackupID))))
// a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd(
"backup", "details", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--backup", string(suite.backupOps[0].Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
err = cmd.ExecuteContext(ctx) err = cmd.ExecuteContext(ctx)
@ -255,35 +233,17 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_unknownID
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
defer flush() defer flush()
cmd := cliTD.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "delete", "onedrive", "backup", "delete", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.cfgFP,
"--"+flags.BackupIDsFN, uuid.NewString()) "--"+utils.BackupFN, uuid.NewString())
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup // unknown backupIDs should error since the modelStore can't find the backup
err := cmd.ExecuteContext(ctx) err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err)) require.Error(t, err, clues.ToCore(err))
} }
func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_NoBackupID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
// empty backupIDs should error since no data provided
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}

View File

@ -1,6 +1,7 @@
package backup package backup
import ( import (
"fmt"
"testing" "testing"
"github.com/alcionai/clues" "github.com/alcionai/clues"
@ -9,12 +10,12 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/options"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/internal/version"
dtd "github.com/alcionai/corso/src/pkg/backup/details/testdata"
) )
type OneDriveUnitSuite struct { type OneDriveUnitSuite struct {
@ -33,35 +34,57 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
use string use string
expectUse string expectUse string
expectShort string expectShort string
flags []string
expectRunE func(*cobra.Command, []string) error expectRunE func(*cobra.Command, []string) error
}{ }{
{ {
name: "create onedrive", "create onedrive",
use: createCommand, createCommand,
expectUse: expectUse + " " + oneDriveServiceCommandCreateUseSuffix, expectUse + " " + oneDriveServiceCommandCreateUseSuffix,
expectShort: oneDriveCreateCmd().Short, oneDriveCreateCmd().Short,
expectRunE: createOneDriveCmd, []string{
utils.UserFN,
options.DisableIncrementalsFN,
options.FailFastFN,
},
createOneDriveCmd,
}, },
{ {
name: "list onedrive", "list onedrive",
use: listCommand, listCommand,
expectUse: expectUse, expectUse,
expectShort: oneDriveListCmd().Short, oneDriveListCmd().Short,
expectRunE: listOneDriveCmd, []string{
utils.BackupFN,
failedItemsFN,
skippedItemsFN,
recoveredErrorsFN,
},
listOneDriveCmd,
}, },
{ {
name: "details onedrive", "details onedrive",
use: detailsCommand, detailsCommand,
expectUse: expectUse + " " + oneDriveServiceCommandDetailsUseSuffix, expectUse + " " + oneDriveServiceCommandDetailsUseSuffix,
expectShort: oneDriveDetailsCmd().Short, oneDriveDetailsCmd().Short,
expectRunE: detailsOneDriveCmd, []string{
utils.BackupFN,
utils.FolderFN,
utils.FileFN,
utils.FileCreatedAfterFN,
utils.FileCreatedBeforeFN,
utils.FileModifiedAfterFN,
utils.FileModifiedBeforeFN,
},
detailsOneDriveCmd,
}, },
{ {
name: "delete onedrive", "delete onedrive",
use: deleteCommand, deleteCommand,
expectUse: expectUse + " " + oneDriveServiceCommandDeleteUseSuffix, expectUse + " " + oneDriveServiceCommandDeleteUseSuffix,
expectShort: oneDriveDeleteCmd().Short, oneDriveDeleteCmd().Short,
expectRunE: deleteOneDriveCmd, []string{utils.BackupFN},
deleteOneDriveCmd,
}, },
} }
@ -81,133 +104,14 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
assert.Equal(t, test.expectUse, child.Use) assert.Equal(t, test.expectUse, child.Use)
assert.Equal(t, test.expectShort, child.Short) assert.Equal(t, test.expectShort, child.Short)
tester.AreSameFunc(t, test.expectRunE, child.RunE) tester.AreSameFunc(t, test.expectRunE, child.RunE)
for _, f := range test.flags {
assert.NotNil(t, c.Flag(f), f+" flag")
}
}) })
} }
} }
func (suite *OneDriveUnitSuite) TestBackupCreateFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: createCommand},
addOneDriveCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
oneDriveServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.UserFN, flagsTD.FlgInputs(flagsTD.UsersInput),
},
flagsTD.PreparedGenericBackupFlags(),
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
opts := utils.MakeOneDriveOpts(cmd)
co := utils.Control()
backupOpts := utils.ParseBackupOptions()
// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
// restore flags are switched over too and we no longer parse flags beyond
// connection info into control.Options.
assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)
assert.Equal(t, control.FailFast, co.FailureHandling)
assert.True(t, co.ToggleFeatures.DisableIncrementals)
assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
assert.ElementsMatch(t, flagsTD.UsersInput, opts.Users)
flagsTD.AssertGenericBackupFlags(t, cmd)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}
func (suite *OneDriveUnitSuite) TestBackupListFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: listCommand},
addOneDriveCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
oneDriveServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
},
flagsTD.PreparedBackupListFlags(),
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
flagsTD.AssertBackupListFlags(t, cmd)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}
func (suite *OneDriveUnitSuite) TestBackupDetailsFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: detailsCommand},
addOneDriveCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
oneDriveServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.SkipReduceFN,
},
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
co := utils.Control()
assert.True(t, co.SkipReduce)
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}
func (suite *OneDriveUnitSuite) TestBackupDeleteFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: deleteCommand},
addOneDriveCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
oneDriveServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
},
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}
func (suite *OneDriveUnitSuite) TestValidateOneDriveBackupCreateFlags() { func (suite *OneDriveUnitSuite) TestValidateOneDriveBackupCreateFlags() {
table := []struct { table := []struct {
name string name string
@ -231,3 +135,51 @@ func (suite *OneDriveUnitSuite) TestValidateOneDriveBackupCreateFlags() {
}) })
} }
} }
func (suite *OneDriveUnitSuite) TestOneDriveBackupDetailsSelectors() {
for v := 0; v <= version.Backup; v++ {
suite.Run(fmt.Sprintf("version%d", v), func() {
for _, test := range testdata.OneDriveOptionDetailLookups {
suite.Run(test.Name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
bg := testdata.VersionedBackupGetter{
Details: dtd.GetDetailsSetForVersion(t, v),
}
output, err := runDetailsOneDriveCmd(
ctx,
bg,
"backup-ID",
test.Opts(t, v),
false)
assert.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, test.Expected(t, v), output.Entries)
})
}
})
}
}
func (suite *OneDriveUnitSuite) TestOneDriveBackupDetailsSelectorsBadFormats() {
for _, test := range testdata.BadOneDriveOptionsFormats {
suite.Run(test.Name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
output, err := runDetailsOneDriveCmd(
ctx,
test.BackupGetter,
"backup-ID",
test.Opts(t, version.Backup),
false)
assert.Error(t, err, clues.ToCore(err))
assert.Empty(t, output)
})
}
}

View File

@ -4,16 +4,21 @@ import (
"context" "context"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/pkg/errors"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/options"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/filters" "github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365" "github.com/alcionai/corso/src/pkg/services/m365"
) )
@ -22,10 +27,15 @@ import (
// setup and globals // setup and globals
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
const (
dataLibraries = "libraries"
dataPages = "pages"
)
const ( const (
sharePointServiceCommand = "sharepoint" sharePointServiceCommand = "sharepoint"
sharePointServiceCommandCreateUseSuffix = "--site <siteURL> | '" + flags.Wildcard + "'" sharePointServiceCommandCreateUseSuffix = "--site <siteURL> | '" + utils.Wildcard + "'"
sharePointServiceCommandDeleteUseSuffix = "--backups <backupId>" sharePointServiceCommandDeleteUseSuffix = "--backup <backupId>"
sharePointServiceCommandDetailsUseSuffix = "--backup <backupId>" sharePointServiceCommandDetailsUseSuffix = "--backup <backupId>"
) )
@ -37,15 +47,10 @@ corso backup create sharepoint --site https://example.com/hr
corso backup create sharepoint --site https://example.com/hr,https://example.com/team corso backup create sharepoint --site https://example.com/hr,https://example.com/team
# Backup all SharePoint data for all Sites # Backup all SharePoint data for all Sites
corso backup create sharepoint --site '*' corso backup create sharepoint --site '*'`
# Backup all SharePoint list data for a Site sharePointServiceCommandDeleteExamples = `# Delete SharePoint backup with ID 1234abcd-12ab-cd34-56de-1234abcd
corso backup create sharepoint --site https://example.com/hr --data lists corso backup delete sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd`
`
sharePointServiceCommandDeleteExamples = `# Delete SharePoint backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
and 1234abcd-12ab-cd34-56de-1234abce
corso backup delete sharepoint --backups 1234abcd-12ab-cd34-56de-1234abcd,1234abcd-12ab-cd34-56de-1234abce`
sharePointServiceCommandDetailsExamples = `# Explore items in the HR site's latest backup (1234abcd...) sharePointServiceCommandDetailsExamples = `# Explore items in the HR site's latest backup (1234abcd...)
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd
@ -61,70 +66,58 @@ corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
# Explore all files within the document library "Work Documents" # Explore all files within the document library "Work Documents"
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \ corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--library "Work Documents" --library "Work Documents"
`
# Explore lists by their name(s)
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list "list-name-1,list-name-2"
# Explore lists created after a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-after 2024-01-01T12:23:34
# Explore lists created before a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-before 2024-01-01T12:23:34
# Explore lists modified before a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-before 2024-01-01T12:23:34
# Explore lists modified after a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-after 2024-01-01T12:23:34`
) )
// called by backup.go to map subcommands to provider-specific handling. // called by backup.go to map subcommands to provider-specific handling.
func addSharePointCommands(cmd *cobra.Command) *cobra.Command { func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use { switch cmd.Use {
case createCommand: case createCommand:
c, _ = utils.AddCommand(cmd, sharePointCreateCmd()) c, fs = utils.AddCommand(cmd, sharePointCreateCmd())
fs.SortFlags = false
c.Use = c.Use + " " + sharePointServiceCommandCreateUseSuffix c.Use = c.Use + " " + sharePointServiceCommandCreateUseSuffix
c.Example = sharePointServiceCommandCreateExamples c.Example = sharePointServiceCommandCreateExamples
flags.AddSiteFlag(c, true) utils.AddSiteFlag(c)
flags.AddSiteIDFlag(c, true) utils.AddSiteIDFlag(c)
// [TODO](hitesh) to add lists flag to invoke backup for lists utils.AddDataFlag(c, []string{dataLibraries}, true)
// when explicit invoke is not required anymore options.AddFailFastFlag(c)
flags.AddDataFlag(c, []string{flags.DataLibraries}, true) options.AddDisableIncrementalsFlag(c)
flags.AddGenericBackupFlags(c)
case listCommand: case listCommand:
c, _ = utils.AddCommand(cmd, sharePointListCmd()) c, fs = utils.AddCommand(cmd, sharePointListCmd())
fs.SortFlags = false
flags.AddBackupIDFlag(c, false) utils.AddBackupIDFlag(c, false)
flags.AddAllBackupListFlags(c) addFailedItemsFN(c)
addSkippedItemsFN(c)
addRecoveredErrorsFN(c)
case detailsCommand: case detailsCommand:
c, _ = utils.AddCommand(cmd, sharePointDetailsCmd()) c, fs = utils.AddCommand(cmd, sharePointDetailsCmd())
fs.SortFlags = false
c.Use = c.Use + " " + sharePointServiceCommandDetailsUseSuffix c.Use = c.Use + " " + sharePointServiceCommandDetailsUseSuffix
c.Example = sharePointServiceCommandDetailsExamples c.Example = sharePointServiceCommandDetailsExamples
flags.AddSkipReduceFlag(c) options.AddSkipReduceFlag(c)
flags.AddBackupIDFlag(c, true) utils.AddBackupIDFlag(c, true)
flags.AddSharePointDetailsAndRestoreFlags(c) utils.AddSharePointDetailsAndRestoreFlags(c)
case deleteCommand: case deleteCommand:
c, _ = utils.AddCommand(cmd, sharePointDeleteCmd()) c, fs = utils.AddCommand(cmd, sharePointDeleteCmd())
fs.SortFlags = false
c.Use = c.Use + " " + sharePointServiceCommandDeleteUseSuffix c.Use = c.Use + " " + sharePointServiceCommandDeleteUseSuffix
c.Example = sharePointServiceCommandDeleteExamples c.Example = sharePointServiceCommandDeleteExamples
flags.AddMultipleBackupIDsFlag(c, false) utils.AddBackupIDFlag(c, true)
flags.AddBackupIDFlag(c, false)
} }
return c return c
@ -153,18 +146,11 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
if flags.RunModeFV == flags.RunModeFlagTest { if err := validateSharePointBackupCreateFlags(utils.SiteIDFV, utils.WebURLFV, utils.CategoryDataFV); err != nil {
return nil
}
if err := validateSharePointBackupCreateFlags(flags.SiteIDFV, flags.WebURLFV, flags.CategoryDataFV); err != nil {
return err return err
} }
r, acct, err := utils.AccountConnectAndWriteRepoConfig( r, acct, err := utils.AccountConnectAndWriteRepoConfig(ctx)
ctx,
cmd,
path.SharePointService)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
} }
@ -174,17 +160,12 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
// TODO: log/print recoverable errors // TODO: log/print recoverable errors
errs := fault.New(false) errs := fault.New(false)
svcCli, err := m365.NewM365Client(ctx, *acct) ins, err := m365.SitesMap(ctx, *acct, errs)
if err != nil {
return Only(ctx, clues.Stack(err))
}
ins, err := svcCli.SitesMap(ctx, errs)
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 sites")) return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 sites"))
} }
sel, err := sharePointBackupCreateSelectors(ctx, ins, flags.SiteIDFV, flags.WebURLFV, flags.CategoryDataFV) sel, err := sharePointBackupCreateSelectors(ctx, ins, utils.SiteIDFV, utils.WebURLFV, utils.CategoryDataFV)
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Retrieving up sharepoint sites by ID and URL")) return Only(ctx, clues.Wrap(err, "Retrieving up sharepoint sites by ID and URL"))
} }
@ -195,10 +176,10 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
selectorSet = append(selectorSet, discSel.Selector) selectorSet = append(selectorSet, discSel.Selector)
} }
return genericCreateCommand( return runBackups(
ctx, ctx,
r, r,
"SharePoint", "SharePoint", "site",
selectorSet, selectorSet,
ins) ins)
} }
@ -207,16 +188,16 @@ func validateSharePointBackupCreateFlags(sites, weburls, cats []string) error {
if len(sites) == 0 && len(weburls) == 0 { if len(sites) == 0 && len(weburls) == 0 {
return clues.New( return clues.New(
"requires one or more --" + "requires one or more --" +
flags.SiteFN + " urls, or the wildcard --" + utils.SiteFN + " urls, or the wildcard --" +
flags.SiteFN + " *") utils.SiteFN + " *",
)
} }
allowedCats := utils.SharePointAllowedCategories()
for _, d := range cats { for _, d := range cats {
if _, ok := allowedCats[d]; !ok { if d != dataLibraries && d != dataPages {
return clues.New( return clues.New(
d + " is an unrecognized data type; only " + flags.DataLibraries + " supported") d + " is an unrecognized data type; either " + dataLibraries + "or " + dataPages,
)
} }
} }
@ -233,21 +214,39 @@ func sharePointBackupCreateSelectors(
return selectors.NewSharePointBackup(selectors.None()), nil return selectors.NewSharePointBackup(selectors.None()), nil
} }
if filters.PathContains(sites).Compare(flags.Wildcard) { if filters.PathContains(sites).Compare(utils.Wildcard) {
return includeAllSitesWithCategories(ins, cats), nil return includeAllSitesWithCategories(ins, cats), nil
} }
if filters.PathContains(weburls).Compare(flags.Wildcard) { if filters.PathContains(weburls).Compare(utils.Wildcard) {
return includeAllSitesWithCategories(ins, cats), nil return includeAllSitesWithCategories(ins, cats), nil
} }
sel := selectors.NewSharePointBackup(append(slices.Clone(sites), weburls...)) sel := selectors.NewSharePointBackup(append(slices.Clone(sites), weburls...))
return utils.AddCategories(sel, cats), nil return addCategories(sel, cats), nil
} }
func includeAllSitesWithCategories(ins idname.Cacher, categories []string) *selectors.SharePointBackup { func includeAllSitesWithCategories(ins idname.Cacher, categories []string) *selectors.SharePointBackup {
return utils.AddCategories(selectors.NewSharePointBackup(ins.IDs()), categories) return addCategories(selectors.NewSharePointBackup(ins.IDs()), categories)
}
func addCategories(sel *selectors.SharePointBackup, cats []string) *selectors.SharePointBackup {
// Issue #2631: Libraries are the only supported feature for SharePoint at this time.
if len(cats) == 0 {
sel.Include(sel.LibraryFolders(selectors.Any()))
}
for _, d := range cats {
switch d {
case dataLibraries:
sel.Include(sel.LibraryFolders(selectors.Any()))
case dataPages:
sel.Include(sel.Pages(selectors.Any()))
}
}
return sel
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
@ -266,7 +265,7 @@ func sharePointListCmd() *cobra.Command {
// lists the history of backup operations // lists the history of backup operations
func listSharePointCmd(cmd *cobra.Command, args []string) error { func listSharePointCmd(cmd *cobra.Command, args []string) error {
return genericListCommand(cmd, flags.BackupIDFV, path.SharePointService, args) return genericListCommand(cmd, utils.BackupIDFV, path.SharePointService, args)
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
@ -286,24 +285,14 @@ func sharePointDeleteCmd() *cobra.Command {
// deletes a sharePoint service backup. // deletes a sharePoint service backup.
func deleteSharePointCmd(cmd *cobra.Command, args []string) error { func deleteSharePointCmd(cmd *cobra.Command, args []string) error {
backupIDValue := []string{} return genericDeleteCommand(cmd, utils.BackupIDFV, "SharePoint", args)
if len(flags.BackupIDsFV) > 0 {
backupIDValue = flags.BackupIDsFV
} else if len(flags.BackupIDFV) > 0 {
backupIDValue = append(backupIDValue, flags.BackupIDFV)
} else {
return clues.New("either --backup or --backups flag is required")
}
return genericDeleteCommand(cmd, path.SharePointService, "SharePoint", backupIDValue, args)
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
// backup details // backup details
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------
// `corso backup details SharePoint [<flag>...]` // `corso backup details onedrive [<flag>...]`
func sharePointDetailsCmd() *cobra.Command { func sharePointDetailsCmd() *cobra.Command {
return &cobra.Command{ return &cobra.Command{
Use: sharePointServiceCommand, Use: sharePointServiceCommand,
@ -320,31 +309,67 @@ func detailsSharePointCmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
if flags.RunModeFV == flags.RunModeFlagTest {
return nil
}
return runDetailsSharePointCmd(cmd)
}
func runDetailsSharePointCmd(cmd *cobra.Command) error {
ctx := cmd.Context() ctx := cmd.Context()
opts := utils.MakeSharePointOpts(cmd) opts := utils.MakeSharePointOpts(cmd)
sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts) r, _, _, err := utils.GetAccountAndConnect(ctx)
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
utils.FilterSharePointRestoreInfoSelectors(sel, opts)
ds, err := genericDetailsCommand(cmd, flags.BackupIDFV, sel.Selector)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
} }
if len(ds.Entries) > 0 { defer utils.CloseRepo(ctx, r)
ds.PrintEntries(ctx)
} else { ctrlOpts := options.Control()
Info(ctx, selectors.ErrorNoMatchingItems)
ds, err := runDetailsSharePointCmd(ctx, r, utils.BackupIDFV, opts, ctrlOpts.SkipReduce)
if err != nil {
return Only(ctx, err)
} }
if len(ds.Entries) == 0 {
Info(ctx, selectors.ErrorNoMatchingItems)
return nil
}
ds.PrintEntries(ctx)
return nil return nil
} }
// runDetailsSharePointCmd actually performs the lookup in backup details.
// the fault.Errors return is always non-nil. Callers should check if
// errs.Failure() == nil.
func runDetailsSharePointCmd(
ctx context.Context,
r repository.BackupGetter,
backupID string,
opts utils.SharePointOpts,
skipReduce bool,
) (*details.Details, error) {
if err := utils.ValidateSharePointRestoreFlags(backupID, opts); err != nil {
return nil, err
}
ctx = clues.Add(ctx, "backup_id", backupID)
d, _, errs := r.GetBackupDetails(ctx, backupID)
// TODO: log/track recoverable errors
if errs.Failure() != nil {
if errors.Is(errs.Failure(), data.ErrNotFound) {
return nil, clues.New("no backup exists with the id " + backupID)
}
return nil, clues.Wrap(errs.Failure(), "Failed to get backup details in the repository")
}
ctx = clues.Add(ctx, "details_entries", len(d.Entries))
if !skipReduce {
sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
sel.Configure(selectors.Config{OnlyMatchItemNames: true})
utils.FilterSharePointRestoreInfoSelectors(sel, opts)
d = sel.Reduce(ctx, d, errs)
}
return d, nil
}

View File

@ -1,48 +1,51 @@
package backup_test package backup_test
import ( import (
"context"
"fmt" "fmt"
"strings" "strings"
"testing" "testing"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/spf13/cobra" "github.com/spf13/viper"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/selectors/testdata" "github.com/alcionai/corso/src/pkg/selectors/testdata"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata" "github.com/alcionai/corso/src/pkg/storage"
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// tests that require no existing backups // tests with no prior backup
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
type NoBackupSharePointE2ESuite struct { type NoBackupSharePointE2ESuite struct {
tester.Suite tester.Suite
dpnd dependencies acct account.Account
st storage.Storage
vpr *viper.Viper
cfgFP string
repo repository.Repository
m365SiteID string
recorder strings.Builder
} }
func TestNoBackupSharePointE2ESuite(t *testing.T) { func TestNoBackupSharePointE2ESuite(t *testing.T) {
suite.Run(t, &NoBackupSharePointE2ESuite{Suite: tester.NewE2ESuite( suite.Run(t, &NoBackupSharePointE2ESuite{Suite: tester.NewE2ESuite(
t, t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})}) [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
)})
} }
func (suite *NoBackupSharePointE2ESuite) SetupSuite() { func (suite *NoBackupSharePointE2ESuite) SetupSuite() {
@ -51,25 +54,33 @@ func (suite *NoBackupSharePointE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
suite.dpnd = prepM365Test(t, ctx, path.SharePointService) acct, st, repo, vpr, recorder, cfgFilePath := prepM365Test(t, ctx)
suite.acct = acct
suite.st = st
suite.repo = repo
suite.vpr = vpr
suite.recorder = recorder
suite.cfgFP = cfgFilePath
suite.m365SiteID = tester.M365SiteID(t)
} }
func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() { func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
defer flush() defer flush()
suite.dpnd.recorder.Reset() suite.recorder.Reset()
cmd := cliTD.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "list", "sharepoint", "backup", "list", "sharepoint",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", suite.cfgFP)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder) cmd.SetErr(&suite.recorder)
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
@ -77,319 +88,32 @@ func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() {
err := cmd.ExecuteContext(ctx) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
result := suite.dpnd.recorder.String() result := suite.recorder.String()
// as an offhand check: the result should contain the m365 sitet id // as an offhand check: the result should contain the m365 sitet id
assert.True(t, strings.HasSuffix(result, "No backups available\n")) assert.True(t, strings.HasSuffix(result, "No backups available\n"))
} }
// ---------------------------------------------------------------------------
// tests with no prior backup
// ---------------------------------------------------------------------------
type BackupSharepointE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
}
func TestBackupSharepointE2ESuite(t *testing.T) {
suite.Run(t, &BackupSharepointE2ESuite{Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
func (suite *BackupSharepointE2ESuite) SetupSuite() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
}
func (suite *BackupSharepointE2ESuite) TestSharepointBackupCmd_lists() {
// Issue: https://github.com/alcionai/corso/issues/4754
suite.T().Skip("unskip when sharepoint lists support is enabled")
runSharepointBackupCategoryTest(suite, flags.DataLists)
}
func runSharepointBackupCategoryTest(suite *BackupSharepointE2ESuite, category string) {
recorder := strings.Builder{}
recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd, ctx := buildSharepointBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.m365.Site.ID,
category,
&recorder)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := recorder.String()
t.Log("backup results", result)
}
func (suite *BackupSharepointE2ESuite) TestSharepointBackupCmd_siteNotFound_lists() {
// Issue: https://github.com/alcionai/corso/issues/4754
suite.T().Skip("un-skip test when lists support is enabled")
runSharepointBackupSiteNotFoundTest(suite, flags.DataLists)
}
func runSharepointBackupSiteNotFoundTest(suite *BackupSharepointE2ESuite, category string) {
recorder := strings.Builder{}
recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd, ctx := buildSharepointBackupCmd(
ctx,
suite.dpnd.configFilePath,
uuid.NewString(),
category,
&recorder)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
assert.Contains(
t,
err.Error(),
"Invalid hostname for this tenancy", "error missing site not found")
assert.NotContains(t, err.Error(), "runtime error", "panic happened")
t.Logf("backup error message: %s", err.Error())
result := recorder.String()
t.Log("backup results", result)
}
// ---------------------------------------------------------------------------
// tests prepared with a previous backup
// ---------------------------------------------------------------------------
type PreparedBackupSharepointE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup
}
func TestPreparedBackupSharepointE2ESuite(t *testing.T) {
suite.Run(t, &PreparedBackupSharepointE2ESuite{
Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
func (suite *PreparedBackupSharepointE2ESuite) SetupSuite() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
suite.backupOps = make(map[path.CategoryType]string)
var (
sites = []string{suite.m365.Site.ID}
ins = idname.NewCache(map[string]string{suite.m365.Site.ID: suite.m365.Site.ID})
cats = []path.CategoryType{
path.ListsCategory,
}
)
for _, set := range cats {
var (
sel = selectors.NewSharePointBackup(sites)
scopes []selectors.SharePointScope
)
switch set {
case path.ListsCategory:
scopes = testdata.SharePointBackupListsScope(sel)
}
sel.Include(scopes)
bop, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
require.NoError(t, err, clues.ToCore(err))
err = bop.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
bIDs := string(bop.Results.BackupID)
// sanity check, ensure we can find the backup and its details immediately
b, err := suite.dpnd.repo.Backup(ctx, string(bop.Results.BackupID))
require.NoError(t, err, "retrieving recent backup by ID")
require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")
_, b, errs := suite.dpnd.repo.GetBackupDetails(ctx, bIDs)
require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
require.Equal(t, bIDs, string(b.ID), "repo details matches results id")
suite.backupOps[set] = string(b.ID)
}
}
func (suite *PreparedBackupSharepointE2ESuite) TestSharepointListCmd_lists() {
runSharepointListCmdTest(suite, path.ListsCategory)
}
func runSharepointListCmdTest(suite *PreparedBackupSharepointE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "list", "sharepoint",
"--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
assert.Contains(t, result, suite.backupOps[category])
t.Log("backup results", result)
}
func (suite *PreparedBackupSharepointE2ESuite) TestSharepointListCmd_badID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "list", "sharepoint",
"--config-file", suite.dpnd.configFilePath,
"--backup", uuid.NewString())
cli.BuildCommandTree(cmd)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *PreparedBackupSharepointE2ESuite) TestSharepointDetailsCmd_lists() {
runSharepointDetailsCmdTest(suite, path.ListsCategory)
}
func runSharepointDetailsCmdTest(suite *PreparedBackupSharepointE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
bID := suite.backupOps[category]
// fetch the details from the repo first
deets, _, errs := suite.dpnd.repo.GetBackupDetails(ctx, string(bID))
require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
require.Empty(t, errs.Recovered())
cmd := cliTD.StubRootCmd(
"backup", "details", "sharepoint",
"--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupFN, string(bID))
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
i := 0
findings := make(map[path.CategoryType]int)
incrementor := func(cond bool, cat path.CategoryType) {
if cond {
findings[cat]++
}
}
for _, ent := range deets.Entries {
if ent.SharePoint == nil {
continue
}
isSharePointList := ent.SharePoint.ItemType == details.SharePointList
hasListName := isSharePointList && len(ent.SharePoint.List.Name) > 0
hasItemName := !isSharePointList && len(ent.SharePoint.ItemName) > 0
incrementor(hasListName, category)
incrementor(hasItemName, category)
suite.Run(fmt.Sprintf("detail %d", i), func() {
assert.Contains(suite.T(), result, ent.ShortRef)
})
i++
}
assert.GreaterOrEqual(t, findings[category], 1)
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// tests for deleting backups // tests for deleting backups
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
type BackupDeleteSharePointE2ESuite struct { type BackupDeleteSharePointE2ESuite struct {
tester.Suite tester.Suite
dpnd dependencies acct account.Account
backupOp operations.BackupOperation st storage.Storage
secondaryBackupOp operations.BackupOperation vpr *viper.Viper
cfgFP string
repo repository.Repository
backupOp operations.BackupOperation
recorder strings.Builder
} }
func TestBackupDeleteSharePointE2ESuite(t *testing.T) { func TestBackupDeleteSharePointE2ESuite(t *testing.T) {
suite.Run(t, &BackupDeleteSharePointE2ESuite{ suite.Run(t, &BackupDeleteSharePointE2ESuite{
Suite: tester.NewE2ESuite( Suite: tester.NewE2ESuite(
t, t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}), [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs}),
}) })
} }
@ -399,10 +123,17 @@ func (suite *BackupDeleteSharePointE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
suite.dpnd = prepM365Test(t, ctx, path.SharePointService) acct, st, repo, vpr, recorder, cfgFilePath := prepM365Test(t, ctx)
suite.acct = acct
suite.st = st
suite.repo = repo
suite.vpr = vpr
suite.recorder = recorder
suite.cfgFP = cfgFilePath
var ( var (
m365SiteID = tconfig.M365SiteID(t) m365SiteID = tester.M365SiteID(t)
sites = []string{m365SiteID} sites = []string{m365SiteID}
ins = idname.NewCache(map[string]string{m365SiteID: m365SiteID}) ins = idname.NewCache(map[string]string{m365SiteID: m365SiteID})
) )
@ -411,43 +142,31 @@ func (suite *BackupDeleteSharePointE2ESuite) SetupSuite() {
sel := selectors.NewSharePointBackup(sites) sel := selectors.NewSharePointBackup(sites)
sel.Include(testdata.SharePointBackupFolderScope(sel)) sel.Include(testdata.SharePointBackupFolderScope(sel))
backupOp, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins) backupOp, err := suite.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
suite.backupOp = backupOp suite.backupOp = backupOp
err = suite.backupOp.Run(ctx) err = suite.backupOp.Run(ctx)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
// secondary backup
secondaryBackupOp, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
require.NoError(t, err, clues.ToCore(err))
suite.secondaryBackupOp = secondaryBackupOp
err = suite.secondaryBackupOp.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
} }
func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() { func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
defer flush() defer flush()
suite.dpnd.recorder.Reset() suite.recorder.Reset()
cmd := cliTD.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "delete", "sharepoint", "backup", "delete", "sharepoint",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.cfgFP,
"--"+flags.BackupIDsFN, "--"+utils.BackupFN, string(suite.backupOp.Results.BackupID))
fmt.Sprintf("%s,%s",
string(suite.backupOp.Results.BackupID),
string(suite.secondaryBackupOp.Results.BackupID)))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder) cmd.SetErr(&suite.recorder)
ctx = print.SetRootCmd(ctx, cmd) ctx = print.SetRootCmd(ctx, cmd)
@ -455,20 +174,20 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() {
err := cmd.ExecuteContext(ctx) err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
result := suite.dpnd.recorder.String() result := suite.recorder.String()
assert.True(t, assert.True(t,
strings.HasSuffix( strings.HasSuffix(
result, result,
fmt.Sprintf("Deleted SharePoint backup [%s %s]\n", fmt.Sprintf("Deleted SharePoint backup %s\n", string(suite.backupOp.Results.BackupID)),
string(suite.backupOp.Results.BackupID), ),
string(suite.secondaryBackupOp.Results.BackupID)))) )
} }
// moved out of the func above to make the linter happy // moved out of the func above to make the linter happy
// // a follow-up details call should fail, due to the backup ID being deleted // // a follow-up details call should fail, due to the backup ID being deleted
// cmd = cliTD.StubRootCmd( // cmd = tester.StubRootCmd(
// "backup", "details", "sharepoint", // "backup", "details", "sharepoint",
// "--"+flags.ConfigFileFN, suite.cfgFP, // "--config-file", suite.cfgFP,
// "--backup", string(suite.backupOp.Results.BackupID)) // "--backup", string(suite.backupOp.Results.BackupID))
// cli.BuildCommandTree(cmd) // cli.BuildCommandTree(cmd)
@ -479,55 +198,17 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd_unkno
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.vpr)
defer flush() defer flush()
cmd := cliTD.StubRootCmd( cmd := tester.StubRootCmd(
"backup", "delete", "sharepoint", "backup", "delete", "sharepoint",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.cfgFP,
"--"+flags.BackupIDsFN, uuid.NewString()) "--"+utils.BackupFN, uuid.NewString())
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup // unknown backupIDs should error since the modelStore can't find the backup
err := cmd.ExecuteContext(ctx) err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err)) require.Error(t, err, clues.ToCore(err))
} }
func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd_NoBackupID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "groups",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
// empty backupIDs should error since no data provided
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
func buildSharepointBackupCmd(
ctx context.Context,
configFile, site, category string,
recorder *strings.Builder,
) (*cobra.Command, context.Context) {
cmd := cliTD.StubRootCmd(
"backup", "create", "sharepoint",
"--config-file", configFile,
"--"+flags.SiteIDFN, site,
"--"+flags.CategoryDataFN, category)
cli.BuildCommandTree(cmd)
cmd.SetOut(recorder)
return cmd, print.SetRootCmd(ctx, cmd)
}

View File

@ -1,7 +1,7 @@
package backup package backup
import ( import (
"strings" "fmt"
"testing" "testing"
"github.com/alcionai/clues" "github.com/alcionai/clues"
@ -10,13 +10,13 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/options"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/internal/version"
dtd "github.com/alcionai/corso/src/pkg/backup/details/testdata"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@ -36,35 +36,58 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
use string use string
expectUse string expectUse string
expectShort string expectShort string
flags []string
expectRunE func(*cobra.Command, []string) error expectRunE func(*cobra.Command, []string) error
}{ }{
{ {
name: "create sharepoint", "create sharepoint",
use: createCommand, createCommand,
expectUse: expectUse + " " + sharePointServiceCommandCreateUseSuffix, expectUse + " " + sharePointServiceCommandCreateUseSuffix,
expectShort: sharePointCreateCmd().Short, sharePointCreateCmd().Short,
expectRunE: createSharePointCmd, []string{
utils.SiteFN,
options.DisableIncrementalsFN,
options.FailFastFN,
},
createSharePointCmd,
}, },
{ {
name: "list sharepoint", "list sharepoint",
use: listCommand, listCommand,
expectUse: expectUse, expectUse,
expectShort: sharePointListCmd().Short, sharePointListCmd().Short,
expectRunE: listSharePointCmd, []string{
utils.BackupFN,
failedItemsFN,
skippedItemsFN,
recoveredErrorsFN,
},
listSharePointCmd,
}, },
{ {
name: "details sharepoint", "details sharepoint",
use: detailsCommand, detailsCommand,
expectUse: expectUse + " " + sharePointServiceCommandDetailsUseSuffix, expectUse + " " + sharePointServiceCommandDetailsUseSuffix,
expectShort: sharePointDetailsCmd().Short, sharePointDetailsCmd().Short,
expectRunE: detailsSharePointCmd, []string{
utils.BackupFN,
utils.LibraryFN,
utils.FolderFN,
utils.FileFN,
utils.FileCreatedAfterFN,
utils.FileCreatedBeforeFN,
utils.FileModifiedAfterFN,
utils.FileModifiedBeforeFN,
},
detailsSharePointCmd,
}, },
{ {
name: "delete sharepoint", "delete sharepoint",
use: deleteCommand, deleteCommand,
expectUse: expectUse + " " + sharePointServiceCommandDeleteUseSuffix, expectUse + " " + sharePointServiceCommandDeleteUseSuffix,
expectShort: sharePointDeleteCmd().Short, sharePointDeleteCmd().Short,
expectRunE: deleteSharePointCmd, []string{utils.BackupFN},
deleteSharePointCmd,
}, },
} }
for _, test := range table { for _, test := range table {
@ -83,142 +106,19 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
assert.Equal(t, test.expectUse, child.Use) assert.Equal(t, test.expectUse, child.Use)
assert.Equal(t, test.expectShort, child.Short) assert.Equal(t, test.expectShort, child.Short)
tester.AreSameFunc(t, test.expectRunE, child.RunE) tester.AreSameFunc(t, test.expectRunE, child.RunE)
for _, f := range test.flags {
assert.NotNil(t, c.Flag(f), f+" flag")
}
}) })
} }
} }
func (suite *SharePointUnitSuite) TestBackupCreateFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: createCommand},
addSharePointCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
sharePointServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.SiteIDFN, flagsTD.FlgInputs(flagsTD.SiteIDInput),
"--" + flags.SiteFN, flagsTD.FlgInputs(flagsTD.WebURLInput),
"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.SharepointCategoryDataInput),
},
flagsTD.PreparedGenericBackupFlags(),
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
opts := utils.MakeSharePointOpts(cmd)
co := utils.Control()
backupOpts := utils.ParseBackupOptions()
// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
// restore flags are switched over too and we no longer parse flags beyond
// connection info into control.Options.
assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)
assert.Equal(t, control.FailFast, co.FailureHandling)
assert.True(t, co.ToggleFeatures.DisableIncrementals)
assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
assert.ElementsMatch(t, []string{strings.Join(flagsTD.SiteIDInput, ",")}, opts.SiteID)
assert.ElementsMatch(t, flagsTD.WebURLInput, opts.WebURL)
flagsTD.AssertGenericBackupFlags(t, cmd)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}
func (suite *SharePointUnitSuite) TestBackupListFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: listCommand},
addSharePointCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
sharePointServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
},
flagsTD.PreparedBackupListFlags(),
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
flagsTD.AssertBackupListFlags(t, cmd)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}
func (suite *SharePointUnitSuite) TestBackupDetailsFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: detailsCommand},
addSharePointCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
sharePointServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.SkipReduceFN,
},
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
co := utils.Control()
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
assert.True(t, co.SkipReduce)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}
func (suite *SharePointUnitSuite) TestBackupDeleteFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: deleteCommand},
addSharePointCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
sharePointServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
},
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}
func (suite *SharePointUnitSuite) TestValidateSharePointBackupCreateFlags() { func (suite *SharePointUnitSuite) TestValidateSharePointBackupCreateFlags() {
table := []struct { table := []struct {
name string name string
site []string site []string
weburl []string weburl []string
cats []string
expect assert.ErrorAssertionFunc expect assert.ErrorAssertionFunc
}{ }{
{ {
@ -226,61 +126,25 @@ func (suite *SharePointUnitSuite) TestValidateSharePointBackupCreateFlags() {
expect: assert.Error, expect: assert.Error,
}, },
{ {
name: "sites but no category", name: "sites",
site: []string{"smarf"}, site: []string{"smarf"},
expect: assert.NoError, expect: assert.NoError,
}, },
{ {
name: "web urls but no category", name: "urls",
weburl: []string{"fnord"}, weburl: []string{"fnord"},
expect: assert.NoError, expect: assert.NoError,
}, },
{ {
name: "both web urls and sites but no category", name: "both",
site: []string{"smarf"}, site: []string{"smarf"},
weburl: []string{"fnord"}, weburl: []string{"fnord"},
expect: assert.NoError, expect: assert.NoError,
}, },
{
name: "site with libraries category",
site: []string{"smarf"},
cats: []string{flags.DataLibraries},
expect: assert.NoError,
},
{
name: "site with invalid category",
site: []string{"smarf"},
cats: []string{"invalid category"},
expect: assert.Error,
},
{
name: "site with lists category",
site: []string{"smarf"},
cats: []string{flags.DataLists},
expect: assert.NoError,
},
// [TODO]: Uncomment when pages are enabled
// {
// name: "site with pages category",
// site: []string{"smarf"},
// cats: []string{flags.DataPages},
// expect: assert.NoError,
// },
// [TODO]: Uncomment when pages & lists are enabled
// {
// name: "site with all categories",
// site: []string{"smarf"},
// cats: []string{flags.DataLists, flags.DataPages, flags.DataLibraries},
// expect: assert.NoError,
// },
} }
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
err := validateSharePointBackupCreateFlags(test.site, test.weburl, test.cats) err := validateSharePointBackupCreateFlags(test.site, test.weburl, nil)
test.expect(suite.T(), err, clues.ToCore(err)) test.expect(suite.T(), err, clues.ToCore(err))
}) })
} }
@ -300,11 +164,12 @@ func (suite *SharePointUnitSuite) TestSharePointBackupCreateSelectors() {
) )
table := []struct { table := []struct {
name string name string
site []string site []string
weburl []string weburl []string
data []string data []string
expect []string expect []string
expectScopesLen int
}{ }{
{ {
name: "no sites or urls", name: "no sites or urls",
@ -317,60 +182,63 @@ func (suite *SharePointUnitSuite) TestSharePointBackupCreateSelectors() {
expect: selectors.None(), expect: selectors.None(),
}, },
{ {
name: "site wildcard", name: "site wildcard",
site: []string{flags.Wildcard}, site: []string{utils.Wildcard},
expect: bothIDs, expect: bothIDs,
expectScopesLen: 2,
}, },
{ {
name: "url wildcard", name: "url wildcard",
weburl: []string{flags.Wildcard}, weburl: []string{utils.Wildcard},
expect: bothIDs, expect: bothIDs,
expectScopesLen: 2,
}, },
{ {
name: "sites", name: "sites",
site: []string{id1, id2}, site: []string{id1, id2},
expect: []string{id1, id2}, expect: []string{id1, id2},
expectScopesLen: 2,
}, },
{ {
name: "urls", name: "urls",
weburl: []string{url1, url2}, weburl: []string{url1, url2},
expect: []string{url1, url2}, expect: []string{url1, url2},
expectScopesLen: 2,
}, },
{ {
name: "mix sites and urls", name: "mix sites and urls",
site: []string{id1}, site: []string{id1},
weburl: []string{url2}, weburl: []string{url2},
expect: []string{id1, url2}, expect: []string{id1, url2},
expectScopesLen: 2,
}, },
{ {
name: "duplicate sites and urls", name: "duplicate sites and urls",
site: []string{id1, id2}, site: []string{id1, id2},
weburl: []string{url1, url2}, weburl: []string{url1, url2},
expect: []string{id1, id2, url1, url2}, expect: []string{id1, id2, url1, url2},
expectScopesLen: 2,
}, },
{ {
name: "unnecessary site wildcard", name: "unnecessary site wildcard",
site: []string{id1, flags.Wildcard}, site: []string{id1, utils.Wildcard},
weburl: []string{url1, url2}, weburl: []string{url1, url2},
expect: bothIDs, expect: bothIDs,
expectScopesLen: 2,
}, },
{ {
name: "unnecessary url wildcard", name: "unnecessary url wildcard",
site: []string{id1}, site: []string{id1},
weburl: []string{url1, flags.Wildcard}, weburl: []string{url1, utils.Wildcard},
expect: bothIDs, expect: bothIDs,
expectScopesLen: 2,
}, },
{ {
name: "Pages", name: "Pages",
site: bothIDs, site: bothIDs,
data: []string{flags.DataPages}, data: []string{dataPages},
expect: bothIDs, expect: bothIDs,
}, expectScopesLen: 1,
{
name: "Lists",
site: bothIDs,
data: []string{flags.DataLists},
expect: bothIDs,
}, },
} }
for _, test := range table { for _, test := range table {
@ -382,7 +250,55 @@ func (suite *SharePointUnitSuite) TestSharePointBackupCreateSelectors() {
sel, err := sharePointBackupCreateSelectors(ctx, ins, test.site, test.weburl, test.data) sel, err := sharePointBackupCreateSelectors(ctx, ins, test.site, test.weburl, test.data)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, test.expect, sel.ResourceOwners.Targets) assert.ElementsMatch(t, test.expect, sel.DiscreteResourceOwners())
})
}
}
func (suite *SharePointUnitSuite) TestSharePointBackupDetailsSelectors() {
for v := 0; v <= version.Backup; v++ {
suite.Run(fmt.Sprintf("version%d", v), func() {
for _, test := range testdata.SharePointOptionDetailLookups {
suite.Run(test.Name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
bg := testdata.VersionedBackupGetter{
Details: dtd.GetDetailsSetForVersion(t, v),
}
output, err := runDetailsSharePointCmd(
ctx,
bg,
"backup-ID",
test.Opts(t, v),
false)
assert.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, test.Expected(t, v), output.Entries)
})
}
})
}
}
func (suite *SharePointUnitSuite) TestSharePointBackupDetailsSelectorsBadFormats() {
for _, test := range testdata.BadSharePointOptionsFormats {
suite.Run(test.Name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
output, err := runDetailsSharePointCmd(
ctx,
test.BackupGetter,
"backup-ID",
test.Opts(t, version.Backup),
false)
assert.Error(t, err, clues.ToCore(err))
assert.Empty(t, output)
}) })
} }
} }

View File

@ -1,305 +0,0 @@
package backup
import (
"context"
"fmt"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365"
)
// ------------------------------------------------------------------------------------------------
// setup and globals
// ------------------------------------------------------------------------------------------------
const (
	// teamschatsServiceCommand is the subcommand token for the chats service.
	teamschatsServiceCommand = "chats"
	// usage-string suffixes appended to each generic verb's Use line.
	teamschatsServiceCommandCreateUseSuffix  = "--user <userEmail> | '" + flags.Wildcard + "'"
	teamschatsServiceCommandDeleteUseSuffix  = "--backups <backupId>"
	teamschatsServiceCommandDetailsUseSuffix = "--backup <backupId>"
)
// Example text rendered in each chats subcommand's help output.
const (
	teamschatsServiceCommandCreateExamples = `# Backup all chats with bob@company.hr
corso backup create chats --user bob@company.hr
# Backup all chats for all users
corso backup create chats --user '*'`

	teamschatsServiceCommandDeleteExamples = `# Delete chats backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
and 1234abcd-12ab-cd34-56de-1234abce
corso backup delete chats --backups 1234abcd-12ab-cd34-56de-1234abcd,1234abcd-12ab-cd34-56de-1234abce`

	teamschatsServiceCommandDetailsExamples = `# Explore chats in Bob's latest backup (1234abcd...)
corso backup details chats --backup 1234abcd-12ab-cd34-56de-1234abcd`
)
// called by backup.go to map subcommands to provider-specific handling.
//
// addTeamsChatsCommands attaches the chats service command beneath one of
// the generic backup verbs (create, list, details, delete), selected by
// cmd.Use, and registers the flags for that verb.  Returns the attached
// child command, or nil if cmd.Use matched no known verb.
func addTeamsChatsCommands(cmd *cobra.Command) *cobra.Command {
	var c *cobra.Command

	switch cmd.Use {
	case createCommand:
		// chats backup is still gated as a pre-release feature.
		c, _ = utils.AddCommand(cmd, teamschatsCreateCmd(), utils.MarkPreReleaseCommand())

		c.Use = c.Use + " " + teamschatsServiceCommandCreateUseSuffix
		c.Example = teamschatsServiceCommandCreateExamples

		// Flags addition ordering should follow the order we want them to appear in help and docs:
		flags.AddUserFlag(c)
		flags.AddDataFlag(c, []string{flags.DataChats}, false)
		flags.AddGenericBackupFlags(c)

	case listCommand:
		c, _ = utils.AddCommand(cmd, teamschatsListCmd(), utils.MarkPreReleaseCommand())

		flags.AddBackupIDFlag(c, false)
		flags.AddAllBackupListFlags(c)

	case detailsCommand:
		c, _ = utils.AddCommand(cmd, teamschatsDetailsCmd(), utils.MarkPreReleaseCommand())

		c.Use = c.Use + " " + teamschatsServiceCommandDetailsUseSuffix
		c.Example = teamschatsServiceCommandDetailsExamples

		flags.AddSkipReduceFlag(c)

		// Flags addition ordering should follow the order we want them to appear in help and docs:
		// More generic (ex: --user) and more frequently used flags take precedence.
		flags.AddBackupIDFlag(c, true)
		flags.AddTeamsChatsDetailsAndRestoreFlags(c)

	case deleteCommand:
		c, _ = utils.AddCommand(cmd, teamschatsDeleteCmd(), utils.MarkPreReleaseCommand())

		c.Use = c.Use + " " + teamschatsServiceCommandDeleteUseSuffix
		c.Example = teamschatsServiceCommandDeleteExamples

		// delete accepts a single --backup id or a comma-separated --backups list.
		flags.AddMultipleBackupIDsFlag(c, false)
		flags.AddBackupIDFlag(c, false)
	}

	return c
}
// ------------------------------------------------------------------------------------------------
// backup create
// ------------------------------------------------------------------------------------------------
// teamschatsCreateCmd builds `corso backup create chats [<flag>...]`.
func teamschatsCreateCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:     teamschatsServiceCommand,
		Aliases: []string{teamsServiceCommand},
		Short:   "Backup M365 Chats data",
		RunE:    createTeamsChatsCmd,
		Args:    cobra.NoArgs,
	}

	return cmd
}
// createTeamsChatsCmd processes a `corso backup create chats` invocation:
// it validates flags, connects to the repo, resolves the targeted users,
// and hands one selector per resource owner to the generic create command.
func createTeamsChatsCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	// test run mode short-circuits before doing any real work.
	if flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	if err := validateTeamsChatsBackupCreateFlags(flags.UserFV, flags.CategoryDataFV); err != nil {
		return err
	}

	r, acct, err := utils.AccountConnectAndWriteRepoConfig(
		ctx,
		cmd,
		path.TeamsChatsService)
	if err != nil {
		return Only(ctx, err)
	}

	defer utils.CloseRepo(ctx, r)

	// TODO: log/print recoverable errors
	errs := fault.New(false)

	svcCli, err := m365.NewM365Client(ctx, *acct)
	if err != nil {
		return Only(ctx, clues.Stack(err))
	}

	// resolve all user IDs/names so wildcard selection and owner lookup work.
	ins, err := svcCli.AC.Users().GetAllIDsAndNames(ctx, errs)
	if err != nil {
		return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 teamschats"))
	}

	sel := teamschatsBackupCreateSelectors(ctx, ins, flags.UserFV, flags.CategoryDataFV)

	// fan the selector out into one discrete selector per resource owner so
	// each owner gets its own backup operation.
	selectorSet := []selectors.Selector{}

	for _, discSel := range sel.SplitByResourceOwner(ins.IDs()) {
		selectorSet = append(selectorSet, discSel.Selector)
	}

	return genericCreateCommand(
		ctx,
		r,
		"Chats",
		selectorSet,
		ins)
}
// ------------------------------------------------------------------------------------------------
// backup list
// ------------------------------------------------------------------------------------------------
// teamschatsListCmd builds `corso backup list chats [<flag>...]`.
func teamschatsListCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:   teamschatsServiceCommand,
		Short: "List the history of M365 Chats backups",
		RunE:  listTeamsChatsCmd,
		Args:  cobra.NoArgs,
	}

	return cmd
}
// listTeamsChatsCmd lists the history of chats backup operations, optionally
// narrowed to the single backup named by the --backup flag.
func listTeamsChatsCmd(cmd *cobra.Command, args []string) error {
	return genericListCommand(cmd, flags.BackupIDFV, path.TeamsChatsService, args)
}
// ------------------------------------------------------------------------------------------------
// backup details
// ------------------------------------------------------------------------------------------------
// teamschatsDetailsCmd builds `corso backup details chats [<flag>...]`.
func teamschatsDetailsCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:   teamschatsServiceCommand,
		Short: "Shows the details of a M365 Chats backup",
		RunE:  detailsTeamsChatsCmd,
		Args:  cobra.NoArgs,
	}

	return cmd
}
// detailsTeamsChatsCmd handles `corso backup details chats`: it no-ops when
// only help was requested or when running in test mode, and otherwise defers
// to runDetailsTeamsChatsCmd.
func detailsTeamsChatsCmd(cmd *cobra.Command, args []string) error {
	// HasNoFlagsAndShownHelp must be evaluated first (short-circuit) so its
	// help-display behavior is preserved exactly as before.
	if utils.HasNoFlagsAndShownHelp(cmd) || flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	return runDetailsTeamsChatsCmd(cmd)
}
// runDetailsTeamsChatsCmd fetches the backup named by --backup, reduces its
// details to the entries matched by the details/restore flags, and prints
// either the matching entries or a no-match notice.
func runDetailsTeamsChatsCmd(cmd *cobra.Command) error {
	ctx := cmd.Context()
	opts := utils.MakeTeamsChatsOpts(cmd)

	sel := utils.IncludeTeamsChatsRestoreDataSelectors(ctx, opts)
	// chats entries are matched on item names rather than IDs.
	sel.Configure(selectors.Config{OnlyMatchItemNames: true})
	utils.FilterTeamsChatsRestoreInfoSelectors(sel, opts)

	ds, err := genericDetailsCommand(cmd, flags.BackupIDFV, sel.Selector)
	if err != nil {
		return Only(ctx, err)
	}

	if len(ds.Entries) > 0 {
		ds.PrintEntries(ctx)
	} else {
		Info(ctx, selectors.ErrorNoMatchingItems)
	}

	return nil
}
// ------------------------------------------------------------------------------------------------
// backup delete
// ------------------------------------------------------------------------------------------------
// teamschatsDeleteCmd builds `corso backup delete chats [<flag>...]`.
func teamschatsDeleteCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:   teamschatsServiceCommand,
		Short: "Delete backed-up M365 Chats data",
		RunE:  deleteTeamsChatsCmd,
		Args:  cobra.NoArgs,
	}

	return cmd
}
// deleteTeamsChatsCmd deletes one or more chats backups.  The IDs come from
// --backups (preferred, multiple) or --backup (single); supplying neither is
// an error.
func deleteTeamsChatsCmd(cmd *cobra.Command, args []string) error {
	var ids []string

	switch {
	case len(flags.BackupIDsFV) > 0:
		ids = flags.BackupIDsFV
	case len(flags.BackupIDFV) > 0:
		ids = []string{flags.BackupIDFV}
	default:
		return clues.New("either --backup or --backups flag is required")
	}

	return genericDeleteCommand(cmd, path.TeamsChatsService, "TeamsChats", ids, args)
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
// validateTeamsChatsBackupCreateFlags checks the `backup create chats` flag
// values: at least one --user (or wildcard) is required, and every requested
// data category must be in the chats allow-list.
func validateTeamsChatsBackupCreateFlags(teamschats, cats []string) error {
	if len(teamschats) == 0 {
		return clues.New(
			"requires one or more --" +
				flags.UserFN + " ids, or the wildcard --" +
				flags.UserFN + " *")
	}

	allowed := utils.TeamsChatsAllowedCategories()
	suffix := fmt.Sprintf(
		" is an unrecognized data type; only %s is supported",
		flags.DataChats)

	for _, cat := range cats {
		if _, ok := allowed[cat]; !ok {
			return clues.New(cat + suffix)
		}
	}

	return nil
}
// teamschatsBackupCreateSelectors builds the chats backup selector for the
// requested users and data categories.  A wildcard entry in users expands
// the selection to every user id known to the cache.
func teamschatsBackupCreateSelectors(
	ctx context.Context,
	ins idname.Cacher,
	users, cats []string,
) *selectors.TeamsChatsBackup {
	if filters.PathContains(users).Compare(flags.Wildcard) {
		return includeAllTeamsChatsWithCategories(ins, cats)
	}

	// clone users so the selector never aliases the caller's slice.
	sel := selectors.NewTeamsChatsBackup(slices.Clone(users))

	return utils.AddTeamsChatsCategories(sel, cats)
}
// includeAllTeamsChatsWithCategories builds a chats backup selector covering
// every cached user id, scoped to the given data categories.
func includeAllTeamsChatsWithCategories(ins idname.Cacher, categories []string) *selectors.TeamsChatsBackup {
	return utils.AddTeamsChatsCategories(selectors.NewTeamsChatsBackup(ins.IDs()), categories)
}

View File

@ -1,636 +0,0 @@
package backup_test
import (
"context"
"fmt"
"strings"
"testing"
"github.com/alcionai/clues"
"github.com/google/uuid"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)
// ---------------------------------------------------------------------------
// tests that require no existing backups
// ---------------------------------------------------------------------------
// NoBackupTeamsChatsE2ESuite hosts e2e tests that must run against a repo
// containing no prior chats backups.
type NoBackupTeamsChatsE2ESuite struct {
	tester.Suite
	dpnd dependencies          // repo handle, viper config, output recorder
	m365 its.M365IntgTestSetup // connected m365 tenant fixtures
}
// TestNoBackupTeamsChatsE2ESuite runs the e2e tests that expect a repo with
// no existing chats backups.  Requires AWS storage and M365 account
// credentials in the environment.
func TestNoBackupTeamsChatsE2ESuite(t *testing.T) {
	// fix: this previously constructed a BackupTeamsChatsE2ESuite, so none of
	// the NoBackupTeamsChatsE2ESuite test methods were ever registered or run.
	suite.Run(t, &NoBackupTeamsChatsE2ESuite{Suite: tester.NewE2ESuite(
		t,
		[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
// SetupSuite stands up the m365 fixtures and repo dependencies shared by
// every test in the suite.
func (suite *NoBackupTeamsChatsE2ESuite) SetupSuite() {
	t := suite.T()
	// chats support is still a work in progress; the whole suite is skipped.
	t.Skip("not fully implemented")

	ctx, flush := tester.NewContext(t)
	defer flush()

	suite.m365 = its.GetM365(t)
	suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
}
// TestTeamsChatsBackupListCmd_noBackups verifies that listing chats backups
// against a repo that holds none reports "No backups available".
func (suite *NoBackupTeamsChatsE2ESuite) TestTeamsChatsBackupListCmd_noBackups() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	suite.dpnd.recorder.Reset()

	cmd := cliTD.StubRootCmd(
		"backup", "list", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
	cli.BuildCommandTree(cmd)

	cmd.SetErr(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	result := suite.dpnd.recorder.String()

	// with no backups in the repo, the command prints the no-backups notice.
	assert.True(t, strings.HasSuffix(result, "No backups available\n"))
}
// ---------------------------------------------------------------------------
// tests with no prior backup
// ---------------------------------------------------------------------------
// BackupTeamsChatsE2ESuite hosts e2e tests that create fresh chats backups
// (no pre-existing backups are assumed).
type BackupTeamsChatsE2ESuite struct {
	tester.Suite
	dpnd dependencies          // repo handle, viper config, output recorder
	m365 its.M365IntgTestSetup // connected m365 tenant fixtures
}
// TestBackupTeamsChatsE2ESuite requires AWS storage and M365 account
// credentials in the environment.
func TestBackupTeamsChatsE2ESuite(t *testing.T) {
	suite.Run(t, &BackupTeamsChatsE2ESuite{Suite: tester.NewE2ESuite(
		t,
		[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
// SetupSuite stands up the m365 fixtures and repo dependencies shared by
// every test in the suite.
func (suite *BackupTeamsChatsE2ESuite) SetupSuite() {
	t := suite.T()
	// chats support is still a work in progress; the whole suite is skipped.
	t.Skip("not fully implemented")

	ctx, flush := tester.NewContext(t)
	defer flush()

	suite.m365 = its.GetM365(t)
	suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
}
// TestTeamsChatsBackupCmd_chats exercises backup creation for the chats
// data category.
func (suite *BackupTeamsChatsE2ESuite) TestTeamsChatsBackupCmd_chats() {
	runTeamsChatsBackupCategoryTest(suite, flags.DataChats)
}
// runTeamsChatsBackupCategoryTest runs `corso backup create chats` for the
// suite's test user, restricted to the given data category, and requires the
// command to succeed.
func runTeamsChatsBackupCategoryTest(suite *BackupTeamsChatsE2ESuite, category string) {
	// a freshly declared strings.Builder is already empty; the redundant
	// Reset() call was removed.
	recorder := strings.Builder{}

	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	cmd, ctx := buildTeamsChatsBackupCmd(
		ctx,
		suite.dpnd.configFilePath,
		suite.m365.User.ID,
		category,
		&recorder)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	result := recorder.String()
	t.Log("backup results", result)
}
// TestTeamsChatsBackupCmd_teamschatNotFound_chats exercises the error path
// for backing up a user that does not exist in the tenant.
func (suite *BackupTeamsChatsE2ESuite) TestTeamsChatsBackupCmd_teamschatNotFound_chats() {
	runTeamsChatsBackupTeamsChatNotFoundTest(suite, flags.DataChats)
}
// runTeamsChatsBackupTeamsChatNotFoundTest runs a chats backup against a
// user absent from the tenant and requires a "not found" error (and
// specifically not a panic surfaced as a runtime error).
func runTeamsChatsBackupTeamsChatNotFoundTest(suite *BackupTeamsChatsE2ESuite, category string) {
	// a freshly declared strings.Builder is already empty; the redundant
	// Reset() call was removed.
	recorder := strings.Builder{}

	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	cmd, ctx := buildTeamsChatsBackupCmd(
		ctx,
		suite.dpnd.configFilePath,
		"foo@not-there.com",
		category,
		&recorder)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
	assert.Contains(
		t,
		err.Error(),
		"not found",
		"error missing user not found")
	assert.NotContains(t, err.Error(), "runtime error", "panic happened")

	t.Logf("backup error message: %s", err.Error())

	result := recorder.String()
	t.Log("backup results", result)
}
// TestBackupCreateTeamsChats_badAzureClientIDFlag verifies that an invalid
// --azure-client-id value fails the backup command.
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_badAzureClientIDFlag() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	suite.dpnd.recorder.Reset()

	// use the registered --user flag: the chats create command has no
	// --teamschat flag, so the old spelling errored on flag parsing instead
	// of exercising the bad azure client ID.
	cmd := cliTD.StubRootCmd(
		"backup", "create", "chats",
		"--"+flags.UserFN, suite.m365.User.ID,
		"--azure-client-id", "invalid-value")
	cli.BuildCommandTree(cmd)

	cmd.SetErr(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}
// TestBackupCreateTeamsChats_fromConfigFile verifies that a chats backup
// succeeds when repo/storage settings are read from the config file.
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_fromConfigFile() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	suite.dpnd.recorder.Reset()

	// use the registered --user flag: the chats create command has no
	// --teamschat flag, so the old spelling would have failed flag parsing
	// and broken this test's NoError expectation.
	cmd := cliTD.StubRootCmd(
		"backup", "create", "chats",
		"--"+flags.UserFN, suite.m365.User.ID,
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
	cli.BuildCommandTree(cmd)

	cmd.SetOut(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))
}
// AWS flags

// TestBackupCreateTeamsChats_badAWSFlags verifies that explicitly supplied
// invalid AWS credentials fail the backup instead of being silently ignored.
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_badAWSFlags() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	suite.dpnd.recorder.Reset()

	// use the registered --user flag: the chats create command has no
	// --teamschat flag, so the old spelling failed on flag parsing before the
	// AWS credentials were ever inspected.
	cmd := cliTD.StubRootCmd(
		"backup", "create", "chats",
		"--"+flags.UserFN, suite.m365.User.ID,
		"--aws-access-key", "invalid-value",
		"--aws-secret-access-key", "some-invalid-value")
	cli.BuildCommandTree(cmd)

	cmd.SetOut(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	// since invalid aws creds are explicitly set, should see a failure
	require.Error(t, err, clues.ToCore(err))
}
// ---------------------------------------------------------------------------
// tests prepared with a previous backup
// ---------------------------------------------------------------------------
// PreparedBackupTeamsChatsE2ESuite hosts e2e tests that run against backups
// created ahead of time in SetupSuite.
type PreparedBackupTeamsChatsE2ESuite struct {
	tester.Suite
	dpnd      dependencies
	backupOps map[path.CategoryType]string // category -> backup ID created in SetupSuite
	m365      its.M365IntgTestSetup
}
// TestPreparedBackupTeamsChatsE2ESuite requires AWS storage and M365 account
// credentials in the environment.
func TestPreparedBackupTeamsChatsE2ESuite(t *testing.T) {
	suite.Run(t, &PreparedBackupTeamsChatsE2ESuite{
		Suite: tester.NewE2ESuite(
			t,
			[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
	})
}
// SetupSuite creates one backup per chats category for the test user and
// records each backup ID in suite.backupOps so the list/details tests have
// data to query.
func (suite *PreparedBackupTeamsChatsE2ESuite) SetupSuite() {
	t := suite.T()
	// chats support is still a work in progress; the whole suite is skipped.
	t.Skip("not fully implemented")

	ctx, flush := tester.NewContext(t)
	defer flush()

	suite.m365 = its.GetM365(t)
	suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
	suite.backupOps = make(map[path.CategoryType]string)

	var (
		teamschats = []string{suite.m365.User.ID}
		ins        = idname.NewCache(map[string]string{suite.m365.User.ID: suite.m365.User.ID})
		cats       = []path.CategoryType{
			path.ChatsCategory,
		}
	)

	for _, set := range cats {
		var (
			sel    = selectors.NewTeamsChatsBackup(teamschats)
			scopes []selectors.TeamsChatsScope
		)

		// pick the backup scope matching the category under test.
		switch set {
		case path.ChatsCategory:
			scopes = selTD.TeamsChatsBackupChatScope(sel)
		}

		sel.Include(scopes)

		bop, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
		require.NoError(t, err, clues.ToCore(err))

		err = bop.Run(ctx)
		require.NoError(t, err, clues.ToCore(err))

		bIDs := string(bop.Results.BackupID)

		// sanity check, ensure we can find the backup and its details immediately
		b, err := suite.dpnd.repo.Backup(ctx, string(bop.Results.BackupID))
		require.NoError(t, err, "retrieving recent backup by ID")
		require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")

		_, b, errs := suite.dpnd.repo.GetBackupDetails(ctx, bIDs)
		require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
		require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
		require.Equal(t, bIDs, string(b.ID), "repo details matches results id")

		suite.backupOps[set] = string(b.ID)
	}
}
// TestTeamsChatsListCmd_chats exercises `backup list chats` for the chats category.
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_chats() {
	runTeamsChatsListCmdTest(suite, path.ChatsCategory)
}
// runTeamsChatsListCmdTest runs `backup list chats` and asserts that the
// output mentions the backup created for the given category during setup.
func runTeamsChatsListCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
	suite.dpnd.recorder.Reset()

	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	listCmd := cliTD.StubRootCmd(
		"backup", "list", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
	cli.BuildCommandTree(listCmd)
	listCmd.SetOut(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, listCmd)

	// execute, then check the recorded output for the expected backup ID
	err := listCmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	output := suite.dpnd.recorder.String()
	assert.Contains(t, output, suite.backupOps[category])
}
// TestTeamsChatsListCmd_singleID_chats exercises `backup list chats` filtered
// to a single backup ID.
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_singleID_chats() {
	runTeamsChatsListSingleCmdTest(suite, path.ChatsCategory)
}
// runTeamsChatsListSingleCmdTest lists a single chats backup by ID and
// verifies the command output contains that backup's ID.
func runTeamsChatsListSingleCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
	suite.dpnd.recorder.Reset()

	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	// backup ID captured for this category during suite setup.
	// bID is already a string; the previous string(bID) conversion was a no-op.
	bID := suite.backupOps[category]

	cmd := cliTD.StubRootCmd(
		"backup", "list", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		// use the flags constant rather than a hard-coded "--backup" literal,
		// matching the details test below — TODO confirm flags.BackupFN == "backup".
		"--"+flags.BackupFN, bID)
	cli.BuildCommandTree(cmd)
	cmd.SetOut(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	// the output should reference the requested backup ID
	result := suite.dpnd.recorder.String()
	assert.Contains(t, result, bID)
}
// TestTeamsChatsListCmd_badID ensures listing with an unknown backup ID errors.
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_badID() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	badListCmd := cliTD.StubRootCmd(
		"backup", "list", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--backup", "smarfs")
	cli.BuildCommandTree(badListCmd)

	ctx = print.SetRootCmd(ctx, badListCmd)

	// an unknown backup id must surface an error from the command
	err := badListCmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}
// TestTeamsChatsDetailsCmd_chats exercises `backup details chats` for the
// chats category.
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsDetailsCmd_chats() {
	runTeamsChatsDetailsCmdTest(suite, path.ChatsCategory)
}
// runTeamsChatsDetailsCmdTest fetches backup details both through the repo
// API and the `backup details chats` command, then verifies every non-folder
// entry in the repo details appears in the command output.
func runTeamsChatsDetailsCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
	suite.dpnd.recorder.Reset()

	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	// bID is already a string; the previous string(bID) conversions were no-ops.
	bID := suite.backupOps[category]

	// fetch the details from the repo first
	deets, _, errs := suite.dpnd.repo.GetBackupDetails(ctx, bID)
	require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
	require.Empty(t, errs.Recovered())

	cmd := cliTD.StubRootCmd(
		"backup", "details", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--"+flags.BackupFN, bID)
	cli.BuildCommandTree(cmd)
	cmd.SetOut(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	// compare the output
	result := suite.dpnd.recorder.String()

	i := 0
	foundFolders := 0

	for _, ent := range deets.Entries {
		// Skip folders as they don't mean anything to the end teamschat.
		if ent.Folder != nil {
			foundFolders++
			continue
		}

		suite.Run(fmt.Sprintf("detail %d", i), func() {
			assert.Contains(suite.T(), result, ent.ShortRef)
		})

		i++
	}

	// We only backup the default folder for each category so there should be at
	// least that folder (we don't make details entries for prefix folders).
	assert.GreaterOrEqual(t, foundFolders, 1)
}
// ---------------------------------------------------------------------------
// tests for deleting backups
// ---------------------------------------------------------------------------
// BackupDeleteTeamsChatsE2ESuite runs tests for deleting chats backups.
type BackupDeleteTeamsChatsE2ESuite struct {
	tester.Suite
	// dpnd holds shared test dependencies (repo, viper config, output recorder).
	dpnd dependencies
	// backupOps holds three backups created in SetupSuite: two consumed by the
	// multi-ID delete test, one by the single-ID delete test.
	backupOps [3]operations.BackupOperation
}
func TestBackupDeleteTeamsChatsE2ESuite(t *testing.T) {
suite.Run(t, &BackupDeleteTeamsChatsE2ESuite{
Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
// SetupSuite creates three chats backups so the delete tests have known
// backup IDs to remove.
//
// NOTE: currently skipped — the teams chats service is not fully implemented.
func (suite *BackupDeleteTeamsChatsE2ESuite) SetupSuite() {
	t := suite.T()

	t.Skip("not fully implemented")

	ctx, flush := tester.NewContext(t)
	defer flush()

	suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)

	m365TeamsChatID := tconfig.M365TeamID(t)
	teamschats := []string{m365TeamsChatID}

	// some tests require an existing backup
	sel := selectors.NewTeamsChatsBackup(teamschats)
	sel.Include(selTD.TeamsChatsBackupChatScope(sel))

	// run one backup per slot in the fixed-size backupOps array
	for i := 0; i < cap(suite.backupOps); i++ {
		backupOp, err := suite.dpnd.repo.NewBackup(ctx, sel.Selector)
		require.NoError(t, err, clues.ToCore(err))

		suite.backupOps[i] = backupOp

		err = suite.backupOps[i].Run(ctx)
		require.NoError(t, err, clues.ToCore(err))
	}
}
// TestTeamsChatsBackupDeleteCmd deletes two backups by ID in a single call,
// then verifies that a follow-up details request on one of them fails.
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	cmd := cliTD.StubRootCmd(
		"backup", "delete", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--"+flags.BackupIDsFN,
		fmt.Sprintf("%s,%s",
			string(suite.backupOps[0].Results.BackupID),
			string(suite.backupOps[1].Results.BackupID)))
	cli.BuildCommandTree(cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	// a follow-up details call should fail, due to the backup ID being deleted.
	// Use the flags constant instead of the prior hard-coded "--backups" literal
	// so the flag name stays in sync with the CLI definition.
	cmd = cliTD.StubRootCmd(
		"backup", "details", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--"+flags.BackupIDsFN, string(suite.backupOps[0].Results.BackupID))
	cli.BuildCommandTree(cmd)

	err = cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}
// TestTeamsChatsBackupDeleteCmd_SingleID deletes a single backup via the
// --backup flag, then verifies the deleted backup can no longer be detailed.
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_SingleID() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	cmd := cliTD.StubRootCmd(
		"backup", "delete", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--"+flags.BackupFN,
		string(suite.backupOps[2].Results.BackupID))
	cli.BuildCommandTree(cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	// a follow-up details call should fail, due to the backup ID being deleted.
	// Use the flags constant instead of the prior hard-coded "--backup" literal
	// so the flag name stays in sync with the CLI definition.
	cmd = cliTD.StubRootCmd(
		"backup", "details", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--"+flags.BackupFN, string(suite.backupOps[2].Results.BackupID))
	cli.BuildCommandTree(cmd)

	err = cmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}
// TestTeamsChatsBackupDeleteCmd_UnknownID verifies that deleting a
// nonexistent backup ID returns an error.
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_UnknownID() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	deleteCmd := cliTD.StubRootCmd(
		"backup", "delete", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
		"--"+flags.BackupIDsFN, uuid.NewString())
	cli.BuildCommandTree(deleteCmd)

	// unknown backupIDs should error since the modelStore can't find the backup
	err := deleteCmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}
// TestTeamsChatsBackupDeleteCmd_NoBackupID verifies that delete errors when
// no backup ID flag is supplied at all.
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_NoBackupID() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	deleteCmd := cliTD.StubRootCmd(
		"backup", "delete", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
	cli.BuildCommandTree(deleteCmd)

	// empty backupIDs should error since no data provided
	err := deleteCmd.ExecuteContext(ctx)
	require.Error(t, err, clues.ToCore(err))
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
// buildTeamsChatsBackupCmd constructs a `backup create chats` command wired
// to the given config file, resource owner, and category, with command
// output captured by recorder. It returns the command and a context holding
// that command as the print root.
func buildTeamsChatsBackupCmd(
	ctx context.Context,
	configFile, resource, category string,
	recorder *strings.Builder,
) (*cobra.Command, context.Context) {
	args := []string{
		"backup", "create", "chats",
		"--" + flags.ConfigFileFN, configFile,
		"--" + flags.UserFN, resource,
		"--" + flags.CategoryDataFN, category,
	}

	cmd := cliTD.StubRootCmd(args...)
	cli.BuildCommandTree(cmd)
	cmd.SetOut(recorder)

	return cmd, print.SetRootCmd(ctx, cmd)
}

View File

@ -1,248 +0,0 @@
package backup
import (
"testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control"
)
// TeamsChatsUnitSuite holds unit tests for the teams chats backup CLI commands.
type TeamsChatsUnitSuite struct {
	tester.Suite
}
func TestTeamsChatsUnitSuite(t *testing.T) {
suite.Run(t, &TeamsChatsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAddTeamsChatsCommands verifies that each backup verb (create, list,
// details, delete) registers a teams chats subcommand with the expected
// use string, short description, and run function.
func (suite *TeamsChatsUnitSuite) TestAddTeamsChatsCommands() {
	expectUse := teamschatsServiceCommand

	cases := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			name:        "create teamschats",
			use:         createCommand,
			expectUse:   expectUse + " " + teamschatsServiceCommandCreateUseSuffix,
			expectShort: teamschatsCreateCmd().Short,
			expectRunE:  createTeamsChatsCmd,
		},
		{
			name:        "list teamschats",
			use:         listCommand,
			expectUse:   expectUse,
			expectShort: teamschatsListCmd().Short,
			expectRunE:  listTeamsChatsCmd,
		},
		{
			name:        "details teamschats",
			use:         detailsCommand,
			expectUse:   expectUse + " " + teamschatsServiceCommandDetailsUseSuffix,
			expectShort: teamschatsDetailsCmd().Short,
			expectRunE:  detailsTeamsChatsCmd,
		},
		{
			name:        "delete teamschats",
			use:         deleteCommand,
			expectUse:   expectUse + " " + teamschatsServiceCommandDeleteUseSuffix,
			expectShort: teamschatsDeleteCmd().Short,
			expectRunE:  deleteTeamsChatsCmd,
		},
	}

	for _, tc := range cases {
		suite.Run(tc.name, func() {
			t := suite.T()

			parent := &cobra.Command{Use: tc.use}

			registered := addTeamsChatsCommands(parent)
			require.NotNil(t, registered)

			// exactly one subcommand should have been attached to the parent
			children := parent.Commands()
			require.Len(t, children, 1)

			child := children[0]
			assert.Equal(t, tc.expectUse, child.Use)
			assert.Equal(t, tc.expectShort, child.Short)
			tester.AreSameFunc(t, tc.expectRunE, child.RunE)
		})
	}
}
// TestValidateTeamsChatsBackupCreateFlags checks category-flag validation:
// empty and known categories pass; unknown categories fail.
func (suite *TeamsChatsUnitSuite) TestValidateTeamsChatsBackupCreateFlags() {
	cases := []struct {
		name   string
		cats   []string
		expect assert.ErrorAssertionFunc
	}{
		{name: "none", cats: []string{}, expect: assert.NoError},
		{name: "chats", cats: []string{flags.DataChats}, expect: assert.NoError},
		{
			name: "all allowed",
			cats: []string{
				flags.DataChats,
			},
			expect: assert.NoError,
		},
		{name: "bad inputs", cats: []string{"foo"}, expect: assert.Error},
	}

	for _, tc := range cases {
		suite.Run(tc.name, func() {
			err := validateTeamsChatsBackupCreateFlags([]string{"*"}, tc.cats)
			tc.expect(suite.T(), err, clues.ToCore(err))
		})
	}
}
// TestBackupCreateFlags ensures `backup create chats` parses user, category,
// and generic backup/provider/storage flags into the expected option structs.
func (suite *TeamsChatsUnitSuite) TestBackupCreateFlags() {
	t := suite.T()

	// set up a create command populated with prepared test flag values
	cmd := cliTD.SetUpCmdHasFlags(
		t,
		&cobra.Command{Use: createCommand},
		addTeamsChatsCommands,
		[]cliTD.UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		flagsTD.WithFlags(
			teamschatsServiceCommand,
			[]string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.UserFN, flagsTD.FlgInputs(flagsTD.UsersInput),
				"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.TeamsChatsCategoryDataInput),
			},
			flagsTD.PreparedGenericBackupFlags(),
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags()))

	opts := utils.MakeTeamsChatsOpts(cmd)
	co := utils.Control()
	backupOpts := utils.ParseBackupOptions()

	// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
	// restore flags are switched over too and we no longer parse flags beyond
	// connection info into control.Options.
	assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
	assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
	assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)

	// the same prepared values should also land in the legacy control.Options
	assert.Equal(t, control.FailFast, co.FailureHandling)
	assert.True(t, co.ToggleFeatures.DisableIncrementals)
	assert.True(t, co.ToggleFeatures.ForceItemDataDownload)

	assert.ElementsMatch(t, flagsTD.UsersInput, opts.Users)
	flagsTD.AssertGenericBackupFlags(t, cmd)
	flagsTD.AssertProviderFlags(t, cmd)
	flagsTD.AssertStorageFlags(t, cmd)
}
// TestBackupListFlags ensures `backup list chats` parses the backup ID and
// list/provider/storage flags correctly.
func (suite *TeamsChatsUnitSuite) TestBackupListFlags() {
	t := suite.T()

	cmd := cliTD.SetUpCmdHasFlags(
		t,
		&cobra.Command{Use: listCommand},
		addTeamsChatsCommands,
		[]cliTD.UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		flagsTD.WithFlags(
			teamschatsServiceCommand,
			[]string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, flagsTD.BackupInput,
			},
			flagsTD.PreparedBackupListFlags(),
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags()))

	// the --backup flag value should land in the package-level flag variable
	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
	flagsTD.AssertBackupListFlags(t, cmd)
	flagsTD.AssertProviderFlags(t, cmd)
	flagsTD.AssertStorageFlags(t, cmd)
}
// TestBackupDetailsFlags ensures `backup details chats` parses the backup ID,
// skip-reduce, and teams chats/provider/storage flags correctly.
func (suite *TeamsChatsUnitSuite) TestBackupDetailsFlags() {
	t := suite.T()

	cmd := cliTD.SetUpCmdHasFlags(
		t,
		&cobra.Command{Use: detailsCommand},
		addTeamsChatsCommands,
		[]cliTD.UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		flagsTD.WithFlags(
			teamschatsServiceCommand,
			[]string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, flagsTD.BackupInput,
				"--" + flags.SkipReduceFN,
			},
			flagsTD.PreparedTeamsChatsFlags(),
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags()))

	co := utils.Control()

	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
	// --skip-reduce should toggle SkipReduce in the control options
	assert.True(t, co.SkipReduce)
	flagsTD.AssertProviderFlags(t, cmd)
	flagsTD.AssertStorageFlags(t, cmd)
	flagsTD.AssertTeamsChatsFlags(t, cmd)
}
// TestBackupDeleteFlags ensures `backup delete chats` parses the backup ID
// and provider/storage flags correctly.
func (suite *TeamsChatsUnitSuite) TestBackupDeleteFlags() {
	t := suite.T()

	cmd := cliTD.SetUpCmdHasFlags(
		t,
		&cobra.Command{Use: deleteCommand},
		addTeamsChatsCommands,
		[]cliTD.UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		flagsTD.WithFlags(
			teamschatsServiceCommand,
			[]string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, flagsTD.BackupInput,
			},
			flagsTD.PreparedProviderFlags(),
			flagsTD.PreparedStorageFlags()))

	// the --backup flag value should land in the package-level flag variable
	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
	flagsTD.AssertProviderFlags(t, cmd)
	flagsTD.AssertStorageFlags(t, cmd)
}

View File

@ -10,16 +10,15 @@ import (
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cli/backup" "github.com/alcionai/corso/src/cli/backup"
"github.com/alcionai/corso/src/cli/debug" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/export"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/help" "github.com/alcionai/corso/src/cli/help"
"github.com/alcionai/corso/src/cli/options"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/repo" "github.com/alcionai/corso/src/cli/repo"
"github.com/alcionai/corso/src/cli/restore" "github.com/alcionai/corso/src/cli/restore"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
) )
@ -38,29 +37,59 @@ var corsoCmd = &cobra.Command{
} }
func preRun(cc *cobra.Command, args []string) error { func preRun(cc *cobra.Command, args []string) error {
if err := config.InitCmd(cc, args); err != nil { if err := config.InitFunc(cc, args); err != nil {
return err return err
} }
ctx := cc.Context() ctx := cc.Context()
log := logger.Ctx(ctx) log := logger.Ctx(ctx)
fs := flags.GetPopulatedFlags(cc) flags := utils.GetPopulatedFlags(cc)
flagSl := make([]string, 0, len(fs)) flagSl := make([]string, 0, len(flags))
// currently only tracking flag names to avoid pii leakage. // currently only tracking flag names to avoid pii leakage.
for f := range fs { for f := range flags {
flagSl = append(flagSl, f) flagSl = append(flagSl, f)
} }
avoidTheseCommands := []string{ avoidTheseCommands := []string{
"corso", "env", "help", "backup", "details", "list", "restore", "export", "delete", "repo", "init", "connect", "corso", "env", "help", "backup", "details", "list", "restore", "delete", "repo", "init", "connect",
} }
if len(logger.ResolvedLogFile) > 0 && !slices.Contains(avoidTheseCommands, cc.Use) { if len(logger.ResolvedLogFile) > 0 && !slices.Contains(avoidTheseCommands, cc.Use) {
print.Infof(ctx, "Logging to file: %s", logger.ResolvedLogFile) print.Infof(ctx, "Logging to file: %s", logger.ResolvedLogFile)
} }
avoidTheseDescription := []string{
"Initialize a repository.",
"Initialize a S3 repository",
"Help about any command",
"Free, Secure, Open-Source Backup for M365.",
}
if !slices.Contains(avoidTheseDescription, cc.Short) {
overrides := map[string]string{}
if cc.Short == "Connect to a S3 repository" {
// Get s3 overrides for connect. Ideally we also need this
// for init, but we don't reach this block for init.
overrides = repo.S3Overrides()
}
cfg, err := config.GetConfigRepoDetails(ctx, true, overrides)
if err != nil {
log.Error("Error while getting config info to run command: ", cc.Use)
return err
}
utils.SendStartCorsoEvent(
ctx,
cfg.Storage,
cfg.Account.ID(),
map[string]any{"command": cc.CommandPath()},
cfg.RepoID,
options.Control())
}
// handle deprecated user flag in Backup exchange command // handle deprecated user flag in Backup exchange command
if cc.CommandPath() == "corso backup create exchange" { if cc.CommandPath() == "corso backup create exchange" {
handleMailBoxFlag(ctx, cc, flagSl) handleMailBoxFlag(ctx, cc, flagSl)
@ -73,7 +102,7 @@ func preRun(cc *cobra.Command, args []string) error {
func handleMailBoxFlag(ctx context.Context, c *cobra.Command, flagNames []string) { func handleMailBoxFlag(ctx context.Context, c *cobra.Command, flagNames []string) {
if !slices.Contains(flagNames, "user") && !slices.Contains(flagNames, "mailbox") { if !slices.Contains(flagNames, "user") && !slices.Contains(flagNames, "mailbox") {
print.Err(ctx, "either --user or --mailbox flag is required") print.Errf(ctx, "either --user or --mailbox flag is required")
os.Exit(1) os.Exit(1)
} }
@ -109,7 +138,7 @@ func CorsoCommand() *cobra.Command {
func BuildCommandTree(cmd *cobra.Command) { func BuildCommandTree(cmd *cobra.Command) {
// want to order flags explicitly // want to order flags explicitly
cmd.PersistentFlags().SortFlags = false cmd.PersistentFlags().SortFlags = false
flags.AddRunModeFlag(cmd, true) utils.AddRunModeFlag(cmd, true)
cmd.Flags().BoolP("version", "v", false, "current version info") cmd.Flags().BoolP("version", "v", false, "current version info")
cmd.PersistentPreRunE = preRun cmd.PersistentPreRunE = preRun
@ -117,7 +146,7 @@ func BuildCommandTree(cmd *cobra.Command) {
logger.AddLoggingFlags(cmd) logger.AddLoggingFlags(cmd)
observe.AddProgressBarFlags(cmd) observe.AddProgressBarFlags(cmd)
print.AddOutputFlag(cmd) print.AddOutputFlag(cmd)
flags.AddGlobalOperationFlags(cmd) options.AddGlobalOperationFlags(cmd)
cmd.SetUsageTemplate(indentExamplesTemplate(corsoCmd.UsageTemplate())) cmd.SetUsageTemplate(indentExamplesTemplate(corsoCmd.UsageTemplate()))
cmd.CompletionOptions.DisableDefaultCmd = true cmd.CompletionOptions.DisableDefaultCmd = true
@ -125,8 +154,6 @@ func BuildCommandTree(cmd *cobra.Command) {
repo.AddCommands(cmd) repo.AddCommands(cmd)
backup.AddCommands(cmd) backup.AddCommands(cmd)
restore.AddCommands(cmd) restore.AddCommands(cmd)
export.AddCommands(cmd)
debug.AddCommands(cmd)
help.AddCommands(cmd) help.AddCommands(cmd)
} }
@ -138,15 +165,15 @@ func BuildCommandTree(cmd *cobra.Command) {
func Handle() { func Handle() {
//nolint:forbidigo //nolint:forbidigo
ctx := config.Seed(context.Background()) ctx := config.Seed(context.Background())
ctx, log := logger.Seed(ctx, logger.PreloadLoggingFlags(os.Args[1:]))
ctx = print.SetRootCmd(ctx, corsoCmd) ctx = print.SetRootCmd(ctx, corsoCmd)
ctx = observe.SeedObserver(ctx, print.StderrWriter(ctx), observe.PreloadFlags())
observe.SeedWriter(ctx, print.StderrWriter(ctx), observe.PreloadFlags())
BuildCommandTree(corsoCmd) BuildCommandTree(corsoCmd)
defer func() { ctx, log := logger.Seed(ctx, logger.PreloadLoggingFlags(os.Args[1:]))
observe.Flush(ctx) // flush the progress bars
defer func() {
_ = log.Sync() // flush all logs in the buffer _ = log.Sync() // flush all logs in the buffer
}() }()

View File

@ -6,7 +6,6 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/spf13/viper" "github.com/spf13/viper"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/internal/common/str" "github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/credentials"
@ -16,34 +15,32 @@ import (
func m365ConfigsFromViper(vpr *viper.Viper) (account.M365Config, error) { func m365ConfigsFromViper(vpr *viper.Viper) (account.M365Config, error) {
var m365 account.M365Config var m365 account.M365Config
m365.AzureClientID = vpr.GetString(account.AzureClientID) providerType := vpr.GetString(AccountProviderTypeKey)
m365.AzureClientSecret = vpr.GetString(account.AzureSecret) if providerType != account.ProviderM365.String() {
m365.AzureTenantID = vpr.GetString(account.AzureTenantIDKey) return m365, clues.New("unsupported account provider: " + providerType)
}
m365.AzureTenantID = vpr.GetString(AzureTenantIDKey)
return m365, nil return m365, nil
} }
func m365Overrides(in map[string]string) map[string]string { func m365Overrides(in map[string]string) map[string]string {
return map[string]string{ return map[string]string{
account.AzureTenantID: in[account.AzureTenantID], account.AzureTenantID: in[account.AzureTenantID],
account.AccountProviderTypeKey: in[account.AccountProviderTypeKey], AccountProviderTypeKey: in[AccountProviderTypeKey],
} }
} }
// add m365 config key names that require path related validations
var m365PathKeys = []string{}
// configureAccount builds a complete account configuration from a mix of // configureAccount builds a complete account configuration from a mix of
// viper properties and manual overrides. // viper properties and manual overrides.
func configureAccount( func configureAccount(
vpr *viper.Viper, vpr *viper.Viper,
readConfigFromViper bool, readConfigFromViper bool,
matchFromConfig bool,
overrides map[string]string, overrides map[string]string,
) (account.Account, error) { ) (account.Account, error) {
var ( var (
m365Cfg account.M365Config m365Cfg account.M365Config
m365 credentials.M365
acct account.Account acct account.Account
err error err error
) )
@ -54,20 +51,13 @@ func configureAccount(
return acct, clues.Wrap(err, "reading m365 configs from corso config file") return acct, clues.Wrap(err, "reading m365 configs from corso config file")
} }
if matchFromConfig { if err := mustMatchConfig(vpr, m365Overrides(overrides)); err != nil {
providerType := vpr.GetString(account.AccountProviderTypeKey) return acct, clues.Wrap(err, "verifying m365 configs in corso config file")
if providerType != account.ProviderM365.String() {
return acct, clues.New("unsupported account provider: [" + providerType + "]")
}
if err := mustMatchConfig(vpr, m365Overrides(overrides), m365PathKeys); err != nil {
return acct, clues.Wrap(err, "verifying m365 configs in corso config file")
}
} }
} }
// compose the m365 config and credentials // compose the m365 config and credentials
m365 = GetM365(m365Cfg) m365 := credentials.GetM365()
if err := m365.Validate(); err != nil { if err := m365.Validate(); err != nil {
return acct, clues.Wrap(err, "validating m365 credentials") return acct, clues.Wrap(err, "validating m365 credentials")
} }
@ -76,15 +66,14 @@ func configureAccount(
M365: m365, M365: m365,
AzureTenantID: str.First( AzureTenantID: str.First(
overrides[account.AzureTenantID], overrides[account.AzureTenantID],
flags.AzureClientTenantFV, m365Cfg.AzureTenantID,
os.Getenv(account.AzureTenantID), os.Getenv(account.AzureTenantID)),
m365Cfg.AzureTenantID),
} }
// ensure required properties are present // ensure required properties are present
if err := requireProps(map[string]string{ if err := requireProps(map[string]string{
credentials.AzureClientID: m365Cfg.M365.AzureClientID, credentials.AzureClientID: m365Cfg.AzureClientID,
credentials.AzureClientSecret: m365Cfg.M365.AzureClientSecret, credentials.AzureClientSecret: m365Cfg.AzureClientSecret,
account.AzureTenantID: m365Cfg.AzureTenantID, account.AzureTenantID: m365Cfg.AzureTenantID,
}); err != nil { }); err != nil {
return acct, err return acct, err
@ -98,20 +87,3 @@ func configureAccount(
return acct, nil return acct, nil
} }
// M365 is a helper for aggregating m365 secrets and credentials.
func GetM365(m365Cfg account.M365Config) credentials.M365 {
AzureClientID := str.First(
flags.AzureClientIDFV,
os.Getenv(credentials.AzureClientID),
m365Cfg.AzureClientID)
AzureClientSecret := str.First(
flags.AzureClientSecretFV,
os.Getenv(credentials.AzureClientSecret),
m365Cfg.AzureClientSecret)
return credentials.M365{
AzureClientID: AzureClientID,
AzureClientSecret: AzureClientSecret,
}
}

View File

@ -2,50 +2,48 @@ package config
import ( import (
"context" "context"
"errors"
"io/fs"
"os" "os"
"path/filepath" "path/filepath"
"slices"
"strings" "strings"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/viper" "github.com/spf13/viper"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control/repository"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/storage" "github.com/alcionai/corso/src/pkg/storage"
) )
const ( const (
RepoID = "repo_id" // S3 config
StorageProviderTypeKey = "provider"
BucketNameKey = "bucket"
EndpointKey = "endpoint"
PrefixKey = "prefix"
DisableTLSKey = "disable_tls"
DisableTLSVerificationKey = "disable_tls_verification"
RepoID = "repo_id"
// Corso passphrase in config // M365 config
CorsoPassphrase = "passphrase" AccountProviderTypeKey = "account_provider"
CorsoUser = "corso_user" AzureTenantIDKey = "azure_tenantid"
CorsoHost = "corso_host"
) )
var ( var (
defaultConfigFilePath string configFilePath string
configDir string configFilePathFlag string
displayDefaultFP = filepath.Join("$HOME", ".corso.toml") configDir string
displayDefaultFP = filepath.Join("$HOME", ".corso.toml")
) )
// RepoDetails holds the repository configuration retrieved from // RepoDetails holds the repository configuration retrieved from
// the .corso.toml configuration file. // the .corso.toml configuration file.
type RepoDetails struct { type RepoDetails struct {
Storage storage.Storage Storage storage.Storage
Account account.Account Account account.Account
RepoID string RepoID string
RepoUser string
RepoHost string
} }
// Attempts to set the default dir and config file path. // Attempts to set the default dir and config file path.
@ -57,7 +55,7 @@ func init() {
Infof(context.Background(), "cannot stat CORSO_CONFIG_DIR [%s]: %v", envDir, err) Infof(context.Background(), "cannot stat CORSO_CONFIG_DIR [%s]: %v", envDir, err)
} else { } else {
configDir = envDir configDir = envDir
defaultConfigFilePath = filepath.Join(configDir, ".corso.toml") configFilePath = filepath.Join(configDir, ".corso.toml")
} }
} }
@ -68,71 +66,43 @@ func init() {
if len(configDir) == 0 { if len(configDir) == 0 {
configDir = homeDir configDir = homeDir
defaultConfigFilePath = filepath.Join(configDir, ".corso.toml") configFilePath = filepath.Join(configDir, ".corso.toml")
} }
} }
// adds the persistent flag --config-file to the provided command. // adds the persistent flag --config-file to the provided command.
func AddConfigFlags(cmd *cobra.Command) { func AddConfigFlags(cmd *cobra.Command) {
pf := cmd.PersistentFlags() fs := cmd.PersistentFlags()
pf.StringVar( fs.StringVar(
&flags.ConfigFileFV, &configFilePathFlag,
flags.ConfigFileFN, displayDefaultFP, "config file location") "config-file", displayDefaultFP, "config file location")
} }
// --------------------------------------------------------------------------------------------------------- // ---------------------------------------------------------------------------------------------------------
// Initialization & Storage // Initialization & Storage
// --------------------------------------------------------------------------------------------------------- // ---------------------------------------------------------------------------------------------------------
// InitCmd provides a func that lazily initializes viper and // InitFunc provides a func that lazily initializes viper and
// verifies that the configuration was able to read a file. // verifies that the configuration was able to read a file.
func InitCmd(cmd *cobra.Command, args []string) error { func InitFunc(cmd *cobra.Command, args []string) error {
_, err := commonInit(cmd.Context(), flags.ConfigFileFV) fp := configFilePathFlag
return clues.Stack(err).OrNil()
}
// InitConfig allows sdk consumers to initialize viper.
func InitConfig(
ctx context.Context,
userDefinedConfigFile string,
) (context.Context, error) {
return commonInit(ctx, userDefinedConfigFile)
}
func commonInit(
ctx context.Context,
userDefinedConfigFile string,
) (context.Context, error) {
fp := userDefinedConfigFile
if len(fp) == 0 || fp == displayDefaultFP { if len(fp) == 0 || fp == displayDefaultFP {
fp = defaultConfigFilePath fp = configFilePath
} }
vpr := GetViper(ctx) err := initWithViper(GetViper(cmd.Context()), fp)
if err := initWithViper(ctx, vpr, fp); err != nil { if err != nil {
return ctx, err return err
} }
return SetViper(ctx, vpr), clues.Stack(Read(ctx)).OrNil() return Read(cmd.Context())
} }
// initWithViper implements InitConfig, but takes in a viper // initWithViper implements InitConfig, but takes in a viper
// struct for testing. // struct for testing.
func initWithViper( func initWithViper(vpr *viper.Viper, configFP string) error {
ctx context.Context,
vpr *viper.Viper,
configFP string,
) error {
logger.Ctx(ctx).Debugw("initializing viper", "config_file_path", configFP)
defer func() {
logger.Ctx(ctx).Debugw("initialized config", "config_file_path", configFP)
}()
// Configure default config file location // Configure default config file location
if len(configFP) == 0 || configFP == displayDefaultFP { if configFP == "" || configFP == displayDefaultFP {
configFP = defaultConfigFilePath
// Find home directory. // Find home directory.
_, err := os.Stat(configDir) _, err := os.Stat(configDir)
if err != nil { if err != nil {
@ -143,23 +113,26 @@ func initWithViper(
vpr.AddConfigPath(configDir) vpr.AddConfigPath(configDir)
vpr.SetConfigType("toml") vpr.SetConfigType("toml")
vpr.SetConfigName(".corso") vpr.SetConfigName(".corso")
} else {
ext := filepath.Ext(configFP)
if len(ext) == 0 {
return clues.New("config file requires an extension e.g. `toml`")
}
fileName := filepath.Base(configFP) return nil
fileName = strings.TrimSuffix(fileName, ext)
vpr.SetConfigType(strings.TrimPrefix(ext, "."))
vpr.SetConfigName(fileName)
vpr.SetConfigFile(configFP)
// We also configure the path, type and filename
// because `vpr.SafeWriteConfig` needs these set to
// work correctly (it does not use the configured file)
vpr.AddConfigPath(filepath.Dir(configFP))
} }
vpr.SetConfigFile(configFP)
// We also configure the path, type and filename
// because `vpr.SafeWriteConfig` needs these set to
// work correctly (it does not use the configured file)
vpr.AddConfigPath(filepath.Dir(configFP))
ext := filepath.Ext(configFP)
if len(ext) == 0 {
return clues.New("config file requires an extension e.g. `toml`")
}
fileName := filepath.Base(configFP)
fileName = strings.TrimSuffix(fileName, ext)
vpr.SetConfigType(strings.TrimPrefix(ext, "."))
vpr.SetConfigName(fileName)
return nil return nil
} }
@ -214,44 +187,34 @@ func Read(ctx context.Context) error {
// It does not check for conflicts or existing data. // It does not check for conflicts or existing data.
func WriteRepoConfig( func WriteRepoConfig(
ctx context.Context, ctx context.Context,
wcs storage.WriteConfigToStorer, s3Config storage.S3Config,
m365Config account.M365Config, m365Config account.M365Config,
repoOpts repository.Options,
repoID string, repoID string,
) error { ) error {
return writeRepoConfigWithViper( return writeRepoConfigWithViper(GetViper(ctx), s3Config, m365Config, repoID)
GetViper(ctx),
wcs,
m365Config,
repoOpts,
repoID)
} }
// writeRepoConfigWithViper implements WriteRepoConfig, but takes in a viper // writeRepoConfigWithViper implements WriteRepoConfig, but takes in a viper
// struct for testing. // struct for testing.
func writeRepoConfigWithViper( func writeRepoConfigWithViper(
vpr *viper.Viper, vpr *viper.Viper,
wcs storage.WriteConfigToStorer, s3Config storage.S3Config,
m365Config account.M365Config, m365Config account.M365Config,
repoOpts repository.Options,
repoID string, repoID string,
) error { ) error {
// Write storage configuration to viper s3Config = s3Config.Normalize()
wcs.WriteConfigToStore(vpr) // Rudimentary support for persisting repo config
// TODO: Handle conflicts, support other config types
vpr.Set(StorageProviderTypeKey, storage.ProviderS3.String())
vpr.Set(BucketNameKey, s3Config.Bucket)
vpr.Set(EndpointKey, s3Config.Endpoint)
vpr.Set(PrefixKey, s3Config.Prefix)
vpr.Set(DisableTLSKey, s3Config.DoNotUseTLS)
vpr.Set(DisableTLSVerificationKey, s3Config.DoNotVerifyTLS)
vpr.Set(RepoID, repoID) vpr.Set(RepoID, repoID)
// Need if-checks as Viper will write empty values otherwise. vpr.Set(AccountProviderTypeKey, account.ProviderM365.String())
if len(repoOpts.User) > 0 { vpr.Set(AzureTenantIDKey, m365Config.AzureTenantID)
vpr.Set(CorsoUser, repoOpts.User)
}
if len(repoOpts.Host) > 0 {
vpr.Set(CorsoHost, repoOpts.Host)
}
vpr.Set(account.AccountProviderTypeKey, account.ProviderM365.String())
vpr.Set(account.AzureTenantIDKey, m365Config.AzureTenantID)
if err := vpr.SafeWriteConfig(); err != nil { if err := vpr.SafeWriteConfig(); err != nil {
if _, ok := err.(viper.ConfigFileAlreadyExistsError); ok { if _, ok := err.(viper.ConfigFileAlreadyExistsError); ok {
@ -264,36 +227,30 @@ func writeRepoConfigWithViper(
return nil return nil
} }
// ReadCorsoConfig creates a storage and account instance by mediating all the possible // GetStorageAndAccount creates a storage and account instance by mediating all the possible
// data sources (config file, env vars, flag overrides) and the config file. // data sources (config file, env vars, flag overrides) and the config file.
func ReadCorsoConfig( func GetConfigRepoDetails(
ctx context.Context, ctx context.Context,
provider storage.ProviderType,
readFromFile bool, readFromFile bool,
mustMatchFromConfig bool,
overrides map[string]string, overrides map[string]string,
) (RepoDetails, error) { ) (
config, err := getStorageAndAccountWithViper( RepoDetails,
ctx, error,
GetViper(ctx), ) {
provider, config, err := getStorageAndAccountWithViper(GetViper(ctx), readFromFile, overrides)
readFromFile,
mustMatchFromConfig,
overrides)
return config, err return config, err
} }
// getSorageAndAccountWithViper implements GetSorageAndAccount, but takes in a viper // getSorageAndAccountWithViper implements GetSorageAndAccount, but takes in a viper
// struct for testing. // struct for testing.
func getStorageAndAccountWithViper( func getStorageAndAccountWithViper(
ctx context.Context,
vpr *viper.Viper, vpr *viper.Viper,
provider storage.ProviderType,
readFromFile bool, readFromFile bool,
mustMatchFromConfig bool,
overrides map[string]string, overrides map[string]string,
) (RepoDetails, error) { ) (
RepoDetails,
error,
) {
var ( var (
config RepoDetails config RepoDetails
err error err error
@ -303,19 +260,12 @@ func getStorageAndAccountWithViper(
// possibly read the prior config from a .corso file // possibly read the prior config from a .corso file
if readFromFile { if readFromFile {
ctx = clues.Add(ctx, "viper_config_file", vpr.ConfigFileUsed()) err = vpr.ReadInConfig()
logger.Ctx(ctx).Debug("reading config from file") if err != nil {
if _, ok := err.(viper.ConfigFileNotFoundError); !ok {
if err := vpr.ReadInConfig(); err != nil {
configNotSet := errors.As(err, &viper.ConfigFileNotFoundError{})
configNotFound := errors.Is(err, fs.ErrNotExist)
if !configNotSet && !configNotFound {
return config, clues.Wrap(err, "reading corso config file: "+vpr.ConfigFileUsed()) return config, clues.Wrap(err, "reading corso config file: "+vpr.ConfigFileUsed())
} }
logger.Ctx(ctx).Info("config file not found")
readConfigFromViper = false readConfigFromViper = false
} }
@ -323,49 +273,36 @@ func getStorageAndAccountWithViper(
config.RepoID = vpr.GetString(RepoID) config.RepoID = vpr.GetString(RepoID)
} }
config.Account, err = configureAccount(vpr, readConfigFromViper, mustMatchFromConfig, overrides) config.Account, err = configureAccount(vpr, readConfigFromViper, overrides)
if err != nil { if err != nil {
return config, clues.Wrap(err, "retrieving account configuration details") return config, clues.Wrap(err, "retrieving account configuration details")
} }
config.Storage, err = configureStorage( config.Storage, err = configureStorage(vpr, readConfigFromViper, overrides)
vpr,
provider,
readConfigFromViper,
mustMatchFromConfig,
overrides)
if err != nil { if err != nil {
return config, clues.Wrap(err, "retrieving storage provider details") return config, clues.Wrap(err, "retrieving storage provider details")
} }
config.RepoUser, config.RepoHost = getUserHost(vpr, readConfigFromViper)
return config, nil return config, nil
} }
func getUserHost(vpr *viper.Viper, readConfigFromViper bool) (string, string) {
user := str.First(flags.UserMaintenanceFV, vpr.GetString(CorsoUser))
host := str.First(flags.HostnameMaintenanceFV, vpr.GetString(CorsoHost))
// Fine if these are empty; later code will assign a meaningful default if
// needed.
return user, host
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Helper funcs // Helper funcs
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
var constToTomlKeyMap = map[string]string{ var constToTomlKeyMap = map[string]string{
account.AzureTenantID: account.AzureTenantIDKey, account.AzureTenantID: AzureTenantIDKey,
account.AccountProviderTypeKey: account.AccountProviderTypeKey, AccountProviderTypeKey: AccountProviderTypeKey,
storage.Bucket: BucketNameKey,
storage.Endpoint: EndpointKey,
storage.Prefix: PrefixKey,
StorageProviderTypeKey: StorageProviderTypeKey,
} }
// mustMatchConfig compares the values of each key to their config file value in viper. // mustMatchConfig compares the values of each key to their config file value in viper.
// If any value differs from the viper value, an error is returned. // If any value differs from the viper value, an error is returned.
// values in m that aren't stored in the config are ignored. // values in m that aren't stored in the config are ignored.
// TODO(pandeyabs): This code is currently duplicated in 2 places. func mustMatchConfig(vpr *viper.Viper, m map[string]string) error {
func mustMatchConfig(vpr *viper.Viper, m map[string]string, pathKeys []string) error {
for k, v := range m { for k, v := range m {
if len(v) == 0 { if len(v) == 0 {
continue // empty variables will get caught by configuration validators, if necessary continue // empty variables will get caught by configuration validators, if necessary
@ -377,16 +314,7 @@ func mustMatchConfig(vpr *viper.Viper, m map[string]string, pathKeys []string) e
} }
vv := vpr.GetString(tomlK) vv := vpr.GetString(tomlK)
areEqual := false if v != vv {
// some of the values maybe paths, hence they require more than just string equality
if len(pathKeys) > 0 && slices.Contains(pathKeys, k) {
areEqual = path.ArePathsEquivalent(v, vv)
} else {
areEqual = v == vv
}
if !areEqual {
return clues.New("value of " + k + " (" + v + ") does not match corso configuration value (" + vv + ")") return clues.New("value of " + k + " (" + v + ") does not match corso configuration value (" + vv + ")")
} }
} }

View File

@ -0,0 +1,327 @@
package config
import (
"fmt"
"os"
"path/filepath"
"testing"
"github.com/alcionai/clues"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/credentials"
"github.com/alcionai/corso/src/pkg/storage"
)
const (
configFileTemplate = `
` + BucketNameKey + ` = '%s'
` + EndpointKey + ` = 's3.amazonaws.com'
` + PrefixKey + ` = 'test-prefix/'
` + StorageProviderTypeKey + ` = 'S3'
` + AccountProviderTypeKey + ` = 'M365'
` + AzureTenantIDKey + ` = '%s'
` + DisableTLSKey + ` = 'false'
` + DisableTLSVerificationKey + ` = 'false'
`
)
// ConfigSuite holds the unit tests for the cli config package.
type ConfigSuite struct {
	tester.Suite
}

// TestConfigSuite runs the ConfigSuite unit tests.
func TestConfigSuite(t *testing.T) {
	suite.Run(t, &ConfigSuite{Suite: tester.NewUnitSuite(t)})
}
// TestRequireProps verifies that requireProps accepts property maps whose
// values are all populated, and rejects maps containing an empty value.
func (suite *ConfigSuite) TestRequireProps() {
	table := []struct {
		name     string
		props    map[string]string
		errCheck assert.ErrorAssertionFunc
	}{
		{
			name:     "populated value",
			props:    map[string]string{"exists": "I have seen the fnords!"},
			errCheck: assert.NoError,
		},
		{
			name:     "empty value",
			props:    map[string]string{"not-exists": ""},
			errCheck: assert.Error,
		},
	}

	for _, test := range table {
		// run each case as a named subtest so a failure identifies the case;
		// the original table declared `name` but never populated or used it.
		suite.Run(test.name, func() {
			err := requireProps(test.props)
			test.errCheck(suite.T(), err, clues.ToCore(err))
		})
	}
}
// TestReadRepoConfigBasic writes a config file from the shared template,
// points viper at it, and asserts the s3 bucket and azure tenant values
// round-trip through the file.
func (suite *ConfigSuite) TestReadRepoConfigBasic() {
	const (
		bucket   = "read-repo-config-basic-bucket"
		tenantID = "6f34ac30-8196-469b-bf8f-d83deadbbbba"
	)

	t := suite.T()
	vpr := viper.New()

	// Generate the test config file from the template.
	cfgPath := filepath.Join(t.TempDir(), "corso.toml")
	cfgBody := fmt.Sprintf(configFileTemplate, bucket, tenantID)

	err := os.WriteFile(cfgPath, []byte(cfgBody), 0o700)
	require.NoError(t, err, clues.ToCore(err))

	// Point viper at the generated file and load it.
	vpr.SetConfigFile(cfgPath)

	err = vpr.ReadInConfig()
	require.NoError(t, err, "reading repo config", clues.ToCore(err))

	s3Cfg, err := s3ConfigsFromViper(vpr)
	require.NoError(t, err, clues.ToCore(err))
	assert.Equal(t, bucket, s3Cfg.Bucket)

	m365, err := m365ConfigsFromViper(vpr)
	require.NoError(t, err, clues.ToCore(err))
	assert.Equal(t, tenantID, m365.AzureTenantID)
}
// TestWriteReadConfig round-trips a repo configuration: it persists s3 and
// m365 settings to a temp config file via viper, reads the file back, and
// asserts the stored values match what was written.
func (suite *ConfigSuite) TestWriteReadConfig() {
	const (
		bucket   = "write-read-config-bucket"
		tenantID = "3c0748d2-470e-444c-9064-1268e52609d5"
	)

	t := suite.T()
	vpr := viper.New()
	cfgFile := filepath.Join(t.TempDir(), "corso.toml")

	err := initWithViper(vpr, cfgFile)
	require.NoError(t, err, "initializing repo config", clues.ToCore(err))

	wroteS3 := storage.S3Config{
		Bucket:         bucket,
		DoNotUseTLS:    true,
		DoNotVerifyTLS: true,
	}
	wroteM365 := account.M365Config{AzureTenantID: tenantID}

	err = writeRepoConfigWithViper(vpr, wroteS3, wroteM365, "repoid")
	require.NoError(t, err, "writing repo config", clues.ToCore(err))

	err = vpr.ReadInConfig()
	require.NoError(t, err, "reading repo config", clues.ToCore(err))

	gotS3, err := s3ConfigsFromViper(vpr)
	require.NoError(t, err, clues.ToCore(err))
	assert.Equal(t, gotS3.Bucket, wroteS3.Bucket)
	assert.Equal(t, gotS3.DoNotUseTLS, wroteS3.DoNotUseTLS)
	assert.Equal(t, gotS3.DoNotVerifyTLS, wroteS3.DoNotVerifyTLS)

	gotM365, err := m365ConfigsFromViper(vpr)
	require.NoError(t, err, clues.ToCore(err))
	assert.Equal(t, gotM365.AzureTenantID, wroteM365.AzureTenantID)
}
// TestMustMatchConfig verifies that mustMatchConfig accepts override maps
// whose values agree with (or are absent from) the persisted config, and
// rejects overrides that conflict with it.
func (suite *ConfigSuite) TestMustMatchConfig() {
	var (
		t = suite.T()
		vpr = viper.New()
		// Configure viper to read test config file
		testConfigFilePath = filepath.Join(t.TempDir(), "corso.toml")
	)

	const (
		bkt = "must-match-config-bucket"
		tid = "dfb12063-7598-458b-85ab-42352c5c25e2"
	)

	err := initWithViper(vpr, testConfigFilePath)
	require.NoError(t, err, "initializing repo config", clues.ToCore(err))

	s3Cfg := storage.S3Config{Bucket: bkt}
	m365 := account.M365Config{AzureTenantID: tid}

	err = writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid")
	require.NoError(t, err, "writing repo config", clues.ToCore(err))

	err = vpr.ReadInConfig()
	require.NoError(t, err, "reading repo config", clues.ToCore(err))

	table := []struct {
		name     string
		input    map[string]string
		errCheck assert.ErrorAssertionFunc
	}{
		{
			name: "full match",
			input: map[string]string{
				storage.Bucket:        bkt,
				account.AzureTenantID: tid,
			},
			errCheck: assert.NoError,
		},
		{
			name: "empty values",
			input: map[string]string{
				storage.Bucket:        "",
				account.AzureTenantID: "",
			},
			errCheck: assert.NoError,
		},
		{
			name:     "no overrides",
			input:    map[string]string{},
			errCheck: assert.NoError,
		},
		{
			name:     "nil map",
			input:    nil,
			errCheck: assert.NoError,
		},
		{
			name: "no recognized keys",
			input: map[string]string{
				"fnords":   "smurfs",
				"nonsense": "",
			},
			errCheck: assert.NoError,
		},
		{
			name: "mismatch",
			input: map[string]string{
				storage.Bucket:        tid,
				account.AzureTenantID: bkt,
			},
			errCheck: assert.Error,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			// capture the result locally: the original passed the stale outer
			// `err` (from setup) to clues.ToCore instead of the value returned
			// by mustMatchConfig, so failure messages reported the wrong error.
			err := mustMatchConfig(vpr, test.input)
			test.errCheck(suite.T(), err, clues.ToCore(err))
		})
	}
}
// ------------------------------------------------------------
// integration tests
// ------------------------------------------------------------
// ConfigIntegrationSuite holds config tests that require live AWS and M365
// credentials supplied through the environment.
type ConfigIntegrationSuite struct {
	tester.Suite
}

// TestConfigIntegrationSuite runs the integration suite; it is skipped unless
// the AWS storage and M365 account credential env vars are present.
func TestConfigIntegrationSuite(t *testing.T) {
	suite.Run(t, &ConfigIntegrationSuite{Suite: tester.NewIntegrationSuite(
		t,
		[][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
	)})
}
// TestGetStorageAndAccount writes a full repo config to a temp file, then
// resolves it through getStorageAndAccountWithViper (readFromFile=true, no
// overrides) and asserts the storage, repo id, and account details all match
// what was persisted, with secrets coming from the environment.
func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
	t := suite.T()
	vpr := viper.New()

	const (
		bkt = "get-storage-and-account-bucket"
		end = "https://get-storage-and-account.com"
		pfx = "get-storage-and-account-prefix/"
		tid = "3a2faa4e-a882-445c-9d27-f552ef189381"
	)

	// Configure viper to read test config file
	testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml")
	err := initWithViper(vpr, testConfigFilePath)
	require.NoError(t, err, "initializing repo config", clues.ToCore(err))

	s3Cfg := storage.S3Config{
		Bucket:         bkt,
		Endpoint:       end,
		Prefix:         pfx,
		DoNotVerifyTLS: true,
		DoNotUseTLS:    true,
	}
	m365 := account.M365Config{AzureTenantID: tid}

	err = writeRepoConfigWithViper(vpr, s3Cfg, m365, "repoid")
	require.NoError(t, err, "writing repo config", clues.ToCore(err))

	err = vpr.ReadInConfig()
	require.NoError(t, err, "reading repo config", clues.ToCore(err))

	// resolve the written file back into storage + account structs
	config, err := getStorageAndAccountWithViper(vpr, true, nil)
	require.NoError(t, err, "getting storage and account from config", clues.ToCore(err))

	readS3Cfg, err := config.Storage.S3Config()
	require.NoError(t, err, "reading s3 config from storage", clues.ToCore(err))
	assert.Equal(t, readS3Cfg.Bucket, s3Cfg.Bucket)
	assert.Equal(t, readS3Cfg.Endpoint, s3Cfg.Endpoint)
	assert.Equal(t, readS3Cfg.Prefix, s3Cfg.Prefix)
	assert.Equal(t, readS3Cfg.DoNotUseTLS, s3Cfg.DoNotUseTLS)
	assert.Equal(t, readS3Cfg.DoNotVerifyTLS, s3Cfg.DoNotVerifyTLS)
	assert.Equal(t, config.RepoID, "repoid")

	// the passphrase is never written to the file; it must come from env
	common, err := config.Storage.CommonConfig()
	require.NoError(t, err, "reading common config from storage", clues.ToCore(err))
	assert.Equal(t, common.CorsoPassphrase, os.Getenv(credentials.CorsoPassphrase))

	// azure client creds likewise resolve from env, not the file
	readM365, err := config.Account.M365Config()
	require.NoError(t, err, "reading m365 config from account", clues.ToCore(err))
	assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID)
	assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID))
	assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret))
}
// TestGetStorageAndAccount_noFileOnlyOverrides resolves storage and account
// details with readFromFile=false, so every value must come from the
// overrides map (plus env-sourced credentials); it also asserts RepoID stays
// empty when no config file is involved.
func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount_noFileOnlyOverrides() {
	t := suite.T()
	vpr := viper.New()

	const (
		bkt = "get-storage-and-account-no-file-bucket"
		end = "https://get-storage-and-account.com/no-file"
		pfx = "get-storage-and-account-no-file-prefix/"
		tid = "88f8522b-18e4-4d0f-b514-2d7b34d4c5a1"
	)

	m365 := account.M365Config{AzureTenantID: tid}

	// every configurable value is supplied as an override; no file is read
	overrides := map[string]string{
		account.AzureTenantID:  tid,
		AccountProviderTypeKey: account.ProviderM365.String(),
		storage.Bucket:         bkt,
		storage.Endpoint:       end,
		storage.Prefix:         pfx,
		storage.DoNotUseTLS:    "true",
		storage.DoNotVerifyTLS: "true",
		StorageProviderTypeKey: storage.ProviderS3.String(),
	}

	config, err := getStorageAndAccountWithViper(vpr, false, overrides)
	require.NoError(t, err, "getting storage and account from config", clues.ToCore(err))

	readS3Cfg, err := config.Storage.S3Config()
	require.NoError(t, err, "reading s3 config from storage", clues.ToCore(err))
	assert.Equal(t, readS3Cfg.Bucket, bkt)
	// no config file was read, so no repo id should have been resolved
	assert.Equal(t, config.RepoID, "")
	assert.Equal(t, readS3Cfg.Endpoint, end)
	assert.Equal(t, readS3Cfg.Prefix, pfx)
	assert.True(t, readS3Cfg.DoNotUseTLS)
	assert.True(t, readS3Cfg.DoNotVerifyTLS)

	// secrets still resolve from the environment
	common, err := config.Storage.CommonConfig()
	require.NoError(t, err, "reading common config from storage", clues.ToCore(err))
	assert.Equal(t, common.CorsoPassphrase, os.Getenv(credentials.CorsoPassphrase))

	readM365, err := config.Account.M365Config()
	require.NoError(t, err, "reading m365 config from account", clues.ToCore(err))
	assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID)
	assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID))
	assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret))
}

129
src/cli/config/storage.go Normal file
View File

@ -0,0 +1,129 @@
package config
import (
"os"
"path/filepath"
"strconv"
"github.com/alcionai/clues"
"github.com/aws/aws-sdk-go/aws/defaults"
"github.com/spf13/viper"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/pkg/credentials"
"github.com/alcionai/corso/src/pkg/storage"
)
// s3ConfigsFromViper builds a storage.S3Config from the values persisted in
// the corso config file, erroring if the stored provider is not S3.
//
// prerequisite: readRepoConfig must have been run prior to this to populate
// the global viper values.
func s3ConfigsFromViper(vpr *viper.Viper) (storage.S3Config, error) {
	provider := vpr.GetString(StorageProviderTypeKey)
	if provider != storage.ProviderS3.String() {
		return storage.S3Config{}, clues.New("unsupported storage provider: " + provider)
	}

	return storage.S3Config{
		Bucket:         vpr.GetString(BucketNameKey),
		Endpoint:       vpr.GetString(EndpointKey),
		Prefix:         vpr.GetString(PrefixKey),
		DoNotUseTLS:    vpr.GetBool(DisableTLSKey),
		DoNotVerifyTLS: vpr.GetBool(DisableTLSVerificationKey),
	}, nil
}
// s3Overrides filters the given flag-override map down to only the keys that
// are relevant to s3 storage configuration; absent keys map to "".
func s3Overrides(in map[string]string) map[string]string {
	keys := []string{
		storage.Bucket,
		storage.Endpoint,
		storage.Prefix,
		storage.DoNotUseTLS,
		storage.DoNotVerifyTLS,
		StorageProviderTypeKey,
	}

	out := make(map[string]string, len(keys))
	for _, k := range keys {
		out[k] = in[k]
	}

	return out
}
// configureStorage builds a complete storage configuration from a mix of
// viper properties and manual overrides.
//
// When readConfigFromViper is true, values persisted in the corso config file
// seed the configuration, and any overrides that conflict with the file cause
// an error.  Per-field precedence is: override > config file > env var.
// Returns a ready storage.Storage, or an error if credentials or required
// properties are missing.
func configureStorage(
	vpr *viper.Viper,
	readConfigFromViper bool,
	overrides map[string]string,
) (storage.Storage, error) {
	var (
		s3Cfg storage.S3Config
		store storage.Storage
		err   error
	)

	if readConfigFromViper {
		// seed from the values persisted in the corso config file
		if s3Cfg, err = s3ConfigsFromViper(vpr); err != nil {
			return store, clues.Wrap(err, "reading s3 configs from corso config file")
		}

		// normalize bucket/prefix overrides so they compare cleanly against
		// the normalized values stored in the config file
		if b, ok := overrides[storage.Bucket]; ok {
			overrides[storage.Bucket] = common.NormalizeBucket(b)
		}

		if p, ok := overrides[storage.Prefix]; ok {
			overrides[storage.Prefix] = common.NormalizePrefix(p)
		}

		// overrides that are also present in the config file must agree with
		// it, otherwise the user is likely pointing at the wrong repo
		if err := mustMatchConfig(vpr, s3Overrides(overrides)); err != nil {
			return store, clues.Wrap(err, "verifying s3 configs in corso config file")
		}
	}

	// ensure an AWS credential chain resolves before building the storage
	_, err = defaults.CredChain(defaults.Config().WithCredentialsChainVerboseErrors(true), defaults.Handlers()).Get()
	if err != nil {
		return store, clues.Wrap(err, "validating aws credentials")
	}

	// merge each field by precedence: override > config file > env var.
	// NOTE(review): both TLS fields fall back to os.Getenv(storage.PrefixKey),
	// which looks like a copy-paste slip — confirm the intended env keys for
	// DoNotUseTLS / DoNotVerifyTLS.
	s3Cfg = storage.S3Config{
		Bucket:   str.First(overrides[storage.Bucket], s3Cfg.Bucket, os.Getenv(storage.BucketKey)),
		Endpoint: str.First(overrides[storage.Endpoint], s3Cfg.Endpoint, os.Getenv(storage.EndpointKey)),
		Prefix:   str.First(overrides[storage.Prefix], s3Cfg.Prefix, os.Getenv(storage.PrefixKey)),
		DoNotUseTLS: str.ParseBool(str.First(
			overrides[storage.DoNotUseTLS],
			strconv.FormatBool(s3Cfg.DoNotUseTLS),
			os.Getenv(storage.PrefixKey))),
		DoNotVerifyTLS: str.ParseBool(str.First(
			overrides[storage.DoNotVerifyTLS],
			strconv.FormatBool(s3Cfg.DoNotVerifyTLS),
			os.Getenv(storage.PrefixKey))),
	}

	// compose the common config and credentials
	corso := credentials.GetCorso()
	if err := corso.Validate(); err != nil {
		return store, clues.Wrap(err, "validating corso credentials")
	}

	cCfg := storage.CommonConfig{
		Corso: corso,
	}
	// the following is a hack purely for integration testing.
	// the value is not required, and if empty, kopia will default
	// to its routine behavior
	if t, ok := vpr.Get("corso-testing").(bool); t && ok {
		dir, _ := filepath.Split(vpr.ConfigFileUsed())
		cCfg.KopiaCfgDir = dir
	}

	// ensure required properties are present
	if err := requireProps(map[string]string{
		storage.Bucket:              s3Cfg.Bucket,
		credentials.CorsoPassphrase: corso.CorsoPassphrase,
	}); err != nil {
		return storage.Storage{}, err
	}

	// build the storage
	store, err = storage.NewStorage(storage.ProviderS3, s3Cfg, cCfg)
	if err != nil {
		return store, clues.Wrap(err, "configuring repository storage")
	}

	return store, nil
}

View File

@ -1,120 +0,0 @@
package debug
import (
"context"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/selectors"
)
// subCommandFuncs lists constructors for each `corso debug <subcommand>`.
var subCommandFuncs = []func() *cobra.Command{
	metadataFilesCmd,
}

// debugCommands lists the provider-specific attachers; each hangs a service
// variant (onedrive, sharepoint, groups, exchange) off a debug subcommand.
var debugCommands = []func(cmd *cobra.Command) *cobra.Command{
	addOneDriveCommands,
	addSharePointCommands,
	addGroupsCommands,
	addExchangeCommands,
}
// AddCommands attaches all `corso debug * *` commands to the parent.
func AddCommands(cmd *cobra.Command) {
	debugRoot, _ := utils.AddCommand(cmd, debugCmd(), utils.MarkDebugCommand())

	for _, makeSub := range subCommandFuncs {
		sub := makeSub()
		utils.AddCommand(debugRoot, sub, utils.MarkDebugCommand())

		// attach each service-specific variant beneath the subcommand and
		// wire up the shared provider/storage flag sets
		for _, attach := range debugCommands {
			serviceCmd := attach(sub)
			flags.AddAllProviderFlags(serviceCmd)
			flags.AddAllStorageFlags(serviceCmd)
		}
	}
}
// ---------------------------------------------------------------------------
// Commands
// ---------------------------------------------------------------------------
const debugCommand = "debug"
// The debug category of commands.
// `corso debug [<subcommand>] [<flag>...]`
func debugCmd() *cobra.Command {
	c := &cobra.Command{
		Use:   debugCommand,
		Short: "debugging & troubleshooting utilities",
		Long:  `debug the data stored in corso.`,
		RunE:  handledebugCmd,
		Args:  cobra.NoArgs,
	}

	return c
}
// Handler for flat calls to `corso debug`.
// Produces the same output as `corso debug --help`.
// The command exists only as a grouping node for its subcommands.
func handledebugCmd(cmd *cobra.Command, args []string) error {
	return cmd.Help()
}
// The debug metadataFiles subcommand.
// `corso debug metadata-files <service> [<flag>...]`
var metadataFilesCommand = "metadata-files"

func metadataFilesCmd() *cobra.Command {
	c := &cobra.Command{
		Use:   metadataFilesCommand,
		Short: "display all the metadata file contents stored by the service",
		RunE:  handleMetadataFilesCmd,
		Args:  cobra.NoArgs,
	}

	return c
}
// Handler for calls to `corso debug metadata-files`.
// Produces the same output as `corso debug metadata-files --help`.
// Like the debug root, this node only groups the per-service subcommands.
func handleMetadataFilesCmd(cmd *cobra.Command, args []string) error {
	return cmd.Help()
}
// ---------------------------------------------------------------------------
// runners
// ---------------------------------------------------------------------------
// genericMetadataFiles is the shared runner behind every
// `corso debug metadata-files <service>` command.  It connects to the
// repository for the selector's service, fetches the metadata files recorded
// in the given backup, and prints each file's name, path, and pretty-printed
// contents.  Errors are routed through Only so they print once to the CLI.
func genericMetadataFiles(
	ctx context.Context,
	cmd *cobra.Command,
	args []string,
	sel selectors.Selector,
	backupID string,
) error {
	// annotate all downstream logs/errors with the backup being inspected
	ctx = clues.Add(ctx, "backup_id", backupID)

	r, _, err := utils.GetAccountAndConnect(ctx, cmd, sel.PathService())
	if err != nil {
		return Only(ctx, err)
	}

	defer utils.CloseRepo(ctx, r)

	// read metadata
	files, err := r.GetBackupMetadata(ctx, sel, backupID, fault.New(true))
	if err != nil {
		return Only(ctx, clues.Wrap(err, "retrieving metadata files"))
	}

	for _, file := range files {
		Infof(ctx, "\n------------------------------")
		Info(ctx, file.Name)
		Info(ctx, file.Path)
		Pretty(ctx, file.Data)
	}

	return nil
}

View File

@ -1,66 +0,0 @@
package debug
import (
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/selectors"
)
// called by debug.go to map subcommands to provider-specific handling.
func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
	var c *cobra.Command

	// only the metadata-files subcommand has an exchange variant today
	if cmd.Use == metadataFilesCommand {
		c, _ = utils.AddCommand(cmd, exchangeMetadataFilesCmd(), utils.MarkDebugCommand())
		c.Use = c.Use + " " + exchangeServiceCommandUseSuffix

		flags.AddBackupIDFlag(c, true)
	}

	return c
}
const (
exchangeServiceCommand = "exchange"
exchangeServiceCommandUseSuffix = "--backup <backupId>"
//nolint:lll
exchangeServiceCommandDebugExamples = `# Display file contents for backup 1234abcd
corso debug metadata-files exchange --backup 1234abcd-12ab-cd34-56de-1234abcd`
)
// `corso debug metadata-files exchange [<flag>...] <destination>`
func exchangeMetadataFilesCmd() *cobra.Command {
	c := &cobra.Command{
		Use:     exchangeServiceCommand,
		Short:   "Display exchange metadata file content",
		RunE:    metadataFilesExchangeCmd,
		Args:    cobra.NoArgs,
		Example: exchangeServiceCommandDebugExamples,
	}

	return c
}
// metadataFilesExchangeCmd handles `corso debug metadata-files exchange`,
// printing every metadata file stored for the flagged backup.
func metadataFilesExchangeCmd(cmd *cobra.Command, args []string) error {
	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	// test run mode: validate flags only, perform no work
	if flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	sel := selectors.NewExchangeBackup([]string{"unused-placeholder"})
	sel.Include(sel.AllData())

	return genericMetadataFiles(
		cmd.Context(),
		cmd,
		args,
		sel.Selector,
		flags.BackupIDFV)
}

View File

@ -1,77 +0,0 @@
package debug
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/tester"
)
// ExchangeUnitSuite holds unit tests for the exchange debug commands.
type ExchangeUnitSuite struct {
	tester.Suite
}

// TestExchangeUnitSuite runs the ExchangeUnitSuite unit tests.
func TestExchangeUnitSuite(t *testing.T) {
	suite.Run(t, &ExchangeUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestExchangeCommands verifies that the exchange debug command is attached
// beneath metadata-files with the expected use string, short description,
// run function, and flag values.
func (suite *ExchangeUnitSuite) TestExchangeCommands() {
	expectUse := exchangeServiceCommand + " " + exchangeServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			// fixed subtest-name typo: "metdata" -> "metadata"
			name:        "metadata-files exchange",
			use:         metadataFilesCommand,
			expectUse:   expectUse,
			expectShort: exchangeMetadataFilesCmd().Short,
			expectRunE:  metadataFilesExchangeCmd,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			parent := &cobra.Command{Use: metadataFilesCommand}

			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parent,
				addExchangeCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					exchangeServiceCommand,
					[]string{
						"--" + flags.RunModeFN, flags.RunModeFlagTest,
						"--" + flags.BackupFN, flagsTD.BackupInput,
					},
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))

			cliTD.CheckCmdChild(
				t,
				parent,
				3,
				test.expectUse,
				test.expectShort,
				test.expectRunE)

			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			flagsTD.AssertProviderFlags(t, cmd)
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}

View File

@ -1,68 +0,0 @@
package debug
import (
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/selectors"
)
// called by debug.go to map subcommands to provider-specific handling.
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
	var c *cobra.Command

	// only the metadata-files subcommand has a groups variant today
	if cmd.Use == metadataFilesCommand {
		c, _ = utils.AddCommand(cmd, groupsMetadataFilesCmd(), utils.MarkDebugCommand())
		c.Use = c.Use + " " + groupsServiceCommandUseSuffix

		flags.AddBackupIDFlag(c, true)
	}

	return c
}
// TODO: correct examples
const (
groupsServiceCommand = "groups"
groupsServiceCommandUseSuffix = "--backup <backupId>"
//nolint:lll
groupsServiceCommandDebugExamples = `# Display file contents for backup 1234abcd
corso debug metadata-files groups --backup 1234abcd-12ab-cd34-56de-1234abcd`
)
// `corso debug metadata-files groups [<flag>...] <destination>`
func groupsMetadataFilesCmd() *cobra.Command {
	c := &cobra.Command{
		Use:     groupsServiceCommand,
		Short:   "Display groups metadata file content",
		RunE:    metadataFilesGroupsCmd,
		Args:    cobra.NoArgs,
		Example: groupsServiceCommandDebugExamples,
	}

	return c
}
// metadataFilesGroupsCmd handles `corso debug metadata-files groups`,
// printing every metadata file stored for the flagged backup.
func metadataFilesGroupsCmd(cmd *cobra.Command, args []string) error {
	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	// test run mode: validate flags only, perform no work
	if flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	sel := selectors.NewGroupsBackup([]string{"unused-placeholder"})
	sel.Include(sel.AllData())

	return genericMetadataFiles(
		cmd.Context(),
		cmd,
		args,
		sel.Selector,
		flags.BackupIDFV)
}

View File

@ -1,76 +0,0 @@
package debug
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/tester"
)
// GroupsUnitSuite holds unit tests for the groups debug commands.
type GroupsUnitSuite struct {
	tester.Suite
}

// TestGroupsUnitSuite runs the GroupsUnitSuite unit tests.
func TestGroupsUnitSuite(t *testing.T) {
	suite.Run(t, &GroupsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAddGroupsCommands verifies that the groups debug command is attached
// beneath metadata-files with the expected use string, short description,
// run function, and flag values.
func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
	expectUse := groupsServiceCommand + " " + groupsServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			// fixed subtest-name typo: "metdata" -> "metadata"
			name:        "metadata-files groups",
			use:         metadataFilesCommand,
			expectUse:   expectUse,
			expectShort: groupsMetadataFilesCmd().Short,
			expectRunE:  metadataFilesGroupsCmd,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			parent := &cobra.Command{Use: metadataFilesCommand}

			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parent,
				addGroupsCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					groupsServiceCommand,
					[]string{
						"--" + flags.RunModeFN, flags.RunModeFlagTest,
						"--" + flags.BackupFN, flagsTD.BackupInput,
					},
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))

			cliTD.CheckCmdChild(
				t,
				parent,
				3,
				test.expectUse,
				test.expectShort,
				test.expectRunE)

			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			// also assert provider flags, for parity with the exchange and
			// onedrive variants of this test (previously missing here)
			flagsTD.AssertProviderFlags(t, cmd)
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}

View File

@ -1,66 +0,0 @@
package debug
import (
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/selectors"
)
// called by debug.go to map subcommands to provider-specific handling.
func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
	var c *cobra.Command

	// only the metadata-files subcommand has a onedrive variant today
	if cmd.Use == metadataFilesCommand {
		c, _ = utils.AddCommand(cmd, oneDriveMetadataFilesCmd(), utils.MarkDebugCommand())
		c.Use = c.Use + " " + oneDriveServiceCommandUseSuffix

		flags.AddBackupIDFlag(c, true)
	}

	return c
}
// Command name, usage suffix, and help examples for the onedrive debug command.
const (
	oneDriveServiceCommand          = "onedrive"
	oneDriveServiceCommandUseSuffix = "--backup <backupId>"

	//nolint:lll
	oneDriveServiceCommandDebugExamples = `# Display file contents for backup 1234abcd
corso debug metadata-files onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd`
)
// oneDriveMetadataFilesCmd builds the cobra command for
// `corso debug metadata-files onedrive [<flag>...] <destination>`.
func oneDriveMetadataFilesCmd() *cobra.Command {
	return &cobra.Command{
		Use:     oneDriveServiceCommand,
		Short:   "Display onedrive metadata file content",
		RunE:    metadataFilesOneDriveCmd,
		Args:    cobra.NoArgs,
		Example: oneDriveServiceCommandDebugExamples,
	}
}
// metadataFilesOneDriveCmd is the RunE handler for
// `corso debug metadata-files onedrive`; it dumps the metadata files
// recorded in the backup identified by the --backup flag.
func metadataFilesOneDriveCmd(cmd *cobra.Command, args []string) error {
	// Bare-help and flag-test invocations produce no work; the
	// short-circuit keeps the original evaluation order.
	if utils.HasNoFlagsAndShownHelp(cmd) || flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	// The resource-owner value is a placeholder; the selector is only
	// used to identify the service/data scope.
	sel := selectors.NewOneDriveBackup([]string{"unused-placeholder"})
	sel.Include(sel.AllData())

	return genericMetadataFiles(
		cmd.Context(),
		cmd,
		args,
		sel.Selector,
		flags.BackupIDFV)
}

View File

@ -1,76 +0,0 @@
package debug
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/tester"
)
// OneDriveUnitSuite hosts unit tests for the onedrive debug command wiring.
type OneDriveUnitSuite struct {
	tester.Suite
}

func TestOneDriveUnitSuite(t *testing.T) {
	suite.Run(t, &OneDriveUnitSuite{Suite: tester.NewUnitSuite(t)})
}

// TestAddOneDriveCommands verifies that addOneDriveCommands registers the
// metadata-files subcommand with the expected use/short/RunE, and that the
// backup and storage flags parse through to their flag values.
func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
	expectUse := oneDriveServiceCommand + " " + oneDriveServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			name:        "metadata-files onedrive",
			use:         metadataFilesCommand,
			expectUse:   expectUse,
			expectShort: oneDriveMetadataFilesCmd().Short,
			expectRunE:  metadataFilesOneDriveCmd,
		},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			parent := &cobra.Command{Use: metadataFilesCommand}

			// Builds the command tree, attaches provider/storage flags,
			// and parses the prepared flag inputs in run-mode test.
			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parent,
				addOneDriveCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					oneDriveServiceCommand,
					[]string{
						"--" + flags.RunModeFN, flags.RunModeFlagTest,
						"--" + flags.BackupFN, flagsTD.BackupInput,
					},
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))

			cliTD.CheckCmdChild(
				t,
				parent,
				3,
				test.expectUse,
				test.expectShort,
				test.expectRunE)

			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}

View File

@ -1,66 +0,0 @@
package debug
import (
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/selectors"
)
// addSharePointCommands wires the sharepoint subcommand into the given parent;
// called by debug.go to map subcommands to provider-specific handling.
// Returns nil when the parent is not a command this provider extends.
func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
	if cmd.Use != metadataFilesCommand {
		return nil
	}

	sub, _ := utils.AddCommand(cmd, sharePointMetadataFilesCmd(), utils.MarkDebugCommand())
	sub.Use += " " + sharePointServiceCommandUseSuffix

	flags.AddBackupIDFlag(sub, true)

	return sub
}
// Command name, usage suffix, and help examples for the sharepoint debug command.
const (
	sharePointServiceCommand          = "sharepoint"
	sharePointServiceCommandUseSuffix = "--backup <backupId>"

	//nolint:lll
	sharePointServiceCommandDebugExamples = `# Display file contents for backup 1234abcd
corso debug metadata-files sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd`
)
// sharePointMetadataFilesCmd builds the cobra command for
// `corso debug metadata-files sharepoint [<flag>...] <destination>`.
func sharePointMetadataFilesCmd() *cobra.Command {
	return &cobra.Command{
		Use:     sharePointServiceCommand,
		Short:   "Display sharepoint metadata file content",
		RunE:    metadataFilesSharePointCmd,
		Args:    cobra.NoArgs,
		Example: sharePointServiceCommandDebugExamples,
	}
}
// metadataFilesSharePointCmd is the RunE handler for
// `corso debug metadata-files sharepoint`; it dumps the metadata files
// recorded in the backup identified by the --backup flag.
func metadataFilesSharePointCmd(cmd *cobra.Command, args []string) error {
	// Bare-help and flag-test invocations produce no work; the
	// short-circuit keeps the original evaluation order.
	if utils.HasNoFlagsAndShownHelp(cmd) || flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	// The resource-owner value is a placeholder; the selector scopes the
	// lookup to all library folders.
	sel := selectors.NewSharePointBackup([]string{"unused-placeholder"})
	sel.Include(sel.LibraryFolders(selectors.Any()))

	return genericMetadataFiles(
		cmd.Context(),
		cmd,
		args,
		sel.Selector,
		flags.BackupIDFV)
}

View File

@ -1,76 +0,0 @@
package debug
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/tester"
)
// SharePointUnitSuite hosts unit tests for the sharepoint debug command wiring.
type SharePointUnitSuite struct {
	tester.Suite
}

func TestSharePointUnitSuite(t *testing.T) {
	suite.Run(t, &SharePointUnitSuite{Suite: tester.NewUnitSuite(t)})
}

// TestAddSharePointCommands verifies that addSharePointCommands registers
// the metadata-files subcommand with the expected use string, short
// description, and RunE, and that backup/storage flags parse through.
func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
	expectUse := sharePointServiceCommand + " " + sharePointServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			// fix: subtest name was previously misspelled "metdata-files sharepoint".
			name:        "metadata-files sharepoint",
			use:         metadataFilesCommand,
			expectUse:   expectUse,
			expectShort: sharePointMetadataFilesCmd().Short,
			expectRunE:  metadataFilesSharePointCmd,
		},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			parent := &cobra.Command{Use: metadataFilesCommand}

			// Builds the command tree, attaches provider/storage flags,
			// and parses the prepared flag inputs in run-mode test.
			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parent,
				addSharePointCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					sharePointServiceCommand,
					[]string{
						"--" + flags.RunModeFN, flags.RunModeFlagTest,
						"--" + flags.BackupFN, flagsTD.BackupInput,
					},
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))

			cliTD.CheckCmdChild(
				t,
				parent,
				3,
				test.expectUse,
				test.expectShort,
				test.expectRunE)

			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}

View File

@ -1,101 +0,0 @@
package export
import (
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
)
// addExchangeCommands wires the exchange subcommand into the given parent;
// called by export.go to map subcommands to provider-specific handling.
// Returns nil when the parent is not a command this provider extends.
func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
	if cmd.Use != exportCommand {
		return nil
	}

	sub, _ := utils.AddCommand(cmd, exchangeExportCmd())
	sub.Use += " " + exchangeServiceCommandUseSuffix

	flags.AddBackupIDFlag(sub, true)
	flags.AddExchangeDetailsAndRestoreFlags(sub, true)
	flags.AddExportConfigFlags(sub)
	flags.AddFailFastFlag(sub)

	return sub
}
// Command name, usage suffix, and help examples for the exchange export command.
const (
	exchangeServiceCommand          = "exchange"
	exchangeServiceCommandUseSuffix = "<destination> --backup <backupId>"

	// TODO(meain): remove message about only supporting email exports once others are added
	//nolint:lll
	exchangeServiceCommandExportExamples = `> Only email exports are supported as of now.
# Export emails with ID 98765abcdef and 12345abcdef from Alice's last backup (1234abcd...) to my-folder
corso export exchange my-folder --backup 1234abcd-12ab-cd34-56de-1234abcd --email 98765abcdef,12345abcdef
# Export emails with subject containing "Hello world" in the "Inbox" to my-folder
corso export exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
--email-subject "Hello world" --email-folder Inbox my-folder`

	// TODO(meain): Uncomment once support for these are added
	// `# Export an entire calendar to my-folder
	// corso export exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
	// --event-calendar Calendar my-folder
	// # Export the contact with ID abdef0101 to my-folder
	// corso export exchange --backup 1234abcd-12ab-cd34-56de-1234abcd --contact abdef0101 my-folder`
)
// exchangeExportCmd builds the cobra command for
// `corso export exchange [<flag>...] <destination>`.
func exchangeExportCmd() *cobra.Command {
	return &cobra.Command{
		Use:   exchangeServiceCommand,
		Short: "Export M365 Exchange service data",
		RunE:  exportExchangeCmd,
		// Exactly one positional arg: the export destination folder.
		Args: func(cmd *cobra.Command, args []string) error {
			if len(args) != 1 {
				return errors.New("missing export destination")
			}

			return nil
		},
		Example: exchangeServiceCommandExportExamples,
	}
}
// exportExchangeCmd handles `corso export exchange`: it validates the
// restore/export flags, builds the exchange data selectors, and runs the
// export against the backup identified by --backup.
func exportExchangeCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	exOpts := utils.MakeExchangeOpts(cmd)

	// In flag-test mode, parsing is all that's being checked.
	if flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	if err := utils.ValidateExchangeRestoreFlags(flags.BackupIDFV, exOpts); err != nil {
		return err
	}

	exSel := utils.IncludeExchangeRestoreDataSelectors(exOpts)
	utils.FilterExchangeRestoreInfoSelectors(exSel, exOpts)

	return runExport(
		ctx,
		cmd,
		args,
		exOpts.ExportCfg,
		exSel.Selector,
		flags.BackupIDFV,
		"Exchange",
		defaultAcceptedFormatTypes)
}

View File

@ -1,78 +0,0 @@
package export
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester"
)
// ExchangeUnitSuite hosts unit tests for the exchange export command wiring.
type ExchangeUnitSuite struct {
	tester.Suite
}

func TestExchangeUnitSuite(t *testing.T) {
	suite.Run(t, &ExchangeUnitSuite{Suite: tester.NewUnitSuite(t)})
}

// TestAddExchangeCommands verifies that addExchangeCommands registers the
// export subcommand with the expected use/short/RunE, and that backup,
// export-config, and storage flags parse through to their values.
func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
	expectUse := exchangeServiceCommand + " " + exchangeServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{"export exchange", exportCommand, expectUse, exchangeExportCmd().Short, exportExchangeCmd},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			parent := &cobra.Command{Use: exportCommand}

			// Builds the command tree, attaches provider/storage flags,
			// and parses the prepared flag inputs in run-mode test.
			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parent,
				addExchangeCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					exchangeServiceCommand,
					[]string{
						flagsTD.RestoreDestination,
						"--" + flags.RunModeFN, flags.RunModeFlagTest,
						"--" + flags.BackupFN, flagsTD.BackupInput,
						"--" + flags.FormatFN, flagsTD.FormatType,
						"--" + flags.ArchiveFN,
					},
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))

			cliTD.CheckCmdChild(
				t,
				parent,
				3,
				test.expectUse,
				test.expectShort,
				test.expectRunE)

			opts := utils.MakeExchangeOpts(cmd)

			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			assert.Equal(t, flagsTD.Archive, opts.ExportCfg.Archive)
			assert.Equal(t, flagsTD.FormatType, opts.ExportCfg.Format)
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}

View File

@ -1,153 +0,0 @@
package export
import (
"context"
"errors"
"github.com/alcionai/clues"
"github.com/dustin/go-humanize"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/dttm"
"github.com/alcionai/corso/src/pkg/export"
"github.com/alcionai/corso/src/pkg/selectors"
)
// exportCommands lists the per-service registrars invoked by AddCommands.
var exportCommands = []func(cmd *cobra.Command) *cobra.Command{
	addOneDriveCommands,
	addSharePointCommands,
	addGroupsCommands,
	addExchangeCommands,
}

// defaultAcceptedFormatTypes is the format allow-list for services that
// only support the default export format.
var defaultAcceptedFormatTypes = []string{string(control.DefaultFormat)}

// AddCommands attaches all `corso export * *` commands to the parent.
func AddCommands(cmd *cobra.Command) {
	subCommand := exportCmd()
	cmd.AddCommand(subCommand)

	// Register each service's export subcommand and give it storage flags.
	for _, addExportTo := range exportCommands {
		sc := addExportTo(subCommand)
		flags.AddAllStorageFlags(sc)
	}
}
const exportCommand = "export"

// exportCmd builds the export category of commands:
// `corso export [<subcommand>] [<flag>...]`
func exportCmd() *cobra.Command {
	return &cobra.Command{
		Use:   exportCommand,
		Short: "Export your service data",
		Long:  `Export the data stored in one of your M365 services.`,
		RunE:  handleExportCmd,
		Args:  cobra.NoArgs,
	}
}

// handleExportCmd handles flat calls to `corso export`.
// Produces the same output as `corso export --help`.
func handleExportCmd(cmd *cobra.Command, args []string) error {
	return cmd.Help()
}
// runExport drives a full export operation: validates the export config,
// connects to the repository, runs the export for the given backup, streams
// results to disk, then prints recovered failures and per-category stats.
// Returns a wrapped error if any stage fails or the export is incomplete.
func runExport(
	ctx context.Context,
	cmd *cobra.Command,
	args []string,
	ueco utils.ExportCfgOpts,
	sel selectors.Selector,
	backupID, serviceName string,
	acceptedFormatTypes []string,
) error {
	if err := utils.ValidateExportConfigFlags(&ueco, acceptedFormatTypes); err != nil {
		return Only(ctx, err)
	}

	r, _, err := utils.GetAccountAndConnect(ctx, cmd, sel.PathService())
	if err != nil {
		return Only(ctx, err)
	}

	defer utils.CloseRepo(ctx, r)

	// Each subcommand's Args validator guarantees exactly one positional arg;
	// the empty-string check below is purely defensive.
	exportLocation := args[0]
	if len(exportLocation) == 0 {
		// This should not be possible, but adding it just in case.
		exportLocation = control.DefaultRestoreLocation + dttm.FormatNow(dttm.HumanReadableDriveItem)
	}

	Infof(ctx, "Exporting to folder %s", exportLocation)

	eo, err := r.NewExport(
		ctx,
		backupID,
		sel,
		utils.MakeExportConfig(ctx, ueco))
	if err != nil {
		return Only(ctx, clues.Wrap(err, "Failed to initialize "+serviceName+" export"))
	}

	collections, err := eo.Run(ctx)
	if err != nil {
		if errors.Is(err, data.ErrNotFound) {
			return Only(ctx, clues.New("Backup or backup details missing for id "+backupID))
		}

		return Only(ctx, clues.Wrap(err, "Failed to run "+serviceName+" export"))
	}

	if err = showExportProgress(ctx, eo, collections, exportLocation); err != nil {
		return err
	}

	// Recovered errors are per-item failures the operation survived; surface
	// each one and report the export as incomplete.
	if len(eo.Errors.Recovered()) > 0 {
		Infof(ctx, "\nExport failures")

		for _, i := range eo.Errors.Recovered() {
			Err(ctx, i.Error())
		}

		return Only(ctx, clues.New("Incomplete export of "+serviceName+" data"))
	}

	stats := eo.GetStats()
	if len(stats) > 0 {
		Infof(ctx, "\nExport details")
	}

	for k, s := range stats {
		Infof(ctx, "%s: %d items (%s)", k.HumanString(), s.ResourceCount, humanize.Bytes(uint64(s.BytesRead)))
	}

	return nil
}
// showExportProgress is a slim wrapper that allows us to defer the progress
// bar closure with the expected scope while the export collections are
// consumed and written to disk.
func showExportProgress(
	ctx context.Context,
	op operations.ExportOperation,
	collections []export.Collectioner,
	exportLocation string,
) error {
	// It would be better to show a progress bar than a spinner, but we
	// don't have any way of knowing how many files are available as of now.
	progressMessage := observe.MessageWithCompletion(ctx, observe.DefaultCfg(), "Writing data to disk")
	defer close(progressMessage)

	err := export.ConsumeExportCollections(ctx, exportLocation, collections, op.Errors)
	if err != nil {
		return Only(ctx, err)
	}

	return nil
}

View File

@ -1,115 +0,0 @@
package export
import (
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/control"
)
// addGroupsCommands wires the groups subcommand into the given parent;
// called by export.go to map subcommands to provider-specific handling.
// Returns nil when the parent is not a command this provider extends.
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
	if cmd.Use != exportCommand {
		return nil
	}

	sub, _ := utils.AddCommand(cmd, groupsExportCmd(), utils.MarkPreviewCommand())
	sub.Use += " " + groupsServiceCommandUseSuffix

	flags.AddBackupIDFlag(sub, true)
	flags.AddSiteFlag(sub, false)
	flags.AddSiteIDFlag(sub, false)
	flags.AddSharePointDetailsAndRestoreFlags(sub)
	flags.AddGroupDetailsAndRestoreFlags(sub)
	flags.AddExportConfigFlags(sub)
	flags.AddFailFastFlag(sub)

	return sub
}
// Command names, usage suffix, and help examples for the groups export command.
// "teams" is accepted as an alias for "groups".
const (
	groupsServiceCommand          = "groups"
	teamsServiceCommand           = "teams"
	groupsServiceCommandUseSuffix = "<destination> --backup <backupId>"

	//nolint:lll
	groupsServiceCommandExportExamples = `# Export a message in Marketing's last backup (1234abcd...) to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --message 98765abcdef
# Export all messages named in channel "Finance Reports" to the current directory
corso export groups . --backup 1234abcd-12ab-cd34-56de-1234abcd \
--message '*' --channel "Finance Reports"
# Export all messages in channel "Finance Reports" that were created before 2020 to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd
--channel "Finance Reports" --message-created-before 2020-01-01T00:00:00
# Export all files and folders in folder "Documents/Finance Reports" that were created before 2020 to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd \
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00
# Export all posts from a conversation with topic "hello world" from group mailbox's last backup to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world"
# Export post with ID 98765abcdef from a conversation from group mailbox's last backup to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world" --post 98765abcdef`
)
// groupsExportCmd builds the cobra command for
// `corso export groups [<flag>...] <destination>`.
func groupsExportCmd() *cobra.Command {
	return &cobra.Command{
		Use:     groupsServiceCommand,
		Aliases: []string{teamsServiceCommand},
		Short:   "Export M365 Groups service data",
		RunE:    exportGroupsCmd,
		// Exactly one positional arg: the export destination folder.
		Args: func(cmd *cobra.Command, args []string) error {
			if len(args) != 1 {
				return errors.New("missing export destination")
			}

			return nil
		},
		Example: groupsServiceCommandExportExamples,
	}
}
// exportGroupsCmd processes a groups service export: validates restore and
// export flags, builds the groups data selectors, and runs the export.
func exportGroupsCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	// Show help and exit when invoked with no flags at all.
	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	opts := utils.MakeGroupsOpts(cmd)

	// In flag-test mode, parsing is all that's being checked.
	if flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	if err := utils.ValidateGroupsRestoreFlags(flags.BackupIDFV, opts, false); err != nil {
		return err
	}

	sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)
	utils.FilterGroupsRestoreInfoSelectors(sel, opts)

	// Groups additionally accepts the JSON format, unlike the services that
	// use defaultAcceptedFormatTypes.
	acceptedGroupsFormatTypes := []string{
		string(control.DefaultFormat),
		string(control.JSONFormat),
	}

	return runExport(
		ctx,
		cmd,
		args,
		opts.ExportCfg,
		sel.Selector,
		flags.BackupIDFV,
		"Groups",
		acceptedGroupsFormatTypes)
}

View File

@ -1,78 +0,0 @@
package export
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester"
)
// GroupsUnitSuite hosts unit tests for the groups export command wiring.
type GroupsUnitSuite struct {
	tester.Suite
}

func TestGroupsUnitSuite(t *testing.T) {
	suite.Run(t, &GroupsUnitSuite{Suite: tester.NewUnitSuite(t)})
}

// TestAddGroupsCommands verifies that addGroupsCommands registers the
// export subcommand with the expected use/short/RunE, and that backup,
// export-config, and storage flags parse through to their values.
func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
	expectUse := groupsServiceCommand + " " + groupsServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{"export groups", exportCommand, expectUse, groupsExportCmd().Short, exportGroupsCmd},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			parent := &cobra.Command{Use: exportCommand}

			// Builds the command tree, attaches provider/storage flags,
			// and parses the prepared flag inputs in run-mode test.
			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parent,
				addGroupsCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					groupsServiceCommand,
					[]string{
						flagsTD.RestoreDestination,
						"--" + flags.RunModeFN, flags.RunModeFlagTest,
						"--" + flags.BackupFN, flagsTD.BackupInput,
						"--" + flags.FormatFN, flagsTD.FormatType,
						"--" + flags.ArchiveFN,
					},
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))

			cliTD.CheckCmdChild(
				t,
				parent,
				3,
				test.expectUse,
				test.expectShort,
				test.expectRunE)

			opts := utils.MakeGroupsOpts(cmd)

			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			assert.Equal(t, flagsTD.Archive, opts.ExportCfg.Archive)
			assert.Equal(t, flagsTD.FormatType, opts.ExportCfg.Format)
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}

View File

@ -1,94 +0,0 @@
package export
import (
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
)
// addOneDriveCommands wires the onedrive subcommand into the given parent;
// called by export.go to map subcommands to provider-specific handling.
// Returns nil when the parent is not a command this provider extends.
func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
	if cmd.Use != exportCommand {
		return nil
	}

	sub, _ := utils.AddCommand(cmd, oneDriveExportCmd())
	sub.Use += " " + oneDriveServiceCommandUseSuffix

	flags.AddBackupIDFlag(sub, true)
	flags.AddOneDriveDetailsAndRestoreFlags(sub)
	flags.AddExportConfigFlags(sub)
	flags.AddFailFastFlag(sub)

	return sub
}
// Command name, usage suffix, and help examples for the onedrive export command.
const (
	oneDriveServiceCommand          = "onedrive"
	oneDriveServiceCommandUseSuffix = "<destination> --backup <backupId>"

	//nolint:lll
	oneDriveServiceCommandExportExamples = `# Export file with ID 98765abcdef in Bob's last backup (1234abcd...) to /my-exports
corso export onedrive my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef
# Export files named "FY2021 Planning.xlsx" in "Documents/Finance Reports" to the current directory
corso export onedrive . --backup 1234abcd-12ab-cd34-56de-1234abcd \
--file "FY2021 Planning.xlsx" --folder "Documents/Finance Reports"
# Export all files and folders in folder "Documents/Finance Reports" that were created before 2020 to /my-exports
corso export onedrive my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd \
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
)
// oneDriveExportCmd builds the cobra command for
// `corso export onedrive [<flag>...] <destination>`.
func oneDriveExportCmd() *cobra.Command {
	return &cobra.Command{
		Use:   oneDriveServiceCommand,
		Short: "Export M365 OneDrive service data",
		RunE:  exportOneDriveCmd,
		// Exactly one positional arg: the export destination folder.
		Args: func(cmd *cobra.Command, args []string) error {
			if len(args) != 1 {
				return errors.New("missing export destination")
			}

			return nil
		},
		Example: oneDriveServiceCommandExportExamples,
	}
}
// exportOneDriveCmd processes a onedrive service export: validates restore
// and export flags, builds the onedrive data selectors, and runs the export.
func exportOneDriveCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	// Show help and exit when invoked with no flags at all.
	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	opts := utils.MakeOneDriveOpts(cmd)

	// In flag-test mode, parsing is all that's being checked.
	if flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	if err := utils.ValidateOneDriveRestoreFlags(flags.BackupIDFV, opts); err != nil {
		return err
	}

	sel := utils.IncludeOneDriveRestoreDataSelectors(opts)
	utils.FilterOneDriveRestoreInfoSelectors(sel, opts)

	return runExport(
		ctx,
		cmd,
		args,
		opts.ExportCfg,
		sel.Selector,
		flags.BackupIDFV,
		"OneDrive",
		defaultAcceptedFormatTypes)
}

View File

@ -1,92 +0,0 @@
package export
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester"
)
// OneDriveUnitSuite hosts unit tests for the onedrive export command wiring.
type OneDriveUnitSuite struct {
	tester.Suite
}

func TestOneDriveUnitSuite(t *testing.T) {
	suite.Run(t, &OneDriveUnitSuite{Suite: tester.NewUnitSuite(t)})
}

// TestAddOneDriveCommands verifies that addOneDriveCommands registers the
// export subcommand with the expected use/short/RunE, and that all
// detail-filter, export-config, and storage flags parse through.
func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
	expectUse := oneDriveServiceCommand + " " + oneDriveServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{"export onedrive", exportCommand, expectUse, oneDriveExportCmd().Short, exportOneDriveCmd},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			parent := &cobra.Command{Use: exportCommand}

			// Builds the command tree, attaches provider/storage flags,
			// and parses the prepared flag inputs in run-mode test.
			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parent,
				addOneDriveCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					oneDriveServiceCommand,
					[]string{
						flagsTD.RestoreDestination,
						"--" + flags.RunModeFN, flags.RunModeFlagTest,
						"--" + flags.BackupFN, flagsTD.BackupInput,
						"--" + flags.FileFN, flagsTD.FlgInputs(flagsTD.FileNameInput),
						"--" + flags.FolderFN, flagsTD.FlgInputs(flagsTD.FolderPathInput),
						"--" + flags.FileCreatedAfterFN, flagsTD.FileCreatedAfterInput,
						"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
						"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
						"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
						"--" + flags.FormatFN, flagsTD.FormatType,
						// bool flags
						"--" + flags.ArchiveFN,
					},
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))

			cliTD.CheckCmdChild(
				t,
				parent,
				3,
				test.expectUse,
				test.expectShort,
				test.expectRunE)

			opts := utils.MakeOneDriveOpts(cmd)

			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			assert.ElementsMatch(t, flagsTD.FileNameInput, opts.FileName)
			assert.ElementsMatch(t, flagsTD.FolderPathInput, opts.FolderPath)
			assert.Equal(t, flagsTD.FileCreatedAfterInput, opts.FileCreatedAfter)
			assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore)
			assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter)
			assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
			assert.Equal(t, flagsTD.CorsoPassphrase, flags.PassphraseFV)
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}

View File

@ -1,118 +0,0 @@
package export
import (
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
)
// addSharePointCommands wires the sharepoint subcommand into the given parent;
// called by export.go to map subcommands to provider-specific handling.
// Returns nil when the parent is not a command this provider extends.
func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
	if cmd.Use != exportCommand {
		return nil
	}

	sub, _ := utils.AddCommand(cmd, sharePointExportCmd())
	sub.Use += " " + sharePointServiceCommandUseSuffix

	flags.AddBackupIDFlag(sub, true)
	flags.AddSharePointDetailsAndRestoreFlags(sub)
	flags.AddExportConfigFlags(sub)
	flags.AddFailFastFlag(sub)

	return sub
}
// Command name, usage suffix, and help examples for the sharepoint export command.
const (
	sharePointServiceCommand          = "sharepoint"
	sharePointServiceCommandUseSuffix = "<destination> --backup <backupId>"

	//nolint:lll
	sharePointServiceCommandExportExamples = `# Export file with ID 98765abcdef in Bob's latest backup (1234abcd...) to /my-exports
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef my-exports
# Export file "ServerRenderTemplate.xsl" in "Display Templates/Style Sheets" as archive to the current directory
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--file "ServerRenderTemplate.xsl" --folder "Display Templates/Style Sheets" --archive .
# Export all files in the folder "Display Templates/Style Sheets" that were created before 2020 to /my-exports
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--file-created-before 2020-01-01T00:00:00 --folder "Display Templates/Style Sheets" my-exports
# Export all files in the "Documents" library to the current directory.
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--library Documents --folder "Display Templates/Style Sheets" .
# Export lists by their name(s)
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list "list-name-1,list-name-2" .
# Export lists created after a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-after 2024-01-01T12:23:34 .
# Export lists created before a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-before 2024-01-01T12:23:34 .
# Export lists modified before a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-before 2024-01-01T12:23:34 .
# Export lists modified after a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-after 2024-01-01T12:23:34 .`
)
// sharePointExportCmd builds the cobra command for
// `corso export sharepoint [<flag>...] <destination>`.
func sharePointExportCmd() *cobra.Command {
	return &cobra.Command{
		Use:   sharePointServiceCommand,
		Short: "Export M365 SharePoint service data",
		RunE:  exportSharePointCmd,
		// Exactly one positional arg: the export destination folder.
		Args: func(cmd *cobra.Command, args []string) error {
			if len(args) != 1 {
				return errors.New("missing export destination")
			}

			return nil
		},
		Example: sharePointServiceCommandExportExamples,
	}
}
// exportSharePointCmd processes a sharepoint service export: validates
// restore and export flags, builds the sharepoint data selectors, and runs
// the export.
func exportSharePointCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	// Show help and exit when invoked with no flags at all.
	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	opts := utils.MakeSharePointOpts(cmd)

	// In flag-test mode, parsing is all that's being checked.
	if flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	if err := utils.ValidateSharePointRestoreFlags(flags.BackupIDFV, opts); err != nil {
		return err
	}

	sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
	utils.FilterSharePointRestoreInfoSelectors(sel, opts)

	return runExport(
		ctx,
		cmd,
		args,
		opts.ExportCfg,
		sel.Selector,
		flags.BackupIDFV,
		"SharePoint",
		defaultAcceptedFormatTypes)
}

View File

@ -1,106 +0,0 @@
package export
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester"
)
// SharePointUnitSuite hosts unit tests for the sharepoint export command wiring.
type SharePointUnitSuite struct {
	tester.Suite
}

func TestSharePointUnitSuite(t *testing.T) {
	suite.Run(t, &SharePointUnitSuite{Suite: tester.NewUnitSuite(t)})
}

// TestAddSharePointCommands verifies that addSharePointCommands registers
// the export subcommand with the expected use/short/RunE, and that all
// file/list/page filter, export-config, and storage flags parse through.
func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
	expectUse := sharePointServiceCommand + " " + sharePointServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{"export sharepoint", exportCommand, expectUse, sharePointExportCmd().Short, exportSharePointCmd},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			parent := &cobra.Command{Use: exportCommand}

			// Builds the command tree, attaches provider/storage flags,
			// and parses the prepared flag inputs in run-mode test.
			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parent,
				addSharePointCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					sharePointServiceCommand,
					[]string{
						flagsTD.RestoreDestination,
						"--" + flags.RunModeFN, flags.RunModeFlagTest,
						"--" + flags.BackupFN, flagsTD.BackupInput,
						"--" + flags.LibraryFN, flagsTD.LibraryInput,
						"--" + flags.FileFN, flagsTD.FlgInputs(flagsTD.FileNameInput),
						"--" + flags.FolderFN, flagsTD.FlgInputs(flagsTD.FolderPathInput),
						"--" + flags.FileCreatedAfterFN, flagsTD.FileCreatedAfterInput,
						"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
						"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
						"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
						"--" + flags.ListFN, flagsTD.FlgInputs(flagsTD.ListsInput),
						"--" + flags.ListCreatedAfterFN, flagsTD.ListCreatedAfterInput,
						"--" + flags.ListCreatedBeforeFN, flagsTD.ListCreatedBeforeInput,
						"--" + flags.ListModifiedAfterFN, flagsTD.ListModifiedAfterInput,
						"--" + flags.ListModifiedBeforeFN, flagsTD.ListModifiedBeforeInput,
						"--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput),
						"--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
						"--" + flags.FormatFN, flagsTD.FormatType,
						"--" + flags.ArchiveFN,
					},
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))

			cliTD.CheckCmdChild(
				t,
				parent,
				3,
				test.expectUse,
				test.expectShort,
				test.expectRunE)

			opts := utils.MakeSharePointOpts(cmd)

			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			assert.Equal(t, flagsTD.LibraryInput, opts.Library)
			assert.ElementsMatch(t, flagsTD.FileNameInput, opts.FileName)
			assert.ElementsMatch(t, flagsTD.FolderPathInput, opts.FolderPath)
			assert.Equal(t, flagsTD.FileCreatedAfterInput, opts.FileCreatedAfter)
			assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore)
			assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter)
			assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
			assert.ElementsMatch(t, flagsTD.ListsInput, opts.Lists)
			assert.Equal(t, flagsTD.ListCreatedAfterInput, opts.ListCreatedAfter)
			assert.Equal(t, flagsTD.ListCreatedBeforeInput, opts.ListCreatedBefore)
			assert.Equal(t, flagsTD.ListModifiedAfterInput, opts.ListModifiedAfter)
			assert.Equal(t, flagsTD.ListModifiedBeforeInput, opts.ListModifiedBefore)
			assert.ElementsMatch(t, flagsTD.PageInput, opts.Page)
			assert.ElementsMatch(t, flagsTD.PageFolderInput, opts.PageFolder)
			assert.Equal(t, flagsTD.Archive, opts.ExportCfg.Archive)
			assert.Equal(t, flagsTD.FormatType, opts.ExportCfg.Format)
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}

View File

@ -1,11 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
)
// AddGenericBackupFlags registers the operational flags shared by every
// service's backup command: fail-fast error handling, disabling incremental
// backups, and forcing item-data re-download. All three are hidden flags
// (see their individual Add* functions).
func AddGenericBackupFlags(cmd *cobra.Command) {
	AddFailFastFlag(cmd)
	AddDisableIncrementalsFlag(cmd)
	AddForceItemDataDownloadFlag(cmd)
}

View File

@ -1,38 +0,0 @@
package flags
import "github.com/spf13/cobra"
// Show is the default value for the backup-list display toggles below.
const Show = "show"

// AddAllBackupListFlags registers every display toggle used by the
// `backup list` command: failed items, skipped items, recovered errors,
// and alerts.
func AddAllBackupListFlags(cmd *cobra.Command) {
	AddFailedItemsFN(cmd)
	AddSkippedItemsFN(cmd)
	AddRecoveredErrorsFN(cmd)
	AddAlertsFN(cmd)
}

// AddFailedItemsFN registers the hidden --failed-items toggle, which
// controls whether items that failed get listed. Defaults to "show".
func AddFailedItemsFN(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.StringVar(
		&FailedItemsFV, FailedItemsFN, Show,
		"Toggles showing or hiding the list of items that failed.")
	cobra.CheckErr(fs.MarkHidden(FailedItemsFN))
}

// AddSkippedItemsFN registers the --skipped-items toggle. Defaults to "show".
func AddSkippedItemsFN(cmd *cobra.Command) {
	cmd.Flags().StringVar(
		&ListSkippedItemsFV, SkippedItemsFN, Show,
		"Toggles showing or hiding the list of items that were skipped.")
}

// AddRecoveredErrorsFN registers the --recovered-errors toggle. Defaults to "show".
func AddRecoveredErrorsFN(cmd *cobra.Command) {
	cmd.Flags().StringVar(
		&ListRecoveredErrorsFV, RecoveredErrorsFN, Show,
		"Toggles showing or hiding the list of errors which Corso recovered from.")
}

// AddAlertsFN registers the --alerts toggle. Defaults to "show".
func AddAlertsFN(cmd *cobra.Command) {
	cmd.Flags().StringVar(
		&ListAlertsFV, AlertsFN, Show,
		"Toggles showing or hiding the list of alerts produced during the operation.")
}

View File

@ -1,130 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
)
const (
ContactFN = "contact"
ContactFolderFN = "contact-folder"
ContactNameFN = "contact-name"
EmailFN = "email"
EmailFolderFN = "email-folder"
EmailReceivedAfterFN = "email-received-after"
EmailReceivedBeforeFN = "email-received-before"
EmailSenderFN = "email-sender"
EmailSubjectFN = "email-subject"
EventFN = "event"
EventCalendarFN = "event-calendar"
EventOrganizerFN = "event-organizer"
EventRecursFN = "event-recurs"
EventStartsAfterFN = "event-starts-after"
EventStartsBeforeFN = "event-starts-before"
EventSubjectFN = "event-subject"
)
// flag values (ie: FV)
var (
ContactFV []string
ContactFolderFV []string
ContactNameFV string
EmailFV []string
EmailFolderFV []string
EmailReceivedAfterFV string
EmailReceivedBeforeFV string
EmailSenderFV string
EmailSubjectFV string
EventFV []string
EventCalendarFV []string
EventOrganizerFV string
EventRecursFV string
EventStartsAfterFV string
EventStartsBeforeFV string
EventSubjectFV string
)
// AddExchangeDetailsAndRestoreFlags adds flags that are common to both the
// details and restore commands.
//
// When emailOnly is true, only the email selectors are registered; the
// event and contact selectors are skipped entirely.
//
// NOTE(review): the *After/*Before flags take datetime strings that are not
// validated here; presumably they are parsed downstream — confirm.
func AddExchangeDetailsAndRestoreFlags(cmd *cobra.Command, emailOnly bool) {
	fs := cmd.Flags()
	// email flags
	fs.StringSliceVar(
		&EmailFV,
		EmailFN, nil,
		"Select email messages by ID; accepts '"+Wildcard+"' to select all emails.")
	fs.StringSliceVar(
		&EmailFolderFV,
		EmailFolderFN, nil,
		"Select emails within a folder; accepts '"+Wildcard+"' to select all email folders.")
	fs.StringVar(
		&EmailSubjectFV,
		EmailSubjectFN, "",
		"Select emails with a subject containing this value.")
	fs.StringVar(
		&EmailSenderFV,
		EmailSenderFN, "",
		"Select emails from a specific sender.")
	fs.StringVar(
		&EmailReceivedAfterFV,
		EmailReceivedAfterFN, "",
		"Select emails received after this datetime.")
	fs.StringVar(
		&EmailReceivedBeforeFV,
		EmailReceivedBeforeFN, "",
		"Select emails received before this datetime.")
	// NOTE: Only temporary until we add support for exporting the
	// others as well in exchange.
	if emailOnly {
		return
	}
	// event flags
	fs.StringSliceVar(
		&EventFV,
		EventFN, nil,
		"Select events by event ID; accepts '"+Wildcard+"' to select all events.")
	fs.StringSliceVar(
		&EventCalendarFV,
		EventCalendarFN, nil,
		"Select events under a calendar; accepts '"+Wildcard+"' to select all events.")
	fs.StringVar(
		&EventSubjectFV,
		EventSubjectFN, "",
		"Select events with a subject containing this value.")
	fs.StringVar(
		&EventOrganizerFV,
		EventOrganizerFN, "",
		"Select events from a specific organizer.")
	fs.StringVar(
		&EventRecursFV,
		EventRecursFN, "",
		"Select recurring events. Use `--event-recurs false` to select non-recurring events.")
	fs.StringVar(
		&EventStartsAfterFV,
		EventStartsAfterFN, "",
		"Select events starting after this datetime.")
	fs.StringVar(
		&EventStartsBeforeFV,
		EventStartsBeforeFN, "",
		"Select events starting before this datetime.")
	// contact flags
	fs.StringSliceVar(
		&ContactFV,
		ContactFN, nil,
		"Select contacts by contact ID; accepts '"+Wildcard+"' to select all contacts.")
	fs.StringSliceVar(
		&ContactFolderFV,
		ContactFolderFN, nil,
		"Select contacts within a folder; accepts '"+Wildcard+"' to select all contact folders.")
	fs.StringVar(
		&ContactNameFV,
		ContactNameFN, "",
		"Select contacts whose contact name contains this value.")
}

View File

@ -1,23 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
)
const (
ArchiveFN = "archive"
FormatFN = "format"
)
var (
ArchiveFV bool
FormatFV string
)
// AddExportConfigFlags adds the export config flag set: the --archive
// toggle and the hidden --format selector.
func AddExportConfigFlags(cmd *cobra.Command) {
	flgs := cmd.Flags()

	flgs.BoolVar(&ArchiveFV, ArchiveFN, false, "Export data as an archive instead of individual files")

	flgs.StringVar(&FormatFV, FormatFN, "", "Specify the export file format")
	cobra.CheckErr(flgs.MarkHidden(FormatFN))
}

View File

@ -1,48 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/pkg/storage"
)
// filesystem flag names
const (
FilesystemPathFN = "path"
)
// filesystem flag values
var (
FilesystemPathFV string
)
// AddFilesystemFlags registers everything needed to connect to a
// filesystem-backed repository: azure credentials, the corso passphrase,
// and the (required) storage --path.
func AddFilesystemFlags(cmd *cobra.Command) {
	AddAzureCredsFlags(cmd)
	AddCorsoPassphaseFlags(cmd)

	cmd.Flags().StringVar(
		&FilesystemPathFV,
		FilesystemPathFN,
		"",
		"path to local or network storage")
	cobra.CheckErr(cmd.MarkFlagRequired(FilesystemPathFN))
}

// FilesystemFlagOverrides collects the user-populated filesystem flags on
// cmd into a storage-config override map.
func FilesystemFlagOverrides(cmd *cobra.Command) map[string]string {
	return PopulateFilesystemFlags(GetPopulatedFlags(cmd))
}

// PopulateFilesystemFlags builds the filesystem storage override map,
// always including the provider type and adding the path only when the
// user explicitly set it.
func PopulateFilesystemFlags(flagset PopulatedFlags) map[string]string {
	overrides := map[string]string{
		storage.StorageProviderTypeKey: storage.ProviderFilesystem.String(),
	}

	if _, set := flagset[FilesystemPathFN]; set {
		overrides[FilesystemPathFN] = FilesystemPathFV
	}

	return overrides
}

View File

@ -1,36 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
"github.com/spf13/pflag"
)
const Wildcard = "*"
// PopulatedFlags is the set of flag long-names that the user explicitly
// set on the command line. Values are empty; only key membership matters.
type PopulatedFlags map[string]struct{}

// populate records pf's long name if the user changed it from its default.
// Nil flags are ignored.
func (fs PopulatedFlags) populate(pf *pflag.Flag) {
	if pf != nil && pf.Changed {
		fs[pf.Name] = struct{}{}
	}
}

// GetPopulatedFlags returns a map of flags that have been
// populated by the user. Entry keys match the flag's long
// name. Values are empty.
func GetPopulatedFlags(cmd *cobra.Command) PopulatedFlags {
	populated := PopulatedFlags{}

	flagSet := cmd.Flags()
	if flagSet == nil {
		return populated
	}

	flagSet.VisitAll(populated.populate)

	return populated
}

View File

@ -1,93 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
)
const (
DataMessages = "messages"
DataConversations = "conversations"
)
const (
ChannelFN = "channel"
ConversationFN = "conversation"
GroupFN = "group"
MessageFN = "message"
PostFN = "post"
MessageCreatedAfterFN = "message-created-after"
MessageCreatedBeforeFN = "message-created-before"
MessageLastReplyAfterFN = "message-last-reply-after"
MessageLastReplyBeforeFN = "message-last-reply-before"
)
var (
ChannelFV []string
ConversationFV []string
GroupFV []string
MessageFV []string
PostFV []string
MessageCreatedAfterFV string
MessageCreatedBeforeFV string
MessageLastReplyAfterFV string
MessageLastReplyBeforeFV string
)
// AddGroupDetailsAndRestoreFlags registers the selectors shared by the
// groups/teams details and restore commands: channel/message filters and
// conversation/post filters.
//
// NOTE(review): the *After/*Before flags take datetime strings that are not
// validated here; presumably they are parsed downstream — confirm.
func AddGroupDetailsAndRestoreFlags(cmd *cobra.Command) {
	fs := cmd.Flags()
	// channel / message selectors
	fs.StringSliceVar(
		&ChannelFV,
		ChannelFN, nil,
		"Select data within a Team's Channel.")
	fs.StringSliceVar(
		&MessageFV,
		MessageFN, nil,
		"Select messages by reference.")
	fs.StringVar(
		&MessageCreatedAfterFV,
		MessageCreatedAfterFN, "",
		"Select messages created after this datetime.")
	fs.StringVar(
		&MessageCreatedBeforeFV,
		MessageCreatedBeforeFN, "",
		"Select messages created before this datetime.")
	fs.StringVar(
		&MessageLastReplyAfterFV,
		MessageLastReplyAfterFN, "",
		"Select messages with replies after this datetime.")
	fs.StringVar(
		&MessageLastReplyBeforeFV,
		MessageLastReplyBeforeFN, "",
		"Select messages with replies before this datetime.")
	// conversation / post selectors
	fs.StringSliceVar(
		&ConversationFV,
		ConversationFN, nil,
		"Select data within a Group's Conversation.")
	fs.StringSliceVar(
		&PostFV,
		PostFN, nil,
		"Select Conversation Posts by reference.")
}
// AddGroupFlag adds the --group flag. Values may be the group's display
// name (the expected user input), its ID, or its mailbox address. The
// mailbox address exists as a lookup fallback for certain SDK cases and
// is not advertised to end users.
func AddGroupFlag(cmd *cobra.Command) {
	fs := cmd.Flags()

	fs.StringSliceVar(
		&GroupFV,
		GroupFN, nil,
		"Backup data by group; accepts '"+Wildcard+"' to select all groups.")
}

View File

@ -1,42 +0,0 @@
package flags
import (
"fmt"
"strings"
"github.com/spf13/cobra"
)
var CategoryDataFV []string
const CategoryDataFN = "data"
// AddDataFlag registers the --data flag, listing the provided allowed
// values in its help text. If allowed is empty the flag is not registered
// at all; if hide is true the flag is registered but hidden.
func AddDataFlag(cmd *cobra.Command, allowed []string, hide bool) {
	if len(allowed) == 0 {
		return
	}

	var allowedMsg string

	if len(allowed) == 1 {
		allowedMsg = allowed[0]
	} else {
		// "a, b or c" phrasing; for exactly two values this reduces
		// to "a or b".
		allowedMsg = fmt.Sprintf(
			"%s or %s",
			strings.Join(allowed[:len(allowed)-1], ", "),
			allowed[len(allowed)-1])
	}

	fs := cmd.Flags()

	fs.StringSliceVar(
		&CategoryDataFV,
		CategoryDataFN, nil,
		"Select one or more types of data to backup: "+allowedMsg+".")

	if hide {
		cobra.CheckErr(fs.MarkHidden(CategoryDataFN))
	}
}

View File

@ -1,56 +0,0 @@
package flags
import (
"fmt"
"github.com/spf13/cobra"
)
const (
UserFN = "user"
MailBoxFN = "mailbox"
AzureClientTenantFN = "azure-tenant-id"
AzureClientIDFN = "azure-client-id"
AzureClientSecretFN = "azure-client-secret"
)
var (
UserFV []string
AzureClientTenantFV string
AzureClientIDFV string
AzureClientSecretFV string
)
// AddUserFlag adds the required --user flag.
func AddUserFlag(cmd *cobra.Command) {
	fs := cmd.Flags()

	fs.StringSliceVar(
		&UserFV,
		UserFN, nil,
		"Backup a specific user's data; accepts '"+Wildcard+"' to select all users.")
	cobra.CheckErr(cmd.MarkFlagRequired(UserFN))
}
// AddMailBoxFlag adds the --mailbox flag along with a deprecated --user
// alias. Both flags write into UserFV, so existing callers of either
// spelling keep working.
func AddMailBoxFlag(cmd *cobra.Command) {
	// named fs (not "flags") to match the sibling Add* funcs and avoid
	// shadowing this package's own name.
	fs := cmd.Flags()

	fs.StringSliceVar(
		&UserFV,
		UserFN, nil,
		"Backup a specific user's data; accepts '"+Wildcard+"' to select all users.")
	cobra.CheckErr(fs.MarkDeprecated(UserFN, fmt.Sprintf("use --%s instead", MailBoxFN)))

	fs.StringSliceVar(
		&UserFV,
		MailBoxFN, nil,
		// fixed help-text grammar: "all mailbox" -> "all mailboxes".
		"Backup a specific mailbox's data; accepts '"+Wildcard+"' to select all mailboxes.")
}
// AddAzureCredsFlags registers the Azure/M365 app credential flags:
// tenant ID, client ID, and client secret.
func AddAzureCredsFlags(cmd *cobra.Command) {
	flgs := cmd.Flags()

	flgs.StringVar(&AzureClientTenantFV, AzureClientTenantFN, "", "Azure tenant ID")
	flgs.StringVar(&AzureClientIDFV, AzureClientIDFN, "", "Azure app client ID")
	flgs.StringVar(&AzureClientSecretFV, AzureClientSecretFN, "", "Azure app client secret")
}

View File

@ -1,60 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/pkg/control/repository"
)
const (
MaintenanceModeFN = "mode"
ForceMaintenanceFN = "force"
UserMaintenanceFN = "user"
HostnameMaintenanceFN = "host"
)
var (
MaintenanceModeFV string
ForceMaintenanceFV bool
UserMaintenanceFV string
HostnameMaintenanceFV string
)
// AddMaintenanceModeFlag registers the --mode flag choosing which
// maintenance operation to run. Defaults to complete maintenance.
func AddMaintenanceModeFlag(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.StringVar(
		&MaintenanceModeFV,
		MaintenanceModeFN,
		repository.CompleteMaintenance.String(),
		"Type of maintenance operation to run ('"+
			repository.MetadataMaintenance.String()+"' | '"+
			repository.CompleteMaintenance.String()+"' )")
}

// AddForceMaintenanceFlag registers the hidden --force toggle.
func AddForceMaintenanceFlag(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.BoolVar(
		&ForceMaintenanceFV,
		ForceMaintenanceFN,
		false,
		"Force maintenance. Caution: user must ensure this is not run concurrently on a single repo")
	cobra.CheckErr(fs.MarkHidden(ForceMaintenanceFN))
}

// AddMaintenanceUserFlag registers the --user override for the repo owner
// user during maintenance.
func AddMaintenanceUserFlag(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.StringVar(
		&UserMaintenanceFV,
		UserMaintenanceFN,
		"",
		"Attempt to run maintenance as the specified user for the repo owner user")
}

// AddMaintenanceHostnameFlag registers the --host override for the repo
// owner hostname during maintenance.
func AddMaintenanceHostnameFlag(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.StringVar(
		&HostnameMaintenanceFV,
		HostnameMaintenanceFN,
		"",
		"Attempt to run maintenance with the specified hostname for the repo owner hostname")
}

View File

@ -1,65 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
)
const (
FileFN = "file"
FolderFN = "folder"
FileCreatedAfterFN = "file-created-after"
FileCreatedBeforeFN = "file-created-before"
FileModifiedAfterFN = "file-modified-after"
FileModifiedBeforeFN = "file-modified-before"
UseOldDeltaProcessFN = "use-old-delta-process"
)
var (
FolderPathFV []string
FileNameFV []string
FileCreatedAfterFV string
FileCreatedBeforeFV string
FileModifiedAfterFV string
FileModifiedBeforeFV string
UseOldDeltaProcessFV bool
)
// AddOneDriveDetailsAndRestoreFlags adds flags that are common to both the
// details and restore commands.
//
// NOTE(review): the *After/*Before flags take datetime strings that are not
// validated here; presumably they are parsed downstream — confirm.
func AddOneDriveDetailsAndRestoreFlags(cmd *cobra.Command) {
	fs := cmd.Flags()
	// location/name selectors
	fs.StringSliceVar(
		&FolderPathFV,
		FolderFN, nil,
		"Select files by OneDrive folder; defaults to root.")
	fs.StringSliceVar(
		&FileNameFV,
		FileFN, nil,
		"Select files by name.")
	// datetime-bound selectors
	fs.StringVar(
		&FileCreatedAfterFV,
		FileCreatedAfterFN, "",
		"Select files created after this datetime.")
	fs.StringVar(
		&FileCreatedBeforeFV,
		FileCreatedBeforeFN, "",
		"Select files created before this datetime.")
	fs.StringVar(
		&FileModifiedAfterFV,
		FileModifiedAfterFN, "",
		"Select files modified after this datetime.")
	fs.StringVar(
		&FileModifiedBeforeFV,
		FileModifiedBeforeFN, "",
		"Select files modified before this datetime.")
}

View File

@ -1,198 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
)
const (
AlertsFN = "alerts"
ConfigFileFN = "config-file"
DeltaPageSizeFN = "delta-page-size"
DisableDeltaFN = "disable-delta"
DisableIncrementalsFN = "disable-incrementals"
DisableLazyItemReaderFN = "disable-lazy-item-reader"
DisableSlidingWindowLimiterFN = "disable-sliding-window-limiter"
ForceItemDataDownloadFN = "force-item-data-download"
EnableImmutableIDFN = "enable-immutable-id"
FailFastFN = "fail-fast"
FailedItemsFN = "failed-items"
FetchParallelismFN = "fetch-parallelism"
NoPermissionsFN = "no-permissions"
NoStatsFN = "no-stats"
RecoveredErrorsFN = "recovered-errors"
RunModeFN = "run-mode"
SkippedItemsFN = "skipped-items"
SkipReduceFN = "skip-reduce"
)
var (
ConfigFileFV string
DeltaPageSizeFV int
DisableDeltaFV bool
DisableIncrementalsFV bool
DisableLazyItemReaderFV bool
DisableSlidingWindowLimiterFV bool
ForceItemDataDownloadFV bool
EnableImmutableIDFV bool
FailFastFV bool
FailedItemsFV string
FetchParallelismFV int
ListAlertsFV string
ListSkippedItemsFV string
ListRecoveredErrorsFV string
NoPermissionsFV bool
NoStatsFV bool
// RunMode describes the type of run, such as:
// flagtest, dry, run. Should default to 'run'.
RunModeFV string
SkipReduceFV bool
)
// well-known flag values
const (
RunModeFlagTest = "flag-test"
RunModeRun = "run"
)
// AddGlobalOperationFlags adds the global operations flag set.
// Registered on the persistent flag set so it applies to subcommands too.
func AddGlobalOperationFlags(cmd *cobra.Command) {
	fs := cmd.PersistentFlags()
	fs.BoolVar(&NoStatsFV, NoStatsFN, false, "disable anonymous usage statistics gathering")
}

// AddFailFastFlag adds a flag to toggle fail-fast error handling behavior.
func AddFailFastFlag(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.BoolVar(&FailFastFV, FailFastFN, false, "stop processing immediately if any error occurs")
	// TODO: reveal this flag when fail-fast support is implemented
	cobra.CheckErr(fs.MarkHidden(FailFastFN))
}

// AddNoPermissionsFlag adds OneDrive flag for skipping restoring permissions
func AddNoPermissionsFlag(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.BoolVar(&NoPermissionsFV, NoPermissionsFN, false, "don't restore file and folder permissions")
}

// AddSkipReduceFlag adds a hidden flag that allows callers to skip the selector
// reduction step. Currently only intended for details commands, not restore.
func AddSkipReduceFlag(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.BoolVar(&SkipReduceFV, SkipReduceFN, false, "Skip the selector reduce filtering")
	cobra.CheckErr(fs.MarkHidden(SkipReduceFN))
}

// AddDeltaPageSizeFlag adds a hidden flag that allows callers to reduce delta
// query page sizes below 500.
func AddDeltaPageSizeFlag(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.IntVar(
		&DeltaPageSizeFV,
		DeltaPageSizeFN,
		500,
		"Control quantity of items returned in paged queries. Valid range is [1-500]. Default: 500")
	cobra.CheckErr(fs.MarkHidden(DeltaPageSizeFN))
}

// AddFetchParallelismFlag adds a hidden flag that allows callers to reduce call
// paralellism (ie, the corso worker pool size) from 4 to as low as 1.
func AddFetchParallelismFlag(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.IntVar(
		&FetchParallelismFV,
		FetchParallelismFN,
		4,
		"Control the number of concurrent data fetches for Exchange. Valid range is [1-4]. Default: 4")
	cobra.CheckErr(fs.MarkHidden(FetchParallelismFN))
}

// AddDisableIncrementalsFlag adds the hidden '--disable-incrementals' cli flag
// which, when set, disables incremental backups.
func AddDisableIncrementalsFlag(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.BoolVar(
		&DisableIncrementalsFV,
		DisableIncrementalsFN,
		false,
		"Disable incremental data retrieval in backups.")
	cobra.CheckErr(fs.MarkHidden(DisableIncrementalsFN))
}

// AddForceItemDataDownloadFlag adds the hidden '--force-item-data-download'
// cli flag which, when set, disables kopia-assisted incremental backups.
func AddForceItemDataDownloadFlag(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.BoolVar(
		&ForceItemDataDownloadFV,
		ForceItemDataDownloadFN,
		false,
		"Disable cached data checks in backups to force item redownloads for "+
			"items changed since the last successful backup.")
	cobra.CheckErr(fs.MarkHidden(ForceItemDataDownloadFN))
}

// AddDisableDeltaFlag adds the hidden '--disable-delta' cli flag which, when
// set, disables delta based backups.
func AddDisableDeltaFlag(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.BoolVar(
		&DisableDeltaFV,
		DisableDeltaFN,
		false,
		"Disable delta based data retrieval in backups.")
	cobra.CheckErr(fs.MarkHidden(DisableDeltaFN))
}

// AddEnableImmutableIDFlag adds the hidden '--enable-immutable-id' cli flag
// which, when set, enables immutable IDs for Exchange
func AddEnableImmutableIDFlag(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.BoolVar(
		&EnableImmutableIDFV,
		EnableImmutableIDFN,
		false,
		"Enable exchange immutable ID.")
	cobra.CheckErr(fs.MarkHidden(EnableImmutableIDFN))
}

// AddRunModeFlag adds the hidden --run-mode flag.
// When persistent is true the flag is registered on the persistent flag set
// so subcommands inherit it.
func AddRunModeFlag(cmd *cobra.Command, persistent bool) {
	fs := cmd.Flags()
	if persistent {
		fs = cmd.PersistentFlags()
	}
	fs.StringVar(&RunModeFV, RunModeFN, "run", "What mode to run: dry, test, run. Defaults to run.")
	cobra.CheckErr(fs.MarkHidden(RunModeFN))
}

// AddDisableSlidingWindowLimiterFlag disables the experimental sliding window
// rate limiter for graph API requests. This is only relevant for exchange
// backups. Exchange restores continue to use the default token bucket rate
// limiter. Setting this flag switches exchange backups to use the default
// token bucket rate limiter.
func AddDisableSlidingWindowLimiterFlag(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.BoolVar(
		&DisableSlidingWindowLimiterFV,
		DisableSlidingWindowLimiterFN,
		false,
		"Disable sliding window rate limiter.")
	cobra.CheckErr(fs.MarkHidden(DisableSlidingWindowLimiterFN))
}

// AddDisableLazyItemReader disables lazy item reader, such that we fall back to
// prefetch reader. This flag is currently only meant for groups conversations
// backup. Although it can be utilized for other services in future.
//
// This flag should only be used if lazy item reader is the default choice and
// we want to fallback to prefetch reader.
func AddDisableLazyItemReader(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.BoolVar(
		&DisableLazyItemReaderFV,
		DisableLazyItemReaderFN,
		false,
		"Disable lazy item reader.")
	cobra.CheckErr(fs.MarkHidden(DisableLazyItemReaderFN))
}

View File

@ -1,97 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
)
const (
BackupFN = "backup"
BackupIDsFN = "backups"
AWSAccessKeyFN = "aws-access-key"
AWSSecretAccessKeyFN = "aws-secret-access-key"
AWSSessionTokenFN = "aws-session-token"
// Corso Flags
PassphraseFN = "passphrase"
NewPassphraseFN = "new-passphrase"
)
var (
	BackupIDFV           string
	BackupIDsFV          []string
	AWSAccessKeyFV       string
	AWSSecretAccessKeyFV string
	AWSSessionTokenFV    string
	PassphraseFV         string
	// NOTE(review): "Phasephrase" is a typo for "Passphrase". The name is
	// exported, so renaming it would break callers outside this package.
	NewPhasephraseFV string
)

// AddMultipleBackupIDsFlag adds the --backups flag.
// When require is true the flag is marked required.
func AddMultipleBackupIDsFlag(cmd *cobra.Command, require bool) {
	cmd.Flags().StringSliceVar(
		&BackupIDsFV,
		BackupIDsFN, nil,
		"',' separated IDs of the backup to retrieve")
	if require {
		cobra.CheckErr(cmd.MarkFlagRequired(BackupIDsFN))
	}
}

// AddBackupIDFlag adds the --backup flag.
// When require is true the flag is marked required.
func AddBackupIDFlag(cmd *cobra.Command, require bool) {
	cmd.Flags().StringVar(&BackupIDFV, BackupFN, "", "ID of the backup to retrieve.")
	if require {
		cobra.CheckErr(cmd.MarkFlagRequired(BackupFN))
	}
}

// ---------------------------------------------------------------------------
// storage
// ---------------------------------------------------------------------------

// AddAllStorageFlags registers the full storage credential flag set.
func AddAllStorageFlags(cmd *cobra.Command) {
	AddCorsoPassphaseFlags(cmd)
	// AddAzureCredsFlags is added by ProviderFlags
	AddAWSCredsFlags(cmd)
}

// AddAWSCredsFlags registers the AWS/S3 credential flags.
func AddAWSCredsFlags(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.StringVar(&AWSAccessKeyFV, AWSAccessKeyFN, "", "S3 access key")
	fs.StringVar(&AWSSecretAccessKeyFV, AWSSecretAccessKeyFN, "", "S3 access secret")
	fs.StringVar(&AWSSessionTokenFV, AWSSessionTokenFN, "", "S3 session token")
}

// AddCorsoPassphaseFlags registers the --passphrase flag protecting
// encrypted repository contents.
// NOTE(review): "Passphase" in the function name is a typo for
// "Passphrase"; exported, so renaming would break callers.
func AddCorsoPassphaseFlags(cmd *cobra.Command) {
	fs := cmd.Flags()
	fs.StringVar(
		&PassphraseFV,
		PassphraseFN,
		"",
		"Passphrase to protect encrypted repository contents")
}

// AddUpdatePassphraseFlags registers the --new-passphrase flag used to
// rotate the repo passphrase. When require is true the flag is required.
func AddUpdatePassphraseFlags(cmd *cobra.Command, require bool) {
	fs := cmd.Flags()
	fs.StringVar(
		&NewPhasephraseFV,
		NewPassphraseFN,
		"",
		"update Corso passphrase for repo")
	if require {
		cobra.CheckErr(cmd.MarkFlagRequired(NewPassphraseFN))
	}
}

// ---------------------------------------------------------------------------
// Provider
// ---------------------------------------------------------------------------

// AddAllProviderFlags registers the full provider (M365) flag set.
func AddAllProviderFlags(cmd *cobra.Command) {
	AddAzureCredsFlags(cmd)
}

View File

@ -1,37 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/pkg/control"
)
const (
CollisionsFN = "collisions"
DestinationFN = "destination"
ToResourceFN = "to-resource"
)
var (
CollisionsFV string
DestinationFV string
ToResourceFV string
)
// AddRestoreConfigFlags adds the restore config flag set: the collision
// policy, the destination override, and — only when canRestoreToAlternate
// is true — the cross-resource restore target.
func AddRestoreConfigFlags(cmd *cobra.Command, canRestoreToAlternate bool) {
	flagSet := cmd.Flags()

	flagSet.StringVar(
		&CollisionsFV, CollisionsFN, string(control.Skip),
		//nolint:lll
		"Sets the behavior for existing item collisions: "+string(control.Skip)+", "+string(control.Copy)+", or "+string(control.Replace))
	flagSet.StringVar(
		&DestinationFV, DestinationFN, "",
		"Overrides the folder where items get restored; '/' places items into their original location")

	if !canRestoreToAlternate {
		return
	}

	flagSet.StringVar(
		&ToResourceFV, ToResourceFN, "",
		"Overrides the protected resource (mailbox, site, user, etc) where data gets restored")
}

View File

@ -1,50 +0,0 @@
package flags
import (
"time"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/pkg/control/repository"
)
const (
RetentionModeFN = "retention-mode"
RetentionDurationFN = "retention-duration"
ExtendRetentionFN = "extend-retention"
)
var (
RetentionModeFV string
RetentionDurationFV time.Duration
ExtendRetentionFV bool
)
// AddRetentionConfigFlags adds the retention config flag set.
// All three flags are hidden.
func AddRetentionConfigFlags(cmd *cobra.Command) {
	fs := cmd.Flags()
	// object-lock mode; defaults to no retention.
	fs.StringVar(
		&RetentionModeFV,
		RetentionModeFN,
		repository.NoRetention.String(),
		"Sets object locking mode (if any) to use in remote storage: "+
			repository.NoRetention.String()+", "+
			repository.GovernanceRetention.String()+", or "+
			repository.ComplianceRetention.String())
	cobra.CheckErr(fs.MarkHidden(RetentionModeFN))
	// lock duration; zero means no lock duration is applied.
	fs.DurationVar(
		&RetentionDurationFV,
		RetentionDurationFN,
		time.Duration(0),
		"Set the amount of time to lock individual objects in remote storage")
	cobra.CheckErr(fs.MarkHidden(RetentionDurationFN))
	fs.BoolVar(
		&ExtendRetentionFV,
		ExtendRetentionFN,
		false,
		"Extends object locks during maintenance. "+
			"Extends locks by the most recently set value of "+RetentionDurationFN)
	cobra.CheckErr(fs.MarkHidden(ExtendRetentionFN))
}

View File

@ -1,86 +0,0 @@
package flags
import (
"strconv"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/pkg/credentials"
"github.com/alcionai/corso/src/pkg/storage"
)
// S3 bucket flags
const (
BucketFN = "bucket"
EndpointFN = "endpoint"
PrefixFN = "prefix"
DoNotUseTLSFN = "disable-tls"
DoNotVerifyTLSFN = "disable-tls-verification"
)
// S3 bucket flag values
var (
BucketFV string
EndpointFV string
PrefixFV string
DoNotUseTLSFV bool
DoNotVerifyTLSFV bool
)
// AddS3BucketFlags registers the S3 repo connection flags.
func AddS3BucketFlags(cmd *cobra.Command) {
	fs := cmd.Flags()

	// Flags addition ordering should follow the order we want them to
	// appear in help and docs: more generic and more frequently used
	// flags take precedence.
	fs.StringVar(&BucketFV, BucketFN, "", "Name of S3 bucket for repo. (required)")
	fs.StringVar(&PrefixFV, PrefixFN, "", "Repo prefix within bucket.")
	fs.StringVar(&EndpointFV, EndpointFN, "", "S3 service endpoint.")
	fs.BoolVar(&DoNotUseTLSFV, DoNotUseTLSFN, false, "Disable TLS (HTTPS)")
	fs.BoolVar(&DoNotVerifyTLSFV, DoNotVerifyTLSFN, false, "Disable TLS (HTTPS) certificate verification.")
}

// S3FlagOverrides collects the user-populated S3 flags on cmd into a
// storage-config override map.
func S3FlagOverrides(cmd *cobra.Command) map[string]string {
	return PopulateS3Flags(GetPopulatedFlags(cmd))
}

// PopulateS3Flags builds the S3 storage override map. The provider type is
// always present; every other entry is added only when the user explicitly
// set the corresponding flag.
func PopulateS3Flags(flagset PopulatedFlags) map[string]string {
	overrides := map[string]string{
		storage.StorageProviderTypeKey: storage.ProviderS3.String(),
	}

	addIfSet := func(flag, key, value string) {
		if _, ok := flagset[flag]; ok {
			overrides[key] = value
		}
	}

	addIfSet(AWSAccessKeyFN, credentials.AWSAccessKeyID, AWSAccessKeyFV)
	addIfSet(AWSSecretAccessKeyFN, credentials.AWSSecretAccessKey, AWSSecretAccessKeyFV)
	addIfSet(AWSSessionTokenFN, credentials.AWSSessionToken, AWSSessionTokenFV)
	addIfSet(BucketFN, storage.Bucket, BucketFV)
	addIfSet(PrefixFN, storage.Prefix, PrefixFV)
	addIfSet(DoNotUseTLSFN, storage.DoNotUseTLS, strconv.FormatBool(DoNotUseTLSFV))
	addIfSet(DoNotVerifyTLSFN, storage.DoNotVerifyTLS, strconv.FormatBool(DoNotVerifyTLSFV))
	addIfSet(EndpointFN, storage.Endpoint, EndpointFV)

	return overrides
}

View File

@ -1,144 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
)
const (
DataLibraries = "libraries"
DataPages = "pages"
DataLists = "lists"
)
const (
LibraryFN = "library"
ListFN = "list"
ListModifiedAfterFN = "list-modified-after"
ListModifiedBeforeFN = "list-modified-before"
ListCreatedAfterFN = "list-created-after"
ListCreatedBeforeFN = "list-created-before"
PageFolderFN = "page-folder"
PageFN = "page"
SiteFN = "site" // site only accepts WebURL values
SiteIDFN = "site-id" // site-id accepts actual site ids
)
var (
LibraryFV string
ListFV []string
ListModifiedAfterFV string
ListModifiedBeforeFV string
ListCreatedAfterFV string
ListCreatedBeforeFV string
PageFolderFV []string
PageFV []string
SiteIDFV []string
WebURLFV []string
)
// AddSharePointDetailsAndRestoreFlags adds flags that are common to both the
// details and restore commands.
//
// NOTE(review): the *After/*Before flags take datetime strings that are not
// validated here; presumably they are parsed downstream — confirm.
func AddSharePointDetailsAndRestoreFlags(cmd *cobra.Command) {
	fs := cmd.Flags()
	// libraries
	fs.StringVar(
		&LibraryFV,
		LibraryFN, "",
		"Select only this library; defaults to all libraries.")
	fs.StringSliceVar(
		&FolderPathFV,
		FolderFN, nil,
		"Select by folder; defaults to root.")
	fs.StringSliceVar(
		&FileNameFV,
		FileFN, nil,
		"Select by file name.")
	fs.StringVar(
		&FileCreatedAfterFV,
		FileCreatedAfterFN, "",
		"Select files created after this datetime.")
	fs.StringVar(
		&FileCreatedBeforeFV,
		FileCreatedBeforeFN, "",
		"Select files created before this datetime.")
	fs.StringVar(
		&FileModifiedAfterFV,
		FileModifiedAfterFN, "",
		"Select files modified after this datetime.")
	fs.StringVar(
		&FileModifiedBeforeFV,
		FileModifiedBeforeFN, "",
		"Select files modified before this datetime.")
	// lists
	fs.StringSliceVar(
		&ListFV,
		ListFN, nil,
		"Select lists by name.")
	fs.StringVar(
		&ListModifiedAfterFV,
		ListModifiedAfterFN, "",
		"Select lists modified after this datetime.")
	fs.StringVar(
		&ListModifiedBeforeFV,
		ListModifiedBeforeFN, "",
		"Select lists modified before this datetime.")
	fs.StringVar(
		&ListCreatedAfterFV,
		ListCreatedAfterFN, "",
		"Select lists created after this datetime.")
	fs.StringVar(
		&ListCreatedBeforeFV,
		ListCreatedBeforeFN, "",
		"Select lists created before this datetime.")
	// pages (both flags hidden)
	fs.StringSliceVar(
		&PageFolderFV,
		PageFolderFN, nil,
		"Select pages by folder name; accepts '"+Wildcard+"' to select all pages.")
	cobra.CheckErr(fs.MarkHidden(PageFolderFN))
	fs.StringSliceVar(
		&PageFV,
		PageFN, nil,
		"Select pages by item name; accepts '"+Wildcard+"' to select all pages.")
	cobra.CheckErr(fs.MarkHidden(PageFN))
}

// AddSiteIDFlag adds the --site-id flag, which accepts site ID values.
// This flag is hidden, since we expect users to prefer the --site url
// and do not want to encourage confusion.
func AddSiteIDFlag(cmd *cobra.Command, multiple bool) {
	fs := cmd.Flags()
	message := "ID of the site to operate on"
	if multiple {
		//nolint:lll
		message += "; accepts '" + Wildcard + "' to select all sites. Args cannot be comma-delimited and must use multiple flags."
	}
	// note string ARRAY var. IDs naturally contain commas, so we cannot accept
	// duplicate values within a flag declaration. ie: --site-id a,b,c does not
	// work. Users must call --site-id a --site-id b --site-id c.
	fs.StringArrayVar(&SiteIDFV, SiteIDFN, nil, message)
	cobra.CheckErr(fs.MarkHidden(SiteIDFN))
}

// AddSiteFlag adds the --site flag, which accepts webURL values.
func AddSiteFlag(cmd *cobra.Command, multiple bool) {
	message := "Web URL of the site to operate on"
	if multiple {
		message += "; accepts '" + Wildcard + "' to select all sites."
	}
	cmd.Flags().StringSliceVar(&WebURLFV, SiteFN, nil, message)
}

View File

@ -1,28 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
)
const (
TeamFN = "team"
)
var TeamFV []string
// AddTeamDetailsAndRestoreFlags registers the details/restore selectors for
// teams. Currently a no-op placeholder.
func AddTeamDetailsAndRestoreFlags(cmd *cobra.Command) {
	// TODO: implement flags
}

// AddTeamFlag adds the --team flag, which accepts id or name values.
// TODO: need to decide what the appropriate "name" to accept here is.
// keepers thinks its either DisplayName or MailNickname or Mail
// Mail is most accurate, MailNickame is accurate and shorter, but the end user
// may not see either one visibly.
// https://learn.microsoft.com/en-us/graph/api/team-list?view=graph-rest-1.0&tabs=http
func AddTeamFlag(cmd *cobra.Command) {
	cmd.Flags().StringSliceVar(
		&TeamFV,
		TeamFN, nil,
		"Backup data by team; accepts '"+Wildcard+"' to select all teams.")
}

View File

@ -1,13 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
)
const (
DataChats = "chats"
)
// AddTeamsChatsDetailsAndRestoreFlags registers the details/restore
// selectors for teams chats. Currently a no-op placeholder.
func AddTeamsChatsDetailsAndRestoreFlags(cmd *cobra.Command) {
	// TODO: add details flags
}

View File

@ -1,26 +0,0 @@
package testdata
import (
"testing"
"github.com/spf13/cobra"
"gotest.tools/v3/assert"
"github.com/alcionai/corso/src/cli/flags"
)
// PreparedBackupListFlags returns cli arguments setting every backup-list
// display toggle to the "show" value.
func PreparedBackupListFlags() []string {
	return []string{
		"--" + flags.AlertsFN, flags.Show,
		"--" + flags.FailedItemsFN, flags.Show,
		"--" + flags.SkippedItemsFN, flags.Show,
		"--" + flags.RecoveredErrorsFN, flags.Show,
	}
}

// AssertBackupListFlags verifies every backup-list display toggle parsed to
// "show".
// NOTE(review): cmd is unused — the checks read package-level flag values
// rather than inspecting the command; presumably kept for signature parity
// with the other Assert* helpers — confirm.
func AssertBackupListFlags(t *testing.T, cmd *cobra.Command) {
	assert.Equal(t, flags.Show, flags.ListAlertsFV)
	assert.Equal(t, flags.Show, flags.FailedItemsFV)
	assert.Equal(t, flags.Show, flags.ListSkippedItemsFV)
	assert.Equal(t, flags.Show, flags.ListRecoveredErrorsFV)
}

View File

@ -1,130 +0,0 @@
package testdata
import (
"strings"
"github.com/spf13/cobra"
)
// FlgInputs collapses a multi-value flag's inputs into the single
// comma-delimited string form accepted on the command line.
func FlgInputs(in []string) string {
	return strings.Join(in, ",")
}
// Canned input values shared by the cli flag-parsing tests.  Each *Input var
// is fed to a Prepared*Flags helper and then asserted against the matching
// flags.*FV var after parsing.
var (
	BackupInput = "backup-id"
	SiteInput   = "site-id"

	// resource selectors
	GroupsInput  = []string{"team1", "group2"}
	MailboxInput = []string{"mailbox1", "mailbox2"}
	UsersInput   = []string{"users1", "users2"}
	SiteIDInput  = []string{"siteID1", "siteID2"}
	WebURLInput  = []string{"webURL1", "webURL2"}

	// per-service backup data categories
	ExchangeCategoryDataInput   = []string{"email", "events", "contacts"}
	SharepointCategoryDataInput = []string{"files", "lists", "pages"}
	GroupsCategoryDataInput     = []string{"files", "lists", "pages", "messages"}
	TeamsChatsCategoryDataInput = []string{"chats"}

	// teams channel/message filters
	ChannelInput                = []string{"channel1", "channel2"}
	MessageInput                = []string{"message1", "message2"}
	MessageCreatedAfterInput    = "messageCreatedAfter"
	MessageCreatedBeforeInput   = "messageCreatedBefore"
	MessageLastReplyAfterInput  = "messageLastReplyAfter"
	MessageLastReplyBeforeInput = "messageLastReplyBefore"

	// exchange contact filters
	ContactInput     = []string{"contact1", "contact2"}
	ContactFldInput  = []string{"contactFld1", "contactFld2"}
	ContactNameInput = "contactName"

	// groups conversation/post filters
	ConversationInput = []string{"conversation1", "conversation2"}
	PostInput         = []string{"post1", "post2"}

	// exchange email filters
	EmailInput               = []string{"mail1", "mail2"}
	EmailFldInput            = []string{"mailFld1", "mailFld2"}
	EmailReceivedAfterInput  = "mailReceivedAfter"
	EmailReceivedBeforeInput = "mailReceivedBefore"
	EmailSenderInput         = "mailSender"
	EmailSubjectInput        = "mailSubject"

	// exchange event filters
	EventInput             = []string{"event1", "event2"}
	EventCalInput          = []string{"eventCal1", "eventCal2"}
	EventOrganizerInput    = "eventOrganizer"
	EventRecursInput       = "eventRecurs"
	EventStartsAfterInput  = "eventStartsAfter"
	EventStartsBeforeInput = "eventStartsBefore"
	EventSubjectInput      = "eventSubject"

	// drive/library file filters
	LibraryInput            = "library"
	FileNameInput           = []string{"fileName1", "fileName2"}
	FolderPathInput         = []string{"folderPath1", "folderPath2"}
	FileCreatedAfterInput   = "fileCreatedAfter"
	FileCreatedBeforeInput  = "fileCreatedBefore"
	FileModifiedAfterInput  = "fileModifiedAfter"
	FileModifiedBeforeInput = "fileModifiedBefore"

	// sharepoint list filters
	ListsInput             = []string{"listName1", "listName2"}
	ListCreatedAfterInput  = "listCreatedAfter"
	ListCreatedBeforeInput = "listCreatedBefore"
	ListModifiedAfterInput = "listModifiedAfter"
	ListModifiedBeforeInput = "listModifiedBefore"

	// sharepoint page filters
	PageFolderInput = []string{"pageFolder1", "pageFolder2"}
	PageInput       = []string{"page1", "page2"}

	// restore/export configuration
	Collisions      = "collisions"
	Destination     = "destination"
	ToResource      = "toResource"
	SkipPermissions = false
	DeltaPageSize   = "7"
	Archive         = true
	FormatType      = "json"

	// provider credentials (test placeholders only)
	AzureClientID      = "testAzureClientId"
	AzureTenantID      = "testAzureTenantId"
	AzureClientSecret  = "testAzureClientSecret"
	AWSAccessKeyID     = "testAWSAccessKeyID"
	AWSSecretAccessKey = "testAWSSecretAccessKey"
	AWSSessionToken    = "testAWSSessionToken"
	CorsoPassphrase    = "testCorsoPassphrase"

	// operation tuning
	RestoreDestination    = "test-restore-destination"
	FetchParallelism      = "3"
	FailFast              = true
	DisableIncrementals   = true
	ForceItemDataDownload = true
	DisableDelta          = true
	EnableImmutableID     = true
)
// WithFlags2 sets the args on cc to the given command name followed by all
// flag sets, flattened in order.  It behaves exactly like WithFlags, but
// applies the args immediately instead of returning a closure.
func WithFlags2(
	cc *cobra.Command,
	command string,
	flagSets ...[]string,
) {
	// delegate to WithFlags so the arg-building logic lives in one place
	WithFlags(command, flagSets...)(cc)
}
// WithFlags returns a func that sets the command's args to the command name
// followed by every provided flag set, flattened in order.
func WithFlags(
	command string,
	flagSets ...[]string,
) func(*cobra.Command) {
	return func(cc *cobra.Command) {
		args := make([]string, 0, len(flagSets)+1)
		args = append(args, command)

		for _, fs := range flagSets {
			args = append(args, fs...)
		}

		cc.SetArgs(args)
	}
}

View File

@ -1,42 +0,0 @@
package testdata
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/alcionai/corso/src/cli/flags"
)
// PreparedChannelFlags returns cli args exercising the channel, message, and
// message time-filter flags.
func PreparedChannelFlags() []string {
	args := make([]string, 0, 12)

	args = append(args, "--"+flags.ChannelFN, FlgInputs(ChannelInput))
	args = append(args, "--"+flags.MessageFN, FlgInputs(MessageInput))
	args = append(args, "--"+flags.MessageCreatedAfterFN, MessageCreatedAfterInput)
	args = append(args, "--"+flags.MessageCreatedBeforeFN, MessageCreatedBeforeInput)
	args = append(args, "--"+flags.MessageLastReplyAfterFN, MessageLastReplyAfterInput)
	args = append(args, "--"+flags.MessageLastReplyBeforeFN, MessageLastReplyBeforeInput)

	return args
}
// AssertChannelFlags verifies that the channel, message, and message
// time-filter flag vars were populated from the prepared inputs.
func AssertChannelFlags(t *testing.T, cmd *cobra.Command) {
	assert.ElementsMatch(t, ChannelInput, flags.ChannelFV)
	assert.ElementsMatch(t, MessageInput, flags.MessageFV)

	for _, check := range [][2]string{
		{MessageCreatedAfterInput, flags.MessageCreatedAfterFV},
		{MessageCreatedBeforeInput, flags.MessageCreatedBeforeFV},
		{MessageLastReplyAfterInput, flags.MessageLastReplyAfterFV},
		{MessageLastReplyBeforeInput, flags.MessageLastReplyBeforeFV},
	} {
		assert.Equal(t, check[0], check[1])
	}
}
// PreparedConversationFlags returns cli args exercising the conversation and
// post flags.
func PreparedConversationFlags() []string {
	args := make([]string, 0, 4)

	args = append(args, "--"+flags.ConversationFN, FlgInputs(ConversationInput))
	args = append(args, "--"+flags.PostFN, FlgInputs(PostInput))

	return args
}
// AssertConversationFlags verifies that the conversation and post flag vars
// were populated from the prepared inputs.
//
// Uses ElementsMatch for the slice-valued flags, consistent with
// AssertChannelFlags, since element order is not part of the contract.
func AssertConversationFlags(t *testing.T, cmd *cobra.Command) {
	assert.ElementsMatch(t, ConversationInput, flags.ConversationFV)
	assert.ElementsMatch(t, PostInput, flags.PostFV)
}

View File

@ -1,56 +0,0 @@
package testdata
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/alcionai/corso/src/cli/flags"
)
// PreparedStorageFlags returns cli args exercising the AWS credential and
// corso passphrase flags.
func PreparedStorageFlags() []string {
	args := make([]string, 0, 8)

	args = append(args, "--"+flags.AWSAccessKeyFN, AWSAccessKeyID)
	args = append(args, "--"+flags.AWSSecretAccessKeyFN, AWSSecretAccessKey)
	args = append(args, "--"+flags.AWSSessionTokenFN, AWSSessionToken)
	args = append(args, "--"+flags.PassphraseFN, CorsoPassphrase)

	return args
}
// AssertStorageFlags verifies the AWS credential and corso passphrase flag
// vars were populated from the prepared inputs.
func AssertStorageFlags(t *testing.T, cmd *cobra.Command) {
	for _, check := range [][2]string{
		{AWSAccessKeyID, flags.AWSAccessKeyFV},
		{AWSSecretAccessKey, flags.AWSSecretAccessKeyFV},
		{AWSSessionToken, flags.AWSSessionTokenFV},
		{CorsoPassphrase, flags.PassphraseFV},
	} {
		assert.Equal(t, check[0], check[1])
	}
}
// PreparedProviderFlags returns cli args exercising the Azure credential
// flags.
func PreparedProviderFlags() []string {
	args := make([]string, 0, 6)

	args = append(args, "--"+flags.AzureClientIDFN, AzureClientID)
	args = append(args, "--"+flags.AzureClientTenantFN, AzureTenantID)
	args = append(args, "--"+flags.AzureClientSecretFN, AzureClientSecret)

	return args
}
// AssertProviderFlags verifies the Azure credential flag vars were populated
// from the prepared inputs.
func AssertProviderFlags(t *testing.T, cmd *cobra.Command) {
	for _, check := range [][2]string{
		{AzureClientID, flags.AzureClientIDFV},
		{AzureTenantID, flags.AzureClientTenantFV},
		{AzureClientSecret, flags.AzureClientSecretFV},
	} {
		assert.Equal(t, check[0], check[1])
	}
}
// PreparedGenericBackupFlags returns the boolean cli args shared by all
// backup commands.
func PreparedGenericBackupFlags() []string {
	fns := []string{
		flags.FailFastFN,
		flags.DisableIncrementalsFN,
		flags.ForceItemDataDownloadFN,
	}

	args := make([]string, 0, len(fns))

	for _, fn := range fns {
		args = append(args, "--"+fn)
	}

	return args
}
// AssertGenericBackupFlags verifies the shared backup boolean flag vars were
// all set to true.
func AssertGenericBackupFlags(t *testing.T, cmd *cobra.Command) {
	checks := []struct {
		val  bool
		name string
	}{
		{flags.FailFastFV, "fail fast flag"},
		{flags.DisableIncrementalsFV, "disable incrementals flag"},
		{flags.ForceItemDataDownloadFV, "force item data download flag"},
	}

	for _, c := range checks {
		assert.True(t, c.val, c.name)
	}
}

View File

@ -1,32 +0,0 @@
package testdata
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/alcionai/corso/src/cli/flags"
)
// PreparedLibraryFlags returns cli args exercising the library, folder, file,
// and file time-filter flags.
func PreparedLibraryFlags() []string {
	args := make([]string, 0, 14)

	args = append(args, "--"+flags.LibraryFN, LibraryInput)
	args = append(args, "--"+flags.FolderFN, FlgInputs(FolderPathInput))
	args = append(args, "--"+flags.FileFN, FlgInputs(FileNameInput))
	args = append(args, "--"+flags.FileCreatedAfterFN, FileCreatedAfterInput)
	args = append(args, "--"+flags.FileCreatedBeforeFN, FileCreatedBeforeInput)
	args = append(args, "--"+flags.FileModifiedAfterFN, FileModifiedAfterInput)
	args = append(args, "--"+flags.FileModifiedBeforeFN, FileModifiedBeforeInput)

	return args
}
// AssertLibraryFlags verifies the library, folder, file, and file time-filter
// flag vars were populated from the prepared inputs.
func AssertLibraryFlags(t *testing.T, cmd *cobra.Command) {
	assert.Equal(t, LibraryInput, flags.LibraryFV)
	assert.Equal(t, FolderPathInput, flags.FolderPathFV)
	assert.Equal(t, FileNameInput, flags.FileNameFV)

	for _, check := range [][2]string{
		{FileCreatedAfterInput, flags.FileCreatedAfterFV},
		{FileCreatedBeforeInput, flags.FileCreatedBeforeFV},
		{FileModifiedAfterInput, flags.FileModifiedAfterFV},
		{FileModifiedBeforeInput, flags.FileModifiedBeforeFV},
	} {
		assert.Equal(t, check[0], check[1])
	}
}

View File

@ -1,25 +0,0 @@
package testdata
import (
"testing"
"github.com/spf13/cobra"
)
// PreparedTeamsChatsFlags returns cli args exercising the teams-chats filter
// flags.  Currently empty: no chat filters have been implemented yet.
func PreparedTeamsChatsFlags() []string {
	return []string{
		// FIXME: populate when adding filters
		// "--" + flags.ChatCreatedAfterFN, ChatCreatedAfterInput,
		// "--" + flags.ChatCreatedBeforeFN, ChatCreatedBeforeInput,
		// "--" + flags.ChatLastMessageAfterFN, ChatLastMessageAfterInput,
		// "--" + flags.ChatLastMessageBeforeFN, ChatLastMessageBeforeInput,
	}
}
// AssertTeamsChatsFlags verifies the teams-chats filter flag vars.
// Currently a no-op: asserts are commented out until filters exist.
func AssertTeamsChatsFlags(t *testing.T, cmd *cobra.Command) {
	// FIXME: populate when adding filters
	// assert.Equal(t, ChatCreatedAfterInput, flags.ChatCreatedAfterFV)
	// assert.Equal(t, ChatCreatedBeforeInput, flags.ChatCreatedBeforeFV)
	// assert.Equal(t, ChatLastMessageAfterInput, flags.ChatLastMessageAfterFV)
	// assert.Equal(t, ChatLastMessageBeforeInput, flags.ChatLastMessageBeforeFV)
}

View File

@ -46,12 +46,11 @@ func (ev envVar) MinimumPrintable() any {
return ev return ev
} }
func (ev envVar) Headers(bool) []string { func (ev envVar) Headers() []string {
// NOTE: skipID does not make sense in this context
return []string{ev.category, " "} return []string{ev.category, " "}
} }
func (ev envVar) Values(bool) []string { func (ev envVar) Values() []string {
return []string{ev.name, ev.description} return []string{ev.name, ev.description}
} }
@ -94,7 +93,8 @@ func envGuide(cmd *cobra.Command, args []string) {
Info(ctx, Info(ctx,
"\n--- Environment Variable Guide ---\n", "\n--- Environment Variable Guide ---\n",
"As a best practice, Corso retrieves credentials and sensitive information from environment variables.\n ", "As a best practice, Corso retrieves credentials and sensitive information from environment variables.\n ",
"\n") "\n",
)
Table(ctx, toPrintable(corsoEVs)) Table(ctx, toPrintable(corsoEVs))
Info(ctx, "\n") Info(ctx, "\n")
Table(ctx, toPrintable(azureEVs)) Table(ctx, toPrintable(azureEVs))

153
src/cli/options/options.go Normal file
View File

@ -0,0 +1,153 @@
package options
import (
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/pkg/control"
)
// Control produces the control options based on the user's flags.
func Control() control.Options {
	opt := control.Defaults()

	if failFastFV {
		opt.FailureHandling = control.FailFast
	}

	// operation behavior
	opt.DisableMetrics = noStatsFV
	opt.RestorePermissions = restorePermissionsFV
	opt.SkipReduce = skipReduceFV
	opt.Parallelism.ItemFetch = fetchParallelismFV

	// feature toggles
	opt.ToggleFeatures.DisableIncrementals = disableIncrementalsFV
	opt.ToggleFeatures.DisableDelta = disableDeltaFV
	opt.ToggleFeatures.ExchangeImmutableIDs = enableImmutableID
	opt.ToggleFeatures.DisableConcurrencyLimiter = disableConcurrencyLimiterFV

	return opt
}
// ---------------------------------------------------------------------------
// Operations Flags
// ---------------------------------------------------------------------------

// Flag name constants for the operations flag set.
const (
	FailFastFN                  = "fail-fast"
	FetchParallelismFN          = "fetch-parallelism"
	NoStatsFN                   = "no-stats"
	RestorePermissionsFN        = "restore-permissions"
	SkipReduceFN                = "skip-reduce"
	DisableDeltaFN              = "disable-delta"
	DisableIncrementalsFN       = "disable-incrementals"
	EnableImmutableIDFN         = "enable-immutable-id"
	DisableConcurrencyLimiterFN = "disable-concurrency-limiter"
)

// Flag value holders, populated by cobra during arg parsing and consumed by
// Control().
var (
	failFastFV           bool
	fetchParallelismFV   int
	noStatsFV            bool
	restorePermissionsFV bool
	skipReduceFV         bool
)
// AddGlobalOperationFlags adds the global operations flag set.
func AddGlobalOperationFlags(cmd *cobra.Command) {
	cmd.PersistentFlags().
		BoolVar(&noStatsFV, NoStatsFN, false, "disable anonymous usage statistics gathering")
}
// AddFailFastFlag adds a flag to toggle fail-fast error handling behavior.
func AddFailFastFlag(cmd *cobra.Command) {
	cmd.Flags().BoolVar(&failFastFV, FailFastFN, false, "stop processing immediately if any error occurs")

	// TODO: reveal this flag when fail-fast support is implemented
	cobra.CheckErr(cmd.Flags().MarkHidden(FailFastFN))
}
// AddRestorePermissionsFlag adds the OneDrive flag for restoring permissions.
func AddRestorePermissionsFlag(cmd *cobra.Command) {
	cmd.Flags().
		BoolVar(&restorePermissionsFV, RestorePermissionsFN, false, "Restore permissions for files and folders")
}
// AddSkipReduceFlag adds a hidden flag that allows callers to skip the
// selector reduction step.  Currently only intended for details commands,
// not restore.
func AddSkipReduceFlag(cmd *cobra.Command) {
	cmd.Flags().BoolVar(&skipReduceFV, SkipReduceFN, false, "Skip the selector reduce filtering")
	cobra.CheckErr(cmd.Flags().MarkHidden(SkipReduceFN))
}
// AddFetchParallelismFlag adds a hidden flag that allows callers to reduce
// call parallelism (ie, the corso worker pool size) from 4 to as low as 1.
func AddFetchParallelismFlag(cmd *cobra.Command) {
	usage := "Control the number of concurrent data fetches for Exchange. Valid range is [1-4]. Default: 4"

	cmd.Flags().IntVar(&fetchParallelismFV, FetchParallelismFN, 4, usage)
	cobra.CheckErr(cmd.Flags().MarkHidden(FetchParallelismFN))
}
// ---------------------------------------------------------------------------
// Feature Flags
// ---------------------------------------------------------------------------

// Feature-toggle flag value holders, consumed by Control().
var (
	disableIncrementalsFV bool
	disableDeltaFV        bool
)

// AddDisableIncrementalsFlag adds the hidden '--disable-incrementals' cli
// flag which, when set, disables incremental backups.
func AddDisableIncrementalsFlag(cmd *cobra.Command) {
	cmd.Flags().BoolVar(
		&disableIncrementalsFV,
		DisableIncrementalsFN,
		false,
		"Disable incremental data retrieval in backups.")
	cobra.CheckErr(cmd.Flags().MarkHidden(DisableIncrementalsFN))
}
// AddDisableDeltaFlag adds the hidden '--disable-delta' cli flag which, when
// set, disables delta based backups.
func AddDisableDeltaFlag(cmd *cobra.Command) {
	cmd.Flags().BoolVar(
		&disableDeltaFV,
		DisableDeltaFN,
		false,
		"Disable delta based data retrieval in backups.")
	cobra.CheckErr(cmd.Flags().MarkHidden(DisableDeltaFN))
}
// enableImmutableID backs the '--enable-immutable-id' flag.
// NOTE(review): the name deviates from the *FV suffix convention used by the
// other flag value vars in this package — consider renaming.
var enableImmutableID bool

// AddEnableImmutableIDFlag adds the hidden '--enable-immutable-id' cli flag
// which, when set, enables immutable IDs for Exchange.
func AddEnableImmutableIDFlag(cmd *cobra.Command) {
	cmd.Flags().BoolVar(
		&enableImmutableID,
		EnableImmutableIDFN,
		false,
		"Enable exchange immutable ID.")
	cobra.CheckErr(cmd.Flags().MarkHidden(EnableImmutableIDFN))
}
// disableConcurrencyLimiterFV backs the '--disable-concurrency-limiter' flag.
var disableConcurrencyLimiterFV bool

// AddDisableConcurrencyLimiterFlag adds a hidden cli flag which, when set,
// removes concurrency limits when communicating with graph API.  This flag
// is only relevant for exchange backups for now.
func AddDisableConcurrencyLimiterFlag(cmd *cobra.Command) {
	cmd.Flags().BoolVar(
		&disableConcurrencyLimiterFV,
		DisableConcurrencyLimiterFN,
		false,
		"Disable concurrency limiter middleware. Default: false")
	cobra.CheckErr(cmd.Flags().MarkHidden(DisableConcurrencyLimiterFN))
}

View File

@ -0,0 +1,66 @@
package options
import (
"testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
)
// OptionsUnitSuite groups the unit tests for the options package's flag
// registration and parsing helpers.
type OptionsUnitSuite struct {
	tester.Suite
}
func TestOptionsUnitSuite(t *testing.T) {
suite.Run(t, &OptionsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAddExchangeCommands registers every operations flag on a throwaway
// command, parses a full set of args, and verifies each flag value var was
// populated.
func (suite *OptionsUnitSuite) TestAddExchangeCommands() {
	t := suite.T()

	cmd := &cobra.Command{
		Use: "test",
		Run: func(cmd *cobra.Command, args []string) {
			assert.True(t, failFastFV, FailFastFN)
			assert.True(t, disableIncrementalsFV, DisableIncrementalsFN)
			assert.True(t, disableDeltaFV, DisableDeltaFN)
			assert.True(t, noStatsFV, NoStatsFN)
			assert.True(t, restorePermissionsFV, RestorePermissionsFN)
			assert.True(t, skipReduceFV, SkipReduceFN)
			assert.Equal(t, 2, fetchParallelismFV, FetchParallelismFN)
			assert.True(t, disableConcurrencyLimiterFV, DisableConcurrencyLimiterFN)
		},
	}

	// AddGlobalOperationFlags contributes the no-stats flag.
	for _, register := range []func(*cobra.Command){
		AddGlobalOperationFlags,
		AddFailFastFlag,
		AddDisableIncrementalsFlag,
		AddDisableDeltaFlag,
		AddRestorePermissionsFlag,
		AddSkipReduceFlag,
		AddFetchParallelismFlag,
		AddDisableConcurrencyLimiterFlag,
	} {
		register(cmd)
	}

	// Test arg parsing for few args
	cmd.SetArgs([]string{
		"test",
		"--" + FailFastFN,
		"--" + DisableIncrementalsFN,
		"--" + DisableDeltaFN,
		"--" + NoStatsFN,
		"--" + RestorePermissionsFN,
		"--" + SkipReduceFN,
		"--" + FetchParallelismFN, "2",
		"--" + DisableConcurrencyLimiterFN,
	})

	err := cmd.Execute()
	require.NoError(t, err, clues.ToCore(err))
}

View File

@ -5,14 +5,10 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"io" "io"
"strings"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/tidwall/pretty" "github.com/tidwall/pretty"
"github.com/tomlazar/table" "github.com/tomlazar/table"
"github.com/alcionai/corso/src/internal/common/color"
"github.com/alcionai/corso/src/internal/observe"
) )
var ( var (
@ -85,89 +81,67 @@ func Only(ctx context.Context, e error) error {
// Err prints the params to cobra's error writer (stdErr by default) // Err prints the params to cobra's error writer (stdErr by default)
// if s is nil, prints nothing. // if s is nil, prints nothing.
// Prepends the message with "Error: "
func Err(ctx context.Context, s ...any) { func Err(ctx context.Context, s ...any) {
cw := color.NewColorableWriter(color.Red, getRootCmd(ctx).ErrOrStderr()) out(getRootCmd(ctx).ErrOrStderr(), s...)
s = append([]any{"Error:"}, s...)
out(ctx, cw, s...)
} }
// Errf prints the params to cobra's error writer (stdErr by default) // Errf prints the params to cobra's error writer (stdErr by default)
// if s is nil, prints nothing. // if s is nil, prints nothing.
// You should ideally be using SimpleError or OperationError. // Prepends the message with "Error: "
func Errf(ctx context.Context, tmpl string, s ...any) { func Errf(ctx context.Context, tmpl string, s ...any) {
cw := color.NewColorableWriter(color.Red, getRootCmd(ctx).ErrOrStderr()) outf(getRootCmd(ctx).ErrOrStderr(), "Error: "+tmpl, s...)
tmpl = "Error: " + tmpl
outf(ctx, cw, tmpl, s...)
} }
// Out prints the params to cobra's output writer (stdOut by default) // Out prints the params to cobra's output writer (stdOut by default)
// if s is nil, prints nothing. // if s is nil, prints nothing.
func Out(ctx context.Context, s ...any) { func Out(ctx context.Context, s ...any) {
out(ctx, getRootCmd(ctx).OutOrStdout(), s...) out(getRootCmd(ctx).OutOrStdout(), s...)
} }
// Out prints the formatted strings to cobra's output writer (stdOut by default) // Out prints the formatted strings to cobra's output writer (stdOut by default)
// if t is empty, prints nothing. // if t is empty, prints nothing.
func Outf(ctx context.Context, t string, s ...any) { func Outf(ctx context.Context, t string, s ...any) {
outf(ctx, getRootCmd(ctx).OutOrStdout(), t, s...) outf(getRootCmd(ctx).OutOrStdout(), t, s...)
} }
// Info prints the params to cobra's error writer (stdErr by default) // Info prints the params to cobra's error writer (stdErr by default)
// if s is nil, prints nothing. // if s is nil, prints nothing.
func Info(ctx context.Context, s ...any) { func Info(ctx context.Context, s ...any) {
out(ctx, getRootCmd(ctx).ErrOrStderr(), s...) out(getRootCmd(ctx).ErrOrStderr(), s...)
} }
// Info prints the formatted strings to cobra's error writer (stdErr by default) // Info prints the formatted strings to cobra's error writer (stdErr by default)
// if t is empty, prints nothing. // if t is empty, prints nothing.
func Infof(ctx context.Context, t string, s ...any) { func Infof(ctx context.Context, t string, s ...any) {
outf(ctx, getRootCmd(ctx).ErrOrStderr(), t, s...) outf(getRootCmd(ctx).ErrOrStderr(), t, s...)
}
// Pretty prettifies and prints the value.
func Pretty(ctx context.Context, a any) {
if a == nil {
Err(ctx, "<nil>")
return
}
printPrettyJSON(ctx, getRootCmd(ctx).ErrOrStderr(), a)
} }
// PrettyJSON prettifies and prints the value. // PrettyJSON prettifies and prints the value.
func PrettyJSON(ctx context.Context, p minimumPrintabler) { func PrettyJSON(ctx context.Context, p minimumPrintabler) {
if p == nil { if p == nil {
Err(ctx, "<nil>") Err(ctx, "<nil>")
return
} }
outputJSON(ctx, getRootCmd(ctx).ErrOrStderr(), p, outputAsJSONDebug) outputJSON(getRootCmd(ctx).ErrOrStderr(), p, outputAsJSONDebug)
} }
// out is the testable core of exported print funcs // out is the testable core of exported print funcs
func out(ctx context.Context, w io.Writer, s ...any) { func out(w io.Writer, s ...any) {
if len(s) == 0 { if len(s) == 0 {
return return
} }
// observe bars needs to be flushed before printing
observe.Flush(ctx)
fmt.Fprint(w, s...) fmt.Fprint(w, s...)
fmt.Fprintf(w, "\n") fmt.Fprintf(w, "\n")
} }
// outf is the testable core of exported print funcs // outf is the testable core of exported print funcs
func outf(ctx context.Context, w io.Writer, t string, s ...any) { func outf(w io.Writer, t string, s ...any) {
if len(t) == 0 { if len(t) == 0 {
return return
} }
// observe bars needs to be flushed before printing
observe.Flush(ctx)
fmt.Fprintf(w, t, s...) fmt.Fprintf(w, t, s...)
fmt.Fprintf(w, "\n") fmt.Fprintf(w, "\n")
} }
@ -179,11 +153,11 @@ func outf(ctx context.Context, w io.Writer, t string, s ...any) {
type Printable interface { type Printable interface {
minimumPrintabler minimumPrintabler
// should list the property names of the values surfaced in Values() // should list the property names of the values surfaced in Values()
Headers(skipID bool) []string Headers() []string
// list of values for tabular or csv formatting // list of values for tabular or csv formatting
// if the backing data is nil or otherwise missing, // if the backing data is nil or otherwise missing,
// values should provide an empty string as opposed to skipping entries // values should provide an empty string as opposed to skipping entries
Values(skipID bool) []string Values() []string
} }
type minimumPrintabler interface { type minimumPrintabler interface {
@ -193,56 +167,39 @@ type minimumPrintabler interface {
// Item prints the printable, according to the caller's requested format. // Item prints the printable, according to the caller's requested format.
func Item(ctx context.Context, p Printable) { func Item(ctx context.Context, p Printable) {
printItem(ctx, getRootCmd(ctx).OutOrStdout(), p) printItem(getRootCmd(ctx).OutOrStdout(), p)
} }
// print prints the printable items, // print prints the printable items,
// according to the caller's requested format. // according to the caller's requested format.
func printItem(ctx context.Context, w io.Writer, p Printable) { func printItem(w io.Writer, p Printable) {
if outputAsJSON || outputAsJSONDebug { if outputAsJSON || outputAsJSONDebug {
outputJSON(ctx, w, p, outputAsJSONDebug) outputJSON(w, p, outputAsJSONDebug)
return return
} }
outputTable(ctx, w, []Printable{p}) outputTable(w, []Printable{p})
}
// ItemProperties prints the printable either as in a single line or a json
// The difference between this and Item is that this one does not print the ID
func ItemProperties(ctx context.Context, p Printable) {
printItemProperties(ctx, getRootCmd(ctx).OutOrStdout(), p)
}
// print prints the printable items,
// according to the caller's requested format.
func printItemProperties(ctx context.Context, w io.Writer, p Printable) {
if outputAsJSON || outputAsJSONDebug {
outputJSON(ctx, w, p, outputAsJSONDebug)
return
}
outputOneLine(ctx, w, []Printable{p})
} }
// All prints the slice of printable items, // All prints the slice of printable items,
// according to the caller's requested format. // according to the caller's requested format.
func All(ctx context.Context, ps ...Printable) { func All(ctx context.Context, ps ...Printable) {
printAll(ctx, getRootCmd(ctx).OutOrStdout(), ps) printAll(getRootCmd(ctx).OutOrStdout(), ps)
} }
// printAll prints the slice of printable items, // printAll prints the slice of printable items,
// according to the caller's requested format. // according to the caller's requested format.
func printAll(ctx context.Context, w io.Writer, ps []Printable) { func printAll(w io.Writer, ps []Printable) {
if len(ps) == 0 { if len(ps) == 0 {
return return
} }
if outputAsJSON || outputAsJSONDebug { if outputAsJSON || outputAsJSONDebug {
outputJSONArr(ctx, w, ps, outputAsJSONDebug) outputJSONArr(w, ps, outputAsJSONDebug)
return return
} }
outputTable(ctx, w, ps) outputTable(w, ps)
} }
// ------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------
@ -252,23 +209,20 @@ func printAll(ctx context.Context, w io.Writer, ps []Printable) {
// Table writes the printables in a tabular format. Takes headers from // Table writes the printables in a tabular format. Takes headers from
// the 0th printable only. // the 0th printable only.
func Table(ctx context.Context, ps []Printable) { func Table(ctx context.Context, ps []Printable) {
outputTable(ctx, getRootCmd(ctx).OutOrStdout(), ps) outputTable(getRootCmd(ctx).OutOrStdout(), ps)
} }
// output to stdout the list of printable structs in a table // output to stdout the list of printable structs in a table
func outputTable(ctx context.Context, w io.Writer, ps []Printable) { func outputTable(w io.Writer, ps []Printable) {
t := table.Table{ t := table.Table{
Headers: ps[0].Headers(false), Headers: ps[0].Headers(),
Rows: [][]string{}, Rows: [][]string{},
} }
for _, p := range ps { for _, p := range ps {
t.Rows = append(t.Rows, p.Values(false)) t.Rows = append(t.Rows, p.Values())
} }
// observe bars needs to be flushed before printing
observe.Flush(ctx)
_ = t.WriteTable( _ = t.WriteTable(
w, w,
&table.Config{ &table.Config{
@ -282,20 +236,20 @@ func outputTable(ctx context.Context, w io.Writer, ps []Printable) {
// JSON // JSON
// ------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------
func outputJSON(ctx context.Context, w io.Writer, p minimumPrintabler, debug bool) { func outputJSON(w io.Writer, p minimumPrintabler, debug bool) {
if debug { if debug {
printJSON(ctx, w, p) printJSON(w, p)
return return
} }
if debug { if debug {
printJSON(ctx, w, p) printJSON(w, p)
} else { } else {
printJSON(ctx, w, p.MinimumPrintable()) printJSON(w, p.MinimumPrintable())
} }
} }
func outputJSONArr(ctx context.Context, w io.Writer, ps []Printable, debug bool) { func outputJSONArr(w io.Writer, ps []Printable, debug bool) {
sl := make([]any, 0, len(ps)) sl := make([]any, 0, len(ps))
for _, p := range ps { for _, p := range ps {
@ -306,14 +260,11 @@ func outputJSONArr(ctx context.Context, w io.Writer, ps []Printable, debug bool)
} }
} }
printJSON(ctx, w, sl) printJSON(w, sl)
} }
// output to stdout the list of printable structs as json. // output to stdout the list of printable structs as json.
func printJSON(ctx context.Context, w io.Writer, a any) { func printJSON(w io.Writer, a any) {
// observe bars needs to be flushed before printing
observe.Flush(ctx)
bs, err := json.Marshal(a) bs, err := json.Marshal(a)
if err != nil { if err != nil {
fmt.Fprintf(w, "error formatting results to json: %v\n", err) fmt.Fprintf(w, "error formatting results to json: %v\n", err)
@ -322,45 +273,3 @@ func printJSON(ctx context.Context, w io.Writer, a any) {
fmt.Fprintln(w, string(pretty.Pretty(bs))) fmt.Fprintln(w, string(pretty.Pretty(bs)))
} }
// output to stdout the list of printable structs as prettified json.
func printPrettyJSON(ctx context.Context, w io.Writer, a any) {
// observe bars needs to be flushed before printing
observe.Flush(ctx)
bs, err := json.MarshalIndent(a, "", " ")
if err != nil {
fmt.Fprintf(w, "error formatting results to json: %v\n", err)
return
}
fmt.Fprintln(w, string(pretty.Pretty(bs)))
}
// -------------------------------------------------------------------------------------------
// One line
// -------------------------------------------------------------------------------------------
// Output in the following format:
// Bytes Uploaded: 401 kB | Items Uploaded: 59 | Items Skipped: 0 | Errors: 0
func outputOneLine(ctx context.Context, w io.Writer, ps []Printable) {
// observe bars needs to be flushed before printing
observe.Flush(ctx)
headers := ps[0].Headers(true)
rows := [][]string{}
for _, p := range ps {
rows = append(rows, p.Values(true))
}
printables := []string{}
for _, row := range rows {
for i, col := range row {
printables = append(printables, fmt.Sprintf("%s: %s", headers[i], col))
}
}
fmt.Fprintln(w, strings.Join(printables, " | "))
}

View File

@ -36,28 +36,20 @@ func (suite *PrintUnitSuite) TestOnly() {
func (suite *PrintUnitSuite) TestOut() { func (suite *PrintUnitSuite) TestOut() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
b := bytes.Buffer{} b := bytes.Buffer{}
msg := "I have seen the fnords!" msg := "I have seen the fnords!"
out(ctx, &b, msg) out(&b, msg)
assert.Contains(t, b.String(), msg) assert.Contains(t, b.String(), msg)
} }
func (suite *PrintUnitSuite) TestOutf() { func (suite *PrintUnitSuite) TestOutf() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
b := bytes.Buffer{} b := bytes.Buffer{}
msg := "I have seen the fnords!" msg := "I have seen the fnords!"
msg2 := "smarf" msg2 := "smarf"
outf(ctx, &b, msg, msg2) outf(&b, msg, msg2)
bs := b.String() bs := b.String()
assert.Contains(t, bs, msg) assert.Contains(t, bs, msg)
assert.Contains(t, bs, msg2) assert.Contains(t, bs, msg2)

Some files were not shown because too many files have changed in this diff Show More