Compare commits

..

No commits in common. "main" and "v0.14.0" have entirely different histories.

736 changed files with 35904 additions and 84986 deletions

View File

@ -1,5 +1,4 @@
name: Backup Restore Test name: Backup Restore Test
description: Run various backup/restore/export tests for a service.
inputs: inputs:
service: service:
@ -8,9 +7,6 @@ inputs:
kind: kind:
description: Kind of test description: Kind of test
required: true required: true
backup-id:
description: Backup to retrieve data out of
required: false
backup-args: backup-args:
description: Arguments to pass for backup description: Arguments to pass for backup
required: false required: false
@ -19,27 +15,23 @@ inputs:
description: Arguments to pass for restore; restore is skipped when missing. description: Arguments to pass for restore; restore is skipped when missing.
required: false required: false
default: "" default: ""
export-args: test-folder:
description: Arguments to pass for export.
required: false
default: ""
restore-container:
description: Folder to use for testing description: Folder to use for testing
required: true required: true
base-backup:
description: Base backup to use for testing
required: false
log-dir: log-dir:
description: Folder to store test log files description: Folder to store test log files
required: true required: true
on-collision: on-collision:
description: Value for the --collisions flag description: Value for the --collisions flag
required: false requried: false
default: "replace" default: "replace"
with-export: with-export:
description: Runs export tests when true description: Runs export tests when true
required: false required: false
default: false default: false
category:
description: category of data for given service
required: false
outputs: outputs:
backup-id: backup-id:
@ -57,9 +49,7 @@ runs:
echo Backup ${{ inputs.service }} ${{ inputs.kind }} echo Backup ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------" echo "---------------------------"
set -euo pipefail set -euo pipefail
CATEGORY_SUFFIX="" CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-${{ inputs.service }}-${{inputs.kind }}.log
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-backup-${{inputs.kind }}.log
./corso backup create '${{ inputs.service }}' \ ./corso backup create '${{ inputs.service }}' \
--no-stats --hide-progress --json \ --no-stats --hide-progress --json \
${{ inputs.backup-args }} | ${{ inputs.backup-args }} |
@ -78,9 +68,7 @@ runs:
echo Restore ${{ inputs.service }} ${{ inputs.kind }} echo Restore ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------" echo "---------------------------"
set -euo pipefail set -euo pipefail
CATEGORY_SUFFIX="" CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-restore-${{ inputs.service }}-${{inputs.kind }}.log
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
./corso restore '${{ inputs.service }}' \ ./corso restore '${{ inputs.service }}' \
--no-stats \ --no-stats \
--hide-progress \ --hide-progress \
@ -100,24 +88,20 @@ runs:
shell: bash shell: bash
working-directory: src working-directory: src
env: env:
SANITY_TEST_RESTORE_CONTAINER: ${{ steps.restore.outputs.result }} SANITY_TEST_KIND: restore
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }} SANITY_TEST_FOLDER: ${{ steps.restore.outputs.result }}
SANITY_BACKUP_ID: ${{ inputs.backup-id }} SANITY_TEST_SERVICE: ${{ inputs.service }}
# lists are not restored to a different folder. they get created adjacent to their originals SANITY_TEST_DATA: ${{ inputs.test-folder }}
# hence SANITY_TEST_RESTORE_CONTAINER_PREFIX is necessary to differentiate restored from original SANITY_BASE_BACKUP: ${{ inputs.base-backup }}
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }}
SANITY_TEST_CATEGORY: ${{ inputs.category }}
run: | run: |
echo "---------------------------" echo "---------------------------"
echo Sanity Test Restore ${{ inputs.service }} ${{ inputs.kind }} echo Sanity Test Restore ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------" echo "---------------------------"
CATEGORY_SUFFIX="" CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-validate-${{ inputs.service }}-${{inputs.kind }}.log
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
./sanity-test restore ${{ inputs.service }} ./sanity-test restore ${{ inputs.service }}
- name: Export ${{ inputs.service }} ${{ inputs.kind }} - name: Export ${{ inputs.service }} ${{ inputs.kind }}
if: ${{ inputs.with-export == 'true' }} if: inputs.with-export == true
id: export id: export
shell: bash shell: bash
working-directory: src working-directory: src
@ -126,11 +110,9 @@ runs:
echo Export ${{ inputs.service }} ${{ inputs.kind }} echo Export ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------" echo "---------------------------"
set -euo pipefail set -euo pipefail
CATEGORY_SUFFIX="" CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-restore-${{ inputs.service }}-${{inputs.kind }}.log
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
./corso export '${{ inputs.service }}' \ ./corso export '${{ inputs.service }}' \
/tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }} \ /tmp/export-${{ inputs.service }}-${{inputs.kind }} \
--no-stats \ --no-stats \
--hide-progress \ --hide-progress \
${{ inputs.export-args }} \ ${{ inputs.export-args }} \
@ -139,27 +121,24 @@ runs:
cat /tmp/corsologs cat /tmp/corsologs
- name: Check export ${{ inputs.service }} ${{ inputs.kind }} - name: Check export ${{ inputs.service }} ${{ inputs.kind }}
if: ${{ inputs.with-export == 'true' }} if: inputs.with-export == true
shell: bash shell: bash
working-directory: src working-directory: src
env: env:
SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}${{ inputs.category && '-' }}${{ inputs.category }}-${{ inputs.kind }} SANITY_TEST_KIND: export
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }} SANITY_TEST_FOLDER: /tmp/export-${{ inputs.service }}-${{inputs.kind }}
SANITY_BACKUP_ID: ${{ inputs.backup-id }} SANITY_TEST_SERVICE: ${{ inputs.service }}
# applies only for sharepoint lists SANITY_TEST_DATA: ${{ inputs.test-folder }}
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }} SANITY_BASE_BACKUP: ${{ inputs.base-backup }}
SANITY_TEST_CATEGORY: ${{ inputs.category }}
run: | run: |
echo "---------------------------" echo "---------------------------"
echo Sanity-Test Export ${{ inputs.service }} ${{ inputs.kind }} echo Sanity-Test Export ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------" echo "---------------------------"
CATEGORY_SUFFIX="" CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-validate-${{ inputs.service }}-${{inputs.kind }}.log
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
./sanity-test export ${{ inputs.service }} ./sanity-test export ${{ inputs.service }}
- name: Export archive ${{ inputs.service }} ${{ inputs.kind }} - name: Export archive ${{ inputs.service }} ${{ inputs.kind }}
if: ${{ inputs.with-export == 'true' }} if: inputs.with-export == true
id: export-archive id: export-archive
shell: bash shell: bash
working-directory: src working-directory: src
@ -168,39 +147,34 @@ runs:
echo Export Archive ${{ inputs.service }} ${{ inputs.kind }} echo Export Archive ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------" echo "---------------------------"
set -euo pipefail set -euo pipefail
CATEGORY_SUFFIX="" CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-restore-${{ inputs.service }}-${{inputs.kind }}.log
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-restore-${{inputs.kind }}.log
./corso export '${{ inputs.service }}' \ ./corso export '${{ inputs.service }}' \
/tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-archive \ /tmp/export-${{ inputs.service }}-${{inputs.kind }}-archive \
--no-stats \ --no-stats \
--hide-progress \ --hide-progress \
--archive \ --archive \
${{ inputs.export-args }} \ ${{ inputs.export-args }} \
--backup '${{ steps.backup.outputs.result }}' --backup '${{ steps.backup.outputs.result }}'
unzip /tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-archive/*.zip \ unzip /tmp/export-${{ inputs.service }}-${{inputs.kind }}-archive/*.zip \
-d /tmp/export-${{ inputs.service }}${CATEGORY_SUFFIX}-${{inputs.kind }}-unzipped -d /tmp/export-${{ inputs.service }}-${{inputs.kind }}-unzipped
cat /tmp/corsologs cat /tmp/corsologs
- name: Check archive export ${{ inputs.service }} ${{ inputs.kind }} - name: Check archive export ${{ inputs.service }} ${{ inputs.kind }}
if: ${{ inputs.with-export == 'true' }} if: inputs.with-export == true
shell: bash shell: bash
working-directory: src working-directory: src
env: env:
SANITY_TEST_RESTORE_CONTAINER: /tmp/export-${{ inputs.service }}${{ inputs.category && '-' }}${{ inputs.category }}-${{inputs.kind }}-unzipped SANITY_TEST_KIND: export
SANITY_TEST_SOURCE_CONTAINER: ${{ inputs.restore-container }} SANITY_TEST_FOLDER: /tmp/export-${{ inputs.service }}-${{inputs.kind }}-unzipped
SANITY_BACKUP_ID: ${{ inputs.backup-id }} SANITY_TEST_SERVICE: ${{ inputs.service }}
# applies only for sharepoint lists SANITY_TEST_DATA: ${{ inputs.test-folder }}
SANITY_TEST_RESTORE_CONTAINER_PREFIX: ${{ steps.restore.outputs.result }} SANITY_BASE_BACKUP: ${{ inputs.base-backup }}
SANITY_TEST_CATEGORY: ${{ inputs.category }}
run: | run: |
echo "---------------------------" echo "---------------------------"
echo Sanity-Test Export Archive ${{ inputs.service }} ${{ inputs.kind }} echo Sanity-Test Export Archive ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------" echo "---------------------------"
CATEGORY_SUFFIX="" CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-validate-${{ inputs.service }}-${{inputs.kind }}.log
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-${{ inputs.service }}${CATEGORY_SUFFIX}-validate-${{inputs.kind }}.log
./sanity-test export ${{ inputs.service }} ./sanity-test export ${{ inputs.service }}
- name: List ${{ inputs.service }} ${{ inputs.kind }} - name: List ${{ inputs.service }} ${{ inputs.kind }}
@ -211,9 +185,7 @@ runs:
echo Backup list ${{ inputs.service }} ${{ inputs.kind }} echo Backup list ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------" echo "---------------------------"
set -euo pipefail set -euo pipefail
CATEGORY_SUFFIX="" CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-${{ inputs.service }}-${{inputs.kind }}.log
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-${{ inputs.service }}${CATEGORY_SUFFIX}-list-${{inputs.kind }}.log
./corso backup list ${{ inputs.service }} \ ./corso backup list ${{ inputs.service }} \
--no-stats \ --no-stats \
--hide-progress \ --hide-progress \
@ -234,10 +206,7 @@ runs:
echo Backup List w/ Backup ${{ inputs.service }} ${{ inputs.kind }} echo Backup List w/ Backup ${{ inputs.service }} ${{ inputs.kind }}
echo "---------------------------" echo "---------------------------"
set -euo pipefail set -euo pipefail
# Include category in the log file name if present CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-single-${{ inputs.service }}-${{inputs.kind }}.log
CATEGORY_SUFFIX=""
[[ -n "${{ inputs.category }}" ]] && CATEGORY_SUFFIX="-${{ inputs.category }}"
CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-${{ inputs.service }}${CATEGORY_SUFFIX}-single-${{inputs.kind }}.log
./corso backup list ${{ inputs.service }} \ ./corso backup list ${{ inputs.service }} \
--no-stats \ --no-stats \
--hide-progress \ --hide-progress \
@ -265,4 +234,4 @@ runs:
name: "${{ inputs.service }}-${{ inputs.kind }}-logs" name: "${{ inputs.service }}-${{ inputs.kind }}-logs"
path: ${{ inputs.log-dir }}/* path: ${{ inputs.log-dir }}/*
if-no-files-found: error if-no-files-found: error
retention-days: 14 retention-days: 14

View File

@ -1,5 +1,4 @@
name: Setup and Cache Golang name: Setup and Cache Golang
description: Build golang binaries for later use in CI.
# clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml # clone of: https://github.com/magnetikonline/action-golang-cache/blob/main/action.yaml
# #

View File

@ -1,5 +1,4 @@
name: Publish Binary name: Publish Binary
description: Publish binary artifacts.
inputs: inputs:
version: version:
@ -14,6 +13,9 @@ inputs:
rudderstack_data_plane_url: rudderstack_data_plane_url:
description: Data plane URL for RudderStack description: Data plane URL for RudderStack
required: true required: true
slack_webhook_url:
description: Slack webhook url
required: true
runs: runs:
using: composite using: composite
@ -74,3 +76,35 @@ runs:
with: with:
name: corso_Windows_amd64 name: corso_Windows_amd64
path: src/dist/corso_windows_amd64_v1/corso.exe path: src/dist/corso_windows_amd64_v1/corso.exe
- name: SHA info
shell: bash
id: sha-info
if: failure()
run: |
echo ${GITHUB_REF#refs/heads/}-${GITHUB_SHA}
echo SHA=${GITHUB_REF#refs/heads/}-${GITHUB_SHA} >> $GITHUB_OUTPUT
echo RUN_URL=${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} >> $GITHUB_OUTPUT
echo COMMIT_URL=${{ github.server_url }}/${{ github.repository }}/commit/${GITHUB_SHA} >> $GITHUB_OUTPUT
- name: Send Github Action failure to Slack
id: slack-notification
if: failure()
uses: slackapi/slack-github-action@v1.24.0
with:
payload: |
{
"text": "Publish failure - build: ${{ job.status }} - SHA: ${{ steps.sha-info.outputs.SHA }}",
"blocks": [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "[FAILED] Publishing Binary :: <${{ steps.sha-info.outputs.RUN_URL }}|[Logs]> <${{ steps.sha-info.outputs.COMMIT_URL }}|[Base]>\nCommit: <${{ steps.sha-info.outputs.COMMIT_URL }}|${{ steps.sha-info.outputs.SHA }}>"
}
}
]
}
env:
SLACK_WEBHOOK_URL: ${{ inputs.slack_webhook_url }}
SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK

View File

@ -1,5 +1,4 @@
name: Publish Website name: Publish Website
description: Publish website artifacts.
inputs: inputs:
aws-iam-role: aws-iam-role:

View File

@ -1,5 +1,4 @@
name: Purge M365 User Data name: Purge M365 User Data
description: Deletes M365 data generated during CI tests.
# Hard deletion of an m365 user's data. Our CI processes create a lot # Hard deletion of an m365 user's data. Our CI processes create a lot
# of data churn (creation and immediate deletion) of files, the likes # of data churn (creation and immediate deletion) of files, the likes
@ -31,19 +30,12 @@ inputs:
description: Secret value of for AZURE_CLIENT_ID description: Secret value of for AZURE_CLIENT_ID
azure-client-secret: azure-client-secret:
description: Secret value of for AZURE_CLIENT_SECRET description: Secret value of for AZURE_CLIENT_SECRET
azure-pnp-client-id:
description: Secret value of AZURE_PNP_CLIENT_ID
azure-pnp-client-cert:
description: Base64 encoded private certificate for the azure-pnp-client-id (Secret value of AZURE_PNP_CLIENT_CERT)
azure-tenant-id: azure-tenant-id:
description: Secret value of AZURE_TENANT_ID description: Secret value of for AZURE_TENANT_ID
m365-admin-user: m365-admin-user:
description: Secret value of for M365_TENANT_ADMIN_USER description: Secret value of for M365_TENANT_ADMIN_USER
m365-admin-password: m365-admin-password:
description: Secret value of for M365_TENANT_ADMIN_PASSWORD description: Secret value of for M365_TENANT_ADMIN_PASSWORD
tenant-domain:
description: The domain of the tenant (ex. 10rqc2.onmicrosft.com)
required: true
runs: runs:
using: composite using: composite
@ -61,13 +53,7 @@ runs:
AZURE_CLIENT_ID: ${{ inputs.azure-client-id }} AZURE_CLIENT_ID: ${{ inputs.azure-client-id }}
AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }} AZURE_CLIENT_SECRET: ${{ inputs.azure-client-secret }}
AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }} AZURE_TENANT_ID: ${{ inputs.azure-tenant-id }}
run: | run: ./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./exchangePurge.ps1 -User ${{ inputs.user }} -FolderNamePurgeList PersonMetadata -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
# TODO(ashmrtn): Re-enable when we figure out errors we're seeing with Get-Mailbox call. # TODO(ashmrtn): Re-enable when we figure out errors we're seeing with Get-Mailbox call.
#- name: Reset retention for all mailboxes to 0 #- name: Reset retention for all mailboxes to 0
@ -88,16 +74,10 @@ runs:
shell: pwsh shell: pwsh
working-directory: ./src/cmd/purge/scripts working-directory: ./src/cmd/purge/scripts
env: env:
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }} M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }} M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
TENANT_DOMAIN: ${{ inputs.tenant-domain }}
run: | run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++) ./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}
{
if (./onedrivePurge.ps1 -User ${{ inputs.user }} -FolderPrefixPurgeList "${{ inputs.folder-prefix }}".Split(",") -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}
################################################################################################################ ################################################################################################################
# Sharepoint # Sharepoint
@ -108,14 +88,6 @@ runs:
shell: pwsh shell: pwsh
working-directory: ./src/cmd/purge/scripts working-directory: ./src/cmd/purge/scripts
env: env:
AZURE_CLIENT_ID: ${{ inputs.azure-pnp-client-id }} M365_TENANT_ADMIN_USER: ${{ inputs.m365-admin-user }}
AZURE_APP_CERT: ${{ inputs.azure-pnp-client-cert }} M365_TENANT_ADMIN_PASSWORD: ${{ inputs.m365-admin-password }}
TENANT_DOMAIN: ${{ inputs.tenant-domain }} run: ./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}
run: |
for ($ATTEMPT_NUM = 1; $ATTEMPT_NUM -le 3; $ATTEMPT_NUM++)
{
if (./onedrivePurge.ps1 -Site ${{ inputs.site }} -LibraryNameList "${{ inputs.libraries }}".split(",") -FolderPrefixPurgeList ${{ inputs.folder-prefix }} -LibraryPrefixDeleteList ${{ inputs.library-prefix && inputs.library-prefix || '[]' }} -PurgeBeforeTimestamp ${{ inputs.older-than }}) {
break
}
}

View File

@ -0,0 +1,58 @@
name: Send a message to slack
inputs:
msg:
description: The slack message text
slack_url:
description: passthrough for secrets.SLACK_WEBHOOK_URL
runs:
using: composite
steps:
- uses: actions/checkout@v3
- name: set github ref
shell: bash
run: |
echo "github_reference=${{ github.ref }}" >> $GITHUB_ENV
- name: trim github ref
shell: bash
run: |
echo "trimmed_ref=${github_reference#refs/}" >> $GITHUB_ENV
- name: build urls
shell: bash
run: |
echo "logurl=$(printf '<https://github.com/alcionai/corso/actions/runs/%s|[Action]>' ${{ github.run_id }})" >> $GITHUB_ENV
echo "commiturl=$(printf '<https://github.com/alcionai/corso/commit/%s|[Commit]>' ${{ github.sha }})" >> $GITHUB_ENV
echo "refurl=$(printf '<https://github.com/alcionai/corso/%s|[Ref]>' ${{ env.trimmed_ref }})" >> $GITHUB_ENV
- name: use url or blank val
shell: bash
run: |
echo "STEP=${{ env.trimmed_ref || '' }}" >> $GITHUB_ENV
echo "JOB=${{ github.job || '' }}" >> $GITHUB_ENV
echo "LOGS=${{ github.run_id && env.logurl || '-' }}" >> $GITHUB_ENV
echo "COMMIT=${{ github.sha && env.commiturl || '-' }}" >> $GITHUB_ENV
echo "REF=${{ env.trimmed_ref && env.refurl || '-' }}" >> $GITHUB_ENV
- id: slack-message
uses: slackapi/slack-github-action@v1.24.0
env:
SLACK_WEBHOOK_URL: ${{ inputs.slack_url }}
SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK
with:
payload: |
{
"text": "${{ inputs.msg }} :: ${{ env.LOGS }} ${{ env.COMMIT }} ${{ env.REF }}",
"blocks": [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "${{ inputs.msg }}\n${{ env.JOB }} :: ${{ env.STEP }}\n${{ env.LOGS }} ${{ env.COMMIT }} ${{ env.REF }}"
}
}
]
}

View File

@ -1,92 +0,0 @@
name: Send a message to Teams
description: Send messages to communication apps.
inputs:
msg:
description: The teams message text
teams_url:
description: passthrough for secrets.TEAMS_CORSO_CI_WEBHOOK_URL
runs:
using: composite
steps:
- uses: actions/checkout@v3
- name: set github ref
shell: bash
run: |
echo "github_reference=${{ github.ref }}" >> $GITHUB_ENV
- name: trim github ref
shell: bash
run: |
echo "trimmed_ref=${github_reference#refs/}" >> $GITHUB_ENV
- name: build urls
shell: bash
run: |
echo "logurl=$(printf 'https://github.com/alcionai/corso/actions/runs/%s' ${{ github.run_id }})" >> $GITHUB_ENV
echo "commiturl=$(printf 'https://github.com/alcionai/corso/commit/%s' ${{ github.sha }})" >> $GITHUB_ENV
echo "refurl=$(printf 'https://github.com/alcionai/corso/%s' ${{ env.trimmed_ref }})" >> $GITHUB_ENV
- name: use url or blank val
shell: bash
run: |
echo "STEP=${{ env.trimmed_ref || '' }}" >> $GITHUB_ENV
echo "JOB=${{ github.job || '' }}" >> $GITHUB_ENV
echo "LOGS=${{ github.run_id && env.logurl || '-' }}" >> $GITHUB_ENV
echo "COMMIT=${{ github.sha && env.commiturl || '-' }}" >> $GITHUB_ENV
echo "REF=${{ env.trimmed_ref && env.refurl || '-' }}" >> $GITHUB_ENV
- name: Send JSON payload to Teams Webhook
shell: bash
run: |
curl -X POST \
-H "Content-Type: application/json" \
-d '{
"type":"message",
"attachments":[
{
"contentType":"application/vnd.microsoft.card.adaptive",
"contentUrl":null,
"content":{
"$schema":"http://adaptivecards.io/schemas/adaptive-card.json",
"type":"AdaptiveCard",
"body": [
{
"type": "TextBlock",
"size": "Medium",
"weight": "Bolder",
"text": "${{ inputs.msg }}",
"color": "Attention"
},
{
"type": "TextBlock",
"text": "${{ env.JOB }} :: ${{ env.STEP }}",
"wrap": true
}
],
"actions": [
{
"type": "Action.OpenUrl",
"title": "Action",
"url": "${{ env.LOGS }}"
},
{
"type": "Action.OpenUrl",
"title": "Commit",
"url": "${{ env.COMMIT }}"
},
{
"type": "Action.OpenUrl",
"title": "Ref",
"url": "${{ env.REF }}"
}
],
"$schema": "http://adaptivecards.io/schemas/adaptive-card.json",
"version": "1.5"
}
}
]
}' \
${{ inputs.teams_url }}

View File

@ -1,5 +1,4 @@
name: Lint Website name: Lint Website
description: Lint website content.
inputs: inputs:
version: version:

View File

@ -28,7 +28,7 @@ jobs:
# only run CI tests if the src folder or workflow actions have changed # only run CI tests if the src folder or workflow actions have changed
- name: Check for file changes in src/ or .github/workflows/ - name: Check for file changes in src/ or .github/workflows/
uses: dorny/paths-filter@v3 uses: dorny/paths-filter@v2
id: dornycheck id: dornycheck
with: with:
list-files: json list-files: json

View File

@ -35,10 +35,4 @@ jobs:
github_token: ${{ secrets.GITHUB_TOKEN }} github_token: ${{ secrets.GITHUB_TOKEN }}
rudderstack_write_key: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }} rudderstack_write_key: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}
rudderstack_data_plane_url: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }} rudderstack_data_plane_url: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}
slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
- name: Notify failure in teams
if: failure()
uses: ./.github/actions/teams-message
with:
msg: "[CORSO FAILED] Publishing Binary"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -110,7 +110,6 @@ jobs:
needs: [Precheck, Checkout, SetEnv] needs: [Precheck, Checkout, SetEnv]
environment: Testing environment: Testing
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 30
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.websitefileschanged == 'true' # websitefileschanged also includes srcfileschanged if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.websitefileschanged == 'true' # websitefileschanged also includes srcfileschanged
steps: steps:
@ -129,7 +128,6 @@ jobs:
needs: [Precheck, Checkout, SetM365App] needs: [Precheck, Checkout, SetM365App]
environment: Testing environment: Testing
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 120
if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository) if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository)
defaults: defaults:
run: run:
@ -174,6 +172,7 @@ jobs:
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }} CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }} CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
S3_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }} S3_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }}
CORSO_ENABLE_GROUPS: true
run: | run: |
set -euo pipefail set -euo pipefail
go test \ go test \
@ -189,7 +188,7 @@ jobs:
# Upload the original go test output as an artifact for later review. # Upload the original go test output as an artifact for later review.
- name: Upload test log - name: Upload test log
if: failure() if: failure()
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
with: with:
name: ci-test-log name: ci-test-log
path: src/testlog/* path: src/testlog/*
@ -200,7 +199,6 @@ jobs:
needs: [Precheck, Checkout, SetM365App] needs: [Precheck, Checkout, SetM365App]
environment: Testing environment: Testing
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 30
if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository) if: (startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main') || (needs.precheck.outputs.srcfileschanged == 'true' && github.event.pull_request.head.repo.full_name == github.repository)
defaults: defaults:
run: run:
@ -245,6 +243,7 @@ jobs:
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }} CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }} CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
S3_BUCKET: ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} S3_BUCKET: ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }}
CORSO_ENABLE_GROUPS: true
run: | run: |
set -euo pipefail set -euo pipefail
go test \ go test \
@ -260,7 +259,7 @@ jobs:
# Upload the original go test output as an artifact for later review. # Upload the original go test output as an artifact for later review.
- name: Upload test log - name: Upload test log
if: failure() if: failure()
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
with: with:
name: ci-retention-test-log name: ci-retention-test-log
path: src/testlog/* path: src/testlog/*
@ -271,7 +270,6 @@ jobs:
needs: [Precheck, Checkout] needs: [Precheck, Checkout]
environment: Testing environment: Testing
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 30
if: needs.precheck.outputs.srcfileschanged == 'true' if: needs.precheck.outputs.srcfileschanged == 'true'
defaults: defaults:
run: run:
@ -279,6 +277,7 @@ jobs:
env: env:
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-unit.log CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-unit.log
LOG_GRAPH_REQUESTS: true LOG_GRAPH_REQUESTS: true
CORSO_ENABLE_GROUPS: true
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@ -315,7 +314,7 @@ jobs:
# Upload the original go test output as an artifact for later review. # Upload the original go test output as an artifact for later review.
- name: Upload test log - name: Upload test log
if: failure() if: failure()
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
with: with:
name: unit-test-log name: unit-test-log
path: src/testlog/* path: src/testlog/*
@ -333,6 +332,7 @@ jobs:
env: env:
CORSO_LOG_FILE: ${{ github.workspace }}/testlog/run-fork.log CORSO_LOG_FILE: ${{ github.workspace }}/testlog/run-fork.log
LOG_GRAPH_REQUESTS: true LOG_GRAPH_REQUESTS: true
CORSO_ENABLE_GROUPS: true
steps: steps:
- name: Fail check if not repository_dispatch - name: Fail check if not repository_dispatch
if: github.event_name != 'repository_dispatch' if: github.event_name != 'repository_dispatch'
@ -404,7 +404,7 @@ jobs:
# Upload the original go test log as an artifact for later review. # Upload the original go test log as an artifact for later review.
- name: Upload test log - name: Upload test log
if: failure() if: failure()
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
with: with:
name: fork-test-log name: fork-test-log
path: src/testlog/* path: src/testlog/*
@ -412,7 +412,7 @@ jobs:
retention-days: 14 retention-days: 14
# Update check run called "Test-Suite-Fork" # Update check run called "Test-Suite-Fork"
- uses: actions/github-script@v7 - uses: actions/github-script@v6
id: update-check-run id: update-check-run
if: failure() if: failure()
env: env:
@ -449,7 +449,6 @@ jobs:
needs: [Precheck, Checkout] needs: [Precheck, Checkout]
environment: Testing environment: Testing
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 30
if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.srcfileschanged == 'true' if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' || needs.precheck.outputs.srcfileschanged == 'true'
defaults: defaults:
run: run:
@ -463,7 +462,7 @@ jobs:
go-version-file: src/go.mod go-version-file: src/go.mod
- name: Go Lint - name: Go Lint
uses: golangci/golangci-lint-action@v4 uses: golangci/golangci-lint-action@v3
with: with:
# Keep pinned to a verson as sometimes updates will add new lint # Keep pinned to a verson as sometimes updates will add new lint
# failures in unchanged code. # failures in unchanged code.
@ -492,8 +491,8 @@ jobs:
# I could not find a way to install tree-grepper without nix # I could not find a way to install tree-grepper without nix
# https://github.com/BrianHicks/tree-grepper/issues/293 # https://github.com/BrianHicks/tree-grepper/issues/293
- uses: cachix/install-nix-action@v25 - uses: cachix/install-nix-action@v23
- uses: cachix/cachix-action@v14 - uses: cachix/cachix-action@v12
with: with:
name: tree-grepper name: tree-grepper
- run: nix-env -if https://github.com/BrianHicks/tree-grepper/archive/refs/heads/main.tar.gz - run: nix-env -if https://github.com/BrianHicks/tree-grepper/archive/refs/heads/main.tar.gz
@ -511,27 +510,6 @@ jobs:
echo "Use len check instead of empty string comparison" echo "Use len check instead of empty string comparison"
exit 1 exit 1
fi fi
- name: Check for cases where errors are not propagated
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '((if_statement (binary_expression) @_if (block (return_statement (expression_list (call_expression (selector_expression) @_fun ) @ret .)))) (#match? @_if "err != nil") (#match? @_fun "clues.NewWC"))' | grep .; then
echo "Make sure to propagate errors with clues"
exit 1
fi
- name: Check if clues without context are used when context is passed in
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#match? @clue "WC$"))' | grep .; then
echo "Do not use clues.*WC when context is passed in"
exit 1
fi
- name: Check clues with context is used when context is not passed in
run: |
# Using `grep .` as the exit codes are always true for correct grammar
if tree-grepper -q go '((function_declaration (parameter_list . (parameter_declaration (identifier) @_octx)) body: (block (short_var_declaration left: (expression_list (identifier) @_err . ) right: (expression_list (call_expression (argument_list . (identifier) @_ctx)))) . (if_statement (binary_expression) @_exp consequence: (block (return_statement (expression_list (call_expression (selector_expression (call_expression (selector_expression) @clue))) . )))))) (#eq? @_err "err") (#eq? @_octx "ctx") (#not-eq? @_ctx "ctx") (#eq? @_exp "err != nil") (#match? @clue "^clues\.") (#not-match? @clue "WC$"))' | grep .; then
echo "Use clues.*WC when context is not passed in"
exit 1
fi
# ---------------------------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------------------------
# --- GitHub Actions Linting ------------------------------------------------------------------------- # --- GitHub Actions Linting -------------------------------------------------------------------------
@ -577,6 +555,7 @@ jobs:
github_token: ${{ secrets.GITHUB_TOKEN }} github_token: ${{ secrets.GITHUB_TOKEN }}
rudderstack_write_key: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }} rudderstack_write_key: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}
rudderstack_data_plane_url: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }} rudderstack_data_plane_url: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}
slack_webhook_url: ${{ secrets.SLACK_WEBHOOK_URL }}
Publish-Image: Publish-Image:
needs: [Test-Suite-Trusted, Source-Code-Linting, Website-Linting, SetEnv] needs: [Test-Suite-Trusted, Source-Code-Linting, Website-Linting, SetEnv]

View File

@ -12,7 +12,7 @@ jobs:
continue-on-error: true continue-on-error: true
strategy: strategy:
matrix: matrix:
user: [CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, ""] user: [ CORSO_M365_TEST_USER_ID, CORSO_SECONDARY_M365_TEST_USER_ID, '' ]
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@ -33,16 +33,13 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }} azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }} m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }} m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Notify failure in teams - name: Notify failure in slack
if: failure() if: failure()
uses: ./.github/actions/teams-message uses: ./.github/actions/slack-message
with: with:
msg: "[CORSO FAILED] ${{ vars[matrix.user] }} CI Cleanup" msg: "[FAILED] ${{ vars[matrix.user] }} CI Cleanup"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }} slack_url: ${{ secrets.SLACK_WEBHOOK_URL }}
Test-Site-Data-Cleanup: Test-Site-Data-Cleanup:
environment: Testing environment: Testing
@ -50,7 +47,7 @@ jobs:
continue-on-error: true continue-on-error: true
strategy: strategy:
matrix: matrix:
site: [CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL] site: [ CORSO_M365_TEST_SITE_URL, CORSO_M365_TEST_GROUPS_SITE_URL ]
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@ -73,13 +70,10 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }} azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }} m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }} m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Notify failure in teams - name: Notify failure in slack
if: failure() if: failure()
uses: ./.github/actions/teams-message uses: ./.github/actions/slack-message
with: with:
msg: "[CORSO FAILED] ${{ vars[matrix.site] }} CI Cleanup" msg: "[FAILED] ${{ vars[matrix.site] }} CI Cleanup"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }} slack_url: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -107,7 +107,7 @@ jobs:
# package all artifacts for later review # package all artifacts for later review
- name: Upload Log, Profilers, Traces - name: Upload Log, Profilers, Traces
if: always() if: always()
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
with: with:
name: load-test-profiling name: load-test-profiling
path: ${{ github.workspace }}/testlog/* path: ${{ github.workspace }}/testlog/*
@ -155,6 +155,3 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }} azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }} m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }} m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}

View File

@ -6,7 +6,7 @@ on:
workflow_dispatch: workflow_dispatch:
inputs: inputs:
user: user:
description: "User to run longevity test on" description: 'User to run longevity test on'
permissions: permissions:
# required to retrieve AWS credentials # required to retrieve AWS credentials
@ -23,7 +23,7 @@ jobs:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main uses: alcionai/corso/.github/workflows/accSelector.yaml@main
Longevity-Tests: Longevity-Tests:
needs: [SetM365App] needs: [ SetM365App ]
environment: Testing environment: Testing
runs-on: ubuntu-latest runs-on: ubuntu-latest
env: env:
@ -36,8 +36,8 @@ jobs:
CORSO_LOG_DIR: ${{ github.workspace }}/src/testlog CORSO_LOG_DIR: ${{ github.workspace }}/src/testlog
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-longevity.log CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-longevity.log
RESTORE_DEST_PFX: Corso_Test_Longevity_ RESTORE_DEST_PFX: Corso_Test_Longevity_
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }} TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || secrets.CORSO_M365_TEST_USER_ID }}
PREFIX: "longevity" PREFIX: 'longevity'
# Options for retention. # Options for retention.
RETENTION_MODE: GOVERNANCE RETENTION_MODE: GOVERNANCE
@ -46,7 +46,7 @@ jobs:
defaults: defaults:
run: run:
working-directory: src working-directory: src
############################################################################ ############################################################################
# setup # setup
steps: steps:
@ -77,15 +77,13 @@ jobs:
git checkout ${{ github.ref }} -- .github git checkout ${{ github.ref }} -- .github
- run: go build -o corso - run: go build -o corso
timeout-minutes: 10
- run: mkdir ${CORSO_LOG_DIR} - run: mkdir ${CORSO_LOG_DIR}
# Use shorter-lived credentials obtained from assume-role since these # Use shorter-lived credentials obtained from assume-role since these
# runs haven't been taking long. # runs haven't been taking long.
- name: Configure AWS credentials from Test account - name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v4 uses: aws-actions/configure-aws-credentials@v4
timeout-minutes: 10
with: with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }} role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing role-session-name: integration-testing
@ -95,13 +93,11 @@ jobs:
# Repository commands # Repository commands
- name: Version Test - name: Version Test
timeout-minutes: 10
run: | run: |
./corso --version | grep -c 'Corso version:' ./corso --version | grep -c 'Corso version:'
- name: Repo init test - name: Repo init test
id: repo-init id: repo-init
timeout-minutes: 10
run: | run: |
set -euo pipefail set -euo pipefail
echo -e "\nRepo init test\n" >> ${{ env.CORSO_LOG_FILE }} echo -e "\nRepo init test\n" >> ${{ env.CORSO_LOG_FILE }}
@ -113,6 +109,7 @@ jobs:
--extend-retention \ --extend-retention \
--prefix ${{ env.PREFIX }} \ --prefix ${{ env.PREFIX }} \
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \ --bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
--succeed-if-exists \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log 2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
if grep -q 'Failed to' ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log if grep -q 'Failed to' ${{ env.CORSO_LOG_DIR }}/gotest-repo-init.log
@ -122,7 +119,6 @@ jobs:
fi fi
- name: Repo connect test - name: Repo connect test
timeout-minutes: 10
run: | run: |
set -euo pipefail set -euo pipefail
echo -e "\nRepo connect test\n" >> ${{ env.CORSO_LOG_FILE }} echo -e "\nRepo connect test\n" >> ${{ env.CORSO_LOG_FILE }}
@ -144,7 +140,6 @@ jobs:
- name: Backup exchange test - name: Backup exchange test
id: exchange-test id: exchange-test
timeout-minutes: 30
run: | run: |
echo -e "\nBackup Exchange test\n" >> ${CORSO_LOG_FILE} echo -e "\nBackup Exchange test\n" >> ${CORSO_LOG_FILE}
./corso backup create exchange \ ./corso backup create exchange \
@ -163,13 +158,12 @@ jobs:
data=$( echo $resultjson | jq -r '.[0] | .id' ) data=$( echo $resultjson | jq -r '.[0] | .id' )
echo result=$data >> $GITHUB_OUTPUT echo result=$data >> $GITHUB_OUTPUT
########################################################################## ##########################################################################
# Onedrive # Onedrive
- name: Backup onedrive test - name: Backup onedrive test
id: onedrive-test id: onedrive-test
timeout-minutes: 30
run: | run: |
set -euo pipefail set -euo pipefail
echo -e "\nBackup OneDrive test\n" >> ${CORSO_LOG_FILE} echo -e "\nBackup OneDrive test\n" >> ${CORSO_LOG_FILE}
@ -194,7 +188,6 @@ jobs:
# Sharepoint test # Sharepoint test
- name: Backup sharepoint test - name: Backup sharepoint test
id: sharepoint-test id: sharepoint-test
timeout-minutes: 30
run: | run: |
set -euo pipefail set -euo pipefail
echo -e "\nBackup SharePoint test\n" >> ${CORSO_LOG_FILE} echo -e "\nBackup SharePoint test\n" >> ${CORSO_LOG_FILE}
@ -202,7 +195,7 @@ jobs:
./corso backup create sharepoint \ ./corso backup create sharepoint \
--no-stats \ --no-stats \
--hide-progress \ --hide-progress \
--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" \ --site "${{ secrets.CORSO_M365_TEST_SITE_URL }}" \
--json \ --json \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/backup_sharepoint.txt 2>&1 | tee ${{ env.CORSO_LOG_DIR }}/backup_sharepoint.txt
@ -220,7 +213,6 @@ jobs:
# Backup Exchange Deletion test # Backup Exchange Deletion test
- name: Backup Delete exchange test - name: Backup Delete exchange test
id: delete-exchange-test id: delete-exchange-test
timeout-minutes: 30
env: env:
SERVICE: "exchange" SERVICE: "exchange"
DELETION_DAYS: 10 DELETION_DAYS: 10
@ -233,7 +225,6 @@ jobs:
# Backup Onedrive Deletion test # Backup Onedrive Deletion test
- name: Backup Delete onedrive test - name: Backup Delete onedrive test
id: delete-onedrive-test id: delete-onedrive-test
timeout-minutes: 30
env: env:
SERVICE: "onedrive" SERVICE: "onedrive"
DELETION_DAYS: 10 DELETION_DAYS: 10
@ -246,7 +237,6 @@ jobs:
# Backup Sharepoint Deletion test # Backup Sharepoint Deletion test
- name: Backup Delete Sharepoint test - name: Backup Delete Sharepoint test
id: delete-sharepoint-test id: delete-sharepoint-test
timeout-minutes: 30
env: env:
SERVICE: "sharepoint" SERVICE: "sharepoint"
DELETION_DAYS: 5 DELETION_DAYS: 5
@ -258,7 +248,6 @@ jobs:
########################################################################## ##########################################################################
# Export OneDrive Test # Export OneDrive Test
- name: OneDrive Export test - name: OneDrive Export test
timeout-minutes: 30
run: | run: |
set -euo pipefail set -euo pipefail
echo -e "\Export OneDrive test\n" >> ${CORSO_LOG_FILE} echo -e "\Export OneDrive test\n" >> ${CORSO_LOG_FILE}
@ -286,7 +275,6 @@ jobs:
########################################################################## ##########################################################################
# Export SharePoint Test # Export SharePoint Test
- name: SharePoint Export test - name: SharePoint Export test
timeout-minutes: 30
run: | run: |
set -euo pipefail set -euo pipefail
echo -e "\Export SharePoint test\n" >> ${CORSO_LOG_FILE} echo -e "\Export SharePoint test\n" >> ${CORSO_LOG_FILE}
@ -315,7 +303,6 @@ jobs:
# Maintenance test # Maintenance test
- name: Maintenance test Daily - name: Maintenance test Daily
id: maintenance-test-daily id: maintenance-test-daily
timeout-minutes: 30
run: | run: |
set -euo pipefail set -euo pipefail
echo -e "\n Maintenance test Daily\n" >> ${CORSO_LOG_FILE} echo -e "\n Maintenance test Daily\n" >> ${CORSO_LOG_FILE}
@ -328,11 +315,10 @@ jobs:
--hide-progress \ --hide-progress \
--force \ --force \
--json \ --json \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt 2>&1 | tee ${{ env.CORSO_LOG_DIR }}/maintenance_metadata.txt
- name: Maintenance test Weekly - name: Maintenance test Weekly
id: maintenance-test-weekly id: maintenance-test-weekly
timeout-minutes: 30
run: | run: |
if [[ $(date +%A) == "Saturday" ]]; then if [[ $(date +%A) == "Saturday" ]]; then
set -euo pipefail set -euo pipefail
@ -381,16 +367,16 @@ jobs:
# Upload the original go test output as an artifact for later review. # Upload the original go test output as an artifact for later review.
- name: Upload test log - name: Upload test log
if: always() if: always()
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
with: with:
name: longevity-test-log name: longevity-test-log
path: src/testlog/* path: src/testlog/*
if-no-files-found: error if-no-files-found: error
retention-days: 14 retention-days: 14
- name: Notify failure in teams - name: Notify failure in slack
if: failure() if: failure()
uses: ./.github/actions/teams-message uses: ./.github/actions/slack-message
with: with:
msg: "[CORSO FAILED] Longevity Test" msg: "[FAILED] Longevity Test"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }} slack_url: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -48,7 +48,7 @@ jobs:
# ---------------------------------------------------------------------------------------------------- # ----------------------------------------------------------------------------------------------------
Test-Suite-Trusted: Test-Suite-Trusted:
needs: [Checkout, SetM365App] needs: [ Checkout, SetM365App]
environment: Testing environment: Testing
runs-on: ubuntu-latest runs-on: ubuntu-latest
defaults: defaults:
@ -59,6 +59,7 @@ jobs:
AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }} AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }}
AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }} AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }} CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
CORSO_ENABLE_GROUPS: true
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@ -100,23 +101,23 @@ jobs:
-timeout 2h \ -timeout 2h \
./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests ./... 2>&1 | tee ./testlog/gotest-nightly.log | gotestfmt -hide successful-tests
########################################################################################################################################## ##########################################################################################################################################
# Logging & Notifications # Logging & Notifications
# Upload the original go test output as an artifact for later review. # Upload the original go test output as an artifact for later review.
- name: Upload test log - name: Upload test log
if: always() if: always()
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
with: with:
name: nightly-test-log name: nightly-test-log
path: src/testlog/* path: src/testlog/*
if-no-files-found: error if-no-files-found: error
retention-days: 14 retention-days: 14
- name: Notify failure in teams - name: Notify failure in slack
if: failure() if: failure()
uses: ./.github/actions/teams-message uses: ./.github/actions/slack-message
with: with:
msg: "[COROS FAILED] Nightly Checks" msg: "[FAILED] Nightly Checks"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }} slack_url: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -19,7 +19,7 @@ jobs:
private_key: ${{ secrets.PRIVATE_KEY }} private_key: ${{ secrets.PRIVATE_KEY }}
- name: Slash Command Dispatch - name: Slash Command Dispatch
uses: peter-evans/slash-command-dispatch@v4 uses: peter-evans/slash-command-dispatch@v3
env: env:
TOKEN: ${{ steps.generate_token.outputs.token }} TOKEN: ${{ steps.generate_token.outputs.token }}
with: with:

View File

@ -6,7 +6,7 @@ on:
workflow_dispatch: workflow_dispatch:
inputs: inputs:
user: user:
description: "User to run sanity test on" description: 'User to run sanity test on'
permissions: permissions:
# required to retrieve AWS credentials # required to retrieve AWS credentials
@ -23,7 +23,7 @@ jobs:
uses: alcionai/corso/.github/workflows/accSelector.yaml@main uses: alcionai/corso/.github/workflows/accSelector.yaml@main
Sanity-Tests: Sanity-Tests:
needs: [SetM365App] needs: [ SetM365App ]
environment: Testing environment: Testing
runs-on: ubuntu-latest runs-on: ubuntu-latest
env: env:
@ -38,16 +38,18 @@ jobs:
CORSO_LOG_DIR: ${{ github.workspace }}/src/testlog CORSO_LOG_DIR: ${{ github.workspace }}/src/testlog
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-sanity.log CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-sanity.log
RESTORE_DEST_PFX: Corso_Test_Sanity_ RESTORE_DEST_PFX: Corso_Test_Sanity_
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || vars.CORSO_M365_TEST_USER_ID }} TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || secrets.CORSO_M365_TEST_USER_ID }}
CORSO_ENABLE_GROUPS: true
defaults: defaults:
run: run:
working-directory: src working-directory: src
##########################################################################################################################################
########################################################################################################################################## # setup
# setup
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Setup Golang with cache - name: Setup Golang with cache
@ -56,16 +58,13 @@ jobs:
go-version-file: src/go.mod go-version-file: src/go.mod
- run: go build -o corso - run: go build -o corso
timeout-minutes: 10
- run: go build -o sanity-test ./cmd/sanity_test - run: go build -o sanity-test ./cmd/sanity_test
timeout-minutes: 10
- run: mkdir ${CORSO_LOG_DIR} - run: mkdir ${CORSO_LOG_DIR}
########################################################################################################################################## ##########################################################################################################################################
# Pre-Run cleanup # Pre-Run cleanup
# unlike CI tests, sanity tests are not expected to run concurrently. # unlike CI tests, sanity tests are not expected to run concurrently.
# however, the sanity yaml concurrency is set to a maximum of 1 run, preferring # however, the sanity yaml concurrency is set to a maximum of 1 run, preferring
@ -79,7 +78,6 @@ jobs:
echo "NOW=$(date +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV echo "NOW=$(date +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
- name: Purge CI-Produced Folders for Users - name: Purge CI-Produced Folders for Users
timeout-minutes: 30
uses: ./.github/actions/purge-m365-data uses: ./.github/actions/purge-m365-data
with: with:
user: ${{ env.TEST_USER }} user: ${{ env.TEST_USER }}
@ -90,39 +88,30 @@ jobs:
azure-tenant-id: ${{ secrets.TENANT_ID }} azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }} m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }} m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
- name: Purge CI-Produced Folders for Sites - name: Purge CI-Produced Folders for Sites
timeout-minutes: 30
if: always() if: always()
uses: ./.github/actions/purge-m365-data uses: ./.github/actions/purge-m365-data
with: with:
site: ${{ vars.CORSO_M365_TEST_SITE_URL }} site: ${{ secrets.CORSO_M365_TEST_SITE_URL }}
folder-prefix: ${{ env.RESTORE_DEST_PFX }} folder-prefix: ${{ env.RESTORE_DEST_PFX }}
libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }} libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
older-than: ${{ env.NOW }} older-than: ${{ env.NOW }}
azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }} azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }} azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
azure-tenant-id: ${{ secrets.TENANT_ID }} azure-tenant-id: ${{ secrets.TENANT_ID }}
m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }} m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }} m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
azure-pnp-client-id: ${{ secrets.AZURE_PNP_CLIENT_ID }}
azure-pnp-client-cert: ${{ secrets.AZURE_PNP_CLIENT_CERT }}
tenant-domain: ${{ vars.TENANT_DOMAIN }}
########################################################################################################################################## ##########################################################################################################################################
# Repository commands # Repository commands
- name: Version Test - name: Version Test
timeout-minutes: 10
run: | run: |
./corso --version | grep -c 'Corso version:' ./corso --version | grep -c 'Corso version:'
- name: Repo init test - name: Repo init test
timeout-minutes: 10
id: repo-init id: repo-init
run: | run: |
set -euo pipefail set -euo pipefail
@ -144,7 +133,6 @@ jobs:
echo result="$prefix" >> $GITHUB_OUTPUT echo result="$prefix" >> $GITHUB_OUTPUT
- name: Repo connect test - name: Repo connect test
timeout-minutes: 10
run: | run: |
set -euo pipefail set -euo pipefail
echo -e "\nRepo connect test\n" >> ${{ env.CORSO_LOG_FILE }} echo -e "\nRepo connect test\n" >> ${{ env.CORSO_LOG_FILE }}
@ -164,7 +152,6 @@ jobs:
# Run maintenance on an empty repo just to make sure the command still # Run maintenance on an empty repo just to make sure the command still
# works. # works.
- name: Repo maintenance test - name: Repo maintenance test
timeout-minutes: 30
run: | run: |
set -euo pipefail set -euo pipefail
echo -e "\nRepo maintenance test\n" >> ${{ env.CORSO_LOG_FILE }} echo -e "\nRepo maintenance test\n" >> ${{ env.CORSO_LOG_FILE }}
@ -174,14 +161,13 @@ jobs:
--mode complete \ --mode complete \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-maintenance.log 2>&1 | tee ${{ env.CORSO_LOG_DIR }}/gotest-repo-maintenance.log
########################################################################################################################################## ##########################################################################################################################################
# Exchange # Exchange
# generate new entries to roll into the next load test # generate new entries to roll into the next load test
# only runs if the test was successful # only runs if the test was successful
- name: Exchange - Create new data - name: Exchange - Create new data
timeout-minutes: 30
working-directory: ./src/cmd/factory working-directory: ./src/cmd/factory
run: | run: |
go run . exchange emails \ go run . exchange emails \
@ -191,68 +177,60 @@ jobs:
--count 4 --count 4
- name: Exchange - Backup - name: Exchange - Backup
timeout-minutes: 30
id: exchange-backup id: exchange-backup
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: exchange service: exchange
kind: first-backup kind: first-backup
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"' backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}" restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
- name: Exchange - Incremental backup - name: Exchange - Incremental backup
timeout-minutes: 30
id: exchange-backup-incremental id: exchange-backup-incremental
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: exchange service: exchange
kind: incremental kind: incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"' backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}" restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }} base-backup: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
- name: Exchange - Non delta backup - name: Exchange - Non delta backup
timeout-minutes: 30
id: exchange-backup-non-delta id: exchange-backup-non-delta
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: exchange service: exchange
kind: non-delta kind: non-delta
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta' backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}" restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }} base-backup: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
- name: Exchange - Incremental backup after non-delta - name: Exchange - Incremental backup after non-delta
timeout-minutes: 30
id: exchange-backup-incremental-after-non-delta id: exchange-backup-incremental-after-non-delta
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: exchange service: exchange
kind: non-delta-incremental kind: non-delta-incremental
backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"' backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
restore-args: "--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}" restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
backup-id: ${{ steps.exchange-backup.outputs.backup-id }} base-backup: ${{ steps.exchange-backup.outputs.backup-id }}
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
##########################################################################################################################################
# Onedrive ##########################################################################################################################################
# Onedrive
# generate new entries for test # generate new entries for test
- name: OneDrive - Create new data - name: OneDrive - Create new data
id: new-data-creation-onedrive id: new-data-creation-onedrive
timeout-minutes: 30
working-directory: ./src/cmd/factory working-directory: ./src/cmd/factory
run: | run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S") suffix=$(date +"%Y-%m-%d_%H-%M-%S")
@ -268,20 +246,18 @@ jobs:
- name: OneDrive - Backup - name: OneDrive - Backup
id: onedrive-backup id: onedrive-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: onedrive service: onedrive
kind: first-backup kind: first-backup
backup-args: '--user "${{ env.TEST_USER }}"' backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}" restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true with-export: true
# generate some more enteries for incremental check # generate some more enteries for incremental check
- name: OneDrive - Create new data (for incremental) - name: OneDrive - Create new data (for incremental)
timeout-minutes: 30
working-directory: ./src/cmd/factory working-directory: ./src/cmd/factory
run: | run: |
go run . onedrive files \ go run . onedrive files \
@ -293,31 +269,29 @@ jobs:
- name: OneDrive - Incremental backup - name: OneDrive - Incremental backup
id: onedrive-incremental id: onedrive-incremental
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: onedrive service: onedrive
kind: incremental kind: incremental
backup-args: '--user "${{ env.TEST_USER }}"' backup-args: '--user "${{ env.TEST_USER }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}" restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true with-export: true
########################################################################################################################################## ##########################################################################################################################################
# Sharepoint Library # Sharepoint
# generate new entries for test # generate new entries for test
- name: SharePoint - Create new data - name: SharePoint - Create new data
id: new-data-creation-sharepoint id: new-data-creation-sharepoint
timeout-minutes: 30
working-directory: ./src/cmd/factory working-directory: ./src/cmd/factory
run: | run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S") suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint files \ go run . sharepoint files \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \ --site ${{ secrets.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \ --user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \ --secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \ --tenant ${{ secrets.TENANT_ID }} \
@ -328,25 +302,22 @@ jobs:
- name: SharePoint - Backup - name: SharePoint - Backup
id: sharepoint-backup id: sharepoint-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: sharepoint service: sharepoint
kind: first-backup kind: first-backup
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries' backup-args: '--site "${{ secrets.CORSO_M365_TEST_SITE_URL }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}" restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true with-export: true
category: libraries
# generate some more enteries for incremental check # generate some more enteries for incremental check
- name: SharePoint - Create new data (for incremental) - name: SharePoint - Create new data (for incremental)
timeout-minutes: 30
working-directory: ./src/cmd/factory working-directory: ./src/cmd/factory
run: | run: |
go run . sharepoint files \ go run . sharepoint files \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \ --site ${{ secrets.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \ --user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \ --secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \ --tenant ${{ secrets.TENANT_ID }} \
@ -355,117 +326,23 @@ jobs:
- name: SharePoint - Incremental backup - name: SharePoint - Incremental backup
id: sharepoint-incremental id: sharepoint-incremental
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: sharepoint service: sharepoint
kind: incremental kind: incremental
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data libraries' backup-args: '--site "${{ secrets.CORSO_M365_TEST_SITE_URL }}"'
restore-args: "--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}" restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true with-export: true
category: libraries
########################################################################################################################################## ##########################################################################################################################################
# Sharepoint Lists # Groups and Teams
# generate new entries for test
# The `awk | tr | sed` command chain is used to get a comma separated list of SharePoint list names.
- name: SharePoint Lists - Create new data
id: new-data-creation-sharepoint-lists
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint lists \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4 |
awk 'NR > 1 {print $2}' | tr '\n' ',' | sed -e 's/,$//' -e 's/^/result=/' |
tee $GITHUB_OUTPUT
# Extracts the common prefix for the Sharepoint list names.
- name: SharePoint Lists - Store restore container
id: sharepoint-lists-store-restore-container
run: |
echo ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} |
cut -d',' -f1 |
cut -d'_' -f1,2,3,4,5 |
sed -e 's/^/result=/' |
tee $GITHUB_OUTPUT
- name: SharePoint Lists - Backup
id: sharepoint-lists-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: sharepoint
kind: first-backup-lists
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
restore-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
export-args: "--list ${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
restore-container: "${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: lists
on-collision: copy
# generate some more enteries for incremental check
- name: SharePoint Lists - Create new data (for incremental)
id: inc-data-creation-sharepoint-lists
timeout-minutes: 30
working-directory: ./src/cmd/factory
run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S")
go run . sharepoint lists \
--site ${{ vars.CORSO_M365_TEST_SITE_URL }} \
--user ${{ env.TEST_USER }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--tenant ${{ secrets.TENANT_ID }} \
--destination ${{ env.RESTORE_DEST_PFX }}$suffix \
--count 4 |
awk 'NR > 1 {print $2}' | tr '\n' ',' | sed -e 's/,$//' -e 's/^/result=/' |
tee $GITHUB_OUTPUT
- name: SharePoint Lists - Store restore container (for incremental)
id: sharepoint-lists-store-restore-container-inc
run: |
echo ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }} |
cut -d',' -f1 |
cut -d'_' -f1,2,3,4,5 |
sed -e 's/^/result=/' |
tee $GITHUB_OUTPUT
- name: SharePoint Lists - Incremental backup
id: sharepoint-lists-incremental
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test
with:
service: sharepoint
kind: incremental-lists
backup-args: '--site "${{ vars.CORSO_M365_TEST_SITE_URL }}" --data lists'
restore-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }} --destination Corso_Test_Sanity_Restore_$(date +'%Y%m%d_%H%M%S')"
export-args: "--list ${{ steps.inc-data-creation-sharepoint-lists.outputs.result }},${{ steps.new-data-creation-sharepoint-lists.outputs.result }}"
restore-container: "${{ steps.sharepoint-lists-store-restore-container-inc.outputs.result }},${{ steps.sharepoint-lists-store-restore-container.outputs.result }}"
log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
category: lists
on-collision: copy
##########################################################################################################################################
# Groups and Teams
# generate new entries for test # generate new entries for test
- name: Groups - Create new data - name: Groups - Create new data
id: new-data-creation-groups id: new-data-creation-groups
timeout-minutes: 30
working-directory: ./src/cmd/factory working-directory: ./src/cmd/factory
run: | run: |
suffix=$(date +"%Y-%m-%d_%H-%M-%S") suffix=$(date +"%Y-%m-%d_%H-%M-%S")
@ -482,59 +359,55 @@ jobs:
- name: Groups - Backup - name: Groups - Backup
id: groups-backup id: groups-backup
timeout-minutes: 30
uses: ./.github/actions/backup-restore-test uses: ./.github/actions/backup-restore-test
with: with:
service: groups service: groups
kind: first-backup kind: first-backup
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries' backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}" test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
log-dir: ${{ env.CORSO_LOG_DIR }} log-dir: ${{ env.CORSO_LOG_DIR }}
with-export: true
# generate some more entries for incremental check # generate some more enteries for incremental check
- name: Groups - Create new data (for incremental) # - name: Groups - Create new data (for incremental)
timeout-minutes: 30 # working-directory: ./src/cmd/factory
working-directory: ./src/cmd/factory # run: |
run: | # go run . sharepoint files \
go run . sharepoint files \ # --site ${{ secrets.CORSO_M365_TEST_GROUPS_SITE_URL }} \
--site ${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }} \ # --user ${{ env.TEST_USER }} \
--user ${{ env.TEST_USER }} \ # --secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
--secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \ # --tenant ${{ secrets.TENANT_ID }} \
--tenant ${{ secrets.TENANT_ID }} \ # --destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }} \
--destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }} \ # --count 4
--count 4
- name: Groups - Incremental backup # - name: Groups - Incremental backup
id: groups-incremental # id: groups-incremental
timeout-minutes: 30 # uses: ./.github/actions/backup-restore-test
uses: ./.github/actions/backup-restore-test # with:
with: # service: groups
service: groups # kind: incremental
kind: incremental # backup-args: '--site "${{ secrets.CORSO_M365_TEST_GROUPS_SITE_URL }}"'
backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}" --data messages,libraries' # restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
restore-args: '--site "${{ vars.CORSO_M365_TEST_GROUPS_SITE_URL }}" --folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}' # test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
restore-container: "${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}" # log-dir: ${{ env.CORSO_LOG_DIR }}
log-dir: ${{ env.CORSO_LOG_DIR }} # with-export: true
with-export: true
########################################################################################################################################## ##########################################################################################################################################
# Logging & Notifications # Logging & Notifications
# Upload the original go test output as an artifact for later review. # Upload the original go test output as an artifact for later review.
- name: Upload test log - name: Upload test log
if: always() if: always()
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
with: with:
name: sanity-test-log name: sanity-test-log
path: ${{ env.CORSO_LOG_DIR }}/* path: ${{ env.CORSO_LOG_DIR }}/*
if-no-files-found: error if-no-files-found: error
retention-days: 14 retention-days: 14
- name: Notify failure in teams - name: Notify failure in slack
if: failure() if: failure()
uses: ./.github/actions/teams-message uses: ./.github/actions/slack-message
with: with:
msg: "[CORSO FAILED] Sanity Tests" msg: "[FAILED] Sanity Tests"
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }} slack_url: ${{ secrets.SLACK_WEBHOOK_URL }}

View File

@ -1,23 +0,0 @@
name: Manually Test Teams Action
on:
workflow_dispatch:
inputs:
msg:
description: 'Message to send:'
required: true
default: 'This is a test message'
jobs:
notify:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Send notification
uses: ./.github/actions/teams-message
with:
msg: ${{ github.event.inputs.msg }}
teams_url: ${{ secrets.TEAMS_CORSO_CI_WEBHOOK_URL }}

View File

@ -6,123 +6,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased] (beta) ## [Unreleased] (beta)
### Fixed
- Handle the case where an email or event cannot be retrieved from Exchange due to an `ErrorCorruptData` error. Corso will skip over the item but report it in the backup summary.
- Emails attached within other emails are now correctly exported
- Gracefully handle email and post attachments without name when exporting to eml
- Use correct timezone for event start and end times in Exchange exports (helps fix issues in relative recurrence patterns)
- Fixed an issue causing exports dealing with calendar data to have high memory usage
## [v0.19.0] (beta) - 2024-02-06
### Added
- Events can now be exported from Exchange backups as .ics files.
- Update repo init configuration to reduce the total number of GET requests sent
to the object store when using corso. This affects repos that have many
backups created in them per day the most.
- Feature Preview: Corso now supports backup, export & restore of SharePoint lists. Lists backup can be initiated using `corso backup create sharepoint --site <site-url> --data lists`.
- Group mailbox(aka conversations) backup and export support is now officially available. Group mailbox posts can be exported as `.eml` files.
### Fixed
- Retry transient 400 "invalidRequest" errors during onedrive & sharepoint backup.
- Backup attachments associated with group mailbox items.
- Groups and Teams backups no longer fail when a resource has no display name.
- Contacts in-place restore failed if the restore destination was empty.
- Link shares with external users are now backed up and restored as expected
- Ensure persistent repo config is populated on repo init if repo init failed partway through during the previous init attempt.
### Changed
- When running `backup details` on an empty backup returns a more helpful error message.
- Backup List additionally shows the data category for each backup.
- Remove hidden `--succeed-if-exists` flag for repo init. Repo init will now succeed without error if run on an existing repo with the same passphrase.
### Known issues
- Backing up a group mailbox item may fail if it has a very large number of attachments (500+).
- Event description for exchange exports might look slightly different for certain events.
- Exchange in-place restore may restore items in well-known folders to different folders if the user has well-known folder names change based on locale and has updated the locale since the backup was created.
- In-place Exchange contacts restore will merge items in folders named "Contacts" or "contacts" into the default folder.
- External users with access through shared links will not receive these links as they are not sent via email during restore.
- Graph API has limited support for certain column types such as `location`, `hyperlink/picture`, and `metadata`. Restoring SharePoint list items containing these columns will result in differences compared to the original items.
- SharePoint list item attachments are not available due to graph API limitations.
- Group mailbox restore is not supported due to limited Graph API support for creating mailbox items.
- Due to Graph API limitations, any group mailbox items present in subfolders other than Inbox aren't backed up.
## [v0.18.0] (beta) - 2024-01-02
### Fixed
- Handle the case where an email cannot be retrieved from Exchange due to an `ErrorInvalidRecipients` error. In
this case, Corso will skip over the item but report this in the backup summary.
- Fix `ErrorItemNotFound` errors when restoring emails with multiple attachments.
- Avoid Graph SDK `Requests must contain extension changes exclusively.` errors by removing server-populated field from restored event items.
- Improve Group mailbox(conversations) backup performance by only downloading new items or items with modified content.
- Handle cases where Exchange backup stored invalid JSON blobs if there were special characters in the user content. These would result in errors during restore.
### Known issues
- Restoring OneDrive, SharePoint, or Teams & Groups items shared with external users while the tenant or site is configured to not allow sharing with external users will not restore permissions.
### Added
- Contacts can now be exported from Exchange backups as .vcf files
## [v0.17.0] (beta) - 2023-12-11
### Changed
- Memory optimizations for large scale OneDrive and Sharepoint backups.
### Fixed
- Resolved a possible deadlock when backing up Teams Channel Messages.
- Fixed an attachment download failure(ErrorTooManyObjectsOpened) during exchange backup.
## [v0.16.0] (beta) - 2023-11-28
### Added
- Export support for emails in exchange backups as `.eml` files.
- More colorful and informational cli display.
### Changed
- The file extension in Teams messages exports has switched to json to match the content type.
- SDK consumption of the /services/m365 package has shifted from independent functions to a client-based api.
- SDK consumers can now configure the /services/m365 graph api client configuration when constructing a new m365 client.
- Dynamic api rate limiting allows small-scale Exchange backups to complete more quickly.
- Kopia's local config files now uses unique filenames that match Corso configurations. This can protect concurrent Corso operations from mistakenly clobbering storage configs during runtime.
### Fixed
- Handle OneDrive folders being deleted and recreated midway through a backup.
- Automatically re-run a full delta query on incremental if the prior backup is found to have malformed prior-state information.
- Retry drive item permission downloads during long-running backups after the jwt token expires and refreshes.
- Retry item downloads during connection timeouts.
## [v0.15.0] (beta) - 2023-10-31
### Added
- Added `corso repo update-passphrase` command to update the passphrase of an existing Corso repository
- Added Subject and Message preview to channel messages detail entries
### Fixed
- SharePoint backup would fail if any site had an empty display name
- Fix a bug with exports hanging post completion
- Handle 503 errors in nested OneDrive packages
### Changed
- Item Details formatting in Groups and Teams backups
## [v0.14.2] (beta) - 2023-10-17
### Added
- Skips graph calls for expired item download URLs.
- Export operation now shows the stats at the end of the run
### Fixed
- Catch and report cases where a protected resource is locked out of access. SDK consumers have a new errs sentinel that allows them to check for this case.
- Fix a case where missing item LastModifiedTimes could cause incremental backups to fail.
- Email size metadata was incorrectly set to the size of the last attachment. Emails will now correctly report the size of the mail content plus the size of all attachments.
- Improves the filtering capabilities for Groups restore and backup
- Improve check to skip OneNote files that cannot be downloaded.
- Fix Groups backup for non Team groups
### Changed
- Groups restore now expects the site whose backup we should restore
## [v0.14.0] (beta) - 2023-10-09
### Added ### Added
- Enables local or network-attached storage for Corso repositories. - Enables local or network-attached storage for Corso repositories.
@ -131,23 +14,20 @@ this case, Corso will skip over the item but report this in the backup summary.
- Added `--backups` flag to delete multiple backups in `corso backup delete` command. - Added `--backups` flag to delete multiple backups in `corso backup delete` command.
- Backup now includes all sites that belongs to a team, not just the root site. - Backup now includes all sites that belongs to a team, not just the root site.
### Fixed ## Fixed
- Teams Channels that cannot support delta tokens (those without messages) fall back to non-delta enumeration and no longer fail a backup. - Teams Channels that cannot support delta tokens (those without messages) fall back to non-delta enumeration and no longer fail a backup.
### Known issues ### Known issues
- Restoring the data into a different Group from the one it was backed up from is not currently supported - Restoring the data into a different Group from the one it was backed up from is not currently supported
### Other
- Groups and Teams service support is still in feature preview
## [v0.13.0] (beta) - 2023-09-18 ## [v0.13.0] (beta) - 2023-09-18
### Added ### Added
- Groups and Teams service support available as a feature preview! Channel messages and Files are now available for backup and restore in the CLI: `corso backup create groups --group '*'` - Groups and Teams service support available as a feature preview! Channel messages and Files are now available for backup and restore in the CLI: `corso backup create groups --group '*'`
- The cli commands for "groups" and "teams" can be used interchangeably, and will operate on the same backup data. * The cli commands for "groups" and "teams" can be used interchangably, and will operate on the same backup data.
- New permissions are required to backup Channel messages. See the [Corso Documentation](https://corsobackup.io/docs/setup/m365-access/#configure-required-permissions) for complete details. * New permissions are required to backup Channel messages. See the [Corso Documentation](https://corsobackup.io/docs/setup/m365-access/#configure-required-permissions) for complete details.
Even though Channel message restoration is not available, message write permissions are included to cover future integration. Even though Channel message restoration is not available, message write permissions are included to cover future integration.
- This is a feature preview, and may be subject to breaking changes based on feedback and testing. * This is a feature preview, and may be subject to breaking changes based on feedback and testing.
### Changed ### Changed
- Switched to Go 1.21 - Switched to Go 1.21
@ -502,15 +382,7 @@ this case, Corso will skip over the item but report this in the backup summary.
- Miscellaneous - Miscellaneous
- Optional usage statistics reporting ([RM-35](https://github.com/alcionai/corso-roadmap/issues/35)) - Optional usage statistics reporting ([RM-35](https://github.com/alcionai/corso-roadmap/issues/35))
[Unreleased]: https://github.com/alcionai/corso/compare/v0.19.0...HEAD [Unreleased]: https://github.com/alcionai/corso/compare/v0.11.1...HEAD
[v0.19.0]: https://github.com/alcionai/corso/compare/v0.18.0...v0.19.0
[v0.18.0]: https://github.com/alcionai/corso/compare/v0.17.0...v0.18.0
[v0.17.0]: https://github.com/alcionai/corso/compare/v0.16.0...v0.17.0
[v0.16.0]: https://github.com/alcionai/corso/compare/v0.15.0...v0.16.0
[v0.15.0]: https://github.com/alcionai/corso/compare/v0.14.0...v0.15.0
[v0.14.0]: https://github.com/alcionai/corso/compare/v0.13.0...v0.14.0
[v0.13.0]: https://github.com/alcionai/corso/compare/v0.12.0...v0.13.0
[v0.12.0]: https://github.com/alcionai/corso/compare/v0.11.1...v0.12.0
[v0.11.1]: https://github.com/alcionai/corso/compare/v0.11.0...v0.11.1 [v0.11.1]: https://github.com/alcionai/corso/compare/v0.11.0...v0.11.1
[v0.11.0]: https://github.com/alcionai/corso/compare/v0.10.0...v0.11.0 [v0.11.0]: https://github.com/alcionai/corso/compare/v0.10.0...v0.11.0
[v0.10.0]: https://github.com/alcionai/corso/compare/v0.9.0...v0.10.0 [v0.10.0]: https://github.com/alcionai/corso/compare/v0.9.0...v0.10.0

View File

@ -1,6 +1,3 @@
> [!NOTE]
> **The Corso project is no longer actively maintained and has been archived**.
<p align="center"> <p align="center">
<img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" /> <img src="https://github.com/alcionai/corso/blob/main/website/static/img/corso_logo.svg?raw=true" alt="Corso Logo" width="100" />
</p> </p>

View File

@ -12,7 +12,7 @@ usage() {
} }
ROOT=$(dirname $(dirname $(readlink -f $0))) ROOT=$(dirname $(dirname $(readlink -f $0)))
GOVER=1.21 # go version GOVER=1.19 # go version
CORSO_BUILD_CACHE="/tmp/.corsobuild" # shared persistent cache CORSO_BUILD_CACHE="/tmp/.corsobuild" # shared persistent cache
# Figure out os and architecture # Figure out os and architecture

View File

@ -4,7 +4,6 @@ run:
linters: linters:
enable: enable:
- errcheck - errcheck
- exhaustive
- forbidigo - forbidigo
- gci - gci
- gofmt - gofmt
@ -26,11 +25,6 @@ linters:
- staticcheck - staticcheck
linters-settings: linters-settings:
exhaustive:
check:
- switch
default-signifies-exhaustive: false
explicit-exhaustive-switch: true
gci: gci:
sections: sections:
- standard - standard
@ -55,13 +49,10 @@ linters-settings:
# String formatting should be avoided in favor of structured errors (ie: err.With(k, v)). # String formatting should be avoided in favor of structured errors (ie: err.With(k, v)).
- '(errors|fmt)\.(New|Stack|Wrap|Error)f?\((# error handling should use clues pkg)?' - '(errors|fmt)\.(New|Stack|Wrap|Error)f?\((# error handling should use clues pkg)?'
# Avoid Warn-level logging in favor of Info or Error. # Avoid Warn-level logging in favor of Info or Error.
- 'Warnw?f?\((# logging should use Info or Error)?' - 'Warn[wf]?\((# logging should use Info or Error)?'
# Prefer suite.Run(name, func() {}) for subtests as testify has it instead # Prefer suite.Run(name, func() {}) for subtests as testify has it instead
# of suite.T().Run(name, func(t *testing.T) {}). # of suite.T().Run(name, func(t *testing.T) {}).
- '(T\(\)|\st[a-zA-Z0-9]*)\.Run(# prefer testify suite.Run(name, func()) )?' - '(T\(\)|\st[a-zA-Z0-9]*)\.Run(# prefer testify suite.Run(name, func()) )?'
# Prefer packing ctx values into the error using NewWC, WrapWC, or StackWC
# instead of New|Stack|Wrap().WithClues(ctx)
- 'WithClues(# prefer the builderWC variant - ex: StackWC(ctx, ...))?'
lll: lll:
line-length: 120 line-length: 120
revive: revive:
@ -134,13 +125,7 @@ issues:
linters: linters:
- forbidigo - forbidigo
text: "context.(Background|TODO)" text: "context.(Background|TODO)"
- path: internal/m365/collection/drive/collections_test.go - path: internal/m365/graph/betasdk
linters:
- lll
- path: internal/m365/collection/drive/collections_tree_test.go
linters:
- lll
- path: pkg/services/m365/api/graph/betasdk
linters: linters:
- wsl - wsl
- revive - revive

View File

@ -18,7 +18,7 @@ lint: check-lint-version
fmt: fmt:
gofumpt -w . gofumpt -w .
goimports -w . goimports -w .
gci write --skip-generated -s 'standard' -s 'default' -s 'prefix(github.com/alcionai/corso)' . gci write --skip-generated -s 'standard,default,prefix(github.com/alcionai/corso)' .
check-lint-version: check-lint check-lint-version: check-lint
@if [ "$(LINT_VERSION)" != "$(WANTED_LINT_VERSION)" ]; then \ @if [ "$(LINT_VERSION)" != "$(WANTED_LINT_VERSION)" ]; then \

View File

@ -12,14 +12,12 @@ import (
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/color"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/errs/core"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository" "github.com/alcionai/corso/src/pkg/repository"
@ -27,8 +25,6 @@ import (
"github.com/alcionai/corso/src/pkg/store" "github.com/alcionai/corso/src/pkg/store"
) )
var ErrEmptyBackup = clues.New("no items in backup")
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// adding commands to cobra // adding commands to cobra
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -45,7 +41,6 @@ var serviceCommands = []func(cmd *cobra.Command) *cobra.Command{
addOneDriveCommands, addOneDriveCommands,
addSharePointCommands, addSharePointCommands,
addGroupsCommands, addGroupsCommands,
addTeamsChatsCommands,
} }
// AddCommands attaches all `corso backup * *` commands to the parent. // AddCommands attaches all `corso backup * *` commands to the parent.
@ -190,47 +185,31 @@ func genericCreateCommand(
ictx = clues.Add(ctx, "resource_owner_selected", owner) ictx = clues.Add(ctx, "resource_owner_selected", owner)
) )
logger.Ctx(ictx).Infof("setting up backup")
bo, err := r.NewBackupWithLookup(ictx, discSel, ins) bo, err := r.NewBackupWithLookup(ictx, discSel, ins)
if err != nil { if err != nil {
cerr := clues.WrapWC(ictx, err, owner) errs = append(errs, clues.Wrap(err, owner).WithClues(ictx))
errs = append(errs, cerr) Errf(ictx, "%v\n", err)
Errf(
ictx,
"%s\nCause: %s",
"Unable to initiate backup",
err.Error())
continue continue
} }
ictx = clues.Add( ictx = clues.Add(
ictx, ctx,
"resource_owner_id", bo.ResourceOwner.ID(), "resource_owner_id", bo.ResourceOwner.ID(),
"resource_owner_name", clues.Hide(bo.ResourceOwner.Name())) "resource_owner_name", bo.ResourceOwner.Name())
logger.Ctx(ictx).Infof("running backup")
err = bo.Run(ictx) err = bo.Run(ictx)
if err != nil { if err != nil {
if errors.Is(err, core.ErrServiceNotEnabled) { if errors.Is(err, graph.ErrServiceNotEnabled) {
logger.Ctx(ictx).Infow("service not enabled", logger.Ctx(ctx).Infow("service not enabled",
"resource_owner_id", bo.ResourceOwner.ID(), "resource_owner_id", bo.ResourceOwner.ID(),
"service", serviceName) "service", serviceName)
continue continue
} }
cerr := clues.Wrap(err, owner) errs = append(errs, clues.Wrap(err, owner).WithClues(ictx))
errs = append(errs, cerr) Errf(ictx, "%v\n", err)
Errf(
ictx,
"%s\nCause: %s",
"Unable to complete backup",
err.Error())
continue continue
} }
@ -238,10 +217,10 @@ func genericCreateCommand(
bIDs = append(bIDs, string(bo.Results.BackupID)) bIDs = append(bIDs, string(bo.Results.BackupID))
if !DisplayJSONFormat() { if !DisplayJSONFormat() {
Infof(ictx, fmt.Sprintf("Backup complete %s %s", observe.Bullet, color.BlueOutput(bo.Results.BackupID))) Infof(ctx, "Done\n")
printBackupStats(ictx, r, string(bo.Results.BackupID)) printBackupStats(ctx, r, string(bo.Results.BackupID))
} else { } else {
Infof(ictx, "Backup complete - ID: %v\n", bo.Results.BackupID) Infof(ctx, "Done - ID: %v\n", bo.Results.BackupID)
} }
} }
@ -251,10 +230,11 @@ func genericCreateCommand(
} }
if len(bups) > 0 { if len(bups) > 0 {
Info(ctx, "\nCompleted Backups:") Info(ctx, "Completed Backups:")
backup.PrintAll(ctx, bups)
} }
backup.PrintAll(ctx, bups)
if len(errs) > 0 { if len(errs) > 0 {
sb := fmt.Sprintf("%d of %d backups failed:\n", len(errs), len(selectorSet)) sb := fmt.Sprintf("%d of %d backups failed:\n", len(errs), len(selectorSet))
@ -337,8 +317,7 @@ func genericListCommand(
b.Print(ctx) b.Print(ctx)
fe.PrintItems( fe.PrintItems(
ctx, ctx,
!ifShow(flags.ListAlertsFV), !ifShow(flags.ListFailedItemsFV),
!ifShow(flags.FailedItemsFV),
!ifShow(flags.ListSkippedItemsFV), !ifShow(flags.ListSkippedItemsFV),
!ifShow(flags.ListRecoveredErrorsFV)) !ifShow(flags.ListRecoveredErrorsFV))
@ -398,10 +377,6 @@ func genericDetailsCore(
return nil, clues.Wrap(errs.Failure(), "Failed to get backup details in the repository") return nil, clues.Wrap(errs.Failure(), "Failed to get backup details in the repository")
} }
if len(d.Entries) == 0 {
return nil, ErrEmptyBackup
}
if opts.SkipReduce { if opts.SkipReduce {
return d, nil return d, nil
} }
@ -428,5 +403,6 @@ func printBackupStats(ctx context.Context, r repository.Repositoryer, bid string
logger.CtxErr(ctx, err).Error("finding backup immediately after backup operation completion") logger.CtxErr(ctx, err).Error("finding backup immediately after backup operation completion")
} }
b.ToPrintable().Stats.PrintProperties(ctx) b.ToPrintable().Stats.Print(ctx)
Info(ctx, " ")
} }

View File

@ -5,12 +5,10 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/utils/testdata" "github.com/alcionai/corso/src/cli/utils/testdata"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
dtd "github.com/alcionai/corso/src/pkg/backup/details/testdata" dtd "github.com/alcionai/corso/src/pkg/backup/details/testdata"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -68,30 +66,3 @@ func (suite *BackupUnitSuite) TestGenericDetailsCore() {
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))
assert.ElementsMatch(t, expected, output.Entries) assert.ElementsMatch(t, expected, output.Entries)
} }
func (suite *BackupUnitSuite) TestGenericDetailsCore_empty() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
bg := testdata.VersionedBackupGetter{
Details: &details.Details{
DetailsModel: details.DetailsModel{
Entries: []details.Entry{},
},
},
}
sel := selectors.NewExchangeBackup([]string{"user-id"})
sel.Include(sel.AllData())
_, err := genericDetailsCore(
ctx,
bg,
"backup-ID",
sel.Selector,
control.DefaultOptions())
require.Error(t, err, "has error")
assert.ErrorIs(t, err, ErrEmptyBackup, clues.ToCore(err))
}

View File

@ -3,6 +3,7 @@ package backup
import ( import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
@ -61,11 +62,15 @@ corso backup details exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
// called by backup.go to map subcommands to provider-specific handling. // called by backup.go to map subcommands to provider-specific handling.
func addExchangeCommands(cmd *cobra.Command) *cobra.Command { func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use { switch cmd.Use {
case createCommand: case createCommand:
c, _ = utils.AddCommand(cmd, exchangeCreateCmd()) c, fs = utils.AddCommand(cmd, exchangeCreateCmd())
fs.SortFlags = false
c.Use = c.Use + " " + exchangeServiceCommandCreateUseSuffix c.Use = c.Use + " " + exchangeServiceCommandCreateUseSuffix
c.Example = exchangeServiceCommandCreateExamples c.Example = exchangeServiceCommandCreateExamples
@ -75,20 +80,24 @@ func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
flags.AddMailBoxFlag(c) flags.AddMailBoxFlag(c)
flags.AddDataFlag(c, []string{dataEmail, dataContacts, dataEvents}, false) flags.AddDataFlag(c, []string{dataEmail, dataContacts, dataEvents}, false)
flags.AddFetchParallelismFlag(c) flags.AddFetchParallelismFlag(c)
flags.AddFailFastFlag(c)
flags.AddDisableIncrementalsFlag(c)
flags.AddForceItemDataDownloadFlag(c)
flags.AddDisableDeltaFlag(c) flags.AddDisableDeltaFlag(c)
flags.AddEnableImmutableIDFlag(c) flags.AddEnableImmutableIDFlag(c)
flags.AddDisableConcurrencyLimiterFlag(c)
flags.AddDeltaPageSizeFlag(c) flags.AddDeltaPageSizeFlag(c)
flags.AddGenericBackupFlags(c)
flags.AddDisableSlidingWindowLimiterFlag(c)
case listCommand: case listCommand:
c, _ = utils.AddCommand(cmd, exchangeListCmd()) c, fs = utils.AddCommand(cmd, exchangeListCmd())
fs.SortFlags = false
flags.AddBackupIDFlag(c, false) flags.AddBackupIDFlag(c, false)
flags.AddAllBackupListFlags(c) flags.AddAllBackupListFlags(c)
case detailsCommand: case detailsCommand:
c, _ = utils.AddCommand(cmd, exchangeDetailsCmd()) c, fs = utils.AddCommand(cmd, exchangeDetailsCmd())
fs.SortFlags = false
c.Use = c.Use + " " + exchangeServiceCommandDetailsUseSuffix c.Use = c.Use + " " + exchangeServiceCommandDetailsUseSuffix
c.Example = exchangeServiceCommandDetailsExamples c.Example = exchangeServiceCommandDetailsExamples
@ -98,10 +107,11 @@ func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
// Flags addition ordering should follow the order we want them to appear in help and docs: // Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence. // More generic (ex: --user) and more frequently used flags take precedence.
flags.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
flags.AddExchangeDetailsAndRestoreFlags(c, false) flags.AddExchangeDetailsAndRestoreFlags(c)
case deleteCommand: case deleteCommand:
c, _ = utils.AddCommand(cmd, exchangeDeleteCmd()) c, fs = utils.AddCommand(cmd, exchangeDeleteCmd())
fs.SortFlags = false
c.Use = c.Use + " " + exchangeServiceCommandDeleteUseSuffix c.Use = c.Use + " " + exchangeServiceCommandDeleteUseSuffix
c.Example = exchangeServiceCommandDeleteExamples c.Example = exchangeServiceCommandDeleteExamples
@ -155,12 +165,7 @@ func createExchangeCmd(cmd *cobra.Command, args []string) error {
sel := exchangeBackupCreateSelectors(flags.UserFV, flags.CategoryDataFV) sel := exchangeBackupCreateSelectors(flags.UserFV, flags.CategoryDataFV)
ins, err := utils.UsersMap( ins, err := utils.UsersMap(ctx, *acct, utils.Control(), fault.New(true))
ctx,
*acct,
utils.Control(),
r.Counter(),
fault.New(true))
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 users")) return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 users"))
} }

View File

@ -12,15 +12,14 @@ import (
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata" cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
@ -40,7 +39,7 @@ var (
type NoBackupExchangeE2ESuite struct { type NoBackupExchangeE2ESuite struct {
tester.Suite tester.Suite
dpnd dependencies dpnd dependencies
m365 its.M365IntgTestSetup its intgTesterSetup
} }
func TestNoBackupExchangeE2ESuite(t *testing.T) { func TestNoBackupExchangeE2ESuite(t *testing.T) {
@ -55,7 +54,7 @@ func (suite *NoBackupExchangeE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
suite.m365 = its.GetM365(t) suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService) suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
} }
@ -70,7 +69,7 @@ func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_noBackups() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "list", "exchange", "backup", "list", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder) cmd.SetErr(&suite.dpnd.recorder)
@ -94,7 +93,7 @@ func (suite *NoBackupExchangeE2ESuite) TestExchangeBackupListCmd_noBackups() {
type BackupExchangeE2ESuite struct { type BackupExchangeE2ESuite struct {
tester.Suite tester.Suite
dpnd dependencies dpnd dependencies
m365 its.M365IntgTestSetup its intgTesterSetup
} }
func TestBackupExchangeE2ESuite(t *testing.T) { func TestBackupExchangeE2ESuite(t *testing.T) {
@ -109,7 +108,7 @@ func (suite *BackupExchangeE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
suite.m365 = its.GetM365(t) suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService) suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
} }
@ -139,7 +138,7 @@ func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.
cmd, ctx := buildExchangeBackupCmd( cmd, ctx := buildExchangeBackupCmd(
ctx, ctx,
suite.dpnd.configFilePath, suite.dpnd.configFilePath,
suite.m365.User.ID, suite.its.user.ID,
category.String(), category.String(),
&recorder) &recorder)
@ -150,11 +149,8 @@ func runExchangeBackupCategoryTest(suite *BackupExchangeE2ESuite, category path.
result := recorder.String() result := recorder.String()
t.Log("backup results", result) t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email. // as an offhand check: the result should contain the m365 user id
assert.Contains( assert.Contains(t, result, suite.its.user.ID)
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
} }
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_ServiceNotEnabled_email() { func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_ServiceNotEnabled_email() {
@ -177,7 +173,7 @@ func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, categ
cmd, ctx := buildExchangeBackupCmd( cmd, ctx := buildExchangeBackupCmd(
ctx, ctx,
suite.dpnd.configFilePath, suite.dpnd.configFilePath,
fmt.Sprintf("%s,%s", tconfig.UnlicensedM365UserID(suite.T()), suite.m365.User.ID), fmt.Sprintf("%s,%s", tconfig.UnlicensedM365UserID(suite.T()), suite.its.user.ID),
category.String(), category.String(),
&recorder) &recorder)
err := cmd.ExecuteContext(ctx) err := cmd.ExecuteContext(ctx)
@ -186,11 +182,8 @@ func runExchangeBackupServiceNotEnabledTest(suite *BackupExchangeE2ESuite, categ
result := recorder.String() result := recorder.String()
t.Log("backup results", result) t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email. // as an offhand check: the result should contain the m365 user id
assert.Contains( assert.Contains(t, result, suite.its.user.ID)
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
} }
func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_email() { func (suite *BackupExchangeE2ESuite) TestExchangeBackupCmd_userNotFound_email() {
@ -229,8 +222,7 @@ func runExchangeBackupUserNotFoundTest(suite *BackupExchangeE2ESuite, category p
assert.Contains( assert.Contains(
t, t,
err.Error(), err.Error(),
"not found", "not found in tenant", "error missing user not found")
"error missing user not found")
assert.NotContains(t, err.Error(), "runtime error", "panic happened") assert.NotContains(t, err.Error(), "runtime error", "panic happened")
t.Logf("backup error message: %s", err.Error()) t.Logf("backup error message: %s", err.Error())
@ -249,7 +241,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAzureClientIDFl
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "create", "exchange", "backup", "create", "exchange",
"--user", suite.m365.User.ID, "--user", suite.its.user.ID,
"--azure-client-id", "invalid-value") "--azure-client-id", "invalid-value")
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -273,8 +265,8 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_fromConfigFile() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "create", "exchange", "backup", "create", "exchange",
"--user", suite.m365.User.ID, "--user", suite.its.user.ID,
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder) cmd.SetOut(&suite.dpnd.recorder)
@ -288,11 +280,8 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_fromConfigFile() {
result := suite.dpnd.recorder.String() result := suite.dpnd.recorder.String()
t.Log("backup results", result) t.Log("backup results", result)
// As an offhand check: the result should contain the m365 user's email. // as an offhand check: the result should contain the m365 user id
assert.Contains( assert.Contains(t, result, suite.its.user.ID)
t,
strings.ToLower(result),
strings.ToLower(suite.m365.User.Provider.Name()))
} }
// AWS flags // AWS flags
@ -306,7 +295,7 @@ func (suite *BackupExchangeE2ESuite) TestBackupCreateExchange_badAWSFlags() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "create", "exchange", "backup", "create", "exchange",
"--user", suite.m365.User.ID, "--user", suite.its.user.ID,
"--aws-access-key", "invalid-value", "--aws-access-key", "invalid-value",
"--aws-secret-access-key", "some-invalid-value") "--aws-secret-access-key", "some-invalid-value")
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -329,7 +318,7 @@ type PreparedBackupExchangeE2ESuite struct {
tester.Suite tester.Suite
dpnd dependencies dpnd dependencies
backupOps map[path.CategoryType]string backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup its intgTesterSetup
} }
func TestPreparedBackupExchangeE2ESuite(t *testing.T) { func TestPreparedBackupExchangeE2ESuite(t *testing.T) {
@ -346,13 +335,13 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
suite.m365 = its.GetM365(t) suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.ExchangeService) suite.dpnd = prepM365Test(t, ctx, path.ExchangeService)
suite.backupOps = make(map[path.CategoryType]string) suite.backupOps = make(map[path.CategoryType]string)
var ( var (
users = []string{suite.m365.User.ID} users = []string{suite.its.user.ID}
ins = idname.NewCache(map[string]string{suite.m365.User.ID: suite.m365.User.ID}) ins = idname.NewCache(map[string]string{suite.its.user.ID: suite.its.user.ID})
) )
for _, set := range []path.CategoryType{email, contacts, events} { for _, set := range []path.CategoryType{email, contacts, events} {
@ -420,7 +409,7 @@ func runExchangeListCmdTest(suite *PreparedBackupExchangeE2ESuite, category path
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "list", "exchange", "backup", "list", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder) cmd.SetOut(&suite.dpnd.recorder)
@ -461,7 +450,7 @@ func runExchangeListSingleCmdTest(suite *PreparedBackupExchangeE2ESuite, categor
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "list", "exchange", "backup", "list", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--backup", string(bID)) "--backup", string(bID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -488,7 +477,7 @@ func (suite *PreparedBackupExchangeE2ESuite) TestExchangeListCmd_badID() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "list", "exchange", "backup", "list", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--backup", "smarfs") "--backup", "smarfs")
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -530,7 +519,7 @@ func runExchangeDetailsCmdTest(suite *PreparedBackupExchangeE2ESuite, category p
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "details", "exchange", "backup", "details", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupFN, string(bID)) "--"+flags.BackupFN, string(bID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder) cmd.SetOut(&suite.dpnd.recorder)
@ -620,7 +609,7 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "delete", "exchange", "backup", "delete", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupIDsFN, "--"+flags.BackupIDsFN,
fmt.Sprintf("%s,%s", fmt.Sprintf("%s,%s",
string(suite.backupOps[0].Results.BackupID), string(suite.backupOps[0].Results.BackupID),
@ -634,7 +623,7 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd() {
// a follow-up details call should fail, due to the backup ID being deleted // a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd( cmd = cliTD.StubRootCmd(
"backup", "details", "exchange", "backup", "details", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--backup", string(suite.backupOps[0].Results.BackupID)) "--backup", string(suite.backupOps[0].Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -644,7 +633,7 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd() {
// a follow-up details call should fail, due to the backup ID being deleted // a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd( cmd = cliTD.StubRootCmd(
"backup", "details", "exchange", "backup", "details", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--backup", string(suite.backupOps[1].Results.BackupID)) "--backup", string(suite.backupOps[1].Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -662,7 +651,7 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_SingleID(
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "delete", "exchange", "backup", "delete", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupFN, "--"+flags.BackupFN,
string(suite.backupOps[2].Results.BackupID)) string(suite.backupOps[2].Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -674,7 +663,7 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_SingleID(
// a follow-up details call should fail, due to the backup ID being deleted // a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd( cmd = cliTD.StubRootCmd(
"backup", "details", "exchange", "backup", "details", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--backup", string(suite.backupOps[2].Results.BackupID)) "--backup", string(suite.backupOps[2].Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -692,7 +681,7 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_UnknownID
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "delete", "exchange", "backup", "delete", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupIDsFN, uuid.NewString()) "--"+flags.BackupIDsFN, uuid.NewString())
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -711,7 +700,7 @@ func (suite *BackupDeleteExchangeE2ESuite) TestExchangeBackupDeleteCmd_NoBackupI
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "delete", "exchange", "backup", "delete", "exchange",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// empty backupIDs should error since no data provided // empty backupIDs should error since no data provided

View File

@ -106,30 +106,20 @@ func (suite *ExchangeUnitSuite) TestBackupCreateFlags() {
"--" + flags.DeltaPageSizeFN, flagsTD.DeltaPageSize, "--" + flags.DeltaPageSizeFN, flagsTD.DeltaPageSize,
// bool flags // bool flags
"--" + flags.FailFastFN,
"--" + flags.DisableIncrementalsFN,
"--" + flags.ForceItemDataDownloadFN,
"--" + flags.DisableDeltaFN, "--" + flags.DisableDeltaFN,
"--" + flags.EnableImmutableIDFN, "--" + flags.EnableImmutableIDFN,
"--" + flags.DisableSlidingWindowLimiterFN, "--" + flags.DisableConcurrencyLimiterFN,
}, },
flagsTD.PreparedGenericBackupFlags(),
flagsTD.PreparedProviderFlags(), flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags())) flagsTD.PreparedStorageFlags()))
opts := utils.MakeExchangeOpts(cmd) opts := utils.MakeExchangeOpts(cmd)
co := utils.Control() co := utils.Control()
backupOpts := utils.ParseBackupOptions()
// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
// restore flags are switched over too and we no longer parse flags beyond
// connection info into control.Options.
assert.Equal(t, flagsTD.FetchParallelism, strconv.Itoa(backupOpts.Parallelism.ItemFetch))
assert.Equal(t, flagsTD.DeltaPageSize, strconv.Itoa(int(backupOpts.M365.DeltaPageSize)))
assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)
assert.True(t, backupOpts.M365.DisableDeltaEndpoint)
assert.True(t, backupOpts.M365.ExchangeImmutableIDs)
assert.True(t, backupOpts.ServiceRateLimiter.DisableSlidingWindowLimiter)
assert.ElementsMatch(t, flagsTD.MailboxInput, opts.Users)
assert.Equal(t, flagsTD.FetchParallelism, strconv.Itoa(co.Parallelism.ItemFetch)) assert.Equal(t, flagsTD.FetchParallelism, strconv.Itoa(co.Parallelism.ItemFetch))
assert.Equal(t, flagsTD.DeltaPageSize, strconv.Itoa(int(co.DeltaPageSize))) assert.Equal(t, flagsTD.DeltaPageSize, strconv.Itoa(int(co.DeltaPageSize)))
assert.Equal(t, control.FailFast, co.FailureHandling) assert.Equal(t, control.FailFast, co.FailureHandling)
@ -137,10 +127,7 @@ func (suite *ExchangeUnitSuite) TestBackupCreateFlags() {
assert.True(t, co.ToggleFeatures.ForceItemDataDownload) assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
assert.True(t, co.ToggleFeatures.DisableDelta) assert.True(t, co.ToggleFeatures.DisableDelta)
assert.True(t, co.ToggleFeatures.ExchangeImmutableIDs) assert.True(t, co.ToggleFeatures.ExchangeImmutableIDs)
assert.True(t, co.ToggleFeatures.DisableSlidingWindowLimiter) assert.True(t, co.ToggleFeatures.DisableConcurrencyLimiter)
assert.ElementsMatch(t, flagsTD.MailboxInput, opts.Users)
flagsTD.AssertGenericBackupFlags(t, cmd)
flagsTD.AssertProviderFlags(t, cmd) flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd) flagsTD.AssertStorageFlags(t, cmd)
} }

View File

@ -6,6 +6,7 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
@ -35,12 +36,9 @@ const (
groupsServiceCommandCreateExamples = `# Backup all Groups and Teams data for the Marketing group groupsServiceCommandCreateExamples = `# Backup all Groups and Teams data for the Marketing group
corso backup create groups --group Marketing corso backup create groups --group Marketing
# Backup only Teams channel messages # Backup only Teams conversations messages
corso backup create groups --group Marketing --data messages corso backup create groups --group Marketing --data messages
# Backup only group mailbox posts
corso backup create groups --group Marketing --data conversations
# Backup all Groups and Teams data for all groups # Backup all Groups and Teams data for all groups
corso backup create groups --group '*'` corso backup create groups --group '*'`
@ -53,39 +51,43 @@ corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd
# Explore Marketing messages posted after the start of 2022 # Explore Marketing messages posted after the start of 2022
corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd \ corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd \
--last-message-reply-after 2022-01-01T00:00:00 --last-message-reply-after 2022-01-01T00:00:00`
# Explore group mailbox posts with conversation subject "hello world"
corso backup details groups --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world"`
) )
// called by backup.go to map subcommands to provider-specific handling. // called by backup.go to map subcommands to provider-specific handling.
func addGroupsCommands(cmd *cobra.Command) *cobra.Command { func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use { switch cmd.Use {
case createCommand: case createCommand:
c, _ = utils.AddCommand(cmd, groupsCreateCmd(), utils.MarkPreviewCommand()) c, fs = utils.AddCommand(cmd, groupsCreateCmd(), utils.MarkPreviewCommand())
fs.SortFlags = false
c.Use = c.Use + " " + groupsServiceCommandCreateUseSuffix c.Use = c.Use + " " + groupsServiceCommandCreateUseSuffix
c.Example = groupsServiceCommandCreateExamples c.Example = groupsServiceCommandCreateExamples
// Flags addition ordering should follow the order we want them to appear in help and docs: // Flags addition ordering should follow the order we want them to appear in help and docs:
flags.AddGroupFlag(c) flags.AddGroupFlag(c)
flags.AddDataFlag(c, []string{flags.DataLibraries, flags.DataMessages, flags.DataConversations}, false) flags.AddDataFlag(c, []string{flags.DataLibraries, flags.DataMessages}, false)
flags.AddFetchParallelismFlag(c) flags.AddFetchParallelismFlag(c)
flags.AddFailFastFlag(c)
flags.AddDisableDeltaFlag(c) flags.AddDisableDeltaFlag(c)
flags.AddGenericBackupFlags(c) flags.AddDisableIncrementalsFlag(c)
flags.AddDisableLazyItemReader(c) flags.AddForceItemDataDownloadFlag(c)
case listCommand: case listCommand:
c, _ = utils.AddCommand(cmd, groupsListCmd(), utils.MarkPreviewCommand()) c, fs = utils.AddCommand(cmd, groupsListCmd(), utils.MarkPreviewCommand())
fs.SortFlags = false
flags.AddBackupIDFlag(c, false) flags.AddBackupIDFlag(c, false)
flags.AddAllBackupListFlags(c) flags.AddAllBackupListFlags(c)
case detailsCommand: case detailsCommand:
c, _ = utils.AddCommand(cmd, groupsDetailsCmd(), utils.MarkPreviewCommand()) c, fs = utils.AddCommand(cmd, groupsDetailsCmd(), utils.MarkPreviewCommand())
fs.SortFlags = false
c.Use = c.Use + " " + groupsServiceCommandDetailsUseSuffix c.Use = c.Use + " " + groupsServiceCommandDetailsUseSuffix
c.Example = groupsServiceCommandDetailsExamples c.Example = groupsServiceCommandDetailsExamples
@ -99,7 +101,8 @@ func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
flags.AddSharePointDetailsAndRestoreFlags(c) flags.AddSharePointDetailsAndRestoreFlags(c)
case deleteCommand: case deleteCommand:
c, _ = utils.AddCommand(cmd, groupsDeleteCmd(), utils.MarkPreviewCommand()) c, fs = utils.AddCommand(cmd, groupsDeleteCmd(), utils.MarkPreviewCommand())
fs.SortFlags = false
c.Use = c.Use + " " + groupsServiceCommandDeleteUseSuffix c.Use = c.Use + " " + groupsServiceCommandDeleteUseSuffix
c.Example = groupsServiceCommandDeleteExamples c.Example = groupsServiceCommandDeleteExamples
@ -120,7 +123,7 @@ func groupsCreateCmd() *cobra.Command {
return &cobra.Command{ return &cobra.Command{
Use: groupsServiceCommand, Use: groupsServiceCommand,
Aliases: []string{teamsServiceCommand}, Aliases: []string{teamsServiceCommand},
Short: "Backup M365 Groups & Teams service data", Short: "Backup M365 Group service data",
RunE: createGroupsCmd, RunE: createGroupsCmd,
Args: cobra.NoArgs, Args: cobra.NoArgs,
} }
@ -155,12 +158,7 @@ func createGroupsCmd(cmd *cobra.Command, args []string) error {
// TODO: log/print recoverable errors // TODO: log/print recoverable errors
errs := fault.New(false) errs := fault.New(false)
svcCli, err := m365.NewM365Client(ctx, *acct) ins, err := m365.GroupsMap(ctx, *acct, errs)
if err != nil {
return Only(ctx, clues.Stack(err))
}
ins, err := svcCli.AC.Groups().GetAllIDsAndNames(ctx, errs)
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 groups")) return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 groups"))
} }
@ -289,16 +287,10 @@ func validateGroupsBackupCreateFlags(groups, cats []string) error {
flags.GroupFN + " *") flags.GroupFN + " *")
} }
// TODO(keepers): release conversations support
msg := fmt.Sprintf( msg := fmt.Sprintf(
" is an unrecognized data type; only %s and %s are supported", " is an unrecognized data type; only %s and %s are supported",
flags.DataLibraries, flags.DataMessages) flags.DataLibraries, flags.DataMessages)
// msg := fmt.Sprintf(
// " is an unrecognized data type; only %s, %s and %s are supported",
// flags.DataLibraries, flags.DataMessages, flags.DataConversations)
allowedCats := utils.GroupsAllowedCategories() allowedCats := utils.GroupsAllowedCategories()
for _, d := range cats { for _, d := range cats {
@ -316,7 +308,7 @@ func groupsBackupCreateSelectors(
group, cats []string, group, cats []string,
) *selectors.GroupsBackup { ) *selectors.GroupsBackup {
if filters.PathContains(group).Compare(flags.Wildcard) { if filters.PathContains(group).Compare(flags.Wildcard) {
return includeAllGroupsWithCategories(ins, cats) return includeAllGroupWithCategories(ins, cats)
} }
sel := selectors.NewGroupsBackup(slices.Clone(group)) sel := selectors.NewGroupsBackup(slices.Clone(group))
@ -324,6 +316,6 @@ func groupsBackupCreateSelectors(
return utils.AddGroupsCategories(sel, cats) return utils.AddGroupsCategories(sel, cats)
} }
func includeAllGroupsWithCategories(ins idname.Cacher, categories []string) *selectors.GroupsBackup { func includeAllGroupWithCategories(ins idname.Cacher, categories []string) *selectors.GroupsBackup {
return utils.AddGroupsCategories(selectors.NewGroupsBackup(ins.IDs()), categories) return utils.AddGroupsCategories(selectors.NewGroupsBackup(ins.IDs()), categories)
} }

View File

@ -14,21 +14,25 @@ import (
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata" cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata" selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata" storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
) )
var (
channelMessages = path.ChannelMessagesCategory
libraries = path.LibrariesCategory
)
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// tests that require no existing backups // tests that require no existing backups
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -36,7 +40,7 @@ import (
type NoBackupGroupsE2ESuite struct { type NoBackupGroupsE2ESuite struct {
tester.Suite tester.Suite
dpnd dependencies dpnd dependencies
m365 its.M365IntgTestSetup its intgTesterSetup
} }
func TestNoBackupGroupsE2ESuite(t *testing.T) { func TestNoBackupGroupsE2ESuite(t *testing.T) {
@ -51,7 +55,7 @@ func (suite *NoBackupGroupsE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
suite.m365 = its.GetM365(t) suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.GroupsService) suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
} }
@ -66,7 +70,7 @@ func (suite *NoBackupGroupsE2ESuite) TestGroupsBackupListCmd_noBackups() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "list", "groups", "backup", "list", "groups",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder) cmd.SetErr(&suite.dpnd.recorder)
@ -90,7 +94,7 @@ func (suite *NoBackupGroupsE2ESuite) TestGroupsBackupListCmd_noBackups() {
type BackupGroupsE2ESuite struct { type BackupGroupsE2ESuite struct {
tester.Suite tester.Suite
dpnd dependencies dpnd dependencies
m365 its.M365IntgTestSetup its intgTesterSetup
} }
func TestBackupGroupsE2ESuite(t *testing.T) { func TestBackupGroupsE2ESuite(t *testing.T) {
@ -105,22 +109,16 @@ func (suite *BackupGroupsE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
suite.m365 = its.GetM365(t) suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.GroupsService) suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
} }
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() { func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_channelMessages() {
runGroupsBackupCategoryTest(suite, flags.DataMessages) runGroupsBackupCategoryTest(suite, "messages")
}
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_conversations() {
// skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
runGroupsBackupCategoryTest(suite, flags.DataConversations)
} }
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_libraries() { func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_libraries() {
runGroupsBackupCategoryTest(suite, flags.DataLibraries) runGroupsBackupCategoryTest(suite, libraries.String())
} }
func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category string) { func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category string) {
@ -137,7 +135,7 @@ func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category string) {
cmd, ctx := buildGroupsBackupCmd( cmd, ctx := buildGroupsBackupCmd(
ctx, ctx,
suite.dpnd.configFilePath, suite.dpnd.configFilePath,
suite.m365.Group.ID, suite.its.group.ID,
category, category,
&recorder) &recorder)
@ -150,15 +148,11 @@ func runGroupsBackupCategoryTest(suite *BackupGroupsE2ESuite, category string) {
} }
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_channelMessages() { func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_channelMessages() {
runGroupsBackupGroupNotFoundTest(suite, flags.DataMessages) runGroupsBackupGroupNotFoundTest(suite, "messages")
}
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_conversations() {
runGroupsBackupGroupNotFoundTest(suite, flags.DataConversations)
} }
func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_libraries() { func (suite *BackupGroupsE2ESuite) TestGroupsBackupCmd_groupNotFound_libraries() {
runGroupsBackupGroupNotFoundTest(suite, flags.DataLibraries) runGroupsBackupGroupNotFoundTest(suite, libraries.String())
} }
func runGroupsBackupGroupNotFoundTest(suite *BackupGroupsE2ESuite, category string) { func runGroupsBackupGroupNotFoundTest(suite *BackupGroupsE2ESuite, category string) {
@ -185,8 +179,7 @@ func runGroupsBackupGroupNotFoundTest(suite *BackupGroupsE2ESuite, category stri
assert.Contains( assert.Contains(
t, t,
err.Error(), err.Error(),
"not found", "not found in tenant", "error missing group not found")
"error missing user not found")
assert.NotContains(t, err.Error(), "runtime error", "panic happened") assert.NotContains(t, err.Error(), "runtime error", "panic happened")
t.Logf("backup error message: %s", err.Error()) t.Logf("backup error message: %s", err.Error())
@ -205,7 +198,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "create", "groups", "backup", "create", "groups",
"--group", suite.m365.Group.ID, "--group", suite.its.group.ID,
"--azure-client-id", "invalid-value") "--azure-client-id", "invalid-value")
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -219,9 +212,6 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAzureClientIDFlag()
} }
func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() { func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
// Skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr) ctx = config.SetViper(ctx, suite.dpnd.vpr)
@ -232,8 +222,8 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_fromConfigFile() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "create", "groups", "backup", "create", "groups",
"--group", suite.m365.Group.ID, "--group", suite.its.group.ID,
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder) cmd.SetOut(&suite.dpnd.recorder)
@ -256,7 +246,7 @@ func (suite *BackupGroupsE2ESuite) TestBackupCreateGroups_badAWSFlags() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "create", "groups", "backup", "create", "groups",
"--group", suite.m365.Group.ID, "--group", suite.its.group.ID,
"--aws-access-key", "invalid-value", "--aws-access-key", "invalid-value",
"--aws-secret-access-key", "some-invalid-value") "--aws-secret-access-key", "some-invalid-value")
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -279,7 +269,7 @@ type PreparedBackupGroupsE2ESuite struct {
tester.Suite tester.Suite
dpnd dependencies dpnd dependencies
backupOps map[path.CategoryType]string backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup its intgTesterSetup
} }
func TestPreparedBackupGroupsE2ESuite(t *testing.T) { func TestPreparedBackupGroupsE2ESuite(t *testing.T) {
@ -296,37 +286,26 @@ func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
suite.m365 = its.GetM365(t) suite.its = newIntegrationTesterSetup(t)
suite.dpnd = prepM365Test(t, ctx, path.GroupsService) suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
suite.backupOps = make(map[path.CategoryType]string) suite.backupOps = make(map[path.CategoryType]string)
var ( var (
groups = []string{suite.m365.Group.ID} groups = []string{suite.its.group.ID}
ins = idname.NewCache(map[string]string{suite.m365.Group.ID: suite.m365.Group.ID}) ins = idname.NewCache(map[string]string{suite.its.group.ID: suite.its.group.ID})
cats = []path.CategoryType{
path.ChannelMessagesCategory,
// TODO(pandeyabs): CorsoCITeam group mailbox backup is currently broken because of invalid
// odata.NextLink which causes an infinite loop during paging. Disabling conversations tests while
// we go fix the group mailbox.
// path.ConversationPostsCategory,
path.LibrariesCategory,
}
) )
for _, set := range cats { for _, set := range []path.CategoryType{channelMessages, libraries} {
var ( var (
sel = selectors.NewGroupsBackup(groups) sel = selectors.NewGroupsBackup(groups)
scopes []selectors.GroupsScope scopes []selectors.GroupsScope
) )
switch set { switch set {
case path.ChannelMessagesCategory: case channelMessages:
scopes = selTD.GroupsBackupChannelScope(sel) scopes = selTD.GroupsBackupChannelScope(sel)
case path.ConversationPostsCategory: case libraries:
scopes = selTD.GroupsBackupConversationScope(sel)
case path.LibrariesCategory:
scopes = selTD.GroupsBackupLibraryFolderScope(sel) scopes = selTD.GroupsBackupLibraryFolderScope(sel)
} }
@ -355,15 +334,11 @@ func (suite *PreparedBackupGroupsE2ESuite) SetupSuite() {
} }
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_channelMessages() { func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_channelMessages() {
runGroupsListCmdTest(suite, path.ChannelMessagesCategory) runGroupsListCmdTest(suite, channelMessages)
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_conversations() {
runGroupsListCmdTest(suite, path.ConversationPostsCategory)
} }
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_libraries() { func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_libraries() {
runGroupsListCmdTest(suite, path.LibrariesCategory) runGroupsListCmdTest(suite, libraries)
} }
func runGroupsListCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) { func runGroupsListCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
@ -378,7 +353,7 @@ func runGroupsListCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.Cat
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "list", "groups", "backup", "list", "groups",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder) cmd.SetOut(&suite.dpnd.recorder)
@ -394,15 +369,11 @@ func runGroupsListCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.Cat
} }
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_channelMessages() { func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_channelMessages() {
runGroupsListSingleCmdTest(suite, path.ChannelMessagesCategory) runGroupsListSingleCmdTest(suite, channelMessages)
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_conversations() {
runGroupsListSingleCmdTest(suite, path.ConversationPostsCategory)
} }
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_libraries() { func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_singleID_libraries() {
runGroupsListSingleCmdTest(suite, path.LibrariesCategory) runGroupsListSingleCmdTest(suite, libraries)
} }
func runGroupsListSingleCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) { func runGroupsListSingleCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
@ -419,7 +390,7 @@ func runGroupsListSingleCmdTest(suite *PreparedBackupGroupsE2ESuite, category pa
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "list", "groups", "backup", "list", "groups",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--backup", string(bID)) "--backup", string(bID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -446,7 +417,7 @@ func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_badID() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "list", "groups", "backup", "list", "groups",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--backup", "smarfs") "--backup", "smarfs")
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -458,17 +429,11 @@ func (suite *PreparedBackupGroupsE2ESuite) TestGroupsListCmd_badID() {
} }
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages() { func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_channelMessages() {
runGroupsDetailsCmdTest(suite, path.ChannelMessagesCategory) runGroupsDetailsCmdTest(suite, channelMessages)
}
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_conversations() {
// skip
suite.T().Skip("CorsoCITeam group mailbox backup is broken")
runGroupsDetailsCmdTest(suite, path.ConversationPostsCategory)
} }
func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_libraries() { func (suite *PreparedBackupGroupsE2ESuite) TestGroupsDetailsCmd_libraries() {
runGroupsDetailsCmdTest(suite, path.LibrariesCategory) runGroupsDetailsCmdTest(suite, libraries)
} }
func runGroupsDetailsCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) { func runGroupsDetailsCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.CategoryType) {
@ -490,7 +455,7 @@ func runGroupsDetailsCmdTest(suite *PreparedBackupGroupsE2ESuite, category path.
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "details", "groups", "backup", "details", "groups",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupFN, string(bID)) "--"+flags.BackupFN, string(bID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder) cmd.SetOut(&suite.dpnd.recorder)
@ -552,7 +517,7 @@ func (suite *BackupDeleteGroupsE2ESuite) SetupSuite() {
suite.dpnd = prepM365Test(t, ctx, path.GroupsService) suite.dpnd = prepM365Test(t, ctx, path.GroupsService)
m365GroupID := tconfig.M365TeamID(t) m365GroupID := tconfig.M365GroupID(t)
groups := []string{m365GroupID} groups := []string{m365GroupID}
// some tests require an existing backup // some tests require an existing backup
@ -580,7 +545,7 @@ func (suite *BackupDeleteGroupsE2ESuite) TestGroupsBackupDeleteCmd() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "delete", "groups", "backup", "delete", "groups",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupIDsFN, "--"+flags.BackupIDsFN,
fmt.Sprintf("%s,%s", fmt.Sprintf("%s,%s",
string(suite.backupOps[0].Results.BackupID), string(suite.backupOps[0].Results.BackupID),
@ -594,7 +559,7 @@ func (suite *BackupDeleteGroupsE2ESuite) TestGroupsBackupDeleteCmd() {
// a follow-up details call should fail, due to the backup ID being deleted // a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd( cmd = cliTD.StubRootCmd(
"backup", "details", "groups", "backup", "details", "groups",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--backups", string(suite.backupOps[0].Results.BackupID)) "--backups", string(suite.backupOps[0].Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -612,7 +577,7 @@ func (suite *BackupDeleteGroupsE2ESuite) TestGroupsBackupDeleteCmd_SingleID() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "delete", "groups", "backup", "delete", "groups",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupFN, "--"+flags.BackupFN,
string(suite.backupOps[2].Results.BackupID)) string(suite.backupOps[2].Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -624,7 +589,7 @@ func (suite *BackupDeleteGroupsE2ESuite) TestGroupsBackupDeleteCmd_SingleID() {
// a follow-up details call should fail, due to the backup ID being deleted // a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd( cmd = cliTD.StubRootCmd(
"backup", "details", "groups", "backup", "details", "groups",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--backup", string(suite.backupOps[2].Results.BackupID)) "--backup", string(suite.backupOps[2].Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -642,7 +607,7 @@ func (suite *BackupDeleteGroupsE2ESuite) TestGroupsBackupDeleteCmd_UnknownID() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "delete", "groups", "backup", "delete", "groups",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupIDsFN, uuid.NewString()) "--"+flags.BackupIDsFN, uuid.NewString())
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -661,7 +626,7 @@ func (suite *BackupDeleteGroupsE2ESuite) TestGroupsBackupDeleteCmd_NoBackupID()
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "delete", "groups", "backup", "delete", "groups",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// empty backupIDs should error since no data provided // empty backupIDs should error since no data provided
@ -680,7 +645,7 @@ func buildGroupsBackupCmd(
) (*cobra.Command, context.Context) { ) (*cobra.Command, context.Context) {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "create", "groups", "backup", "create", "groups",
"--"+flags.ConfigFileFN, configFile, "--config-file", configFile,
"--"+flags.GroupFN, group, "--"+flags.GroupFN, group,
"--"+flags.CategoryDataFN, category) "--"+flags.CategoryDataFN, category)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)

View File

@ -107,17 +107,8 @@ func (suite *GroupsUnitSuite) TestValidateGroupsBackupCreateFlags() {
expect: assert.NoError, expect: assert.NoError,
}, },
{ {
name: "conversations", name: "all allowed",
cats: []string{flags.DataConversations}, cats: []string{flags.DataLibraries, flags.DataMessages},
expect: assert.NoError,
},
{
name: "all allowed",
cats: []string{
flags.DataLibraries,
flags.DataMessages,
flags.DataConversations,
},
expect: assert.NoError, expect: assert.NoError,
}, },
{ {
@ -152,35 +143,23 @@ func (suite *GroupsUnitSuite) TestBackupCreateFlags() {
"--" + flags.GroupFN, flagsTD.FlgInputs(flagsTD.GroupsInput), "--" + flags.GroupFN, flagsTD.FlgInputs(flagsTD.GroupsInput),
"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.GroupsCategoryDataInput), "--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.GroupsCategoryDataInput),
"--" + flags.FetchParallelismFN, flagsTD.FetchParallelism, "--" + flags.FetchParallelismFN, flagsTD.FetchParallelism,
"--" + flags.FailFastFN,
"--" + flags.DisableIncrementalsFN,
"--" + flags.ForceItemDataDownloadFN,
"--" + flags.DisableDeltaFN, "--" + flags.DisableDeltaFN,
"--" + flags.DisableLazyItemReaderFN,
}, },
flagsTD.PreparedGenericBackupFlags(),
flagsTD.PreparedProviderFlags(), flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags())) flagsTD.PreparedStorageFlags()))
opts := utils.MakeGroupsOpts(cmd) opts := utils.MakeGroupsOpts(cmd)
co := utils.Control() co := utils.Control()
backupOpts := utils.ParseBackupOptions()
// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
// restore flags are switched over too and we no longer parse flags beyond
// connection info into control.Options.
assert.Equal(t, flagsTD.FetchParallelism, strconv.Itoa(backupOpts.Parallelism.ItemFetch))
assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)
assert.True(t, backupOpts.M365.DisableDeltaEndpoint)
assert.ElementsMatch(t, flagsTD.GroupsInput, opts.Groups)
assert.Equal(t, flagsTD.FetchParallelism, strconv.Itoa(co.Parallelism.ItemFetch)) assert.Equal(t, flagsTD.FetchParallelism, strconv.Itoa(co.Parallelism.ItemFetch))
assert.Equal(t, control.FailFast, co.FailureHandling) assert.Equal(t, control.FailFast, co.FailureHandling)
assert.True(t, co.ToggleFeatures.DisableIncrementals) assert.True(t, co.ToggleFeatures.DisableIncrementals)
assert.True(t, co.ToggleFeatures.ForceItemDataDownload) assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
assert.True(t, co.ToggleFeatures.DisableDelta) assert.True(t, co.ToggleFeatures.DisableDelta)
assert.True(t, co.ToggleFeatures.DisableLazyItemReader)
assert.ElementsMatch(t, flagsTD.GroupsInput, opts.Groups)
flagsTD.AssertGenericBackupFlags(t, cmd)
flagsTD.AssertProviderFlags(t, cmd) flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd) flagsTD.AssertStorageFlags(t, cmd)
} }
@ -230,11 +209,8 @@ func (suite *GroupsUnitSuite) TestBackupDetailsFlags() {
"--" + flags.BackupFN, flagsTD.BackupInput, "--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.SkipReduceFN, "--" + flags.SkipReduceFN,
}, },
flagsTD.PreparedChannelFlags(),
flagsTD.PreparedConversationFlags(),
flagsTD.PreparedProviderFlags(), flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags(), flagsTD.PreparedStorageFlags()))
flagsTD.PreparedLibraryFlags()))
co := utils.Control() co := utils.Control()
@ -242,9 +218,6 @@ func (suite *GroupsUnitSuite) TestBackupDetailsFlags() {
assert.True(t, co.SkipReduce) assert.True(t, co.SkipReduce)
flagsTD.AssertProviderFlags(t, cmd) flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd) flagsTD.AssertStorageFlags(t, cmd)
flagsTD.AssertChannelFlags(t, cmd)
flagsTD.AssertConversationFlags(t, cmd)
flagsTD.AssertLibraryFlags(t, cmd)
} }
func (suite *GroupsUnitSuite) TestBackupDeleteFlags() { func (suite *GroupsUnitSuite) TestBackupDeleteFlags() {

View File

@ -11,19 +11,117 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata" cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository" "github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/mock"
"github.com/alcionai/corso/src/pkg/storage" "github.com/alcionai/corso/src/pkg/storage"
"github.com/alcionai/corso/src/pkg/storage/testdata" "github.com/alcionai/corso/src/pkg/storage/testdata"
) )
// ---------------------------------------------------------------------------
// Suite Setup
// ---------------------------------------------------------------------------
type ids struct {
ID string
DriveID string
DriveRootFolderID string
}
type intgTesterSetup struct {
acct account.Account
ac api.Client
gockAC api.Client
user ids
site ids
group ids
team ids
}
func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
its := intgTesterSetup{}
ctx, flush := tester.NewContext(t)
defer flush()
graph.InitializeConcurrencyLimiter(ctx, true, 4)
its.acct = tconfig.NewM365Account(t)
creds, err := its.acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
its.ac, err = api.NewClient(creds, control.DefaultOptions())
require.NoError(t, err, clues.ToCore(err))
its.gockAC, err = mock.NewClient(creds)
require.NoError(t, err, clues.ToCore(err))
// user drive
uids := ids{}
uids.ID = tconfig.M365UserID(t)
userDrive, err := its.ac.Users().GetDefaultDrive(ctx, uids.ID)
require.NoError(t, err, clues.ToCore(err))
uids.DriveID = ptr.Val(userDrive.GetId())
userDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, uids.DriveID)
require.NoError(t, err, clues.ToCore(err))
uids.DriveRootFolderID = ptr.Val(userDriveRootFolder.GetId())
its.user = uids
// site
sids := ids{}
sids.ID = tconfig.M365SiteID(t)
siteDrive, err := its.ac.Sites().GetDefaultDrive(ctx, sids.ID)
require.NoError(t, err, clues.ToCore(err))
sids.DriveID = ptr.Val(siteDrive.GetId())
siteDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, sids.DriveID)
require.NoError(t, err, clues.ToCore(err))
sids.DriveRootFolderID = ptr.Val(siteDriveRootFolder.GetId())
its.site = sids
// group
gids := ids{}
// use of the TeamID is intentional here, so that we are assured
// the group has full usage of the teams api.
gids.ID = tconfig.M365TeamID(t)
its.group = gids
// team
tids := ids{}
tids.ID = tconfig.M365TeamID(t)
its.team = tids
return its
}
type dependencies struct { type dependencies struct {
st storage.Storage st storage.Storage
repo repository.Repositoryer repo repository.Repositoryer
@ -88,7 +186,7 @@ func buildExchangeBackupCmd(
) (*cobra.Command, context.Context) { ) (*cobra.Command, context.Context) {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "create", "exchange", "backup", "create", "exchange",
"--"+flags.ConfigFileFN, configFile, "--config-file", configFile,
"--"+flags.UserFN, user, "--"+flags.UserFN, user,
"--"+flags.CategoryDataFN, category) "--"+flags.CategoryDataFN, category)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)

View File

@ -60,27 +60,26 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
switch cmd.Use { switch cmd.Use {
case createCommand: case createCommand:
c, fs = utils.AddCommand(cmd, oneDriveCreateCmd()) c, fs = utils.AddCommand(cmd, oneDriveCreateCmd())
fs.SortFlags = false
c.Use = c.Use + " " + oneDriveServiceCommandCreateUseSuffix c.Use = c.Use + " " + oneDriveServiceCommandCreateUseSuffix
c.Example = oneDriveServiceCommandCreateExamples c.Example = oneDriveServiceCommandCreateExamples
flags.AddUserFlag(c) flags.AddUserFlag(c)
flags.AddGenericBackupFlags(c) flags.AddFailFastFlag(c)
fs.BoolVar( flags.AddDisableIncrementalsFlag(c)
&flags.UseOldDeltaProcessFV, flags.AddForceItemDataDownloadFlag(c)
flags.UseOldDeltaProcessFN,
false,
"process backups using the old delta processor instead of tree-based enumeration")
cobra.CheckErr(fs.MarkHidden(flags.UseOldDeltaProcessFN))
case listCommand: case listCommand:
c, _ = utils.AddCommand(cmd, oneDriveListCmd()) c, fs = utils.AddCommand(cmd, oneDriveListCmd())
fs.SortFlags = false
flags.AddBackupIDFlag(c, false) flags.AddBackupIDFlag(c, false)
flags.AddAllBackupListFlags(c) flags.AddAllBackupListFlags(c)
case detailsCommand: case detailsCommand:
c, _ = utils.AddCommand(cmd, oneDriveDetailsCmd()) c, fs = utils.AddCommand(cmd, oneDriveDetailsCmd())
fs.SortFlags = false
c.Use = c.Use + " " + oneDriveServiceCommandDetailsUseSuffix c.Use = c.Use + " " + oneDriveServiceCommandDetailsUseSuffix
c.Example = oneDriveServiceCommandDetailsExamples c.Example = oneDriveServiceCommandDetailsExamples
@ -90,7 +89,8 @@ func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
flags.AddOneDriveDetailsAndRestoreFlags(c) flags.AddOneDriveDetailsAndRestoreFlags(c)
case deleteCommand: case deleteCommand:
c, _ = utils.AddCommand(cmd, oneDriveDeleteCmd()) c, fs = utils.AddCommand(cmd, oneDriveDeleteCmd())
fs.SortFlags = false
c.Use = c.Use + " " + oneDriveServiceCommandDeleteUseSuffix c.Use = c.Use + " " + oneDriveServiceCommandDeleteUseSuffix
c.Example = oneDriveServiceCommandDeleteExamples c.Example = oneDriveServiceCommandDeleteExamples
@ -145,12 +145,7 @@ func createOneDriveCmd(cmd *cobra.Command, args []string) error {
sel := oneDriveBackupCreateSelectors(flags.UserFV) sel := oneDriveBackupCreateSelectors(flags.UserFV)
ins, err := utils.UsersMap( ins, err := utils.UsersMap(ctx, *acct, utils.Control(), fault.New(true))
ctx,
*acct,
utils.Control(),
r.Counter(),
fault.New(true))
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 users")) return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 users"))
} }

View File

@ -12,6 +12,7 @@ import (
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata" cliTD "github.com/alcionai/corso/src/cli/testdata"
@ -19,7 +20,6 @@ import (
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata" selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
@ -64,7 +64,7 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupListCmd_empty() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "list", "onedrive", "backup", "list", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder) cmd.SetErr(&suite.dpnd.recorder)
@ -93,8 +93,8 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupCmd_userNotInTenant() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "create", "onedrive", "backup", "create", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--"+flags.UserFN, "foo@not-there.com") "--"+flags.UserFN, "foo@nothere.com")
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetOut(&recorder) cmd.SetOut(&recorder)
@ -107,8 +107,7 @@ func (suite *NoBackupOneDriveE2ESuite) TestOneDriveBackupCmd_userNotInTenant() {
assert.Contains( assert.Contains(
t, t,
err.Error(), err.Error(),
"not found", "not found in tenant", "error missing user not found")
"error missing user not found")
assert.NotContains(t, err.Error(), "runtime error", "panic happened") assert.NotContains(t, err.Error(), "runtime error", "panic happened")
t.Logf("backup error message: %s", err.Error()) t.Logf("backup error message: %s", err.Error())
@ -176,7 +175,7 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "delete", "onedrive", "backup", "delete", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupIDsFN, "--"+flags.BackupIDsFN,
fmt.Sprintf("%s,%s", fmt.Sprintf("%s,%s",
string(suite.backupOps[0].Results.BackupID), string(suite.backupOps[0].Results.BackupID),
@ -201,7 +200,7 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd() {
// a follow-up details call should fail, due to the backup ID being deleted // a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd( cmd = cliTD.StubRootCmd(
"backup", "details", "onedrive", "backup", "details", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--backups", string(suite.backupOps[0].Results.BackupID)) "--backups", string(suite.backupOps[0].Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -221,7 +220,7 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_SingleID(
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "delete", "onedrive", "backup", "delete", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupFN, "--"+flags.BackupFN,
string(suite.backupOps[2].Results.BackupID)) string(suite.backupOps[2].Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -243,7 +242,7 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_SingleID(
// a follow-up details call should fail, due to the backup ID being deleted // a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd( cmd = cliTD.StubRootCmd(
"backup", "details", "onedrive", "backup", "details", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--backup", string(suite.backupOps[0].Results.BackupID)) "--backup", string(suite.backupOps[0].Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -261,7 +260,7 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_unknownID
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "delete", "onedrive", "backup", "delete", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupIDsFN, uuid.NewString()) "--"+flags.BackupIDsFN, uuid.NewString())
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -280,7 +279,7 @@ func (suite *BackupDeleteOneDriveE2ESuite) TestOneDriveBackupDeleteCmd_NoBackupI
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "delete", "onedrive", "backup", "delete", "onedrive",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// empty backupIDs should error since no data provided // empty backupIDs should error since no data provided

View File

@ -101,28 +101,20 @@ func (suite *OneDriveUnitSuite) TestBackupCreateFlags() {
[]string{ []string{
"--" + flags.RunModeFN, flags.RunModeFlagTest, "--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.UserFN, flagsTD.FlgInputs(flagsTD.UsersInput), "--" + flags.UserFN, flagsTD.FlgInputs(flagsTD.UsersInput),
"--" + flags.FailFastFN,
"--" + flags.DisableIncrementalsFN,
"--" + flags.ForceItemDataDownloadFN,
}, },
flagsTD.PreparedGenericBackupFlags(),
flagsTD.PreparedProviderFlags(), flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags())) flagsTD.PreparedStorageFlags()))
opts := utils.MakeOneDriveOpts(cmd) opts := utils.MakeOneDriveOpts(cmd)
co := utils.Control() co := utils.Control()
backupOpts := utils.ParseBackupOptions()
// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
// restore flags are switched over too and we no longer parse flags beyond
// connection info into control.Options.
assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)
assert.ElementsMatch(t, flagsTD.UsersInput, opts.Users)
assert.Equal(t, control.FailFast, co.FailureHandling) assert.Equal(t, control.FailFast, co.FailureHandling)
assert.True(t, co.ToggleFeatures.DisableIncrementals) assert.True(t, co.ToggleFeatures.DisableIncrementals)
assert.True(t, co.ToggleFeatures.ForceItemDataDownload) assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
assert.ElementsMatch(t, flagsTD.UsersInput, opts.Users)
flagsTD.AssertGenericBackupFlags(t, cmd)
flagsTD.AssertProviderFlags(t, cmd) flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd) flagsTD.AssertStorageFlags(t, cmd)
} }

View File

@ -5,6 +5,7 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
@ -37,11 +38,7 @@ corso backup create sharepoint --site https://example.com/hr
corso backup create sharepoint --site https://example.com/hr,https://example.com/team corso backup create sharepoint --site https://example.com/hr,https://example.com/team
# Backup all SharePoint data for all Sites # Backup all SharePoint data for all Sites
corso backup create sharepoint --site '*' corso backup create sharepoint --site '*'`
# Backup all SharePoint list data for a Site
corso backup create sharepoint --site https://example.com/hr --data lists
`
sharePointServiceCommandDeleteExamples = `# Delete SharePoint backup with ID 1234abcd-12ab-cd34-56de-1234abcd \ sharePointServiceCommandDeleteExamples = `# Delete SharePoint backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
and 1234abcd-12ab-cd34-56de-1234abce and 1234abcd-12ab-cd34-56de-1234abce
@ -61,54 +58,41 @@ corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
# Explore all files within the document library "Work Documents" # Explore all files within the document library "Work Documents"
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \ corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--library "Work Documents" --library "Work Documents"
`
# Explore lists by their name(s)
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list "list-name-1,list-name-2"
# Explore lists created after a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-after 2024-01-01T12:23:34
# Explore lists created before a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-before 2024-01-01T12:23:34
# Explore lists modified before a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-before 2024-01-01T12:23:34
# Explore lists modified after a given time
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-after 2024-01-01T12:23:34`
) )
// called by backup.go to map subcommands to provider-specific handling. // called by backup.go to map subcommands to provider-specific handling.
func addSharePointCommands(cmd *cobra.Command) *cobra.Command { func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use { switch cmd.Use {
case createCommand: case createCommand:
c, _ = utils.AddCommand(cmd, sharePointCreateCmd()) c, fs = utils.AddCommand(cmd, sharePointCreateCmd())
fs.SortFlags = false
c.Use = c.Use + " " + sharePointServiceCommandCreateUseSuffix c.Use = c.Use + " " + sharePointServiceCommandCreateUseSuffix
c.Example = sharePointServiceCommandCreateExamples c.Example = sharePointServiceCommandCreateExamples
flags.AddSiteFlag(c, true) flags.AddSiteFlag(c)
flags.AddSiteIDFlag(c, true) flags.AddSiteIDFlag(c)
// [TODO](hitesh) to add lists flag to invoke backup for lists
// when explicit invoke is not required anymore
flags.AddDataFlag(c, []string{flags.DataLibraries}, true) flags.AddDataFlag(c, []string{flags.DataLibraries}, true)
flags.AddGenericBackupFlags(c) flags.AddFailFastFlag(c)
flags.AddDisableIncrementalsFlag(c)
flags.AddForceItemDataDownloadFlag(c)
case listCommand: case listCommand:
c, _ = utils.AddCommand(cmd, sharePointListCmd()) c, fs = utils.AddCommand(cmd, sharePointListCmd())
fs.SortFlags = false
flags.AddBackupIDFlag(c, false) flags.AddBackupIDFlag(c, false)
flags.AddAllBackupListFlags(c) flags.AddAllBackupListFlags(c)
case detailsCommand: case detailsCommand:
c, _ = utils.AddCommand(cmd, sharePointDetailsCmd()) c, fs = utils.AddCommand(cmd, sharePointDetailsCmd())
fs.SortFlags = false
c.Use = c.Use + " " + sharePointServiceCommandDetailsUseSuffix c.Use = c.Use + " " + sharePointServiceCommandDetailsUseSuffix
c.Example = sharePointServiceCommandDetailsExamples c.Example = sharePointServiceCommandDetailsExamples
@ -118,7 +102,8 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
flags.AddSharePointDetailsAndRestoreFlags(c) flags.AddSharePointDetailsAndRestoreFlags(c)
case deleteCommand: case deleteCommand:
c, _ = utils.AddCommand(cmd, sharePointDeleteCmd()) c, fs = utils.AddCommand(cmd, sharePointDeleteCmd())
fs.SortFlags = false
c.Use = c.Use + " " + sharePointServiceCommandDeleteUseSuffix c.Use = c.Use + " " + sharePointServiceCommandDeleteUseSuffix
c.Example = sharePointServiceCommandDeleteExamples c.Example = sharePointServiceCommandDeleteExamples
@ -174,12 +159,7 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
// TODO: log/print recoverable errors // TODO: log/print recoverable errors
errs := fault.New(false) errs := fault.New(false)
svcCli, err := m365.NewM365Client(ctx, *acct) ins, err := m365.SitesMap(ctx, *acct, errs)
if err != nil {
return Only(ctx, clues.Stack(err))
}
ins, err := svcCli.SitesMap(ctx, errs)
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 sites")) return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 sites"))
} }
@ -211,12 +191,10 @@ func validateSharePointBackupCreateFlags(sites, weburls, cats []string) error {
flags.SiteFN + " *") flags.SiteFN + " *")
} }
allowedCats := utils.SharePointAllowedCategories()
for _, d := range cats { for _, d := range cats {
if _, ok := allowedCats[d]; !ok { if d != flags.DataLibraries && d != flags.DataPages {
return clues.New( return clues.New(
d + " is an unrecognized data type; only " + flags.DataLibraries + " supported") d + " is an unrecognized data type; either " + flags.DataLibraries + "or " + flags.DataPages)
} }
} }
@ -243,11 +221,29 @@ func sharePointBackupCreateSelectors(
sel := selectors.NewSharePointBackup(append(slices.Clone(sites), weburls...)) sel := selectors.NewSharePointBackup(append(slices.Clone(sites), weburls...))
return utils.AddCategories(sel, cats), nil return addCategories(sel, cats), nil
} }
func includeAllSitesWithCategories(ins idname.Cacher, categories []string) *selectors.SharePointBackup { func includeAllSitesWithCategories(ins idname.Cacher, categories []string) *selectors.SharePointBackup {
return utils.AddCategories(selectors.NewSharePointBackup(ins.IDs()), categories) return addCategories(selectors.NewSharePointBackup(ins.IDs()), categories)
}
func addCategories(sel *selectors.SharePointBackup, cats []string) *selectors.SharePointBackup {
// Issue #2631: Libraries are the only supported feature for SharePoint at this time.
if len(cats) == 0 {
sel.Include(sel.LibraryFolders(selectors.Any()))
}
for _, d := range cats {
switch d {
case flags.DataLibraries:
sel.Include(sel.LibraryFolders(selectors.Any()))
case flags.DataPages:
sel.Include(sel.Pages(selectors.Any()))
}
}
return sel
} }
// ------------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------------

View File

@ -1,29 +1,25 @@
package backup_test package backup_test
import ( import (
"context"
"fmt" "fmt"
"strings" "strings"
"testing" "testing"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata" cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/selectors/testdata" "github.com/alcionai/corso/src/pkg/selectors/testdata"
@ -31,7 +27,7 @@ import (
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// tests that require no existing backups // tests with no prior backup
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
type NoBackupSharePointE2ESuite struct { type NoBackupSharePointE2ESuite struct {
@ -66,7 +62,7 @@ func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "list", "sharepoint", "backup", "list", "sharepoint",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder) cmd.SetErr(&suite.dpnd.recorder)
@ -83,297 +79,6 @@ func (suite *NoBackupSharePointE2ESuite) TestSharePointBackupListCmd_empty() {
assert.True(t, strings.HasSuffix(result, "No backups available\n")) assert.True(t, strings.HasSuffix(result, "No backups available\n"))
} }
// ---------------------------------------------------------------------------
// tests with no prior backup
// ---------------------------------------------------------------------------
type BackupSharepointE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
}
func TestBackupSharepointE2ESuite(t *testing.T) {
suite.Run(t, &BackupSharepointE2ESuite{Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
func (suite *BackupSharepointE2ESuite) SetupSuite() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
}
func (suite *BackupSharepointE2ESuite) TestSharepointBackupCmd_lists() {
// Issue: https://github.com/alcionai/corso/issues/4754
suite.T().Skip("unskip when sharepoint lists support is enabled")
runSharepointBackupCategoryTest(suite, flags.DataLists)
}
func runSharepointBackupCategoryTest(suite *BackupSharepointE2ESuite, category string) {
recorder := strings.Builder{}
recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd, ctx := buildSharepointBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.m365.Site.ID,
category,
&recorder)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := recorder.String()
t.Log("backup results", result)
}
func (suite *BackupSharepointE2ESuite) TestSharepointBackupCmd_siteNotFound_lists() {
// Issue: https://github.com/alcionai/corso/issues/4754
suite.T().Skip("un-skip test when lists support is enabled")
runSharepointBackupSiteNotFoundTest(suite, flags.DataLists)
}
func runSharepointBackupSiteNotFoundTest(suite *BackupSharepointE2ESuite, category string) {
recorder := strings.Builder{}
recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd, ctx := buildSharepointBackupCmd(
ctx,
suite.dpnd.configFilePath,
uuid.NewString(),
category,
&recorder)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
assert.Contains(
t,
err.Error(),
"Invalid hostname for this tenancy", "error missing site not found")
assert.NotContains(t, err.Error(), "runtime error", "panic happened")
t.Logf("backup error message: %s", err.Error())
result := recorder.String()
t.Log("backup results", result)
}
// ---------------------------------------------------------------------------
// tests prepared with a previous backup
// ---------------------------------------------------------------------------
type PreparedBackupSharepointE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup
}
func TestPreparedBackupSharepointE2ESuite(t *testing.T) {
suite.Run(t, &PreparedBackupSharepointE2ESuite{
Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
func (suite *PreparedBackupSharepointE2ESuite) SetupSuite() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.dpnd = prepM365Test(t, ctx, path.SharePointService)
suite.backupOps = make(map[path.CategoryType]string)
var (
sites = []string{suite.m365.Site.ID}
ins = idname.NewCache(map[string]string{suite.m365.Site.ID: suite.m365.Site.ID})
cats = []path.CategoryType{
path.ListsCategory,
}
)
for _, set := range cats {
var (
sel = selectors.NewSharePointBackup(sites)
scopes []selectors.SharePointScope
)
switch set {
case path.ListsCategory:
scopes = testdata.SharePointBackupListsScope(sel)
}
sel.Include(scopes)
bop, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
require.NoError(t, err, clues.ToCore(err))
err = bop.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
bIDs := string(bop.Results.BackupID)
// sanity check, ensure we can find the backup and its details immediately
b, err := suite.dpnd.repo.Backup(ctx, string(bop.Results.BackupID))
require.NoError(t, err, "retrieving recent backup by ID")
require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")
_, b, errs := suite.dpnd.repo.GetBackupDetails(ctx, bIDs)
require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
require.Equal(t, bIDs, string(b.ID), "repo details matches results id")
suite.backupOps[set] = string(b.ID)
}
}
func (suite *PreparedBackupSharepointE2ESuite) TestSharepointListCmd_lists() {
runSharepointListCmdTest(suite, path.ListsCategory)
}
func runSharepointListCmdTest(suite *PreparedBackupSharepointE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "list", "sharepoint",
"--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
assert.Contains(t, result, suite.backupOps[category])
t.Log("backup results", result)
}
func (suite *PreparedBackupSharepointE2ESuite) TestSharepointListCmd_badID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "list", "sharepoint",
"--config-file", suite.dpnd.configFilePath,
"--backup", uuid.NewString())
cli.BuildCommandTree(cmd)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *PreparedBackupSharepointE2ESuite) TestSharepointDetailsCmd_lists() {
runSharepointDetailsCmdTest(suite, path.ListsCategory)
}
func runSharepointDetailsCmdTest(suite *PreparedBackupSharepointE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
bID := suite.backupOps[category]
// fetch the details from the repo first
deets, _, errs := suite.dpnd.repo.GetBackupDetails(ctx, string(bID))
require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
require.Empty(t, errs.Recovered())
cmd := cliTD.StubRootCmd(
"backup", "details", "sharepoint",
"--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupFN, string(bID))
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
i := 0
findings := make(map[path.CategoryType]int)
incrementor := func(cond bool, cat path.CategoryType) {
if cond {
findings[cat]++
}
}
for _, ent := range deets.Entries {
if ent.SharePoint == nil {
continue
}
isSharePointList := ent.SharePoint.ItemType == details.SharePointList
hasListName := isSharePointList && len(ent.SharePoint.List.Name) > 0
hasItemName := !isSharePointList && len(ent.SharePoint.ItemName) > 0
incrementor(hasListName, category)
incrementor(hasItemName, category)
suite.Run(fmt.Sprintf("detail %d", i), func() {
assert.Contains(suite.T(), result, ent.ShortRef)
})
i++
}
assert.GreaterOrEqual(t, findings[category], 1)
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// tests for deleting backups // tests for deleting backups
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -441,7 +146,7 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "delete", "sharepoint", "backup", "delete", "sharepoint",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupIDsFN, "--"+flags.BackupIDsFN,
fmt.Sprintf("%s,%s", fmt.Sprintf("%s,%s",
string(suite.backupOp.Results.BackupID), string(suite.backupOp.Results.BackupID),
@ -468,7 +173,7 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd() {
// // a follow-up details call should fail, due to the backup ID being deleted // // a follow-up details call should fail, due to the backup ID being deleted
// cmd = cliTD.StubRootCmd( // cmd = cliTD.StubRootCmd(
// "backup", "details", "sharepoint", // "backup", "details", "sharepoint",
// "--"+flags.ConfigFileFN, suite.cfgFP, // "--config-file", suite.cfgFP,
// "--backup", string(suite.backupOp.Results.BackupID)) // "--backup", string(suite.backupOp.Results.BackupID))
// cli.BuildCommandTree(cmd) // cli.BuildCommandTree(cmd)
@ -485,7 +190,7 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd_unkno
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "delete", "sharepoint", "backup", "delete", "sharepoint",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath, "--config-file", suite.dpnd.configFilePath,
"--"+flags.BackupIDsFN, uuid.NewString()) "--"+flags.BackupIDsFN, uuid.NewString())
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -504,30 +209,10 @@ func (suite *BackupDeleteSharePointE2ESuite) TestSharePointBackupDeleteCmd_NoBac
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"backup", "delete", "groups", "backup", "delete", "groups",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath) "--config-file", suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// empty backupIDs should error since no data provided // empty backupIDs should error since no data provided
err := cmd.ExecuteContext(ctx) err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err)) require.Error(t, err, clues.ToCore(err))
} }
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
func buildSharepointBackupCmd(
ctx context.Context,
configFile, site, category string,
recorder *strings.Builder,
) (*cobra.Command, context.Context) {
cmd := cliTD.StubRootCmd(
"backup", "create", "sharepoint",
"--config-file", configFile,
"--"+flags.SiteIDFN, site,
"--"+flags.CategoryDataFN, category)
cli.BuildCommandTree(cmd)
cmd.SetOut(recorder)
return cmd, print.SetRootCmd(ctx, cmd)
}

View File

@ -105,29 +105,21 @@ func (suite *SharePointUnitSuite) TestBackupCreateFlags() {
"--" + flags.SiteIDFN, flagsTD.FlgInputs(flagsTD.SiteIDInput), "--" + flags.SiteIDFN, flagsTD.FlgInputs(flagsTD.SiteIDInput),
"--" + flags.SiteFN, flagsTD.FlgInputs(flagsTD.WebURLInput), "--" + flags.SiteFN, flagsTD.FlgInputs(flagsTD.WebURLInput),
"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.SharepointCategoryDataInput), "--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.SharepointCategoryDataInput),
"--" + flags.FailFastFN,
"--" + flags.DisableIncrementalsFN,
"--" + flags.ForceItemDataDownloadFN,
}, },
flagsTD.PreparedGenericBackupFlags(),
flagsTD.PreparedProviderFlags(), flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags())) flagsTD.PreparedStorageFlags()))
opts := utils.MakeSharePointOpts(cmd) opts := utils.MakeSharePointOpts(cmd)
co := utils.Control() co := utils.Control()
backupOpts := utils.ParseBackupOptions()
// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
// restore flags are switched over too and we no longer parse flags beyond
// connection info into control.Options.
assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)
assert.Equal(t, control.FailFast, co.FailureHandling)
assert.True(t, co.ToggleFeatures.DisableIncrementals)
assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
assert.ElementsMatch(t, []string{strings.Join(flagsTD.SiteIDInput, ",")}, opts.SiteID) assert.ElementsMatch(t, []string{strings.Join(flagsTD.SiteIDInput, ",")}, opts.SiteID)
assert.ElementsMatch(t, flagsTD.WebURLInput, opts.WebURL) assert.ElementsMatch(t, flagsTD.WebURLInput, opts.WebURL)
flagsTD.AssertGenericBackupFlags(t, cmd) assert.Equal(t, control.FailFast, co.FailureHandling)
assert.True(t, co.ToggleFeatures.DisableIncrementals)
assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
flagsTD.AssertProviderFlags(t, cmd) flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd) flagsTD.AssertStorageFlags(t, cmd)
} }
@ -218,7 +210,6 @@ func (suite *SharePointUnitSuite) TestValidateSharePointBackupCreateFlags() {
name string name string
site []string site []string
weburl []string weburl []string
cats []string
expect assert.ErrorAssertionFunc expect assert.ErrorAssertionFunc
}{ }{
{ {
@ -226,61 +217,25 @@ func (suite *SharePointUnitSuite) TestValidateSharePointBackupCreateFlags() {
expect: assert.Error, expect: assert.Error,
}, },
{ {
name: "sites but no category", name: "sites",
site: []string{"smarf"}, site: []string{"smarf"},
expect: assert.NoError, expect: assert.NoError,
}, },
{ {
name: "web urls but no category", name: "urls",
weburl: []string{"fnord"}, weburl: []string{"fnord"},
expect: assert.NoError, expect: assert.NoError,
}, },
{ {
name: "both web urls and sites but no category", name: "both",
site: []string{"smarf"}, site: []string{"smarf"},
weburl: []string{"fnord"}, weburl: []string{"fnord"},
expect: assert.NoError, expect: assert.NoError,
}, },
{
name: "site with libraries category",
site: []string{"smarf"},
cats: []string{flags.DataLibraries},
expect: assert.NoError,
},
{
name: "site with invalid category",
site: []string{"smarf"},
cats: []string{"invalid category"},
expect: assert.Error,
},
{
name: "site with lists category",
site: []string{"smarf"},
cats: []string{flags.DataLists},
expect: assert.NoError,
},
// [TODO]: Uncomment when pages are enabled
// {
// name: "site with pages category",
// site: []string{"smarf"},
// cats: []string{flags.DataPages},
// expect: assert.NoError,
// },
// [TODO]: Uncomment when pages & lists are enabled
// {
// name: "site with all categories",
// site: []string{"smarf"},
// cats: []string{flags.DataLists, flags.DataPages, flags.DataLibraries},
// expect: assert.NoError,
// },
} }
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
err := validateSharePointBackupCreateFlags(test.site, test.weburl, test.cats) err := validateSharePointBackupCreateFlags(test.site, test.weburl, nil)
test.expect(suite.T(), err, clues.ToCore(err)) test.expect(suite.T(), err, clues.ToCore(err))
}) })
} }
@ -366,12 +321,6 @@ func (suite *SharePointUnitSuite) TestSharePointBackupCreateSelectors() {
data: []string{flags.DataPages}, data: []string{flags.DataPages},
expect: bothIDs, expect: bothIDs,
}, },
{
name: "Lists",
site: bothIDs,
data: []string{flags.DataLists},
expect: bothIDs,
},
} }
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {

View File

@ -1,305 +0,0 @@
package backup
import (
"context"
"fmt"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365"
)
// ------------------------------------------------------------------------------------------------
// setup and globals
// ------------------------------------------------------------------------------------------------

const (
	// teamschatsServiceCommand is the subcommand name for the chats service.
	teamschatsServiceCommand = "chats"
	// usage-string suffixes appended to the generated cobra Use text.
	teamschatsServiceCommandCreateUseSuffix  = "--user <userEmail> | '" + flags.Wildcard + "'"
	teamschatsServiceCommandDeleteUseSuffix  = "--backups <backupId>"
	teamschatsServiceCommandDetailsUseSuffix = "--backup <backupId>"
)

// example blocks rendered in `corso backup <verb> chats --help` output.
const (
	teamschatsServiceCommandCreateExamples = `# Backup all chats with bob@company.hr
corso backup create chats --user bob@company.hr
# Backup all chats for all users
corso backup create chats --user '*'`

	teamschatsServiceCommandDeleteExamples = `# Delete chats backup with ID 1234abcd-12ab-cd34-56de-1234abcd \
and 1234abcd-12ab-cd34-56de-1234abce
corso backup delete chats --backups 1234abcd-12ab-cd34-56de-1234abcd,1234abcd-12ab-cd34-56de-1234abce`

	teamschatsServiceCommandDetailsExamples = `# Explore chats in Bob's latest backup (1234abcd...)
corso backup details chats --backup 1234abcd-12ab-cd34-56de-1234abcd`
)
// addTeamsChatsCommands is called by backup.go to map subcommands to
// provider-specific handling.  It registers the chats variant of the
// create/list/details/delete verb identified by cmd.Use and returns the
// registered child command (nil when cmd.Use matches no known verb).
func addTeamsChatsCommands(cmd *cobra.Command) *cobra.Command {
	var c *cobra.Command

	switch cmd.Use {
	case createCommand:
		c, _ = utils.AddCommand(cmd, teamschatsCreateCmd(), utils.MarkPreReleaseCommand())

		c.Use = c.Use + " " + teamschatsServiceCommandCreateUseSuffix
		c.Example = teamschatsServiceCommandCreateExamples

		// Flags addition ordering should follow the order we want them to appear in help and docs:
		flags.AddUserFlag(c)
		flags.AddDataFlag(c, []string{flags.DataChats}, false)
		flags.AddGenericBackupFlags(c)

	case listCommand:
		c, _ = utils.AddCommand(cmd, teamschatsListCmd(), utils.MarkPreReleaseCommand())

		flags.AddBackupIDFlag(c, false)
		flags.AddAllBackupListFlags(c)

	case detailsCommand:
		c, _ = utils.AddCommand(cmd, teamschatsDetailsCmd(), utils.MarkPreReleaseCommand())

		c.Use = c.Use + " " + teamschatsServiceCommandDetailsUseSuffix
		c.Example = teamschatsServiceCommandDetailsExamples

		flags.AddSkipReduceFlag(c)

		// Flags addition ordering should follow the order we want them to appear in help and docs:
		// More generic (ex: --user) and more frequently used flags take precedence.
		flags.AddBackupIDFlag(c, true)
		flags.AddTeamsChatsDetailsAndRestoreFlags(c)

	case deleteCommand:
		c, _ = utils.AddCommand(cmd, teamschatsDeleteCmd(), utils.MarkPreReleaseCommand())

		c.Use = c.Use + " " + teamschatsServiceCommandDeleteUseSuffix
		c.Example = teamschatsServiceCommandDeleteExamples

		flags.AddMultipleBackupIDsFlag(c, false)
		flags.AddBackupIDFlag(c, false)
	}

	return c
}
// ------------------------------------------------------------------------------------------------
// backup create
// ------------------------------------------------------------------------------------------------

// teamschatsCreateCmd builds `corso backup create chats [<flag>...]`.
func teamschatsCreateCmd() *cobra.Command {
	createChats := cobra.Command{
		Use:     teamschatsServiceCommand,
		Aliases: []string{teamsServiceCommand},
		Short:   "Backup M365 Chats data",
		RunE:    createTeamsChatsCmd,
		Args:    cobra.NoArgs,
	}

	return &createChats
}
// createTeamsChatsCmd processes a teamschats backup: it validates flags,
// connects to the repository, resolves the targeted users, and hands one
// selector per resource owner to the generic create runner.
func createTeamsChatsCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	// flag-test run mode stops after flag parsing; nothing to execute.
	if flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	if err := validateTeamsChatsBackupCreateFlags(flags.UserFV, flags.CategoryDataFV); err != nil {
		return err
	}

	r, acct, err := utils.AccountConnectAndWriteRepoConfig(
		ctx,
		cmd,
		path.TeamsChatsService)
	if err != nil {
		return Only(ctx, err)
	}

	defer utils.CloseRepo(ctx, r)

	// TODO: log/print recoverable errors
	errs := fault.New(false)

	svcCli, err := m365.NewM365Client(ctx, *acct)
	if err != nil {
		return Only(ctx, clues.Stack(err))
	}

	// resolve every user ID and name up front so wildcard expansion and
	// the per-owner split below work against a complete cache.
	ins, err := svcCli.AC.Users().GetAllIDsAndNames(ctx, errs)
	if err != nil {
		return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 teamschats"))
	}

	sel := teamschatsBackupCreateSelectors(ctx, ins, flags.UserFV, flags.CategoryDataFV)

	// one selector per resource owner; each becomes its own backup operation.
	selectorSet := []selectors.Selector{}

	for _, discSel := range sel.SplitByResourceOwner(ins.IDs()) {
		selectorSet = append(selectorSet, discSel.Selector)
	}

	return genericCreateCommand(
		ctx,
		r,
		"Chats",
		selectorSet,
		ins)
}
// ------------------------------------------------------------------------------------------------
// backup list
// ------------------------------------------------------------------------------------------------

// teamschatsListCmd builds `corso backup list teamschats [<flag>...]`.
func teamschatsListCmd() *cobra.Command {
	listChats := cobra.Command{
		Use:   teamschatsServiceCommand,
		Short: "List the history of M365 Chats backups",
		RunE:  listTeamsChatsCmd,
		Args:  cobra.NoArgs,
	}

	return &listChats
}
// listTeamsChatsCmd lists the history of backup operations for the
// teamschats service; when --backup is set, only that backup is shown.
func listTeamsChatsCmd(cmd *cobra.Command, args []string) error {
	return genericListCommand(cmd, flags.BackupIDFV, path.TeamsChatsService, args)
}
// ------------------------------------------------------------------------------------------------
// backup details
// ------------------------------------------------------------------------------------------------

// teamschatsDetailsCmd builds `corso backup details teamschats [<flag>...]`.
func teamschatsDetailsCmd() *cobra.Command {
	detailsChats := cobra.Command{
		Use:   teamschatsServiceCommand,
		Short: "Shows the details of a M365 Chats backup",
		RunE:  detailsTeamsChatsCmd,
		Args:  cobra.NoArgs,
	}

	return &detailsChats
}
// detailsTeamsChatsCmd handles a teamschats backup-details request.
func detailsTeamsChatsCmd(cmd *cobra.Command, args []string) error {
	// Help-only invocations and flag-test run mode both short-circuit
	// without touching the repository.  Short-circuit evaluation keeps
	// the original check order (help display happens first).
	if utils.HasNoFlagsAndShownHelp(cmd) || flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	return runDetailsTeamsChatsCmd(cmd)
}
// runDetailsTeamsChatsCmd fetches the details of the backup identified by
// the --backup flag and prints the entries matching the flag-driven
// selectors, or a no-match notice when nothing qualifies.
func runDetailsTeamsChatsCmd(cmd *cobra.Command) error {
	ctx := cmd.Context()
	opts := utils.MakeTeamsChatsOpts(cmd)

	sel := utils.IncludeTeamsChatsRestoreDataSelectors(ctx, opts)
	// match on item names rather than IDs.
	sel.Configure(selectors.Config{OnlyMatchItemNames: true})
	utils.FilterTeamsChatsRestoreInfoSelectors(sel, opts)

	ds, err := genericDetailsCommand(cmd, flags.BackupIDFV, sel.Selector)
	if err != nil {
		return Only(ctx, err)
	}

	if len(ds.Entries) > 0 {
		ds.PrintEntries(ctx)
	} else {
		Info(ctx, selectors.ErrorNoMatchingItems)
	}

	return nil
}
// ------------------------------------------------------------------------------------------------
// backup delete
// ------------------------------------------------------------------------------------------------

// teamschatsDeleteCmd builds `corso backup delete teamschats [<flag>...]`.
func teamschatsDeleteCmd() *cobra.Command {
	deleteChats := cobra.Command{
		Use:   teamschatsServiceCommand,
		Short: "Delete backed-up M365 Chats data",
		RunE:  deleteTeamsChatsCmd,
		Args:  cobra.NoArgs,
	}

	return &deleteChats
}
// deleteTeamsChatsCmd deletes one or more teamschats backups.  The plural
// --backups flag wins when both it and --backup are provided; at least one
// of the two is required.
func deleteTeamsChatsCmd(cmd *cobra.Command, args []string) error {
	var ids []string

	switch {
	case len(flags.BackupIDsFV) > 0:
		ids = flags.BackupIDsFV
	case len(flags.BackupIDFV) > 0:
		ids = []string{flags.BackupIDFV}
	default:
		return clues.New("either --backup or --backups flag is required")
	}

	return genericDeleteCommand(cmd, path.TeamsChatsService, "TeamsChats", ids, args)
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------

// validateTeamsChatsBackupCreateFlags ensures at least one user was
// requested and that every provided data category is recognized.
func validateTeamsChatsBackupCreateFlags(teamschats, cats []string) error {
	if len(teamschats) == 0 {
		return clues.New(
			"requires one or more --" +
				flags.UserFN + " ids, or the wildcard --" +
				flags.UserFN + " *")
	}

	allowed := utils.TeamsChatsAllowedCategories()

	for _, cat := range cats {
		if _, ok := allowed[cat]; ok {
			continue
		}

		return clues.New(cat + fmt.Sprintf(
			" is an unrecognized data type; only %s is supported",
			flags.DataChats))
	}

	return nil
}
// teamschatsBackupCreateSelectors builds the backup selector for the
// requested users and categories.  A wildcard user expands to every user
// ID in the cache.
func teamschatsBackupCreateSelectors(
	ctx context.Context,
	ins idname.Cacher,
	users, cats []string,
) *selectors.TeamsChatsBackup {
	if filters.PathContains(users).Compare(flags.Wildcard) {
		return includeAllTeamsChatsWithCategories(ins, cats)
	}

	// clone to avoid aliasing the caller's slice inside the selector.
	sel := selectors.NewTeamsChatsBackup(slices.Clone(users))

	return utils.AddTeamsChatsCategories(sel, cats)
}
// includeAllTeamsChatsWithCategories selects every cached user ID, scoped
// to the given categories.
func includeAllTeamsChatsWithCategories(ins idname.Cacher, categories []string) *selectors.TeamsChatsBackup {
	return utils.AddTeamsChatsCategories(selectors.NewTeamsChatsBackup(ins.IDs()), categories)
}

View File

@ -1,636 +0,0 @@
package backup_test
import (
"context"
"fmt"
"strings"
"testing"
"github.com/alcionai/clues"
"github.com/google/uuid"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/print"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
)
// ---------------------------------------------------------------------------
// tests that require no existing backups
// ---------------------------------------------------------------------------
type NoBackupTeamsChatsE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
}
// TestNoBackupTeamsChatsE2ESuite runs the suite of tests that expect an
// empty repository (no prior backups).
//
// Bug fix: this previously instantiated &BackupTeamsChatsE2ESuite, so the
// NoBackupTeamsChatsE2ESuite's SetupSuite and tests were never executed
// under this entry point.
func TestNoBackupTeamsChatsE2ESuite(t *testing.T) {
	suite.Run(t, &NoBackupTeamsChatsE2ESuite{Suite: tester.NewE2ESuite(
		t,
		[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
func (suite *NoBackupTeamsChatsE2ESuite) SetupSuite() {
t := suite.T()
t.Skip("not fully implemented")
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
}
// TestTeamsChatsBackupListCmd_noBackups verifies that listing backups in a
// repo with no backups reports the "No backups available" message.
func (suite *NoBackupTeamsChatsE2ESuite) TestTeamsChatsBackupListCmd_noBackups() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	ctx = config.SetViper(ctx, suite.dpnd.vpr)

	defer flush()

	suite.dpnd.recorder.Reset()

	cmd := cliTD.StubRootCmd(
		"backup", "list", "chats",
		"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
	cli.BuildCommandTree(cmd)

	// the message is written to stderr, so record that stream.
	cmd.SetErr(&suite.dpnd.recorder)

	ctx = print.SetRootCmd(ctx, cmd)

	// run the command
	err := cmd.ExecuteContext(ctx)
	require.NoError(t, err, clues.ToCore(err))

	result := suite.dpnd.recorder.String()

	// with no backups in the repo, the output must end with the
	// "No backups available" notice.
	assert.True(t, strings.HasSuffix(result, "No backups available\n"))
}
// ---------------------------------------------------------------------------
// tests with no prior backup
// ---------------------------------------------------------------------------
type BackupTeamsChatsE2ESuite struct {
tester.Suite
dpnd dependencies
m365 its.M365IntgTestSetup
}
func TestBackupTeamsChatsE2ESuite(t *testing.T) {
suite.Run(t, &BackupTeamsChatsE2ESuite{Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
func (suite *BackupTeamsChatsE2ESuite) SetupSuite() {
t := suite.T()
t.Skip("not fully implemented")
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
}
func (suite *BackupTeamsChatsE2ESuite) TestTeamsChatsBackupCmd_chats() {
runTeamsChatsBackupCategoryTest(suite, flags.DataChats)
}
func runTeamsChatsBackupCategoryTest(suite *BackupTeamsChatsE2ESuite, category string) {
recorder := strings.Builder{}
recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd, ctx := buildTeamsChatsBackupCmd(
ctx,
suite.dpnd.configFilePath,
suite.m365.User.ID,
category,
&recorder)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
result := recorder.String()
t.Log("backup results", result)
}
func (suite *BackupTeamsChatsE2ESuite) TestTeamsChatsBackupCmd_teamschatNotFound_chats() {
runTeamsChatsBackupTeamsChatNotFoundTest(suite, flags.DataChats)
}
func runTeamsChatsBackupTeamsChatNotFoundTest(suite *BackupTeamsChatsE2ESuite, category string) {
recorder := strings.Builder{}
recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd, ctx := buildTeamsChatsBackupCmd(
ctx,
suite.dpnd.configFilePath,
"foo@not-there.com",
category,
&recorder)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
assert.Contains(
t,
err.Error(),
"not found",
"error missing user not found")
assert.NotContains(t, err.Error(), "runtime error", "panic happened")
t.Logf("backup error message: %s", err.Error())
result := recorder.String()
t.Log("backup results", result)
}
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_badAzureClientIDFlag() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "chats",
"--teamschat", suite.m365.User.ID,
"--azure-client-id", "invalid-value")
cli.BuildCommandTree(cmd)
cmd.SetErr(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_fromConfigFile() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "chats",
"--teamschat", suite.m365.User.ID,
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
}
// AWS flags
func (suite *BackupTeamsChatsE2ESuite) TestBackupCreateTeamsChats_badAWSFlags() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd.recorder.Reset()
cmd := cliTD.StubRootCmd(
"backup", "create", "chats",
"--teamschat", suite.m365.User.ID,
"--aws-access-key", "invalid-value",
"--aws-secret-access-key", "some-invalid-value")
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
// since invalid aws creds are explicitly set, should see a failure
require.Error(t, err, clues.ToCore(err))
}
// ---------------------------------------------------------------------------
// tests prepared with a previous backup
// ---------------------------------------------------------------------------
type PreparedBackupTeamsChatsE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps map[path.CategoryType]string
m365 its.M365IntgTestSetup
}
func TestPreparedBackupTeamsChatsE2ESuite(t *testing.T) {
suite.Run(t, &PreparedBackupTeamsChatsE2ESuite{
Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
func (suite *PreparedBackupTeamsChatsE2ESuite) SetupSuite() {
t := suite.T()
t.Skip("not fully implemented")
ctx, flush := tester.NewContext(t)
defer flush()
suite.m365 = its.GetM365(t)
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
suite.backupOps = make(map[path.CategoryType]string)
var (
teamschats = []string{suite.m365.User.ID}
ins = idname.NewCache(map[string]string{suite.m365.User.ID: suite.m365.User.ID})
cats = []path.CategoryType{
path.ChatsCategory,
}
)
for _, set := range cats {
var (
sel = selectors.NewTeamsChatsBackup(teamschats)
scopes []selectors.TeamsChatsScope
)
switch set {
case path.ChatsCategory:
scopes = selTD.TeamsChatsBackupChatScope(sel)
}
sel.Include(scopes)
bop, err := suite.dpnd.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
require.NoError(t, err, clues.ToCore(err))
err = bop.Run(ctx)
require.NoError(t, err, clues.ToCore(err))
bIDs := string(bop.Results.BackupID)
// sanity check, ensure we can find the backup and its details immediately
b, err := suite.dpnd.repo.Backup(ctx, string(bop.Results.BackupID))
require.NoError(t, err, "retrieving recent backup by ID")
require.Equal(t, bIDs, string(b.ID), "repo backup matches results id")
_, b, errs := suite.dpnd.repo.GetBackupDetails(ctx, bIDs)
require.NoError(t, errs.Failure(), "retrieving recent backup details by ID")
require.Empty(t, errs.Recovered(), "retrieving recent backup details by ID")
require.Equal(t, bIDs, string(b.ID), "repo details matches results id")
suite.backupOps[set] = string(b.ID)
}
}
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_chats() {
runTeamsChatsListCmdTest(suite, path.ChatsCategory)
}
func runTeamsChatsListCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "list", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
assert.Contains(t, result, suite.backupOps[category])
}
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_singleID_chats() {
runTeamsChatsListSingleCmdTest(suite, path.ChatsCategory)
}
func runTeamsChatsListSingleCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
bID := suite.backupOps[category]
cmd := cliTD.StubRootCmd(
"backup", "list", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--backup", string(bID))
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
assert.Contains(t, result, bID)
}
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsListCmd_badID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "list", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--backup", "smarfs")
cli.BuildCommandTree(cmd)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *PreparedBackupTeamsChatsE2ESuite) TestTeamsChatsDetailsCmd_chats() {
runTeamsChatsDetailsCmdTest(suite, path.ChatsCategory)
}
func runTeamsChatsDetailsCmdTest(suite *PreparedBackupTeamsChatsE2ESuite, category path.CategoryType) {
suite.dpnd.recorder.Reset()
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
bID := suite.backupOps[category]
// fetch the details from the repo first
deets, _, errs := suite.dpnd.repo.GetBackupDetails(ctx, string(bID))
require.NoError(t, errs.Failure(), clues.ToCore(errs.Failure()))
require.Empty(t, errs.Recovered())
cmd := cliTD.StubRootCmd(
"backup", "details", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--"+flags.BackupFN, string(bID))
cli.BuildCommandTree(cmd)
cmd.SetOut(&suite.dpnd.recorder)
ctx = print.SetRootCmd(ctx, cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// compare the output
result := suite.dpnd.recorder.String()
i := 0
foundFolders := 0
for _, ent := range deets.Entries {
// Skip folders as they don't mean anything to the end teamschat.
if ent.Folder != nil {
foundFolders++
continue
}
suite.Run(fmt.Sprintf("detail %d", i), func() {
assert.Contains(suite.T(), result, ent.ShortRef)
})
i++
}
// We only backup the default folder for each category so there should be at
// least that folder (we don't make details entries for prefix folders).
assert.GreaterOrEqual(t, foundFolders, 1)
}
// ---------------------------------------------------------------------------
// tests for deleting backups
// ---------------------------------------------------------------------------
type BackupDeleteTeamsChatsE2ESuite struct {
tester.Suite
dpnd dependencies
backupOps [3]operations.BackupOperation
}
func TestBackupDeleteTeamsChatsE2ESuite(t *testing.T) {
suite.Run(t, &BackupDeleteTeamsChatsE2ESuite{
Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs}),
})
}
func (suite *BackupDeleteTeamsChatsE2ESuite) SetupSuite() {
t := suite.T()
t.Skip("not fully implemented")
ctx, flush := tester.NewContext(t)
defer flush()
suite.dpnd = prepM365Test(t, ctx, path.TeamsChatsService)
m365TeamsChatID := tconfig.M365TeamID(t)
teamschats := []string{m365TeamsChatID}
// some tests require an existing backup
sel := selectors.NewTeamsChatsBackup(teamschats)
sel.Include(selTD.TeamsChatsBackupChatScope(sel))
for i := 0; i < cap(suite.backupOps); i++ {
backupOp, err := suite.dpnd.repo.NewBackup(ctx, sel.Selector)
require.NoError(t, err, clues.ToCore(err))
suite.backupOps[i] = backupOp
err = suite.backupOps[i].Run(ctx)
require.NoError(t, err, clues.ToCore(err))
}
}
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--"+flags.BackupIDsFN,
fmt.Sprintf("%s,%s",
string(suite.backupOps[0].Results.BackupID),
string(suite.backupOps[1].Results.BackupID)))
cli.BuildCommandTree(cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd(
"backup", "details", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--backups", string(suite.backupOps[0].Results.BackupID))
cli.BuildCommandTree(cmd)
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_SingleID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--"+flags.BackupFN,
string(suite.backupOps[2].Results.BackupID))
cli.BuildCommandTree(cmd)
// run the command
err := cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// a follow-up details call should fail, due to the backup ID being deleted
cmd = cliTD.StubRootCmd(
"backup", "details", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--backup", string(suite.backupOps[2].Results.BackupID))
cli.BuildCommandTree(cmd)
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_UnknownID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath,
"--"+flags.BackupIDsFN, uuid.NewString())
cli.BuildCommandTree(cmd)
// unknown backupIDs should error since the modelStore can't find the backup
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
func (suite *BackupDeleteTeamsChatsE2ESuite) TestTeamsChatsBackupDeleteCmd_NoBackupID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
ctx = config.SetViper(ctx, suite.dpnd.vpr)
defer flush()
cmd := cliTD.StubRootCmd(
"backup", "delete", "chats",
"--"+flags.ConfigFileFN, suite.dpnd.configFilePath)
cli.BuildCommandTree(cmd)
// empty backupIDs should error since no data provided
err := cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
// buildTeamsChatsBackupCmd assembles a stubbed `backup create chats`
// command wired to the given config file, resource, and category, with
// stdout captured into recorder.  It returns the command and a context
// carrying the root command for print helpers.
func buildTeamsChatsBackupCmd(
	ctx context.Context,
	configFile, resource, category string,
	recorder *strings.Builder,
) (*cobra.Command, context.Context) {
	cmd := cliTD.StubRootCmd(
		"backup", "create", "chats",
		"--"+flags.ConfigFileFN, configFile,
		"--"+flags.UserFN, resource,
		"--"+flags.CategoryDataFN, category)
	cli.BuildCommandTree(cmd)

	cmd.SetOut(recorder)

	return cmd, print.SetRootCmd(ctx, cmd)
}

View File

@ -1,248 +0,0 @@
package backup
import (
"testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control"
)
type TeamsChatsUnitSuite struct {
tester.Suite
}
func TestTeamsChatsUnitSuite(t *testing.T) {
suite.Run(t, &TeamsChatsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *TeamsChatsUnitSuite) TestAddTeamsChatsCommands() {
expectUse := teamschatsServiceCommand
table := []struct {
name string
use string
expectUse string
expectShort string
expectRunE func(*cobra.Command, []string) error
}{
{
name: "create teamschats",
use: createCommand,
expectUse: expectUse + " " + teamschatsServiceCommandCreateUseSuffix,
expectShort: teamschatsCreateCmd().Short,
expectRunE: createTeamsChatsCmd,
},
{
name: "list teamschats",
use: listCommand,
expectUse: expectUse,
expectShort: teamschatsListCmd().Short,
expectRunE: listTeamsChatsCmd,
},
{
name: "details teamschats",
use: detailsCommand,
expectUse: expectUse + " " + teamschatsServiceCommandDetailsUseSuffix,
expectShort: teamschatsDetailsCmd().Short,
expectRunE: detailsTeamsChatsCmd,
},
{
name: "delete teamschats",
use: deleteCommand,
expectUse: expectUse + " " + teamschatsServiceCommandDeleteUseSuffix,
expectShort: teamschatsDeleteCmd().Short,
expectRunE: deleteTeamsChatsCmd,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
cmd := &cobra.Command{Use: test.use}
c := addTeamsChatsCommands(cmd)
require.NotNil(t, c)
cmds := cmd.Commands()
require.Len(t, cmds, 1)
child := cmds[0]
assert.Equal(t, test.expectUse, child.Use)
assert.Equal(t, test.expectShort, child.Short)
tester.AreSameFunc(t, test.expectRunE, child.RunE)
})
}
}
func (suite *TeamsChatsUnitSuite) TestValidateTeamsChatsBackupCreateFlags() {
table := []struct {
name string
cats []string
expect assert.ErrorAssertionFunc
}{
{
name: "none",
cats: []string{},
expect: assert.NoError,
},
{
name: "chats",
cats: []string{flags.DataChats},
expect: assert.NoError,
},
{
name: "all allowed",
cats: []string{
flags.DataChats,
},
expect: assert.NoError,
},
{
name: "bad inputs",
cats: []string{"foo"},
expect: assert.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
err := validateTeamsChatsBackupCreateFlags([]string{"*"}, test.cats)
test.expect(suite.T(), err, clues.ToCore(err))
})
}
}
func (suite *TeamsChatsUnitSuite) TestBackupCreateFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: createCommand},
addTeamsChatsCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
teamschatsServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.UserFN, flagsTD.FlgInputs(flagsTD.UsersInput),
"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.TeamsChatsCategoryDataInput),
},
flagsTD.PreparedGenericBackupFlags(),
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
opts := utils.MakeTeamsChatsOpts(cmd)
co := utils.Control()
backupOpts := utils.ParseBackupOptions()
// TODO(ashmrtn): Remove flag checks on control.Options to control.Backup once
// restore flags are switched over too and we no longer parse flags beyond
// connection info into control.Options.
assert.Equal(t, control.FailFast, backupOpts.FailureHandling)
assert.True(t, backupOpts.Incrementals.ForceFullEnumeration)
assert.True(t, backupOpts.Incrementals.ForceItemDataRefresh)
assert.Equal(t, control.FailFast, co.FailureHandling)
assert.True(t, co.ToggleFeatures.DisableIncrementals)
assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
assert.ElementsMatch(t, flagsTD.UsersInput, opts.Users)
flagsTD.AssertGenericBackupFlags(t, cmd)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}
func (suite *TeamsChatsUnitSuite) TestBackupListFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: listCommand},
addTeamsChatsCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
teamschatsServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
},
flagsTD.PreparedBackupListFlags(),
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
flagsTD.AssertBackupListFlags(t, cmd)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}
func (suite *TeamsChatsUnitSuite) TestBackupDetailsFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: detailsCommand},
addTeamsChatsCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
teamschatsServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.SkipReduceFN,
},
flagsTD.PreparedTeamsChatsFlags(),
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
co := utils.Control()
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
assert.True(t, co.SkipReduce)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
flagsTD.AssertTeamsChatsFlags(t, cmd)
}
func (suite *TeamsChatsUnitSuite) TestBackupDeleteFlags() {
t := suite.T()
cmd := cliTD.SetUpCmdHasFlags(
t,
&cobra.Command{Use: deleteCommand},
addTeamsChatsCommands,
[]cliTD.UseCobraCommandFn{
flags.AddAllProviderFlags,
flags.AddAllStorageFlags,
},
flagsTD.WithFlags(
teamschatsServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
},
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd)
}

View File

@ -10,16 +10,16 @@ import (
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cli/backup" "github.com/alcionai/corso/src/cli/backup"
"github.com/alcionai/corso/src/cli/debug" "github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/export" "github.com/alcionai/corso/src/cli/export"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/help" "github.com/alcionai/corso/src/cli/help"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/repo" "github.com/alcionai/corso/src/cli/repo"
"github.com/alcionai/corso/src/cli/restore" "github.com/alcionai/corso/src/cli/restore"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
) )
@ -38,7 +38,7 @@ var corsoCmd = &cobra.Command{
} }
func preRun(cc *cobra.Command, args []string) error { func preRun(cc *cobra.Command, args []string) error {
if err := config.InitCmd(cc, args); err != nil { if err := config.InitFunc(cc, args); err != nil {
return err return err
} }
@ -61,6 +61,43 @@ func preRun(cc *cobra.Command, args []string) error {
print.Infof(ctx, "Logging to file: %s", logger.ResolvedLogFile) print.Infof(ctx, "Logging to file: %s", logger.ResolvedLogFile)
} }
avoidTheseDescription := []string{
"Initialize a repository.",
"Initialize a S3 repository",
"Connect to a S3 repository",
"Initialize a repository on local or network storage.",
"Connect to a repository on local or network storage.",
"Help about any command",
"Free, Secure, Open-Source Backup for M365.",
"env var guide",
}
if !slices.Contains(avoidTheseDescription, cc.Short) {
provider, overrides, err := utils.GetStorageProviderAndOverrides(ctx, cc)
if err != nil {
return err
}
cfg, err := config.GetConfigRepoDetails(
ctx,
provider,
true,
false,
overrides)
if err != nil {
log.Error("Error while getting config info to run command: ", cc.Use)
return err
}
utils.SendStartCorsoEvent(
ctx,
cfg.Storage,
cfg.Account.ID(),
map[string]any{"command": cc.CommandPath()},
cfg.RepoID,
utils.Control())
}
// handle deprecated user flag in Backup exchange command // handle deprecated user flag in Backup exchange command
if cc.CommandPath() == "corso backup create exchange" { if cc.CommandPath() == "corso backup create exchange" {
handleMailBoxFlag(ctx, cc, flagSl) handleMailBoxFlag(ctx, cc, flagSl)
@ -73,7 +110,7 @@ func preRun(cc *cobra.Command, args []string) error {
func handleMailBoxFlag(ctx context.Context, c *cobra.Command, flagNames []string) { func handleMailBoxFlag(ctx context.Context, c *cobra.Command, flagNames []string) {
if !slices.Contains(flagNames, "user") && !slices.Contains(flagNames, "mailbox") { if !slices.Contains(flagNames, "user") && !slices.Contains(flagNames, "mailbox") {
print.Err(ctx, "either --user or --mailbox flag is required") print.Errf(ctx, "either --user or --mailbox flag is required")
os.Exit(1) os.Exit(1)
} }
@ -126,7 +163,6 @@ func BuildCommandTree(cmd *cobra.Command) {
backup.AddCommands(cmd) backup.AddCommands(cmd)
restore.AddCommands(cmd) restore.AddCommands(cmd)
export.AddCommands(cmd) export.AddCommands(cmd)
debug.AddCommands(cmd)
help.AddCommands(cmd) help.AddCommands(cmd)
} }

View File

@ -30,9 +30,6 @@ func m365Overrides(in map[string]string) map[string]string {
} }
} }
// add m365 config key names that require path related validations
var m365PathKeys = []string{}
// configureAccount builds a complete account configuration from a mix of // configureAccount builds a complete account configuration from a mix of
// viper properties and manual overrides. // viper properties and manual overrides.
func configureAccount( func configureAccount(
@ -60,7 +57,7 @@ func configureAccount(
return acct, clues.New("unsupported account provider: [" + providerType + "]") return acct, clues.New("unsupported account provider: [" + providerType + "]")
} }
if err := mustMatchConfig(vpr, m365Overrides(overrides), m365PathKeys); err != nil { if err := mustMatchConfig(vpr, m365Overrides(overrides)); err != nil {
return acct, clues.Wrap(err, "verifying m365 configs in corso config file") return acct, clues.Wrap(err, "verifying m365 configs in corso config file")
} }
} }

View File

@ -2,11 +2,8 @@ package config
import ( import (
"context" "context"
"errors"
"io/fs"
"os" "os"
"path/filepath" "path/filepath"
"slices"
"strings" "strings"
"github.com/alcionai/clues" "github.com/alcionai/clues"
@ -19,7 +16,6 @@ import (
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control/repository" "github.com/alcionai/corso/src/pkg/control/repository"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/storage" "github.com/alcionai/corso/src/pkg/storage"
) )
@ -33,9 +29,10 @@ const (
) )
var ( var (
defaultConfigFilePath string configFilePath string
configDir string configFilePathFlag string
displayDefaultFP = filepath.Join("$HOME", ".corso.toml") configDir string
displayDefaultFP = filepath.Join("$HOME", ".corso.toml")
) )
// RepoDetails holds the repository configuration retrieved from // RepoDetails holds the repository configuration retrieved from
@ -57,7 +54,7 @@ func init() {
Infof(context.Background(), "cannot stat CORSO_CONFIG_DIR [%s]: %v", envDir, err) Infof(context.Background(), "cannot stat CORSO_CONFIG_DIR [%s]: %v", envDir, err)
} else { } else {
configDir = envDir configDir = envDir
defaultConfigFilePath = filepath.Join(configDir, ".corso.toml") configFilePath = filepath.Join(configDir, ".corso.toml")
} }
} }
@ -68,71 +65,43 @@ func init() {
if len(configDir) == 0 { if len(configDir) == 0 {
configDir = homeDir configDir = homeDir
defaultConfigFilePath = filepath.Join(configDir, ".corso.toml") configFilePath = filepath.Join(configDir, ".corso.toml")
} }
} }
// adds the persistent flag --config-file to the provided command. // adds the persistent flag --config-file to the provided command.
func AddConfigFlags(cmd *cobra.Command) { func AddConfigFlags(cmd *cobra.Command) {
pf := cmd.PersistentFlags() fs := cmd.PersistentFlags()
pf.StringVar( fs.StringVar(
&flags.ConfigFileFV, &configFilePathFlag,
flags.ConfigFileFN, displayDefaultFP, "config file location") "config-file", displayDefaultFP, "config file location")
} }
// --------------------------------------------------------------------------------------------------------- // ---------------------------------------------------------------------------------------------------------
// Initialization & Storage // Initialization & Storage
// --------------------------------------------------------------------------------------------------------- // ---------------------------------------------------------------------------------------------------------
// InitCmd provides a func that lazily initializes viper and // InitFunc provides a func that lazily initializes viper and
// verifies that the configuration was able to read a file. // verifies that the configuration was able to read a file.
func InitCmd(cmd *cobra.Command, args []string) error { func InitFunc(cmd *cobra.Command, args []string) error {
_, err := commonInit(cmd.Context(), flags.ConfigFileFV) fp := configFilePathFlag
return clues.Stack(err).OrNil()
}
// InitConfig allows sdk consumers to initialize viper.
func InitConfig(
ctx context.Context,
userDefinedConfigFile string,
) (context.Context, error) {
return commonInit(ctx, userDefinedConfigFile)
}
func commonInit(
ctx context.Context,
userDefinedConfigFile string,
) (context.Context, error) {
fp := userDefinedConfigFile
if len(fp) == 0 || fp == displayDefaultFP { if len(fp) == 0 || fp == displayDefaultFP {
fp = defaultConfigFilePath fp = configFilePath
} }
vpr := GetViper(ctx) err := initWithViper(GetViper(cmd.Context()), fp)
if err := initWithViper(ctx, vpr, fp); err != nil { if err != nil {
return ctx, err return err
} }
return SetViper(ctx, vpr), clues.Stack(Read(ctx)).OrNil() return Read(cmd.Context())
} }
// initWithViper implements InitConfig, but takes in a viper // initWithViper implements InitConfig, but takes in a viper
// struct for testing. // struct for testing.
func initWithViper( func initWithViper(vpr *viper.Viper, configFP string) error {
ctx context.Context,
vpr *viper.Viper,
configFP string,
) error {
logger.Ctx(ctx).Debugw("initializing viper", "config_file_path", configFP)
defer func() {
logger.Ctx(ctx).Debugw("initialized config", "config_file_path", configFP)
}()
// Configure default config file location // Configure default config file location
if len(configFP) == 0 || configFP == displayDefaultFP { if len(configFP) == 0 || configFP == displayDefaultFP {
configFP = defaultConfigFilePath
// Find home directory. // Find home directory.
_, err := os.Stat(configDir) _, err := os.Stat(configDir)
if err != nil { if err != nil {
@ -143,23 +112,26 @@ func initWithViper(
vpr.AddConfigPath(configDir) vpr.AddConfigPath(configDir)
vpr.SetConfigType("toml") vpr.SetConfigType("toml")
vpr.SetConfigName(".corso") vpr.SetConfigName(".corso")
} else {
ext := filepath.Ext(configFP)
if len(ext) == 0 {
return clues.New("config file requires an extension e.g. `toml`")
}
fileName := filepath.Base(configFP) return nil
fileName = strings.TrimSuffix(fileName, ext)
vpr.SetConfigType(strings.TrimPrefix(ext, "."))
vpr.SetConfigName(fileName)
vpr.SetConfigFile(configFP)
// We also configure the path, type and filename
// because `vpr.SafeWriteConfig` needs these set to
// work correctly (it does not use the configured file)
vpr.AddConfigPath(filepath.Dir(configFP))
} }
vpr.SetConfigFile(configFP)
// We also configure the path, type and filename
// because `vpr.SafeWriteConfig` needs these set to
// work correctly (it does not use the configured file)
vpr.AddConfigPath(filepath.Dir(configFP))
ext := filepath.Ext(configFP)
if len(ext) == 0 {
return clues.New("config file requires an extension e.g. `toml`")
}
fileName := filepath.Base(configFP)
fileName = strings.TrimSuffix(fileName, ext)
vpr.SetConfigType(strings.TrimPrefix(ext, "."))
vpr.SetConfigName(fileName)
return nil return nil
} }
@ -264,17 +236,19 @@ func writeRepoConfigWithViper(
return nil return nil
} }
// ReadCorsoConfig creates a storage and account instance by mediating all the possible // GetStorageAndAccount creates a storage and account instance by mediating all the possible
// data sources (config file, env vars, flag overrides) and the config file. // data sources (config file, env vars, flag overrides) and the config file.
func ReadCorsoConfig( func GetConfigRepoDetails(
ctx context.Context, ctx context.Context,
provider storage.ProviderType, provider storage.ProviderType,
readFromFile bool, readFromFile bool,
mustMatchFromConfig bool, mustMatchFromConfig bool,
overrides map[string]string, overrides map[string]string,
) (RepoDetails, error) { ) (
RepoDetails,
error,
) {
config, err := getStorageAndAccountWithViper( config, err := getStorageAndAccountWithViper(
ctx,
GetViper(ctx), GetViper(ctx),
provider, provider,
readFromFile, readFromFile,
@ -287,13 +261,15 @@ func ReadCorsoConfig(
// getSorageAndAccountWithViper implements GetSorageAndAccount, but takes in a viper // getSorageAndAccountWithViper implements GetSorageAndAccount, but takes in a viper
// struct for testing. // struct for testing.
func getStorageAndAccountWithViper( func getStorageAndAccountWithViper(
ctx context.Context,
vpr *viper.Viper, vpr *viper.Viper,
provider storage.ProviderType, provider storage.ProviderType,
readFromFile bool, readFromFile bool,
mustMatchFromConfig bool, mustMatchFromConfig bool,
overrides map[string]string, overrides map[string]string,
) (RepoDetails, error) { ) (
RepoDetails,
error,
) {
var ( var (
config RepoDetails config RepoDetails
err error err error
@ -303,19 +279,11 @@ func getStorageAndAccountWithViper(
// possibly read the prior config from a .corso file // possibly read the prior config from a .corso file
if readFromFile { if readFromFile {
ctx = clues.Add(ctx, "viper_config_file", vpr.ConfigFileUsed())
logger.Ctx(ctx).Debug("reading config from file")
if err := vpr.ReadInConfig(); err != nil { if err := vpr.ReadInConfig(); err != nil {
configNotSet := errors.As(err, &viper.ConfigFileNotFoundError{}) if _, ok := err.(viper.ConfigFileNotFoundError); !ok {
configNotFound := errors.Is(err, fs.ErrNotExist)
if !configNotSet && !configNotFound {
return config, clues.Wrap(err, "reading corso config file: "+vpr.ConfigFileUsed()) return config, clues.Wrap(err, "reading corso config file: "+vpr.ConfigFileUsed())
} }
logger.Ctx(ctx).Info("config file not found")
readConfigFromViper = false readConfigFromViper = false
} }
@ -365,7 +333,7 @@ var constToTomlKeyMap = map[string]string{
// If any value differs from the viper value, an error is returned. // If any value differs from the viper value, an error is returned.
// values in m that aren't stored in the config are ignored. // values in m that aren't stored in the config are ignored.
// TODO(pandeyabs): This code is currently duplicated in 2 places. // TODO(pandeyabs): This code is currently duplicated in 2 places.
func mustMatchConfig(vpr *viper.Viper, m map[string]string, pathKeys []string) error { func mustMatchConfig(vpr *viper.Viper, m map[string]string) error {
for k, v := range m { for k, v := range m {
if len(v) == 0 { if len(v) == 0 {
continue // empty variables will get caught by configuration validators, if necessary continue // empty variables will get caught by configuration validators, if necessary
@ -377,16 +345,7 @@ func mustMatchConfig(vpr *viper.Viper, m map[string]string, pathKeys []string) e
} }
vv := vpr.GetString(tomlK) vv := vpr.GetString(tomlK)
areEqual := false if v != vv {
// some of the values maybe paths, hence they require more than just string equality
if len(pathKeys) > 0 && slices.Contains(pathKeys, k) {
areEqual = path.ArePathsEquivalent(v, vv)
} else {
areEqual = v == vv
}
if !areEqual {
return clues.New("value of " + k + " (" + v + ") does not match corso configuration value (" + vv + ")") return clues.New("value of " + k + " (" + v + ") does not match corso configuration value (" + vv + ")")
} }
} }

View File

@ -16,7 +16,6 @@ import (
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/internal/common/str" "github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/its"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control/repository" "github.com/alcionai/corso/src/pkg/control/repository"
@ -150,9 +149,6 @@ func (suite *ConfigSuite) TestWriteReadConfig() {
testConfigFilePath = filepath.Join(t.TempDir(), "corso.toml") testConfigFilePath = filepath.Join(t.TempDir(), "corso.toml")
) )
ctx, flush := tester.NewContext(t)
defer flush()
const ( const (
bkt = "write-read-config-bucket" bkt = "write-read-config-bucket"
tid = "3c0748d2-470e-444c-9064-1268e52609d5" tid = "3c0748d2-470e-444c-9064-1268e52609d5"
@ -161,7 +157,7 @@ func (suite *ConfigSuite) TestWriteReadConfig() {
host = "some-host" host = "some-host"
) )
err := initWithViper(ctx, vpr, testConfigFilePath) err := initWithViper(vpr, testConfigFilePath)
require.NoError(t, err, "initializing repo config", clues.ToCore(err)) require.NoError(t, err, "initializing repo config", clues.ToCore(err))
s3Cfg := &storage.S3Config{ s3Cfg := &storage.S3Config{
@ -209,22 +205,17 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
testConfigFilePath = filepath.Join(t.TempDir(), "corso.toml") testConfigFilePath = filepath.Join(t.TempDir(), "corso.toml")
) )
ctx, flush := tester.NewContext(t)
defer flush()
const ( const (
bkt = "must-match-config-bucket" bkt = "must-match-config-bucket"
tid = "dfb12063-7598-458b-85ab-42352c5c25e2" tid = "dfb12063-7598-458b-85ab-42352c5c25e2"
) )
err := initWithViper(ctx, vpr, testConfigFilePath) err := initWithViper(vpr, testConfigFilePath)
require.NoError(t, err, "initializing repo config") require.NoError(t, err, "initializing repo config")
s3Cfg := &storage.S3Config{Bucket: bkt} s3Cfg := &storage.S3Config{Bucket: bkt}
m365 := account.M365Config{AzureTenantID: tid} m365 := account.M365Config{AzureTenantID: tid}
m365PathKeys := []string{}
err = writeRepoConfigWithViper(vpr, s3Cfg, m365, repository.Options{}, "repoid") err = writeRepoConfigWithViper(vpr, s3Cfg, m365, repository.Options{}, "repoid")
require.NoError(t, err, "writing repo config", clues.ToCore(err)) require.NoError(t, err, "writing repo config", clues.ToCore(err))
@ -281,7 +272,7 @@ func (suite *ConfigSuite) TestMustMatchConfig() {
} }
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
test.errCheck(suite.T(), mustMatchConfig(vpr, test.input, m365PathKeys), clues.ToCore(err)) test.errCheck(suite.T(), mustMatchConfig(vpr, test.input), clues.ToCore(err))
}) })
} }
} }
@ -292,9 +283,6 @@ func (suite *ConfigSuite) TestReadFromFlags() {
vpr = viper.New() vpr = viper.New()
) )
ctx, flush := tester.NewContext(t)
defer flush()
const ( const (
b = "read-repo-config-basic-bucket" b = "read-repo-config-basic-bucket"
tID = "6f34ac30-8196-469b-bf8f-d83deadbbbba" tID = "6f34ac30-8196-469b-bf8f-d83deadbbbba"
@ -319,7 +307,7 @@ func (suite *ConfigSuite) TestReadFromFlags() {
flags.AWSSecretAccessKeyFV = "" flags.AWSSecretAccessKeyFV = ""
flags.AWSSessionTokenFV = "" flags.AWSSessionTokenFV = ""
flags.PassphraseFV = "" flags.CorsoPassphraseFV = ""
}) })
// Generate test config file // Generate test config file
@ -356,10 +344,9 @@ func (suite *ConfigSuite) TestReadFromFlags() {
overrides[credentials.AWSSecretAccessKey] = flags.AWSSecretAccessKeyFV overrides[credentials.AWSSecretAccessKey] = flags.AWSSecretAccessKeyFV
overrides[credentials.AWSSessionToken] = flags.AWSSessionTokenFV overrides[credentials.AWSSessionToken] = flags.AWSSessionTokenFV
flags.PassphraseFV = "passphrase-flags" flags.CorsoPassphraseFV = "passphrase-flags"
repoDetails, err := getStorageAndAccountWithViper( repoDetails, err := getStorageAndAccountWithViper(
ctx,
vpr, vpr,
storage.ProviderS3, storage.ProviderS3,
true, true,
@ -391,7 +378,7 @@ func (suite *ConfigSuite) TestReadFromFlags() {
assert.Equal(t, flags.AzureClientSecretFV, m365Config.AzureClientSecret) assert.Equal(t, flags.AzureClientSecretFV, m365Config.AzureClientSecret)
assert.Equal(t, flags.AzureClientTenantFV, m365Config.AzureTenantID) assert.Equal(t, flags.AzureClientTenantFV, m365Config.AzureTenantID)
assert.Equal(t, flags.PassphraseFV, pass) assert.Equal(t, flags.CorsoPassphraseFV, pass)
} }
// ------------------------------------------------------------ // ------------------------------------------------------------
@ -400,7 +387,6 @@ func (suite *ConfigSuite) TestReadFromFlags() {
type ConfigIntegrationSuite struct { type ConfigIntegrationSuite struct {
tester.Suite tester.Suite
m365 its.M365IntgTestSetup
} }
func TestConfigIntegrationSuite(t *testing.T) { func TestConfigIntegrationSuite(t *testing.T) {
@ -409,27 +395,21 @@ func TestConfigIntegrationSuite(t *testing.T) {
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})}) [][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
} }
func (suite *ConfigIntegrationSuite) SetupSuite() {
suite.m365 = its.GetM365(suite.T())
}
func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() { func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
t := suite.T() t := suite.T()
vpr := viper.New() vpr := viper.New()
ctx, flush := tester.NewContext(t)
defer flush()
const ( const (
bkt = "get-storage-and-account-bucket" bkt = "get-storage-and-account-bucket"
end = "https://get-storage-and-account.com" end = "https://get-storage-and-account.com"
pfx = "get-storage-and-account-prefix/" pfx = "get-storage-and-account-prefix/"
tid = "3a2faa4e-a882-445c-9d27-f552ef189381"
) )
// Configure viper to read test config file // Configure viper to read test config file
testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml") testConfigFilePath := filepath.Join(t.TempDir(), "corso.toml")
err := initWithViper(ctx, vpr, testConfigFilePath) err := initWithViper(vpr, testConfigFilePath)
require.NoError(t, err, "initializing repo config", clues.ToCore(err)) require.NoError(t, err, "initializing repo config", clues.ToCore(err))
s3Cfg := &storage.S3Config{ s3Cfg := &storage.S3Config{
@ -439,10 +419,9 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
DoNotVerifyTLS: true, DoNotVerifyTLS: true,
DoNotUseTLS: true, DoNotUseTLS: true,
} }
m365 := account.M365Config{AzureTenantID: tid}
creds := suite.m365.Creds err = writeRepoConfigWithViper(vpr, s3Cfg, m365, repository.Options{}, "repoid")
err = writeRepoConfigWithViper(vpr, s3Cfg, creds, repository.Options{}, "repoid")
require.NoError(t, err, "writing repo config", clues.ToCore(err)) require.NoError(t, err, "writing repo config", clues.ToCore(err))
require.Equal( require.Equal(
@ -454,7 +433,7 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
err = vpr.ReadInConfig() err = vpr.ReadInConfig()
require.NoError(t, err, "reading repo config", clues.ToCore(err)) require.NoError(t, err, "reading repo config", clues.ToCore(err))
cfg, err := getStorageAndAccountWithViper(ctx, vpr, storage.ProviderS3, true, true, nil) cfg, err := getStorageAndAccountWithViper(vpr, storage.ProviderS3, true, true, nil)
require.NoError(t, err, "getting storage and account from config", clues.ToCore(err)) require.NoError(t, err, "getting storage and account from config", clues.ToCore(err))
readS3Cfg, err := cfg.Storage.ToS3Config() readS3Cfg, err := cfg.Storage.ToS3Config()
@ -483,19 +462,17 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount_noFileOnlyOverride
t := suite.T() t := suite.T()
vpr := viper.New() vpr := viper.New()
ctx, flush := tester.NewContext(t)
defer flush()
const ( const (
bkt = "get-storage-and-account-no-file-bucket" bkt = "get-storage-and-account-no-file-bucket"
end = "https://get-storage-and-account.com/no-file" end = "https://get-storage-and-account.com/no-file"
pfx = "get-storage-and-account-no-file-prefix/" pfx = "get-storage-and-account-no-file-prefix/"
tid = "88f8522b-18e4-4d0f-b514-2d7b34d4c5a1"
) )
creds := suite.m365.Creds m365 := account.M365Config{AzureTenantID: tid}
overrides := map[string]string{ overrides := map[string]string{
account.AzureTenantID: suite.m365.TenantID, account.AzureTenantID: tid,
account.AccountProviderTypeKey: account.ProviderM365.String(), account.AccountProviderTypeKey: account.ProviderM365.String(),
storage.Bucket: bkt, storage.Bucket: bkt,
storage.Endpoint: end, storage.Endpoint: end,
@ -505,7 +482,7 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount_noFileOnlyOverride
storage.StorageProviderTypeKey: storage.ProviderS3.String(), storage.StorageProviderTypeKey: storage.ProviderS3.String(),
} }
cfg, err := getStorageAndAccountWithViper(ctx, vpr, storage.ProviderS3, false, true, overrides) cfg, err := getStorageAndAccountWithViper(vpr, storage.ProviderS3, false, true, overrides)
require.NoError(t, err, "getting storage and account from config", clues.ToCore(err)) require.NoError(t, err, "getting storage and account from config", clues.ToCore(err))
readS3Cfg, err := cfg.Storage.ToS3Config() readS3Cfg, err := cfg.Storage.ToS3Config()
@ -524,7 +501,7 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount_noFileOnlyOverride
readM365, err := cfg.Account.M365Config() readM365, err := cfg.Account.M365Config()
require.NoError(t, err, "reading m365 config from account", clues.ToCore(err)) require.NoError(t, err, "reading m365 config from account", clues.ToCore(err))
assert.Equal(t, readM365.AzureTenantID, creds.AzureTenantID) assert.Equal(t, readM365.AzureTenantID, m365.AzureTenantID)
assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID)) assert.Equal(t, readM365.AzureClientID, os.Getenv(credentials.AzureClientID))
assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret)) assert.Equal(t, readM365.AzureClientSecret, os.Getenv(credentials.AzureClientSecret))
} }

View File

@ -45,17 +45,15 @@ func configureStorage(
return store, clues.Wrap(err, "validating corso credentials") return store, clues.Wrap(err, "validating corso credentials")
} }
configDir, _ := filepath.Split(vpr.ConfigFileUsed())
cCfg := storage.CommonConfig{ cCfg := storage.CommonConfig{
Corso: corso, Corso: corso,
KopiaCfgDir: configDir,
} }
// the following is a hack purely for integration testing. // the following is a hack purely for integration testing.
// the value is not required, and if empty, kopia will default // the value is not required, and if empty, kopia will default
// to its routine behavior // to its routine behavior
if t, ok := vpr.Get("corso-testing").(bool); t && ok { if t, ok := vpr.Get("corso-testing").(bool); t && ok {
cCfg.KopiaCfgDir = configDir dir, _ := filepath.Split(vpr.ConfigFileUsed())
cCfg.KopiaCfgDir = dir
} }
// ensure required properties are present // ensure required properties are present
@ -78,7 +76,7 @@ func configureStorage(
func GetAndInsertCorso(passphase string) credentials.Corso { func GetAndInsertCorso(passphase string) credentials.Corso {
// fetch data from flag, env var or func param giving priority to func param // fetch data from flag, env var or func param giving priority to func param
// Func param generally will be value fetched from config file using viper. // Func param generally will be value fetched from config file using viper.
corsoPassph := str.First(flags.PassphraseFV, os.Getenv(credentials.CorsoPassphrase), passphase) corsoPassph := str.First(flags.CorsoPassphraseFV, os.Getenv(credentials.CorsoPassphrase), passphase)
return credentials.Corso{ return credentials.Corso{
CorsoPassphrase: corsoPassph, CorsoPassphrase: corsoPassph,

View File

@ -1,120 +0,0 @@
package debug
import (
"context"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/selectors"
)
// subCommandFuncs lists the constructors for each `corso debug <subcommand>`.
var subCommandFuncs = []func() *cobra.Command{
	metadataFilesCmd,
}

// debugCommands lists the per-service hooks that attach provider-specific
// variants to each debug subcommand.
var debugCommands = []func(cmd *cobra.Command) *cobra.Command{
	addOneDriveCommands,
	addSharePointCommands,
	addGroupsCommands,
	addExchangeCommands,
}
// AddCommands attaches all `corso debug * *` commands to the parent.
// For every debug subcommand it also registers each service-specific
// variant along with the full set of provider and storage flags.
func AddCommands(cmd *cobra.Command) {
	debugC, _ := utils.AddCommand(cmd, debugCmd(), utils.MarkDebugCommand())

	for _, newSubCmd := range subCommandFuncs {
		sub := newSubCmd()
		utils.AddCommand(debugC, sub, utils.MarkDebugCommand())

		for _, addServiceCmd := range debugCommands {
			serviceCmd := addServiceCmd(sub)
			flags.AddAllProviderFlags(serviceCmd)
			flags.AddAllStorageFlags(serviceCmd)
		}
	}
}
// ---------------------------------------------------------------------------
// Commands
// ---------------------------------------------------------------------------
const debugCommand = "debug"
// The debug category of commands.
// `corso debug [<subcommand>] [<flag>...]`
// debugCmd builds the root `corso debug` command; with no subcommand it
// just prints its own help text.
func debugCmd() *cobra.Command {
	c := &cobra.Command{
		Use:   debugCommand,
		Short: "debugging & troubleshooting utilities",
		Long:  `debug the data stored in corso.`,
		RunE:  handledebugCmd,
		Args:  cobra.NoArgs,
	}

	return c
}
// Handler for flat calls to `corso debug`.
// Produces the same output as `corso debug --help`.
func handledebugCmd(cmd *cobra.Command, args []string) error {
	// no standalone behavior; delegate to cobra's help rendering.
	return cmd.Help()
}
// The debug metadataFiles subcommand.
// `corso debug metadata-files <service> [<flag>...]`
var metadataFilesCommand = "metadata-files"
// metadataFilesCmd builds the `corso debug metadata-files` subcommand.
func metadataFilesCmd() *cobra.Command {
	c := &cobra.Command{
		Use:   metadataFilesCommand,
		Short: "display all the metadata file contents stored by the service",
		RunE:  handleMetadataFilesCmd,
		Args:  cobra.NoArgs,
	}

	return c
}
// Handler for calls to `corso debug metadata-files`.
// Produces the same output as `corso debug metadata-files --help`.
func handleMetadataFilesCmd(cmd *cobra.Command, args []string) error {
	// no standalone behavior; delegate to cobra's help rendering.
	return cmd.Help()
}
// ---------------------------------------------------------------------------
// runners
// ---------------------------------------------------------------------------
// genericMetadataFiles connects to the repository for the selector's
// service, fetches every metadata file recorded for the given backup,
// and prints each file's name, path, and pretty-formatted contents.
func genericMetadataFiles(
	ctx context.Context,
	cmd *cobra.Command,
	args []string,
	sel selectors.Selector,
	backupID string,
) error {
	ctx = clues.Add(ctx, "backup_id", backupID)

	repo, _, err := utils.GetAccountAndConnect(ctx, cmd, sel.PathService())
	if err != nil {
		return Only(ctx, err)
	}

	defer utils.CloseRepo(ctx, repo)

	// read metadata
	metaFiles, err := repo.GetBackupMetadata(ctx, sel, backupID, fault.New(true))
	if err != nil {
		return Only(ctx, clues.Wrap(err, "retrieving metadata files"))
	}

	for _, mf := range metaFiles {
		Infof(ctx, "\n------------------------------")
		Info(ctx, mf.Name)
		Info(ctx, mf.Path)
		Pretty(ctx, mf.Data)
	}

	return nil
}

View File

@ -1,66 +0,0 @@
package debug
import (
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/selectors"
)
// called by debug.go to map subcommands to provider-specific handling.
// Returns nil when the given subcommand has no exchange variant.
func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
	var c *cobra.Command

	if cmd.Use == metadataFilesCommand {
		c, _ = utils.AddCommand(cmd, exchangeMetadataFilesCmd(), utils.MarkDebugCommand())
		c.Use = c.Use + " " + exchangeServiceCommandUseSuffix

		flags.AddBackupIDFlag(c, true)
	}

	return c
}
// Command keyword, usage suffix, and example text for the exchange
// debug subcommand.
const (
	exchangeServiceCommand          = "exchange"
	exchangeServiceCommandUseSuffix = "--backup <backupId>"

	//nolint:lll
	exchangeServiceCommandDebugExamples = `# Display file contents for backup 1234abcd
corso debug metadata-files exchange --backup 1234abcd-12ab-cd34-56de-1234abcd`
)
// `corso debug metadata-files exchange [<flag>...] <destination>`
func exchangeMetadataFilesCmd() *cobra.Command {
	c := &cobra.Command{
		Use:     exchangeServiceCommand,
		Short:   "Display exchange metadata file content",
		RunE:    metadataFilesExchangeCmd,
		Args:    cobra.NoArgs,
		Example: exchangeServiceCommandDebugExamples,
	}

	return c
}
// metadataFilesExchangeCmd handles `corso debug metadata-files exchange`.
// It no-ops after showing help when no flags were given, and also in
// flag-test run mode.
func metadataFilesExchangeCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	if flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	backupSel := selectors.NewExchangeBackup([]string{"unused-placeholder"})
	backupSel.Include(backupSel.AllData())

	return genericMetadataFiles(
		ctx,
		cmd,
		args,
		backupSel.Selector,
		flags.BackupIDFV)
}

View File

@ -1,77 +0,0 @@
package debug
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/tester"
)
// ExchangeUnitSuite exercises the exchange debug command wiring.
type ExchangeUnitSuite struct {
	tester.Suite
}

// TestExchangeUnitSuite runs the suite.
func TestExchangeUnitSuite(t *testing.T) {
	suite.Run(t, &ExchangeUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestExchangeCommands verifies that addExchangeCommands registers the
// exchange debug subcommand with the expected use string, short
// description, run handler, and provider/storage flag wiring.
func (suite *ExchangeUnitSuite) TestExchangeCommands() {
	expectUse := exchangeServiceCommand + " " + exchangeServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			// fixed typo in the sub-test name: "metdata" -> "metadata"
			name:        "metadata-files exchange",
			use:         metadataFilesCommand,
			expectUse:   expectUse,
			expectShort: exchangeMetadataFilesCmd().Short,
			expectRunE:  metadataFilesExchangeCmd,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			parent := &cobra.Command{Use: metadataFilesCommand}

			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parent,
				addExchangeCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					exchangeServiceCommand,
					[]string{
						"--" + flags.RunModeFN, flags.RunModeFlagTest,
						"--" + flags.BackupFN, flagsTD.BackupInput,
					},
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))

			cliTD.CheckCmdChild(
				t,
				parent,
				3,
				test.expectUse,
				test.expectShort,
				test.expectRunE)

			// the backup flag input should land in the backup ID flag value.
			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			flagsTD.AssertProviderFlags(t, cmd)
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}

View File

@ -1,68 +0,0 @@
package debug
import (
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/selectors"
)
// called by debug.go to map subcommands to provider-specific handling.
// Returns nil when the given subcommand has no groups variant.
func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
	var c *cobra.Command

	if cmd.Use == metadataFilesCommand {
		c, _ = utils.AddCommand(cmd, groupsMetadataFilesCmd(), utils.MarkDebugCommand())
		c.Use = c.Use + " " + groupsServiceCommandUseSuffix

		flags.AddBackupIDFlag(c, true)
	}

	return c
}
// TODO: correct examples
// Command keyword, usage suffix, and example text for the groups
// debug subcommand.
const (
	groupsServiceCommand          = "groups"
	groupsServiceCommandUseSuffix = "--backup <backupId>"

	//nolint:lll
	groupsServiceCommandDebugExamples = `# Display file contents for backup 1234abcd
corso debug metadata-files groups --backup 1234abcd-12ab-cd34-56de-1234abcd`
)
// `corso debug metadata-files groups [<flag>...] <destination>`
func groupsMetadataFilesCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:     groupsServiceCommand,
		Short:   "Display groups metadata file content",
		RunE:    metadataFilesGroupsCmd,
		Args:    cobra.NoArgs,
		Example: groupsServiceCommandDebugExamples,
	}

	return cmd
}
// metadataFilesGroupsCmd is the RunE handler for the groups metadata-files command.
func metadataFilesGroupsCmd(cmd *cobra.Command, args []string) error {
	// short-circuit: help-only invocation, then flag-validation test mode.
	// || preserves the original evaluation order (help check runs first).
	if utils.HasNoFlagsAndShownHelp(cmd) || flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	// the resource owner is irrelevant here; the selector only scopes data types.
	sel := selectors.NewGroupsBackup([]string{"unused-placeholder"})
	sel.Include(sel.AllData())

	return genericMetadataFiles(
		cmd.Context(),
		cmd,
		args,
		sel.Selector,
		flags.BackupIDFV)
}

View File

@ -1,76 +0,0 @@
package debug
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/tester"
)
// GroupsUnitSuite hosts unit tests for the groups debug subcommands.
type GroupsUnitSuite struct {
	tester.Suite
}
func TestGroupsUnitSuite(t *testing.T) {
suite.Run(t, &GroupsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAddGroupsCommands verifies that the groups metadata-files subcommand is
// registered with the expected use/short text, RunE, and flag wiring.
func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
	expectUse := groupsServiceCommand + " " + groupsServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			// fixed typo in subtest name: "metdata" -> "metadata"
			name:        "metadata-files groups",
			use:         metadataFilesCommand,
			expectUse:   expectUse,
			expectShort: groupsMetadataFilesCmd().Short,
			expectRunE:  metadataFilesGroupsCmd,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()
			parent := &cobra.Command{Use: metadataFilesCommand}
			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parent,
				addGroupsCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					groupsServiceCommand,
					[]string{
						"--" + flags.RunModeFN, flags.RunModeFlagTest,
						"--" + flags.BackupFN, flagsTD.BackupInput,
					},
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))
			cliTD.CheckCmdChild(
				t,
				parent,
				3,
				test.expectUse,
				test.expectShort,
				test.expectRunE)
			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			// NOTE(review): unlike the exchange variant, this test does not call
			// flagsTD.AssertProviderFlags — confirm whether that is intentional.
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}

View File

@ -1,66 +0,0 @@
package debug
import (
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/selectors"
)
// called by debug.go to map subcommands to provider-specific handling.
// Returns nil when cmd is not a subcommand this provider registers under.
func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
	if cmd.Use != metadataFilesCommand {
		return nil
	}

	c, _ := utils.AddCommand(cmd, oneDriveMetadataFilesCmd(), utils.MarkDebugCommand())
	c.Use += " " + oneDriveServiceCommandUseSuffix

	flags.AddBackupIDFlag(c, true)

	return c
}
const (
	// cobra Use token and usage suffix for the onedrive debug subcommand.
	oneDriveServiceCommand          = "onedrive"
	oneDriveServiceCommandUseSuffix = "--backup <backupId>"
	// example text shown in the command's help output.
	//nolint:lll
	oneDriveServiceCommandDebugExamples = `# Display file contents for backup 1234abcd
corso debug metadata-files onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd`
)
// `corso debug metadata-files onedrive [<flag>...] <destination>`
func oneDriveMetadataFilesCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:     oneDriveServiceCommand,
		Short:   "Display onedrive metadata file content",
		RunE:    metadataFilesOneDriveCmd,
		Args:    cobra.NoArgs,
		Example: oneDriveServiceCommandDebugExamples,
	}

	return cmd
}
// metadataFilesOneDriveCmd is the RunE handler for the onedrive metadata-files command.
func metadataFilesOneDriveCmd(cmd *cobra.Command, args []string) error {
	// short-circuit: help-only invocation, then flag-validation test mode.
	// || preserves the original evaluation order (help check runs first).
	if utils.HasNoFlagsAndShownHelp(cmd) || flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	// the resource owner is irrelevant here; the selector only scopes data types.
	sel := selectors.NewOneDriveBackup([]string{"unused-placeholder"})
	sel.Include(sel.AllData())

	return genericMetadataFiles(
		cmd.Context(),
		cmd,
		args,
		sel.Selector,
		flags.BackupIDFV)
}

View File

@ -1,76 +0,0 @@
package debug
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/tester"
)
// OneDriveUnitSuite hosts unit tests for the onedrive debug subcommands.
type OneDriveUnitSuite struct {
	tester.Suite
}
func TestOneDriveUnitSuite(t *testing.T) {
suite.Run(t, &OneDriveUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAddOneDriveCommands verifies that the onedrive metadata-files subcommand
// is registered with the expected use/short text, RunE, and flag wiring.
func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
	wantUse := oneDriveServiceCommand + " " + oneDriveServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			name:        "metadata-files onedrive",
			use:         metadataFilesCommand,
			expectUse:   wantUse,
			expectShort: oneDriveMetadataFilesCmd().Short,
			expectRunE:  metadataFilesOneDriveCmd,
		},
	}

	for _, tt := range table {
		suite.Run(tt.name, func() {
			t := suite.T()

			parent := &cobra.Command{Use: metadataFilesCommand}

			childArgs := []string{
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, flagsTD.BackupInput,
			}

			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parent,
				addOneDriveCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					oneDriveServiceCommand,
					childArgs,
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))

			cliTD.CheckCmdChild(
				t,
				parent,
				3,
				tt.expectUse,
				tt.expectShort,
				tt.expectRunE)

			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}

View File

@ -1,66 +0,0 @@
package debug
import (
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/selectors"
)
// called by debug.go to map subcommands to provider-specific handling.
// Returns nil when cmd is not a subcommand this provider registers under.
func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
	if cmd.Use != metadataFilesCommand {
		return nil
	}

	c, _ := utils.AddCommand(cmd, sharePointMetadataFilesCmd(), utils.MarkDebugCommand())
	c.Use += " " + sharePointServiceCommandUseSuffix

	flags.AddBackupIDFlag(c, true)

	return c
}
const (
	// cobra Use token and usage suffix for the sharepoint debug subcommand.
	sharePointServiceCommand          = "sharepoint"
	sharePointServiceCommandUseSuffix = "--backup <backupId>"
	// example text shown in the command's help output.
	//nolint:lll
	sharePointServiceCommandDebugExamples = `# Display file contents for backup 1234abcd
corso debug metadata-files sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd`
)
// `corso debug metadata-files sharepoint [<flag>...] <destination>`
func sharePointMetadataFilesCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:     sharePointServiceCommand,
		Short:   "Display sharepoint metadata file content",
		RunE:    metadataFilesSharePointCmd,
		Args:    cobra.NoArgs,
		Example: sharePointServiceCommandDebugExamples,
	}

	return cmd
}
// metadataFilesSharePointCmd is the RunE handler for the sharepoint metadata-files command.
func metadataFilesSharePointCmd(cmd *cobra.Command, args []string) error {
	// short-circuit: help-only invocation, then flag-validation test mode.
	// || preserves the original evaluation order (help check runs first).
	if utils.HasNoFlagsAndShownHelp(cmd) || flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	// the resource owner is irrelevant here; note this scopes library folders,
	// not AllData, unlike the groups/onedrive variants.
	sel := selectors.NewSharePointBackup([]string{"unused-placeholder"})
	sel.Include(sel.LibraryFolders(selectors.Any()))

	return genericMetadataFiles(
		cmd.Context(),
		cmd,
		args,
		sel.Selector,
		flags.BackupIDFV)
}

View File

@ -1,76 +0,0 @@
package debug
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/tester"
)
// SharePointUnitSuite hosts unit tests for the sharepoint debug subcommands.
type SharePointUnitSuite struct {
	tester.Suite
}
func TestSharePointUnitSuite(t *testing.T) {
suite.Run(t, &SharePointUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAddSharePointCommands verifies that the sharepoint metadata-files
// subcommand is registered with the expected use/short text, RunE, and flags.
func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
	expectUse := sharePointServiceCommand + " " + sharePointServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			// fixed typo in subtest name: "metdata" -> "metadata"
			name:        "metadata-files sharepoint",
			use:         metadataFilesCommand,
			expectUse:   expectUse,
			expectShort: sharePointMetadataFilesCmd().Short,
			expectRunE:  metadataFilesSharePointCmd,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()
			parent := &cobra.Command{Use: metadataFilesCommand}
			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parent,
				addSharePointCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					sharePointServiceCommand,
					[]string{
						"--" + flags.RunModeFN, flags.RunModeFlagTest,
						"--" + flags.BackupFN, flagsTD.BackupInput,
					},
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))
			cliTD.CheckCmdChild(
				t,
				parent,
				3,
				test.expectUse,
				test.expectShort,
				test.expectRunE)
			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			// NOTE(review): unlike the exchange variant, this test does not call
			// flagsTD.AssertProviderFlags — confirm whether that is intentional.
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}

View File

@ -1,101 +0,0 @@
package export
import (
"github.com/pkg/errors"
"github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils"
)
// called by export.go to map subcommands to provider-specific handling.
// Returns nil when cmd is not a subcommand this provider registers under.
func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
	if cmd.Use != exportCommand {
		return nil
	}

	c, _ := utils.AddCommand(cmd, exchangeExportCmd())
	c.Use += " " + exchangeServiceCommandUseSuffix

	flags.AddBackupIDFlag(c, true)
	flags.AddExchangeDetailsAndRestoreFlags(c, true)
	flags.AddExportConfigFlags(c)
	flags.AddFailFastFlag(c)

	return c
}
const (
	// cobra Use token and usage suffix for the exchange export subcommand.
	exchangeServiceCommand          = "exchange"
	exchangeServiceCommandUseSuffix = "<destination> --backup <backupId>"
	// example text shown in the command's help output.
	// TODO(meain): remove message about only supporting email exports once others are added
	//nolint:lll
	exchangeServiceCommandExportExamples = `> Only email exports are supported as of now.
# Export emails with ID 98765abcdef and 12345abcdef from Alice's last backup (1234abcd...) to my-folder
corso export exchange my-folder --backup 1234abcd-12ab-cd34-56de-1234abcd --email 98765abcdef,12345abcdef
# Export emails with subject containing "Hello world" in the "Inbox" to my-folder
corso export exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
--email-subject "Hello world" --email-folder Inbox my-folder`
	// TODO(meain): Uncomment once support for these are added
	// `# Export an entire calendar to my-folder
	// corso export exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
	// --event-calendar Calendar my-folder
	// # Export the contact with ID abdef0101 to my-folder
	// corso export exchange --backup 1234abcd-12ab-cd34-56de-1234abcd --contact abdef0101 my-folder`
)
// `corso export exchange [<flag>...] <destination>`
func exchangeExportCmd() *cobra.Command {
	return &cobra.Command{
		Use:   exchangeServiceCommand,
		Short: "Export M365 Exchange service data",
		RunE:  exportExchangeCmd,
		// exactly one positional arg (the export destination) is required.
		Args: func(cmd *cobra.Command, args []string) error {
			switch len(args) {
			case 0:
				return errors.New("missing export destination")
			case 1:
				return nil
			default:
				// previously reported "missing export destination" for surplus
				// args, which was misleading.
				return errors.New("only a single export destination is allowed")
			}
		},
		Example: exchangeServiceCommandExportExamples,
	}
}
// processes an exchange service export.
func exportExchangeCmd(cmd *cobra.Command, args []string) error {
	ctx := cmd.Context()

	// help-only invocation: nothing to do. Must run before any flag parsing.
	if utils.HasNoFlagsAndShownHelp(cmd) {
		return nil
	}

	opts := utils.MakeExchangeOpts(cmd)

	// flag-validation test mode stops after options are materialized.
	if flags.RunModeFV == flags.RunModeFlagTest {
		return nil
	}

	err := utils.ValidateExchangeRestoreFlags(flags.BackupIDFV, opts)
	if err != nil {
		return err
	}

	selector := utils.IncludeExchangeRestoreDataSelectors(opts)
	utils.FilterExchangeRestoreInfoSelectors(selector, opts)

	return runExport(
		ctx,
		cmd,
		args,
		opts.ExportCfg,
		selector.Selector,
		flags.BackupIDFV,
		"Exchange",
		defaultAcceptedFormatTypes)
}

View File

@ -1,78 +0,0 @@
package export
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester"
)
// ExchangeUnitSuite hosts unit tests for the exchange export subcommands.
type ExchangeUnitSuite struct {
	tester.Suite
}
func TestExchangeUnitSuite(t *testing.T) {
suite.Run(t, &ExchangeUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestAddExchangeCommands verifies that the exchange export subcommand is
// registered with the expected use/short text, RunE, and flag wiring.
func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
	wantUse := exchangeServiceCommand + " " + exchangeServiceCommandUseSuffix

	table := []struct {
		name        string
		use         string
		expectUse   string
		expectShort string
		expectRunE  func(*cobra.Command, []string) error
	}{
		{
			name:        "export exchange",
			use:         exportCommand,
			expectUse:   wantUse,
			expectShort: exchangeExportCmd().Short,
			expectRunE:  exportExchangeCmd,
		},
	}

	for _, tt := range table {
		suite.Run(tt.name, func() {
			t := suite.T()

			parent := &cobra.Command{Use: exportCommand}

			childArgs := []string{
				flagsTD.RestoreDestination,
				"--" + flags.RunModeFN, flags.RunModeFlagTest,
				"--" + flags.BackupFN, flagsTD.BackupInput,
				"--" + flags.FormatFN, flagsTD.FormatType,
				"--" + flags.ArchiveFN,
			}

			cmd := cliTD.SetUpCmdHasFlags(
				t,
				parent,
				addExchangeCommands,
				[]cliTD.UseCobraCommandFn{
					flags.AddAllProviderFlags,
					flags.AddAllStorageFlags,
				},
				flagsTD.WithFlags(
					exchangeServiceCommand,
					childArgs,
					flagsTD.PreparedProviderFlags(),
					flagsTD.PreparedStorageFlags()))

			cliTD.CheckCmdChild(
				t,
				parent,
				3,
				tt.expectUse,
				tt.expectShort,
				tt.expectRunE)

			opts := utils.MakeExchangeOpts(cmd)

			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
			assert.Equal(t, flagsTD.Archive, opts.ExportCfg.Archive)
			assert.Equal(t, flagsTD.FormatType, opts.ExportCfg.Format)
			flagsTD.AssertStorageFlags(t, cmd)
		})
	}
}

View File

@ -5,17 +5,15 @@ import (
"errors" "errors"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/dustin/go-humanize"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/dttm"
"github.com/alcionai/corso/src/pkg/export" "github.com/alcionai/corso/src/pkg/export"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@ -24,11 +22,8 @@ var exportCommands = []func(cmd *cobra.Command) *cobra.Command{
addOneDriveCommands, addOneDriveCommands,
addSharePointCommands, addSharePointCommands,
addGroupsCommands, addGroupsCommands,
addExchangeCommands,
} }
var defaultAcceptedFormatTypes = []string{string(control.DefaultFormat)}
// AddCommands attaches all `corso export * *` commands to the parent. // AddCommands attaches all `corso export * *` commands to the parent.
func AddCommands(cmd *cobra.Command) { func AddCommands(cmd *cobra.Command) {
subCommand := exportCmd() subCommand := exportCmd()
@ -67,9 +62,8 @@ func runExport(
ueco utils.ExportCfgOpts, ueco utils.ExportCfgOpts,
sel selectors.Selector, sel selectors.Selector,
backupID, serviceName string, backupID, serviceName string,
acceptedFormatTypes []string,
) error { ) error {
if err := utils.ValidateExportConfigFlags(&ueco, acceptedFormatTypes); err != nil { if err := utils.ValidateExportConfigFlags(&ueco); err != nil {
return Only(ctx, err) return Only(ctx, err)
} }
@ -97,7 +91,7 @@ func runExport(
return Only(ctx, clues.Wrap(err, "Failed to initialize "+serviceName+" export")) return Only(ctx, clues.Wrap(err, "Failed to initialize "+serviceName+" export"))
} }
collections, err := eo.Run(ctx) expColl, err := eo.Run(ctx)
if err != nil { if err != nil {
if errors.Is(err, data.ErrNotFound) { if errors.Is(err, data.ErrNotFound) {
return Only(ctx, clues.New("Backup or backup details missing for id "+backupID)) return Only(ctx, clues.New("Backup or backup details missing for id "+backupID))
@ -106,45 +100,12 @@ func runExport(
return Only(ctx, clues.Wrap(err, "Failed to run "+serviceName+" export")) return Only(ctx, clues.Wrap(err, "Failed to run "+serviceName+" export"))
} }
if err = showExportProgress(ctx, eo, collections, exportLocation); err != nil {
return err
}
if len(eo.Errors.Recovered()) > 0 {
Infof(ctx, "\nExport failures")
for _, i := range eo.Errors.Recovered() {
Err(ctx, i.Error())
}
return Only(ctx, clues.New("Incomplete export of "+serviceName+" data"))
}
stats := eo.GetStats()
if len(stats) > 0 {
Infof(ctx, "\nExport details")
}
for k, s := range stats {
Infof(ctx, "%s: %d items (%s)", k.HumanString(), s.ResourceCount, humanize.Bytes(uint64(s.BytesRead)))
}
return nil
}
// slim wrapper that allows us to defer the progress bar closure with the expected scope.
func showExportProgress(
ctx context.Context,
op operations.ExportOperation,
collections []export.Collectioner,
exportLocation string,
) error {
// It would be better to give a progressbar than a spinner, but we // It would be better to give a progressbar than a spinner, but we
// have any way of knowing how many files are available as of now. // have any way of knowing how many files are available as of now.
progressMessage := observe.MessageWithCompletion(ctx, observe.DefaultCfg(), "Writing data to disk") diskWriteComplete := observe.MessageWithCompletion(ctx, "Writing data to disk")
defer close(progressMessage) defer close(diskWriteComplete)
err := export.ConsumeExportCollections(ctx, exportLocation, collections, op.Errors) err = export.ConsumeExportCollections(ctx, exportLocation, expColl, eo.Errors)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
} }

View File

@ -3,26 +3,30 @@ package export
import ( import (
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/control"
) )
// called by export.go to map subcommands to provider-specific handling. // called by export.go to map subcommands to provider-specific handling.
func addGroupsCommands(cmd *cobra.Command) *cobra.Command { func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use { switch cmd.Use {
case exportCommand: case exportCommand:
c, _ = utils.AddCommand(cmd, groupsExportCmd(), utils.MarkPreviewCommand()) c, fs = utils.AddCommand(cmd, groupsExportCmd(), utils.MarkPreviewCommand())
c.Use = c.Use + " " + groupsServiceCommandUseSuffix c.Use = c.Use + " " + groupsServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
flags.AddSiteFlag(c, false)
flags.AddSiteIDFlag(c, false)
flags.AddSharePointDetailsAndRestoreFlags(c)
flags.AddGroupDetailsAndRestoreFlags(c) flags.AddGroupDetailsAndRestoreFlags(c)
flags.AddExportConfigFlags(c) flags.AddExportConfigFlags(c)
flags.AddFailFastFlag(c) flags.AddFailFastFlag(c)
@ -50,13 +54,7 @@ corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd
# Export all files and folders in folder "Documents/Finance Reports" that were created before 2020 to /my-exports # Export all files and folders in folder "Documents/Finance Reports" that were created before 2020 to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd \ corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd \
--folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00 --folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
# Export all posts from a conversation with topic "hello world" from group mailbox's last backup to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world"
# Export post with ID 98765abcdef from a conversation from group mailbox's last backup to /my-exports
corso export groups my-exports --backup 1234abcd-12ab-cd34-56de-1234abcd --conversation "hello world" --post 98765abcdef`
) )
// `corso export groups [<flag>...] <destination>` // `corso export groups [<flag>...] <destination>`
@ -91,25 +89,12 @@ func exportGroupsCmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
if err := utils.ValidateGroupsRestoreFlags(flags.BackupIDFV, opts, false); err != nil { if err := utils.ValidateGroupsRestoreFlags(flags.BackupIDFV, opts); err != nil {
return err return err
} }
sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts) sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)
utils.FilterGroupsRestoreInfoSelectors(sel, opts) utils.FilterGroupsRestoreInfoSelectors(sel, opts)
acceptedGroupsFormatTypes := []string{ return runExport(ctx, cmd, args, opts.ExportCfg, sel.Selector, flags.BackupIDFV, "Groups")
string(control.DefaultFormat),
string(control.JSONFormat),
}
return runExport(
ctx,
cmd,
args,
opts.ExportCfg,
sel.Selector,
flags.BackupIDFV,
"Groups",
acceptedGroupsFormatTypes)
} }

View File

@ -3,6 +3,7 @@ package export
import ( import (
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
@ -10,14 +11,21 @@ import (
// called by export.go to map subcommands to provider-specific handling. // called by export.go to map subcommands to provider-specific handling.
func addOneDriveCommands(cmd *cobra.Command) *cobra.Command { func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use { switch cmd.Use {
case exportCommand: case exportCommand:
c, _ = utils.AddCommand(cmd, oneDriveExportCmd()) c, fs = utils.AddCommand(cmd, oneDriveExportCmd())
c.Use = c.Use + " " + oneDriveServiceCommandUseSuffix c.Use = c.Use + " " + oneDriveServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
flags.AddOneDriveDetailsAndRestoreFlags(c) flags.AddOneDriveDetailsAndRestoreFlags(c)
flags.AddExportConfigFlags(c) flags.AddExportConfigFlags(c)
@ -82,13 +90,5 @@ func exportOneDriveCmd(cmd *cobra.Command, args []string) error {
sel := utils.IncludeOneDriveRestoreDataSelectors(opts) sel := utils.IncludeOneDriveRestoreDataSelectors(opts)
utils.FilterOneDriveRestoreInfoSelectors(sel, opts) utils.FilterOneDriveRestoreInfoSelectors(sel, opts)
return runExport( return runExport(ctx, cmd, args, opts.ExportCfg, sel.Selector, flags.BackupIDFV, "OneDrive")
ctx,
cmd,
args,
opts.ExportCfg,
sel.Selector,
flags.BackupIDFV,
"OneDrive",
defaultAcceptedFormatTypes)
} }

View File

@ -85,7 +85,7 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore) assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore)
assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter) assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter)
assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore) assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
assert.Equal(t, flagsTD.CorsoPassphrase, flags.PassphraseFV) assert.Equal(t, flagsTD.CorsoPassphrase, flags.CorsoPassphraseFV)
flagsTD.AssertStorageFlags(t, cmd) flagsTD.AssertStorageFlags(t, cmd)
}) })
} }

View File

@ -3,6 +3,7 @@ package export
import ( import (
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
@ -10,14 +11,21 @@ import (
// called by export.go to map subcommands to provider-specific handling. // called by export.go to map subcommands to provider-specific handling.
func addSharePointCommands(cmd *cobra.Command) *cobra.Command { func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use { switch cmd.Use {
case exportCommand: case exportCommand:
c, _ = utils.AddCommand(cmd, sharePointExportCmd()) c, fs = utils.AddCommand(cmd, sharePointExportCmd())
c.Use = c.Use + " " + sharePointServiceCommandUseSuffix c.Use = c.Use + " " + sharePointServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
flags.AddSharePointDetailsAndRestoreFlags(c) flags.AddSharePointDetailsAndRestoreFlags(c)
flags.AddExportConfigFlags(c) flags.AddExportConfigFlags(c)
@ -45,27 +53,7 @@ corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
# Export all files in the "Documents" library to the current directory. # Export all files in the "Documents" library to the current directory.
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \ corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--library Documents --folder "Display Templates/Style Sheets" . --library Documents --folder "Display Templates/Style Sheets" .`
# Export lists by their name(s)
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list "list-name-1,list-name-2" .
# Export lists created after a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-after 2024-01-01T12:23:34 .
# Export lists created before a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-before 2024-01-01T12:23:34 .
# Export lists modified before a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-before 2024-01-01T12:23:34 .
# Export lists modified after a given time
corso export sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-after 2024-01-01T12:23:34 .`
) )
// `corso export sharepoint [<flag>...] <destination>` // `corso export sharepoint [<flag>...] <destination>`
@ -106,13 +94,5 @@ func exportSharePointCmd(cmd *cobra.Command, args []string) error {
sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts) sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
utils.FilterSharePointRestoreInfoSelectors(sel, opts) utils.FilterSharePointRestoreInfoSelectors(sel, opts)
return runExport( return runExport(ctx, cmd, args, opts.ExportCfg, sel.Selector, flags.BackupIDFV, "SharePoint")
ctx,
cmd,
args,
opts.ExportCfg,
sel.Selector,
flags.BackupIDFV,
"SharePoint",
defaultAcceptedFormatTypes)
} }

View File

@ -60,11 +60,8 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput, "--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput, "--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput, "--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
"--" + flags.ListFN, flagsTD.FlgInputs(flagsTD.ListsInput), "--" + flags.ListItemFN, flagsTD.FlgInputs(flagsTD.ListItemInput),
"--" + flags.ListCreatedAfterFN, flagsTD.ListCreatedAfterInput, "--" + flags.ListFolderFN, flagsTD.FlgInputs(flagsTD.ListFolderInput),
"--" + flags.ListCreatedBeforeFN, flagsTD.ListCreatedBeforeInput,
"--" + flags.ListModifiedAfterFN, flagsTD.ListModifiedAfterInput,
"--" + flags.ListModifiedBeforeFN, flagsTD.ListModifiedBeforeInput,
"--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput), "--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput),
"--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput), "--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
"--" + flags.FormatFN, flagsTD.FormatType, "--" + flags.FormatFN, flagsTD.FormatType,
@ -91,11 +88,8 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore) assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore)
assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter) assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter)
assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore) assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
assert.ElementsMatch(t, flagsTD.ListsInput, opts.Lists) assert.ElementsMatch(t, flagsTD.ListItemInput, opts.ListItem)
assert.Equal(t, flagsTD.ListCreatedAfterInput, opts.ListCreatedAfter) assert.ElementsMatch(t, flagsTD.ListFolderInput, opts.ListFolder)
assert.Equal(t, flagsTD.ListCreatedBeforeInput, opts.ListCreatedBefore)
assert.Equal(t, flagsTD.ListModifiedAfterInput, opts.ListModifiedAfter)
assert.Equal(t, flagsTD.ListModifiedBeforeInput, opts.ListModifiedBefore)
assert.ElementsMatch(t, flagsTD.PageInput, opts.Page) assert.ElementsMatch(t, flagsTD.PageInput, opts.Page)
assert.ElementsMatch(t, flagsTD.PageFolderInput, opts.PageFolder) assert.ElementsMatch(t, flagsTD.PageFolderInput, opts.PageFolder)
assert.Equal(t, flagsTD.Archive, opts.ExportCfg.Archive) assert.Equal(t, flagsTD.Archive, opts.ExportCfg.Archive)

View File

@ -1,11 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
)
// AddGenericBackupFlags registers the flags shared by every backup command.
// Registration order is preserved from the original implementation.
func AddGenericBackupFlags(cmd *cobra.Command) {
	adders := []func(*cobra.Command){
		AddFailFastFlag,
		AddDisableIncrementalsFlag,
		AddForceItemDataDownloadFlag,
	}
	for _, add := range adders {
		add(cmd)
	}
}

View File

@ -8,15 +8,12 @@ func AddAllBackupListFlags(cmd *cobra.Command) {
AddFailedItemsFN(cmd) AddFailedItemsFN(cmd)
AddSkippedItemsFN(cmd) AddSkippedItemsFN(cmd)
AddRecoveredErrorsFN(cmd) AddRecoveredErrorsFN(cmd)
AddAlertsFN(cmd)
} }
func AddFailedItemsFN(cmd *cobra.Command) { func AddFailedItemsFN(cmd *cobra.Command) {
fs := cmd.Flags() cmd.Flags().StringVar(
fs.StringVar( &ListFailedItemsFV, FailedItemsFN, Show,
&FailedItemsFV, FailedItemsFN, Show,
"Toggles showing or hiding the list of items that failed.") "Toggles showing or hiding the list of items that failed.")
cobra.CheckErr(fs.MarkHidden(FailedItemsFN))
} }
func AddSkippedItemsFN(cmd *cobra.Command) { func AddSkippedItemsFN(cmd *cobra.Command) {
@ -30,9 +27,3 @@ func AddRecoveredErrorsFN(cmd *cobra.Command) {
&ListRecoveredErrorsFV, RecoveredErrorsFN, Show, &ListRecoveredErrorsFV, RecoveredErrorsFN, Show,
"Toggles showing or hiding the list of errors which Corso recovered from.") "Toggles showing or hiding the list of errors which Corso recovered from.")
} }
func AddAlertsFN(cmd *cobra.Command) {
cmd.Flags().StringVar(
&ListAlertsFV, AlertsFN, Show,
"Toggles showing or hiding the list of alerts produced during the operation.")
}

View File

@ -49,7 +49,7 @@ var (
// AddExchangeDetailsAndRestoreFlags adds flags that are common to both the // AddExchangeDetailsAndRestoreFlags adds flags that are common to both the
// details and restore commands. // details and restore commands.
func AddExchangeDetailsAndRestoreFlags(cmd *cobra.Command, emailOnly bool) { func AddExchangeDetailsAndRestoreFlags(cmd *cobra.Command) {
fs := cmd.Flags() fs := cmd.Flags()
// email flags // email flags
@ -78,12 +78,6 @@ func AddExchangeDetailsAndRestoreFlags(cmd *cobra.Command, emailOnly bool) {
EmailReceivedBeforeFN, "", EmailReceivedBeforeFN, "",
"Select emails received before this datetime.") "Select emails received before this datetime.")
// NOTE: Only temporary until we add support for exporting the
// others as well in exchange.
if emailOnly {
return
}
// event flags // event flags
fs.StringSliceVar( fs.StringSliceVar(
&EventFV, &EventFV,

View File

@ -28,6 +28,13 @@ func AddFilesystemFlags(cmd *cobra.Command) {
"", "",
"path to local or network storage") "path to local or network storage")
cobra.CheckErr(cmd.MarkFlagRequired(FilesystemPathFN)) cobra.CheckErr(cmd.MarkFlagRequired(FilesystemPathFN))
fs.BoolVar(
&SucceedIfExistsFV,
SucceedIfExistsFN,
false,
"Exit with success if the repo has already been initialized.")
cobra.CheckErr(fs.MarkHidden("succeed-if-exists"))
} }
func FilesystemFlagOverrides(cmd *cobra.Command) map[string]string { func FilesystemFlagOverrides(cmd *cobra.Command) map[string]string {

View File

@ -4,17 +4,12 @@ import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
) )
const ( const DataMessages = "messages"
DataMessages = "messages"
DataConversations = "conversations"
)
const ( const (
ChannelFN = "channel" ChannelFN = "channel"
ConversationFN = "conversation" GroupFN = "group"
GroupFN = "group" MessageFN = "message"
MessageFN = "message"
PostFN = "post"
MessageCreatedAfterFN = "message-created-after" MessageCreatedAfterFN = "message-created-after"
MessageCreatedBeforeFN = "message-created-before" MessageCreatedBeforeFN = "message-created-before"
@ -23,11 +18,9 @@ const (
) )
var ( var (
ChannelFV []string ChannelFV []string
ConversationFV []string GroupFV []string
GroupFV []string MessageFV []string
MessageFV []string
PostFV []string
MessageCreatedAfterFV string MessageCreatedAfterFV string
MessageCreatedBeforeFV string MessageCreatedBeforeFV string
@ -67,24 +60,14 @@ func AddGroupDetailsAndRestoreFlags(cmd *cobra.Command) {
&MessageLastReplyBeforeFV, &MessageLastReplyBeforeFV,
MessageLastReplyBeforeFN, "", MessageLastReplyBeforeFN, "",
"Select messages with replies before this datetime.") "Select messages with replies before this datetime.")
fs.StringSliceVar(
&ConversationFV,
ConversationFN, nil,
"Select data within a Group's Conversation.")
fs.StringSliceVar(
&PostFV,
PostFN, nil,
"Select Conversation Posts by reference.")
} }
// AddGroupFlag adds the --group flag, which accepts either the id, // AddGroupFlag adds the --group flag, which accepts id or name values.
// the display name, or the mailbox address as its values. Users are // TODO: need to decide what the appropriate "name" to accept here is.
// expected to supply the display name. The ID is supported becase, well, // keepers thinks its either DisplayName or MailNickname or Mail
// IDs. The mailbox address is supported as a lookup fallback for certain // Mail is most accurate, MailNickame is accurate and shorter, but the end user
// SDK cases, therefore it's also supported here, though that support // may not see either one visibly.
// isn't exposed to end users. // https://learn.microsoft.com/en-us/graph/api/group-list?view=graph-rest-1.0&tabs=http
func AddGroupFlag(cmd *cobra.Command) { func AddGroupFlag(cmd *cobra.Command) {
cmd.Flags().StringSliceVar( cmd.Flags().StringSliceVar(
&GroupFV, &GroupFV,

View File

@ -12,8 +12,6 @@ const (
FileCreatedBeforeFN = "file-created-before" FileCreatedBeforeFN = "file-created-before"
FileModifiedAfterFN = "file-modified-after" FileModifiedAfterFN = "file-modified-after"
FileModifiedBeforeFN = "file-modified-before" FileModifiedBeforeFN = "file-modified-before"
UseOldDeltaProcessFN = "use-old-delta-process"
) )
var ( var (
@ -24,8 +22,6 @@ var (
FileCreatedBeforeFV string FileCreatedBeforeFV string
FileModifiedAfterFV string FileModifiedAfterFV string
FileModifiedBeforeFV string FileModifiedBeforeFV string
UseOldDeltaProcessFV bool
) )
// AddOneDriveDetailsAndRestoreFlags adds flags that are common to both the // AddOneDriveDetailsAndRestoreFlags adds flags that are common to both the

View File

@ -5,47 +5,41 @@ import (
) )
const ( const (
AlertsFN = "alerts" DeltaPageSizeFN = "delta-page-size"
ConfigFileFN = "config-file" DisableConcurrencyLimiterFN = "disable-concurrency-limiter"
DeltaPageSizeFN = "delta-page-size" DisableDeltaFN = "disable-delta"
DisableDeltaFN = "disable-delta" DisableIncrementalsFN = "disable-incrementals"
DisableIncrementalsFN = "disable-incrementals" ForceItemDataDownloadFN = "force-item-data-download"
DisableLazyItemReaderFN = "disable-lazy-item-reader" EnableImmutableIDFN = "enable-immutable-id"
DisableSlidingWindowLimiterFN = "disable-sliding-window-limiter" FailFastFN = "fail-fast"
ForceItemDataDownloadFN = "force-item-data-download" FailedItemsFN = "failed-items"
EnableImmutableIDFN = "enable-immutable-id" FetchParallelismFN = "fetch-parallelism"
FailFastFN = "fail-fast" NoStatsFN = "no-stats"
FailedItemsFN = "failed-items" RecoveredErrorsFN = "recovered-errors"
FetchParallelismFN = "fetch-parallelism" NoPermissionsFN = "no-permissions"
NoPermissionsFN = "no-permissions" RunModeFN = "run-mode"
NoStatsFN = "no-stats" SkippedItemsFN = "skipped-items"
RecoveredErrorsFN = "recovered-errors" SkipReduceFN = "skip-reduce"
RunModeFN = "run-mode"
SkippedItemsFN = "skipped-items"
SkipReduceFN = "skip-reduce"
) )
var ( var (
ConfigFileFV string DeltaPageSizeFV int
DeltaPageSizeFV int DisableConcurrencyLimiterFV bool
DisableDeltaFV bool DisableDeltaFV bool
DisableIncrementalsFV bool DisableIncrementalsFV bool
DisableLazyItemReaderFV bool ForceItemDataDownloadFV bool
DisableSlidingWindowLimiterFV bool EnableImmutableIDFV bool
ForceItemDataDownloadFV bool FailFastFV bool
EnableImmutableIDFV bool FetchParallelismFV int
FailFastFV bool ListFailedItemsFV string
FailedItemsFV string ListSkippedItemsFV string
FetchParallelismFV int ListRecoveredErrorsFV string
ListAlertsFV string NoStatsFV bool
ListSkippedItemsFV string
ListRecoveredErrorsFV string
NoPermissionsFV bool
NoStatsFV bool
// RunMode describes the type of run, such as: // RunMode describes the type of run, such as:
// flagtest, dry, run. Should default to 'run'. // flagtest, dry, run. Should default to 'run'.
RunModeFV string RunModeFV string
SkipReduceFV bool NoPermissionsFV bool
SkipReduceFV bool
) )
// well-known flag values // well-known flag values
@ -155,6 +149,19 @@ func AddEnableImmutableIDFlag(cmd *cobra.Command) {
cobra.CheckErr(fs.MarkHidden(EnableImmutableIDFN)) cobra.CheckErr(fs.MarkHidden(EnableImmutableIDFN))
} }
// AddDisableConcurrencyLimiterFlag adds a hidden cli flag which, when set,
// removes concurrency limits when communicating with graph API. This
// flag is only relevant for exchange backups for now
func AddDisableConcurrencyLimiterFlag(cmd *cobra.Command) {
fs := cmd.Flags()
fs.BoolVar(
&DisableConcurrencyLimiterFV,
DisableConcurrencyLimiterFN,
false,
"Disable concurrency limiter middleware. Default: false")
cobra.CheckErr(fs.MarkHidden(DisableConcurrencyLimiterFN))
}
// AddRunModeFlag adds the hidden --run-mode flag. // AddRunModeFlag adds the hidden --run-mode flag.
func AddRunModeFlag(cmd *cobra.Command, persistent bool) { func AddRunModeFlag(cmd *cobra.Command, persistent bool) {
fs := cmd.Flags() fs := cmd.Flags()
@ -165,34 +172,3 @@ func AddRunModeFlag(cmd *cobra.Command, persistent bool) {
fs.StringVar(&RunModeFV, RunModeFN, "run", "What mode to run: dry, test, run. Defaults to run.") fs.StringVar(&RunModeFV, RunModeFN, "run", "What mode to run: dry, test, run. Defaults to run.")
cobra.CheckErr(fs.MarkHidden(RunModeFN)) cobra.CheckErr(fs.MarkHidden(RunModeFN))
} }
// AddDisableSlidingWindowLimiterFN disables the experimental sliding window rate
// limiter for graph API requests. This is only relevant for exchange backups.
// Exchange restores continue to use the default token bucket rate limiter.
// Setting this flag switches exchange backups to use the default token bucket
// rate limiter.
func AddDisableSlidingWindowLimiterFlag(cmd *cobra.Command) {
fs := cmd.Flags()
fs.BoolVar(
&DisableSlidingWindowLimiterFV,
DisableSlidingWindowLimiterFN,
false,
"Disable sliding window rate limiter.")
cobra.CheckErr(fs.MarkHidden(DisableSlidingWindowLimiterFN))
}
// AddDisableLazyItemReader disables lazy item reader, such that we fall back to
// prefetch reader. This flag is currently only meant for groups conversations
// backup. Although it can be utilized for other services in future.
//
// This flag should only be used if lazy item reader is the default choice and
// we want to fallback to prefetch reader.
func AddDisableLazyItemReader(cmd *cobra.Command) {
fs := cmd.Flags()
fs.BoolVar(
&DisableLazyItemReaderFV,
DisableLazyItemReaderFN,
false,
"Disable lazy item reader.")
cobra.CheckErr(fs.MarkHidden(DisableLazyItemReaderFN))
}

View File

@ -12,8 +12,8 @@ const (
AWSSessionTokenFN = "aws-session-token" AWSSessionTokenFN = "aws-session-token"
// Corso Flags // Corso Flags
PassphraseFN = "passphrase" CorsoPassphraseFN = "passphrase"
NewPassphraseFN = "new-passphrase" SucceedIfExistsFN = "succeed-if-exists"
) )
var ( var (
@ -22,8 +22,8 @@ var (
AWSAccessKeyFV string AWSAccessKeyFV string
AWSSecretAccessKeyFV string AWSSecretAccessKeyFV string
AWSSessionTokenFV string AWSSessionTokenFV string
PassphraseFV string CorsoPassphraseFV string
NewPhasephraseFV string SucceedIfExistsFV bool
) )
// AddMultipleBackupIDsFlag adds the --backups flag. // AddMultipleBackupIDsFlag adds the --backups flag.
@ -67,27 +67,12 @@ func AddAWSCredsFlags(cmd *cobra.Command) {
// M365 flags // M365 flags
func AddCorsoPassphaseFlags(cmd *cobra.Command) { func AddCorsoPassphaseFlags(cmd *cobra.Command) {
fs := cmd.Flags() fs := cmd.Flags()
fs.StringVar( fs.StringVar(&CorsoPassphraseFV,
&PassphraseFV, CorsoPassphraseFN,
PassphraseFN,
"", "",
"Passphrase to protect encrypted repository contents") "Passphrase to protect encrypted repository contents")
} }
// M365 flags
func AddUpdatePassphraseFlags(cmd *cobra.Command, require bool) {
fs := cmd.Flags()
fs.StringVar(
&NewPhasephraseFV,
NewPassphraseFN,
"",
"update Corso passphrase for repo")
if require {
cobra.CheckErr(cmd.MarkFlagRequired(NewPassphraseFN))
}
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Provider // Provider
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------

View File

@ -38,6 +38,11 @@ func AddS3BucketFlags(cmd *cobra.Command) {
fs.StringVar(&EndpointFV, EndpointFN, "", "S3 service endpoint.") fs.StringVar(&EndpointFV, EndpointFN, "", "S3 service endpoint.")
fs.BoolVar(&DoNotUseTLSFV, DoNotUseTLSFN, false, "Disable TLS (HTTPS)") fs.BoolVar(&DoNotUseTLSFV, DoNotUseTLSFN, false, "Disable TLS (HTTPS)")
fs.BoolVar(&DoNotVerifyTLSFV, DoNotVerifyTLSFN, false, "Disable TLS (HTTPS) certificate verification.") fs.BoolVar(&DoNotVerifyTLSFV, DoNotVerifyTLSFN, false, "Disable TLS (HTTPS) certificate verification.")
// In general, we don't want to expose this flag to users and have them mistake it
// for a broad-scale idempotency solution. We can un-hide it later the need arises.
fs.BoolVar(&SucceedIfExistsFV, SucceedIfExistsFN, false, "Exit with success if the repo has already been initialized.")
cobra.CheckErr(fs.MarkHidden("succeed-if-exists"))
} }
func S3FlagOverrides(cmd *cobra.Command) map[string]string { func S3FlagOverrides(cmd *cobra.Command) map[string]string {

View File

@ -7,39 +7,26 @@ import (
const ( const (
DataLibraries = "libraries" DataLibraries = "libraries"
DataPages = "pages" DataPages = "pages"
DataLists = "lists"
) )
const ( const (
LibraryFN = "library" LibraryFN = "library"
ListFolderFN = "list"
ListFN = "list" ListItemFN = "list-item"
ListModifiedAfterFN = "list-modified-after"
ListModifiedBeforeFN = "list-modified-before"
ListCreatedAfterFN = "list-created-after"
ListCreatedBeforeFN = "list-created-before"
PageFolderFN = "page-folder" PageFolderFN = "page-folder"
PageFN = "page" PageFN = "page"
SiteFN = "site" // site only accepts WebURL values
SiteFN = "site" // site only accepts WebURL values SiteIDFN = "site-id" // site-id accepts actual site ids
SiteIDFN = "site-id" // site-id accepts actual site ids
) )
var ( var (
LibraryFV string LibraryFV string
ListFolderFV []string
ListFV []string ListItemFV []string
ListModifiedAfterFV string
ListModifiedBeforeFV string
ListCreatedAfterFV string
ListCreatedBeforeFV string
PageFolderFV []string PageFolderFV []string
PageFV []string PageFV []string
SiteIDFV []string
SiteIDFV []string WebURLFV []string
WebURLFV []string
) )
// AddSharePointDetailsAndRestoreFlags adds flags that are common to both the // AddSharePointDetailsAndRestoreFlags adds flags that are common to both the
@ -79,26 +66,17 @@ func AddSharePointDetailsAndRestoreFlags(cmd *cobra.Command) {
"Select files modified before this datetime.") "Select files modified before this datetime.")
// lists // lists
fs.StringSliceVar( fs.StringSliceVar(
&ListFV, &ListFolderFV,
ListFN, nil, ListFolderFN, nil,
"Select lists by name.") "Select lists by name; accepts '"+Wildcard+"' to select all lists.")
fs.StringVar( cobra.CheckErr(fs.MarkHidden(ListFolderFN))
&ListModifiedAfterFV, fs.StringSliceVar(
ListModifiedAfterFN, "", &ListItemFV,
"Select lists modified after this datetime.") ListItemFN, nil,
fs.StringVar( "Select lists by item name; accepts '"+Wildcard+"' to select all lists.")
&ListModifiedBeforeFV, cobra.CheckErr(fs.MarkHidden(ListItemFN))
ListModifiedBeforeFN, "",
"Select lists modified before this datetime.")
fs.StringVar(
&ListCreatedAfterFV,
ListCreatedAfterFN, "",
"Select lists created after this datetime.")
fs.StringVar(
&ListCreatedBeforeFV,
ListCreatedBeforeFN, "",
"Select lists created before this datetime.")
// pages // pages
@ -117,28 +95,24 @@ func AddSharePointDetailsAndRestoreFlags(cmd *cobra.Command) {
// AddSiteIDFlag adds the --site-id flag, which accepts site ID values. // AddSiteIDFlag adds the --site-id flag, which accepts site ID values.
// This flag is hidden, since we expect users to prefer the --site url // This flag is hidden, since we expect users to prefer the --site url
// and do not want to encourage confusion. // and do not want to encourage confusion.
func AddSiteIDFlag(cmd *cobra.Command, multiple bool) { func AddSiteIDFlag(cmd *cobra.Command) {
fs := cmd.Flags() fs := cmd.Flags()
message := "ID of the site to operate on"
if multiple {
//nolint:lll
message += "; accepts '" + Wildcard + "' to select all sites. Args cannot be comma-delimited and must use multiple flags."
}
// note string ARRAY var. IDs naturally contain commas, so we cannot accept // note string ARRAY var. IDs naturally contain commas, so we cannot accept
// duplicate values within a flag declaration. ie: --site-id a,b,c does not // duplicate values within a flag declaration. ie: --site-id a,b,c does not
// work. Users must call --site-id a --site-id b --site-id c. // work. Users must call --site-id a --site-id b --site-id c.
fs.StringArrayVar(&SiteIDFV, SiteIDFN, nil, message) fs.StringArrayVar(
&SiteIDFV,
SiteIDFN, nil,
//nolint:lll
"Backup data by site ID; accepts '"+Wildcard+"' to select all sites. Args cannot be comma-delimited and must use multiple flags.")
cobra.CheckErr(fs.MarkHidden(SiteIDFN)) cobra.CheckErr(fs.MarkHidden(SiteIDFN))
} }
// AddSiteFlag adds the --site flag, which accepts webURL values. // AddSiteFlag adds the --site flag, which accepts webURL values.
func AddSiteFlag(cmd *cobra.Command, multiple bool) { func AddSiteFlag(cmd *cobra.Command) {
message := "Web URL of the site to operate on" cmd.Flags().StringSliceVar(
if multiple { &WebURLFV,
message += "; accepts '" + Wildcard + "' to select all sites." SiteFN, nil,
} "Backup data by site URL; accepts '"+Wildcard+"' to select all sites.")
cmd.Flags().StringSliceVar(&WebURLFV, SiteFN, nil, message)
} }

View File

@ -1,13 +0,0 @@
package flags
import (
"github.com/spf13/cobra"
)
const (
DataChats = "chats"
)
func AddTeamsChatsDetailsAndRestoreFlags(cmd *cobra.Command) {
// TODO: add details flags
}

View File

@ -11,7 +11,6 @@ import (
func PreparedBackupListFlags() []string { func PreparedBackupListFlags() []string {
return []string{ return []string{
"--" + flags.AlertsFN, flags.Show,
"--" + flags.FailedItemsFN, flags.Show, "--" + flags.FailedItemsFN, flags.Show,
"--" + flags.SkippedItemsFN, flags.Show, "--" + flags.SkippedItemsFN, flags.Show,
"--" + flags.RecoveredErrorsFN, flags.Show, "--" + flags.RecoveredErrorsFN, flags.Show,
@ -19,8 +18,7 @@ func PreparedBackupListFlags() []string {
} }
func AssertBackupListFlags(t *testing.T, cmd *cobra.Command) { func AssertBackupListFlags(t *testing.T, cmd *cobra.Command) {
assert.Equal(t, flags.Show, flags.ListAlertsFV) assert.Equal(t, flags.Show, flags.ListFailedItemsFV)
assert.Equal(t, flags.Show, flags.FailedItemsFV)
assert.Equal(t, flags.Show, flags.ListSkippedItemsFV) assert.Equal(t, flags.Show, flags.ListSkippedItemsFV)
assert.Equal(t, flags.Show, flags.ListRecoveredErrorsFV) assert.Equal(t, flags.Show, flags.ListRecoveredErrorsFV)
} }

View File

@ -10,7 +10,6 @@ func FlgInputs(in []string) string { return strings.Join(in, ",") }
var ( var (
BackupInput = "backup-id" BackupInput = "backup-id"
SiteInput = "site-id"
GroupsInput = []string{"team1", "group2"} GroupsInput = []string{"team1", "group2"}
MailboxInput = []string{"mailbox1", "mailbox2"} MailboxInput = []string{"mailbox1", "mailbox2"}
@ -21,22 +20,11 @@ var (
ExchangeCategoryDataInput = []string{"email", "events", "contacts"} ExchangeCategoryDataInput = []string{"email", "events", "contacts"}
SharepointCategoryDataInput = []string{"files", "lists", "pages"} SharepointCategoryDataInput = []string{"files", "lists", "pages"}
GroupsCategoryDataInput = []string{"files", "lists", "pages", "messages"} GroupsCategoryDataInput = []string{"files", "lists", "pages", "messages"}
TeamsChatsCategoryDataInput = []string{"chats"}
ChannelInput = []string{"channel1", "channel2"}
MessageInput = []string{"message1", "message2"}
MessageCreatedAfterInput = "messageCreatedAfter"
MessageCreatedBeforeInput = "messageCreatedBefore"
MessageLastReplyAfterInput = "messageLastReplyAfter"
MessageLastReplyBeforeInput = "messageLastReplyBefore"
ContactInput = []string{"contact1", "contact2"} ContactInput = []string{"contact1", "contact2"}
ContactFldInput = []string{"contactFld1", "contactFld2"} ContactFldInput = []string{"contactFld1", "contactFld2"}
ContactNameInput = "contactName" ContactNameInput = "contactName"
ConversationInput = []string{"conversation1", "conversation2"}
PostInput = []string{"post1", "post2"}
EmailInput = []string{"mail1", "mail2"} EmailInput = []string{"mail1", "mail2"}
EmailFldInput = []string{"mailFld1", "mailFld2"} EmailFldInput = []string{"mailFld1", "mailFld2"}
EmailReceivedAfterInput = "mailReceivedAfter" EmailReceivedAfterInput = "mailReceivedAfter"
@ -60,11 +48,8 @@ var (
FileModifiedAfterInput = "fileModifiedAfter" FileModifiedAfterInput = "fileModifiedAfter"
FileModifiedBeforeInput = "fileModifiedBefore" FileModifiedBeforeInput = "fileModifiedBefore"
ListsInput = []string{"listName1", "listName2"} ListFolderInput = []string{"listFolder1", "listFolder2"}
ListCreatedAfterInput = "listCreatedAfter" ListItemInput = []string{"listItem1", "listItem2"}
ListCreatedBeforeInput = "listCreatedBefore"
ListModifiedAfterInput = "listModifiedAfter"
ListModifiedBeforeInput = "listModifiedBefore"
PageFolderInput = []string{"pageFolder1", "pageFolder2"} PageFolderInput = []string{"pageFolder1", "pageFolder2"}
PageInput = []string{"page1", "page2"} PageInput = []string{"page1", "page2"}
@ -93,11 +78,12 @@ var (
FetchParallelism = "3" FetchParallelism = "3"
FailFast = true FailFast = true
DisableIncrementals = true DisableIncrementals = true
ForceItemDataDownload = true ForceItemDataDownload = true
DisableDelta = true DisableDelta = true
EnableImmutableID = true EnableImmutableID = true
DisableConcurrencyLimiter = true
) )
func WithFlags2( func WithFlags2(

View File

@ -1,42 +0,0 @@
package testdata
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/alcionai/corso/src/cli/flags"
)
func PreparedChannelFlags() []string {
return []string{
"--" + flags.ChannelFN, FlgInputs(ChannelInput),
"--" + flags.MessageFN, FlgInputs(MessageInput),
"--" + flags.MessageCreatedAfterFN, MessageCreatedAfterInput,
"--" + flags.MessageCreatedBeforeFN, MessageCreatedBeforeInput,
"--" + flags.MessageLastReplyAfterFN, MessageLastReplyAfterInput,
"--" + flags.MessageLastReplyBeforeFN, MessageLastReplyBeforeInput,
}
}
func AssertChannelFlags(t *testing.T, cmd *cobra.Command) {
assert.ElementsMatch(t, ChannelInput, flags.ChannelFV)
assert.ElementsMatch(t, MessageInput, flags.MessageFV)
assert.Equal(t, MessageCreatedAfterInput, flags.MessageCreatedAfterFV)
assert.Equal(t, MessageCreatedBeforeInput, flags.MessageCreatedBeforeFV)
assert.Equal(t, MessageLastReplyAfterInput, flags.MessageLastReplyAfterFV)
assert.Equal(t, MessageLastReplyBeforeInput, flags.MessageLastReplyBeforeFV)
}
func PreparedConversationFlags() []string {
return []string{
"--" + flags.ConversationFN, FlgInputs(ConversationInput),
"--" + flags.PostFN, FlgInputs(PostInput),
}
}
func AssertConversationFlags(t *testing.T, cmd *cobra.Command) {
assert.Equal(t, ConversationInput, flags.ConversationFV)
assert.Equal(t, PostInput, flags.PostFV)
}

View File

@ -4,7 +4,7 @@ import (
"testing" "testing"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/stretchr/testify/assert" "gotest.tools/v3/assert"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
) )
@ -15,7 +15,7 @@ func PreparedStorageFlags() []string {
"--" + flags.AWSSecretAccessKeyFN, AWSSecretAccessKey, "--" + flags.AWSSecretAccessKeyFN, AWSSecretAccessKey,
"--" + flags.AWSSessionTokenFN, AWSSessionToken, "--" + flags.AWSSessionTokenFN, AWSSessionToken,
"--" + flags.PassphraseFN, CorsoPassphrase, "--" + flags.CorsoPassphraseFN, CorsoPassphrase,
} }
} }
@ -24,7 +24,7 @@ func AssertStorageFlags(t *testing.T, cmd *cobra.Command) {
assert.Equal(t, AWSSecretAccessKey, flags.AWSSecretAccessKeyFV) assert.Equal(t, AWSSecretAccessKey, flags.AWSSecretAccessKeyFV)
assert.Equal(t, AWSSessionToken, flags.AWSSessionTokenFV) assert.Equal(t, AWSSessionToken, flags.AWSSessionTokenFV)
assert.Equal(t, CorsoPassphrase, flags.PassphraseFV) assert.Equal(t, CorsoPassphrase, flags.CorsoPassphraseFV)
} }
func PreparedProviderFlags() []string { func PreparedProviderFlags() []string {
@ -40,17 +40,3 @@ func AssertProviderFlags(t *testing.T, cmd *cobra.Command) {
assert.Equal(t, AzureTenantID, flags.AzureClientTenantFV) assert.Equal(t, AzureTenantID, flags.AzureClientTenantFV)
assert.Equal(t, AzureClientSecret, flags.AzureClientSecretFV) assert.Equal(t, AzureClientSecret, flags.AzureClientSecretFV)
} }
func PreparedGenericBackupFlags() []string {
return []string{
"--" + flags.FailFastFN,
"--" + flags.DisableIncrementalsFN,
"--" + flags.ForceItemDataDownloadFN,
}
}
func AssertGenericBackupFlags(t *testing.T, cmd *cobra.Command) {
assert.True(t, flags.FailFastFV, "fail fast flag")
assert.True(t, flags.DisableIncrementalsFV, "disable incrementals flag")
assert.True(t, flags.ForceItemDataDownloadFV, "force item data download flag")
}

View File

@ -1,32 +0,0 @@
package testdata
import (
"testing"
"github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/alcionai/corso/src/cli/flags"
)
func PreparedLibraryFlags() []string {
return []string{
"--" + flags.LibraryFN, LibraryInput,
"--" + flags.FolderFN, FlgInputs(FolderPathInput),
"--" + flags.FileFN, FlgInputs(FileNameInput),
"--" + flags.FileCreatedAfterFN, FileCreatedAfterInput,
"--" + flags.FileCreatedBeforeFN, FileCreatedBeforeInput,
"--" + flags.FileModifiedAfterFN, FileModifiedAfterInput,
"--" + flags.FileModifiedBeforeFN, FileModifiedBeforeInput,
}
}
func AssertLibraryFlags(t *testing.T, cmd *cobra.Command) {
assert.Equal(t, LibraryInput, flags.LibraryFV)
assert.Equal(t, FolderPathInput, flags.FolderPathFV)
assert.Equal(t, FileNameInput, flags.FileNameFV)
assert.Equal(t, FileCreatedAfterInput, flags.FileCreatedAfterFV)
assert.Equal(t, FileCreatedBeforeInput, flags.FileCreatedBeforeFV)
assert.Equal(t, FileModifiedAfterInput, flags.FileModifiedAfterFV)
assert.Equal(t, FileModifiedBeforeInput, flags.FileModifiedBeforeFV)
}

View File

@ -1,25 +0,0 @@
package testdata
import (
"testing"
"github.com/spf13/cobra"
)
func PreparedTeamsChatsFlags() []string {
return []string{
// FIXME: populate when adding filters
// "--" + flags.ChatCreatedAfterFN, ChatCreatedAfterInput,
// "--" + flags.ChatCreatedBeforeFN, ChatCreatedBeforeInput,
// "--" + flags.ChatLastMessageAfterFN, ChatLastMessageAfterInput,
// "--" + flags.ChatLastMessageBeforeFN, ChatLastMessageBeforeInput,
}
}
func AssertTeamsChatsFlags(t *testing.T, cmd *cobra.Command) {
// FIXME: populate when adding filters
// assert.Equal(t, ChatCreatedAfterInput, flags.ChatCreatedAfterFV)
// assert.Equal(t, ChatCreatedBeforeInput, flags.ChatCreatedBeforeFV)
// assert.Equal(t, ChatLastMessageAfterInput, flags.ChatLastMessageAfterFV)
// assert.Equal(t, ChatLastMessageBeforeInput, flags.ChatLastMessageBeforeFV)
}

View File

@ -46,12 +46,11 @@ func (ev envVar) MinimumPrintable() any {
return ev return ev
} }
func (ev envVar) Headers(bool) []string { func (ev envVar) Headers() []string {
// NOTE: skipID does not make sense in this context
return []string{ev.category, " "} return []string{ev.category, " "}
} }
func (ev envVar) Values(bool) []string { func (ev envVar) Values() []string {
return []string{ev.name, ev.description} return []string{ev.name, ev.description}
} }

View File

@ -5,13 +5,11 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"io" "io"
"strings"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/tidwall/pretty" "github.com/tidwall/pretty"
"github.com/tomlazar/table" "github.com/tomlazar/table"
"github.com/alcionai/corso/src/internal/common/color"
"github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/observe"
) )
@ -85,21 +83,16 @@ func Only(ctx context.Context, e error) error {
// Err prints the params to cobra's error writer (stdErr by default) // Err prints the params to cobra's error writer (stdErr by default)
// if s is nil, prints nothing. // if s is nil, prints nothing.
// Prepends the message with "Error: "
func Err(ctx context.Context, s ...any) { func Err(ctx context.Context, s ...any) {
cw := color.NewColorableWriter(color.Red, getRootCmd(ctx).ErrOrStderr()) out(ctx, getRootCmd(ctx).ErrOrStderr(), s...)
s = append([]any{"Error:"}, s...)
out(ctx, cw, s...)
} }
// Errf prints the params to cobra's error writer (stdErr by default) // Errf prints the params to cobra's error writer (stdErr by default)
// if s is nil, prints nothing. // if s is nil, prints nothing.
// You should ideally be using SimpleError or OperationError. // Prepends the message with "Error: "
func Errf(ctx context.Context, tmpl string, s ...any) { func Errf(ctx context.Context, tmpl string, s ...any) {
cw := color.NewColorableWriter(color.Red, getRootCmd(ctx).ErrOrStderr()) outf(ctx, getRootCmd(ctx).ErrOrStderr(), "\nError: \n\t"+tmpl+"\n", s...)
tmpl = "Error: " + tmpl
outf(ctx, cw, tmpl, s...)
} }
// Out prints the params to cobra's output writer (stdOut by default) // Out prints the params to cobra's output writer (stdOut by default)
@ -126,24 +119,13 @@ func Infof(ctx context.Context, t string, s ...any) {
outf(ctx, getRootCmd(ctx).ErrOrStderr(), t, s...) outf(ctx, getRootCmd(ctx).ErrOrStderr(), t, s...)
} }
// Pretty prettifies and prints the value.
func Pretty(ctx context.Context, a any) {
if a == nil {
Err(ctx, "<nil>")
return
}
printPrettyJSON(ctx, getRootCmd(ctx).ErrOrStderr(), a)
}
// PrettyJSON prettifies and prints the value. // PrettyJSON prettifies and prints the value.
func PrettyJSON(ctx context.Context, p minimumPrintabler) { func PrettyJSON(ctx context.Context, p minimumPrintabler) {
if p == nil { if p == nil {
Err(ctx, "<nil>") Err(ctx, "<nil>")
return
} }
outputJSON(ctx, getRootCmd(ctx).ErrOrStderr(), p, outputAsJSONDebug) outputJSON(getRootCmd(ctx).ErrOrStderr(), p, outputAsJSONDebug)
} }
// out is the testable core of exported print funcs // out is the testable core of exported print funcs
@ -179,11 +161,11 @@ func outf(ctx context.Context, w io.Writer, t string, s ...any) {
type Printable interface { type Printable interface {
minimumPrintabler minimumPrintabler
// should list the property names of the values surfaced in Values() // should list the property names of the values surfaced in Values()
Headers(skipID bool) []string Headers() []string
// list of values for tabular or csv formatting // list of values for tabular or csv formatting
// if the backing data is nil or otherwise missing, // if the backing data is nil or otherwise missing,
// values should provide an empty string as opposed to skipping entries // values should provide an empty string as opposed to skipping entries
Values(skipID bool) []string Values() []string
} }
type minimumPrintabler interface { type minimumPrintabler interface {
@ -193,56 +175,39 @@ type minimumPrintabler interface {
// Item prints the printable, according to the caller's requested format. // Item prints the printable, according to the caller's requested format.
func Item(ctx context.Context, p Printable) { func Item(ctx context.Context, p Printable) {
printItem(ctx, getRootCmd(ctx).OutOrStdout(), p) printItem(getRootCmd(ctx).OutOrStdout(), p)
} }
// print prints the printable items, // print prints the printable items,
// according to the caller's requested format. // according to the caller's requested format.
func printItem(ctx context.Context, w io.Writer, p Printable) { func printItem(w io.Writer, p Printable) {
if outputAsJSON || outputAsJSONDebug { if outputAsJSON || outputAsJSONDebug {
outputJSON(ctx, w, p, outputAsJSONDebug) outputJSON(w, p, outputAsJSONDebug)
return return
} }
outputTable(ctx, w, []Printable{p}) outputTable(w, []Printable{p})
}
// ItemProperties prints the printable either as in a single line or a json
// The difference between this and Item is that this one does not print the ID
func ItemProperties(ctx context.Context, p Printable) {
printItemProperties(ctx, getRootCmd(ctx).OutOrStdout(), p)
}
// print prints the printable items,
// according to the caller's requested format.
func printItemProperties(ctx context.Context, w io.Writer, p Printable) {
if outputAsJSON || outputAsJSONDebug {
outputJSON(ctx, w, p, outputAsJSONDebug)
return
}
outputOneLine(ctx, w, []Printable{p})
} }
// All prints the slice of printable items, // All prints the slice of printable items,
// according to the caller's requested format. // according to the caller's requested format.
func All(ctx context.Context, ps ...Printable) { func All(ctx context.Context, ps ...Printable) {
printAll(ctx, getRootCmd(ctx).OutOrStdout(), ps) printAll(getRootCmd(ctx).OutOrStdout(), ps)
} }
// printAll prints the slice of printable items, // printAll prints the slice of printable items,
// according to the caller's requested format. // according to the caller's requested format.
func printAll(ctx context.Context, w io.Writer, ps []Printable) { func printAll(w io.Writer, ps []Printable) {
if len(ps) == 0 { if len(ps) == 0 {
return return
} }
if outputAsJSON || outputAsJSONDebug { if outputAsJSON || outputAsJSONDebug {
outputJSONArr(ctx, w, ps, outputAsJSONDebug) outputJSONArr(w, ps, outputAsJSONDebug)
return return
} }
outputTable(ctx, w, ps) outputTable(w, ps)
} }
// ------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------
@ -252,23 +217,20 @@ func printAll(ctx context.Context, w io.Writer, ps []Printable) {
// Table writes the printables in a tabular format. Takes headers from // Table writes the printables in a tabular format. Takes headers from
// the 0th printable only. // the 0th printable only.
func Table(ctx context.Context, ps []Printable) { func Table(ctx context.Context, ps []Printable) {
outputTable(ctx, getRootCmd(ctx).OutOrStdout(), ps) outputTable(getRootCmd(ctx).OutOrStdout(), ps)
} }
// output to stdout the list of printable structs in a table // output to stdout the list of printable structs in a table
func outputTable(ctx context.Context, w io.Writer, ps []Printable) { func outputTable(w io.Writer, ps []Printable) {
t := table.Table{ t := table.Table{
Headers: ps[0].Headers(false), Headers: ps[0].Headers(),
Rows: [][]string{}, Rows: [][]string{},
} }
for _, p := range ps { for _, p := range ps {
t.Rows = append(t.Rows, p.Values(false)) t.Rows = append(t.Rows, p.Values())
} }
// observe bars needs to be flushed before printing
observe.Flush(ctx)
_ = t.WriteTable( _ = t.WriteTable(
w, w,
&table.Config{ &table.Config{
@ -282,20 +244,20 @@ func outputTable(ctx context.Context, w io.Writer, ps []Printable) {
// JSON // JSON
// ------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------
func outputJSON(ctx context.Context, w io.Writer, p minimumPrintabler, debug bool) { func outputJSON(w io.Writer, p minimumPrintabler, debug bool) {
if debug { if debug {
printJSON(ctx, w, p) printJSON(w, p)
return return
} }
if debug { if debug {
printJSON(ctx, w, p) printJSON(w, p)
} else { } else {
printJSON(ctx, w, p.MinimumPrintable()) printJSON(w, p.MinimumPrintable())
} }
} }
func outputJSONArr(ctx context.Context, w io.Writer, ps []Printable, debug bool) { func outputJSONArr(w io.Writer, ps []Printable, debug bool) {
sl := make([]any, 0, len(ps)) sl := make([]any, 0, len(ps))
for _, p := range ps { for _, p := range ps {
@ -306,14 +268,11 @@ func outputJSONArr(ctx context.Context, w io.Writer, ps []Printable, debug bool)
} }
} }
printJSON(ctx, w, sl) printJSON(w, sl)
} }
// output to stdout the list of printable structs as json. // output to stdout the list of printable structs as json.
func printJSON(ctx context.Context, w io.Writer, a any) { func printJSON(w io.Writer, a any) {
// observe bars needs to be flushed before printing
observe.Flush(ctx)
bs, err := json.Marshal(a) bs, err := json.Marshal(a)
if err != nil { if err != nil {
fmt.Fprintf(w, "error formatting results to json: %v\n", err) fmt.Fprintf(w, "error formatting results to json: %v\n", err)
@ -322,45 +281,3 @@ func printJSON(ctx context.Context, w io.Writer, a any) {
fmt.Fprintln(w, string(pretty.Pretty(bs))) fmt.Fprintln(w, string(pretty.Pretty(bs)))
} }
// output to stdout the list of printable structs as prettified json.
func printPrettyJSON(ctx context.Context, w io.Writer, a any) {
// observe bars needs to be flushed before printing
observe.Flush(ctx)
bs, err := json.MarshalIndent(a, "", " ")
if err != nil {
fmt.Fprintf(w, "error formatting results to json: %v\n", err)
return
}
fmt.Fprintln(w, string(pretty.Pretty(bs)))
}
// -------------------------------------------------------------------------------------------
// One line
// -------------------------------------------------------------------------------------------
// Output in the following format:
// Bytes Uploaded: 401 kB | Items Uploaded: 59 | Items Skipped: 0 | Errors: 0
func outputOneLine(ctx context.Context, w io.Writer, ps []Printable) {
// observe bars needs to be flushed before printing
observe.Flush(ctx)
headers := ps[0].Headers(true)
rows := [][]string{}
for _, p := range ps {
rows = append(rows, p.Values(true))
}
printables := []string{}
for _, row := range rows {
for i, col := range row {
printables = append(printables, fmt.Sprintf("%s: %s", headers[i], col))
}
}
fmt.Fprintln(w, strings.Join(printables, " | "))
}

View File

@ -2,13 +2,14 @@ package repo
import ( import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/pkg/errors"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/events" "github.com/alcionai/corso/src/internal/events"
"github.com/alcionai/corso/src/pkg/config"
ctrlRepo "github.com/alcionai/corso/src/pkg/control/repository" ctrlRepo "github.com/alcionai/corso/src/pkg/control/repository"
"github.com/alcionai/corso/src/pkg/repository" "github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/storage" "github.com/alcionai/corso/src/pkg/storage"
@ -72,7 +73,7 @@ func initFilesystemCmd(cmd *cobra.Command, args []string) error {
overrides[flags.FilesystemPathFN] = abs overrides[flags.FilesystemPathFN] = abs
cfg, err := config.ReadCorsoConfig( cfg, err := config.GetConfigRepoDetails(
ctx, ctx,
storage.ProviderFilesystem, storage.ProviderFilesystem,
true, true,
@ -86,6 +87,15 @@ func initFilesystemCmd(cmd *cobra.Command, args []string) error {
// Retention is not supported for filesystem repos. // Retention is not supported for filesystem repos.
retentionOpts := ctrlRepo.Retention{} retentionOpts := ctrlRepo.Retention{}
// SendStartCorsoEvent uses distict ID as tenant ID because repoID is still not generated
utils.SendStartCorsoEvent(
ctx,
cfg.Storage,
cfg.Account.ID(),
map[string]any{"command": "init repo"},
cfg.Account.ID(),
opt)
storageCfg, err := cfg.Storage.ToFilesystemConfig() storageCfg, err := cfg.Storage.ToFilesystemConfig()
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Retrieving filesystem configuration")) return Only(ctx, clues.Wrap(err, "Retrieving filesystem configuration"))
@ -109,6 +119,10 @@ func initFilesystemCmd(cmd *cobra.Command, args []string) error {
ric := repository.InitConfig{RetentionOpts: retentionOpts} ric := repository.InitConfig{RetentionOpts: retentionOpts}
if err = r.Initialize(ctx, ric); err != nil { if err = r.Initialize(ctx, ric); err != nil {
if flags.SucceedIfExistsFV && errors.Is(err, repository.ErrorRepoAlreadyExists) {
return nil
}
return Only(ctx, clues.Stack(ErrInitializingRepo, err)) return Only(ctx, clues.Stack(ErrInitializingRepo, err))
} }
@ -158,7 +172,7 @@ func connectFilesystemCmd(cmd *cobra.Command, args []string) error {
overrides[flags.FilesystemPathFN] = abs overrides[flags.FilesystemPathFN] = abs
cfg, err := config.ReadCorsoConfig( cfg, err := config.GetConfigRepoDetails(
ctx, ctx,
storage.ProviderFilesystem, storage.ProviderFilesystem,
true, true,

View File

@ -5,16 +5,16 @@ import (
"testing" "testing"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/config"
cliTD "github.com/alcionai/corso/src/cli/testdata" cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/repository" "github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/storage" "github.com/alcionai/corso/src/pkg/storage"
@ -73,7 +73,7 @@ func (suite *FilesystemE2ESuite) TestInitFilesystemCmd() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"repo", "init", "filesystem", "repo", "init", "filesystem",
"--"+flags.ConfigFileFN, configFP, "--config-file", configFP,
"--path", cfg.Path) "--path", cfg.Path)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -81,9 +81,9 @@ func (suite *FilesystemE2ESuite) TestInitFilesystemCmd() {
err = cmd.ExecuteContext(ctx) err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
// noop // a second initialization should result in an error
err = cmd.ExecuteContext(ctx) err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err)) assert.ErrorIs(t, err, repository.ErrorRepoAlreadyExists, clues.ToCore(err))
}) })
} }
} }
@ -143,7 +143,7 @@ func (suite *FilesystemE2ESuite) TestConnectFilesystemCmd() {
// then test it // then test it
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"repo", "connect", "filesystem", "repo", "connect", "filesystem",
"--"+flags.ConfigFileFN, configFP, "--config-file", configFP,
"--path", cfg.Path) "--path", cfg.Path)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)

View File

@ -8,24 +8,16 @@ import (
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/events"
"github.com/alcionai/corso/src/pkg/control/repository" "github.com/alcionai/corso/src/pkg/control/repository"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
repo "github.com/alcionai/corso/src/pkg/repository"
) )
const ( const (
initCommand = "init" initCommand = "init"
connectCommand = "connect" connectCommand = "connect"
updatePassphraseCommand = "update-passphrase" maintenanceCommand = "maintenance"
MaintenanceCommand = "maintenance"
)
const (
providerCommandUpdatePhasephraseExamples = `# Update the Corso repository passphrase"
corso repo update-passphrase --new-passphrase 'newpass'`
) )
var ( var (
@ -43,26 +35,22 @@ func AddCommands(cmd *cobra.Command) {
var ( var (
// Get new instances so that setting the context during tests works // Get new instances so that setting the context during tests works
// properly. // properly.
repoCmd = repoCmd() repoCmd = repoCmd()
initCmd = initCmd() initCmd = initCmd()
connectCmd = connectCmd() connectCmd = connectCmd()
maintenanceCmd = maintenanceCmd() maintenanceCmd = maintenanceCmd()
updatePassphraseCmd = updatePassphraseCmd()
) )
cmd.AddCommand(repoCmd) cmd.AddCommand(repoCmd)
repoCmd.AddCommand(initCmd) repoCmd.AddCommand(initCmd)
repoCmd.AddCommand(connectCmd) repoCmd.AddCommand(connectCmd)
repoCmd.AddCommand(maintenanceCmd) repoCmd.AddCommand(maintenanceCmd)
repoCmd.AddCommand(updatePassphraseCmd)
flags.AddMaintenanceModeFlag(maintenanceCmd) flags.AddMaintenanceModeFlag(maintenanceCmd)
flags.AddForceMaintenanceFlag(maintenanceCmd) flags.AddForceMaintenanceFlag(maintenanceCmd)
flags.AddMaintenanceUserFlag(maintenanceCmd) flags.AddMaintenanceUserFlag(maintenanceCmd)
flags.AddMaintenanceHostnameFlag(maintenanceCmd) flags.AddMaintenanceHostnameFlag(maintenanceCmd)
flags.AddUpdatePassphraseFlags(updatePassphraseCmd, true)
for _, addRepoTo := range repoCommands { for _, addRepoTo := range repoCommands {
addRepoTo(initCmd) addRepoTo(initCmd)
addRepoTo(connectCmd) addRepoTo(connectCmd)
@ -75,7 +63,7 @@ func repoCmd() *cobra.Command {
return &cobra.Command{ return &cobra.Command{
Use: "repo", Use: "repo",
Short: "Manage your repositories", Short: "Manage your repositories",
Long: `Initialize, configure, connect and update to your account backup repositories`, Long: `Initialize, configure, and connect to your account backup repositories.`,
RunE: handleRepoCmd, RunE: handleRepoCmd,
Args: cobra.NoArgs, Args: cobra.NoArgs,
} }
@ -123,7 +111,7 @@ func handleConnectCmd(cmd *cobra.Command, args []string) error {
func maintenanceCmd() *cobra.Command { func maintenanceCmd() *cobra.Command {
return &cobra.Command{ return &cobra.Command{
Use: MaintenanceCommand, Use: maintenanceCommand,
Short: "Run maintenance on an existing repository", Short: "Run maintenance on an existing repository",
Long: `Run maintenance on an existing repository to optimize performance and storage use`, Long: `Run maintenance on an existing repository to optimize performance and storage use`,
RunE: handleMaintenanceCmd, RunE: handleMaintenanceCmd,
@ -146,7 +134,7 @@ func handleMaintenanceCmd(cmd *cobra.Command, args []string) error {
// we don't need the graph client. // we don't need the graph client.
path.OneDriveService) path.OneDriveService)
if err != nil { if err != nil {
return Only(ctx, err) return print.Only(ctx, err)
} }
defer utils.CloseRepo(ctx, r) defer utils.CloseRepo(ctx, r)
@ -159,12 +147,12 @@ func handleMaintenanceCmd(cmd *cobra.Command, args []string) error {
Force: flags.ForceMaintenanceFV, Force: flags.ForceMaintenanceFV,
}) })
if err != nil { if err != nil {
return Only(ctx, err) return print.Only(ctx, err)
} }
err = m.Run(ctx) err = m.Run(ctx)
if err != nil { if err != nil {
return Only(ctx, err) return print.Only(ctx, err)
} }
return nil return nil
@ -187,55 +175,3 @@ func getMaintenanceType(t string) (repository.MaintenanceType, error) {
return res, nil return res, nil
} }
// The repo update subcommand.
// `corso repo update-passphrase [<flag>...]`
func updatePassphraseCmd() *cobra.Command {
return &cobra.Command{
Use: updatePassphraseCommand,
Short: "Update the repository passphrase",
Long: `Update the repository passphrase`,
RunE: handleUpdateCmd,
Args: cobra.NoArgs,
Example: providerCommandUpdatePhasephraseExamples,
}
}
// Handler for calls to `corso repo update-password`.
func handleUpdateCmd(cmd *cobra.Command, args []string) error {
ctx := cmd.Context()
// Need to give it a valid service so it won't error out on us even though
// we don't need the graph client.
repos, rdao, err := utils.GetAccountAndConnect(ctx, cmd, path.OneDriveService)
if err != nil {
return Only(ctx, err)
}
opts := rdao.Opts
defer utils.CloseRepo(ctx, repos)
repoID := repos.GetID()
if len(repoID) == 0 {
repoID = events.RepoIDNotFound
}
r, err := repo.New(
ctx,
rdao.Repo.Account,
rdao.Repo.Storage,
opts,
repoID)
if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to create a repository controller"))
}
if err := r.UpdatePassword(ctx, flags.NewPhasephraseFV); err != nil {
return Only(ctx, clues.Wrap(err, "Failed to update s3"))
}
Infof(ctx, "Updated repo password.")
return nil
}

View File

@ -1,23 +1,14 @@
package repo_test package repo
import ( import (
"testing" "testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/repo"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/storage"
storeTD "github.com/alcionai/corso/src/pkg/storage/testdata"
) )
type RepoUnitSuite struct { type RepoUnitSuite struct {
@ -32,7 +23,7 @@ func (suite *RepoUnitSuite) TestAddRepoCommands() {
t := suite.T() t := suite.T()
cmd := &cobra.Command{} cmd := &cobra.Command{}
repo.AddCommands(cmd) AddCommands(cmd)
var found bool var found bool
@ -41,95 +32,10 @@ func (suite *RepoUnitSuite) TestAddRepoCommands() {
require.Len(t, repoCmds, 1) require.Len(t, repoCmds, 1)
for _, c := range repoCmds[0].Commands() { for _, c := range repoCmds[0].Commands() {
if c.Use == repo.MaintenanceCommand { if c.Use == maintenanceCommand {
found = true found = true
} }
} }
assert.True(t, found, "looking for maintenance command") assert.True(t, found, "looking for maintenance command")
} }
type RepoE2ESuite struct {
tester.Suite
}
func TestRepoE2ESuite(t *testing.T) {
suite.Run(t, &RepoE2ESuite{Suite: tester.NewE2ESuite(
t,
[][]string{storeTD.AWSStorageCredEnvs, tconfig.M365AcctCredEnvs})})
}
func (suite *RepoE2ESuite) TestUpdatePassphraseCmd() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
st := storeTD.NewPrefixedS3Storage(t)
sc, err := st.StorageConfig()
require.NoError(t, err, clues.ToCore(err))
cfg := sc.(*storage.S3Config)
vpr, configFP := tconfig.MakeTempTestConfigClone(t, nil)
ctx = config.SetViper(ctx, vpr)
cmd := cliTD.StubRootCmd(
"repo", "init", "s3",
"--"+flags.ConfigFileFN, configFP,
"--prefix", cfg.Prefix)
cli.BuildCommandTree(cmd)
// run the command
err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// connect with old passphrase
cmd = cliTD.StubRootCmd(
"repo", "connect", "s3",
"--"+flags.ConfigFileFN, configFP,
"--bucket", cfg.Bucket,
"--prefix", cfg.Prefix)
cli.BuildCommandTree(cmd)
// run the command
err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
cmd = cliTD.StubRootCmd(
"repo", "update-passphrase",
"--"+flags.ConfigFileFN, configFP,
"--new-passphrase", "newpass")
cli.BuildCommandTree(cmd)
// run the command
err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// connect again with new passphrase
cmd = cliTD.StubRootCmd(
"repo", "connect", "s3",
"--"+flags.ConfigFileFN, configFP,
"--bucket", cfg.Bucket,
"--prefix", cfg.Prefix,
"--passphrase", "newpass")
cli.BuildCommandTree(cmd)
// run the command
err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err))
// connect with old passphrase - it will fail
cmd = cliTD.StubRootCmd(
"repo", "connect", "s3",
"--"+flags.ConfigFileFN, configFP,
"--bucket", cfg.Bucket,
"--prefix", cfg.Prefix)
cli.BuildCommandTree(cmd)
// run the command
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
}

View File

@ -4,13 +4,14 @@ import (
"strings" "strings"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/pkg/errors"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/events" "github.com/alcionai/corso/src/internal/events"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/repository" "github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/storage" "github.com/alcionai/corso/src/pkg/storage"
) )
@ -84,7 +85,7 @@ func s3InitCmd() *cobra.Command {
func initS3Cmd(cmd *cobra.Command, args []string) error { func initS3Cmd(cmd *cobra.Command, args []string) error {
ctx := cmd.Context() ctx := cmd.Context()
cfg, err := config.ReadCorsoConfig( cfg, err := config.GetConfigRepoDetails(
ctx, ctx,
storage.ProviderS3, storage.ProviderS3,
true, true,
@ -101,6 +102,15 @@ func initS3Cmd(cmd *cobra.Command, args []string) error {
return Only(ctx, err) return Only(ctx, err)
} }
// SendStartCorsoEvent uses distict ID as tenant ID because repoID is still not generated
utils.SendStartCorsoEvent(
ctx,
cfg.Storage,
cfg.Account.ID(),
map[string]any{"command": "init repo"},
cfg.Account.ID(),
opt)
s3Cfg, err := cfg.Storage.ToS3Config() s3Cfg, err := cfg.Storage.ToS3Config()
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Retrieving s3 configuration")) return Only(ctx, clues.Wrap(err, "Retrieving s3 configuration"))
@ -131,6 +141,10 @@ func initS3Cmd(cmd *cobra.Command, args []string) error {
ric := repository.InitConfig{RetentionOpts: retentionOpts} ric := repository.InitConfig{RetentionOpts: retentionOpts}
if err = r.Initialize(ctx, ric); err != nil { if err = r.Initialize(ctx, ric); err != nil {
if flags.SucceedIfExistsFV && errors.Is(err, repository.ErrorRepoAlreadyExists) {
return nil
}
return Only(ctx, clues.Stack(ErrInitializingRepo, err)) return Only(ctx, clues.Stack(ErrInitializingRepo, err))
} }
@ -165,7 +179,7 @@ func s3ConnectCmd() *cobra.Command {
func connectS3Cmd(cmd *cobra.Command, args []string) error { func connectS3Cmd(cmd *cobra.Command, args []string) error {
ctx := cmd.Context() ctx := cmd.Context()
cfg, err := config.ReadCorsoConfig( cfg, err := config.GetConfigRepoDetails(
ctx, ctx,
storage.ProviderS3, storage.ProviderS3,
true, true,

View File

@ -11,13 +11,12 @@ import (
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/config"
cliTD "github.com/alcionai/corso/src/cli/testdata" cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/str" "github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/repository" "github.com/alcionai/corso/src/pkg/repository"
"github.com/alcionai/corso/src/pkg/storage" "github.com/alcionai/corso/src/pkg/storage"
@ -80,7 +79,7 @@ func (suite *S3E2ESuite) TestInitS3Cmd() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"repo", "init", "s3", "repo", "init", "s3",
"--"+flags.ConfigFileFN, configFP, "--config-file", configFP,
"--bucket", test.bucketPrefix+cfg.Bucket, "--bucket", test.bucketPrefix+cfg.Bucket,
"--prefix", cfg.Prefix) "--prefix", cfg.Prefix)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -89,9 +88,9 @@ func (suite *S3E2ESuite) TestInitS3Cmd() {
err = cmd.ExecuteContext(ctx) err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
// noop // a second initialization should result in an error
err = cmd.ExecuteContext(ctx) err = cmd.ExecuteContext(ctx)
require.NoError(t, err, clues.ToCore(err)) assert.ErrorIs(t, err, repository.ErrorRepoAlreadyExists, clues.ToCore(err))
}) })
} }
} }
@ -114,9 +113,10 @@ func (suite *S3E2ESuite) TestInitMultipleTimes() {
for i := 0; i < 2; i++ { for i := 0; i < 2; i++ {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"repo", "init", "s3", "repo", "init", "s3",
"--"+flags.ConfigFileFN, configFP, "--config-file", configFP,
"--bucket", cfg.Bucket, "--bucket", cfg.Bucket,
"--prefix", cfg.Prefix) "--prefix", cfg.Prefix,
"--succeed-if-exists")
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
// run the command // run the command
@ -146,7 +146,7 @@ func (suite *S3E2ESuite) TestInitS3Cmd_missingBucket() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"repo", "init", "s3", "repo", "init", "s3",
"--"+flags.ConfigFileFN, configFP, "--config-file", configFP,
"--prefix", cfg.Prefix) "--prefix", cfg.Prefix)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -219,7 +219,7 @@ func (suite *S3E2ESuite) TestConnectS3Cmd() {
// then test it // then test it
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"repo", "connect", "s3", "repo", "connect", "s3",
"--"+flags.ConfigFileFN, configFP, "--config-file", configFP,
"--bucket", test.bucketPrefix+cfg.Bucket, "--bucket", test.bucketPrefix+cfg.Bucket,
"--prefix", cfg.Prefix) "--prefix", cfg.Prefix)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -279,7 +279,7 @@ func (suite *S3E2ESuite) TestConnectS3Cmd_badInputs() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"repo", "connect", "s3", "repo", "connect", "s3",
"--"+flags.ConfigFileFN, configFP, "--config-file", configFP,
"--bucket", bucket, "--bucket", bucket,
"--prefix", prefix) "--prefix", prefix)
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)

View File

@ -2,6 +2,7 @@ package restore
import ( import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
@ -9,16 +10,24 @@ import (
// called by restore.go to map subcommands to provider-specific handling. // called by restore.go to map subcommands to provider-specific handling.
func addExchangeCommands(cmd *cobra.Command) *cobra.Command { func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use { switch cmd.Use {
case restoreCommand: case restoreCommand:
c, _ = utils.AddCommand(cmd, exchangeRestoreCmd()) c, fs = utils.AddCommand(cmd, exchangeRestoreCmd())
c.Use = c.Use + " " + exchangeServiceCommandUseSuffix c.Use = c.Use + " " + exchangeServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
// general flags
fs.SortFlags = false
flags.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
flags.AddExchangeDetailsAndRestoreFlags(c, false) flags.AddExchangeDetailsAndRestoreFlags(c)
flags.AddRestoreConfigFlags(c, true) flags.AddRestoreConfigFlags(c, true)
flags.AddFailFastFlag(c) flags.AddFailFastFlag(c)
} }

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
cliTD "github.com/alcionai/corso/src/cli/testdata" cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
@ -18,7 +19,6 @@ import (
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/config"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/repository" "github.com/alcionai/corso/src/pkg/repository"
@ -145,7 +145,7 @@ func (suite *RestoreExchangeE2ESuite) TestExchangeRestoreCmd() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"restore", "exchange", "restore", "exchange",
"--"+flags.ConfigFileFN, suite.cfgFP, "--config-file", suite.cfgFP,
"--"+flags.BackupFN, string(suite.backupOps[set].Results.BackupID)) "--"+flags.BackupFN, string(suite.backupOps[set].Results.BackupID))
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -180,7 +180,7 @@ func (suite *RestoreExchangeE2ESuite) TestExchangeRestoreCmd_badTimeFlags() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"restore", "exchange", "restore", "exchange",
"--"+flags.ConfigFileFN, suite.cfgFP, "--config-file", suite.cfgFP,
"--"+flags.BackupFN, string(suite.backupOps[set].Results.BackupID), "--"+flags.BackupFN, string(suite.backupOps[set].Results.BackupID),
timeFilter, "smarf") timeFilter, "smarf")
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)
@ -214,7 +214,7 @@ func (suite *RestoreExchangeE2ESuite) TestExchangeRestoreCmd_badBoolFlags() {
cmd := cliTD.StubRootCmd( cmd := cliTD.StubRootCmd(
"restore", "exchange", "restore", "exchange",
"--"+flags.ConfigFileFN, suite.cfgFP, "--config-file", suite.cfgFP,
"--"+flags.BackupFN, string(suite.backupOps[set].Results.BackupID), "--"+flags.BackupFN, string(suite.backupOps[set].Results.BackupID),
timeFilter, "wingbat") timeFilter, "wingbat")
cli.BuildCommandTree(cmd) cli.BuildCommandTree(cmd)

View File

@ -2,28 +2,34 @@ package restore
import ( import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/dttm" "github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/pkg/selectors"
) )
// called by restore.go to map subcommands to provider-specific handling. // called by restore.go to map subcommands to provider-specific handling.
func addGroupsCommands(cmd *cobra.Command) *cobra.Command { func addGroupsCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use { switch cmd.Use {
case restoreCommand: case restoreCommand:
c, _ = utils.AddCommand(cmd, groupsRestoreCmd(), utils.MarkPreviewCommand()) c, fs = utils.AddCommand(cmd, groupsRestoreCmd(), utils.MarkPreviewCommand())
c.Use = c.Use + " " + groupsServiceCommandUseSuffix c.Use = c.Use + " " + groupsServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
flags.AddSiteFlag(c, false)
flags.AddSiteIDFlag(c, false)
flags.AddNoPermissionsFlag(c) flags.AddNoPermissionsFlag(c)
flags.AddSharePointDetailsAndRestoreFlags(c) flags.AddSharePointDetailsAndRestoreFlags(c) // for sp restores
flags.AddSiteIDFlag(c)
flags.AddRestoreConfigFlags(c, false) flags.AddRestoreConfigFlags(c, false)
flags.AddFailFastFlag(c) flags.AddFailFastFlag(c)
} }
@ -77,17 +83,13 @@ func restoreGroupsCmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
if err := utils.ValidateGroupsRestoreFlags(flags.BackupIDFV, opts, true); err != nil { if err := utils.ValidateGroupsRestoreFlags(flags.BackupIDFV, opts); err != nil {
return err return err
} }
sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts) sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)
utils.FilterGroupsRestoreInfoSelectors(sel, opts) utils.FilterGroupsRestoreInfoSelectors(sel, opts)
// TODO(pandeyabs): Exclude conversations from restores since they are not
// supported yet.
sel.Exclude(sel.Conversation(selectors.Any()))
return runRestore( return runRestore(
ctx, ctx,
cmd, cmd,

View File

@ -52,7 +52,6 @@ func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
[]string{ []string{
"--" + flags.RunModeFN, flags.RunModeFlagTest, "--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput, "--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.SiteFN, flagsTD.SiteInput,
"--" + flags.LibraryFN, flagsTD.LibraryInput, "--" + flags.LibraryFN, flagsTD.LibraryInput,
"--" + flags.FileFN, flagsTD.FlgInputs(flagsTD.FileNameInput), "--" + flags.FileFN, flagsTD.FlgInputs(flagsTD.FileNameInput),
"--" + flags.FolderFN, flagsTD.FlgInputs(flagsTD.FolderPathInput), "--" + flags.FolderFN, flagsTD.FlgInputs(flagsTD.FolderPathInput),
@ -60,7 +59,8 @@ func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput, "--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput, "--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput, "--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
"--" + flags.ListFN, flagsTD.FlgInputs(flagsTD.ListsInput), "--" + flags.ListItemFN, flagsTD.FlgInputs(flagsTD.ListItemInput),
"--" + flags.ListFolderFN, flagsTD.FlgInputs(flagsTD.ListFolderInput),
"--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput), "--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput),
"--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput), "--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
"--" + flags.CollisionsFN, flagsTD.Collisions, "--" + flags.CollisionsFN, flagsTD.Collisions,
@ -91,7 +91,6 @@ func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore) assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
assert.Equal(t, flagsTD.Collisions, opts.RestoreCfg.Collisions) assert.Equal(t, flagsTD.Collisions, opts.RestoreCfg.Collisions)
assert.Equal(t, flagsTD.Destination, opts.RestoreCfg.Destination) assert.Equal(t, flagsTD.Destination, opts.RestoreCfg.Destination)
assert.ElementsMatch(t, flagsTD.ListsInput, opts.Lists)
// assert.Equal(t, flagsTD.ToResource, opts.RestoreCfg.ProtectedResource) // assert.Equal(t, flagsTD.ToResource, opts.RestoreCfg.ProtectedResource)
assert.True(t, flags.NoPermissionsFV) assert.True(t, flags.NoPermissionsFV)
flagsTD.AssertProviderFlags(t, cmd) flagsTD.AssertProviderFlags(t, cmd)

View File

@ -2,22 +2,30 @@ package restore
import ( import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/dttm" "github.com/alcionai/corso/src/internal/common/dttm"
) )
// called by restore.go to map subcommands to provider-specific handling. // called by restore.go to map subcommands to provider-specific handling.
func addOneDriveCommands(cmd *cobra.Command) *cobra.Command { func addOneDriveCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use { switch cmd.Use {
case restoreCommand: case restoreCommand:
c, _ = utils.AddCommand(cmd, oneDriveRestoreCmd()) c, fs = utils.AddCommand(cmd, oneDriveRestoreCmd())
c.Use = c.Use + " " + oneDriveServiceCommandUseSuffix c.Use = c.Use + " " + oneDriveServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --user) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
flags.AddOneDriveDetailsAndRestoreFlags(c) flags.AddOneDriveDetailsAndRestoreFlags(c)
flags.AddNoPermissionsFlag(c) flags.AddNoPermissionsFlag(c)

View File

@ -2,22 +2,30 @@ package restore
import ( import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/dttm" "github.com/alcionai/corso/src/internal/common/dttm"
) )
// called by restore.go to map subcommands to provider-specific handling. // called by restore.go to map subcommands to provider-specific handling.
func addSharePointCommands(cmd *cobra.Command) *cobra.Command { func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
var c *cobra.Command var (
c *cobra.Command
fs *pflag.FlagSet
)
switch cmd.Use { switch cmd.Use {
case restoreCommand: case restoreCommand:
c, _ = utils.AddCommand(cmd, sharePointRestoreCmd()) c, fs = utils.AddCommand(cmd, sharePointRestoreCmd())
c.Use = c.Use + " " + sharePointServiceCommandUseSuffix c.Use = c.Use + " " + sharePointServiceCommandUseSuffix
// Flags addition ordering should follow the order we want them to appear in help and docs:
// More generic (ex: --site) and more frequently used flags take precedence.
fs.SortFlags = false
flags.AddBackupIDFlag(c, true) flags.AddBackupIDFlag(c, true)
flags.AddSharePointDetailsAndRestoreFlags(c) flags.AddSharePointDetailsAndRestoreFlags(c)
flags.AddNoPermissionsFlag(c) flags.AddNoPermissionsFlag(c)
@ -50,27 +58,7 @@ corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
# Restore all files in the "Documents" library. # Restore all files in the "Documents" library.
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \ corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--library Documents --folder "Display Templates/Style Sheets" --library Documents --folder "Display Templates/Style Sheets" `
# Restore lists by their name(s)
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list "list-name-1,list-name-2"
# Restore lists created after a given time
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-after 2024-01-01T12:23:34
# Restore lists created before a given time
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-created-before 2024-01-01T12:23:34
# Restore lists modified before a given time
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-before 2024-01-01T12:23:34
# Restore lists modified after a given time
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
--list-modified-after 2024-01-01T12:23:34`
) )
// `corso restore sharepoint [<flag>...]` // `corso restore sharepoint [<flag>...]`

View File

@ -59,11 +59,8 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput, "--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput, "--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput, "--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
"--" + flags.ListFN, flagsTD.FlgInputs(flagsTD.ListsInput), "--" + flags.ListItemFN, flagsTD.FlgInputs(flagsTD.ListItemInput),
"--" + flags.ListCreatedAfterFN, flagsTD.ListCreatedAfterInput, "--" + flags.ListFolderFN, flagsTD.FlgInputs(flagsTD.ListFolderInput),
"--" + flags.ListCreatedBeforeFN, flagsTD.ListCreatedBeforeInput,
"--" + flags.ListModifiedAfterFN, flagsTD.ListModifiedAfterInput,
"--" + flags.ListModifiedBeforeFN, flagsTD.ListModifiedBeforeInput,
"--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput), "--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput),
"--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput), "--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
"--" + flags.CollisionsFN, flagsTD.Collisions, "--" + flags.CollisionsFN, flagsTD.Collisions,
@ -92,11 +89,8 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore) assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore)
assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter) assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter)
assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore) assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
assert.ElementsMatch(t, flagsTD.ListsInput, opts.Lists) assert.ElementsMatch(t, flagsTD.ListItemInput, opts.ListItem)
assert.Equal(t, flagsTD.ListCreatedAfterInput, opts.ListCreatedAfter) assert.ElementsMatch(t, flagsTD.ListFolderInput, opts.ListFolder)
assert.Equal(t, flagsTD.ListCreatedBeforeInput, opts.ListCreatedBefore)
assert.Equal(t, flagsTD.ListModifiedAfterInput, opts.ListModifiedAfter)
assert.Equal(t, flagsTD.ListModifiedBeforeInput, opts.ListModifiedBefore)
assert.ElementsMatch(t, flagsTD.PageInput, opts.Page) assert.ElementsMatch(t, flagsTD.PageInput, opts.Page)
assert.ElementsMatch(t, flagsTD.PageFolderInput, opts.PageFolder) assert.ElementsMatch(t, flagsTD.PageFolderInput, opts.PageFolder)
assert.Equal(t, flagsTD.Collisions, opts.RestoreCfg.Collisions) assert.Equal(t, flagsTD.Collisions, opts.RestoreCfg.Collisions)

View File

@ -1,3 +0,0 @@
package utils
// TODO(keepers): something

View File

@ -31,7 +31,6 @@ type ExchangeOpts struct {
EventSubject string EventSubject string
RestoreCfg RestoreCfgOpts RestoreCfg RestoreCfgOpts
ExportCfg ExportCfgOpts
Populated flags.PopulatedFlags Populated flags.PopulatedFlags
} }
@ -61,7 +60,6 @@ func MakeExchangeOpts(cmd *cobra.Command) ExchangeOpts {
EventSubject: flags.EventSubjectFV, EventSubject: flags.EventSubjectFV,
RestoreCfg: makeRestoreCfgOpts(cmd), RestoreCfg: makeRestoreCfgOpts(cmd),
ExportCfg: makeExportCfgOpts(cmd),
// populated contains the list of flags that appear in the // populated contains the list of flags that appear in the
// command, according to pflags. Use this to differentiate // command, according to pflags. Use this to differentiate

View File

@ -9,8 +9,8 @@ import (
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/dttm"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )

View File

@ -45,7 +45,12 @@ func MakeExportConfig(
// ValidateExportConfigFlags ensures all export config flags that utilize // ValidateExportConfigFlags ensures all export config flags that utilize
// enumerated values match a well-known value. // enumerated values match a well-known value.
func ValidateExportConfigFlags(opts *ExportCfgOpts, acceptedFormatTypes []string) error { func ValidateExportConfigFlags(opts *ExportCfgOpts) error {
acceptedFormatTypes := []string{
string(control.DefaultFormat),
string(control.JSONFormat),
}
if _, populated := opts.Populated[flags.FormatFN]; !populated { if _, populated := opts.Populated[flags.FormatFN]; !populated {
opts.Format = string(control.DefaultFormat) opts.Format = string(control.DefaultFormat)
} else if !filters.Equal(acceptedFormatTypes).Compare(opts.Format) { } else if !filters.Equal(acceptedFormatTypes).Compare(opts.Format) {

View File

@ -55,11 +55,6 @@ func (suite *ExportCfgUnitSuite) TestMakeExportConfig() {
} }
func (suite *ExportCfgUnitSuite) TestValidateExportConfigFlags() { func (suite *ExportCfgUnitSuite) TestValidateExportConfigFlags() {
acceptedFormatTypes := []string{
string(control.DefaultFormat),
string(control.JSONFormat),
}
table := []struct { table := []struct {
name string name string
input ExportCfgOpts input ExportCfgOpts
@ -105,8 +100,7 @@ func (suite *ExportCfgUnitSuite) TestValidateExportConfigFlags() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
err := ValidateExportConfigFlags(&test.input)
err := ValidateExportConfigFlags(&test.input, acceptedFormatTypes)
test.expectErr(t, err, clues.ToCore(err)) test.expectErr(t, err, clues.ToCore(err))
assert.Equal(t, test.expectFormat, control.FormatType(test.input.Format)) assert.Equal(t, test.expectFormat, control.FormatType(test.input.Format))

Some files were not shown because too many files have changed in this diff Show More