merge commit

neha-Gupta1 2023-05-18 15:42:11 +05:30
commit 173ded4b2d
290 changed files with 19061 additions and 7689 deletions

View File

@@ -0,0 +1,113 @@
name: Backup Restore Test

inputs:
  service:
    description: Service to test
    required: true
  kind:
    description: Kind of test
    required: true
  backup-args:
    description: Arguments to pass for backup
    required: false
    default: ""
  restore-args:
    description: Arguments to pass for restore
    required: false
    default: ""
  test-folder:
    description: Folder to use for testing
    required: true
  base-backup:
    description: Base backup to use for testing
    required: false

outputs:
  backup-id:
    value: ${{ steps.backup.outputs.result }}

runs:
  using: composite
  steps:
    - name: Backup ${{ inputs.service }} ${{ inputs.kind }}
      id: backup
      shell: bash
      working-directory: src
      run: |
        set -euo pipefail
        ./corso backup create '${{ inputs.service }}' \
          --no-stats --hide-progress --json \
          ${{ inputs.backup-args }} |
          tee /dev/stderr | # for printing logs
          jq -r '.[0] | .id' |
          sed 's/^/result=/' |
          tee $GITHUB_OUTPUT

    - name: Restore ${{ inputs.service }} ${{ inputs.kind }}
      id: restore
      shell: bash
      working-directory: src
      run: |
        set -euo pipefail
        ./corso restore '${{ inputs.service }}' \
          --no-stats --hide-progress \
          ${{ inputs.restore-args }} \
          --backup '${{ steps.backup.outputs.result }}' 2>&1 |
          tee /tmp/corsologs |
          grep -i -e 'Restoring to folder ' |
          sed "s/Restoring to folder /result=/" |
          tee $GITHUB_OUTPUT

        cat /tmp/corsologs

    - name: Check ${{ inputs.service }} ${{ inputs.kind }}
      shell: bash
      working-directory: src
      env:
        SANITY_RESTORE_FOLDER: ${{ steps.restore.outputs.result }}
        SANITY_RESTORE_SERVICE: ${{ inputs.service }}
        TEST_DATA: ${{ inputs.test-folder }}
        BASE_BACKUP: ${{ inputs.base-backup }}
      run: |
        ./sanity-test

    - name: List ${{ inputs.service }} ${{ inputs.kind }}
      shell: bash
      working-directory: src
      run: |
        set -euo pipefail
        ./corso backup list ${{ inputs.service }} \
          --no-stats --hide-progress 2>&1 |
          tee /tmp/corso-backup-list.log

        if ! grep -q ${{ steps.backup.outputs.result }} /tmp/corso-backup-list.log
        then
          echo "Unable to find backup from previous run in backup list"
          exit 1
        fi

    - name: List item ${{ inputs.service }} ${{ inputs.kind }}
      shell: bash
      working-directory: src
      run: |
        set -euo pipefail
        ./corso backup list ${{ inputs.service }} \
          --no-stats --hide-progress \
          --backup "${{ steps.backup.outputs.result }}" 2>&1 |
          tee /tmp/corso-backup-list-item.log

        if ! grep -q ${{ steps.backup.outputs.result }} /tmp/corso-backup-list-item.log
        then
          echo "Unable to list previous backup"
          exit 1
        fi

    # Upload the original go test output as an artifact for later review.
    - name: Upload test log
      if: always()
      uses: actions/upload-artifact@v3
      with:
        name: "${{ inputs.service }}-${{ inputs.kind }}-logs"
        path: ${{ env.WORKING_DIR }}/${{ env.CORSO_LOG_DIR }}/
        if-no-files-found: error
        retention-days: 14
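The backup step above captures the backup ID through a single pipeline: the raw JSON goes to the log via tee, jq extracts the ID, and a result= line lands in $GITHUB_OUTPUT. A minimal standalone sketch of that pattern, using an illustrative JSON payload in place of real corso output:

#!/usr/bin/env bash
set -euo pipefail

# Illustrative stand-in for the JSON emitted by `./corso backup create ... --json`.
backup_json='[{"id":"abcd-1234","errorCount":0}]'

# Mirror the action's pipeline: keep the raw JSON visible, extract the id,
# and emit a `result=` line that GitHub Actions reads as a step output.
echo "$backup_json" |
  tee /dev/stderr |        # raw JSON stays visible in the job log
  jq -r '.[0] | .id' |     # pull the backup id out of the JSON array
  sed 's/^/result=/' |     # shape it as a key=value output line
  tee "${GITHUB_OUTPUT:-/dev/stdout}"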

View File

@@ -0,0 +1,75 @@
name: Publish Binary

inputs:
  version:
    description: Corso version to use for publishing
    required: true
  github_token:
    description: GitHub token for publishing
    required: true
  rudderstack_write_key:
    description: Write key for RudderStack
    required: true
  rudderstack_data_plane_url:
    description: Data plane URL for RudderStack
    required: true

runs:
  using: composite
  steps:
    - uses: actions/checkout@v3
      with:
        fetch-depth: 0 # needed to pull changelog

    - name: Setup Golang with cache
      uses: magnetikonline/action-golang-cache@v4
      with:
        go-version-file: src/go.mod

    - name: Mark snapshot release
      shell: bash
      if: ${{ !startsWith(github.ref, 'refs/tags/') }}
      run: |
        echo "grflags=--snapshot" >> $GITHUB_ENV

    - name: Run GoReleaser
      uses: goreleaser/goreleaser-action@v4
      with:
        version: latest
        args: release --rm-dist --timeout 500m --parallelism 1 ${{ env.grflags }}
        workdir: src
      env:
        GITHUB_TOKEN: ${{ inputs.github_token }}
        RUDDERSTACK_CORSO_WRITE_KEY: ${{ inputs.rudderstack_write_key }}
        RUDDERSTACK_CORSO_DATA_PLANE_URL: ${{ inputs.rudderstack_data_plane_url }}
        CORSO_VERSION: ${{ inputs.version }}

    - name: Upload darwin arm64
      uses: actions/upload-artifact@v3
      with:
        name: corso_Darwin_arm64
        path: src/dist/corso_darwin_arm64/corso

    - name: Upload linux arm64
      uses: actions/upload-artifact@v3
      with:
        name: corso_Linux_arm64
        path: src/dist/corso_linux_arm64/corso

    - name: Upload darwin amd64
      uses: actions/upload-artifact@v3
      with:
        name: corso_Darwin_amd64
        path: src/dist/corso_darwin_amd64_v1/corso

    - name: Upload linux amd64
      uses: actions/upload-artifact@v3
      with:
        name: corso_Linux_amd64
        path: src/dist/corso_linux_amd64_v1/corso

    - name: Upload windows amd64
      uses: actions/upload-artifact@v3
      with:
        name: corso_Windows_amd64
        path: src/dist/corso_windows_amd64_v1/corso.exe

View File

@@ -9,6 +9,9 @@ on:
       websitefileschanged:
         description: "'true' if websites/** or .github/workflows/** files have changed in the branch"
         value: ${{ jobs.file-change-check.outputs.websitefileschanged }}
+      actionsfileschanged:
+        description: "'true' if .github/actions/** or .github/workflows/** files have changed in the branch"
+        value: ${{ jobs.file-change-check.outputs.actionsfileschanged }}

 jobs:
   file-change-check:
@@ -19,6 +22,7 @@ jobs:
     outputs:
       srcfileschanged: ${{ steps.srcchecker.outputs.srcfileschanged }}
       websitefileschanged: ${{ steps.websitechecker.outputs.websitefileschanged }}
+      actionsfileschanged: ${{ steps.actionschecker.outputs.actionsfileschanged }}
     steps:
       - uses: actions/checkout@v3
@@ -49,4 +53,11 @@ jobs:
         if: steps.dornycheck.outputs.src == 'true' || steps.dornycheck.outputs.website == 'true' || steps.dornycheck.outputs.actions == 'true'
         run: |
           echo "website or workflow file changes occurred"
           echo websitefileschanged=true >> $GITHUB_OUTPUT
+      - name: Check dorny for changes in actions filepaths
+        id: actionschecker
+        if: steps.dornycheck.outputs.actions == 'true'
+        run: |
+          echo "actions file changes occurred"
+          echo actionsfileschanged=true >> $GITHUB_OUTPUT

.github/workflows/accSelector.yaml (new file, +46 lines)
View File

@@ -0,0 +1,46 @@
name: SetM365AppAcc

on:
  workflow_call:
    outputs:
      client_app_slot:
        value: ${{ jobs.GetM365App.outputs.client_app_slot }}
      client_id_env:
        value: ${{ jobs.GetM365App.outputs.client_id_env }}
      client_secret_env:
        value: ${{ jobs.GetM365App.outputs.client_secret_env }}

jobs:
  GetM365App:
    environment: Testing
    runs-on: ubuntu-latest
    outputs:
      client_app_slot: ${{ steps.roundrobin.outputs.CLIENT_APP_SLOT }}
      client_id_env: ${{ steps.roundrobin.outputs.CLIENT_ID_ENV }}
      client_secret_env: ${{ steps.roundrobin.outputs.CLIENT_SECRET_ENV }}
    steps:
      - name: Figure out which client id to use
        id: roundrobin
        run: |
          slot=$((GITHUB_RUN_NUMBER % 4))
          echo "CLIENT_APP_SLOT=$slot" >> $GITHUB_OUTPUT

          case $slot in
            0)
              echo "CLIENT_ID_ENV=CLIENT_ID" >> $GITHUB_OUTPUT
              echo "CLIENT_SECRET_ENV=CLIENT_SECRET" >> $GITHUB_OUTPUT
              ;;
            1)
              echo "CLIENT_ID_ENV=CLIENT_ID_2" >> $GITHUB_OUTPUT
              echo "CLIENT_SECRET_ENV=CLIENT_SECRET_2" >> $GITHUB_OUTPUT
              ;;
            2)
              echo "CLIENT_ID_ENV=CLIENT_ID_3" >> $GITHUB_OUTPUT
              echo "CLIENT_SECRET_ENV=CLIENT_SECRET_3" >> $GITHUB_OUTPUT
              ;;
            3)
              echo "CLIENT_ID_ENV=CLIENT_ID_4" >> $GITHUB_OUTPUT
              echo "CLIENT_SECRET_ENV=CLIENT_SECRET_4" >> $GITHUB_OUTPUT
              ;;
          esac
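The case statement above maps a run-number slot to one of four numbered secret names. A condensed sketch of the same round-robin logic, with GITHUB_RUN_NUMBER faked so it runs standalone:

#!/usr/bin/env bash
set -euo pipefail

# Same idea as the workflow: derive a slot from the run number and map it
# to a numbered secret name. GITHUB_RUN_NUMBER is faked for local runs.
GITHUB_RUN_NUMBER="${GITHUB_RUN_NUMBER:-7}"

slot=$((GITHUB_RUN_NUMBER % 4))
suffix=""
if [ "$slot" -ne 0 ]; then
  suffix="_$((slot + 1))"   # slots 1..3 map to CLIENT_ID_2..CLIENT_ID_4
fi

echo "CLIENT_APP_SLOT=$slot"
echo "CLIENT_ID_ENV=CLIENT_ID${suffix}"
echo "CLIENT_SECRET_ENV=CLIENT_SECRET${suffix}"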

.github/workflows/binary-publish.yml (new file, +37 lines)
View File

@@ -0,0 +1,37 @@
name: Publish binary

on:
  workflow_dispatch:

jobs:
  SetEnv:
    environment: Testing
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.version.outputs.version }}
    steps:
      - uses: actions/checkout@v3

      - name: Get version string
        id: version
        run: |
          if ${{ startsWith(github.ref, 'refs/tags/') }}; then
            echo "version=$(git describe --exact-match --tags $(git rev-parse HEAD))" | tee -a $GITHUB_OUTPUT
          else
            echo "version=$(echo unreleased-$(git rev-parse --short HEAD))" | tee -a $GITHUB_OUTPUT
          fi

  Publish-Binary:
    needs: [SetEnv]
    environment: Testing
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Publish Binary
        uses: ./.github/actions/publish-binary
        with:
          version: ${{ needs.SetEnv.outputs.version }}
          github_token: ${{ secrets.GITHUB_TOKEN }}
          rudderstack_write_key: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}
          rudderstack_data_plane_url: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}
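The SetEnv job derives the version string from git state: an exact tag name on tag refs, otherwise an "unreleased-" placeholder. A standalone sketch of that decision, assuming it runs inside a git checkout; GITHUB_REF is faked for local use:

#!/usr/bin/env bash
set -euo pipefail

# Exact tag name when the ref is a tag, otherwise unreleased-<short sha>.
GITHUB_REF="${GITHUB_REF:-refs/heads/main}"

if [[ "$GITHUB_REF" == refs/tags/* ]]; then
  version="$(git describe --exact-match --tags "$(git rev-parse HEAD)")"
else
  version="unreleased-$(git rev-parse --short HEAD)"
fi

echo "version=$version" | tee -a "${GITHUB_OUTPUT:-/dev/stdout}"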

View File

@@ -52,38 +52,7 @@ jobs:
   # SetM365App will decide which M365 app to use for this CI run
   SetM365App:
-    environment: Testing
-    runs-on: ubuntu-latest
-    outputs:
-      client_app_slot: ${{ steps.roundrobin.outputs.CLIENT_APP_SLOT }}
-      client_id_env: ${{ steps.roundrobin.outputs.CLIENT_ID_ENV }}
-      client_secret_env: ${{ steps.roundrobin.outputs.CLIENT_SECRET_ENV }}
-    steps:
-      - name: Figure out which client id to use
-        id: roundrobin
-        run: |
-          slot=$((GITHUB_RUN_NUMBER % 4))
-          echo "CLIENT_APP_SLOT=$slot" >> $GITHUB_OUTPUT
-          case $slot in
-            0)
-              echo "CLIENT_ID_ENV=CLIENT_ID" >> $GITHUB_OUTPUT
-              echo "CLIENT_SECRET_ENV=CLIENT_SECRET" >> $GITHUB_OUTPUT
-              ;;
-            1)
-              echo "CLIENT_ID_ENV=CLIENT_ID_2" >> $GITHUB_OUTPUT
-              echo "CLIENT_SECRET_ENV=CLIENT_SECRET_2" >> $GITHUB_OUTPUT
-              ;;
-            2)
-              echo "CLIENT_ID_ENV=CLIENT_ID_3" >> $GITHUB_OUTPUT
-              echo "CLIENT_SECRET_ENV=CLIENT_SECRET_3" >> $GITHUB_OUTPUT
-              ;;
-            3)
-              echo "CLIENT_ID_ENV=CLIENT_ID_4" >> $GITHUB_OUTPUT
-              echo "CLIENT_SECRET_ENV=CLIENT_SECRET_4" >> $GITHUB_OUTPUT
-              ;;
-          esac
+    uses: alcionai/corso/.github/workflows/accSelector.yaml@main

   SetEnv:
     environment: Testing
@@ -168,6 +137,8 @@ jobs:
       AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }}
       AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
       CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
+      CORSO_LOG_FILE: ./src/testlog/suite-testlogging.log
+      LOG_GRAPH_REQUESTS: true
     steps:
       - uses: actions/checkout@v3
@@ -197,11 +168,9 @@ jobs:
           AZURE_CLIENT_SECRET: ${{ secrets[env.AZURE_CLIENT_SECRET_NAME] }}
           AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
           CORSO_CI_TESTS: true
-          CORSO_M365_TEST_USER_ID: ${{ secrets.CORSO_M365_TEST_USER_ID }}
-          CORSO_SECONDARY_M365_TEST_USER_ID: ${{ secrets.CORSO_SECONDARY_M365_TEST_USER_ID }}
+          CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
+          CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
           CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
-          CORSO_LOG_FILE: ./src/testlog/testlogging.log
-          LOG_GRAPH_REQUESTS: true
         run: |
           set -euo pipefail
           go test \
@@ -211,14 +180,15 @@ jobs:
             -failfast \
             -p 1 \
             -timeout 15m \
-            ./... 2>&1 | tee ./testlog/gotest.log | gotestfmt -hide successful-tests
+            ./... \
+            2>&1 | tee ./testlog/gotest.log | gotestfmt -hide successful-tests

       # Upload the original go test output as an artifact for later review.
       - name: Upload test log
         if: failure()
         uses: actions/upload-artifact@v3
         with:
-          name: test-log
+          name: ci-test-log
           path: src/testlog/*
           if-no-files-found: error
           retention-days: 14
@@ -231,6 +201,9 @@ jobs:
     defaults:
       run:
         working-directory: src
+    env:
+      CORSO_LOG_FILE: ./src/testlog/unit-testlogging.log
+      LOG_GRAPH_REQUESTS: true
     steps:
       - uses: actions/checkout@v3
@@ -252,8 +225,6 @@ jobs:
           # something elsewhere.
           CORSO_M365_TEST_USER_ID: 'foo'
           CORSO_SECONDARY_M365_TEST_USER_ID: 'foo'
-          CORSO_LOG_FILE: ./src/testlog/testlogging.log
-          LOG_GRAPH_REQUESTS: true
         run: |
           set -euo pipefail
           go test \
@@ -263,7 +234,8 @@ jobs:
             -failfast \
             -p 1 \
             -timeout 15m \
-            ./... 2>&1 | tee ./testlog/gotest-unit.log | gotestfmt -hide successful-tests
+            ./... \
+            2>&1 | tee ./testlog/gotest-unit.log | gotestfmt -hide successful-tests

       # Upload the original go test output as an artifact for later review.
       - name: Upload test log
@@ -283,6 +255,9 @@ jobs:
     defaults:
       run:
         working-directory: src
+    env:
+      CORSO_LOG_FILE: ./src/testlog/fork-testlogging.log
+      LOG_GRAPH_REQUESTS: true
     steps:
       - name: Fail check if not repository_dispatch
         if: github.event_name != 'repository_dispatch'
@@ -340,23 +315,23 @@ jobs:
           AZURE_CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
           AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
           CORSO_CI_TESTS: true
-          CORSO_M365_TEST_USER_ID: ${{ secrets.CORSO_M365_TEST_USER_ID }}
+          CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
           CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
-          CORSO_LOG_FILE: ./src/testlog/testlogging.log
         run: |
           set -euo pipefail
           go test \
            -json \
            -v \
            -timeout 15m \
-           ./... 2>&1 | tee ./testlog/gotest.log | gotestfmt -hide successful-tests
+           ./... \
+           2>&1 | tee ./testlog/gotest.log | gotestfmt -hide successful-tests

       # Upload the original go test log as an artifact for later review.
       - name: Upload test log
         if: failure()
         uses: actions/upload-artifact@v3
         with:
-          name: test-log
+          name: fork-test-log
           path: src/testlog/*
           if-no-files-found: error
           retention-days: 14
@@ -364,7 +339,7 @@ jobs:
       # Update check run called "Test-Suite-Fork"
       - uses: actions/github-script@v6
         id: update-check-run
-        if: ${{ always() }}
+        if: failure()
         env:
           number: ${{ github.event.client_payload.pull_request.number }}
           job: ${{ github.job }}
@@ -395,7 +370,7 @@ jobs:
   # --- Source Code Linting ----------------------------------------------------------------------------
   # ----------------------------------------------------------------------------------------------------
-  Linting:
+  Source-Code-Linting:
     needs: [Precheck, Checkout]
     environment: Testing
     runs-on: ubuntu-latest
@@ -416,7 +391,7 @@ jobs:
         with:
           # Keep pinned to a version as sometimes updates will add new lint
           # failures in unchanged code.
-          version: v1.50.1
+          version: v1.52.2
           working-directory: src
           skip-pkg-cache: true
           skip-build-cache: true
@@ -435,82 +410,53 @@ jobs:
           working-directory: src

+  # ----------------------------------------------------------------------------------------------------
+  # --- GitHub Actions Linting -------------------------------------------------------------------------
+  # ----------------------------------------------------------------------------------------------------
+  Actions-Lint:
+    needs: [Precheck]
+    environment: Testing
+    runs-on: ubuntu-latest
+    if: needs.precheck.outputs.actionsfileschanged == 'true'
+    steps:
+      - uses: actions/checkout@v3
+      - name: actionlint
+        uses: raven-actions/actionlint@v1
+        with:
+          fail-on-error: true
+          cache: true
+          # Ignore
+          # * combining commands into a subshell and using single output
+          #   redirect
+          # * various variable quoting patterns
+          # * possible ineffective echo commands
+          flags: "-ignore SC2129 -ignore SC2086 -ignore SC2046 -ignore 2116"

   # ----------------------------------------------------------------------------------------------------
   # --- Publish steps ----------------------------------------------------------------------------------
   # ----------------------------------------------------------------------------------------------------
   Publish-Binary:
-    needs: [Test-Suite-Trusted, Unit-Test-Suite, Linting, Website-Linting, SetEnv]
+    needs: [Test-Suite-Trusted, Source-Code-Linting, Website-Linting, SetEnv]
     environment: ${{ needs.SetEnv.outputs.environment }}
     runs-on: ubuntu-latest
     if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
-    defaults:
-      run:
-        working-directory: src
     steps:
       - uses: actions/checkout@v3
-        with:
-          fetch-depth: 0 # needed to pull changelog
-
-      - name: Setup Golang with cache
-        uses: magnetikonline/action-golang-cache@v4
-        with:
-          go-version-file: src/go.mod
-
-      - name: Decide goreleaser release mode
-        shell: bash
-        run: |
-          if test '${{ github.ref }}' = "refs/heads/main"; then
-            echo "grflags=--snapshot" >> $GITHUB_ENV
-          else
-            echo "grflags=" >> $GITHUB_ENV
-          fi
-
-      - name: Run GoReleaser
-        uses: goreleaser/goreleaser-action@v4
-        with:
-          version: latest
-          args: release --rm-dist --timeout 500m --parallelism 1 ${{ env.grflags }}
-          workdir: src
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          RUDDERSTACK_CORSO_WRITE_KEY: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}
-          RUDDERSTACK_CORSO_DATA_PLANE_URL: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}
-          CORSO_VERSION: ${{ needs.SetEnv.outputs.version }}
-
-      - name: Upload darwin arm64
-        uses: actions/upload-artifact@v3
-        with:
-          name: corso_Darwin_arm64
-          path: src/dist/corso_darwin_arm64/corso
-      - name: Upload linux arm64
-        uses: actions/upload-artifact@v3
-        with:
-          name: corso_Linux_arm64
-          path: src/dist/corso_linux_arm64/corso
-      - name: Upload darwin amd64
-        uses: actions/upload-artifact@v3
-        with:
-          name: corso_Darwin_amd64
-          path: src/dist/corso_darwin_amd64_v1/corso
-      - name: Upload linux amd64
-        uses: actions/upload-artifact@v3
-        with:
-          name: corso_Linux_amd64
-          path: src/dist/corso_linux_amd64_v1/corso
-      - name: Upload windows amd64
-        uses: actions/upload-artifact@v3
-        with:
-          name: corso_Windows_amd64
-          path: src/dist/corso_windows_amd64_v1/corso.exe
+      - name: Publish Binary
+        uses: ./.github/actions/publish-binary
+        with:
+          version: ${{ needs.SetEnv.outputs.version }}
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          rudderstack_write_key: ${{ secrets.RUDDERSTACK_CORSO_WRITE_KEY }}
+          rudderstack_data_plane_url: ${{ secrets.RUDDERSTACK_CORSO_DATA_PLANE_URL }}

   Publish-Image:
-    needs: [Test-Suite-Trusted, Unit-Test-Suite, Linting, Website-Linting, SetEnv]
+    needs: [Test-Suite-Trusted, Source-Code-Linting, Website-Linting, SetEnv]
     environment: ${{ needs.SetEnv.outputs.environment }}
     runs-on: ubuntu-latest
     if: startsWith(github.ref, 'refs/tags/')
@@ -652,7 +598,7 @@ jobs:
       ./corso.exe --version 2>&1 | grep -E "version: ${{ env.CORSO_VERSION }}$"

   Publish-Website-Test:
-    needs: [Test-Suite-Trusted, Unit-Test-Suite, Linting, Website-Linting, SetEnv]
+    needs: [Test-Suite-Trusted, Source-Code-Linting, Website-Linting, SetEnv]
     environment: ${{ needs.SetEnv.outputs.environment }}
     runs-on: ubuntu-latest
     if: github.ref == 'refs/heads/main'
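The test jobs above share one pipeline shape: JSON-mode go test output teed to a log file and formatted by gotestfmt, with pipefail preserving the test exit code even though the output is consumed downstream. A sketch of that step body, assuming gotestfmt is already on PATH:

#!/usr/bin/env bash
set -euo pipefail

# With pipefail set, a `go test` failure still fails the step even though
# its output feeds the formatter; the raw log is kept for artifact upload.
mkdir -p ./testlog

go test \
  -json \
  -v \
  -timeout 15m \
  ./... \
  2>&1 | tee ./testlog/gotest.log | gotestfmt -hide successful-tests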

View File

@@ -1,5 +1,6 @@
 name: CI Test Cleanup
 on:
+  workflow_dispatch:
   schedule:
     # every half hour
     - cron: "*/30 * * * *"
@@ -27,7 +28,7 @@ jobs:
       - name: Purge CI-Produced Folders for Users
         uses: ./.github/actions/purge-m365-data
         with:
-          user: ${{ secrets[matrix.user] }}
+          user: ${{ vars[matrix.user] }}
           folder-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }}
           older-than: ${{ env.HALF_HOUR_AGO }}
           azure-client-id: ${{ secrets.CLIENT_ID }}
@@ -58,7 +59,7 @@ jobs:
       - name: Purge CI-Produced Folders for Sites
         uses: ./.github/actions/purge-m365-data
         with:
-          site: ${{ secrets[matrix.site] }}
+          site: ${{ vars[matrix.site] }}
           folder-prefix: ${{ vars.CORSO_M365_TEST_PREFIXES }}
           libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
           older-than: ${{ env.HALF_HOUR_AGO }}

View File

@@ -1,10 +1,8 @@
 name: Nightly Load Testing
 on:
   schedule:
-    # every day at 01:59 (01:59am) UTC
-    # - cron: "59 1 * * *"
-    # temp, for testing: every 4 hours
-    - cron: "0 */4 * * *"
+    # every day at 03:59 GMT (roughly 8pm PST)
+    - cron: "59 3 * * *"

 permissions:
   # required to retrieve AWS credentials
@@ -20,6 +18,10 @@ jobs:
   Load-Tests:
     environment: Load Testing
     runs-on: ubuntu-latest
+    # Skipping load testing for now. They need some love to get up and
+    # running properly, and it's better to not fight for resources with
+    # tests that are guaranteed to fail.
+    if: false
     defaults:
       run:
         working-directory: src
@@ -57,7 +59,7 @@ jobs:
       CORSO_M365_LOAD_TEST_USER_ID: ${{ secrets.CORSO_M365_LOAD_TEST_USER_ID }}
       CORSO_M365_LOAD_TEST_ORG_USERS: ${{ secrets.CORSO_M365_LOAD_TEST_ORG_USERS }}
       CORSO_PASSPHRASE: ${{ secrets.CORSO_PASSPHRASE }}
-      IGNORE_LOAD_TEST_USER_ID: ${{ secrets.EXT_SDK_TEST_USER_ID }}
+      IGNORE_LOAD_TEST_USER_ID: ${{ vars.EXT_SDK_TEST_USER_ID }}
       LOG_GRAPH_REQUESTS: true
     run: |
       set -euo pipefail

View File

@@ -3,12 +3,8 @@ on:
   workflow_dispatch:
   schedule:
-    # Run every day at 0 minutes and 0 hours (midnight GMT)
-    - cron: "0 0 * * *"
-  push:
-    branches: [main]
-    tags: ["v*.*.*"]
+    # Run every day at 04:00 GMT (roughly 8pm PST)
+    - cron: "0 4 * * *"

 permissions:
   # required to retrieve AWS credentials
@@ -45,38 +41,7 @@ jobs:
   # SetM365App will decide which M365 app to use for this CI run
   SetM365App:
-    environment: Testing
-    runs-on: ubuntu-latest
-    outputs:
-      client_app_slot: ${{ steps.roundrobin.outputs.CLIENT_APP_SLOT }}
-      client_id_env: ${{ steps.roundrobin.outputs.CLIENT_ID_ENV }}
-      client_secret_env: ${{ steps.roundrobin.outputs.CLIENT_SECRET_ENV }}
-    steps:
-      - name: Figure out which client id to use
-        id: roundrobin
-        run: |
-          slot=$((GITHUB_RUN_NUMBER % 4))
-          echo "CLIENT_APP_SLOT=$slot" >> $GITHUB_OUTPUT
-          case $slot in
-            0)
-              echo "CLIENT_ID_ENV=CLIENT_ID" >> $GITHUB_OUTPUT
-              echo "CLIENT_SECRET_ENV=CLIENT_SECRET" >> $GITHUB_OUTPUT
-              ;;
-            1)
-              echo "CLIENT_ID_ENV=CLIENT_ID_2" >> $GITHUB_OUTPUT
-              echo "CLIENT_SECRET_ENV=CLIENT_SECRET_2" >> $GITHUB_OUTPUT
-              ;;
-            2)
-              echo "CLIENT_ID_ENV=CLIENT_ID_3" >> $GITHUB_OUTPUT
-              echo "CLIENT_SECRET_ENV=CLIENT_SECRET_3" >> $GITHUB_OUTPUT
-              ;;
-            3)
-              echo "CLIENT_ID_ENV=CLIENT_ID_4" >> $GITHUB_OUTPUT
-              echo "CLIENT_SECRET_ENV=CLIENT_SECRET_4" >> $GITHUB_OUTPUT
-              ;;
-          esac
+    uses: alcionai/corso/.github/workflows/accSelector.yaml@main

   SetEnv:
     environment: Testing
@@ -85,7 +50,6 @@ jobs:
       environment: ${{ steps.environment.outputs.environment }}
       version: ${{ steps.version.outputs.version }}
       website-bucket: ${{ steps.website-bucket.outputs.website-bucket }}
-      website-cfid: ${{ steps.website-cfid.outputs.website-cfid }}
     steps:
       - uses: actions/checkout@v3
@@ -157,8 +121,9 @@ jobs:
           AZURE_CLIENT_SECRET: ${{ secrets[env.AZURE_CLIENT_SECRET_NAME] }}
           AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
           CORSO_NIGHTLY_TESTS: true
-          CORSO_M365_TEST_USER_ID: ${{ secrets.CORSO_M365_TEST_USER_ID }}
-          CORSO_SECONDARY_M365_TEST_USER_ID: ${{ secrets.CORSO_SECONDARY_M365_TEST_USER_ID }}
+          CORSO_E2E_TESTS: true
+          CORSO_M365_TEST_USER_ID: ${{ vars.CORSO_M365_TEST_USER_ID }}
+          CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
           CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
           CORSO_LOG_FILE: ./src/testlog/testlogging.log
           LOG_GRAPH_REQUESTS: true
@@ -175,10 +140,10 @@ jobs:
       # Upload the original go test output as an artifact for later review.
       - name: Upload test log
-        if: failure()
+        if: always()
         uses: actions/upload-artifact@v3
         with:
-          name: test-log
+          name: nightly-test-log
           path: src/testlog/*
           if-no-files-found: error
           retention-days: 14

View File

@@ -1,5 +1,5 @@
 name: Sanity Testing
 on:
   push:
     branches:
       - main
@@ -19,25 +19,40 @@ concurrency:
   cancel-in-progress: true

 jobs:
+  SetM365App:
+    uses: alcionai/corso/.github/workflows/accSelector.yaml@main
+
   Sanity-Tests:
+    needs: [ SetM365App ]
     environment: Testing
     runs-on: ubuntu-latest
     env:
       AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
       AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_SECRET }}
-      AZURE_CLIENT_ID: ${{ secrets.CLIENT_ID }}
-      AZURE_CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
+      AZURE_CLIENT_ID: ${{ secrets[needs.SetM365App.outputs.client_id_env] }}
+      AZURE_CLIENT_SECRET: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }}
       AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
       CORSO_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }}
-      CORSO_LOG_FILE: ./src/testlog/testlogging.log
-      CORSO_M365_TEST_USER_ID: ${{ github.event.inputs.user != '' && github.event.inputs.user || secrets.CORSO_M365_TEST_USER_ID }}
+      CORSO_LOG_DIR: testlog
+      CORSO_LOG_FILE: testlog/testlogging.log
       CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
-      TEST_RESULT: "test_results"
+      RESTORE_DEST_PFX: Corso_Test_Sanity_
+      TEST_RESULT: test_results
+      TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || secrets.CORSO_M365_TEST_USER_ID }}
+      TEST_SITE: ${{ secrets.CORSO_M365_TEST_SITE_URL }}
+      SECONDARY_TEST_USER: ${{ secrets.CORSO_SECONDARY_M365_TEST_USER_ID }}
+      # The default working directory doesn't seem to apply to things without
+      # the 'run' directive. https://stackoverflow.com/a/67845456
+      WORKING_DIR: src
     defaults:
       run:
-        working-directory: src
+        working-directory: ${{ env.WORKING_DIR }}
     steps:
+      ##########################################################################################################################################
+      # setup

       - uses: actions/checkout@v3

       - name: Setup Golang with cache
@@ -45,336 +60,281 @@ jobs:
         with:
           go-version-file: src/go.mod

-      - run: make build
-      - run: go build -o sanityCheck ./cmd/sanity_test
-      - run: mkdir test_results
-      - run: mkdir testlog
+      - run: go build -o corso
+      - run: go build -o sanity-test ./cmd/sanity_test
+      - run: mkdir ${TEST_RESULT}
+      - run: mkdir ${CORSO_LOG_DIR}

+      ##########################################################################################################################################
+      # Pre-Run cleanup
+      # unlike CI tests, sanity tests are not expected to run concurrently.
+      # however, the sanity yaml concurrency is set to a maximum of 1 run, preferring
+      # the latest release. If we wait to clean up the production til after the tests
+      # It would be possible to complete all the testing but cancel the run before
+      # cleanup occurs. Setting the cleanup before the tests ensures we always begin
+      # with a clean slate, and cannot compound data production.
+
+      - name: Set purge boundary
+        if: always()
+        run: |
+          echo "NOW=$(date +"%Y-%m-%dT%H:%M:%SZ")" >> $GITHUB_ENV
+
+      - name: Purge CI-Produced Folders for Users
+        uses: ./.github/actions/purge-m365-data
+        with:
+          user: ${{ env.TEST_USER }}
+          folder-prefix: ${{ env.RESTORE_DEST_PFX }}
+          older-than: ${{ env.NOW }}
+          azure-client-id: ${{ env.AZURE_CLIENT_ID }}
+          azure-client-secret: ${{ env.AZURE_CLIENT_SECRET }}
+          azure-tenant-id: ${{ env.AZURE_TENANT_ID }}
+          m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
+          m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
+
+      - name: Purge CI-Produced Folders for Sites
+        if: always()
+        uses: ./.github/actions/purge-m365-data
+        with:
+          site: ${{ env.TEST_SITE }}
+          folder-prefix: ${{ env.RESTORE_DEST_PFX }}
+          libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }}
+          older-than: ${{ env.NOW }}
+          azure-client-id: ${{ env.AZURE_CLIENT_ID }}
+          azure-client-secret: ${{ env.AZURE_CLIENT_SECRET }}
+          azure-tenant-id: ${{ env.AZURE_TENANT_ID }}
+          m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }}
+          m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }}
+
+      ##########################################################################################################################################
+      # Repository commands

       # run the tests
       - name: Version Test
         run: |
-          set -euo pipefail
-          if [ $( ./corso --version | grep 'Corso version:' | wc -l) -ne 1 ]
-          then
-            echo "valid version not found"
-            exit 1
-          fi
+          ./corso --version | grep -c 'Corso version:'

       - name: Repo init test
         id: repo-init
         env:
           TEST_RESULT: "test_results"
         run: |
           set -euo pipefail
-          prefix=`date +"%Y-%m-%d-%T"`
+          prefix=$(date +"%Y-%m-%d-%T")
+          echo -e "\nRepo init test\n" >> ${CORSO_LOG_FILE}

           ./corso repo init s3 \
-            --no-stats \
-            --hide-progress \
-            --prefix $prefix \
+            --no-stats --hide-progress --prefix $prefix \
             --bucket ${CORSO_BUCKET} 2>&1 | tee $TEST_RESULT/initrepo.txt

           if ! grep -q 'Initialized a S3 repository within bucket' $TEST_RESULT/initrepo.txt
           then
-            echo "repo could not be initiated"
+            echo "Repo could not be initialized"
             exit 1
           fi

           echo result="$prefix" >> $GITHUB_OUTPUT

-      # run the tests
       - name: Repo connect test
         run: |
           set -euo pipefail
+          echo -e "\nRepo connect test\n" >> ${CORSO_LOG_FILE}

           ./corso repo connect s3 \
-            --no-stats \
-            --hide-progress \
-            --prefix ${{ steps.repo-init.outputs.result }} \
+            --no-stats --hide-progress --prefix ${{ steps.repo-init.outputs.result }} \
             --bucket ${CORSO_BUCKET} 2>&1 | tee $TEST_RESULT/connect.txt

           if ! grep -q 'Connected to S3 bucket' $TEST_RESULT/connect.txt
           then
-            echo "repo could not be connected"
+            echo "Repo could not be connected"
             exit 1
           fi

+      ##########################################################################################################################################
+      # Exchange

       # generate new entries to roll into the next load test
       # only runs if the test was successful
-      - name: New Data Creation
+      - name: Exchange - Create new data
         working-directory: ./src/cmd/factory
-        env:
-          AZURE_CLIENT_ID: ${{ secrets.CLIENT_ID }}
-          AZURE_CLIENT_SECRET: ${{ secrets.CLIENT_SECRET }}
-          AZURE_TENANT_ID: ${{ secrets.TENANT_ID }}
         run: |
           go run . exchange emails \
-            --user ${{ env.CORSO_M365_TEST_USER_ID }} \
-            --tenant ${{ env.AZURE_TENANT_ID }} \
-            --destination Corso_Restore_st_${{ steps.repo-init.outputs.result }} \
+            --user ${TEST_USER} \
+            --tenant ${AZURE_TENANT_ID} \
+            --destination ${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }} \
             --count 4

-      # run the tests
-      - name: Backup exchange test
-        id: exchange-test
-        run: |
-          ./corso backup create exchange \
-            --no-stats \
-            --mailbox "${CORSO_M365_TEST_USER_ID}" \
-            --hide-progress \
-            --data 'email' \
-            --json \
-            2>&1 | tee $TEST_RESULT/backup_exchange.txt
-
-          resultjson=$(sed -e '1,/Completed Backups/d' $TEST_RESULT/backup_exchange.txt )
-
-          if [[ $( echo $resultjson | jq -r '.[0] | .errorCount') -ne 0 ]]; then
-            echo "backup was not successful"
-            exit 1
-          fi
-
-          data=$( echo $resultjson | jq -r '.[0] | .id' )
-          echo result=$data >> $GITHUB_OUTPUT
-
-      # list all exchange backups
-      - name: Backup exchange list test
-        run: |
-          set -euo pipefail
-          ./corso backup list exchange \
-            --no-stats \
-            --hide-progress \
-            2>&1 | tee $TEST_RESULT/backup_exchange_list.txt
-
-          if ! grep -q ${{ steps.exchange-test.outputs.result }} $TEST_RESULT/backup_exchange_list.txt
-          then
-            echo "listing of backup was not successful"
-            exit 1
-          fi
-
-      # list the previous exchange backups
-      - name: Backup exchange list single backup test
-        run: |
-          set -euo pipefail
-          ./corso backup list exchange \
-            --no-stats \
-            --hide-progress \
-            --backup "${{ steps.exchange-test.outputs.result }}" \
-            2>&1 | tee $TEST_RESULT/backup_exchange_list_single.txt
-
-          if ! grep -q ${{ steps.exchange-test.outputs.result }} $TEST_RESULT/backup_exchange_list.txt
-          then
-            echo "listing of backup was not successful"
-            exit 1
-          fi
-
-      # test exchange restore
-      - name: Backup exchange restore
-        id: exchange-restore-test
-        run: |
-          set -euo pipefail
-          ./corso restore exchange \
-            --no-stats \
-            --email-folder Corso_Restore_st_${{ steps.repo-init.outputs.result }} \
-            --hide-progress \
-            --backup "${{ steps.exchange-test.outputs.result }}" \
-            2>&1 | tee $TEST_RESULT/exchange-restore-test.txt
-
-          echo result=$(grep -i -e 'Restoring to folder ' $TEST_RESULT/exchange-restore-test.txt | sed "s/Restoring to folder//" ) >> $GITHUB_OUTPUT
-
-      - name: Restoration check
-        env:
-          SANITY_RESTORE_FOLDER: ${{ steps.exchange-restore-test.outputs.result }}
-          SANITY_RESTORE_SERVICE: "exchange"
-          TEST_DATA: Corso_Restore_st_${{ steps.repo-init.outputs.result }}
-        run: |
-          set -euo pipefail
-          ./sanityCheck
-
-      # test incremental backup exchange
-      - name: Backup exchange incremental
-        id: exchange-incremental-test
-        run: |
-          set -euo pipefail
-          ./corso backup create exchange \
-            --no-stats \
-            --hide-progress \
-            --mailbox "${CORSO_M365_TEST_USER_ID}" \
-            --json \
-            2>&1 | tee $TEST_RESULT/backup_exchange_incremental.txt
-
-          resultjson=$(sed -e '1,/Completed Backups/d' $TEST_RESULT/backup_exchange_incremental.txt )
-          if [[ $( echo $resultjson | jq -r '.[0] | .errorCount') -ne 0 ]]; then
-            echo "backup was not successful"
-            exit 1
-          fi
-
-          echo result=$( echo $resultjson | jq -r '.[0] | .id' ) >> $GITHUB_OUTPUT
-
-      # test exchange restore
-      - name: Backup incremantal exchange restore
-        id: exchange-incremantal-restore-test
-        run: |
-          set -euo pipefail
-          ./corso restore exchange \
-            --no-stats \
-            --hide-progress \
-            --backup "${{ steps.exchange-incremental-test.outputs.result }}" \
-            --email-folder Corso_Restore_st_${{ steps.repo-init.outputs.result }} \
-            2>&1 | tee $TEST_RESULT/exchange-incremantal-restore-test.txt
-
-          echo result=$(grep -i -e 'Restoring to folder ' $TEST_RESULT/exchange-incremantal-restore-test.txt | sed "s/Restoring to folder//" ) >> $GITHUB_OUTPUT
-
-      - name: Restoration check
-        env:
-          SANITY_RESTORE_FOLDER: ${{ steps.exchange-incremantal-restore-test.outputs.result }}
-          SANITY_RESTORE_SERVICE: "exchange"
-          TEST_DATA: Corso_Restore_st_${{ steps.repo-init.outputs.result }}
-          BASE_BACKUP: ${{ steps.exchange-restore-test.outputs.result }}
-        run: |
-          set -euo pipefail
-          ./sanityCheck
+      - name: Exchange - Backup
+        id: exchange-backup
+        uses: ./.github/actions/backup-restore-test
+        with:
+          service: exchange
+          kind: backup
+          backup-args: '--mailbox "${TEST_USER}" --data "email"'
+          restore-args: '--email-folder ${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }}'
+          test-folder: '${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }}'
+
+      - name: Exchange - Incremental backup
+        id: exchange-backup-incremental
+        uses: ./.github/actions/backup-restore-test
+        with:
+          service: exchange
+          kind: backup-incremental
+          backup-args: '--mailbox "${TEST_USER}" --data "email"'
+          restore-args: '--email-folder ${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }}'
+          test-folder: '${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }}'
+          base-backup: ${{ steps.exchange-backup.outputs.backup-id }}
+
+      - name: Exchange - Non delta backup
+        id: exchange-backup-non-delta
+        uses: ./.github/actions/backup-restore-test
+        with:
+          service: exchange
+          kind: backup-non-delta
+          backup-args: '--mailbox "${TEST_USER}" --data "email" --disable-delta'
+          restore-args: '--email-folder ${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }}'
+          test-folder: '${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }}'
+          base-backup: ${{ steps.exchange-backup.outputs.backup-id }}
+
+      - name: Exchange - Incremental backup after non-delta
+        id: exchange-backup-incremental-after-non-delta
+        uses: ./.github/actions/backup-restore-test
+        with:
+          service: exchange
+          kind: backup-incremental-after-non-delta
+          backup-args: '--mailbox "${TEST_USER}" --data "email"'
+          restore-args: '--email-folder ${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }}'
+          test-folder: '${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }}'
+          base-backup: ${{ steps.exchange-backup.outputs.backup-id }}

-      # Onedrive test
-      # run the tests
-      - name: Backup onedrive test
-        id: onedrive-test
-        run: |
-          set -euo pipefail
-          ./corso backup create onedrive \
-            --no-stats \
-            --hide-progress \
-            --user "${CORSO_M365_TEST_USER_ID}" \
-            --json \
-            2>&1 | tee $TEST_RESULT/backup_onedrive.txt
-
-          resultjson=$(sed -e '1,/Completed Backups/d' $TEST_RESULT/backup_onedrive.txt )
-          if [[ $( echo $resultjson | jq -r '.[0] | .errorCount') -ne 0 ]]; then
-            echo "backup was not successful"
-            exit 1
-          fi
-
-          data=$( echo $resultjson | jq -r '.[0] | .id' )
-          echo result=$data >> $GITHUB_OUTPUT
-
-      # list all onedrive backups
-      - name: Backup onedrive list test
-        run: |
-          set -euo pipefail
-          ./corso backup list onedrive \
-            --no-stats \
-            --hide-progress \
-            2>&1 | tee $TEST_RESULT/backup_onedrive_list.txt
-
-          if ! grep -q ${{ steps.onedrive-test.outputs.result }} $TEST_RESULT/backup_onedrive_list.txt
-          then
-            echo "listing of backup was not successful"
-            exit 1
-          fi
-
-      # list the previous onedrive backup
-      - name: Backup onedrive list test
-        run: |
-          set -euo pipefail
-          ./corso backup list onedrive \
-            --no-stats \
-            --hide-progress \
-            --backup "${{ steps.onedrive-test.outputs.result }}" \
-            2>&1 | tee $TEST_RESULT/backup_onedrive_list_single.txt
-
-          if ! grep -q ${{ steps.onedrive-test.outputs.result }} $TEST_RESULT/backup_onedrive_list.txt
-          then
-            echo "listing of backup was not successful"
-            exit 1
-          fi
-
-      # test onedrive restore
-      - name: Backup onedrive restore
-        id: onedrive-restore-test
-        run: |
-          set -euo pipefail
-          ./corso restore onedrive \
-            --no-stats \
-            --restore-permissions \
-            --hide-progress \
-            --backup "${{ steps.onedrive-test.outputs.result }}" \
-            2>&1 | tee $TEST_RESULT/onedrive-restore-test.txt
-
-          echo result=$(grep -i -e 'Restoring to folder ' $TEST_RESULT/onedrive-restore-test.txt | sed "s/Restoring to folder//") >> $GITHUB_OUTPUT
-
-      - name: Restoration oneDrive check
-        env:
-          SANITY_RESTORE_FOLDER: ${{ steps.onedrive-restore-test.outputs.result }}
-          SANITY_RESTORE_SERVICE: "onedrive"
-        run: |
-          set -euo pipefail
-          ./sanityCheck
-
-      # test onedrive incremental
-      - name: Backup onedrive incremental
-        id: onedrive-incremental-test
-        run: |
-          set -euo pipefail
-          ./corso backup create onedrive \
-            --no-stats \
-            --hide-progress \
-            --user "${CORSO_M365_TEST_USER_ID}" \
-            --json \
-            2>&1 | tee $TEST_RESULT/backup_onedrive_incremental.txt
-
-          resultjson=$(sed -e '1,/Completed Backups/d' $TEST_RESULT/backup_onedrive_incremental.txt )
-          if [[ $( echo $resultjson | jq -r '.[0] | .errorCount') -ne 0 ]]; then
-            echo "backup was not successful"
-            exit 1
-          fi
-
-          data=$( echo $resultjson | jq -r '.[0] | .id' )
-          echo result=$data >> $GITHUB_OUTPUT
-
-      # test onedrive restore
-      - name: Backup onedrive restore
-        id: onedrive-incremental-restore-test
-        run: |
-          set -euo pipefail
-          ./corso restore onedrive \
-            --no-stats \
-            --restore-permissions \
-            --hide-progress \
-            --backup "${{ steps.onedrive-incremental-test.outputs.result }}" \
-            2>&1 | tee $TEST_RESULT/onedrive-incremental-restore-test.txt
-
-          echo result=$(grep -i -e 'Restoring to folder ' $TEST_RESULT/onedrive-incremental-restore-test.txt | sed "s/Restoring to folder//") >> $GITHUB_OUTPUT
-
-      - name: Restoration oneDrive check
-        env:
-          SANITY_RESTORE_FOLDER: ${{ steps.onedrive-incremental-restore-test.outputs.result }}
-          SANITY_RESTORE_SERVICE: "onedrive"
-        run: |
-          set -euo pipefail
-          ./sanityCheck
+      ##########################################################################################################################################
+      # Onedrive
+
+      # generate new entries for test
+      - name: OneDrive - Create new data
+        id: new-data-creation-onedrive
+        working-directory: ./src/cmd/factory
+        run: |
+          suffix=$(date +"%Y-%m-%d_%H-%M-%S")
+
+          go run . onedrive files \
+            --user ${TEST_USER} \
+            --secondaryuser ${SECONDARY_TEST_USER} \
+            --tenant ${AZURE_TENANT_ID} \
+            --destination ${RESTORE_DEST_PFX}$suffix \
+            --count 4
+
+          echo result="${suffix}" >> $GITHUB_OUTPUT
+
+      - name: OneDrive - Backup
+        id: onedrive-backup
+        uses: ./.github/actions/backup-restore-test
+        with:
+          service: onedrive
+          kind: backup
+          backup-args: '--user "${TEST_USER}"'
+          restore-args: '--folder ${RESTORE_DEST_PFX}${{ steps.new-data-creation-onedrive.outputs.result }} --restore-permissions'
+          test-folder: '${RESTORE_DEST_PFX}${{ steps.new-data-creation-onedrive.outputs.result }}'
+
+      # generate some more entries for incremental check
+      - name: OneDrive - Create new data (for incremental)
+        working-directory: ./src/cmd/factory
+        run: |
+          go run . onedrive files \
+            --user ${TEST_USER} \
+            --secondaryuser ${SECONDARY_TEST_USER} \
+            --tenant ${AZURE_TENANT_ID} \
+            --destination ${RESTORE_DEST_PFX}${{ steps.new-data-creation-onedrive.outputs.result }} \
+            --count 4
+
+      - name: OneDrive - Incremental backup
+        id: onedrive-incremental
+        uses: ./.github/actions/backup-restore-test
+        with:
+          service: onedrive
+          kind: incremental
+          backup-args: '--user "${TEST_USER}"'
+          restore-args: '--folder ${RESTORE_DEST_PFX}${{ steps.new-data-creation-onedrive.outputs.result }} --restore-permissions'
+          test-folder: '${RESTORE_DEST_PFX}${{ steps.new-data-creation-onedrive.outputs.result }}'
+
+      ##########################################################################################################################################
+      # Sharepoint
+
+      # generate new entries for test
+      - name: SharePoint - Create new data
+        id: new-data-creation-sharepoint
+        working-directory: ./src/cmd/factory
+        run: |
+          suffix=$(date +"%Y-%m-%d_%H-%M-%S")
+
+          go run . sharepoint files \
+            --site ${TEST_SITE} \
+            --user ${TEST_USER} \
+            --secondaryuser ${SECONDARY_TEST_USER} \
+            --tenant ${AZURE_TENANT_ID} \
+            --destination ${RESTORE_DEST_PFX}$suffix \
+            --count 4
+
+          echo result="${suffix}" >> $GITHUB_OUTPUT
+
+      - name: SharePoint - Backup
+        id: sharepoint-backup
+        uses: ./.github/actions/backup-restore-test
+        with:
+          service: sharepoint
+          kind: backup
+          backup-args: '--site "${TEST_SITE}"'
+          restore-args: '--folder ${RESTORE_DEST_PFX}${{ steps.new-data-creation-sharepoint.outputs.result }} --restore-permissions'
+          test-folder: '${RESTORE_DEST_PFX}${{ steps.new-data-creation-sharepoint.outputs.result }}'
+
+      # generate some more entries for incremental check
+      - name: SharePoint - Create new data (for incremental)
+        working-directory: ./src/cmd/factory
+        run: |
+          go run . sharepoint files \
+            --site ${TEST_SITE} \
+            --user ${TEST_USER} \
+            --secondaryuser ${SECONDARY_TEST_USER} \
+            --tenant ${AZURE_TENANT_ID} \
+            --destination ${RESTORE_DEST_PFX}${{ steps.new-data-creation-sharepoint.outputs.result }} \
+            --count 4
+
+      - name: SharePoint - Incremental backup
+        id: sharepoint-incremental
+        uses: ./.github/actions/backup-restore-test
+        with:
+          service: sharepoint
+          kind: incremental
+          backup-args: '--site "${TEST_SITE}"'
+          restore-args: '--folder ${RESTORE_DEST_PFX}${{ steps.new-data-creation-sharepoint.outputs.result }} --restore-permissions'
+          test-folder: '${RESTORE_DEST_PFX}${{ steps.new-data-creation-sharepoint.outputs.result }}'
+
+      ##########################################################################################################################################
+      # Logging & Notifications

       # Upload the original go test output as an artifact for later review.
       - name: Upload test log
-        if: failure()
+        if: always()
         uses: actions/upload-artifact@v3
         with:
-          name: test-log
-          path: src/testlog/*
+          name: sanity-test-log
+          path: ${{ env.WORKING_DIR }}/${{ env.CORSO_LOG_DIR }}/
           if-no-files-found: error
           retention-days: 14

-      # run the tests
       - name: SHA info
         id: sha-info
         if: failure()
         run: |
+          echo ${GITHUB_REF#refs/heads/}-${GITHUB_SHA}
           echo SHA=${GITHUB_REF#refs/heads/}-${GITHUB_SHA} >> $GITHUB_OUTPUT
           echo RUN_URL=${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} >> $GITHUB_OUTPUT
           echo COMMIT_URL=${{ github.server_url }}/${{ github.repository }}/commit/${GITHUB_SHA} >> $GITHUB_OUTPUT

       - name: Send Github Action failure to Slack
         id: slack-notification
         if: failure()
@@ -384,21 +344,11 @@ jobs:
           {
             "text": "GitHub Action build result: ${{ job.status }} on SHA: ${{ steps.sha-info.outputs.SHA }}",
             "blocks": [
-              {
-                "type": "header",
-                "text": {
-                  "type": "plain_text",
-                  "text": "Failure in Sanity Test"
-                }
-              },
-              {
-                "type": "divider"
-              },
               {
                 "type": "section",
                 "text": {
                   "type": "mrkdwn",
-                  "text": "<${{ steps.sha-info.outputs.RUN_URL }}|Check logs> for <${{ steps.sha-info.outputs.COMMIT_URL }}|${{ steps.sha-info.outputs.SHA }}>"
+                  "text": "[FAILED] Sanity Checks :: <${{ steps.sha-info.outputs.RUN_URL }}|[Logs]> <${{ github.event.pull_request.html_url || github.event.head_commit.url }}|[Base]>\nCommit: <${{ steps.sha-info.outputs.COMMIT_URL }}|${{ steps.sha-info.outputs.SHA }}>"
                 }
               }
             ]
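The restore flow that this workflow now delegates to the composite action captures the restore destination by rewriting a log line into a step output. A standalone sketch of that grep/sed capture, with an illustrative log line standing in for real corso output:

#!/usr/bin/env bash
set -euo pipefail

# Turn the "Restoring to folder <name>" log line into a `result=` output,
# as the composite action's restore step does.
echo "Restoring to folder Corso_Test_Sanity_2023-05-18" |
  tee /tmp/corsologs |
  grep -i -e 'Restoring to folder ' |
  sed 's/Restoring to folder /result=/' |
  tee "${GITHUB_OUTPUT:-/dev/stdout}"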

View File

@@ -28,8 +28,7 @@ jobs:
       - name: Get version string
         id: version
         run: |
-          echo "set-output name=version::$(git describe --tags --abbrev=0)"
-          echo "::set-output name=version::$(git describe --tags --abbrev=0)"
+          echo version=$(git describe --tags --abbrev=0) | tee -a $GITHUB_OUTPUT

 # ----------------------------------------------------------------------------------------------------
 # --- Website Linting ---------------------------------------------------------------------------------

View File

@@ -7,9 +7,43 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## [Unreleased] (beta)

+### Added
+
+### Fixed
+
+### Known Issues
+
+## [v0.8.0] (beta) - 2023-05-15
+
+### Added
+- Released the --mask-sensitive-data flag, which will automatically obscure private data in logs.
+- Added `--disable-delta` flag to disable delta based backups for Exchange
+- Permission support for SharePoint libraries.
+
+### Fixed
+- Graph requests now automatically retry in case of a Bad Gateway or Gateway Timeout.
+- POST retries following certain status codes (500, 502, 504) will re-use the post body instead of retrying with a no-content request.
+- Fix nil pointer exception when running an incremental backup on SharePoint where the base backup used an older index data format.
+- --user and --mailbox flags have been removed from CLI examples for details and restore commands (they were already not supported; this only updates the docs).
+- Improve restore time on large restores by optimizing how items are loaded from the remote repository.
+- Remove Exchange item filtering based on M365 item ID via the CLI.
+- OneDrive backups no longer include a user's non-default drives.
+- OneDrive and SharePoint file downloads will properly redirect from 3xx responses.
+- Refined OneDrive rate limiter controls to reduce throttling errors.
+- Fix handling of duplicate folders at the same hierarchy level in Exchange. Duplicate folders will be merged during restore operations.
+- Fix backup for mailboxes that have used up all their storage quota.
+- Restored folders no longer appear in the Restore results. Only restored items will be displayed.
+
+### Known Issues
+- Restore operations will merge duplicate Exchange folders at the same hierarchy level into a single folder.
+- SharePoint SiteGroup permissions are not restored.
+- SharePoint document library data can't be restored after the library has been deleted.
+
+## [v0.7.0] (beta) - 2023-05-02

 ### Added
 - Permissions backup for OneDrive is now out of experimental (By default, only newly backed up items will have their permissions backed up. You will have to run a full backup to ensure all items have their permissions backed up.)
 - LocationRef is now populated for all services and data types. It should be used in place of RepoRef if a location for an item is required.
+- User selection for Exchange and OneDrive can accept either a user PrincipalName or the user's canonical ID.
+- Add path information to items that were skipped during backup because they were flagged as malware.

 ### Fixed
 - Fixed permissions restore in latest backup version.
@@ -24,9 +58,17 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - ParentPath of json output for Exchange calendar now shows names instead of IDs.
 - Fixed failure when downloading huge amount of attachments
 - Graph API requests that return an ECONNRESET error are now retried.
+- Fixed edge case in incremental backups where moving a subfolder, deleting and recreating the subfolder's original parent folder, and moving the subfolder back to where it started would skip backing up unchanged items in the subfolder.
+- SharePoint now correctly displays site urls on `backup list`, instead of the site id.
+- Drives with a directory containing a folder named 'folder' will now restore without error.
+- The CORSO_LOG_FILE env is appropriately utilized if no --log-file flag is provided.
+- Fixed Exchange events progress output to show calendar names instead of IDs.
+- Fixed reporting no items match if restoring or listing details on an older Exchange backup and filtering by folder.
+- Fix backup for mailboxes that have used up all their storage quota.

 ### Known Issues
 - Restoring a OneDrive or SharePoint file with the same name as a file with that name as its M365 ID may restore both items.
+- Exchange event restores will display calendar IDs instead of names in the progress output.

 ## [v0.6.1] (beta) - 2023-03-21
@@ -237,7 +279,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Miscellaneous
   - Optional usage statistics reporting ([RM-35](https://github.com/alcionai/corso-roadmap/issues/35))

-[Unreleased]: https://github.com/alcionai/corso/compare/v0.6.1...HEAD
+[Unreleased]: https://github.com/alcionai/corso/compare/v0.7.0...HEAD
+[v0.7.0]: https://github.com/alcionai/corso/compare/v0.6.1...v0.7.0
 [v0.6.1]: https://github.com/alcionai/corso/compare/v0.5.0...v0.6.1
 [v0.5.0]: https://github.com/alcionai/corso/compare/v0.4.0...v0.5.0
 [v0.4.0]: https://github.com/alcionai/corso/compare/v0.3.0...v0.4.0

View File

@@ -6,7 +6,7 @@ COPY src .
 ARG CORSO_BUILD_LDFLAGS=""
 RUN go build -o corso -ldflags "$CORSO_BUILD_LDFLAGS"

-FROM alpine:3.17
+FROM alpine:3

 LABEL org.opencontainers.image.title="Corso"
 LABEL org.opencontainers.image.description="Free, Secure, and Open-Source Backup for Microsoft 365"

View File

@@ -20,7 +20,7 @@ ARG TARGETARCH
 RUN GOOS=${TARGETOS} GOARCH=${TARGETARCH} go build -o /corso .

 ## Deploy
-FROM ubuntu:latest
+FROM ubuntu:22.10

 COPY --from=build /corso /

View File

@@ -1,5 +1,5 @@
 # This must match the version defined in .github/workflows/lint.yaml.
-WANTED_LINT_VERSION := 1.50.1
+WANTED_LINT_VERSION := 1.52.2

 LINT_VERSION := $(shell golangci-lint version | cut -d' ' -f4)
 HAS_LINT := $(shell which golangci-lint)
@@ -82,4 +82,4 @@ load-test:
 		./pkg/repository/loadtest/repository_load_test.go

 getM365:
 	go build -o getM365 cmd/getM365/main.go
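The Makefile pins golangci-lint by comparing the installed version string against the wanted one. A bash sketch of the equivalent guard, assuming `golangci-lint version` prints the version as its fourth whitespace-separated field (as the Makefile's cut implies):

#!/usr/bin/env bash
set -euo pipefail

# Must match the version in .github/workflows/lint.yaml, per the diff.
WANTED_LINT_VERSION="1.52.2"

if ! command -v golangci-lint >/dev/null; then
  echo "golangci-lint is not installed" >&2
  exit 1
fi

LINT_VERSION="$(golangci-lint version | cut -d' ' -f4)"
if [ "$LINT_VERSION" != "$WANTED_LINT_VERSION" ]; then
  echo "golangci-lint $LINT_VERSION installed, want $WANTED_LINT_VERSION" >&2
  exit 1
fi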

View File

@@ -9,13 +9,10 @@ import (
  "github.com/pkg/errors"
  "github.com/spf13/cobra"
- "github.com/alcionai/corso/src/cli/config"
- "github.com/alcionai/corso/src/cli/options"
  . "github.com/alcionai/corso/src/cli/print"
  "github.com/alcionai/corso/src/cli/utils"
- "github.com/alcionai/corso/src/internal/common"
+ "github.com/alcionai/corso/src/internal/common/idname"
  "github.com/alcionai/corso/src/internal/data"
- "github.com/alcionai/corso/src/pkg/account"
  "github.com/alcionai/corso/src/pkg/backup"
  "github.com/alcionai/corso/src/pkg/logger"
  "github.com/alcionai/corso/src/pkg/path"
@@ -198,7 +195,7 @@ func runBackups(
  r repository.Repository,
  serviceName, resourceOwnerType string,
  selectorSet []selectors.Selector,
- ins common.IDNameSwapper,
+ ins idname.Cacher,
) error {
  var (
    bIDs []string
@@ -210,7 +207,7 @@
    var (
      owner = discSel.DiscreteOwner
-     ictx  = clues.Add(ctx, "resource_owner", owner)
+     ictx  = clues.Add(ctx, "resource_owner_selected", owner)
    )
    bo, err := r.NewBackupWithLookup(ictx, discSel, ins)
@@ -221,6 +218,11 @@
      continue
    }
+   ictx = clues.Add(
+     ctx,
+     "resource_owner_id", bo.ResourceOwner.ID(),
+     "resource_owner_name", bo.ResourceOwner.Name())
    err = bo.Run(ictx)
    if err != nil {
      errs = append(errs, clues.Wrap(err, owner).WithClues(ictx))
@@ -230,7 +232,13 @@
    }
    bIDs = append(bIDs, string(bo.Results.BackupID))
-   Infof(ctx, "Done - ID: %v\n", bo.Results.BackupID)
+   if !DisplayJSONFormat() {
+     Infof(ctx, "Done\n")
+     printBackupStats(ctx, r, string(bo.Results.BackupID))
+   } else {
+     Infof(ctx, "Done - ID: %v\n", bo.Results.BackupID)
+   }
  }
  bups, berrs := r.Backups(ctx, bIDs)
@@ -264,7 +272,7 @@ func genericDeleteCommand(cmd *cobra.Command, bID, designation string, args []st
  ctx := clues.Add(cmd.Context(), "delete_backup_id", bID)
- r, _, err := getAccountAndConnect(ctx)
+ r, _, err := utils.GetAccountAndConnect(ctx)
  if err != nil {
    return Only(ctx, err)
  }
@@ -285,7 +293,7 @@ func genericDeleteCommand(cmd *cobra.Command, bID, designation string, args []st
func genericListCommand(cmd *cobra.Command, bID string, service path.ServiceType, args []string) error {
  ctx := cmd.Context()
- r, _, err := getAccountAndConnect(ctx)
+ r, _, err := utils.GetAccountAndConnect(ctx)
  if err != nil {
    return Only(ctx, err)
  }
@@ -318,20 +326,16 @@ func genericListCommand(cmd *cobra.Command, bID string, service path.ServiceType
  return nil
}
-func getAccountAndConnect(ctx context.Context) (repository.Repository, *account.Account, error) {
- cfg, err := config.GetConfigRepoDetails(ctx, true, nil)
- if err != nil {
-   return nil, nil, err
- }
- r, err := repository.Connect(ctx, cfg.Account, cfg.Storage, options.Control())
- if err != nil {
-   return nil, nil, clues.Wrap(err, "Failed to connect to the "+cfg.Storage.Provider.String()+" repository")
- }
- return r, &cfg.Account, nil
-}
func ifShow(flag string) bool {
  return strings.ToLower(strings.TrimSpace(flag)) == "show"
}
+func printBackupStats(ctx context.Context, r repository.Repository, bid string) {
+ b, err := r.Backup(ctx, bid)
+ if err != nil {
+   logger.CtxErr(ctx, err).Error("finding backup immediately after backup operation completion")
+   return // without this early return, b may be nil and the call below would panic
+ }
+ b.ToPrintable().Stats.Print(ctx)
+ Info(ctx, " ")
+}


@@ -17,7 +17,6 @@ import (
  "github.com/alcionai/corso/src/pkg/path"
  "github.com/alcionai/corso/src/pkg/repository"
  "github.com/alcionai/corso/src/pkg/selectors"
- "github.com/alcionai/corso/src/pkg/services/m365"
)
// ------------------------------------------------------------------------------------------------
@@ -50,20 +49,20 @@ corso backup create exchange --mailbox '*'`
  exchangeServiceCommandDeleteExamples = `# Delete Exchange backup with ID 1234abcd-12ab-cd34-56de-1234abcd
corso backup delete exchange --backup 1234abcd-12ab-cd34-56de-1234abcd`
- exchangeServiceCommandDetailsExamples = `# Explore Alice's items in backup 1234abcd-12ab-cd34-56de-1234abcd
-corso backup details exchange --backup 1234abcd-12ab-cd34-56de-1234abcd --mailbox alice@example.com
+ exchangeServiceCommandDetailsExamples = `# Explore items in Alice's latest backup (1234abcd...)
+corso backup details exchange --backup 1234abcd-12ab-cd34-56de-1234abcd
-# Explore Alice's emails with subject containing "Hello world" in folder "Inbox" from a specific backup
+# Explore emails in the folder "Inbox" with subject containing "Hello world"
corso backup details exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
-  --mailbox alice@example.com --email-subject "Hello world" --email-folder Inbox
+  --email-subject "Hello world" --email-folder Inbox
-# Explore Bobs's events occurring after start of 2022 from a specific backup
+# Explore calendar events occurring after start of 2022
corso backup details exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
-  --mailbox bob@example.com --event-starts-after 2022-01-01T00:00:00
+  --event-starts-after 2022-01-01T00:00:00
-# Explore Alice's contacts with name containing Andy from a specific backup
+# Explore contacts named Andy
corso backup details exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
-  --mailbox alice@example.com --contact-name Andy`
+  --contact-name Andy`
)
// called by backup.go to map subcommands to provider-specific handling.
@@ -88,7 +87,9 @@ func addExchangeCommands(cmd *cobra.Command) *cobra.Command {
  options.AddFetchParallelismFlag(c)
  options.AddFailFastFlag(c)
  options.AddDisableIncrementalsFlag(c)
+ options.AddDisableDeltaFlag(c)
  options.AddEnableImmutableIDFlag(c)
+ options.AddDisableConcurrencyLimiterFlag(c)
case listCommand:
  c, fs = utils.AddCommand(cmd, exchangeListCmd())
@@ -152,7 +153,7 @@ func createExchangeCmd(cmd *cobra.Command, args []string) error {
    return err
  }
- r, acct, err := getAccountAndConnect(ctx)
+ r, acct, err := utils.GetAccountAndConnect(ctx)
  if err != nil {
    return Only(ctx, err)
  }
@@ -161,10 +162,7 @@
  sel := exchangeBackupCreateSelectors(utils.UserFV, utils.CategoryDataFV)
- // TODO: log/print recoverable errors
- errs := fault.New(false)
- ins, err := m365.UsersMap(ctx, *acct, errs)
+ ins, err := utils.UsersMap(ctx, *acct, fault.New(true))
  if err != nil {
    return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 users"))
  }
@@ -264,7 +262,7 @@ func detailsExchangeCmd(cmd *cobra.Command, args []string) error {
  ctx := cmd.Context()
  opts := utils.MakeExchangeOpts(cmd)
- r, _, err := getAccountAndConnect(ctx)
+ r, _, err := utils.GetAccountAndConnect(ctx)
  if err != nil {
    return Only(ctx, err)
  }


@@ -18,7 +18,7 @@ import (
  "github.com/alcionai/corso/src/cli/config"
  "github.com/alcionai/corso/src/cli/print"
  "github.com/alcionai/corso/src/cli/utils"
- "github.com/alcionai/corso/src/internal/common"
+ "github.com/alcionai/corso/src/internal/common/idname"
  "github.com/alcionai/corso/src/internal/connector/exchange"
  "github.com/alcionai/corso/src/internal/operations"
  "github.com/alcionai/corso/src/internal/tester"
@@ -54,7 +54,6 @@ func TestNoBackupExchangeE2ESuite(t *testing.T) {
  suite.Run(t, &NoBackupExchangeE2ESuite{Suite: tester.NewE2ESuite(
    t,
    [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
-   tester.CorsoCITests,
  )})
}
@@ -120,7 +119,6 @@ func TestBackupExchangeE2ESuite(t *testing.T) {
  suite.Run(t, &BackupExchangeE2ESuite{Suite: tester.NewE2ESuite(
    t,
    [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
-   tester.CorsoCITests,
  )})
}
@@ -235,7 +233,6 @@ func TestPreparedBackupExchangeE2ESuite(t *testing.T) {
  suite.Run(t, &PreparedBackupExchangeE2ESuite{Suite: tester.NewE2ESuite(
    t,
    [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
-   tester.CorsoCITests,
  )})
}
@@ -256,13 +253,8 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {
  suite.backupOps = make(map[path.CategoryType]string)
  var (
    users = []string{suite.m365UserID}
-   idToName = map[string]string{suite.m365UserID: suite.m365UserID}
-   nameToID = map[string]string{suite.m365UserID: suite.m365UserID}
-   ins      = common.IDsNames{
-     IDToName: idToName,
-     NameToID: nameToID,
-   }
+   ins   = idname.NewCache(map[string]string{suite.m365UserID: suite.m365UserID})
  )
  for _, set := range []path.CategoryType{email, contacts, events} {
@@ -495,7 +487,6 @@ func TestBackupDeleteExchangeE2ESuite(t *testing.T) {
    Suite: tester.NewE2ESuite(
      t,
      [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
-     tester.CorsoCITests,
    ),
  })
}


@@ -43,6 +43,7 @@ func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
  utils.UserFN,
  utils.CategoryDataFN,
  options.DisableIncrementalsFN,
+ options.DisableDeltaFN,
  options.FailFastFN,
  options.FetchParallelismFN,
  options.SkipReduceFN,


@@ -46,7 +46,7 @@ func prepM365Test(
  vpr, cfgFP := tester.MakeTempTestConfigClone(t, force)
  ctx = config.SetViper(ctx, vpr)
- repo, err := repository.Initialize(ctx, acct, st, control.Options{})
+ repo, err := repository.Initialize(ctx, acct, st, control.Defaults())
  require.NoError(t, err, clues.ToCore(err))
  return acct, st, repo, vpr, recorder, cfgFP


@@ -17,7 +17,6 @@ import (
  "github.com/alcionai/corso/src/pkg/path"
  "github.com/alcionai/corso/src/pkg/repository"
  "github.com/alcionai/corso/src/pkg/selectors"
- "github.com/alcionai/corso/src/pkg/services/m365"
)
// ------------------------------------------------------------------------------------------------
@@ -44,16 +43,16 @@ corso backup create onedrive --user '*'`
  oneDriveServiceCommandDeleteExamples = `# Delete OneDrive backup with ID 1234abcd-12ab-cd34-56de-1234abcd
corso backup delete onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd`
- oneDriveServiceCommandDetailsExamples = `# Explore Alice's files from backup 1234abcd-12ab-cd34-56de-1234abcd
-corso backup details onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd --user alice@example.com
+ oneDriveServiceCommandDetailsExamples = `# Explore items in Bob's latest backup (1234abcd...)
+corso backup details onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd
-# Explore Alice or Bob's files with name containing "Fiscal 22" in folder "Reports"
+# Explore files in the folder "Reports" named "Fiscal 22"
corso backup details onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd \
-  --user alice@example.com,bob@example.com --file-name "Fiscal 22" --folder "Reports"
+  --file-name "Fiscal 22" --folder "Reports"
-# Explore Alice's files created before end of 2015 from a specific backup
+# Explore files created before the end of 2015
corso backup details onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd \
-  --user alice@example.com --file-created-before 2015-01-01T00:00:00`
+  --file-created-before 2015-01-01T00:00:00`
)
// called by backup.go to map subcommands to provider-specific handling.
@@ -135,7 +134,7 @@ func createOneDriveCmd(cmd *cobra.Command, args []string) error {
    return err
  }
- r, acct, err := getAccountAndConnect(ctx)
+ r, acct, err := utils.GetAccountAndConnect(ctx)
  if err != nil {
    return Only(ctx, err)
  }
@@ -144,10 +143,7 @@
  sel := oneDriveBackupCreateSelectors(utils.UserFV)
- // TODO: log/print recoverable errors
- errs := fault.New(false)
- ins, err := m365.UsersMap(ctx, *acct, errs)
+ ins, err := utils.UsersMap(ctx, *acct, fault.New(true))
  if err != nil {
    return Only(ctx, clues.Wrap(err, "Failed to retrieve M365 users"))
  }
@@ -224,7 +220,7 @@ func detailsOneDriveCmd(cmd *cobra.Command, args []string) error {
  ctx := cmd.Context()
  opts := utils.MakeOneDriveOpts(cmd)
- r, _, err := getAccountAndConnect(ctx)
+ r, _, err := utils.GetAccountAndConnect(ctx)
  if err != nil {
    return Only(ctx, err)
  }


@@ -16,12 +16,13 @@ import (
  "github.com/alcionai/corso/src/cli/config"
  "github.com/alcionai/corso/src/cli/print"
  "github.com/alcionai/corso/src/cli/utils"
- "github.com/alcionai/corso/src/internal/common"
+ "github.com/alcionai/corso/src/internal/common/idname"
  "github.com/alcionai/corso/src/internal/operations"
  "github.com/alcionai/corso/src/internal/tester"
  "github.com/alcionai/corso/src/pkg/account"
  "github.com/alcionai/corso/src/pkg/repository"
  "github.com/alcionai/corso/src/pkg/selectors"
+ selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
  "github.com/alcionai/corso/src/pkg/storage"
)
@@ -44,9 +45,7 @@ func TestNoBackupOneDriveE2ESuite(t *testing.T) {
  suite.Run(t, &NoBackupOneDriveE2ESuite{
    Suite: tester.NewE2ESuite(
      t,
-     [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
-     tester.CorsoCITests,
-   ),
+     [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs}),
  })
}
@@ -148,9 +147,7 @@ func TestBackupDeleteOneDriveE2ESuite(t *testing.T) {
  suite.Run(t, &BackupDeleteOneDriveE2ESuite{
    Suite: tester.NewE2ESuite(
      t,
-     [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
-     tester.CorsoCITests,
-   ),
+     [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs}),
  })
}
@@ -171,17 +168,12 @@ func (suite *BackupDeleteOneDriveE2ESuite) SetupSuite() {
  var (
    m365UserID = tester.M365UserID(t)
    users = []string{m365UserID}
-   idToName = map[string]string{m365UserID: m365UserID}
-   nameToID = map[string]string{m365UserID: m365UserID}
-   ins      = common.IDsNames{
-     IDToName: idToName,
-     NameToID: nameToID,
-   }
+   ins   = idname.NewCache(map[string]string{m365UserID: m365UserID})
  )
  // some tests require an existing backup
  sel := selectors.NewOneDriveBackup(users)
- sel.Include(sel.Folders(selectors.Any()))
+ sel.Include(selTD.OneDriveBackupFolderScope(sel))
  backupOp, err := suite.repo.NewBackupWithLookup(ctx, sel.Selector, ins)
  require.NoError(t, err, clues.ToCore(err))


@@ -12,7 +12,7 @@ import (
  "github.com/alcionai/corso/src/cli/options"
  . "github.com/alcionai/corso/src/cli/print"
  "github.com/alcionai/corso/src/cli/utils"
- "github.com/alcionai/corso/src/internal/common"
+ "github.com/alcionai/corso/src/internal/common/idname"
  "github.com/alcionai/corso/src/internal/data"
  "github.com/alcionai/corso/src/pkg/backup/details"
  "github.com/alcionai/corso/src/pkg/fault"
@@ -40,10 +40,10 @@
)
const (
- sharePointServiceCommandCreateExamples = `# Backup SharePoint data for a Site
-corso backup create sharepoint --site <siteURL>
+ sharePointServiceCommandCreateExamples = `# Backup SharePoint data in the HR Site
+corso backup create sharepoint --site https://example.com/hr
-# Backup SharePoint for two sites: HR and Team
+# Backup SharePoint for the HR and Team sites
corso backup create sharepoint --site https://example.com/hr,https://example.com/team
# Backup all SharePoint data for all Sites
@@ -52,16 +52,20 @@ corso backup create sharepoint --site '*'`
  sharePointServiceCommandDeleteExamples = `# Delete SharePoint backup with ID 1234abcd-12ab-cd34-56de-1234abcd
corso backup delete sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd`
- sharePointServiceCommandDetailsExamples = `# Explore a site's files from backup 1234abcd-12ab-cd34-56de-1234abcd
+ sharePointServiceCommandDetailsExamples = `# Explore items in the HR site's latest backup (1234abcd...)
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd
-# Find all files that were created before a certain date.
+# Explore files in the folder "Reports" named "Fiscal 22"
+corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
+  --file-name "Fiscal 22" --folder "Reports"
+# Explore files in the folder "Display Templates/Style Sheets" created before the end of 2015.
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
  --file-created-before 2015-01-01T00:00:00 --folder "Display Templates/Style Sheets"
-# Find all files within a specific library.
+# Explore all files within the document library "Work Documents"
corso backup details sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
-  --library documents --folder "Display Templates/Style Sheets"
+  --library "Work Documents"
`
)
@@ -146,7 +150,7 @@ func createSharePointCmd(cmd *cobra.Command, args []string) error {
    return err
  }
- r, acct, err := getAccountAndConnect(ctx)
+ r, acct, err := utils.GetAccountAndConnect(ctx)
  if err != nil {
    return Only(ctx, err)
  }
@@ -203,7 +207,7 @@ func validateSharePointBackupCreateFlags(sites, weburls, cats []string) error {
// TODO: users might specify a data type, this only supports AllData().
func sharePointBackupCreateSelectors(
  ctx context.Context,
- ins common.IDNameSwapper,
+ ins idname.Cacher,
  sites, weburls, cats []string,
) (*selectors.SharePointBackup, error) {
  if len(sites) == 0 && len(weburls) == 0 {
@@ -223,7 +227,7 @@ func sharePointBackupCreateSelectors(
  return addCategories(sel, cats), nil
}
-func includeAllSitesWithCategories(ins common.IDNameSwapper, categories []string) *selectors.SharePointBackup {
+func includeAllSitesWithCategories(ins idname.Cacher, categories []string) *selectors.SharePointBackup {
  return addCategories(selectors.NewSharePointBackup(ins.IDs()), categories)
}
@@ -308,7 +312,7 @@ func detailsSharePointCmd(cmd *cobra.Command, args []string) error {
  ctx := cmd.Context()
  opts := utils.MakeSharePointOpts(cmd)
- r, _, err := getAccountAndConnect(ctx)
+ r, _, err := utils.GetAccountAndConnect(ctx)
  if err != nil {
    return Only(ctx, err)
  }


@@ -16,7 +16,7 @@ import (
  "github.com/alcionai/corso/src/cli/config"
  "github.com/alcionai/corso/src/cli/print"
  "github.com/alcionai/corso/src/cli/utils"
- "github.com/alcionai/corso/src/internal/common"
+ "github.com/alcionai/corso/src/internal/common/idname"
  "github.com/alcionai/corso/src/internal/operations"
  "github.com/alcionai/corso/src/internal/tester"
  "github.com/alcionai/corso/src/pkg/account"
@@ -45,7 +45,6 @@ func TestNoBackupSharePointE2ESuite(t *testing.T) {
  suite.Run(t, &NoBackupSharePointE2ESuite{Suite: tester.NewE2ESuite(
    t,
    [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
-   tester.CorsoCITests,
  )})
}
@@ -112,9 +111,7 @@ func TestBackupDeleteSharePointE2ESuite(t *testing.T) {
  suite.Run(t, &BackupDeleteSharePointE2ESuite{
    Suite: tester.NewE2ESuite(
      t,
-     [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
-     tester.CorsoCITests,
-   ),
+     [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs}),
  })
}
@@ -135,12 +132,7 @@ func (suite *BackupDeleteSharePointE2ESuite) SetupSuite() {
  var (
    m365SiteID = tester.M365SiteID(t)
    sites = []string{m365SiteID}
-   idToName = map[string]string{m365SiteID: m365SiteID}
-   nameToID = map[string]string{m365SiteID: m365SiteID}
-   ins      = common.IDsNames{
-     IDToName: idToName,
-     NameToID: nameToID,
-   }
+   ins   = idname.NewCache(map[string]string{m365SiteID: m365SiteID})
  )
  // some tests require an existing backup


@@ -12,7 +12,7 @@ import (
  "github.com/alcionai/corso/src/cli/options"
  "github.com/alcionai/corso/src/cli/utils"
  "github.com/alcionai/corso/src/cli/utils/testdata"
- "github.com/alcionai/corso/src/internal/common"
+ "github.com/alcionai/corso/src/internal/common/idname"
  "github.com/alcionai/corso/src/internal/tester"
  "github.com/alcionai/corso/src/pkg/selectors"
)
@@ -156,10 +156,7 @@ func (suite *SharePointUnitSuite) TestSharePointBackupCreateSelectors() {
  )
  var (
-   ins = common.IDsNames{
-     IDToName: map[string]string{id1: url1, id2: url2},
-     NameToID: map[string]string{url1: id1, url2: id2},
-   }
+   ins     = idname.NewCache(map[string]string{id1: url1, id2: url2})
    bothIDs = []string{id1, id2}
  )


@@ -6,7 +6,6 @@ import (
  "github.com/alcionai/clues"
  "github.com/spf13/viper"
- "github.com/alcionai/corso/src/cli/utils"
  "github.com/alcionai/corso/src/internal/common"
  "github.com/alcionai/corso/src/pkg/account"
  "github.com/alcionai/corso/src/pkg/credentials"
@@ -72,7 +71,7 @@ func configureAccount(
  }
  // ensure required properties are present
- if err := utils.RequireProps(map[string]string{
+ if err := requireProps(map[string]string{
    credentials.AzureClientID: m365Cfg.AzureClientID,
    credentials.AzureClientSecret: m365Cfg.AzureClientSecret,
    account.AzureTenantID: m365Cfg.AzureTenantID,


@@ -321,3 +321,15 @@ func mustMatchConfig(vpr *viper.Viper, m map[string]string) error {
  return nil
}
+// requireProps validates the existence of the properties
+// in the map. Expects the format map[propName]propVal.
+func requireProps(props map[string]string) error {
+ for name, val := range props {
+   if len(val) == 0 {
+     return clues.New(name + " is required to perform this command")
+   }
+ }
+ return nil
+}
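For illustration, a minimal sketch of a caller (hypothetical, not part of the commit; it would live in the same `config` package since the helper is unexported). The map keys become the error text:

```go
package config

import "fmt"

// Hypothetical caller showing how requireProps reports the first empty value
// it encounters. Note: Go map iteration order is random, so with several
// empty values the property named in the error may vary between runs.
func exampleRequireProps() {
	err := requireProps(map[string]string{
		"bucket":     "my-bucket",
		"passphrase": "", // empty, so an error is returned
	})

	fmt.Println(err) // passphrase is required to perform this command
}
```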


@@ -39,6 +39,27 @@ func TestConfigSuite(t *testing.T) {
  suite.Run(t, &ConfigSuite{Suite: tester.NewUnitSuite(t)})
}
+func (suite *ConfigSuite) TestRequireProps() {
+ table := []struct {
+   name     string
+   props    map[string]string
+   errCheck assert.ErrorAssertionFunc
+ }{
+   {
+     props:    map[string]string{"exists": "I have seen the fnords!"},
+     errCheck: assert.NoError,
+   },
+   {
+     props:    map[string]string{"not-exists": ""},
+     errCheck: assert.Error,
+   },
+ }
+ for _, test := range table {
+   err := requireProps(test.props)
+   test.errCheck(suite.T(), err, clues.ToCore(err))
+ }
+}
func (suite *ConfigSuite) TestReadRepoConfigBasic() {
  var (
    t = suite.T()


@@ -9,7 +9,6 @@ import (
  "github.com/aws/aws-sdk-go/aws/defaults"
  "github.com/spf13/viper"
- "github.com/alcionai/corso/src/cli/utils"
  "github.com/alcionai/corso/src/internal/common"
  "github.com/alcionai/corso/src/pkg/credentials"
  "github.com/alcionai/corso/src/pkg/storage"
@@ -112,7 +111,7 @@ func configureStorage(
  }
  // ensure required properties are present
- if err := utils.RequireProps(map[string]string{
+ if err := requireProps(map[string]string{
    storage.Bucket: s3Cfg.Bucket,
    credentials.CorsoPassphrase: corso.CorsoPassphrase,
  }); err != nil {


@@ -18,8 +18,10 @@ func Control() control.Options {
  opt.RestorePermissions = restorePermissionsFV
  opt.SkipReduce = skipReduceFV
  opt.ToggleFeatures.DisableIncrementals = disableIncrementalsFV
+ opt.ToggleFeatures.DisableDelta = disableDeltaFV
  opt.ToggleFeatures.ExchangeImmutableIDs = enableImmutableID
- opt.ItemFetchParallelism = fetchParallelismFV
+ opt.ToggleFeatures.DisableConcurrencyLimiter = disableConcurrencyLimiterFV
+ opt.Parallelism.ItemFetch = fetchParallelismFV
  return opt
}
@@ -29,13 +31,15 @@
// ---------------------------------------------------------------------------
const (
  FailFastFN = "fail-fast"
  FetchParallelismFN = "fetch-parallelism"
  NoStatsFN = "no-stats"
  RestorePermissionsFN = "restore-permissions"
  SkipReduceFN = "skip-reduce"
+ DisableDeltaFN = "disable-delta"
  DisableIncrementalsFN = "disable-incrementals"
  EnableImmutableIDFN = "enable-immutable-id"
+ DisableConcurrencyLimiterFN = "disable-concurrency-limiter"
)
var (
@@ -90,7 +94,10 @@ func AddFetchParallelismFlag(cmd *cobra.Command) {
// Feature Flags
// ---------------------------------------------------------------------------
-var disableIncrementalsFV bool
+var (
+ disableIncrementalsFV bool
+ disableDeltaFV        bool
+)
// Adds the hidden '--disable-incrementals' cli flag which, when set, disables
// incremental backups.
@@ -104,6 +111,18 @@ func AddDisableIncrementalsFlag(cmd *cobra.Command) {
  cobra.CheckErr(fs.MarkHidden(DisableIncrementalsFN))
}
+// Adds the hidden '--disable-delta' cli flag which, when set, disables
+// delta based backups.
+func AddDisableDeltaFlag(cmd *cobra.Command) {
+ fs := cmd.Flags()
+ fs.BoolVar(
+   &disableDeltaFV,
+   DisableDeltaFN,
+   false,
+   "Disable delta based data retrieval in backups.")
+ cobra.CheckErr(fs.MarkHidden(DisableDeltaFN))
+}
var enableImmutableID bool
// Adds the hidden '--enable-immutable-id' cli flag which, when set, enables
@@ -117,3 +136,18 @@ func AddEnableImmutableIDFlag(cmd *cobra.Command) {
    "Enable exchange immutable ID.")
  cobra.CheckErr(fs.MarkHidden(EnableImmutableIDFN))
}
+var disableConcurrencyLimiterFV bool
+// AddDisableConcurrencyLimiterFlag adds a hidden cli flag which, when set,
+// removes concurrency limits when communicating with the Graph API. This
+// flag is currently relevant only for Exchange backups.
+func AddDisableConcurrencyLimiterFlag(cmd *cobra.Command) {
+ fs := cmd.Flags()
+ fs.BoolVar(
+   &disableConcurrencyLimiterFV,
+   DisableConcurrencyLimiterFN,
+   false,
+   "Disable concurrency limiter middleware. Default: false")
+ cobra.CheckErr(fs.MarkHidden(DisableConcurrencyLimiterFN))
+}
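A minimal sketch (standalone, not from the commit) of how these hidden toggles are exercised end to end: register them on a command, parse args, then read the resulting control options. The command name and argument list here are hypothetical; the flag registration mirrors addExchangeCommands above, and the parse-then-read pattern mirrors the options unit test below.

```go
package main

import (
	"fmt"

	"github.com/spf13/cobra"

	"github.com/alcionai/corso/src/cli/options"
)

func main() {
	// Hypothetical command; in corso these flags hang off `backup create exchange`.
	cmd := &cobra.Command{
		Use: "test",
		Run: func(cmd *cobra.Command, args []string) {
			opt := options.Control()
			// Both toggles land on control.Options.ToggleFeatures.
			fmt.Println(opt.ToggleFeatures.DisableDelta)              // true
			fmt.Println(opt.ToggleFeatures.DisableConcurrencyLimiter) // true
		},
	}

	options.AddDisableDeltaFlag(cmd)
	options.AddDisableConcurrencyLimiterFlag(cmd)

	// Hidden flags still parse normally; they are only omitted from --help.
	cmd.SetArgs([]string{"--disable-delta", "--disable-concurrency-limiter"})
	cobra.CheckErr(cmd.Execute())
}
```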


@@ -28,10 +28,12 @@ func (suite *OptionsUnitSuite) TestAddExchangeCommands() {
  Run: func(cmd *cobra.Command, args []string) {
    assert.True(t, failFastFV, FailFastFN)
    assert.True(t, disableIncrementalsFV, DisableIncrementalsFN)
+   assert.True(t, disableDeltaFV, DisableDeltaFN)
    assert.True(t, noStatsFV, NoStatsFN)
    assert.True(t, restorePermissionsFV, RestorePermissionsFN)
    assert.True(t, skipReduceFV, SkipReduceFN)
    assert.Equal(t, 2, fetchParallelismFV, FetchParallelismFN)
+   assert.True(t, disableConcurrencyLimiterFV, DisableConcurrencyLimiterFN)
  },
}
@@ -40,21 +42,23 @@
  AddFailFastFlag(cmd)
  AddDisableIncrementalsFlag(cmd)
+ AddDisableDeltaFlag(cmd)
  AddRestorePermissionsFlag(cmd)
  AddSkipReduceFlag(cmd)
  AddFetchParallelismFlag(cmd)
+ AddDisableConcurrencyLimiterFlag(cmd)
  // Test arg parsing for few args
  cmd.SetArgs([]string{
    "test",
    "--" + FailFastFN,
    "--" + DisableIncrementalsFN,
+   "--" + DisableDeltaFN,
    "--" + NoStatsFN,
    "--" + RestorePermissionsFN,
    "--" + SkipReduceFN,
    "--" + FetchParallelismFN, "2",
+   "--" + DisableConcurrencyLimiterFN,
  })
  err := cmd.Execute()


@@ -50,8 +50,8 @@ func AddOutputFlag(cmd *cobra.Command) {
  cobra.CheckErr(fs.MarkHidden("json-debug"))
}
-// JSONFormat returns true if the printer plans to output as json.
-func JSONFormat() bool {
+// DisplayJSONFormat returns true if the printer plans to output as json.
+func DisplayJSONFormat() bool {
  return outputAsJSON || outputAsJSONDebug
}


@@ -1,12 +1,21 @@
package repo
import (
+ "strings"
+
+ "github.com/alcionai/clues"
  "github.com/spf13/cobra"
+ "golang.org/x/exp/maps"
+
+ "github.com/alcionai/corso/src/cli/print"
+ "github.com/alcionai/corso/src/cli/utils"
+ "github.com/alcionai/corso/src/pkg/control/repository"
)
const (
  initCommand    = "init"
  connectCommand = "connect"
+ maintenanceCommand = "maintenance"
)
var repoCommands = []func(cmd *cobra.Command) *cobra.Command{
@@ -18,15 +27,24 @@ func AddCommands(cmd *cobra.Command) {
  var (
    // Get new instances so that setting the context during tests works
    // properly.
    repoCmd    = repoCmd()
    initCmd    = initCmd()
    connectCmd = connectCmd()
+   maintenanceCmd = maintenanceCmd()
  )
  cmd.AddCommand(repoCmd)
  repoCmd.AddCommand(initCmd)
  repoCmd.AddCommand(connectCmd)
+ utils.AddCommand(
+   repoCmd,
+   maintenanceCmd,
+   utils.HideCommand(),
+   utils.MarkPreReleaseCommand())
+ utils.AddMaintenanceModeFlag(maintenanceCmd)
+ utils.AddForceMaintenanceFlag(maintenanceCmd)
  for _, addRepoTo := range repoCommands {
    addRepoTo(initCmd)
    addRepoTo(connectCmd)
@@ -84,3 +102,65 @@ func connectCmd() *cobra.Command {
func handleConnectCmd(cmd *cobra.Command, args []string) error {
  return cmd.Help()
}
+func maintenanceCmd() *cobra.Command {
+ return &cobra.Command{
+   Use:   maintenanceCommand,
+   Short: "Run maintenance on an existing repository",
+   Long:  `Run maintenance on an existing repository to optimize performance and storage use`,
+   RunE:  handleMaintenanceCmd,
+   Args:  cobra.NoArgs,
+ }
+}
+func handleMaintenanceCmd(cmd *cobra.Command, args []string) error {
+ ctx := cmd.Context()
+ t, err := getMaintenanceType(utils.MaintenanceModeFV)
+ if err != nil {
+   return err
+ }
+ r, _, err := utils.GetAccountAndConnect(ctx)
+ if err != nil {
+   return print.Only(ctx, err)
+ }
+ defer utils.CloseRepo(ctx, r)
+ m, err := r.NewMaintenance(
+   ctx,
+   repository.Maintenance{
+     Type:   t,
+     Safety: repository.FullMaintenanceSafety,
+     Force:  utils.ForceMaintenanceFV,
+   })
+ if err != nil {
+   return print.Only(ctx, err)
+ }
+ err = m.Run(ctx)
+ if err != nil {
+   return print.Only(ctx, err)
+ }
+ return nil
+}
+func getMaintenanceType(t string) (repository.MaintenanceType, error) {
+ res, ok := repository.StringToMaintenanceType[t]
+ if !ok {
+   modes := maps.Keys(repository.StringToMaintenanceType)
+   allButLast := []string{}
+   for i := 0; i < len(modes)-1; i++ {
+     allButLast = append(allButLast, string(modes[i]))
+   }
+   valuesStr := strings.Join(allButLast, ", ") + " or " + string(modes[len(modes)-1])
+   return res, clues.New(t + " is an unrecognized maintenance mode; must be one of " + valuesStr)
+ }
+ return res, nil
+}
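Once registered, the hidden subcommand is invoked like any other repo command, e.g. `corso repo maintenance --mode metadata` (a hedged example: the subcommand name and the `--mode`/`--force` flags come from the constants above, and the mode strings from `repository.MetadataMaintenance`/`repository.CompleteMaintenance` as described in the flag help). The hidden `--force` flag maps directly onto `repository.Maintenance.Force`, so it should only be used when the caller can guarantee no concurrent maintenance runs against the same repo.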

src/cli/repo/repo_test.go (new file, 41 lines)

@@ -0,0 +1,41 @@
+package repo
+
+import (
+ "testing"
+
+ "github.com/spf13/cobra"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "github.com/stretchr/testify/suite"
+
+ "github.com/alcionai/corso/src/internal/tester"
+)
+
+type RepoUnitSuite struct {
+ tester.Suite
+}
+
+func TestRepoUnitSuite(t *testing.T) {
+ suite.Run(t, &RepoUnitSuite{Suite: tester.NewUnitSuite(t)})
+}
+
+func (suite *RepoUnitSuite) TestAddRepoCommands() {
+ t := suite.T()
+ cmd := &cobra.Command{}
+ AddCommands(cmd)
+
+ var found bool
+
+ // This is the repo command.
+ repoCmds := cmd.Commands()
+ require.Len(t, repoCmds, 1)
+
+ for _, c := range repoCmds[0].Commands() {
+   if c.Use == maintenanceCommand {
+     found = true
+   }
+ }
+
+ assert.True(t, found, "looking for maintenance command")
+}
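For reference, this unit suite runs with the standard tooling, e.g. `go test ./cli/repo/...` from the `src` directory (a hedged example; the package path follows the file header above).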


@@ -25,7 +25,6 @@ func TestS3E2ESuite(t *testing.T) {
  suite.Run(t, &S3E2ESuite{Suite: tester.NewE2ESuite(
    t,
    [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
-   tester.CorsoCITests,
  )})
}
@@ -194,7 +193,7 @@ func (suite *S3E2ESuite) TestConnectS3Cmd() {
  ctx = config.SetViper(ctx, vpr)
  // init the repo first
- _, err = repository.Initialize(ctx, account.Account{}, st, control.Options{})
+ _, err = repository.Initialize(ctx, account.Account{}, st, control.Defaults())
  require.NoError(t, err, clues.ToCore(err))
  // then test it


@@ -6,14 +6,12 @@ import (
  "github.com/spf13/cobra"
  "github.com/spf13/pflag"
- "github.com/alcionai/corso/src/cli/config"
  "github.com/alcionai/corso/src/cli/options"
  . "github.com/alcionai/corso/src/cli/print"
  "github.com/alcionai/corso/src/cli/utils"
- "github.com/alcionai/corso/src/internal/common"
+ "github.com/alcionai/corso/src/internal/common/dttm"
  "github.com/alcionai/corso/src/internal/data"
  "github.com/alcionai/corso/src/pkg/control"
- "github.com/alcionai/corso/src/pkg/repository"
)
// called by restore.go to map subcommands to provider-specific handling.
@@ -46,18 +44,19 @@ const (
  exchangeServiceCommand = "exchange"
  exchangeServiceCommandUseSuffix = "--backup <backupId>"
- exchangeServiceCommandRestoreExamples = `# Restore emails with ID 98765abcdef and 12345abcdef from a specific backup
+ //nolint:lll
+ exchangeServiceCommandRestoreExamples = `# Restore emails with ID 98765abcdef and 12345abcdef from Alice's last backup (1234abcd...)
corso restore exchange --backup 1234abcd-12ab-cd34-56de-1234abcd --email 98765abcdef,12345abcdef
-# Restore Alice's emails with subject containing "Hello world" in "Inbox" from a specific backup
+# Restore emails with subject containing "Hello world" in the "Inbox"
corso restore exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
-  --user alice@example.com --email-subject "Hello world" --email-folder Inbox
+  --email-subject "Hello world" --email-folder Inbox
-# Restore Bobs's entire calendar from a specific backup
+# Restore an entire calendar
corso restore exchange --backup 1234abcd-12ab-cd34-56de-1234abcd \
-  --user bob@example.com --event-calendar Calendar
+  --event-calendar Calendar
-# Restore contact with ID abdef0101 from a specific backup
+# Restore the contact with ID abdef0101
corso restore exchange --backup 1234abcd-12ab-cd34-56de-1234abcd --contact abdef0101`
)
@@ -90,19 +89,14 @@ func restoreExchangeCmd(cmd *cobra.Command, args []string) error {
    return err
  }
- cfg, err := config.GetConfigRepoDetails(ctx, true, nil)
+ r, _, err := utils.GetAccountAndConnect(ctx)
  if err != nil {
    return Only(ctx, err)
  }
- r, err := repository.Connect(ctx, cfg.Account, cfg.Storage, options.Control())
- if err != nil {
-   return Only(ctx, clues.Wrap(err, "Failed to connect to the "+cfg.Storage.Provider.String()+" repository"))
- }
  defer utils.CloseRepo(ctx, r)
- dest := control.DefaultRestoreDestination(common.SimpleDateTime)
+ dest := control.DefaultRestoreDestination(dttm.HumanReadable)
  Infof(ctx, "Restoring to folder %s", dest.ContainerName)
  sel := utils.IncludeExchangeRestoreDataSelectors(opts)
@@ -122,7 +116,7 @@ func restoreExchangeCmd(cmd *cobra.Command, args []string) error {
    return Only(ctx, clues.Wrap(err, "Failed to run Exchange restore"))
  }
- ds.PrintEntries(ctx)
+ ds.Items().PrintEntries(ctx)
  return nil
}


@@ -13,7 +13,7 @@ import (
  "github.com/alcionai/corso/src/cli"
  "github.com/alcionai/corso/src/cli/config"
  "github.com/alcionai/corso/src/cli/utils"
- "github.com/alcionai/corso/src/internal/common"
+ "github.com/alcionai/corso/src/internal/common/idname"
  "github.com/alcionai/corso/src/internal/connector/exchange"
  "github.com/alcionai/corso/src/internal/operations"
  "github.com/alcionai/corso/src/internal/tester"
@@ -48,9 +48,7 @@ func TestRestoreExchangeE2ESuite(t *testing.T) {
  suite.Run(t, &RestoreExchangeE2ESuite{
    Suite: tester.NewE2ESuite(
      t,
-     [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs},
-     tester.CorsoCITests,
-   ),
+     [][]string{tester.AWSStorageCredEnvs, tester.M365AcctCredEnvs}),
  })
}
@@ -77,13 +75,8 @@ func (suite *RestoreExchangeE2ESuite) SetupSuite() {
  suite.m365UserID = strings.ToLower(tester.M365UserID(t))
  var (
    users = []string{suite.m365UserID}
-   idToName = map[string]string{suite.m365UserID: suite.m365UserID}
-   nameToID = map[string]string{suite.m365UserID: suite.m365UserID}
-   ins      = common.IDsNames{
-     IDToName: idToName,
-     NameToID: nameToID,
-   }
+   ins   = idname.NewCache(map[string]string{suite.m365UserID: suite.m365UserID})
  )
  // init the repo first


@@ -6,14 +6,12 @@ import (
  "github.com/spf13/cobra"
  "github.com/spf13/pflag"
- "github.com/alcionai/corso/src/cli/config"
  "github.com/alcionai/corso/src/cli/options"
  . "github.com/alcionai/corso/src/cli/print"
  "github.com/alcionai/corso/src/cli/utils"
- "github.com/alcionai/corso/src/internal/common"
+ "github.com/alcionai/corso/src/internal/common/dttm"
  "github.com/alcionai/corso/src/internal/data"
  "github.com/alcionai/corso/src/pkg/control"
- "github.com/alcionai/corso/src/pkg/repository"
)
// called by restore.go to map subcommands to provider-specific handling.
@@ -48,19 +46,19 @@ const (
  oneDriveServiceCommand = "onedrive"
  oneDriveServiceCommandUseSuffix = "--backup <backupId>"
- oneDriveServiceCommandRestoreExamples = `# Restore file with ID 98765abcdef
+ oneDriveServiceCommandRestoreExamples = `# Restore file with ID 98765abcdef in Bob's last backup (1234abcd...)
corso restore onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef
-# Restore file with ID 98765abcdef along with its associated permissions
+# Restore the file with ID 98765abcdef along with its associated permissions
corso restore onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef --restore-permissions
-# Restore Alice's file named "FY2021 Planning.xlsx in "Documents/Finance Reports" from a specific backup
+# Restore files named "FY2021 Planning.xlsx" in "Documents/Finance Reports"
corso restore onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd \
-  --user alice@example.com --file "FY2021 Planning.xlsx" --folder "Documents/Finance Reports"
+  --file "FY2021 Planning.xlsx" --folder "Documents/Finance Reports"
-# Restore all files from Bob's folder that were created before 2020 when captured in a specific backup
+# Restore all files and folders in folder "Documents/Finance Reports" that were created before 2020
corso restore onedrive --backup 1234abcd-12ab-cd34-56de-1234abcd
-  --user bob@example.com --folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
+  --folder "Documents/Finance Reports" --file-created-before 2020-01-01T00:00:00`
)
// `corso restore onedrive [<flag>...]`
@@ -92,19 +90,14 @@ func restoreOneDriveCmd(cmd *cobra.Command, args []string) error {
    return err
  }
- cfg, err := config.GetConfigRepoDetails(ctx, true, nil)
+ r, _, err := utils.GetAccountAndConnect(ctx)
  if err != nil {
    return Only(ctx, err)
  }
- r, err := repository.Connect(ctx, cfg.Account, cfg.Storage, options.Control())
- if err != nil {
-   return Only(ctx, clues.Wrap(err, "Failed to connect to the "+cfg.Storage.Provider.String()+" repository"))
- }
  defer utils.CloseRepo(ctx, r)
- dest := control.DefaultRestoreDestination(common.SimpleDateTimeOneDrive)
+ dest := control.DefaultRestoreDestination(dttm.HumanReadableDriveItem)
  Infof(ctx, "Restoring to folder %s", dest.ContainerName)
  sel := utils.IncludeOneDriveRestoreDataSelectors(opts)
@@ -124,7 +117,7 @@ func restoreOneDriveCmd(cmd *cobra.Command, args []string) error {
    return Only(ctx, clues.Wrap(err, "Failed to run OneDrive restore"))
  }
- ds.PrintEntries(ctx)
+ ds.Items().PrintEntries(ctx)
  return nil
}


@@ -6,14 +6,12 @@ import (
  "github.com/spf13/cobra"
  "github.com/spf13/pflag"
- "github.com/alcionai/corso/src/cli/config"
  "github.com/alcionai/corso/src/cli/options"
  . "github.com/alcionai/corso/src/cli/print"
  "github.com/alcionai/corso/src/cli/utils"
- "github.com/alcionai/corso/src/internal/common"
+ "github.com/alcionai/corso/src/internal/common/dttm"
  "github.com/alcionai/corso/src/internal/data"
  "github.com/alcionai/corso/src/pkg/control"
- "github.com/alcionai/corso/src/pkg/repository"
)
// called by restore.go to map subcommands to provider-specific handling.
@@ -35,6 +33,8 @@ func addSharePointCommands(cmd *cobra.Command) *cobra.Command {
  utils.AddBackupIDFlag(c, true)
  utils.AddSharePointDetailsAndRestoreFlags(c)
+ options.AddRestorePermissionsFlag(c)
  options.AddFailFastFlag(c)
}
@@ -46,20 +46,24 @@ const (
  sharePointServiceCommandUseSuffix = "--backup <backupId>"
  //nolint:lll
- sharePointServiceCommandRestoreExamples = `# Restore file with ID 98765abcdef
+ sharePointServiceCommandRestoreExamples = `# Restore file with ID 98765abcdef in Bob's latest backup (1234abcd...)
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd --file 98765abcdef
-# Restore a file named "ServerRenderTemplate.xsl in "Display Templates/Style Sheets".
+# Restore the file with ID 98765abcdef along with its associated permissions
+corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
+  --file 98765abcdef --restore-permissions
+# Restore files named "ServerRenderTemplate.xsl" in the folder "Display Templates/Style Sheets".
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd \
  --file "ServerRenderTemplate.xsl" --folder "Display Templates/Style Sheets"
-# Restore all files that were created before 2020.
+# Restore all files in the folder "Display Templates/Style Sheets" that were created before 2020.
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd
  --file-created-before 2020-01-01T00:00:00 --folder "Display Templates/Style Sheets"
-# Restore all files in a certain library.
+# Restore all files in the "Documents" library.
corso restore sharepoint --backup 1234abcd-12ab-cd34-56de-1234abcd
-  --library documents --folder "Display Templates/Style Sheets" `
+  --library Documents --folder "Display Templates/Style Sheets" `
)
// `corso restore sharepoint [<flag>...]`
@@ -91,19 +95,14 @@ func restoreSharePointCmd(cmd *cobra.Command, args []string) error {
    return err
  }
- cfg, err := config.GetConfigRepoDetails(ctx, true, nil)
+ r, _, err := utils.GetAccountAndConnect(ctx)
  if err != nil {
    return Only(ctx, err)
  }
- r, err := repository.Connect(ctx, cfg.Account, cfg.Storage, options.Control())
- if err != nil {
-   return Only(ctx, clues.Wrap(err, "Failed to connect to the "+cfg.Storage.Provider.String()+" repository"))
- }
  defer utils.CloseRepo(ctx, r)
- dest := control.DefaultRestoreDestination(common.SimpleDateTimeOneDrive)
+ dest := control.DefaultRestoreDestination(dttm.HumanReadableDriveItem)
  Infof(ctx, "Restoring to folder %s", dest.ContainerName)
  sel := utils.IncludeSharePointRestoreDataSelectors(ctx, opts)
@@ -123,7 +122,7 @@ func restoreSharePointCmd(cmd *cobra.Command, args []string) error {
    return Only(ctx, clues.Wrap(err, "Failed to run SharePoint restore"))
  }
- ds.PrintEntries(ctx)
+ ds.Items().PrintEntries(ctx)
  return nil
}


@@ -113,7 +113,7 @@ func AddExchangeDetailsAndRestoreFlags(cmd *cobra.Command) {
  fs.StringSliceVar(
    &EmailFV,
    EmailFN, nil,
-   "Select emails by email ID; accepts '"+Wildcard+"' to select all emails.")
+   "Select email messages by ID; accepts '"+Wildcard+"' to select all emails.")
  fs.StringSliceVar(
    &EmailFolderFV,
    EmailFolderFN, nil,


@@ -8,7 +8,7 @@ import (
  "github.com/stretchr/testify/suite"
  "github.com/alcionai/corso/src/cli/utils"
- "github.com/alcionai/corso/src/internal/common"
+ "github.com/alcionai/corso/src/internal/common/dttm"
  "github.com/alcionai/corso/src/internal/tester"
  "github.com/alcionai/corso/src/pkg/selectors"
)
@@ -42,7 +42,7 @@ func (suite *ExchangeUtilsSuite) TestValidateRestoreFlags() {
  {
    name:     "valid time",
    backupID: "bid",
-   opts:     utils.ExchangeOpts{EmailReceivedAfter: common.Now()},
+   opts:     utils.ExchangeOpts{EmailReceivedAfter: dttm.Now()},
    expect:   assert.NoError,
  },
  {


@@ -8,8 +8,10 @@ import (
  "github.com/spf13/cobra"
  "github.com/spf13/pflag"
- "github.com/alcionai/corso/src/internal/common"
+ "github.com/alcionai/corso/src/internal/common/dttm"
+ "github.com/alcionai/corso/src/pkg/control/repository"
  "github.com/alcionai/corso/src/pkg/path"
+ "github.com/alcionai/corso/src/pkg/selectors"
)
// common flag vars (eg: FV)
@@ -36,6 +38,9 @@ var (
  // for selection of data by category. eg: `--data email,contacts`
  CategoryDataFV []string
+
+ MaintenanceModeFV  string
+ ForceMaintenanceFV bool
)
// common flag names (eg: FN)
@@ -58,6 +63,10 @@ const (
  FileCreatedBeforeFN = "file-created-before"
  FileModifiedAfterFN = "file-modified-after"
  FileModifiedBeforeFN = "file-modified-before"
+
+ // Maintenance stuff.
+ MaintenanceModeFN  = "mode"
+ ForceMaintenanceFN = "force"
)
// well-known flag values
@@ -167,6 +176,30 @@ func AddSiteFlag(cmd *cobra.Command) {
    "Backup data by site URL; accepts '"+Wildcard+"' to select all sites.")
}
+func AddMaintenanceModeFlag(cmd *cobra.Command) {
+ fs := cmd.Flags()
+ fs.StringVar(
+   &MaintenanceModeFV,
+   MaintenanceModeFN,
+   repository.CompleteMaintenance.String(),
+   "Type of maintenance operation to run. Pass '"+
+     repository.MetadataMaintenance.String()+"' to run a faster maintenance "+
+     "that does minimal clean-up and optimization. Pass '"+
+     repository.CompleteMaintenance.String()+"' to fully compact existing "+
+     "data and delete unused data.")
+ cobra.CheckErr(fs.MarkHidden(MaintenanceModeFN))
+}
+func AddForceMaintenanceFlag(cmd *cobra.Command) {
+ fs := cmd.Flags()
+ fs.BoolVar(
+   &ForceMaintenanceFV,
+   ForceMaintenanceFN,
+   false,
+   "Force maintenance. Caution: user must ensure this is not run concurrently on a single repo")
+ cobra.CheckErr(fs.MarkHidden(ForceMaintenanceFN))
+}
type PopulatedFlags map[string]struct{}
func (fs PopulatedFlags) populate(pf *pflag.Flag) {
@@ -198,7 +231,7 @@ func GetPopulatedFlags(cmd *cobra.Command) PopulatedFlags {
// IsValidTimeFormat returns true if the input is recognized as a
// supported format by the common time parser.
func IsValidTimeFormat(in string) bool {
- _, err := common.ParseTime(in)
+ _, err := dttm.ParseTime(in)
  return err == nil
}
@@ -215,6 +248,11 @@ func trimFolderSlash(folders []string) []string {
  res := make([]string, 0, len(folders))
  for _, p := range folders {
+   if p == string(path.PathSeparator) {
+     res = selectors.Any()
+     break
+   }
    // Use path package because it has logic to handle escaping already.
    res = append(res, path.TrimTrailingSlash(p))
  }

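The trimFolderSlash change above gives a lone separator special meaning: passing just "/" now expands to a match-everything selector instead of being trimmed away. A minimal sketch of the resulting behavior, assuming path.PathSeparator is '/':

package example

import (
	"github.com/alcionai/corso/src/pkg/path"
	"github.com/alcionai/corso/src/pkg/selectors"
)

// trimFolderSlash mirrors the logic added above.
func trimFolderSlash(folders []string) []string {
	res := make([]string, 0, len(folders))

	for _, p := range folders {
		// a bare "/" short-circuits to selecting all folders
		if p == string(path.PathSeparator) {
			res = selectors.Any()
			break
		}

		res = append(res, path.TrimTrailingSlash(p))
	}

	return res
}

// trimFolderSlash([]string{"/"})    -> selectors.Any(), i.e. match everything
// trimFolderSlash([]string{"a/b/"}) -> []string{"a/b"}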
View File

@ -8,6 +8,7 @@ import (
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path"
) )
type OneDriveUtilsSuite struct { type OneDriveUtilsSuite struct {
@ -26,6 +27,7 @@ func (suite *OneDriveUtilsSuite) TestIncludeOneDriveRestoreDataSelectors() {
containsOnly = []string{"contains"} containsOnly = []string{"contains"}
prefixOnly = []string{"/prefix"} prefixOnly = []string{"/prefix"}
containsAndPrefix = []string{"contains", "/prefix"} containsAndPrefix = []string{"contains", "/prefix"}
onlySlash = []string{string(path.PathSeparator)}
) )
table := []struct { table := []struct {
@ -87,6 +89,15 @@ func (suite *OneDriveUtilsSuite) TestIncludeOneDriveRestoreDataSelectors() {
}, },
expectIncludeLen: 2, expectIncludeLen: 2,
}, },
{
name: "folder with just /",
opts: utils.OneDriveOpts{
Users: empty,
FileName: empty,
FolderPath: onlySlash,
},
expectIncludeLen: 1,
},
} }
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {

View File

@ -7,8 +7,9 @@ import (
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@ -30,6 +31,7 @@ func (suite *SharePointUtilsSuite) TestIncludeSharePointRestoreDataSelectors() {
containsOnly = []string{"contains"} containsOnly = []string{"contains"}
prefixOnly = []string{"/prefix"} prefixOnly = []string{"/prefix"}
containsAndPrefix = []string{"contains", "/prefix"} containsAndPrefix = []string{"contains", "/prefix"}
onlySlash = []string{string(path.PathSeparator)}
) )
table := []struct { table := []struct {
@ -182,6 +184,13 @@ func (suite *SharePointUtilsSuite) TestIncludeSharePointRestoreDataSelectors() {
}, },
expectIncludeLen: 2, expectIncludeLen: 2,
}, },
{
name: "folder with just /",
opts: utils.SharePointOpts{
FolderPath: onlySlash,
},
expectIncludeLen: 1,
},
} }
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
@ -280,10 +289,10 @@ func (suite *SharePointUtilsSuite) TestValidateSharePointRestoreFlags() {
backupID: "id", backupID: "id",
opts: utils.SharePointOpts{ opts: utils.SharePointOpts{
WebURL: []string{"www.corsobackup.io/sites/foo"}, WebURL: []string{"www.corsobackup.io/sites/foo"},
FileCreatedAfter: common.Now(), FileCreatedAfter: dttm.Now(),
FileCreatedBefore: common.Now(), FileCreatedBefore: dttm.Now(),
FileModifiedAfter: common.Now(), FileModifiedAfter: dttm.Now(),
FileModifiedBefore: common.Now(), FileModifiedBefore: dttm.Now(),
Populated: utils.PopulatedFlags{ Populated: utils.PopulatedFlags{
utils.SiteFN: {}, utils.SiteFN: {},
utils.FileCreatedAfterFN: {}, utils.FileCreatedAfterFN: {},

View File

@ -43,4 +43,6 @@ var (
PageFolderInput = []string{"pageFolder1", "pageFolder2"} PageFolderInput = []string{"pageFolder1", "pageFolder2"}
PageInput = []string{"page1", "page2"} PageInput = []string{"page1", "page2"}
RestorePermissions = true
) )

View File

@ -7,7 +7,7 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/backup/details/testdata" "github.com/alcionai/corso/src/pkg/backup/details/testdata"
@ -21,7 +21,7 @@ type ExchangeOptionsTest struct {
Name string Name string
Opts utils.ExchangeOpts Opts utils.ExchangeOpts
BackupGetter *MockBackupGetter BackupGetter *MockBackupGetter
Expected []details.DetailsEntry Expected []details.Entry
} }
var ( var (
@ -138,39 +138,39 @@ var (
Name: "EmailsFolderPrefixMatch", Name: "EmailsFolderPrefixMatch",
Expected: testdata.ExchangeEmailItems, Expected: testdata.ExchangeEmailItems,
Opts: utils.ExchangeOpts{ Opts: utils.ExchangeOpts{
EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder(false)}, EmailFolder: []string{testdata.ExchangeEmailInboxPath.FolderLocation()},
}, },
}, },
{ {
Name: "EmailsFolderPrefixMatchTrailingSlash", Name: "EmailsFolderPrefixMatchTrailingSlash",
Expected: testdata.ExchangeEmailItems, Expected: testdata.ExchangeEmailItems,
Opts: utils.ExchangeOpts{ Opts: utils.ExchangeOpts{
EmailFolder: []string{testdata.ExchangeEmailInboxPath.Folder(false) + "/"}, EmailFolder: []string{testdata.ExchangeEmailInboxPath.FolderLocation() + "/"},
}, },
}, },
{ {
Name: "EmailsFolderWithSlashPrefixMatch", Name: "EmailsFolderWithSlashPrefixMatch",
Expected: []details.DetailsEntry{ Expected: []details.Entry{
testdata.ExchangeEmailItems[1], testdata.ExchangeEmailItems[1],
testdata.ExchangeEmailItems[2], testdata.ExchangeEmailItems[2],
}, },
Opts: utils.ExchangeOpts{ Opts: utils.ExchangeOpts{
EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder(false)}, EmailFolder: []string{testdata.ExchangeEmailBasePath2.FolderLocation()},
}, },
}, },
{ {
Name: "EmailsFolderWithSlashPrefixMatchTrailingSlash", Name: "EmailsFolderWithSlashPrefixMatchTrailingSlash",
Expected: []details.DetailsEntry{ Expected: []details.Entry{
testdata.ExchangeEmailItems[1], testdata.ExchangeEmailItems[1],
testdata.ExchangeEmailItems[2], testdata.ExchangeEmailItems[2],
}, },
Opts: utils.ExchangeOpts{ Opts: utils.ExchangeOpts{
EmailFolder: []string{testdata.ExchangeEmailBasePath2.Folder(false) + "/"}, EmailFolder: []string{testdata.ExchangeEmailBasePath2.FolderLocation() + "/"},
}, },
}, },
{ {
Name: "EmailsBySubject", Name: "EmailsBySubject",
Expected: []details.DetailsEntry{ Expected: []details.Entry{
testdata.ExchangeEmailItems[0], testdata.ExchangeEmailItems[0],
testdata.ExchangeEmailItems[1], testdata.ExchangeEmailItems[1],
}, },
@ -183,7 +183,7 @@ var (
Expected: append( Expected: append(
append( append(
append( append(
[]details.DetailsEntry{}, []details.Entry{},
testdata.ExchangeEmailItems..., testdata.ExchangeEmailItems...,
), ),
testdata.ExchangeContactsItems..., testdata.ExchangeContactsItems...,
@ -193,41 +193,43 @@ var (
}, },
{ {
Name: "MailReceivedTime", Name: "MailReceivedTime",
Expected: []details.DetailsEntry{testdata.ExchangeEmailItems[0]}, Expected: []details.Entry{testdata.ExchangeEmailItems[0]},
Opts: utils.ExchangeOpts{ Opts: utils.ExchangeOpts{
EmailReceivedBefore: common.FormatTime(testdata.Time1.Add(time.Second)), EmailReceivedBefore: dttm.Format(testdata.Time1.Add(time.Second)),
}, },
}, },
{ {
Name: "MailItemRef", Name: "MailShortRef",
Expected: []details.DetailsEntry{testdata.ExchangeEmailItems[0]}, Expected: []details.Entry{testdata.ExchangeEmailItems[0]},
Opts: utils.ExchangeOpts{
Email: []string{testdata.ExchangeEmailItemPath1.RR.ShortRef()},
},
},
{
Name: "BadMailItemRef",
// no matches are expected, since exchange ItemRefs
// are not matched when using the CLI's selectors.
Expected: []details.Entry{},
Opts: utils.ExchangeOpts{ Opts: utils.ExchangeOpts{
Email: []string{testdata.ExchangeEmailItems[0].ItemRef}, Email: []string{testdata.ExchangeEmailItems[0].ItemRef},
}, },
}, },
{
Name: "MailShortRef",
Expected: []details.DetailsEntry{testdata.ExchangeEmailItems[0]},
Opts: utils.ExchangeOpts{
Email: []string{testdata.ExchangeEmailItemPath1.ShortRef()},
},
},
{ {
Name: "MultipleMailShortRef", Name: "MultipleMailShortRef",
Expected: []details.DetailsEntry{ Expected: []details.Entry{
testdata.ExchangeEmailItems[0], testdata.ExchangeEmailItems[0],
testdata.ExchangeEmailItems[1], testdata.ExchangeEmailItems[1],
}, },
Opts: utils.ExchangeOpts{ Opts: utils.ExchangeOpts{
Email: []string{ Email: []string{
testdata.ExchangeEmailItemPath1.ShortRef(), testdata.ExchangeEmailItemPath1.RR.ShortRef(),
testdata.ExchangeEmailItemPath2.ShortRef(), testdata.ExchangeEmailItemPath2.RR.ShortRef(),
}, },
}, },
}, },
{ {
Name: "AllEventsAndMailWithSubject", Name: "AllEventsAndMailWithSubject",
Expected: []details.DetailsEntry{testdata.ExchangeEmailItems[0]}, Expected: []details.Entry{testdata.ExchangeEmailItems[0]},
Opts: utils.ExchangeOpts{ Opts: utils.ExchangeOpts{
EmailSubject: "foo", EmailSubject: "foo",
Event: selectors.Any(), Event: selectors.Any(),
@ -235,7 +237,7 @@ var (
}, },
{ {
Name: "EventsAndMailWithSubject", Name: "EventsAndMailWithSubject",
Expected: []details.DetailsEntry{}, Expected: []details.Entry{},
Opts: utils.ExchangeOpts{ Opts: utils.ExchangeOpts{
EmailSubject: "foo", EmailSubject: "foo",
EventSubject: "foo", EventSubject: "foo",
@ -243,13 +245,13 @@ var (
}, },
{ {
Name: "EventsAndMailByShortRef", Name: "EventsAndMailByShortRef",
Expected: []details.DetailsEntry{ Expected: []details.Entry{
testdata.ExchangeEmailItems[0], testdata.ExchangeEmailItems[0],
testdata.ExchangeEventsItems[0], testdata.ExchangeEventsItems[0],
}, },
Opts: utils.ExchangeOpts{ Opts: utils.ExchangeOpts{
Email: []string{testdata.ExchangeEmailItemPath1.ShortRef()}, Email: []string{testdata.ExchangeEmailItemPath1.RR.ShortRef()},
Event: []string{testdata.ExchangeEventsItemPath1.ShortRef()}, Event: []string{testdata.ExchangeEventsItemPath1.RR.ShortRef()},
}, },
}, },
} }
@ -259,7 +261,7 @@ type OneDriveOptionsTest struct {
Name string Name string
Opts utils.OneDriveOpts Opts utils.OneDriveOpts
BackupGetter *MockBackupGetter BackupGetter *MockBackupGetter
Expected []details.DetailsEntry Expected []details.Entry
} }
var ( var (
@ -354,6 +356,13 @@ var (
FolderPath: selectors.Any(), FolderPath: selectors.Any(),
}, },
}, },
{
Name: "FilesWithSingleSlash",
Expected: testdata.OneDriveItems,
Opts: utils.OneDriveOpts{
FolderPath: []string{"/"},
},
},
{ {
Name: "FolderPrefixMatch", Name: "FolderPrefixMatch",
Expected: testdata.OneDriveItems, Expected: testdata.OneDriveItems,
@ -375,9 +384,16 @@ var (
FolderPath: []string{testdata.OneDriveFolderFolder + "/"}, FolderPath: []string{testdata.OneDriveFolderFolder + "/"},
}, },
}, },
{
Name: "FolderRepoRefMatchesNothing",
Expected: []details.Entry{},
Opts: utils.OneDriveOpts{
FolderPath: []string{testdata.OneDriveFolderPath.RR.Folder(true)},
},
},
{ {
Name: "ShortRef", Name: "ShortRef",
Expected: []details.DetailsEntry{ Expected: []details.Entry{
testdata.OneDriveItems[0], testdata.OneDriveItems[0],
testdata.OneDriveItems[1], testdata.OneDriveItems[1],
}, },
@ -390,7 +406,7 @@ var (
}, },
{ {
Name: "SingleItem", Name: "SingleItem",
Expected: []details.DetailsEntry{testdata.OneDriveItems[0]}, Expected: []details.Entry{testdata.OneDriveItems[0]},
Opts: utils.OneDriveOpts{ Opts: utils.OneDriveOpts{
FileName: []string{ FileName: []string{
testdata.OneDriveItems[0].OneDrive.ItemName, testdata.OneDriveItems[0].OneDrive.ItemName,
@ -399,7 +415,7 @@ var (
}, },
{ {
Name: "MultipleItems", Name: "MultipleItems",
Expected: []details.DetailsEntry{ Expected: []details.Entry{
testdata.OneDriveItems[0], testdata.OneDriveItems[0],
testdata.OneDriveItems[1], testdata.OneDriveItems[1],
}, },
@ -412,7 +428,7 @@ var (
}, },
{ {
Name: "ItemRefMatchesNothing", Name: "ItemRefMatchesNothing",
Expected: []details.DetailsEntry{}, Expected: []details.Entry{},
Opts: utils.OneDriveOpts{ Opts: utils.OneDriveOpts{
FileName: []string{ FileName: []string{
testdata.OneDriveItems[0].ItemRef, testdata.OneDriveItems[0].ItemRef,
@ -421,9 +437,9 @@ var (
}, },
{ {
Name: "CreatedBefore", Name: "CreatedBefore",
Expected: []details.DetailsEntry{testdata.OneDriveItems[1]}, Expected: []details.Entry{testdata.OneDriveItems[1]},
Opts: utils.OneDriveOpts{ Opts: utils.OneDriveOpts{
FileCreatedBefore: common.FormatTime(testdata.Time1.Add(time.Second)), FileCreatedBefore: dttm.Format(testdata.Time1.Add(time.Second)),
}, },
}, },
} }
@ -433,7 +449,7 @@ type SharePointOptionsTest struct {
Name string Name string
Opts utils.SharePointOpts Opts utils.SharePointOpts
BackupGetter *MockBackupGetter BackupGetter *MockBackupGetter
Expected []details.DetailsEntry Expected []details.Entry
} }
var ( var (
@ -473,6 +489,13 @@ var (
FolderPath: selectors.Any(), FolderPath: selectors.Any(),
}, },
}, },
{
Name: "LibraryItemsWithSingleSlash",
Expected: testdata.SharePointLibraryItems,
Opts: utils.SharePointOpts{
FolderPath: []string{"/"},
},
},
{ {
Name: "FolderPrefixMatch", Name: "FolderPrefixMatch",
Expected: testdata.SharePointLibraryItems, Expected: testdata.SharePointLibraryItems,
@ -494,9 +517,16 @@ var (
FolderPath: []string{testdata.SharePointLibraryFolder + "/"}, FolderPath: []string{testdata.SharePointLibraryFolder + "/"},
}, },
}, },
{
Name: "FolderRepoRefMatchesNothing",
Expected: []details.Entry{},
Opts: utils.SharePointOpts{
FolderPath: []string{testdata.SharePointLibraryPath.RR.Folder(true)},
},
},
{ {
Name: "ShortRef", Name: "ShortRef",
Expected: []details.DetailsEntry{ Expected: []details.Entry{
testdata.SharePointLibraryItems[0], testdata.SharePointLibraryItems[0],
testdata.SharePointLibraryItems[1], testdata.SharePointLibraryItems[1],
}, },
@ -509,7 +539,7 @@ var (
}, },
{ {
Name: "SingleItem", Name: "SingleItem",
Expected: []details.DetailsEntry{testdata.SharePointLibraryItems[0]}, Expected: []details.Entry{testdata.SharePointLibraryItems[0]},
Opts: utils.SharePointOpts{ Opts: utils.SharePointOpts{
FileName: []string{ FileName: []string{
testdata.SharePointLibraryItems[0].SharePoint.ItemName, testdata.SharePointLibraryItems[0].SharePoint.ItemName,
@ -518,7 +548,7 @@ var (
}, },
{ {
Name: "MultipleItems", Name: "MultipleItems",
Expected: []details.DetailsEntry{ Expected: []details.Entry{
testdata.SharePointLibraryItems[0], testdata.SharePointLibraryItems[0],
testdata.SharePointLibraryItems[1], testdata.SharePointLibraryItems[1],
}, },
@ -531,7 +561,7 @@ var (
}, },
{ {
Name: "ItemRefMatchesNothing", Name: "ItemRefMatchesNothing",
Expected: []details.DetailsEntry{}, Expected: []details.Entry{},
Opts: utils.SharePointOpts{ Opts: utils.SharePointOpts{
FileName: []string{ FileName: []string{
testdata.SharePointLibraryItems[0].ItemRef, testdata.SharePointLibraryItems[0].ItemRef,
@ -542,7 +572,7 @@ var (
// Name: "CreatedBefore", // Name: "CreatedBefore",
// Expected: []details.DetailsEntry{testdata.SharePointLibraryItems[1]}, // Expected: []details.DetailsEntry{testdata.SharePointLibraryItems[1]},
// Opts: utils.SharePointOpts{ // Opts: utils.SharePointOpts{
// FileCreatedBefore: common.FormatTime(testdata.Time1.Add(time.Second)), // FileCreatedBefore: dttm.Format(testdata.Time1.Add(time.Second)),
// }, // },
// }, // },
} }

src/cli/utils/users.go Normal file
View File

@ -0,0 +1,40 @@
package utils
import (
"context"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// UsersMap retrieves all users in the tenant and returns them in an idname.Cacher
func UsersMap(
ctx context.Context,
acct account.Account,
errs *fault.Bus,
) (idname.Cacher, error) {
au, err := makeUserAPI(acct)
if err != nil {
return nil, clues.Wrap(err, "constructing a graph client")
}
return au.GetAllIDsAndNames(ctx, errs)
}
func makeUserAPI(acct account.Account) (api.Users, error) {
creds, err := acct.M365Config()
if err != nil {
return api.Users{}, clues.Wrap(err, "getting m365 account creds")
}
cli, err := api.NewClient(creds)
if err != nil {
return api.Users{}, clues.Wrap(err, "constructing api client")
}
return cli.Users(), nil
}

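The new users.go helper wires account credentials into a Graph users API client and returns an idname.Cacher. A usage sketch under the signatures shown above; the lookup address is a placeholder:

package example

import (
	"context"

	"github.com/alcionai/clues"

	"github.com/alcionai/corso/src/cli/utils"
	"github.com/alcionai/corso/src/pkg/account"
	"github.com/alcionai/corso/src/pkg/fault"
)

// lookupUserID builds the tenant-wide id/name cache once, then resolves a
// user principal name to its ID.  Sketch only.
func lookupUserID(ctx context.Context, acct account.Account) (string, error) {
	errs := fault.New(false)

	ins, err := utils.UsersMap(ctx, acct, errs)
	if err != nil {
		return "", clues.Wrap(err, "getting tenant users")
	}

	// idname.Cacher resolves in both directions (IDOf / NameOf), as the
	// factory code removed later in this commit demonstrates.
	id, ok := ins.IDOf("user@example.onmicrosoft.com")
	if !ok {
		return "", clues.New("user not found within tenant")
	}

	return id, nil
}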
View File

@ -8,7 +8,10 @@ import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/pflag" "github.com/spf13/pflag"
"github.com/alcionai/corso/src/cli/config"
"github.com/alcionai/corso/src/cli/options"
"github.com/alcionai/corso/src/internal/events" "github.com/alcionai/corso/src/internal/events"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
@ -21,16 +24,18 @@ const (
Wildcard = "*" Wildcard = "*"
) )
// RequireProps validates the existence of the properties func GetAccountAndConnect(ctx context.Context) (repository.Repository, *account.Account, error) {
// in the map. Expects the format map[propName]propVal. cfg, err := config.GetConfigRepoDetails(ctx, true, nil)
func RequireProps(props map[string]string) error { if err != nil {
for name, val := range props { return nil, nil, err
if len(val) == 0 {
return clues.New(name + " is required to perform this command")
}
} }
return nil r, err := repository.Connect(ctx, cfg.Account, cfg.Storage, options.Control())
if err != nil {
return nil, nil, clues.Wrap(err, "Failed to connect to the "+cfg.Storage.Provider.String()+" repository")
}
return r, &cfg.Account, nil
} }
// CloseRepo handles closing a repo. // CloseRepo handles closing a repo.

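GetAccountAndConnect collapses config loading and repository connection into one call, replacing the removed RequireProps validation. A hedged usage sketch, assuming the helper lives in the cli/utils package alongside CloseRepo (per the comment above), whose exact signature is also an assumption here:

package example

import (
	"context"

	"github.com/alcionai/corso/src/cli/utils"
)

// withRepo connects, defers cleanup, and hands control back to the caller.
func withRepo(ctx context.Context) error {
	r, acct, err := utils.GetAccountAndConnect(ctx)
	if err != nil {
		return err
	}

	defer utils.CloseRepo(ctx, r)

	_ = acct // *account.Account, available for service clients

	return nil
}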
View File

@ -3,7 +3,6 @@ package utils
import ( import (
"testing" "testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
@ -19,27 +18,6 @@ func TestCliUtilsSuite(t *testing.T) {
suite.Run(t, &CliUtilsSuite{Suite: tester.NewUnitSuite(t)}) suite.Run(t, &CliUtilsSuite{Suite: tester.NewUnitSuite(t)})
} }
func (suite *CliUtilsSuite) TestRequireProps() {
table := []struct {
name string
props map[string]string
errCheck assert.ErrorAssertionFunc
}{
{
props: map[string]string{"exists": "I have seen the fnords!"},
errCheck: assert.NoError,
},
{
props: map[string]string{"not-exists": ""},
errCheck: assert.Error,
},
}
for _, test := range table {
err := RequireProps(test.props)
test.errCheck(suite.T(), err, clues.ToCore(err))
}
}
func (suite *CliUtilsSuite) TestSplitFoldersIntoContainsAndPrefix() { func (suite *CliUtilsSuite) TestSplitFoldersIntoContainsAndPrefix() {
table := []struct { table := []struct {
name string name string

View File

@ -8,6 +8,7 @@ import (
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cmd/factory/impl" "github.com/alcionai/corso/src/cmd/factory/impl"
"github.com/alcionai/corso/src/internal/common/crash"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
) )
@ -29,21 +30,38 @@ var oneDriveCmd = &cobra.Command{
RunE: handleOneDriveFactory, RunE: handleOneDriveFactory,
} }
var sharePointCmd = &cobra.Command{
Use: "sharepoint",
Short: "Generate shareopint data",
RunE: handleSharePointFactory,
}
// ------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------
// CLI command handlers // CLI command handlers
// ------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------
func main() { func main() {
ctx, _ := logger.SeedLevel(context.Background(), logger.Development) ls := logger.Settings{
Level: logger.LLDebug,
Format: logger.LFText,
}
ctx, _ := logger.CtxOrSeed(context.Background(), ls)
ctx = SetRootCmd(ctx, factoryCmd) ctx = SetRootCmd(ctx, factoryCmd)
defer logger.Flush(ctx) defer func() {
if err := crash.Recovery(ctx, recover(), "backup"); err != nil {
logger.CtxErr(ctx, err).Error("panic in factory")
}
logger.Flush(ctx)
}()
// persistent flags that are common to all use cases // persistent flags that are common to all use cases
fs := factoryCmd.PersistentFlags() fs := factoryCmd.PersistentFlags()
fs.StringVar(&impl.Tenant, "tenant", "", "m365 tenant containing the user") fs.StringVar(&impl.Tenant, "tenant", "", "m365 tenant containing the user")
fs.StringVar(&impl.Site, "site", "", "sharepoint site owning the new data")
fs.StringVar(&impl.User, "user", "", "m365 user owning the new data") fs.StringVar(&impl.User, "user", "", "m365 user owning the new data")
cobra.CheckErr(factoryCmd.MarkPersistentFlagRequired("user")) fs.StringVar(&impl.SecondaryUser, "secondaryuser", "", "m365 secondary user owning the new data")
fs.IntVar(&impl.Count, "count", 0, "count of items to produce") fs.IntVar(&impl.Count, "count", 0, "count of items to produce")
cobra.CheckErr(factoryCmd.MarkPersistentFlagRequired("count")) cobra.CheckErr(factoryCmd.MarkPersistentFlagRequired("count"))
fs.StringVar(&impl.Destination, "destination", "", "destination of the new data (will create as needed)") fs.StringVar(&impl.Destination, "destination", "", "destination of the new data (will create as needed)")
@ -53,6 +71,8 @@ func main() {
impl.AddExchangeCommands(exchangeCmd) impl.AddExchangeCommands(exchangeCmd)
factoryCmd.AddCommand(oneDriveCmd) factoryCmd.AddCommand(oneDriveCmd)
impl.AddOneDriveCommands(oneDriveCmd) impl.AddOneDriveCommands(oneDriveCmd)
factoryCmd.AddCommand(sharePointCmd)
impl.AddSharePointCommands(sharePointCmd)
if err := factoryCmd.ExecuteContext(ctx); err != nil { if err := factoryCmd.ExecuteContext(ctx); err != nil {
logger.Flush(ctx) logger.Flush(ctx)
@ -74,3 +94,8 @@ func handleOneDriveFactory(cmd *cobra.Command, args []string) error {
Err(cmd.Context(), impl.ErrNotYetImplemented) Err(cmd.Context(), impl.ErrNotYetImplemented)
return cmd.Help() return cmd.Help()
} }
func handleSharePointFactory(cmd *cobra.Command, args []string) error {
Err(cmd.Context(), impl.ErrNotYetImplemented)
return cmd.Help()
}

View File

@ -2,6 +2,7 @@ package impl
import ( import (
"context" "context"
"fmt"
"os" "os"
"strings" "strings"
"time" "time"
@ -11,9 +12,13 @@ import (
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector" "github.com/alcionai/corso/src/internal/connector"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
@ -22,14 +27,15 @@ import (
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365"
) )
var ( var (
Count int Count int
Destination string Destination string
Tenant string Site string
User string Tenant string
User string
SecondaryUser string
) )
// TODO: ErrGenerating = clues.New("not all items were successfully generated") // TODO: ErrGenerating = clues.New("not all items were successfully generated")
@ -59,8 +65,8 @@ func generateAndRestoreItems(
for i := 0; i < howMany; i++ { for i := 0; i < howMany; i++ {
var ( var (
now = common.Now() now = dttm.Now()
nowLegacy = common.FormatLegacyTime(time.Now()) nowLegacy = dttm.FormatToLegacy(time.Now())
id = uuid.NewString() id = uuid.NewString()
subject = "automated " + now[:16] + " - " + id[:8] subject = "automated " + now[:16] + " - " + id[:8]
body = "automated " + cat.String() + " generation for " + userID + " at " + now + " - " + id body = "automated " + cat.String() + " generation for " + userID + " at " + now + " - " + id
@ -73,13 +79,12 @@ func generateAndRestoreItems(
} }
collections := []collection{{ collections := []collection{{
pathElements: []string{destFldr}, PathElements: []string{destFldr},
category: cat, category: cat,
items: items, items: items,
}} }}
// TODO: fit the destination to the containers dest := control.DefaultRestoreDestination(dttm.SafeForTesting)
dest := control.DefaultRestoreDestination(common.SimpleTimeTesting)
dest.ContainerName = destFldr dest.ContainerName = destFldr
print.Infof(ctx, "Restoring to folder %s", dest.ContainerName) print.Infof(ctx, "Restoring to folder %s", dest.ContainerName)
@ -101,7 +106,16 @@ func generateAndRestoreItems(
// Common Helpers // Common Helpers
// ------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------
func getGCAndVerifyUser(ctx context.Context, userID string) (*connector.GraphConnector, account.Account, error) { func getGCAndVerifyResourceOwner(
ctx context.Context,
resource connector.Resource,
resourceOwner string,
) (
*connector.GraphConnector,
account.Account,
idname.Provider,
error,
) {
tid := common.First(Tenant, os.Getenv(account.AzureTenantID)) tid := common.First(Tenant, os.Getenv(account.AzureTenantID))
if len(Tenant) == 0 { if len(Tenant) == 0 {
@ -116,34 +130,20 @@ func getGCAndVerifyUser(ctx context.Context, userID string) (*connector.GraphCon
acct, err := account.NewAccount(account.ProviderM365, m365Cfg) acct, err := account.NewAccount(account.ProviderM365, m365Cfg)
if err != nil { if err != nil {
return nil, account.Account{}, clues.Wrap(err, "finding m365 account details") return nil, account.Account{}, nil, clues.Wrap(err, "finding m365 account details")
} }
// TODO: log/print recoverable errors gc, err := connector.NewGraphConnector(ctx, acct, resource)
errs := fault.New(false)
ins, err := m365.UsersMap(ctx, acct, errs)
if err != nil { if err != nil {
return nil, account.Account{}, clues.Wrap(err, "getting tenant users") return nil, account.Account{}, nil, clues.Wrap(err, "connecting to graph api")
} }
_, idOK := ins.NameOf(strings.ToLower(userID)) id, _, err := gc.PopulateOwnerIDAndNamesFrom(ctx, resourceOwner, nil)
_, nameOK := ins.IDOf(strings.ToLower(userID))
if !idOK && !nameOK {
return nil, account.Account{}, clues.New("user not found within tenant")
}
gc, err := connector.NewGraphConnector(
ctx,
acct,
connector.Users,
errs)
if err != nil { if err != nil {
return nil, account.Account{}, clues.Wrap(err, "connecting to graph api") return nil, account.Account{}, nil, clues.Wrap(err, "verifying user")
} }
return gc, acct, nil return gc, acct, gc.IDNameLookup.ProviderForID(id), nil
} }
type item struct { type item struct {
@ -156,7 +156,7 @@ type collection struct {
// only contain elements after the prefix that corso uses for the path. For // only contain elements after the prefix that corso uses for the path. For
// example, a collection for the Inbox folder in exchange mail would just be // example, a collection for the Inbox folder in exchange mail would just be
// "Inbox". // "Inbox".
pathElements []string PathElements []string
category path.CategoryType category path.CategoryType
items []item items []item
} }
@ -176,7 +176,7 @@ func buildCollections(
service, service,
c.category, c.category,
false, false,
c.pathElements...) c.PathElements...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -193,3 +193,219 @@ func buildCollections(
return collections, nil return collections, nil
} }
var (
folderAName = "folder-a"
folderBName = "b"
folderCName = "folder-c"
fileAData = []byte(strings.Repeat("a", 33))
fileBData = []byte(strings.Repeat("b", 65))
fileEData = []byte(strings.Repeat("e", 257))
// Owner and empty permissions cannot be restored, so they are not tested here
writePerm = []string{"write"}
readPerm = []string{"read"}
)
func generateAndRestoreDriveItems(
gc *connector.GraphConnector,
resourceOwner, secondaryUserID, secondaryUserName string,
acct account.Account,
service path.ServiceType,
cat path.CategoryType,
sel selectors.Selector,
tenantID, destFldr string,
count int,
errs *fault.Bus,
) (
*details.Details,
error,
) {
ctx, flush := tester.NewContext()
defer flush()
dest := control.DefaultRestoreDestination(dttm.SafeForTesting)
dest.ContainerName = destFldr
print.Infof(ctx, "Restoring to folder %s", dest.ContainerName)
var driveID string
switch service {
case path.SharePointService:
d, err := gc.Service.Client().Sites().BySiteId(resourceOwner).Drive().Get(ctx, nil)
if err != nil {
return nil, clues.Wrap(err, "getting site's default drive")
}
driveID = ptr.Val(d.GetId())
default:
d, err := gc.Service.Client().Users().ByUserId(resourceOwner).Drive().Get(ctx, nil)
if err != nil {
return nil, clues.Wrap(err, "getting user's default drive")
}
driveID = ptr.Val(d.GetId())
}
var (
cols []connector.OnedriveColInfo
rootPath = []string{"drives", driveID, "root:"}
folderAPath = []string{"drives", driveID, "root:", folderAName}
folderBPath = []string{"drives", driveID, "root:", folderBName}
folderCPath = []string{"drives", driveID, "root:", folderCName}
now = time.Now()
year, mnth, date = now.Date()
hour, min, sec = now.Clock()
currentTime = fmt.Sprintf("%d-%v-%d-%d-%d-%d", year, mnth, date, hour, min, sec)
)
for i := 0; i < count; i++ {
col := []connector.OnedriveColInfo{
// basic folder and file creation
{
PathElements: rootPath,
Files: []connector.ItemData{
{
Name: fmt.Sprintf("file-1st-count-%d-at-%s", i, currentTime),
Data: fileAData,
Perms: connector.PermData{
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: writePerm,
},
},
{
Name: fmt.Sprintf("file-2nd-count-%d-at-%s", i, currentTime),
Data: fileBData,
},
},
Folders: []connector.ItemData{
{
Name: folderBName,
},
{
Name: folderAName,
Perms: connector.PermData{
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: readPerm,
},
},
{
Name: folderCName,
Perms: connector.PermData{
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: readPerm,
},
},
},
},
{
// a folder that has permissions with an item in the folder with
// the different permissions.
PathElements: folderAPath,
Files: []connector.ItemData{
{
Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
Data: fileEData,
Perms: connector.PermData{
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: writePerm,
},
},
},
Perms: connector.PermData{
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: readPerm,
},
},
{
// a folder that has permissions with an item in the folder with
// no permissions.
PathElements: folderCPath,
Files: []connector.ItemData{
{
Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
Data: fileAData,
},
},
Perms: connector.PermData{
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: readPerm,
},
},
{
PathElements: folderBPath,
Files: []connector.ItemData{
{
// restoring a file in a non-root folder that doesn't inherit
// permissions.
Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
Data: fileBData,
Perms: connector.PermData{
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: writePerm,
},
},
},
Folders: []connector.ItemData{
{
Name: folderAName,
Perms: connector.PermData{
User: secondaryUserName,
EntityID: secondaryUserID,
Roles: readPerm,
},
},
},
},
}
cols = append(cols, col...)
}
input, err := connector.DataForInfo(service, cols, version.Backup)
if err != nil {
return nil, err
}
// collections := getCollections(
// service,
// tenantID,
// []string{resourceOwner},
// input,
// version.Backup)
opts := control.Options{
RestorePermissions: true,
ToggleFeatures: control.Toggles{},
}
config := connector.ConfigInfo{
Acct: acct,
Opts: opts,
Resource: connector.Users,
Service: service,
Tenant: tenantID,
ResourceOwners: []string{resourceOwner},
Dest: tester.DefaultTestRestoreDestination(""),
}
_, _, collections, _, err := connector.GetCollectionsAndExpected(
config,
input,
version.Backup)
if err != nil {
return nil, err
}
return gc.ConsumeRestoreCollections(ctx, version.Backup, acct, sel, dest, opts, collections, errs)
}

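generateAndRestoreDriveItems drives its whole permission matrix from declarative connector.OnedriveColInfo values. A minimal sketch of a single collection, using only field names that appear above; the drive ID and user values are placeholders:

package example

import (
	"github.com/alcionai/corso/src/internal/connector"
	"github.com/alcionai/corso/src/internal/version"
	"github.com/alcionai/corso/src/pkg/path"
)

// describeOneCollection declares a root-level file with write permissions for
// a secondary user, plus one plain folder, then expands it the same way the
// function above does.  Sketch only.
func describeOneCollection(driveID, secondaryID, secondaryName string) error {
	col := connector.OnedriveColInfo{
		PathElements: []string{"drives", driveID, "root:"},
		Files: []connector.ItemData{{
			Name: "example-file",
			Data: []byte("hello"),
			Perms: connector.PermData{
				User:     secondaryName,
				EntityID: secondaryID,
				Roles:    []string{"write"},
			},
		}},
		Folders: []connector.ItemData{{Name: "example-folder"}},
	}

	// DataForInfo turns the declarative description into restorable input.
	_, err := connector.DataForInfo(path.OneDriveService, []connector.OnedriveColInfo{col}, version.Backup)

	return err
}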
View File

@ -5,6 +5,7 @@ import (
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/connector"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -51,7 +52,7 @@ func handleExchangeEmailFactory(cmd *cobra.Command, args []string) error {
return nil return nil
} }
gc, acct, err := getGCAndVerifyUser(ctx, User) gc, acct, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
} }
@ -71,7 +72,7 @@ func handleExchangeEmailFactory(cmd *cobra.Command, args []string) error {
subject, body, body, subject, body, body,
now, now, now, now) now, now, now, now)
}, },
control.Options{}, control.Defaults(),
errs) errs)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
@ -98,7 +99,7 @@ func handleExchangeCalendarEventFactory(cmd *cobra.Command, args []string) error
return nil return nil
} }
gc, acct, err := getGCAndVerifyUser(ctx, User) gc, acct, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
} }
@ -117,7 +118,7 @@ func handleExchangeCalendarEventFactory(cmd *cobra.Command, args []string) error
User, subject, body, body, User, subject, body, body,
now, now, exchMock.NoRecurrence, exchMock.NoAttendees, false) now, now, exchMock.NoRecurrence, exchMock.NoAttendees, false)
}, },
control.Options{}, control.Defaults(),
errs) errs)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
@ -144,7 +145,7 @@ func handleExchangeContactFactory(cmd *cobra.Command, args []string) error {
return nil return nil
} }
gc, acct, err := getGCAndVerifyUser(ctx, User) gc, acct, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)
} }
@ -168,7 +169,7 @@ func handleExchangeContactFactory(cmd *cobra.Command, args []string) error {
"123-456-7890", "123-456-7890",
) )
}, },
control.Options{}, control.Defaults(),
errs) errs)
if err != nil { if err != nil {
return Only(ctx, err) return Only(ctx, err)

View File

@ -1,28 +1,71 @@
package impl package impl
import ( import (
"strings"
"github.com/spf13/cobra" "github.com/spf13/cobra"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/connector"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
) )
var filesCmd = &cobra.Command{ var odFilesCmd = &cobra.Command{
Use: "files", Use: "files",
Short: "Generate OneDrive files", Short: "Generate OneDrive files",
RunE: handleOneDriveFileFactory, RunE: handleOneDriveFileFactory,
} }
func AddOneDriveCommands(cmd *cobra.Command) { func AddOneDriveCommands(cmd *cobra.Command) {
cmd.AddCommand(filesCmd) cmd.AddCommand(odFilesCmd)
} }
func handleOneDriveFileFactory(cmd *cobra.Command, args []string) error { func handleOneDriveFileFactory(cmd *cobra.Command, args []string) error {
Err(cmd.Context(), ErrNotYetImplemented) var (
ctx = cmd.Context()
service = path.OneDriveService
category = path.FilesCategory
errs = fault.New(false)
)
if utils.HasNoFlagsAndShownHelp(cmd) { if utils.HasNoFlagsAndShownHelp(cmd) {
return nil return nil
} }
gc, acct, inp, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
if err != nil {
return Only(ctx, err)
}
sel := selectors.NewOneDriveBackup([]string{User}).Selector
sel.SetDiscreteOwnerIDName(inp.ID(), inp.Name())
deets, err := generateAndRestoreDriveItems(
gc,
inp.ID(),
SecondaryUser,
strings.ToLower(SecondaryUser),
acct,
service,
category,
sel,
Tenant,
Destination,
Count,
errs)
if err != nil {
return Only(ctx, err)
}
for _, e := range errs.Recovered() {
logger.CtxErr(ctx, e).Error(e.Error())
}
deets.PrintEntries(ctx)
return nil return nil
} }

View File

@ -0,0 +1,71 @@
package impl
import (
"strings"
"github.com/spf13/cobra"
. "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/connector"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
)
var spFilesCmd = &cobra.Command{
Use: "files",
Short: "Generate SharePoint files",
RunE: handleSharePointLibraryFileFactory,
}
func AddSharePointCommands(cmd *cobra.Command) {
cmd.AddCommand(spFilesCmd)
}
func handleSharePointLibraryFileFactory(cmd *cobra.Command, args []string) error {
var (
ctx = cmd.Context()
service = path.SharePointService
category = path.LibrariesCategory
errs = fault.New(false)
)
if utils.HasNoFlagsAndShownHelp(cmd) {
return nil
}
gc, acct, inp, err := getGCAndVerifyResourceOwner(ctx, connector.Sites, Site)
if err != nil {
return Only(ctx, err)
}
sel := selectors.NewSharePointBackup([]string{Site}).Selector
sel.SetDiscreteOwnerIDName(inp.ID(), inp.Name())
deets, err := generateAndRestoreDriveItems(
gc,
inp.ID(),
SecondaryUser,
strings.ToLower(SecondaryUser),
acct,
service,
category,
sel,
Tenant,
Destination,
Count,
errs)
if err != nil {
return Only(ctx, err)
}
for _, e := range errs.Recovered() {
logger.CtxErr(ctx, e).Error(e.Error())
}
deets.PrintEntries(ctx)
return nil
}

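Both factory handlers above bind their selector to a concrete resource owner before generating data. A sketch of that shared pattern, using the idname.Provider returned by getGCAndVerifyResourceOwner:

package example

import (
	"github.com/alcionai/corso/src/internal/common/idname"
	"github.com/alcionai/corso/src/pkg/selectors"
)

// ownerScopedSelector pins the selector to the owner's canonical ID and
// display name, so later lookups don't depend on which form the caller passed.
func ownerScopedSelector(site string, inp idname.Provider) selectors.Selector {
	sel := selectors.NewSharePointBackup([]string{site}).Selector
	sel.SetDiscreteOwnerIDName(inp.ID(), inp.Name())

	return sel
}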
View File

@ -16,12 +16,12 @@ import (
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/credentials"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
// Required inputs from user for command execution // Required inputs from user for command execution

View File

@ -17,7 +17,11 @@ var rootCmd = &cobra.Command{
} }
func main() { func main() {
ctx, _ := logger.SeedLevel(context.Background(), logger.Development) ls := logger.Settings{
Level: logger.LLDebug,
Format: logger.LFText,
}
ctx, _ := logger.CtxOrSeed(context.Background(), ls)
ctx = SetRootCmd(ctx, rootCmd) ctx = SetRootCmd(ctx, rootCmd)
defer logger.Flush(ctx) defer logger.Flush(ctx)

View File

@ -22,9 +22,9 @@ import (
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive/api"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/credentials"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
const downloadURLKey = "@microsoft.graph.downloadUrl" const downloadURLKey = "@microsoft.graph.downloadUrl"
@ -77,7 +77,10 @@ func handleOneDriveCmd(cmd *cobra.Command, args []string) error {
return Only(ctx, clues.Wrap(err, "creating graph adapter")) return Only(ctx, clues.Wrap(err, "creating graph adapter"))
} }
err = runDisplayM365JSON(ctx, graph.NewService(adpt), creds, user, m365ID) svc := graph.NewService(adpt)
gr := graph.NewNoTimeoutHTTPWrapper()
err = runDisplayM365JSON(ctx, svc, gr, creds, user, m365ID)
if err != nil { if err != nil {
cmd.SilenceUsage = true cmd.SilenceUsage = true
cmd.SilenceErrors = true cmd.SilenceErrors = true
@ -105,10 +108,11 @@ func (i itemPrintable) MinimumPrintable() any {
func runDisplayM365JSON( func runDisplayM365JSON(
ctx context.Context, ctx context.Context,
srv graph.Servicer, srv graph.Servicer,
gr graph.Requester,
creds account.M365Config, creds account.M365Config,
user, itemID string, user, itemID string,
) error { ) error {
drive, err := api.GetDriveByID(ctx, srv, user) drive, err := api.GetUsersDrive(ctx, srv, user)
if err != nil { if err != nil {
return err return err
} }
@ -123,7 +127,7 @@ func runDisplayM365JSON(
} }
if item != nil { if item != nil {
content, err := getDriveItemContent(item) content, err := getDriveItemContent(ctx, gr, item)
if err != nil { if err != nil {
return err return err
} }
@ -180,23 +184,21 @@ func serializeObject(data serialization.Parsable) (string, error) {
return string(content), err return string(content), err
} }
func getDriveItemContent(item models.DriveItemable) ([]byte, error) { func getDriveItemContent(
ctx context.Context,
gr graph.Requester,
item models.DriveItemable,
) ([]byte, error) {
url, ok := item.GetAdditionalData()[downloadURLKey].(*string) url, ok := item.GetAdditionalData()[downloadURLKey].(*string)
if !ok { if !ok {
return nil, clues.New("get download url") return nil, clues.New("retrieving download url")
} }
req, err := http.NewRequest(http.MethodGet, *url, nil) resp, err := gr.Request(ctx, http.MethodGet, *url, nil, nil)
if err != nil { if err != nil {
return nil, clues.New("create download request").With("error", err) return nil, clues.New("downloading item").With("error", err)
}
hc := graph.HTTPClient(graph.NoTimeout())
resp, err := hc.Do(req)
if err != nil {
return nil, clues.New("download item").With("error", err)
} }
defer resp.Body.Close()
content, err := io.ReadAll(resp.Body) content, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {

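getDriveItemContent now delegates HTTP plumbing to the graph.Requester wrapper instead of building a bespoke no-timeout client. A hedged sketch of the new fetch path, using only the Request signature visible above; that the caller still owns closing the response body is an assumption:

package example

import (
	"context"
	"io"
	"net/http"

	"github.com/alcionai/clues"

	"github.com/alcionai/corso/src/internal/connector/graph"
)

// download fetches raw item content from a pre-authenticated download URL.
func download(ctx context.Context, downloadURL string) ([]byte, error) {
	gr := graph.NewNoTimeoutHTTPWrapper()

	resp, err := gr.Request(ctx, http.MethodGet, downloadURL, nil, nil)
	if err != nil {
		return nil, clues.New("downloading item").With("error", err)
	}

	defer resp.Body.Close()

	return io.ReadAll(resp.Body)
}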
View File

@ -12,6 +12,7 @@ import (
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/connector" "github.com/alcionai/corso/src/internal/connector"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive" "github.com/alcionai/corso/src/internal/connector/onedrive"
@ -48,7 +49,11 @@ var ErrPurging = clues.New("not all items were successfully purged")
// ------------------------------------------------------------------------------------------ // ------------------------------------------------------------------------------------------
func main() { func main() {
ctx, _ := logger.SeedLevel(context.Background(), logger.Development) ls := logger.Settings{
Level: logger.LLDebug,
Format: logger.LFText,
}
ctx, _ := logger.CtxOrSeed(context.Background(), ls)
ctx = SetRootCmd(ctx, purgeCmd) ctx = SetRootCmd(ctx, purgeCmd)
defer logger.Flush(ctx) defer logger.Flush(ctx)
@ -226,8 +231,8 @@ func purgeFolders(
// compare the folder time to the deletion boundary time first // compare the folder time to the deletion boundary time first
displayName := *fld.GetDisplayName() displayName := *fld.GetDisplayName()
dnTime, err := common.ExtractTime(displayName) dnTime, err := dttm.ExtractTime(displayName)
if err != nil && !errors.Is(err, common.ErrNoTimeString) { if err != nil && !errors.Is(err, dttm.ErrNoTimeString) {
err = clues.Wrap(err, "!! Error: parsing container: "+displayName) err = clues.Wrap(err, "!! Error: parsing container: "+displayName)
Info(ctx, err) Info(ctx, err)
@ -266,11 +271,7 @@ func getGC(ctx context.Context) (account.Account, *connector.GraphConnector, err
return account.Account{}, nil, Only(ctx, clues.Wrap(err, "finding m365 account details")) return account.Account{}, nil, Only(ctx, clues.Wrap(err, "finding m365 account details"))
} }
// build a graph connector gc, err := connector.NewGraphConnector(ctx, acct, connector.Users)
// TODO: log/print recoverable errors
errs := fault.New(false)
gc, err := connector.NewGraphConnector(ctx, acct, connector.Users, errs)
if err != nil { if err != nil {
return account.Account{}, nil, Only(ctx, clues.Wrap(err, "connecting to graph api")) return account.Account{}, nil, Only(ctx, clues.Wrap(err, "connecting to graph api"))
} }
@ -286,7 +287,7 @@ func getBoundaryTime(ctx context.Context) (time.Time, error) {
) )
if len(before) > 0 { if len(before) > 0 {
boundaryTime, err = common.ParseTime(before) boundaryTime, err = dttm.ParseTime(before)
if err != nil { if err != nil {
return time.Time{}, Only(ctx, clues.Wrap(err, "parsing before flag to time")) return time.Time{}, Only(ctx, clues.Wrap(err, "parsing before flag to time"))
} }

View File

@ -131,6 +131,12 @@ if (![string]::IsNullOrEmpty($User)) {
# Works for dev domains where format is <user name>@<domain>.onmicrosoft.com # Works for dev domains where format is <user name>@<domain>.onmicrosoft.com
$domain = $User.Split('@')[1].Split('.')[0] $domain = $User.Split('@')[1].Split('.')[0]
$userNameEscaped = $User.Replace('.', '_').Replace('@', '_') $userNameEscaped = $User.Replace('.', '_').Replace('@', '_')
# hacky special case because of recreated CI user
if ($userNameEscaped -ilike "lynner*") {
$userNameEscaped += '1'
}
$siteUrl = "https://$domain-my.sharepoint.com/personal/$userNameEscaped/" $siteUrl = "https://$domain-my.sharepoint.com/personal/$userNameEscaped/"
if ($LibraryNameList.count -eq 0) { if ($LibraryNameList.count -eq 0) {

View File

@ -5,7 +5,7 @@ import (
"errors" "errors"
"fmt" "fmt"
"os" "os"
"path" stdpath "path"
"strings" "strings"
"time" "time"
@ -15,12 +15,13 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/users" "github.com/microsoftgraph/msgraph-sdk-go/users"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/filters" "github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -63,6 +64,7 @@ func main() {
var ( var (
client = msgraphsdk.NewGraphServiceClient(adapter) client = msgraphsdk.NewGraphServiceClient(adapter)
testUser = tester.GetM365UserID(ctx) testUser = tester.GetM365UserID(ctx)
testSite = tester.GetM365SiteID(ctx)
testService = os.Getenv("SANITY_RESTORE_SERVICE") testService = os.Getenv("SANITY_RESTORE_SERVICE")
folder = strings.TrimSpace(os.Getenv("SANITY_RESTORE_FOLDER")) folder = strings.TrimSpace(os.Getenv("SANITY_RESTORE_FOLDER"))
startTime, _ = mustGetTimeFromName(ctx, folder) startTime, _ = mustGetTimeFromName(ctx, folder)
@ -83,7 +85,9 @@ func main() {
case "exchange": case "exchange":
checkEmailRestoration(ctx, client, testUser, folder, dataFolder, baseBackupFolder, startTime) checkEmailRestoration(ctx, client, testUser, folder, dataFolder, baseBackupFolder, startTime)
case "onedrive": case "onedrive":
checkOnedriveRestoration(ctx, client, testUser, folder, startTime) checkOneDriveRestoration(ctx, client, testUser, folder, dataFolder, startTime)
case "sharepoint":
checkSharePointRestoration(ctx, client, testSite, testUser, folder, dataFolder, startTime)
default: default:
fatal(ctx, "no service specified", nil) fatal(ctx, "no service specified", nil)
} }
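The rest of this file tracks the msgraph-sdk-go builder rename: ID-taking builders such as UsersById and MailFoldersById become chained Users().ByUserId(...) and MailFolders().ByMailFolderId(...) calls. A sketch of the new shape, using only chains that appear in this diff:

package example

import (
	"context"

	msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
)

// listChildFolders shows the new chained-builder shape; the old
// client.UsersById(user).MailFoldersById(id) form no longer exists.
func listChildFolders(
	ctx context.Context,
	client *msgraphsdk.GraphServiceClient,
	user, folderID string,
) error {
	childFolders, err := client.
		Users().
		ByUserId(user).
		MailFolders().
		ByMailFolderId(folderID).
		ChildFolders().
		Get(ctx, nil)
	if err != nil {
		return err
	}

	for _, child := range childFolders.GetValue() {
		_ = child.GetDisplayName() // *string; corso code unwraps with ptr.Val
	}

	return nil
}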
@ -105,7 +109,7 @@ func checkEmailRestoration(
restoreFolder models.MailFolderable restoreFolder models.MailFolderable
itemCount = make(map[string]int32) itemCount = make(map[string]int32)
restoreItemCount = make(map[string]int32) restoreItemCount = make(map[string]int32)
builder = client.UsersById(testUser).MailFolders() builder = client.Users().ByUserId(testUser).MailFolders()
) )
for { for {
@ -148,8 +152,10 @@ func checkEmailRestoration(
"restore_folder_name", folderName) "restore_folder_name", folderName)
childFolder, err := client. childFolder, err := client.
UsersById(testUser). Users().
MailFoldersById(folderID). ByUserId(testUser).
MailFolders().
ByMailFolderId(folderID).
ChildFolders(). ChildFolders().
Get(ctx, nil) Get(ctx, nil)
if err != nil { if err != nil {
@ -209,8 +215,10 @@ func getAllMailSubFolders(
ctx = clues.Add(ctx, "parent_folder_id", folderID) ctx = clues.Add(ctx, "parent_folder_id", folderID)
childFolder, err := client. childFolder, err := client.
UsersById(testUser). Users().
MailFoldersById(folderID). ByUserId(testUser).
MailFolders().
ByMailFolderId(folderID).
ChildFolders(). ChildFolders().
Get(ctx, options) Get(ctx, options)
if err != nil { if err != nil {
@ -222,7 +230,7 @@ func getAllMailSubFolders(
childDisplayName = ptr.Val(child.GetDisplayName()) childDisplayName = ptr.Val(child.GetDisplayName())
childFolderCount = ptr.Val(child.GetChildFolderCount()) childFolderCount = ptr.Val(child.GetChildFolderCount())
//nolint:forbidigo //nolint:forbidigo
fullFolderName = path.Join(parentFolder, childDisplayName) fullFolderName = stdpath.Join(parentFolder, childDisplayName)
) )
if filters.PathContains([]string{dataFolder}).Compare(fullFolderName) { if filters.PathContains([]string{dataFolder}).Compare(fullFolderName) {
@ -259,8 +267,10 @@ func checkAllSubFolder(
) )
childFolder, err := client. childFolder, err := client.
UsersById(testUser). Users().
MailFoldersById(folderID). ByUserId(testUser).
MailFolders().
ByMailFolderId(folderID).
ChildFolders(). ChildFolders().
Get(ctx, options) Get(ctx, options)
if err != nil { if err != nil {
@ -271,7 +281,7 @@ func checkAllSubFolder(
var ( var (
childDisplayName = ptr.Val(child.GetDisplayName()) childDisplayName = ptr.Val(child.GetDisplayName())
//nolint:forbidigo //nolint:forbidigo
fullFolderName = path.Join(parentFolder, childDisplayName) fullFolderName = stdpath.Join(parentFolder, childDisplayName)
) )
if filters.PathContains([]string{dataFolder}).Compare(fullFolderName) { if filters.PathContains([]string{dataFolder}).Compare(fullFolderName) {
@ -292,41 +302,97 @@ func checkAllSubFolder(
// oneDrive // oneDrive
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
func checkOnedriveRestoration( func checkOneDriveRestoration(
ctx context.Context, ctx context.Context,
client *msgraphsdk.GraphServiceClient, client *msgraphsdk.GraphServiceClient,
testUser, userID, folderName, dataFolder string,
folderName string,
startTime time.Time, startTime time.Time,
) { ) {
var (
// map itemID -> item size
fileSizes = make(map[string]int64)
// map itemID -> permission id -> []permission roles
folderPermission = make(map[string][]permissionInfo)
restoreFile = make(map[string]int64)
restoreFolderPermission = make(map[string][]permissionInfo)
)
drive, err := client. drive, err := client.
UsersById(testUser). Users().
ByUserId(userID).
Drive(). Drive().
Get(ctx, nil) Get(ctx, nil)
if err != nil { if err != nil {
fatal(ctx, "getting the drive:", err) fatal(ctx, "getting the drive:", err)
} }
checkDriveRestoration(
ctx,
client,
path.OneDriveService,
folderName,
ptr.Val(drive.GetId()),
ptr.Val(drive.GetName()),
dataFolder,
startTime,
false)
}
// ---------------------------------------------------------------------------
// sharePoint
// ---------------------------------------------------------------------------
func checkSharePointRestoration(
ctx context.Context,
client *msgraphsdk.GraphServiceClient,
siteID, userID, folderName, dataFolder string,
startTime time.Time,
) {
drive, err := client.
Sites().
BySiteId(siteID).
Drive().
Get(ctx, nil)
if err != nil {
fatal(ctx, "getting the drive:", err)
}
checkDriveRestoration(
ctx,
client,
path.SharePointService,
folderName,
ptr.Val(drive.GetId()),
ptr.Val(drive.GetName()),
dataFolder,
startTime,
true)
}
// ---------------------------------------------------------------------------
// shared drive tests
// ---------------------------------------------------------------------------
func checkDriveRestoration(
ctx context.Context,
client *msgraphsdk.GraphServiceClient,
service path.ServiceType,
folderName,
driveID,
driveName,
dataFolder string,
startTime time.Time,
skipPermissionTest bool,
) {
var ( var (
driveID = ptr.Val(drive.GetId()) // map itemID -> item size
driveName = ptr.Val(drive.GetName()) fileSizes = make(map[string]int64)
restoreFolderID string // map itemID -> permission id -> []permission roles
folderPermissions = make(map[string][]permissionInfo)
restoreFile = make(map[string]int64)
restoredFolderPermissions = make(map[string][]permissionInfo)
) )
var restoreFolderID string
ctx = clues.Add(ctx, "drive_id", driveID, "drive_name", driveName) ctx = clues.Add(ctx, "drive_id", driveID, "drive_name", driveName)
response, err := client. response, err := client.
DrivesById(driveID). Drives().
Root(). ByDriveId(driveID).
Items().
ByDriveItemId("root").
Children(). Children().
Get(ctx, nil) Get(ctx, nil)
if err != nil { if err != nil {
@ -337,7 +403,6 @@ func checkOnedriveRestoration(
var ( var (
itemID = ptr.Val(driveItem.GetId()) itemID = ptr.Val(driveItem.GetId())
itemName = ptr.Val(driveItem.GetName()) itemName = ptr.Val(driveItem.GetName())
ictx = clues.Add(ctx, "item_id", itemID, "item_name", itemName)
) )
if itemName == folderName { if itemName == folderName {
@ -345,8 +410,8 @@ func checkOnedriveRestoration(
continue continue
} }
folderTime, hasTime := mustGetTimeFromName(ictx, itemName) if itemName != dataFolder {
if !isWithinTimeBound(ctx, startTime, folderTime, hasTime) { logAndPrint(ctx, "test data for folder: %s", dataFolder)
continue continue
} }
@ -362,59 +427,26 @@ func checkOnedriveRestoration(
		// currently we don't restore blank folders.
		// skip permission check for empty folders
		if ptr.Val(driveItem.GetFolder().GetChildCount()) == 0 {
-			logger.Ctx(ctx).Info("skipped empty folder: ", itemName)
-			fmt.Println("skipped empty folder: ", itemName)
+			logAndPrint(ctx, "skipped empty folder: %s", itemName)
			continue
		}

-		folderPermission[itemName] = permissionIn(ctx, client, driveID, itemID)
-		getOneDriveChildFolder(ctx, client, driveID, itemID, itemName, fileSizes, folderPermission, startTime)
+		folderPermissions[itemName] = permissionIn(ctx, client, driveID, itemID)
+		getOneDriveChildFolder(ctx, client, driveID, itemID, itemName, fileSizes, folderPermissions, startTime)
	}

-	getRestoredDrive(ctx, client, *drive.GetId(), restoreFolderID, restoreFile, restoreFolderPermission, startTime)
+	getRestoredDrive(ctx, client, driveID, restoreFolderID, restoreFile, restoredFolderPermissions, startTime)

-	for folderName, permissions := range folderPermission {
-		logger.Ctx(ctx).Info("checking for folder: ", folderName)
-		fmt.Printf("checking for folder: %s\n", folderName)
-
-		restoreFolderPerm := restoreFolderPermission[folderName]
-
-		if len(permissions) < 1 {
-			logger.Ctx(ctx).Info("no permissions found in:", folderName)
-			fmt.Println("no permissions found in:", folderName)
-			continue
-		}
-
-		assert(
-			ctx,
-			func() bool { return len(permissions) == len(restoreFolderPerm) },
-			fmt.Sprintf("wrong number of restored permissions: %s", folderName),
-			permissions,
-			restoreFolderPerm)
-
-		for i, perm := range permissions {
-			// permissions should be sorted, so a by-index comparison works
-			restored := restoreFolderPerm[i]
-
-			assert(
-				ctx,
-				func() bool { return strings.EqualFold(perm.entityID, restored.entityID) },
-				fmt.Sprintf("non-matching entity id: %s", folderName),
-				perm.entityID,
-				restored.entityID)
-
-			assert(
-				ctx,
-				func() bool { return slices.Equal(perm.roles, restored.roles) },
-				fmt.Sprintf("different roles restored: %s", folderName),
-				perm.roles,
-				restored.roles)
-		}
-	}
+	checkRestoredDriveItemPermissions(
+		ctx,
+		service,
+		skipPermissionTest,
+		folderPermissions,
+		restoredFolderPermissions)

	for fileName, expected := range fileSizes {
+		logAndPrint(ctx, "checking for file: %s", fileName)
+
		got := restoreFile[fileName]

		assert(
@ -428,6 +460,69 @@ func checkOnedriveRestoration(
fmt.Println("Success") fmt.Println("Success")
} }
+
+func checkRestoredDriveItemPermissions(
+	ctx context.Context,
+	service path.ServiceType,
+	skip bool,
+	folderPermissions map[string][]permissionInfo,
+	restoredFolderPermissions map[string][]permissionInfo,
+) {
+	if skip {
+		return
+	}
+
+	/**
+	TODO: replace this check with testElementsMatch
+	from internal/connecter/graph_connector_helper_test.go
+	**/
+
+	for folderName, permissions := range folderPermissions {
+		logAndPrint(ctx, "checking for folder: %s", folderName)
+
+		restoreFolderPerm := restoredFolderPermissions[folderName]
+
+		if len(permissions) < 1 {
+			logAndPrint(ctx, "no permissions found in: %s", folderName)
+			continue
+		}
+
+		permCheck := func() bool { return len(permissions) == len(restoreFolderPerm) }
+
+		if service == path.SharePointService {
+			permCheck = func() bool { return len(permissions) <= len(restoreFolderPerm) }
+		}
+
+		assert(
+			ctx,
+			permCheck,
+			fmt.Sprintf("wrong number of restored permissions: %s", folderName),
+			permissions,
+			restoreFolderPerm)
+
+		for _, perm := range permissions {
+			eqID := func(pi permissionInfo) bool { return strings.EqualFold(pi.entityID, perm.entityID) }
+			i := slices.IndexFunc(restoreFolderPerm, eqID)
+
+			assert(
+				ctx,
+				func() bool { return i >= 0 },
+				fmt.Sprintf("permission was restored in: %s", folderName),
+				perm.entityID,
+				restoreFolderPerm)
+
+			// permissions should be sorted, so a by-index comparison works
+			restored := restoreFolderPerm[i]
+
+			assert(
+				ctx,
+				func() bool { return slices.Equal(perm.roles, restored.roles) },
+				fmt.Sprintf("different roles restored: %s", folderName),
+				perm.roles,
+				restored.roles)
+		}
+	}
+}
func getOneDriveChildFolder(
	ctx context.Context,
	client *msgraphsdk.GraphServiceClient,
@ -436,7 +531,7 @@ func getOneDriveChildFolder(
	folderPermission map[string][]permissionInfo,
	startTime time.Time,
) {
-	response, err := client.DrivesById(driveID).ItemsById(itemID).Children().Get(ctx, nil)
+	response, err := client.Drives().ByDriveId(driveID).Items().ByDriveItemId(itemID).Children().Get(ctx, nil)
	if err != nil {
		fatal(ctx, "getting child folder", err)
	}
@ -465,8 +560,7 @@ func getOneDriveChildFolder(
		// currently we don't restore blank folders.
		// skip permission check for empty folders
		if ptr.Val(driveItem.GetFolder().GetChildCount()) == 0 {
-			logger.Ctx(ctx).Info("skipped empty folder: ", fullName)
-			fmt.Println("skipped empty folder: ", fullName)
+			logAndPrint(ctx, "skipped empty folder: %s", fullName)
			continue
		}
@ -485,8 +579,10 @@ func getRestoredDrive(
	startTime time.Time,
) {
	restored, err := client.
-		DrivesById(driveID).
-		ItemsById(restoreFolderID).
+		Drives().
+		ByDriveId(driveID).
+		Items().
+		ByDriveItemId(restoreFolderID).
		Children().
		Get(ctx, nil)
	if err != nil {
@ -526,8 +622,10 @@ func permissionIn(
	pi := []permissionInfo{}

	pcr, err := client.
-		DrivesById(driveID).
-		ItemsById(itemID).
+		Drives().
+		ByDriveId(driveID).
+		Items().
+		ByDriveItemId(itemID).
		Permissions().
		Get(ctx, nil)
	if err != nil {
@ -545,6 +643,7 @@ func permissionIn(
		entityID string
	)

+	// TODO: replace with filterUserPermissions in onedrive item.go
	if gv2.GetUser() != nil {
		entityID = ptr.Val(gv2.GetUser().GetId())
	} else if gv2.GetGroup() != nil {
@ -577,12 +676,12 @@ func fatal(ctx context.Context, msg string, err error) {
}

func mustGetTimeFromName(ctx context.Context, name string) (time.Time, bool) {
-	t, err := common.ExtractTime(name)
-	if err != nil && !errors.Is(err, common.ErrNoTimeString) {
+	t, err := dttm.ExtractTime(name)
+	if err != nil && !errors.Is(err, dttm.ErrNoTimeString) {
		fatal(ctx, "extracting time from name: "+name, err)
	}

-	return t, !errors.Is(err, common.ErrNoTimeString)
+	return t, !errors.Is(err, dttm.ErrNoTimeString)
}

func isWithinTimeBound(ctx context.Context, bound, check time.Time, hasTime bool) bool {
@ -633,3 +732,8 @@ func assert(
	os.Exit(1)
}
+
+func logAndPrint(ctx context.Context, tmpl string, vs ...any) {
+	logger.Ctx(ctx).Infof(tmpl, vs...)
+	fmt.Printf(tmpl+"\n", vs...)
+}
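The `Drives().ByDriveId(...)` chains above follow the request-builder style of msgraph-sdk-go v1, which this commit adopts in place of the v0 `DrivesById(...)` shorthand. A minimal sketch of the new call shape, assuming a helper named `listChildren` that is illustrative and not part of this commit:

package example

import (
	"context"

	msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
	"github.com/microsoftgraph/msgraph-sdk-go/models"
)

// listChildren (hypothetical) shows the v1 builder chain used throughout this
// file: each *ById(id) shorthand becomes a collection builder plus a By*Id(id)
// step, e.g. DrivesById(d) -> Drives().ByDriveId(d).
func listChildren(
	ctx context.Context,
	client *msgraphsdk.GraphServiceClient,
	driveID, itemID string,
) ([]models.DriveItemable, error) {
	resp, err := client.
		Drives().
		ByDriveId(driveID).
		Items().
		ByDriveItemId(itemID).
		Children().
		Get(ctx, nil)
	if err != nil {
		return nil, err
	}

	return resp.GetValue(), nil
}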
View File
@ -2,25 +2,25 @@ module github.com/alcionai/corso/src

go 1.19

-replace github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20230417220734-efdcd8c54f7f
+replace github.com/kopia/kopia => github.com/alcionai/kopia v0.12.2-0.20230502235504-2509b1d72a79

require (
	github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0
	github.com/alcionai/clues v0.0.0-20230406223931-f48777f4773c
	github.com/armon/go-metrics v0.4.1
-	github.com/aws/aws-sdk-go v1.44.245
+	github.com/aws/aws-sdk-go v1.44.264
	github.com/aws/aws-xray-sdk-go v1.8.1
	github.com/cenkalti/backoff/v4 v4.2.1
	github.com/google/uuid v1.3.0
	github.com/h2non/gock v1.2.0
	github.com/kopia/kopia v0.12.2-0.20230327171220-747baeebdab1
-	github.com/microsoft/kiota-abstractions-go v0.18.0
-	github.com/microsoft/kiota-authentication-azure-go v0.6.0
-	github.com/microsoft/kiota-http-go v0.16.1
-	github.com/microsoft/kiota-serialization-form-go v0.8.2
-	github.com/microsoft/kiota-serialization-json-go v0.8.2
-	github.com/microsoftgraph/msgraph-sdk-go v0.53.0
-	github.com/microsoftgraph/msgraph-sdk-go-core v0.33.0
+	github.com/microsoft/kiota-abstractions-go v1.0.0
+	github.com/microsoft/kiota-authentication-azure-go v1.0.0
+	github.com/microsoft/kiota-http-go v1.0.0
+	github.com/microsoft/kiota-serialization-form-go v1.0.0
+	github.com/microsoft/kiota-serialization-json-go v1.0.0
+	github.com/microsoftgraph/msgraph-sdk-go v1.1.0
+	github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0
	github.com/pkg/errors v0.9.1
	github.com/rudderlabs/analytics-go v3.3.3+incompatible
	github.com/spatialcurrent/go-lazy v0.0.0-20211115014721-47315cc003d1
@ -33,9 +33,8 @@ require (
	github.com/vbauerster/mpb/v8 v8.1.6
	go.uber.org/zap v1.24.0
	golang.org/x/exp v0.0.0-20230213192124-5e25df0256eb
-	golang.org/x/time v0.1.0
-	golang.org/x/tools v0.8.0
-	gopkg.in/resty.v1 v1.12.0
+	golang.org/x/time v0.3.0
+	golang.org/x/tools v0.9.1
)

require (
@ -44,6 +43,7 @@ require (
	github.com/andybalholm/brotli v1.0.4 // indirect
	github.com/dnaeon/go-vcr v1.2.0 // indirect
	github.com/fsnotify/fsnotify v1.6.0 // indirect
+	github.com/gofrs/flock v0.8.1 // indirect
	github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect
	github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
	github.com/hashicorp/hcl v1.0.0 // indirect
@ -59,8 +59,8 @@ require (
)

require (
-	github.com/Azure/azure-sdk-for-go/sdk/azcore v1.3.0 // indirect
-	github.com/Azure/azure-sdk-for-go/sdk/internal v1.1.2 // indirect
+	github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0 // indirect
+	github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 // indirect
	github.com/AzureAD/microsoft-authentication-library-for-go v0.7.0 // indirect
	github.com/beorn7/perks v1.0.1 // indirect
	github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 // indirect
@ -70,7 +70,7 @@ require (
	github.com/davecgh/go-spew v1.1.1 // indirect
	github.com/dustin/go-humanize v1.0.1
	github.com/edsrzf/mmap-go v1.1.0 // indirect
-	github.com/go-logr/logr v1.2.3 // indirect
+	github.com/go-logr/logr v1.2.4 // indirect
	github.com/go-logr/stdr v1.2.2 // indirect
	github.com/golang-jwt/jwt/v4 v4.5.0 // indirect
	github.com/golang/protobuf v1.5.3 // indirect
@ -78,7 +78,7 @@ require (
	github.com/inconshreveable/mousetrap v1.1.0 // indirect
	github.com/jmespath/go-jmespath v0.4.0 // indirect
	github.com/json-iterator/go v1.1.12 // indirect
-	github.com/klauspost/compress v1.16.4 // indirect
+	github.com/klauspost/compress v1.16.5 // indirect
	github.com/klauspost/cpuid/v2 v2.2.4 // indirect
	github.com/klauspost/pgzip v1.2.5 // indirect
	github.com/klauspost/reedsolomon v1.11.7 // indirect
@ -88,7 +88,7 @@ require (
	github.com/mattn/go-runewidth v0.0.14 // indirect
	github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
	github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
-	github.com/microsoft/kiota-serialization-text-go v0.7.0
+	github.com/microsoft/kiota-serialization-text-go v1.0.0
	github.com/minio/md5-simd v1.1.2 // indirect
	github.com/minio/minio-go/v7 v7.0.52 // indirect
	github.com/minio/sha256-simd v1.0.0 // indirect
@ -111,17 +111,17 @@ require (
	github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c // indirect
	github.com/yosida95/uritemplate/v3 v3.0.2 // indirect
	github.com/zeebo/blake3 v0.2.3 // indirect
-	go.opentelemetry.io/otel v1.14.0 // indirect
-	go.opentelemetry.io/otel/trace v1.14.0 // indirect
+	go.opentelemetry.io/otel v1.15.1 // indirect
+	go.opentelemetry.io/otel/trace v1.15.1 // indirect
	go.uber.org/atomic v1.10.0 // indirect
	go.uber.org/multierr v1.11.0 // indirect
	golang.org/x/crypto v0.8.0 // indirect
	golang.org/x/mod v0.10.0 // indirect
-	golang.org/x/net v0.9.0 // indirect
-	golang.org/x/sync v0.1.0 // indirect
-	golang.org/x/sys v0.7.0 // indirect
+	golang.org/x/net v0.10.0 // indirect
+	golang.org/x/sync v0.2.0 // indirect
+	golang.org/x/sys v0.8.0 // indirect
	golang.org/x/text v0.9.0 // indirect
-	google.golang.org/genproto v0.0.0-20230403163135-c38d8f061ccd // indirect
+	google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect
	google.golang.org/grpc v1.54.0 // indirect
	google.golang.org/protobuf v1.30.0 // indirect
	gopkg.in/ini.v1 v1.67.0 // indirect
View File
@ -36,12 +36,12 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
-github.com/Azure/azure-sdk-for-go/sdk/azcore v1.3.0 h1:VuHAcMq8pU1IWNT/m5yRaGqbK0BiQKHT8X4DTp9CHdI=
-github.com/Azure/azure-sdk-for-go/sdk/azcore v1.3.0/go.mod h1:tZoQYdDZNOiIjdSn0dVWVfl0NEPGOJqVLzSrcFk4Is0=
+github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0 h1:8kDqDngH+DmVBiCtIjCFTGa7MBnsIOkF9IccInFEbjk=
+github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0 h1:t/W5MYAuQy81cvM8VUNfRLzhtKpXhVUAN7Cd7KVbTyc=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0/go.mod h1:NBanQUfSWiWn3QEpWDTCU0IjBECKOYvl2R8xdRtMtiM=
-github.com/Azure/azure-sdk-for-go/sdk/internal v1.1.2 h1:+5VZ72z0Qan5Bog5C+ZkgSqUbeVUd9wgtHOrIKuc5b8=
-github.com/Azure/azure-sdk-for-go/sdk/internal v1.1.2/go.mod h1:eWRD7oawr1Mu1sLCawqVc0CUiF43ia3qQMxLscsKQ9w=
+github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 h1:sXr+ck84g/ZlZUOZiNELInmMgOsuGwdjjVkEIde0OtY=
+github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0/go.mod h1:okt5dMMTOFjX/aovMlrjvvXoPMBVSPzk9185BT0+eZM=
github.com/AzureAD/microsoft-authentication-library-for-go v0.7.0 h1:VgSJlZH5u0k2qxSpqyghcFQKmvYckj46uymKK5XzkBM=
github.com/AzureAD/microsoft-authentication-library-for-go v0.7.0/go.mod h1:BDJ5qMFKx9DugEg3+uQSDCdbYPr5s9vBTrL9P8TpqOU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
@ -55,8 +55,8 @@ github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpH
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo=
github.com/alcionai/clues v0.0.0-20230406223931-f48777f4773c h1:Njdw/Nnq2DN3f8QMaHuZZHdVHTUSxFqPMMxDIInDWB4=
github.com/alcionai/clues v0.0.0-20230406223931-f48777f4773c/go.mod h1:DeaMbAwDvYM6ZfPMR/GUl3hceqI5C8jIQ1lstjB2IW8=
-github.com/alcionai/kopia v0.12.2-0.20230417220734-efdcd8c54f7f h1:cD7mcWVTEu83qX6Ml3aqgo8DDv+fBZt/7mQQps2TokM=
-github.com/alcionai/kopia v0.12.2-0.20230417220734-efdcd8c54f7f/go.mod h1:eTgZSDaU2pDzVGC7QRubbKOeohvHzzbRXvhZMH+AGHA=
+github.com/alcionai/kopia v0.12.2-0.20230502235504-2509b1d72a79 h1:Wrl99Y7jftZMnNDiOIcRJrjstZO3IEj3+Q/sip27vmI=
+github.com/alcionai/kopia v0.12.2-0.20230502235504-2509b1d72a79/go.mod h1:Iic7CcKhsq+A7MLR9hh6VJfgpcJhLx3Kn+BgjY+azvI=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@ -66,8 +66,8 @@ github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
-github.com/aws/aws-sdk-go v1.44.245 h1:KtY2s4q31/kn33AdV63R5t77mdxsI7rq3YT7Mgo805M=
-github.com/aws/aws-sdk-go v1.44.245/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
+github.com/aws/aws-sdk-go v1.44.264 h1:5klL62ebn6uv3oJ0ixF7K12hKItj8lV3QqWeQPlkFSs=
+github.com/aws/aws-sdk-go v1.44.264/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo=
github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A=
github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=
@ -124,13 +124,14 @@ github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
-github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0=
-github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
+github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ=
+github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw=
+github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg=
github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
@ -202,7 +203,7 @@ github.com/h2non/gock v1.2.0 h1:K6ol8rfrRkUOefooBC8elXoaNGYkpp7y2qcxGG6BzUE=
github.com/h2non/gock v1.2.0/go.mod h1:tNhoxHYW2W42cYkYb1WqzdbYIieALC99kpYr7rH/BQk=
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw=
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI=
-github.com/hanwen/go-fuse/v2 v2.2.0 h1:jo5QZYmBLNcl9ovypWaQ5yXMSSV+Ch68xoC3rtZvvBM=
+github.com/hanwen/go-fuse/v2 v2.3.0 h1:t5ivNIH2PK+zw4OBul/iJjsoG9K6kXo4nMDoBpciC8A=
github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc=
@ -233,8 +234,8 @@ github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/X
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.15.0/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
-github.com/klauspost/compress v1.16.4 h1:91KN02FnsOYhuunwU4ssRe8lc2JosWmizWa91B5v1PU=
-github.com/klauspost/compress v1.16.4/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
+github.com/klauspost/compress v1.16.5 h1:IFV2oUNUzZaz+XyusxpLzpzS8Pt5rh0Z16For/djlyI=
+github.com/klauspost/compress v1.16.5/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.0.12/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c=
@ -272,22 +273,22 @@ github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zk
github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
-github.com/microsoft/kiota-abstractions-go v0.18.0 h1:H1kQE5hAq/7Q8gENPJ1Y7DuvG9QqKCpglN8D7TJi9qY=
-github.com/microsoft/kiota-abstractions-go v0.18.0/go.mod h1:0lbPErVO6Rj3HHpntNYW/OFmHhJJ1ewPdsi1xPxYIMc=
+github.com/microsoft/kiota-abstractions-go v1.0.0 h1:teQS3yOmcTyps+O48AD17LI8TR1B3wCEwGFcwC6K75c=
+github.com/microsoft/kiota-abstractions-go v1.0.0/go.mod h1:2yaRQnx2KU7UaenYSApiTT4pf7fFkPV0B71Rm2uYynQ=
-github.com/microsoft/kiota-authentication-azure-go v0.6.0 h1:Il9bLO34J6D8DY89xYAXoGh9muvlphayqG4eihyT6B8=
-github.com/microsoft/kiota-authentication-azure-go v0.6.0/go.mod h1:EJCHiLWLXW1/mSgX7lYReAhVO37MzRT5Xi2mcPTwCRQ=
+github.com/microsoft/kiota-authentication-azure-go v1.0.0 h1:29FNZZ/4nnCOwFcGWlB/sxPvWz487HA2bXH8jR5k2Rk=
+github.com/microsoft/kiota-authentication-azure-go v1.0.0/go.mod h1:rnx3PRlkGdXDcA/0lZQTbBwyYGmc+3POt7HpE/e4jGw=
-github.com/microsoft/kiota-http-go v0.16.1 h1:5SZbSwHs14Xve5VMQHHz00lwL/kEg3H9rgESAUrXnvw=
-github.com/microsoft/kiota-http-go v0.16.1/go.mod h1:pKSaeSaBwh3Zadbnzw3kALEZbCZA1gq7A5PuxwVd/aU=
+github.com/microsoft/kiota-http-go v1.0.0 h1:F1hd6gMlLeEgH2CkRB7z13ow7LxMKMWEmms/t0VfS+k=
+github.com/microsoft/kiota-http-go v1.0.0/go.mod h1:eujxJliqodotsYepIc6ihhK+vXMMt5Q8YiSNL7+7M7U=
-github.com/microsoft/kiota-serialization-form-go v0.8.2 h1:qrkJGBObZo0NjJwwbT3lUySjaLKqjz+r4TQGQTX/C/c=
-github.com/microsoft/kiota-serialization-form-go v0.8.2/go.mod h1:FQqYzIrGX6KUoDOlg+DhDWoGaZoB8AicBYGOsBq0Dw4=
+github.com/microsoft/kiota-serialization-form-go v1.0.0 h1:UNdrkMnLFqUCccQZerKjblsyVgifS11b3WCx+eFEsAI=
+github.com/microsoft/kiota-serialization-form-go v1.0.0/go.mod h1:h4mQOO6KVTNciMF6azi1J9QB19ujSw3ULKcSNyXXOMA=
-github.com/microsoft/kiota-serialization-json-go v0.8.2 h1:vLKZAOiMsaUxq36RDo3S/FfQbW2VQCdAIu4DS7+Qhrk=
-github.com/microsoft/kiota-serialization-json-go v0.8.2/go.mod h1:gGcLNSdIdOZ4la2qztA0vaCq/LtlA53gpP+ur8n/+oA=
+github.com/microsoft/kiota-serialization-json-go v1.0.0 h1:snT+SwS/R4CMjkmj7mjCHrmib2nKWqGvUWaedgliMbI=
+github.com/microsoft/kiota-serialization-json-go v1.0.0/go.mod h1:psfgIfqWm/9P1JAdl2cxHHIg9SdEtYHOetfDLIQ5/dw=
-github.com/microsoft/kiota-serialization-text-go v0.7.0 h1:uayeq8fpDcZgL0zDyLkYZsH6zNnEXKgp+bRWfR5LcxA=
-github.com/microsoft/kiota-serialization-text-go v0.7.0/go.mod h1:2su1PTllHCMNkHugmvpYad+AKBXUUGoiNP3xOAJUL7w=
+github.com/microsoft/kiota-serialization-text-go v1.0.0 h1:XOaRhAXy+g8ZVpcq7x7a0jlETWnWrEum0RhmbYrTFnA=
+github.com/microsoft/kiota-serialization-text-go v1.0.0/go.mod h1:sM1/C6ecnQ7IquQOGUrUldaO5wj+9+v7G2W3sQ3fy6M=
-github.com/microsoftgraph/msgraph-sdk-go v0.53.0 h1:HpQd1Nvr8yQNeqhDuiVSbqn1fkHsFbRFDmnuhhXJXOQ=
-github.com/microsoftgraph/msgraph-sdk-go v0.53.0/go.mod h1:BZLyon4n4T4EuLIAlX+kJ5JgneFTXVQDah1AJuq3FRY=
+github.com/microsoftgraph/msgraph-sdk-go v1.1.0 h1:NtFsFVIt8lpXcTlRbLG1WuCOTzltzS5j+U8Fecqdnr4=
+github.com/microsoftgraph/msgraph-sdk-go v1.1.0/go.mod h1:NIk9kSn7lQ5Hnhhn3FM4NrJWz54JfDHD0JvhJZky27g=
-github.com/microsoftgraph/msgraph-sdk-go-core v0.33.0 h1:cDL3ov/IZ2ZarUJdGGPsdR+46ALdd3CRAiDBIylLCoA=
-github.com/microsoftgraph/msgraph-sdk-go-core v0.33.0/go.mod h1:d0mU3PQAWnN/C4CwPJEZz2QhesrnR5UDnqRu2ODWPkI=
+github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0 h1:7NWTfyXvOjoizW7PmxNp3+8wCKPgpODs/D1cUZ3fkAY=
+github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0/go.mod h1:tQb4q3YMIj2dWhhXhQSJ4ELpol931ANKzHSYK5kX1qE=
github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
github.com/minio/minio-go/v7 v7.0.52 h1:8XhG36F6oKQUDDSuz6dY3rioMzovKjW40W6ANuN0Dps=
@ -430,10 +431,10 @@ go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
-go.opentelemetry.io/otel v1.14.0 h1:/79Huy8wbf5DnIPhemGB+zEPVwnN6fuQybr/SRXa6hM=
-go.opentelemetry.io/otel v1.14.0/go.mod h1:o4buv+dJzx8rohcUeRmWUZhqupFvzWis188WlggnNeU=
+go.opentelemetry.io/otel v1.15.1 h1:3Iwq3lfRByPaws0f6bU3naAqOR1n5IeDWd9390kWHa8=
+go.opentelemetry.io/otel v1.15.1/go.mod h1:mHHGEHVDLal6YrKMmk9LqC4a3sF5g+fHfrttQIB1NTc=
-go.opentelemetry.io/otel/trace v1.14.0 h1:wp2Mmvj41tDsyAJXiWDWpfNsOiIyd38fy85pyKcFq/M=
-go.opentelemetry.io/otel/trace v1.14.0/go.mod h1:8avnQLK+CG77yNLUae4ea2JDQ6iT+gozhnZjy/rw9G8=
+go.opentelemetry.io/otel/trace v1.15.1 h1:uXLo6iHJEzDfrNC0L0mNjItIp06SyaBQxu5t3xMlngY=
+go.opentelemetry.io/otel/trace v1.15.1/go.mod h1:IWdQG/5N1x7f6YUlmdLeJvH9yxtuJAfc4VW5Agv9r/8=
go.uber.org/atomic v1.10.0 h1:9qC72Qh0+3MqyJbAn8YU5xVq1frD8bn3JtD2oXtafVQ=
go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI=
@ -494,7 +495,6 @@ golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
@ -529,8 +529,8 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx
golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
-golang.org/x/net v0.9.0 h1:aWJ/m6xSmxWBx+V0XRHTlrYrPG56jKsLdTFmsSsCzOM=
-golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
+golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M=
+golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@ -551,8 +551,8 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
-golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.2.0 h1:PUR+T4wwASmuSTYdKjYHI5TD22Wy5ogLU5qZCOLxBrI=
+golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -603,8 +603,8 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.7.0 h1:3jlCCIQZPdOYu1h8BkNvLz8Kgwtae2cagcG/VamtZRU=
-golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU=
+golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@ -622,8 +622,8 @@ golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
-golang.org/x/time v0.1.0 h1:xYY+Bajn2a7VBmTM5GikTmnK8ZuX8YgnQCqZpbBNtmA=
-golang.org/x/time v0.1.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4=
+golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
@ -672,8 +672,8 @@ golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4f
golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
-golang.org/x/tools v0.8.0 h1:vSDcovVPld282ceKgDimkRSC8kpaH1dgyc9UMzlt84Y=
-golang.org/x/tools v0.8.0/go.mod h1:JxBZ99ISMI5ViVkT1tr6tdNmXeTrcpVSD3vZ1RsRdN4=
+golang.org/x/tools v0.9.1 h1:8WMNJAz3zrtPmnYC7ISf5dEn3MT0gY7jBJfw27yrrLo=
+golang.org/x/tools v0.9.1/go.mod h1:owI94Op576fPu3cIGQeHs3joujW/2Oc6MtlxbF5dfNc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@ -740,8 +740,8 @@ google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6D
google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto v0.0.0-20230403163135-c38d8f061ccd h1:sLpv7bNL1AsX3fdnWh9WVh7ejIzXdOc1RRHGeAmeStU=
-google.golang.org/genproto v0.0.0-20230403163135-c38d8f061ccd/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak=
+google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 h1:KpwkzHKEF7B9Zxg18WzOa7djJ+Ha5DzthMyZYQfEn2A=
+google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
@ -782,8 +782,6 @@ gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntN
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
-gopkg.in/resty.v1 v1.12.0 h1:CuXP0Pjfw9rOuY6EP+UvtNvt5DSqHpIxILZKT/quCZI=
-gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
View File
@ -5,6 +5,7 @@ import (
	"fmt"
	"runtime"
	"runtime/debug"
+	"strings"

	"github.com/alcionai/clues"
@ -22,31 +23,46 @@ import (
//		err = crErr // err needs to be a named return variable
//	}
// }()
-func Recovery(ctx context.Context, r any) error {
+func Recovery(ctx context.Context, r any, namespace string) error {
	var (
		err    error
		inFile string
+		j      int
	)

-	if r != nil {
-		if re, ok := r.(error); ok {
-			err = re
-		} else if re, ok := r.(string); ok {
-			err = clues.New(re)
-		} else {
-			err = clues.New(fmt.Sprintf("%v", r))
-		}
-
-		_, file, _, ok := runtime.Caller(3)
-		if ok {
-			inFile = " in file: " + file
-		}
-
-		err = clues.Wrap(err, "panic recovery"+inFile).
-			WithClues(ctx).
-			With("stacktrace", string(debug.Stack()))
-		logger.CtxErr(ctx, err).Error("backup panic")
+	if r == nil {
+		return nil
	}

+	if re, ok := r.(error); ok {
+		err = re
+	} else if re, ok := r.(string); ok {
+		err = clues.New(re)
+	} else {
+		err = clues.New(fmt.Sprintf("%v", r))
+	}
+
+	for i := 1; i < 10; i++ {
+		_, file, line, ok := runtime.Caller(i)
+		if j > 0 {
+			if !strings.Contains(file, "panic.go") {
+				inFile = fmt.Sprintf(": file %s - line %d", file, line)
+				break
+			}
+
+			j = 0
+		}
+
+		// skip the location where Recovery() gets called.
+		if j == 0 && ok && !strings.Contains(file, "panic.go") && !strings.Contains(file, "crash.go") {
+			j++
+		}
+	}
+
+	err = clues.Wrap(err, "panic recovery"+inFile).
+		WithClues(ctx).
+		With("stacktrace", string(debug.Stack()))
+	logger.CtxErr(ctx, err).Error(namespace + " panic")
+
	return err
}
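For context, a rough sketch of the call pattern the new signature expects, following the defer example in the doc comment above; the surrounding runBackup function is illustrative and the crash import path is inferred from the test file, not shown in this diff:

package example

import (
	"context"

	// Assumed import path; the diff only shows crash.Recovery being called.
	"github.com/alcionai/corso/src/internal/common/crash"
)

// runBackup is a hypothetical caller, not part of this commit.
func runBackup(ctx context.Context) (err error) {
	defer func() {
		// Recovery now returns nil when recover() yields nil, so the guard is
		// safe, and the namespace argument ("backup" here) replaces the
		// previously hard-coded "backup panic" log prefix.
		if crErr := crash.Recovery(ctx, recover(), "backup"); crErr != nil {
			err = crErr // err needs to be a named return variable
		}
	}()

	// ... work that may panic ...
	return nil
}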
View File
@ -52,7 +52,7 @@ func (suite *CrashTestDummySuite) TestRecovery() {
			ctx, flush := tester.NewContext()

			defer func() {
-				err := crash.Recovery(ctx, recover())
+				err := crash.Recovery(ctx, recover(), "test")
				test.expect(t, err, clues.ToCore(err))
				flush()
			}()
View File
@ -1,4 +1,4 @@
-package common
+package dttm

import (
	"regexp"
@ -10,8 +10,8 @@ import (
type TimeFormat string

const (
-	// StandardTime is the canonical format used for all data storage in corso
-	StandardTime TimeFormat = time.RFC3339Nano
+	// Standard is the canonical format used for all data storage in corso
+	Standard TimeFormat = time.RFC3339Nano

	// DateOnly is accepted by the CLI as a valid input for timestamp-based
	// filters. Time and timezone are assumed to be 00:00:00 and UTC.
@ -21,23 +21,23 @@ const (
	// non-json cli outputs.
	TabularOutput TimeFormat = "2006-01-02T15:04:05Z"

-	// LegacyTime is used in /exchange/service_restore to comply with certain
+	// Legacy is used in /exchange/service_restore to comply with certain
	// graphAPI time format requirements.
-	LegacyTime TimeFormat = time.RFC3339
+	Legacy TimeFormat = time.RFC3339

-	// SimpleDateTime is the default value appended to the root restoration folder name.
-	SimpleDateTime TimeFormat = "02-Jan-2006_15:04:05"
+	// HumanReadable is the default value appended to the root restoration folder name.
+	HumanReadable TimeFormat = "02-Jan-2006_15:04:05"

-	// SimpleDateTimeOneDrive modifies SimpleDateTimeFormat to comply with onedrive folder
+	// HumanReadableDriveItem modifies SimpleDateTimeFormat to comply with onedrive folder
	// restrictions: primarily swapping `-` instead of `:` which is a reserved character.
-	SimpleDateTimeOneDrive TimeFormat = "02-Jan-2006_15-04-05"
+	HumanReadableDriveItem TimeFormat = "02-Jan-2006_15-04-05"

	// m365 will remove the :00 second suffix on folder names, resulting in the following formats.
-	ClippedSimple TimeFormat = "02-Jan-2006_15:04"
-	ClippedSimpleOneDrive TimeFormat = "02-Jan-2006_15-04"
+	ClippedHuman TimeFormat = "02-Jan-2006_15:04"
+	ClippedHumanDriveItem TimeFormat = "02-Jan-2006_15-04"

-	// SimpleTimeTesting is used for testing restore destination folders.
+	// SafeForTesting is used for testing restore destination folders.
	// Microsecond granularity prevents collisions in parallel package or workflow runs.
-	SimpleTimeTesting TimeFormat = SimpleDateTimeOneDrive + ".000000"
+	SafeForTesting TimeFormat = HumanReadableDriveItem + ".000000"

	// M365dateTimeTimeZoneTimeFormat is the format used by M365 for datetimetimezone resource
	// https://learn.microsoft.com/en-us/graph/api/resources/datetimetimezone?view=graph-rest-1.0
@ -48,42 +48,42 @@ const (
// identify the folders produced in external data during automated testing. For safety, each
// time format described above should have a matching regexp.
var (
-	clippedSimpleRE = regexp.MustCompile(`.*(\d{2}-[a-zA-Z]{3}-\d{4}_\d{2}:\d{2}).*`)
-	clippedSimpleOneDriveRE = regexp.MustCompile(`.*(\d{2}-[a-zA-Z]{3}-\d{4}_\d{2}-\d{2}).*`)
+	clippedHumanRE = regexp.MustCompile(`.*(\d{2}-[a-zA-Z]{3}-\d{4}_\d{2}:\d{2}).*`)
+	clippedHumanOneDriveRE = regexp.MustCompile(`.*(\d{2}-[a-zA-Z]{3}-\d{4}_\d{2}-\d{2}).*`)
	dateOnlyRE = regexp.MustCompile(`.*(\d{4}-\d{2}-\d{2}).*`)
-	legacyTimeRE = regexp.MustCompile(
+	legacyRE = regexp.MustCompile(
		`.*(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}?([Zz]|[a-zA-Z]{2}|([\+|\-]([01]\d|2[0-3])))).*`)
-	simpleTimeTestingRE = regexp.MustCompile(`.*(\d{2}-[a-zA-Z]{3}-\d{4}_\d{2}-\d{2}-\d{2}.\d{6}).*`)
-	simpleDateTimeRE = regexp.MustCompile(`.*(\d{2}-[a-zA-Z]{3}-\d{4}_\d{2}:\d{2}:\d{2}).*`)
-	simpleDateTimeOneDriveRE = regexp.MustCompile(`.*(\d{2}-[a-zA-Z]{3}-\d{4}_\d{2}-\d{2}-\d{2}).*`)
+	SafeForTestingRE = regexp.MustCompile(`.*(\d{2}-[a-zA-Z]{3}-\d{4}_\d{2}-\d{2}-\d{2}.\d{6}).*`)
+	HumanReadableRE = regexp.MustCompile(`.*(\d{2}-[a-zA-Z]{3}-\d{4}_\d{2}:\d{2}:\d{2}).*`)
+	HumanReadableOneDriveRE = regexp.MustCompile(`.*(\d{2}-[a-zA-Z]{3}-\d{4}_\d{2}-\d{2}-\d{2}).*`)
-	standardTimeRE = regexp.MustCompile(
+	standardRE = regexp.MustCompile(
		`.*(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?([Zz]|[a-zA-Z]{2}|([\+|\-]([01]\d|2[0-3])))).*`)
-	tabularOutputTimeRE = regexp.MustCompile(`.*(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}([Zz]|[a-zA-Z]{2})).*`)
+	tabularOutputRE = regexp.MustCompile(`.*(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}([Zz]|[a-zA-Z]{2})).*`)
)

var (
	// shortened formats (clipped*, DateOnly) must follow behind longer formats, otherwise they'll
	// get eagerly chosen as the parsable format, slicing out some data.
	formats = []TimeFormat{
-		StandardTime,
-		SimpleTimeTesting,
-		SimpleDateTime,
-		SimpleDateTimeOneDrive,
-		LegacyTime,
+		Standard,
+		SafeForTesting,
+		HumanReadable,
+		HumanReadableDriveItem,
+		Legacy,
		TabularOutput,
-		ClippedSimple,
-		ClippedSimpleOneDrive,
+		ClippedHuman,
+		ClippedHumanDriveItem,
		DateOnly,
	}

	regexes = []*regexp.Regexp{
-		standardTimeRE,
-		simpleTimeTestingRE,
-		simpleDateTimeRE,
-		simpleDateTimeOneDriveRE,
-		legacyTimeRE,
-		tabularOutputTimeRE,
-		clippedSimpleRE,
-		clippedSimpleOneDriveRE,
+		standardRE,
+		SafeForTestingRE,
+		HumanReadableRE,
+		HumanReadableOneDriveRE,
+		legacyRE,
+		tabularOutputRE,
+		clippedHumanRE,
+		clippedHumanOneDriveRE,
		dateOnlyRE,
	}
)
@ -95,43 +95,43 @@ var (

// Now produces the current time as a string in the standard format.
func Now() string {
-	return FormatNow(StandardTime)
+	return FormatNow(Standard)
}

// FormatNow produces the current time in UTC using the provided
// time format.
func FormatNow(fmt TimeFormat) string {
-	return FormatTimeWith(time.Now(), fmt)
+	return FormatTo(time.Now(), fmt)
}

-// FormatTimeWith produces the a datetime with the given format.
-func FormatTimeWith(t time.Time, fmt TimeFormat) string {
+// FormatTo produces the a datetime with the given format.
+func FormatTo(t time.Time, fmt TimeFormat) string {
	return t.UTC().Format(string(fmt))
}

-// FormatTime produces the standard format for corso time values.
+// Format produces the standard format for corso time values.
// Always formats into the UTC timezone.
-func FormatTime(t time.Time) string {
-	return FormatTimeWith(t, StandardTime)
+func Format(t time.Time) string {
+	return FormatTo(t, Standard)
}

-// FormatSimpleDateTime produces a simple datetime of the format
+// FormatToHumanReadable produces a simple datetime of the format
// "02-Jan-2006_15:04:05"
-func FormatSimpleDateTime(t time.Time) string {
-	return FormatTimeWith(t, SimpleDateTime)
+func FormatToHumanReadable(t time.Time) string {
+	return FormatTo(t, HumanReadable)
}

-// FormatTabularDisplayTime produces the standard format for displaying
+// FormatToTabularDisplay produces the standard format for displaying
// a timestamp as part of user-readable cli output.
// "2016-01-02T15:04:05Z"
-func FormatTabularDisplayTime(t time.Time) string {
-	return FormatTimeWith(t, TabularOutput)
+func FormatToTabularDisplay(t time.Time) string {
+	return FormatTo(t, TabularOutput)
}

-// FormatLegacyTime produces standard format for string values
+// FormatToLegacy produces standard format for string values
// that are placed in SingleValueExtendedProperty tags
-func FormatLegacyTime(t time.Time) string {
-	return FormatTimeWith(t, LegacyTime)
+func FormatToLegacy(t time.Time) string {
+	return FormatTo(t, Legacy)
}

// ParseTime makes a best attempt to produce a time value from
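Taken together, these renames move the old `common` time helpers into a dedicated `dttm` package. A small sketch of the renamed surface in use, with illustrative values; every function and constant below appears in the diff above:

package main

import (
	"fmt"
	"time"

	"github.com/alcionai/corso/src/internal/common/dttm"
)

func main() {
	now := time.Now()

	fmt.Println(dttm.Format(now))                // was common.FormatTime (RFC3339Nano)
	fmt.Println(dttm.FormatToHumanReadable(now)) // was common.FormatSimpleDateTime
	// was common.FormatTimeWith with a named format constant:
	fmt.Println(dttm.FormatTo(now, dttm.ClippedHumanDriveItem))

	// Round-trip through any supported format.
	t, err := dttm.ParseTime(dttm.FormatToLegacy(now))
	if err != nil {
		panic(err)
	}

	fmt.Println(t.UTC())
}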
View File
@ -1,4 +1,4 @@
package common_test package dttm_test
import ( import (
"testing" "testing"
@ -9,65 +9,64 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
type CommonTimeUnitSuite struct { type DTTMUnitSuite struct {
tester.Suite tester.Suite
} }
func TestCommonTimeUnitSuite(t *testing.T) { func TestDTTMUnitSuite(t *testing.T) {
s := &CommonTimeUnitSuite{Suite: tester.NewUnitSuite(t)} suite.Run(t, &DTTMUnitSuite{Suite: tester.NewUnitSuite(t)})
suite.Run(t, s)
} }
func (suite *CommonTimeUnitSuite) TestFormatTime() { func (suite *DTTMUnitSuite) TestFormatTime() {
t := suite.T() t := suite.T()
now := time.Now() now := time.Now()
result := common.FormatTime(now) result := dttm.Format(now)
assert.Equal(t, now.UTC().Format(time.RFC3339Nano), result) assert.Equal(t, now.UTC().Format(time.RFC3339Nano), result)
} }
func (suite *CommonTimeUnitSuite) TestLegacyTime() { func (suite *DTTMUnitSuite) TestLegacyTime() {
t := suite.T() t := suite.T()
now := time.Now() now := time.Now()
result := common.FormatLegacyTime(now) result := dttm.FormatToLegacy(now)
assert.Equal(t, now.UTC().Format(time.RFC3339), result) assert.Equal(t, now.UTC().Format(time.RFC3339), result)
} }
func (suite *CommonTimeUnitSuite) TestFormatTabularDisplayTime() { func (suite *DTTMUnitSuite) TestFormatTabularDisplayTime() {
t := suite.T() t := suite.T()
now := time.Now() now := time.Now()
result := common.FormatTabularDisplayTime(now) result := dttm.FormatToTabularDisplay(now)
assert.Equal(t, now.UTC().Format(string(common.TabularOutput)), result) assert.Equal(t, now.UTC().Format(string(dttm.TabularOutput)), result)
} }
func (suite *CommonTimeUnitSuite) TestParseTime() { func (suite *DTTMUnitSuite) TestParseTime() {
t := suite.T() t := suite.T()
now := time.Now() now := time.Now()
nowStr := now.Format(time.RFC3339Nano) nowStr := now.Format(time.RFC3339Nano)
result, err := common.ParseTime(nowStr) result, err := dttm.ParseTime(nowStr)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, now.UTC(), result) assert.Equal(t, now.UTC(), result)
_, err = common.ParseTime("") _, err = dttm.ParseTime("")
require.Error(t, err, clues.ToCore(err)) require.Error(t, err, clues.ToCore(err))
_, err = common.ParseTime("flablabls") _, err = dttm.ParseTime("flablabls")
require.Error(t, err, clues.ToCore(err)) require.Error(t, err, clues.ToCore(err))
} }
func (suite *CommonTimeUnitSuite) TestExtractTime() { func (suite *DTTMUnitSuite) TestExtractTime() {
comparable := func(t *testing.T, tt time.Time, shortFormat common.TimeFormat) time.Time { comparable := func(t *testing.T, tt time.Time, shortFormat dttm.TimeFormat) time.Time {
ts := common.FormatLegacyTime(tt.UTC()) ts := dttm.FormatToLegacy(tt.UTC())
if len(shortFormat) > 0 { if len(shortFormat) > 0 {
ts = tt.UTC().Format(string(shortFormat)) ts = tt.UTC().Format(string(shortFormat))
} }
c, err := common.ParseTime(ts) c, err := dttm.ParseTime(ts)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@@ -92,16 +91,16 @@ func (suite *CommonTimeUnitSuite) TestExtractTime() {
parseT("2006-01-02T03:00:04-01:00"), parseT("2006-01-02T03:00:04-01:00"),
} }
formats := []common.TimeFormat{ formats := []dttm.TimeFormat{
common.ClippedSimple, dttm.ClippedHuman,
common.ClippedSimpleOneDrive, dttm.ClippedHumanDriveItem,
common.LegacyTime, dttm.Legacy,
common.SimpleDateTime, dttm.HumanReadable,
common.SimpleDateTimeOneDrive, dttm.HumanReadableDriveItem,
common.StandardTime, dttm.Standard,
common.TabularOutput, dttm.TabularOutput,
common.SimpleTimeTesting, dttm.SafeForTesting,
common.DateOnly, dttm.DateOnly,
} }
type presuf struct { type presuf struct {
@@ -118,7 +117,7 @@ func (suite *CommonTimeUnitSuite) TestExtractTime() {
type testable struct { type testable struct {
input string input string
clippedFormat common.TimeFormat clippedFormat dttm.TimeFormat
expect time.Time expect time.Time
} }
@@ -129,13 +128,13 @@ func (suite *CommonTimeUnitSuite) TestExtractTime() {
for _, f := range formats { for _, f := range formats {
shortFormat := f shortFormat := f
if f != common.ClippedSimple && if f != dttm.ClippedHuman &&
f != common.ClippedSimpleOneDrive && f != dttm.ClippedHumanDriveItem &&
f != common.DateOnly { f != dttm.DateOnly {
shortFormat = "" shortFormat = ""
} }
v := common.FormatTimeWith(in, f) v := dttm.FormatTo(in, f)
for _, ps := range pss { for _, ps := range pss {
table = append(table, testable{ table = append(table, testable{
@@ -151,7 +150,7 @@ func (suite *CommonTimeUnitSuite) TestExtractTime() {
suite.Run(test.input, func() { suite.Run(test.input, func() {
t := suite.T() t := suite.T()
result, err := common.ExtractTime(test.input) result, err := dttm.ExtractTime(test.input)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.expect, comparable(t, result, test.clippedFormat)) assert.Equal(t, test.expect, comparable(t, result, test.clippedFormat))
}) })

View File

@@ -1,51 +0,0 @@
package common
import (
"strings"
"golang.org/x/exp/maps"
)
type IDNamer interface {
// the canonical id of the thing, generated and usable
// by whichever system has ownership of it.
ID() string
// the human-readable name of the thing.
Name() string
}
type IDNameSwapper interface {
IDOf(name string) (string, bool)
NameOf(id string) (string, bool)
IDs() []string
Names() []string
}
var _ IDNameSwapper = &IDsNames{}
type IDsNames struct {
IDToName map[string]string
NameToID map[string]string
}
// IDOf returns the id associated with the given name.
func (in IDsNames) IDOf(name string) (string, bool) {
id, ok := in.NameToID[strings.ToLower(name)]
return id, ok
}
// NameOf returns the name associated with the given id.
func (in IDsNames) NameOf(id string) (string, bool) {
name, ok := in.IDToName[strings.ToLower(id)]
return name, ok
}
// IDs returns all known ids.
func (in IDsNames) IDs() []string {
return maps.Keys(in.IDToName)
}
// Names returns all known names.
func (in IDsNames) Names() []string {
return maps.Keys(in.NameToID)
}

View File

@@ -0,0 +1,107 @@
package idname
import (
"strings"
"golang.org/x/exp/maps"
)
// Provider is a tuple containing an ID and a Name. Names are
// assumed to be human-displayable versions of system IDs.
// Providers should always be populated, while a nil value is
// likely an error. Compliant structs should provide both a name
// and an ID, never just one. Values are not validated, so both
// values being empty is an allowed condition, but the assumption
// is that downstream consumers will have problems as a result.
type Provider interface {
// ID returns the canonical id of the thing, generated and
// usable by whichever system has ownership of it.
ID() string
// the human-readable name of the thing.
Name() string
}
var _ Provider = &is{}
type is struct {
id string
name string
}
func (is is) ID() string { return is.id }
func (is is) Name() string { return is.name }
type Cacher interface {
IDOf(name string) (string, bool)
NameOf(id string) (string, bool)
IDs() []string
Names() []string
ProviderForID(id string) Provider
ProviderForName(id string) Provider
}
var _ Cacher = &cache{}
type cache struct {
idToName map[string]string
nameToID map[string]string
}
func NewCache(idToName map[string]string) cache {
nti := make(map[string]string, len(idToName))
for id, name := range idToName {
nti[name] = id
}
return cache{
idToName: idToName,
nameToID: nti,
}
}
// IDOf returns the id associated with the given name.
func (c cache) IDOf(name string) (string, bool) {
id, ok := c.nameToID[strings.ToLower(name)]
return id, ok
}
// NameOf returns the name associated with the given id.
func (c cache) NameOf(id string) (string, bool) {
name, ok := c.idToName[strings.ToLower(id)]
return name, ok
}
// IDs returns all known ids.
func (c cache) IDs() []string {
return maps.Keys(c.idToName)
}
// Names returns all known names.
func (c cache) Names() []string {
return maps.Keys(c.nameToID)
}
func (c cache) ProviderForID(id string) Provider {
n, ok := c.NameOf(id)
if !ok {
return &is{}
}
return &is{
id: id,
name: n,
}
}
func (c cache) ProviderForName(name string) Provider {
i, ok := c.IDOf(name)
if !ok {
return &is{}
}
return &is{
id: i,
name: name,
}
}
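
A short sketch of the cache in use (id and name values hypothetical). Note that IDOf and NameOf lower-case their lookup keys while NewCache stores the maps as given, so keys are assumed to arrive already lower-cased:

    package main

    import (
        "fmt"

        "github.com/alcionai/corso/src/internal/common/idname"
    )

    func main() {
        c := idname.NewCache(map[string]string{
            "user-id-1": "ada@example.com", // hypothetical id -> name
        })

        id, ok := c.IDOf("ADA@example.com") // lookup key is lower-cased
        fmt.Println(id, ok)                 // user-id-1 true

        p := c.ProviderForID("user-id-1")
        fmt.Println(p.ID(), p.Name()) // user-id-1 ada@example.com
    }
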

View File

@@ -0,0 +1,84 @@
package mock
import (
"strings"
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/idname"
)
var _ idname.Provider = &in{}
func NewProvider(id, name string) *in {
return &in{
id: id,
name: name,
}
}
type in struct {
id string
name string
}
func (i in) ID() string { return i.id }
func (i in) Name() string { return i.name }
type Cache struct {
IDToName map[string]string
NameToID map[string]string
}
func NewCache(itn, nti map[string]string) Cache {
return Cache{
IDToName: itn,
NameToID: nti,
}
}
// IDOf returns the id associated with the given name.
func (c Cache) IDOf(name string) (string, bool) {
id, ok := c.NameToID[strings.ToLower(name)]
return id, ok
}
// NameOf returns the name associated with the given id.
func (c Cache) NameOf(id string) (string, bool) {
name, ok := c.IDToName[strings.ToLower(id)]
return name, ok
}
// IDs returns all known ids.
func (c Cache) IDs() []string {
return maps.Keys(c.IDToName)
}
// Names returns all known names.
func (c Cache) Names() []string {
return maps.Keys(c.NameToID)
}
func (c Cache) ProviderForID(id string) idname.Provider {
n, ok := c.NameOf(id)
if !ok {
return nil
}
return &in{
id: id,
name: n,
}
}
func (c Cache) ProviderForName(name string) idname.Provider {
i, ok := c.IDOf(name)
if !ok {
return nil
}
return &in{
id: i,
name: name,
}
}
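
The connector tests later in this diff consume this package as inMock.NewProvider(id, name). One behavioral difference worth noting: on a miss, the mock's ProviderFor* methods return nil, where the real idname cache returns an empty provider. A sketch with hypothetical values:

    package main

    import (
        "fmt"

        inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
    )

    func main() {
        owner := inMock.NewProvider("site-id", "site name")
        fmt.Println(owner.ID(), owner.Name())

        c := inMock.NewCache(
            map[string]string{"site-id": "site name"},
            map[string]string{"site name": "site-id"})
        fmt.Println(c.ProviderForID("unknown-id") == nil) // true: misses are nil
    }
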

View File

@@ -0,0 +1,44 @@
package mock
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
)
var _ prefixmatcher.StringSetReader = &PrefixMap{}
type PrefixMap struct {
prefixmatcher.StringSetBuilder
}
func NewPrefixMap(m map[string]map[string]struct{}) *PrefixMap {
r := PrefixMap{StringSetBuilder: prefixmatcher.NewMatcher[map[string]struct{}]()}
for k, v := range m {
r.Add(k, v)
}
return &r
}
func (pm PrefixMap) AssertEqual(t *testing.T, r prefixmatcher.StringSetReader) {
if pm.Empty() {
require.True(t, r.Empty(), "both prefix maps are empty")
return
}
pks := pm.Keys()
rks := r.Keys()
assert.ElementsMatch(t, pks, rks, "prefix keys match")
for _, pk := range pks {
p, _ := pm.Get(pk)
r, _ := r.Get(pk)
assert.Equal(t, p, r, "values match")
}
}
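
A sketch of the helper in a test (keys hypothetical, and the mock's package path assumed to be prefixmatcher/mock); AssertEqual first compares key sets, then the value set under each key:

    package mock_test

    import (
        "testing"

        "github.com/alcionai/corso/src/internal/common/prefixmatcher"
        "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock"
    )

    func TestRestoreExcludes(t *testing.T) {
        expect := mock.NewPrefixMap(map[string]map[string]struct{}{
            "parent/path": {"item-id": {}},
        })

        // got stands in for whatever reader the code under test produced.
        var got prefixmatcher.StringSetReader = mock.NewPrefixMap(
            map[string]map[string]struct{}{"parent/path": {"item-id": {}}})

        expect.AssertEqual(t, got)
    }
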

View File

@@ -2,28 +2,48 @@ package prefixmatcher
import ( import (
"strings" "strings"
"golang.org/x/exp/maps"
) )
type View[T any] interface { type Reader[T any] interface {
Get(key string) (T, bool) Get(key string) (T, bool)
LongestPrefix(key string) (string, T, bool) LongestPrefix(key string) (string, T, bool)
Empty() bool Empty() bool
Keys() []string
} }
type Matcher[T any] interface { type Builder[T any] interface {
// Add adds or updates the item with key to have value value. // Add adds or updates the item with key to have value value.
Add(key string, value T) Add(key string, value T)
View[T] Reader[T]
} }
// ---------------------------------------------------------------------------
// Implementation
// ---------------------------------------------------------------------------
// prefixMatcher implements Builder
type prefixMatcher[T any] struct { type prefixMatcher[T any] struct {
data map[string]T data map[string]T
} }
func (m *prefixMatcher[T]) Add(key string, value T) { func NewMatcher[T any]() Builder[T] {
m.data[key] = value return &prefixMatcher[T]{
data: map[string]T{},
}
} }
func NopReader[T any]() *prefixMatcher[T] {
return &prefixMatcher[T]{
data: make(map[string]T),
}
}
func (m *prefixMatcher[T]) Add(key string, value T) { m.data[key] = value }
func (m prefixMatcher[T]) Empty() bool { return len(m.data) == 0 }
func (m prefixMatcher[T]) Keys() []string { return maps.Keys(m.data) }
func (m *prefixMatcher[T]) Get(key string) (T, bool) { func (m *prefixMatcher[T]) Get(key string) (T, bool) {
if m == nil { if m == nil {
return *new(T), false return *new(T), false
@@ -58,11 +78,3 @@ func (m *prefixMatcher[T]) LongestPrefix(key string) (string, T, bool) {
return rk, rv, found return rk, rv, found
} }
func (m prefixMatcher[T]) Empty() bool {
return len(m.data) == 0
}
func NewMatcher[T any]() Matcher[T] {
return &prefixMatcher[T]{data: map[string]T{}}
}
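
A minimal sketch of the Builder/Reader pair (keys hypothetical); per the tests below, LongestPrefix selects the most specific stored prefix of the query, with the empty string acting as a catch-all:

    package main

    import (
        "fmt"

        "github.com/alcionai/corso/src/internal/common/prefixmatcher"
    )

    func main() {
        b := prefixmatcher.NewMatcher[int]()
        b.Add("", 0) // empty prefix matches any key
        b.Add("foo", 1)
        b.Add("foo/bar", 2)

        k, v, ok := b.LongestPrefix("foo/bar/baz")
        fmt.Println(k, v, ok) // foo/bar 2 true

        var r prefixmatcher.Reader[int] = b // hand out a read-only view
        fmt.Println(r.Empty(), r.Keys())
    }
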

View File

@@ -5,6 +5,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/prefixmatcher" "github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@@ -41,6 +42,8 @@ func (suite *PrefixMatcherUnitSuite) TestAdd_Get() {
assert.True(t, ok, "searching for key", k) assert.True(t, ok, "searching for key", k)
assert.Equal(t, v, val, "returned value") assert.Equal(t, v, val, "returned value")
} }
assert.ElementsMatch(t, maps.Keys(kvs), pm.Keys())
} }
func (suite *PrefixMatcherUnitSuite) TestLongestPrefix() { func (suite *PrefixMatcherUnitSuite) TestLongestPrefix() {

View File

@@ -0,0 +1,122 @@
package prefixmatcher
import "golang.org/x/exp/maps"
// StringSetReader is a reader designed specifically to contain a set
// of string values (ie: Reader[map[string]struct{}]).
// This is a quality-of-life typecast for the generic Reader.
type StringSetReader interface {
Reader[map[string]struct{}]
}
// StringSetBuilder is a builder designed specifically to contain a set
// of string values (ie: Builder[map[string]struct{}]).
// This is a quality-of-life typecast for the generic Builder.
type StringSetBuilder interface {
Builder[map[string]struct{}]
}
// ---------------------------------------------------------------------------
// Implementation
// ---------------------------------------------------------------------------
var (
_ StringSetReader = &StringSetMatcher{}
_ StringSetBuilder = &StringSetMatchBuilder{}
)
// Items that should be excluded when sourcing data from the base backup.
// Parent Path -> item ID -> {}
type StringSetMatcher struct {
ssb StringSetBuilder
}
func (m *StringSetMatcher) LongestPrefix(parent string) (string, map[string]struct{}, bool) {
if m == nil {
return "", nil, false
}
return m.ssb.LongestPrefix(parent)
}
func (m *StringSetMatcher) Empty() bool {
return m == nil || m.ssb.Empty()
}
func (m *StringSetMatcher) Get(parent string) (map[string]struct{}, bool) {
if m == nil {
return nil, false
}
return m.ssb.Get(parent)
}
func (m *StringSetMatcher) Keys() []string {
if m == nil {
return []string{}
}
return m.ssb.Keys()
}
func (m *StringSetMatchBuilder) ToReader() *StringSetMatcher {
if m == nil {
return nil
}
return m.ssm
}
// Items that should be excluded when sourcing data from the base backup.
// Parent Path -> item ID -> {}
type StringSetMatchBuilder struct {
ssm *StringSetMatcher
}
func NewStringSetBuilder() *StringSetMatchBuilder {
return &StringSetMatchBuilder{
ssm: &StringSetMatcher{
ssb: NewMatcher[map[string]struct{}](),
},
}
}
// Add copies all items into the key's bucket.
func (m *StringSetMatchBuilder) Add(key string, items map[string]struct{}) {
if m == nil {
return
}
vs, ok := m.ssm.Get(key)
if !ok {
m.ssm.ssb.Add(key, items)
return
}
maps.Copy(vs, items)
m.ssm.ssb.Add(key, vs)
}
func (m *StringSetMatchBuilder) LongestPrefix(parent string) (string, map[string]struct{}, bool) {
return m.ssm.LongestPrefix(parent)
}
func (m *StringSetMatchBuilder) Empty() bool {
return m == nil || m.ssm.Empty()
}
func (m *StringSetMatchBuilder) Get(parent string) (map[string]struct{}, bool) {
if m == nil {
return nil, false
}
return m.ssm.Get(parent)
}
func (m *StringSetMatchBuilder) Keys() []string {
if m == nil {
return []string{}
}
return m.ssm.Keys()
}
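
A sketch of the specialization above (parent path and item IDs hypothetical). Unlike the generic builder, whose Add replaces the stored value, StringSetMatchBuilder.Add unions new items into the key's existing set, and ToReader strips the Add capability from whatever is handed onward:

    package main

    import (
        "fmt"

        "github.com/alcionai/corso/src/internal/common/prefixmatcher"
    )

    func main() {
        b := prefixmatcher.NewStringSetBuilder()

        b.Add("parent/path", map[string]struct{}{"item-1": {}})
        b.Add("parent/path", map[string]struct{}{"item-2": {}})

        set, _ := b.Get("parent/path")
        fmt.Println(len(set)) // 2: the two Adds merged

        var r prefixmatcher.StringSetReader = b.ToReader()
        fmt.Println(r.Empty()) // false
    }
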

View File

@@ -0,0 +1,166 @@
package prefixmatcher_test
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/tester"
)
type StringSetUnitSuite struct {
tester.Suite
}
func TestStringSetUnitSuite(t *testing.T) {
suite.Run(t, &StringSetUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *StringSetUnitSuite) TestEmpty() {
pm := prefixmatcher.NewStringSetBuilder()
assert.True(suite.T(), pm.Empty())
}
func (suite *StringSetUnitSuite) TestToReader() {
var (
pr prefixmatcher.StringSetReader
t = suite.T()
pm = prefixmatcher.NewStringSetBuilder()
)
pr = pm.ToReader()
_, ok := pr.(prefixmatcher.StringSetBuilder)
assert.False(t, ok, "cannot cast to builder")
}
func (suite *StringSetUnitSuite) TestAdd_Get() {
t := suite.T()
pm := prefixmatcher.NewStringSetBuilder()
kvs := map[string]map[string]struct{}{
"hello": {"world": {}},
"hola": {"mundo": {}},
"foo": {"bar": {}},
}
for k, v := range kvs {
pm.Add(k, v)
}
for k, v := range kvs {
val, ok := pm.Get(k)
assert.True(t, ok, "searching for key", k)
assert.Equal(t, v, val, "returned value")
}
assert.ElementsMatch(t, maps.Keys(kvs), pm.Keys())
}
func (suite *StringSetUnitSuite) TestAdd_Union() {
t := suite.T()
pm := prefixmatcher.NewStringSetBuilder()
pm.Add("hello", map[string]struct{}{
"world": {},
"mundo": {},
})
pm.Add("hello", map[string]struct{}{
"goodbye": {},
"aideu": {},
})
expect := map[string]struct{}{
"world": {},
"mundo": {},
"goodbye": {},
"aideu": {},
}
result, _ := pm.Get("hello")
assert.Equal(t, expect, result)
assert.ElementsMatch(t, []string{"hello"}, pm.Keys())
}
func (suite *StringSetUnitSuite) TestLongestPrefix() {
key := "hello"
value := "world"
table := []struct {
name string
inputKVs map[string]map[string]struct{}
searchKey string
expectedKey string
expectedValue map[string]struct{}
expectedFound assert.BoolAssertionFunc
}{
{
name: "Empty Prefix",
inputKVs: map[string]map[string]struct{}{
"": {value: {}},
},
searchKey: key,
expectedKey: "",
expectedValue: map[string]struct{}{value: {}},
expectedFound: assert.True,
},
{
name: "Exact Match",
inputKVs: map[string]map[string]struct{}{
key: {value: {}},
},
searchKey: key,
expectedKey: key,
expectedValue: map[string]struct{}{value: {}},
expectedFound: assert.True,
},
{
name: "Prefix Match",
inputKVs: map[string]map[string]struct{}{
key[:len(key)-2]: {value: {}},
},
searchKey: key,
expectedKey: key[:len(key)-2],
expectedValue: map[string]struct{}{value: {}},
expectedFound: assert.True,
},
{
name: "Longest Prefix Match",
inputKVs: map[string]map[string]struct{}{
key[:len(key)-2]: {value: {}},
"": {value + "2": {}},
key[:len(key)-4]: {value + "3": {}},
},
searchKey: key,
expectedKey: key[:len(key)-2],
expectedValue: map[string]struct{}{value: {}},
expectedFound: assert.True,
},
{
name: "No Match",
inputKVs: map[string]map[string]struct{}{
"foo": {value: {}},
},
searchKey: key,
expectedKey: "",
expectedValue: nil,
expectedFound: assert.False,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
pm := prefixmatcher.NewStringSetBuilder()
for k, v := range test.inputKVs {
pm.Add(k, v)
}
k, v, ok := pm.LongestPrefix(test.searchKey)
assert.Equal(t, test.expectedKey, k, "key")
assert.Equal(t, test.expectedValue, v, "value")
test.expectedFound(t, ok, "found")
})
}
}

View File

@@ -6,7 +6,8 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/connector/discovery" "github.com/alcionai/corso/src/internal/connector/discovery"
"github.com/alcionai/corso/src/internal/connector/exchange" "github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
@@ -19,6 +20,8 @@ import (
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@@ -34,28 +37,36 @@ import (
// prior history (ie, incrementals) and run a full backup. // prior history (ie, incrementals) and run a full backup.
func (gc *GraphConnector) ProduceBackupCollections( func (gc *GraphConnector) ProduceBackupCollections(
ctx context.Context, ctx context.Context,
owner common.IDNamer, owner idname.Provider,
sels selectors.Selector, sels selectors.Selector,
metadata []data.RestoreCollection, metadata []data.RestoreCollection,
lastBackupVersion int,
ctrlOpts control.Options, ctrlOpts control.Options,
errs *fault.Bus, errs *fault.Bus,
) ([]data.BackupCollection, map[string]map[string]struct{}, error) { ) ([]data.BackupCollection, prefixmatcher.StringSetReader, error) {
ctx, end := diagnostics.Span( ctx, end := diagnostics.Span(
ctx, ctx,
"gc:produceBackupCollections", "gc:produceBackupCollections",
diagnostics.Index("service", sels.Service.String())) diagnostics.Index("service", sels.Service.String()))
defer end() defer end()
ctx = graph.BindRateLimiterConfig(ctx, graph.LimiterCfg{Service: sels.PathService()})
// Limit the max number of active requests to graph from this collection.
ctrlOpts.Parallelism.ItemFetch = graph.Parallelism(sels.PathService()).
ItemOverride(ctx, ctrlOpts.Parallelism.ItemFetch)
err := verifyBackupInputs(sels, gc.IDNameLookup.IDs()) err := verifyBackupInputs(sels, gc.IDNameLookup.IDs())
if err != nil { if err != nil {
return nil, nil, clues.Stack(err).WithClues(ctx) return nil, nil, clues.Stack(err).WithClues(ctx)
} }
serviceEnabled, err := checkServiceEnabled( serviceEnabled, canMakeDeltaQueries, err := checkServiceEnabled(
ctx, ctx,
gc.Discovery.Users(), gc.Discovery.Users(),
path.ServiceType(sels.Service), path.ServiceType(sels.Service),
sels.DiscreteOwner) sels.DiscreteOwner,
)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
@@ -64,12 +75,23 @@ func (gc *GraphConnector) ProduceBackupCollections(
return []data.BackupCollection{}, nil, nil return []data.BackupCollection{}, nil, nil
} }
var (
colls []data.BackupCollection
ssmb *prefixmatcher.StringSetMatcher
)
if !canMakeDeltaQueries {
logger.Ctx(ctx).Info("delta requests not available")
ctrlOpts.ToggleFeatures.DisableDelta = true
}
switch sels.Service { switch sels.Service {
case selectors.ServiceExchange: case selectors.ServiceExchange:
colls, excludes, err := exchange.DataCollections( colls, ssmb, err = exchange.DataCollections(
ctx, ctx,
sels, sels,
sels, owner,
metadata, metadata,
gc.credentials, gc.credentials,
gc.UpdateStatus, gc.UpdateStatus,
@@ -79,26 +101,13 @@ func (gc *GraphConnector) ProduceBackupCollections(
return nil, nil, err return nil, nil, err
} }
for _, c := range colls {
// kopia doesn't stream Items() from deleted collections,
// and so they never end up calling the UpdateStatus closer.
// This is a brittle workaround, since changes in consumer
// behavior (such as calling Items()) could inadvertently
// break the process state, putting us into deadlock or
// panics.
if c.State() != data.DeletedState {
gc.incrementAwaitingMessages()
}
}
return colls, excludes, nil
case selectors.ServiceOneDrive: case selectors.ServiceOneDrive:
colls, excludes, err := onedrive.DataCollections( colls, ssmb, err = onedrive.DataCollections(
ctx, ctx,
sels, sels,
sels, owner,
metadata, metadata,
lastBackupVersion,
gc.credentials.AzureTenantID, gc.credentials.AzureTenantID,
gc.itemClient, gc.itemClient,
gc.Service, gc.Service,
@@ -109,20 +118,13 @@ func (gc *GraphConnector) ProduceBackupCollections(
return nil, nil, err return nil, nil, err
} }
for _, c := range colls {
// kopia doesn't stream Items() from deleted collections.
if c.State() != data.DeletedState {
gc.incrementAwaitingMessages()
}
}
return colls, excludes, nil
case selectors.ServiceSharePoint: case selectors.ServiceSharePoint:
colls, excludes, err := sharepoint.DataCollections( colls, ssmb, err = sharepoint.DataCollections(
ctx, ctx,
gc.itemClient, gc.itemClient,
sels, sels,
owner,
metadata,
gc.credentials, gc.credentials,
gc.Service, gc.Service,
gc, gc,
@@ -132,13 +134,23 @@ func (gc *GraphConnector) ProduceBackupCollections(
return nil, nil, err return nil, nil, err
} }
gc.incrementMessagesBy(len(colls))
return colls, excludes, nil
default: default:
return nil, nil, clues.Wrap(clues.New(sels.Service.String()), "service not supported").WithClues(ctx) return nil, nil, clues.Wrap(clues.New(sels.Service.String()), "service not supported").WithClues(ctx)
} }
for _, c := range colls {
// kopia doesn't stream Items() from deleted collections,
// and so they never end up calling the UpdateStatus closer.
// This is a brittle workaround, since changes in consumer
// behavior (such as calling Items()) could inadvertently
// break the process state, putting us into deadlock or
// panics.
if c.State() != data.DeletedState {
gc.incrementAwaitingMessages()
}
}
return colls, ssmb, nil
} }
func verifyBackupInputs(sels selectors.Selector, siteIDs []string) error { func verifyBackupInputs(sels selectors.Selector, siteIDs []string) error {
@@ -155,16 +167,7 @@ func verifyBackupInputs(sels selectors.Selector, siteIDs []string) error {
resourceOwner := strings.ToLower(sels.DiscreteOwner) resourceOwner := strings.ToLower(sels.DiscreteOwner)
var found bool if !filters.Equal(ids).Compare(resourceOwner) {
for _, id := range ids {
if strings.ToLower(id) == resourceOwner {
found = true
break
}
}
if !found {
return clues.Stack(graph.ErrResourceOwnerNotFound).With("missing_resource_owner", sels.DiscreteOwner) return clues.Stack(graph.ErrResourceOwnerNotFound).With("missing_resource_owner", sels.DiscreteOwner)
} }
@@ -176,22 +179,28 @@ func checkServiceEnabled(
gi discovery.GetInfoer, gi discovery.GetInfoer,
service path.ServiceType, service path.ServiceType,
resource string, resource string,
) (bool, error) { ) (bool, bool, error) {
if service == path.SharePointService { if service == path.SharePointService {
// No "enabled" check required for sharepoint // No "enabled" check required for sharepoint
return true, nil return true, true, nil
} }
info, err := gi.GetInfo(ctx, resource) info, err := gi.GetInfo(ctx, resource)
if err != nil { if err != nil {
return false, err return false, false, err
} }
if !info.ServiceEnabled(service) { if !info.ServiceEnabled(service) {
return false, clues.Wrap(graph.ErrServiceNotEnabled, "checking service access") return false, false, clues.Wrap(graph.ErrServiceNotEnabled, "checking service access")
} }
return true, nil canMakeDeltaQueries := true
if service == path.ExchangeService {
// we currently can only check quota exceeded for exchange
canMakeDeltaQueries = info.CanMakeDeltaQueries()
}
return true, canMakeDeltaQueries, nil
} }
// ConsumeRestoreCollections restores data from the specified collections // ConsumeRestoreCollections restores data from the specified collections
@@ -201,7 +210,7 @@ func (gc *GraphConnector) ConsumeRestoreCollections(
ctx context.Context, ctx context.Context,
backupVersion int, backupVersion int,
acct account.Account, acct account.Account,
selector selectors.Selector, sels selectors.Selector,
dest control.RestoreDestination, dest control.RestoreDestination,
opts control.Options, opts control.Options,
dcs []data.RestoreCollection, dcs []data.RestoreCollection,
@@ -210,6 +219,8 @@ func (gc *GraphConnector) ConsumeRestoreCollections(
ctx, end := diagnostics.Span(ctx, "connector:restore") ctx, end := diagnostics.Span(ctx, "connector:restore")
defer end() defer end()
ctx = graph.BindRateLimiterConfig(ctx, graph.LimiterCfg{Service: sels.PathService()})
var ( var (
status *support.ConnectorOperationStatus status *support.ConnectorOperationStatus
deets = &details.Builder{} deets = &details.Builder{}
@@ -220,15 +231,15 @@ func (gc *GraphConnector) ConsumeRestoreCollections(
return nil, clues.Wrap(err, "malformed azure credentials") return nil, clues.Wrap(err, "malformed azure credentials")
} }
switch selector.Service { switch sels.Service {
case selectors.ServiceExchange: case selectors.ServiceExchange:
status, err = exchange.RestoreExchangeDataCollections(ctx, creds, gc.Service, dest, dcs, deets, errs) status, err = exchange.RestoreExchangeDataCollections(ctx, creds, gc.Service, dest, dcs, deets, errs)
case selectors.ServiceOneDrive: case selectors.ServiceOneDrive:
status, err = onedrive.RestoreCollections(ctx, creds, backupVersion, gc.Service, dest, opts, dcs, deets, errs) status, err = onedrive.RestoreCollections(ctx, creds, backupVersion, gc.Service, dest, opts, dcs, deets, errs)
case selectors.ServiceSharePoint: case selectors.ServiceSharePoint:
status, err = sharepoint.RestoreCollections(ctx, backupVersion, creds, gc.Service, dest, dcs, deets, errs) status, err = sharepoint.RestoreCollections(ctx, backupVersion, creds, gc.Service, dest, opts, dcs, deets, errs)
default: default:
err = clues.Wrap(clues.New(selector.Service.String()), "service not supported") err = clues.Wrap(clues.New(sels.Service.String()), "service not supported")
} }
gc.incrementAwaitingMessages() gc.incrementAwaitingMessages()

View File

@@ -10,15 +10,17 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/connector/exchange" "github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/sharepoint" "github.com/alcionai/corso/src/internal/connector/sharepoint"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/selectors/testdata" selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@@ -93,44 +95,57 @@ func (suite *DataCollectionIntgSuite) TestExchangeDataCollection() {
} }
for _, test := range tests { for _, test := range tests {
suite.Run(test.name, func() { for _, canMakeDeltaQueries := range []bool{true, false} {
t := suite.T() name := test.name
sel := test.getSelector(t) if canMakeDeltaQueries {
name += "-delta"
collections, excludes, err := exchange.DataCollections( } else {
ctx, name += "-non-delta"
sel,
sel,
nil,
connector.credentials,
connector.UpdateStatus,
control.Options{},
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
assert.Empty(t, excludes)
for range collections {
connector.incrementAwaitingMessages()
} }
// Categories with delta endpoints will produce a collection for metadata suite.Run(name, func() {
// as well as the actual data pulled, and the "temp" root collection. t := suite.T()
assert.GreaterOrEqual(t, len(collections), 1, "expected 1 <= num collections <= 3")
assert.GreaterOrEqual(t, 3, len(collections), "expected 1 <= num collections <= 3")
for _, col := range collections { sel := test.getSelector(t)
for object := range col.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{} ctrlOpts := control.Defaults()
_, err := buf.ReadFrom(object.ToReader()) ctrlOpts.ToggleFeatures.DisableDelta = !canMakeDeltaQueries
assert.NoError(t, err, "received a buf.Read error", clues.ToCore(err))
collections, excludes, err := exchange.DataCollections(
ctx,
sel,
sel,
nil,
connector.credentials,
connector.UpdateStatus,
ctrlOpts,
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
assert.True(t, excludes.Empty())
for range collections {
connector.incrementAwaitingMessages()
} }
}
status := connector.Wait() // Categories with delta endpoints will produce a collection for metadata
assert.NotZero(t, status.Successes) // as well as the actual data pulled, and the "temp" root collection.
t.Log(status.String()) assert.GreaterOrEqual(t, len(collections), 1, "expected 1 <= num collections <= 2")
}) assert.GreaterOrEqual(t, 3, len(collections), "expected 1 <= num collections <= 3")
for _, col := range collections {
for object := range col.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{}
_, err := buf.ReadFrom(object.ToReader())
assert.NoError(t, err, "received a buf.Read error", clues.ToCore(err))
}
}
status := connector.Wait()
assert.NotZero(t, status.Successes)
t.Log(status.String())
})
}
} }
} }
@@ -158,7 +173,7 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner()
name: "Invalid onedrive backup user", name: "Invalid onedrive backup user",
getSelector: func(t *testing.T) selectors.Selector { getSelector: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup(owners) sel := selectors.NewOneDriveBackup(owners)
sel.Include(sel.Folders(selectors.Any())) sel.Include(selTD.OneDriveBackupFolderScope(sel))
return sel.Selector return sel.Selector
}, },
}, },
@@ -166,7 +181,7 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner()
name: "Invalid sharepoint backup site", name: "Invalid sharepoint backup site",
getSelector: func(t *testing.T) selectors.Selector { getSelector: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup(owners) sel := selectors.NewSharePointBackup(owners)
sel.Include(testdata.SharePointBackupFolderScope(sel)) sel.Include(selTD.SharePointBackupFolderScope(sel))
return sel.Selector return sel.Selector
}, },
}, },
@@ -183,7 +198,7 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner()
name: "missing onedrive backup user", name: "missing onedrive backup user",
getSelector: func(t *testing.T) selectors.Selector { getSelector: func(t *testing.T) selectors.Selector {
sel := selectors.NewOneDriveBackup(owners) sel := selectors.NewOneDriveBackup(owners)
sel.Include(sel.Folders(selectors.Any())) sel.Include(selTD.OneDriveBackupFolderScope(sel))
sel.DiscreteOwner = "" sel.DiscreteOwner = ""
return sel.Selector return sel.Selector
}, },
@@ -192,7 +207,7 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner()
name: "missing sharepoint backup site", name: "missing sharepoint backup site",
getSelector: func(t *testing.T) selectors.Selector { getSelector: func(t *testing.T) selectors.Selector {
sel := selectors.NewSharePointBackup(owners) sel := selectors.NewSharePointBackup(owners)
sel.Include(testdata.SharePointBackupFolderScope(sel)) sel.Include(selTD.SharePointBackupFolderScope(sel))
sel.DiscreteOwner = "" sel.DiscreteOwner = ""
return sel.Selector return sel.Selector
}, },
@@ -208,11 +223,12 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner()
test.getSelector(t), test.getSelector(t),
test.getSelector(t), test.getSelector(t),
nil, nil,
control.Options{}, version.NoBackup,
control.Defaults(),
fault.New(true)) fault.New(true))
assert.Error(t, err, clues.ToCore(err)) assert.Error(t, err, clues.ToCore(err))
assert.Empty(t, collections) assert.Empty(t, collections)
assert.Empty(t, excludes) assert.Nil(t, excludes)
}) })
} }
} }
@@ -236,7 +252,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
name: "Libraries", name: "Libraries",
getSelector: func() selectors.Selector { getSelector: func() selectors.Selector {
sel := selectors.NewSharePointBackup(selSites) sel := selectors.NewSharePointBackup(selSites)
sel.Include(testdata.SharePointBackupFolderScope(sel)) sel.Include(selTD.SharePointBackupFolderScope(sel))
return sel.Selector return sel.Selector
}, },
}, },
@@ -258,16 +274,18 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
collections, excludes, err := sharepoint.DataCollections( collections, excludes, err := sharepoint.DataCollections(
ctx, ctx,
graph.HTTPClient(graph.NoTimeout()), graph.NewNoTimeoutHTTPWrapper(),
sel, sel,
sel,
nil,
connector.credentials, connector.credentials,
connector.Service, connector.Service,
connector, connector,
control.Options{}, control.Defaults(),
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
// Not expecting excludes as this isn't an incremental backup. // Not expecting excludes as this isn't an incremental backup.
assert.Empty(t, excludes) assert.True(t, excludes.Empty())
for range collections { for range collections {
connector.incrementAwaitingMessages() connector.incrementAwaitingMessages()
@@ -342,15 +360,16 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
cols, excludes, err := gc.ProduceBackupCollections( cols, excludes, err := gc.ProduceBackupCollections(
ctx, ctx,
sel.Selector, inMock.NewProvider(id, name),
sel.Selector, sel.Selector,
nil, nil,
control.Options{}, version.NoBackup,
control.Defaults(),
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
require.Len(t, cols, 2) // 1 collection, 1 path prefix directory to ensure the root path exists. require.Len(t, cols, 2) // 1 collection, 1 path prefix directory to ensure the root path exists.
// No excludes yet as this isn't an incremental backup. // No excludes yet as this isn't an incremental backup.
assert.Empty(t, excludes) assert.True(t, excludes.Empty())
t.Logf("cols[0] Path: %s\n", cols[0].FullPath().String()) t.Logf("cols[0] Path: %s\n", cols[0].FullPath().String())
assert.Equal( assert.Equal(
@@ -386,15 +405,16 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {
cols, excludes, err := gc.ProduceBackupCollections( cols, excludes, err := gc.ProduceBackupCollections(
ctx, ctx,
sel.Selector, inMock.NewProvider(id, name),
sel.Selector, sel.Selector,
nil, nil,
control.Options{}, version.NoBackup,
control.Defaults(),
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
assert.Less(t, 0, len(cols)) assert.Less(t, 0, len(cols))
// No excludes yet as this isn't an incremental backup. // No excludes yet as this isn't an incremental backup.
assert.Empty(t, excludes) assert.True(t, excludes.Empty())
for _, collection := range cols { for _, collection := range cols {
t.Logf("Path: %s\n", collection.FullPath().String()) t.Logf("Path: %s\n", collection.FullPath().String())

View File

@@ -69,6 +69,22 @@ func Users(
return users, nil return users, nil
} }
// GetUserInfo fetches detailed info, like userPurpose, for the given user in the tenant.
func GetUserInfo(
ctx context.Context,
acct account.Account,
userID string,
errs *fault.Bus,
) (*api.UserInfo, error) {
client, err := apiClient(ctx, acct)
if err != nil {
return nil, err
}
return client.Users().GetInfo(ctx, userID)
}
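
A usage sketch mirroring how the discovery tests below call it (function and package names in the sketch are hypothetical):

    package sketch

    import (
        "context"

        "github.com/alcionai/corso/src/internal/connector/discovery"
        "github.com/alcionai/corso/src/pkg/account"
        "github.com/alcionai/corso/src/pkg/fault"
        "github.com/alcionai/corso/src/pkg/path"
    )

    // exchangeEnabled checks the ServicesEnabled set on the returned info.
    func exchangeEnabled(ctx context.Context, acct account.Account, userID string) (bool, error) {
        info, err := discovery.GetUserInfo(ctx, acct, userID, fault.New(true))
        if err != nil {
            return false, err
        }

        _, ok := info.ServicesEnabled[path.ExchangeService]
        return ok, nil
    }
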
// User fetches a single user's data.
func User( func User(
ctx context.Context, ctx context.Context,
gwi getWithInfoer, gwi getWithInfoer,
@@ -77,7 +93,7 @@ func User(
u, err := gwi.GetByID(ctx, userID) u, err := gwi.GetByID(ctx, userID)
if err != nil { if err != nil {
if graph.IsErrUserNotFound(err) { if graph.IsErrUserNotFound(err) {
return nil, nil, clues.Stack(graph.ErrResourceOwnerNotFound).With("user_id", userID) return nil, nil, clues.Stack(graph.ErrResourceOwnerNotFound, err).With("user_id", userID)
} }
return nil, nil, clues.Wrap(err, "getting user") return nil, nil, clues.Wrap(err, "getting user")

View File

@@ -18,19 +18,19 @@ import (
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
type DiscoveryIntegrationSuite struct { type DiscoveryIntgSuite struct {
tester.Suite tester.Suite
} }
func TestDiscoveryIntegrationSuite(t *testing.T) { func TestDiscoveryIntgSuite(t *testing.T) {
suite.Run(t, &DiscoveryIntegrationSuite{ suite.Run(t, &DiscoveryIntgSuite{
Suite: tester.NewIntegrationSuite( Suite: tester.NewIntegrationSuite(
t, t,
[][]string{tester.M365AcctCredEnvs}), [][]string{tester.M365AcctCredEnvs}),
}) })
} }
func (suite *DiscoveryIntegrationSuite) TestUsers() { func (suite *DiscoveryIntgSuite) TestUsers() {
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
@@ -55,7 +55,7 @@ func (suite *DiscoveryIntegrationSuite) TestUsers() {
assert.NotEmpty(t, users) assert.NotEmpty(t, users)
} }
func (suite *DiscoveryIntegrationSuite) TestUsers_InvalidCredentials() { func (suite *DiscoveryIntgSuite) TestUsers_InvalidCredentials() {
table := []struct { table := []struct {
name string name string
acct func(t *testing.T) account.Account acct func(t *testing.T) account.Account
@@ -101,7 +101,7 @@ func (suite *DiscoveryIntegrationSuite) TestUsers_InvalidCredentials() {
} }
} }
func (suite *DiscoveryIntegrationSuite) TestSites() { func (suite *DiscoveryIntgSuite) TestSites() {
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
@@ -120,7 +120,7 @@ func (suite *DiscoveryIntegrationSuite) TestSites() {
assert.NotEmpty(t, sites) assert.NotEmpty(t, sites)
} }
func (suite *DiscoveryIntegrationSuite) TestSites_InvalidCredentials() { func (suite *DiscoveryIntgSuite) TestSites_InvalidCredentials() {
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
@@ -171,10 +171,9 @@ func (suite *DiscoveryIntegrationSuite) TestSites_InvalidCredentials() {
} }
} }
func (suite *DiscoveryIntegrationSuite) TestUserInfo() { func (suite *DiscoveryIntgSuite) TestUserInfo() {
t := suite.T() t := suite.T()
acct := tester.NewM365Account(t) acct := tester.NewM365Account(t)
userID := tester.M365UserID(t)
creds, err := acct.M365Config() creds, err := acct.M365Config()
require.NoError(t, err) require.NoError(t, err)
@@ -185,26 +184,86 @@ func (suite *DiscoveryIntegrationSuite) TestUserInfo() {
uapi := cli.Users() uapi := cli.Users()
table := []struct { table := []struct {
name string name string
user string user string
expect *api.UserInfo expect *api.UserInfo
expectErr require.ErrorAssertionFunc
}{ }{
{ {
name: "standard test user", name: "standard test user",
user: userID, user: tester.M365UserID(t),
expect: &api.UserInfo{ expect: &api.UserInfo{
DiscoveredServices: map[path.ServiceType]struct{}{ ServicesEnabled: map[path.ServiceType]struct{}{
path.ExchangeService: {}, path.ExchangeService: {},
path.OneDriveService: {}, path.OneDriveService: {},
}, },
Mailbox: api.MailboxInfo{
Purpose: "user",
ErrGetMailBoxSetting: nil,
},
}, },
expectErr: require.NoError,
}, },
{ {
name: "user does not exist", name: "user does not exist",
user: uuid.NewString(), user: uuid.NewString(),
expect: &api.UserInfo{ expect: &api.UserInfo{
DiscoveredServices: map[path.ServiceType]struct{}{ ServicesEnabled: map[path.ServiceType]struct{}{},
path.OneDriveService: {}, // currently statically populated Mailbox: api.MailboxInfo{},
},
expectErr: require.NoError,
},
}
for _, test := range table {
suite.Run(test.name, func() {
ctx, flush := tester.NewContext()
defer flush()
t := suite.T()
result, err := discovery.UserInfo(ctx, uapi, test.user)
test.expectErr(t, err, clues.ToCore(err))
if err != nil {
return
}
assert.Equal(t, test.expect.ServicesEnabled, result.ServicesEnabled)
})
}
}
func (suite *DiscoveryIntgSuite) TestUserWithoutDrive() {
t := suite.T()
acct := tester.NewM365Account(t)
userID := tester.M365UserID(t)
table := []struct {
name string
user string
expect *api.UserInfo
}{
{
name: "user without drive and exchange",
user: "a53c26f7-5100-4acb-a910-4d20960b2c19", // User: testevents@10rqc2.onmicrosoft.com
expect: &api.UserInfo{
ServicesEnabled: map[path.ServiceType]struct{}{},
Mailbox: api.MailboxInfo{
ErrGetMailBoxSetting: []error{api.ErrMailBoxSettingsNotFound},
},
},
},
{
name: "user with drive and exchange",
user: userID,
expect: &api.UserInfo{
ServicesEnabled: map[path.ServiceType]struct{}{
path.ExchangeService: {},
path.OneDriveService: {},
},
Mailbox: api.MailboxInfo{
Purpose: "user",
ErrGetMailBoxSetting: []error{},
}, },
}, },
}, },
@@ -216,9 +275,11 @@ func (suite *DiscoveryIntegrationSuite) TestUserInfo() {
t := suite.T() t := suite.T()
result, err := discovery.UserInfo(ctx, uapi, test.user) result, err := discovery.GetUserInfo(ctx, acct, test.user, fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.expect, result) assert.Equal(t, test.expect.ServicesEnabled, result.ServicesEnabled)
assert.Equal(t, test.expect.Mailbox.ErrGetMailBoxSetting, result.Mailbox.ErrGetMailBoxSetting)
assert.Equal(t, test.expect.Mailbox.Purpose, result.Mailbox.Purpose)
}) })
} }
} }

View File

@@ -1,147 +0,0 @@
package api
import (
"context"
"strings"
"github.com/alcionai/clues"
"github.com/microsoft/kiota-abstractions-go/serialization"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/pkg/account"
)
// ---------------------------------------------------------------------------
// common types and consts
// ---------------------------------------------------------------------------
// DeltaUpdate holds the results of a current delta token. It normally
// gets produced when aggregating the addition and removal of items in
// a delta-queryable folder.
type DeltaUpdate struct {
// the deltaLink itself
URL string
// true if the old delta was marked as invalid
Reset bool
}
// GraphQuery represents functions which perform exchange-specific queries
// into M365 backstore. Responses -> returned items will only contain the information
// that is included in the options
// TODO: use selector or path for granularity into specific folders or specific date ranges
type GraphQuery func(ctx context.Context, userID string) (serialization.Parsable, error)
// GraphRetrievalFunctions are functions from the Microsoft Graph API that retrieve
// the default associated data of a M365 object. This varies by object. Additional
// Queries must be run to obtain the omitted fields.
type GraphRetrievalFunc func(
ctx context.Context,
user, m365ID string,
) (serialization.Parsable, error)
// ---------------------------------------------------------------------------
// interfaces
// ---------------------------------------------------------------------------
// Client is used to fulfill the interface for exchange
// queries that are traditionally backed by GraphAPI. A
// struct is used in this case, instead of deferring to
// pure function wrappers, so that the boundary separates the
// granular implementation of the graphAPI and kiota away
// from the exchange package's broader intents.
type Client struct {
Credentials account.M365Config
// The Stable service is re-usable for any non-paged request.
// This allows us to maintain performance across async requests.
Stable graph.Servicer
// The LargeItem graph servicer is configured specifically for
// downloading large items. Specifically for use when handling
// attachments, and for no other use.
LargeItem graph.Servicer
}
// NewClient produces a new exchange api client. Must be used in
// place of creating an ad-hoc client struct.
func NewClient(creds account.M365Config) (Client, error) {
s, err := NewService(creds)
if err != nil {
return Client{}, err
}
li, err := newLargeItemService(creds)
if err != nil {
return Client{}, err
}
return Client{creds, s, li}, nil
}
// service generates a new service. Used for paged and other long-running
// requests instead of the client's stable service, so that in-flight state
// within the adapter doesn't get clobbered
func (c Client) service() (*graph.Service, error) {
s, err := NewService(c.Credentials)
return s, err
}
func NewService(creds account.M365Config, opts ...graph.Option) (*graph.Service, error) {
a, err := graph.CreateAdapter(
creds.AzureTenantID,
creds.AzureClientID,
creds.AzureClientSecret,
opts...)
if err != nil {
return nil, clues.Wrap(err, "generating graph adapter")
}
return graph.NewService(a), nil
}
func newLargeItemService(creds account.M365Config) (*graph.Service, error) {
a, err := NewService(creds, graph.NoTimeout())
if err != nil {
return nil, clues.Wrap(err, "generating no-timeout graph adapter")
}
return a, nil
}
// ---------------------------------------------------------------------------
// helper funcs
// ---------------------------------------------------------------------------
// checkIDAndName is a helper function to ensure that
// the ID and name pointers are set prior to being called.
func checkIDAndName(c graph.Container) error {
id := ptr.Val(c.GetId())
if len(id) == 0 {
return clues.New("container missing ID")
}
dn := ptr.Val(c.GetDisplayName())
if len(dn) == 0 {
return clues.New("container missing display name").With("container_id", id)
}
return nil
}
func HasAttachments(body models.ItemBodyable) bool {
if body == nil {
return false
}
if ct, ok := ptr.ValOK(body.GetContentType()); !ok || ct == models.TEXT_BODYTYPE {
return false
}
if body, ok := ptr.ValOK(body.GetContent()); !ok || len(body) == 0 {
return false
}
return strings.Contains(ptr.Val(body.GetContent()), "src=\"cid:")
}

View File

@@ -11,7 +11,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/connector/uploadsession"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
) )
@@ -93,10 +92,12 @@ func uploadLargeAttachment(
uploader attachmentUploadable, uploader attachmentUploadable,
attachment models.Attachmentable, attachment models.Attachmentable,
) error { ) error {
var ( bs, err := GetAttachmentBytes(attachment)
bs = attachmentBytes(attachment) if err != nil {
size = int64(len(bs)) return clues.Stack(err).WithClues(ctx)
) }
size := int64(len(bs))
session, err := uploader.uploadSession(ctx, ptr.Val(attachment.GetName()), size) session, err := uploader.uploadSession(ctx, ptr.Val(attachment.GetName()), size)
if err != nil { if err != nil {
@@ -104,7 +105,7 @@ func uploadLargeAttachment(
} }
url := ptr.Val(session.GetUploadUrl()) url := ptr.Val(session.GetUploadUrl())
aw := uploadsession.NewWriter(uploader.getItemID(), url, size) aw := graph.NewLargeItemWriter(uploader.getItemID(), url, size)
logger.Ctx(ctx).Debugw("uploading large attachment", "attachment_url", graph.LoggableURL(url)) logger.Ctx(ctx).Debugw("uploading large attachment", "attachment_url", graph.LoggableURL(url))
// Upload the stream data // Upload the stream data
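
The hunk cuts off before the copy itself; a sketch of how that step presumably proceeds, assuming the writer returned by graph.NewLargeItemWriter satisfies io.Writer (io and bytes imports implied):

    // Stream the attachment bytes through the upload-session writer.
    if _, err := io.Copy(aw, bytes.NewReader(bs)); err != nil {
        return clues.Wrap(err, "streaming attachment upload").WithClues(ctx)
    }
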

View File

@@ -37,9 +37,12 @@ func (mau *mailAttachmentUploader) getItemID() string {
func (mau *mailAttachmentUploader) uploadSmallAttachment(ctx context.Context, attach models.Attachmentable) error { func (mau *mailAttachmentUploader) uploadSmallAttachment(ctx context.Context, attach models.Attachmentable) error {
_, err := mau.service.Client(). _, err := mau.service.Client().
UsersById(mau.userID). Users().
MailFoldersById(mau.folderID). ByUserId(mau.userID).
MessagesById(mau.itemID). MailFolders().
ByMailFolderId(mau.folderID).
Messages().
ByMessageId(mau.itemID).
Attachments(). Attachments().
Post(ctx, attach, nil) Post(ctx, attach, nil)
if err != nil { if err != nil {
@@ -60,9 +63,12 @@ func (mau *mailAttachmentUploader) uploadSession(
r, err := mau. r, err := mau.
service. service.
Client(). Client().
UsersById(mau.userID). Users().
MailFoldersById(mau.folderID). ByUserId(mau.userID).
MessagesById(mau.itemID). MailFolders().
ByMailFolderId(mau.folderID).
Messages().
ByMessageId(mau.itemID).
Attachments(). Attachments().
CreateUploadSession(). CreateUploadSession().
Post(ctx, session, nil) Post(ctx, session, nil)
@@ -87,9 +93,12 @@ func (eau *eventAttachmentUploader) getItemID() string {
func (eau *eventAttachmentUploader) uploadSmallAttachment(ctx context.Context, attach models.Attachmentable) error { func (eau *eventAttachmentUploader) uploadSmallAttachment(ctx context.Context, attach models.Attachmentable) error {
_, err := eau.service.Client(). _, err := eau.service.Client().
UsersById(eau.userID). Users().
CalendarsById(eau.calendarID). ByUserId(eau.userID).
EventsById(eau.itemID). Calendars().
ByCalendarId(eau.calendarID).
Events().
ByEventId(eau.itemID).
Attachments(). Attachments().
Post(ctx, attach, nil) Post(ctx, attach, nil)
if err != nil { if err != nil {
@@ -108,9 +117,12 @@ func (eau *eventAttachmentUploader) uploadSession(
session.SetAttachmentItem(makeSessionAttachment(attachmentName, attachmentSize)) session.SetAttachmentItem(makeSessionAttachment(attachmentName, attachmentSize))
r, err := eau.service.Client(). r, err := eau.service.Client().
UsersById(eau.userID). Users().
CalendarsById(eau.calendarID). ByUserId(eau.userID).
EventsById(eau.itemID). Calendars().
ByCalendarId(eau.calendarID).
Events().
ByEventId(eau.itemID).
Attachments(). Attachments().
CreateUploadSession(). CreateUploadSession().
Post(ctx, session, nil) Post(ctx, session, nil)
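
The mechanical shape of this SDK migration, restated in one place (receiver and arguments as in the mail uploader above):

    // Old fluent style, for contrast:
    //   client.UsersById(userID).MailFoldersById(folderID).MessagesById(itemID)
    //
    // New style: a collection step, then an explicit By*Id selector.
    _, err := mau.service.Client().
        Users().ByUserId(mau.userID).
        MailFolders().ByMailFolderId(mau.folderID).
        Messages().ByMessageId(mau.itemID).
        Attachments().
        Post(ctx, attach, nil)
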

View File

@@ -549,7 +549,7 @@ func (suite *FolderCacheIntegrationSuite) TestCreateContainerDestination() {
var ( var (
user = tester.M365UserID(suite.T()) user = tester.M365UserID(suite.T())
directoryCaches = make(map[path.CategoryType]graph.ContainerResolver) directoryCaches = make(map[path.CategoryType]graph.ContainerResolver)
folderName = tester.DefaultTestRestoreDestination().ContainerName folderName = tester.DefaultTestRestoreDestination("").ContainerName
tests = []struct { tests = []struct {
name string name string
pathFunc1 func(t *testing.T) path.Path pathFunc1 func(t *testing.T) path.Path

View File

@@ -6,8 +6,8 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/connector/exchange/api" "github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
@@ -17,6 +17,7 @@ import (
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
// MetadataFileNames produces the category-specific set of filenames used to // MetadataFileNames produces the category-specific set of filenames used to
@@ -40,7 +41,7 @@ func (dps DeltaPaths) AddDelta(k, d string) {
dp = DeltaPath{} dp = DeltaPath{}
} }
dp.delta = d dp.Delta = d
dps[k] = dp dps[k] = dp
} }
@@ -50,13 +51,13 @@ func (dps DeltaPaths) AddPath(k, p string) {
dp = DeltaPath{} dp = DeltaPath{}
} }
dp.path = p dp.Path = p
dps[k] = dp dps[k] = dp
} }
type DeltaPath struct { type DeltaPath struct {
delta string Delta string
path string Path string
} }
// ParseMetadataCollections produces a map of structs holding delta // ParseMetadataCollections produces a map of structs holding delta
@@ -147,7 +148,7 @@ func parseMetadataCollections(
// complete backup on the next run. // complete backup on the next run.
for _, dps := range cdp { for _, dps := range cdp {
for k, dp := range dps { for k, dp := range dps {
if len(dp.delta) == 0 || len(dp.path) == 0 { if len(dp.Path) == 0 {
delete(dps, k) delete(dps, k)
} }
} }
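
With the fields exported, callers can read Delta and Path directly; a sketch, assuming DeltaPaths is the string-keyed map its methods above index into (values hypothetical). Note the relaxed pruning just above: an entry now survives with only a previous Path, so a container missing its delta token presumably falls back to a full enumeration rather than losing its path entirely:

    package main

    import (
        "fmt"

        "github.com/alcionai/corso/src/internal/connector/exchange"
    )

    func main() {
        dps := exchange.DeltaPaths{}
        dps.AddDelta("container-id", "delta-token")
        dps.AddPath("container-id", "previous/path")

        dp := dps["container-id"]
        fmt.Println(dp.Delta, dp.Path) // delta-token previous/path
    }
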
@@ -163,14 +164,14 @@ func parseMetadataCollections(
// Add iota to this call -> mail, contacts, calendar, etc. // Add iota to this call -> mail, contacts, calendar, etc.
func DataCollections( func DataCollections(
ctx context.Context, ctx context.Context,
user common.IDNamer,
selector selectors.Selector, selector selectors.Selector,
user idname.Provider,
metadata []data.RestoreCollection, metadata []data.RestoreCollection,
acct account.M365Config, acct account.M365Config,
su support.StatusUpdater, su support.StatusUpdater,
ctrlOpts control.Options, ctrlOpts control.Options,
errs *fault.Bus, errs *fault.Bus,
) ([]data.BackupCollection, map[string]map[string]struct{}, error) { ) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, error) {
eb, err := selector.ToExchangeBackup() eb, err := selector.ToExchangeBackup()
if err != nil { if err != nil {
return nil, nil, clues.Wrap(err, "exchange dataCollection selector").WithClues(ctx) return nil, nil, clues.Wrap(err, "exchange dataCollection selector").WithClues(ctx)
@@ -182,6 +183,12 @@ func DataCollections(
categories = map[path.CategoryType]struct{}{} categories = map[path.CategoryType]struct{}{}
) )
// Turn on the concurrency limiter middleware for exchange backups
// unless explicitly disabled through the DisableConcurrencyLimiterFN CLI flag.
if !ctrlOpts.ToggleFeatures.DisableConcurrencyLimiter {
graph.InitializeConcurrencyLimiter(ctrlOpts.Parallelism.ItemFetch)
}
cdps, err := parseMetadataCollections(ctx, metadata, errs) cdps, err := parseMetadataCollections(ctx, metadata, errs)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
@ -214,6 +221,7 @@ func DataCollections(
if len(collections) > 0 { if len(collections) > 0 {
baseCols, err := graph.BaseCollections( baseCols, err := graph.BaseCollections(
ctx, ctx,
collections,
acct.AzureTenantID, acct.AzureTenantID,
user.ID(), user.ID(),
path.ExchangeService, path.ExchangeService,
@ -249,7 +257,7 @@ func getterByType(ac api.Client, category path.CategoryType) (addedAndRemovedIte
func createCollections( func createCollections(
ctx context.Context, ctx context.Context,
creds account.M365Config, creds account.M365Config,
user common.IDNamer, user idname.Provider,
scope selectors.ExchangeScope, scope selectors.ExchangeScope,
dps DeltaPaths, dps DeltaPaths,
ctrlOpts control.Options, ctrlOpts control.Options,
@ -269,9 +277,6 @@ func createCollections(
return nil, clues.Stack(err).WithClues(ctx) return nil, clues.Stack(err).WithClues(ctx)
} }
// Create collection of ExchangeDataCollection
collections := make(map[string]data.BackupCollection)
qp := graph.QueryParams{ qp := graph.QueryParams{
Category: category, Category: category,
ResourceOwner: user, ResourceOwner: user,
@ -289,11 +294,10 @@ func createCollections(
return nil, clues.Wrap(err, "populating container cache") return nil, clues.Wrap(err, "populating container cache")
} }
err = filterContainersAndFillCollections( collections, err := filterContainersAndFillCollections(
ctx, ctx,
qp, qp,
getter, getter,
collections,
su, su,
resolver, resolver,
scope, scope,

View File

@ -10,8 +10,8 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
@ -20,6 +20,7 @@ import (
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -67,7 +68,12 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
data: []fileValues{ data: []fileValues{
{graph.PreviousPathFileName, "prev-path"}, {graph.PreviousPathFileName, "prev-path"},
}, },
expect: map[string]DeltaPath{}, expect: map[string]DeltaPath{
"key": {
Delta: "delta-link",
Path: "prev-path",
},
},
expectError: assert.NoError, expectError: assert.NoError,
}, },
{ {
@ -86,8 +92,8 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
}, },
expect: map[string]DeltaPath{ expect: map[string]DeltaPath{
"key": { "key": {
delta: "delta-link", Delta: "delta-link",
path: "prev-path", Path: "prev-path",
}, },
}, },
expectError: assert.NoError, expectError: assert.NoError,
@ -107,7 +113,12 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{graph.DeltaURLsFileName, ""}, {graph.DeltaURLsFileName, ""},
{graph.PreviousPathFileName, "prev-path"}, {graph.PreviousPathFileName, "prev-path"},
}, },
expect: map[string]DeltaPath{}, expect: map[string]DeltaPath{
"key": {
Delta: "delta-link",
Path: "prev-path",
},
},
expectError: assert.NoError, expectError: assert.NoError,
}, },
{ {
@ -118,8 +129,8 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
}, },
expect: map[string]DeltaPath{ expect: map[string]DeltaPath{
"key": { "key": {
delta: "`!@#$%^&*()_[]{}/\"\\", Delta: "`!@#$%^&*()_[]{}/\"\\",
path: "prev-path", Path: "prev-path",
}, },
}, },
expectError: assert.NoError, expectError: assert.NoError,
@ -132,8 +143,8 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
}, },
expect: map[string]DeltaPath{ expect: map[string]DeltaPath{
"key": { "key": {
delta: "\\n\\r\\t\\b\\f\\v\\0\\\\", Delta: "\\n\\r\\t\\b\\f\\v\\0\\\\",
path: "prev-path", Path: "prev-path",
}, },
}, },
expectError: assert.NoError, expectError: assert.NoError,
@ -149,8 +160,8 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
}, },
expect: map[string]DeltaPath{ expect: map[string]DeltaPath{
"key": { "key": {
delta: "\\n", Delta: "\\n",
path: "prev-path", Path: "prev-path",
}, },
}, },
expectError: assert.NoError, expectError: assert.NoError,
@ -190,8 +201,8 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
assert.Len(t, emails, len(test.expect)) assert.Len(t, emails, len(test.expect))
for k, v := range emails { for k, v := range emails {
assert.Equal(t, v.delta, emails[k].delta, "delta") assert.Equal(t, v.Delta, emails[k].Delta, "delta")
assert.Equal(t, v.path, emails[k].path, "path") assert.Equal(t, v.Path, emails[k].Path, "path")
} }
}) })
} }
@ -239,15 +250,15 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
userID = tester.M365UserID(suite.T()) userID = tester.M365UserID(suite.T())
users = []string{userID} users = []string{userID}
acct, err = tester.NewM365Account(suite.T()).M365Config() acct, err = tester.NewM365Account(suite.T()).M365Config()
ss = selectors.Selector{}.SetDiscreteOwnerIDName(userID, userID)
) )
require.NoError(suite.T(), err, clues.ToCore(err)) require.NoError(suite.T(), err, clues.ToCore(err))
tests := []struct { tests := []struct {
name string name string
scope selectors.ExchangeScope scope selectors.ExchangeScope
folderNames map[string]struct{} folderNames map[string]struct{}
canMakeDeltaQueries bool
}{ }{
{ {
name: "Folder Iterative Check Mail", name: "Folder Iterative Check Mail",
@ -258,6 +269,18 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
folderNames: map[string]struct{}{ folderNames: map[string]struct{}{
DefaultMailFolder: {}, DefaultMailFolder: {},
}, },
canMakeDeltaQueries: true,
},
{
name: "Folder Iterative Check Mail Non-Delta",
scope: selectors.NewExchangeBackup(users).MailFolders(
[]string{DefaultMailFolder},
selectors.PrefixMatch(),
)[0],
folderNames: map[string]struct{}{
DefaultMailFolder: {},
},
canMakeDeltaQueries: false,
}, },
} }
@ -265,13 +288,16 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
ctrlOpts := control.Defaults()
ctrlOpts.ToggleFeatures.DisableDelta = !test.canMakeDeltaQueries
collections, err := createCollections( collections, err := createCollections(
ctx, ctx,
acct, acct,
ss, inMock.NewProvider(userID, userID),
test.scope, test.scope,
DeltaPaths{}, DeltaPaths{},
control.Options{}, ctrlOpts,
func(status *support.ConnectorOperationStatus) {}, func(status *support.ConnectorOperationStatus) {},
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -282,9 +308,18 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
} }
require.NotEmpty(t, c.FullPath().Folder(false)) require.NotEmpty(t, c.FullPath().Folder(false))
folder := c.FullPath().Folder(false)
delete(test.folderNames, folder) // TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection
// interface.
if !assert.Implements(t, (*data.LocationPather)(nil), c) {
continue
}
loc := c.(data.LocationPather).LocationPath().String()
require.NotEmpty(t, loc)
delete(test.folderNames, loc)
} }
assert.Empty(t, test.folderNames) assert.Empty(t, test.folderNames)
@ -300,7 +335,6 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
userID = tester.M365UserID(suite.T()) userID = tester.M365UserID(suite.T())
users = []string{userID} users = []string{userID}
acct, err = tester.NewM365Account(suite.T()).M365Config() acct, err = tester.NewM365Account(suite.T()).M365Config()
ss = selectors.Selector{}.SetDiscreteOwnerIDName(userID, userID)
) )
require.NoError(suite.T(), err, clues.ToCore(err)) require.NoError(suite.T(), err, clues.ToCore(err))
@ -339,10 +373,10 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
collections, err := createCollections( collections, err := createCollections(
ctx, ctx,
acct, acct,
ss, inMock.NewProvider(userID, userID),
test.scope, test.scope,
DeltaPaths{}, DeltaPaths{},
control.Options{}, control.Defaults(),
func(status *support.ConnectorOperationStatus) {}, func(status *support.ConnectorOperationStatus) {},
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -370,10 +404,10 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
collections, err = createCollections( collections, err = createCollections(
ctx, ctx,
acct, acct,
ss, inMock.NewProvider(userID, userID),
test.scope, test.scope,
dps, dps,
control.Options{}, control.Defaults(),
func(status *support.ConnectorOperationStatus) {}, func(status *support.ConnectorOperationStatus) {},
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -405,7 +439,6 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
t = suite.T() t = suite.T()
wg sync.WaitGroup wg sync.WaitGroup
users = []string{suite.user} users = []string{suite.user}
ss = selectors.Selector{}.SetDiscreteOwnerIDName(suite.user, suite.user)
) )
acct, err := tester.NewM365Account(t).M365Config() acct, err := tester.NewM365Account(t).M365Config()
@ -417,10 +450,10 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
collections, err := createCollections( collections, err := createCollections(
ctx, ctx,
acct, acct,
ss, inMock.NewProvider(suite.user, suite.user),
sel.Scopes()[0], sel.Scopes()[0],
DeltaPaths{}, DeltaPaths{},
control.Options{}, control.Defaults(),
newStatusUpdater(t, &wg), newStatusUpdater(t, &wg),
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -467,7 +500,6 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
require.NoError(suite.T(), err, clues.ToCore(err)) require.NoError(suite.T(), err, clues.ToCore(err))
users := []string{suite.user} users := []string{suite.user}
ss := selectors.Selector{}.SetDiscreteOwnerIDName(suite.user, suite.user)
tests := []struct { tests := []struct {
name string name string
@ -491,10 +523,10 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
edcs, err := createCollections( edcs, err := createCollections(
ctx, ctx,
acct, acct,
ss, inMock.NewProvider(suite.user, suite.user),
test.scope, test.scope,
DeltaPaths{}, DeltaPaths{},
control.Options{}, control.Defaults(),
newStatusUpdater(t, &wg), newStatusUpdater(t, &wg),
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -528,7 +560,16 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
continue continue
} }
assert.Equal(t, edc.FullPath().Folder(false), DefaultContactFolder) // TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection
// interface.
if !assert.Implements(t, (*data.LocationPather)(nil), edc) {
continue
}
assert.Equal(
t,
edc.(data.LocationPather).LocationPath().String(),
DefaultContactFolder)
assert.NotZero(t, count) assert.NotZero(t, count)
} }
@ -556,8 +597,6 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
bdayID string bdayID string
) )
ss := selectors.Selector{}.SetDiscreteOwnerIDName(suite.user, suite.user)
fn := func(gcf graph.CacheFolder) error { fn := func(gcf graph.CacheFolder) error {
if ptr.Val(gcf.GetDisplayName()) == DefaultCalendar { if ptr.Val(gcf.GetDisplayName()) == DefaultCalendar {
calID = ptr.Val(gcf.GetId()) calID = ptr.Val(gcf.GetId())
@ -605,10 +644,10 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
collections, err := createCollections( collections, err := createCollections(
ctx, ctx,
acct, acct,
ss, inMock.NewProvider(suite.user, suite.user),
test.scope, test.scope,
DeltaPaths{}, DeltaPaths{},
control.Options{}, control.Defaults(),
newStatusUpdater(t, &wg), newStatusUpdater(t, &wg),
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))

View File

@ -35,10 +35,6 @@ var (
const ( const (
collectionChannelBufferSize = 1000 collectionChannelBufferSize = 1000
numberOfRetries = 4 numberOfRetries = 4
// Outlooks expects max 4 concurrent requests
// https://learn.microsoft.com/en-us/graph/throttling-limits#outlook-service-limits
urlPrefetchChannelBufferSize = 4
) )
type itemer interface { type itemer interface {
@ -186,8 +182,7 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) {
colProgress, closer = observe.CollectionProgress( colProgress, closer = observe.CollectionProgress(
ctx, ctx,
col.fullPath.Category().String(), col.fullPath.Category().String(),
// TODO(keepers): conceal compliance in path, drop Hide() col.LocationPath().Elements())
clues.Hide(col.fullPath.Folder(false)))
go closer() go closer()
@ -196,22 +191,7 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) {
}() }()
} }
// Limit the max number of active requests to GC semaphoreCh := make(chan struct{}, col.ctrl.Parallelism.ItemFetch)
fetchParallelism := col.ctrl.ItemFetchParallelism
if fetchParallelism < 1 || fetchParallelism > urlPrefetchChannelBufferSize {
fetchParallelism = urlPrefetchChannelBufferSize
logger.Ctx(ctx).Infow(
"fetch parallelism value not set or out of bounds, using default",
"default_parallelism",
urlPrefetchChannelBufferSize,
"requested_parallellism",
col.ctrl.ItemFetchParallelism,
)
}
logger.Ctx(ctx).Infow("fetching data with parallelism", "fetch_parallelism", fetchParallelism)
semaphoreCh := make(chan struct{}, fetchParallelism)
defer close(semaphoreCh) defer close(semaphoreCh)
// delete all removed items // delete all removed items
@ -280,7 +260,12 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) {
return return
} }
info.Size = int64(len(data)) // For mail, info.Size is already computed as body content size plus attachment sizes;
// for every other item type it falls back to the item's total serialized size.
if info.Size <= 0 {
info.Size = int64(len(data))
}
info.ParentPath = col.locationPath.String() info.ParentPath = col.locationPath.String()
col.data <- &Stream{ col.data <- &Stream{

View File

@ -179,7 +179,7 @@ func (suite *ExchangeDataCollectionSuite) TestNewCollection_state() {
test.curr, test.prev, test.loc, test.curr, test.prev, test.loc,
0, 0,
&mockItemer{}, nil, &mockItemer{}, nil,
control.Options{}, control.Defaults(),
false) false)
assert.Equal(t, test.expect, c.State(), "collection state") assert.Equal(t, test.expect, c.State(), "collection state")
assert.Equal(t, test.curr, c.fullPath, "full path") assert.Equal(t, test.curr, c.fullPath, "full path")

View File

@ -8,11 +8,11 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
type CacheResolverSuite struct { type CacheResolverSuite struct {

View File

@ -9,10 +9,10 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
const ( const (

View File

@ -7,7 +7,7 @@ import (
"strings" "strings"
"time" "time"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common/dttm"
) )
// Order of fields to fill in: // Order of fields to fill in:
@ -221,8 +221,8 @@ func EventBytes(subject string) []byte {
func EventWithSubjectBytes(subject string) []byte { func EventWithSubjectBytes(subject string) []byte {
tomorrow := time.Now().UTC().AddDate(0, 0, 1) tomorrow := time.Now().UTC().AddDate(0, 0, 1)
at := time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC) at := time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC)
atTime := common.FormatTime(at) atTime := dttm.Format(at)
endTime := common.FormatTime(at.Add(30 * time.Minute)) endTime := dttm.Format(at.Add(30 * time.Minute))
return EventWith( return EventWith(
defaultEventOrganizer, subject, defaultEventOrganizer, subject,
@ -234,7 +234,7 @@ func EventWithSubjectBytes(subject string) []byte {
func EventWithAttachment(subject string) []byte { func EventWithAttachment(subject string) []byte {
tomorrow := time.Now().UTC().AddDate(0, 0, 1) tomorrow := time.Now().UTC().AddDate(0, 0, 1)
at := time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC) at := time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC)
atTime := common.FormatTime(at) atTime := dttm.Format(at)
return EventWith( return EventWith(
defaultEventOrganizer, subject, defaultEventOrganizer, subject,
@ -246,7 +246,7 @@ func EventWithAttachment(subject string) []byte {
func EventWithRecurrenceBytes(subject, recurrenceTimeZone string) []byte { func EventWithRecurrenceBytes(subject, recurrenceTimeZone string) []byte {
tomorrow := time.Now().UTC().AddDate(0, 0, 1) tomorrow := time.Now().UTC().AddDate(0, 0, 1)
at := time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC) at := time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC)
atTime := common.FormatTime(at) atTime := dttm.Format(at)
timeSlice := strings.Split(atTime, "T") timeSlice := strings.Split(atTime, "T")
recurrence := string(fmt.Sprintf( recurrence := string(fmt.Sprintf(
@ -265,7 +265,7 @@ func EventWithRecurrenceBytes(subject, recurrenceTimeZone string) []byte {
func EventWithAttendeesBytes(subject string) []byte { func EventWithAttendeesBytes(subject string) []byte {
tomorrow := time.Now().UTC().AddDate(0, 0, 1) tomorrow := time.Now().UTC().AddDate(0, 0, 1)
at := time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC) at := time.Date(tomorrow.Year(), tomorrow.Month(), tomorrow.Day(), tomorrow.Hour(), 0, 0, 0, time.UTC)
atTime := common.FormatTime(at) atTime := dttm.Format(at)
return EventWith( return EventWith(
defaultEventOrganizer, subject, defaultEventOrganizer, subject,

View File

@ -11,7 +11,7 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common/dttm"
) )
//nolint:lll //nolint:lll
@ -107,7 +107,7 @@ const (
// Contents verified as working with sample data from kiota-serialization-json-go v0.5.5 // Contents verified as working with sample data from kiota-serialization-json-go v0.5.5
func MessageBytes(subject string) []byte { func MessageBytes(subject string) []byte {
return MessageWithBodyBytes( return MessageWithBodyBytes(
"TPS Report "+subject+" "+common.FormatNow(common.SimpleDateTime), "TPS Report "+subject+" "+dttm.FormatNow(dttm.HumanReadable),
defaultMessageBody, defaultMessagePreview) defaultMessageBody, defaultMessagePreview)
} }

View File

@ -67,9 +67,9 @@ func (suite *MockSuite) TestMockExchangeCollection_NewExchangeCollectionMail_Hyd
t := suite.T() t := suite.T()
mdc := NewCollection(nil, nil, 3) mdc := NewCollection(nil, nil, 3)
buf := &bytes.Buffer{}
for stream := range mdc.Items(ctx, fault.New(true)) { for stream := range mdc.Items(ctx, fault.New(true)) {
buf := &bytes.Buffer{}
_, err := buf.ReadFrom(stream.ToReader()) _, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))

View File

@ -3,16 +3,13 @@ package exchange
import ( import (
"context" "context"
"testing" "testing"
"time"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/exchange/api"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -20,6 +17,7 @@ import (
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
type ExchangeRestoreSuite struct { type ExchangeRestoreSuite struct {
@ -67,8 +65,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreContact() {
var ( var (
t = suite.T() t = suite.T()
userID = tester.M365UserID(t) userID = tester.M365UserID(t)
now = time.Now() folderName = tester.DefaultTestRestoreDestination("contact").ContainerName
folderName = "TestRestoreContact: " + common.FormatSimpleDateTime(now)
) )
aFolder, err := suite.ac.Contacts().CreateContactFolder(ctx, userID, folderName) aFolder, err := suite.ac.Contacts().CreateContactFolder(ctx, userID, folderName)
@ -102,7 +99,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreEvent() {
var ( var (
t = suite.T() t = suite.T()
userID = tester.M365UserID(t) userID = tester.M365UserID(t)
subject = "TestRestoreEvent: " + common.FormatSimpleDateTime(time.Now()) subject = tester.DefaultTestRestoreDestination("event").ContainerName
) )
calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, subject) calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, subject)
@ -172,7 +169,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
} }
userID := tester.M365UserID(suite.T()) userID := tester.M365UserID(suite.T())
now := time.Now()
tests := []struct { tests := []struct {
name string name string
bytes []byte bytes []byte
@ -184,7 +181,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
bytes: exchMock.MessageBytes("Restore Exchange Object"), bytes: exchMock.MessageBytes("Restore Exchange Object"),
category: path.EmailCategory, category: path.EmailCategory,
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailObject: " + common.FormatSimpleDateTime(now) folderName := tester.DefaultTestRestoreDestination("mailobj").ContainerName
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -196,7 +193,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
bytes: exchMock.MessageWithDirectAttachment("Restore 1 Attachment"), bytes: exchMock.MessageWithDirectAttachment("Restore 1 Attachment"),
category: path.EmailCategory, category: path.EmailCategory,
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailwithAttachment: " + common.FormatSimpleDateTime(now) folderName := tester.DefaultTestRestoreDestination("mailwattch").ContainerName
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -208,7 +205,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
bytes: exchMock.MessageWithItemAttachmentEvent("Event Item Attachment"), bytes: exchMock.MessageWithItemAttachmentEvent("Event Item Attachment"),
category: path.EmailCategory, category: path.EmailCategory,
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreEventItemAttachment: " + common.FormatSimpleDateTime(now) folderName := tester.DefaultTestRestoreDestination("eventwattch").ContainerName
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -220,7 +217,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
bytes: exchMock.MessageWithItemAttachmentMail("Mail Item Attachment"), bytes: exchMock.MessageWithItemAttachmentMail("Mail Item Attachment"),
category: path.EmailCategory, category: path.EmailCategory,
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailItemAttachment: " + common.FormatSimpleDateTime(now) folderName := tester.DefaultTestRestoreDestination("mailitemattch").ContainerName
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -235,7 +232,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
), ),
category: path.EmailCategory, category: path.EmailCategory,
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailBasicItemAttachment: " + common.FormatSimpleDateTime(now) folderName := tester.DefaultTestRestoreDestination("mailbasicattch").ContainerName
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -250,7 +247,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
), ),
category: path.EmailCategory, category: path.EmailCategory,
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "ItemMailAttachmentwAttachment " + common.FormatSimpleDateTime(now) folderName := tester.DefaultTestRestoreDestination("mailnestattch").ContainerName
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -265,7 +262,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
), ),
category: path.EmailCategory, category: path.EmailCategory,
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "ItemMailAttachment_Contact " + common.FormatSimpleDateTime(now) folderName := tester.DefaultTestRestoreDestination("mailcontactattch").ContainerName
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -277,7 +274,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
bytes: exchMock.MessageWithNestedItemAttachmentEvent("Nested Item Attachment"), bytes: exchMock.MessageWithNestedItemAttachmentEvent("Nested Item Attachment"),
category: path.EmailCategory, category: path.EmailCategory,
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreNestedEventItemAttachment: " + common.FormatSimpleDateTime(now) folderName := tester.DefaultTestRestoreDestination("nestedattch").ContainerName
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -289,7 +286,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
bytes: exchMock.MessageWithLargeAttachment("Restore Large Attachment"), bytes: exchMock.MessageWithLargeAttachment("Restore Large Attachment"),
category: path.EmailCategory, category: path.EmailCategory,
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailwithLargeAttachment: " + common.FormatSimpleDateTime(now) folderName := tester.DefaultTestRestoreDestination("maillargeattch").ContainerName
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -301,7 +298,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
bytes: exchMock.MessageWithTwoAttachments("Restore 2 Attachments"), bytes: exchMock.MessageWithTwoAttachments("Restore 2 Attachments"),
category: path.EmailCategory, category: path.EmailCategory,
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailwithAttachments: " + common.FormatSimpleDateTime(now) folderName := tester.DefaultTestRestoreDestination("mailtwoattch").ContainerName
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -313,7 +310,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
bytes: exchMock.MessageWithOneDriveAttachment("Restore Reference(OneDrive) Attachment"), bytes: exchMock.MessageWithOneDriveAttachment("Restore Reference(OneDrive) Attachment"),
category: path.EmailCategory, category: path.EmailCategory,
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreMailwithReferenceAttachment: " + common.FormatSimpleDateTime(now) folderName := tester.DefaultTestRestoreDestination("mailrefattch").ContainerName
folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName) folder, err := suite.ac.Mail().CreateMailFolder(ctx, userID, folderName)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -326,7 +323,7 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
bytes: exchMock.ContactBytes("Test_Omega"), bytes: exchMock.ContactBytes("Test_Omega"),
category: path.ContactsCategory, category: path.ContactsCategory,
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
folderName := "TestRestoreContactObject: " + common.FormatSimpleDateTime(now) folderName := tester.DefaultTestRestoreDestination("contact").ContainerName
folder, err := suite.ac.Contacts().CreateContactFolder(ctx, userID, folderName) folder, err := suite.ac.Contacts().CreateContactFolder(ctx, userID, folderName)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -338,8 +335,8 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
bytes: exchMock.EventBytes("Restored Event Object"), bytes: exchMock.EventBytes("Restored Event Object"),
category: path.EventsCategory, category: path.EventsCategory,
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
calendarName := "TestRestoreEventObject: " + common.FormatSimpleDateTime(now) folderName := tester.DefaultTestRestoreDestination("event").ContainerName
calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, calendarName) calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, folderName)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
return ptr.Val(calendar.GetId()) return ptr.Val(calendar.GetId())
@ -350,8 +347,8 @@ func (suite *ExchangeRestoreSuite) TestRestoreExchangeObject() {
bytes: exchMock.EventWithAttachment("Restored Event Attachment"), bytes: exchMock.EventWithAttachment("Restored Event Attachment"),
category: path.EventsCategory, category: path.EventsCategory,
destination: func(t *testing.T, ctx context.Context) string { destination: func(t *testing.T, ctx context.Context) string {
calendarName := "TestRestoreEventObject_" + common.FormatSimpleDateTime(now) folderName := tester.DefaultTestRestoreDestination("eventobj").ContainerName
calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, calendarName) calendar, err := suite.ac.Events().CreateCalendar(ctx, userID, folderName)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
return ptr.Val(calendar.GetId()) return ptr.Val(calendar.GetId())

View File

@ -6,13 +6,13 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
var ErrFolderNotFound = clues.New("folder not found") var ErrFolderNotFound = clues.New("folder not found")
@ -137,21 +137,15 @@ func includeContainer(
directory = locPath.Folder(false) directory = locPath.Folder(false)
} }
var ( var ok bool
ok bool
pathRes path.Path
)
switch category { switch category {
case path.EmailCategory: case path.EmailCategory:
ok = scope.Matches(selectors.ExchangeMailFolder, directory) ok = scope.Matches(selectors.ExchangeMailFolder, directory)
pathRes = locPath
case path.ContactsCategory: case path.ContactsCategory:
ok = scope.Matches(selectors.ExchangeContactFolder, directory) ok = scope.Matches(selectors.ExchangeContactFolder, directory)
pathRes = locPath
case path.EventsCategory: case path.EventsCategory:
ok = scope.Matches(selectors.ExchangeEventCalendar, directory) ok = scope.Matches(selectors.ExchangeEventCalendar, directory)
pathRes = dirPath
default: default:
return nil, nil, false return nil, nil, false
} }
@ -162,5 +156,5 @@ func includeContainer(
"matches_input", directory, "matches_input", directory,
).Debug("backup folder selection filter") ).Debug("backup folder selection filter")
return pathRes, loc, ok return dirPath, loc, ok
} }

View File

@ -7,7 +7,6 @@ import (
"github.com/alcionai/corso/src/internal/common/pii" "github.com/alcionai/corso/src/internal/common/pii"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
@ -16,6 +15,7 @@ import (
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
type addedAndRemovedItemIDsGetter interface { type addedAndRemovedItemIDsGetter interface {
@ -23,6 +23,7 @@ type addedAndRemovedItemIDsGetter interface {
ctx context.Context, ctx context.Context,
user, containerID, oldDeltaToken string, user, containerID, oldDeltaToken string,
immutableIDs bool, immutableIDs bool,
canMakeDeltaQueries bool,
) ([]string, []string, api.DeltaUpdate, error) ) ([]string, []string, api.DeltaUpdate, error)
} }
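The interface's method name sits in elided context; assuming it is GetAddedAndRemovedItemIDs, a hedged stub shows how the new canMakeDeltaQueries parameter might be honored: when false, list everything and return an empty delta URL so the next run also performs a full enumeration. DeltaUpdate below is an illustrative stand-in for api.DeltaUpdate, and all IDs are hypothetical.

package main

import "context"

// DeltaUpdate is an illustrative stand-in for api.DeltaUpdate.
type DeltaUpdate struct {
	URL   string
	Reset bool
}

type mockGetter struct{}

// GetAddedAndRemovedItemIDs returns added IDs, removed IDs, and the next
// delta state. With canMakeDeltaQueries false it falls back to a full
// listing: no removals can be detected, and the empty URL plus Reset
// signal that no delta token should be persisted.
func (mockGetter) GetAddedAndRemovedItemIDs(
	ctx context.Context,
	user, containerID, oldDeltaToken string,
	immutableIDs bool,
	canMakeDeltaQueries bool,
) ([]string, []string, DeltaUpdate, error) {
	if !canMakeDeltaQueries {
		return []string{"item-1", "item-2"}, nil, DeltaUpdate{Reset: true}, nil
	}

	// Delta path: only changes since oldDeltaToken.
	return []string{"item-3"}, []string{"item-1"}, DeltaUpdate{URL: "next-delta-token"}, nil
}

func main() {}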
@ -31,19 +32,24 @@ type addedAndRemovedItemIDsGetter interface {
// into a BackupCollection. Messages outside of those directories are omitted. // into a BackupCollection. Messages outside of those directories are omitted.
// @param collection is filled with during this function. // @param collection is filled with during this function.
// Supports all exchange applications: Contacts, Events, and Mail // Supports all exchange applications: Contacts, Events, and Mail
//
// TODO(ashmrtn): This should really return []data.BackupCollection but
// unfortunately some of our tests rely on being able to look up returned // unfortunately some of our tests rely on being able to look up returned
// collections by ID and it would be non-trivial to change them.
func filterContainersAndFillCollections( func filterContainersAndFillCollections(
ctx context.Context, ctx context.Context,
qp graph.QueryParams, qp graph.QueryParams,
getter addedAndRemovedItemIDsGetter, getter addedAndRemovedItemIDsGetter,
collections map[string]data.BackupCollection,
statusUpdater support.StatusUpdater, statusUpdater support.StatusUpdater,
resolver graph.ContainerResolver, resolver graph.ContainerResolver,
scope selectors.ExchangeScope, scope selectors.ExchangeScope,
dps DeltaPaths, dps DeltaPaths,
ctrlOpts control.Options, ctrlOpts control.Options,
errs *fault.Bus, errs *fault.Bus,
) error { ) (map[string]data.BackupCollection, error) {
var ( var (
// folder ID -> BackupCollection.
collections = map[string]data.BackupCollection{}
// folder ID -> delta url or folder path lookups // folder ID -> delta url or folder path lookups
deltaURLs = map[string]string{} deltaURLs = map[string]string{}
currPaths = map[string]string{} currPaths = map[string]string{}
@ -60,19 +66,19 @@ func filterContainersAndFillCollections(
// But this will work for the short term. // But this will work for the short term.
ac, err := api.NewClient(qp.Credentials) ac, err := api.NewClient(qp.Credentials)
if err != nil { if err != nil {
return err return nil, err
} }
ibt, err := itemerByType(ac, category) ibt, err := itemerByType(ac, category)
if err != nil { if err != nil {
return err return nil, err
} }
el := errs.Local() el := errs.Local()
for _, c := range resolver.Items() { for _, c := range resolver.Items() {
if el.Failure() != nil { if el.Failure() != nil {
return el.Failure() return nil, el.Failure()
} }
cID := ptr.Val(c.GetId()) cID := ptr.Val(c.GetId())
@ -80,8 +86,8 @@ func filterContainersAndFillCollections(
var ( var (
dp = dps[cID] dp = dps[cID]
prevDelta = dp.delta prevDelta = dp.Delta
prevPathStr = dp.path // do not log: pii; log prevPath instead prevPathStr = dp.Path // do not log: pii; log prevPath instead
prevPath path.Path prevPath path.Path
ictx = clues.Add( ictx = clues.Add(
ctx, ctx,
@ -114,7 +120,8 @@ func filterContainersAndFillCollections(
qp.ResourceOwner.ID(), qp.ResourceOwner.ID(),
cID, cID,
prevDelta, prevDelta,
ctrlOpts.ToggleFeatures.ExchangeImmutableIDs) ctrlOpts.ToggleFeatures.ExchangeImmutableIDs,
!ctrlOpts.ToggleFeatures.DisableDelta)
if err != nil { if err != nil {
if !graph.IsErrDeletedInFlight(err) { if !graph.IsErrDeletedInFlight(err) {
el.AddRecoverable(clues.Stack(err).Label(fault.LabelForceNoBackupCreation)) el.AddRecoverable(clues.Stack(err).Label(fault.LabelForceNoBackupCreation))
@ -171,7 +178,7 @@ func filterContainersAndFillCollections(
// resolver (which contains all the resource owners' current containers). // resolver (which contains all the resource owners' current containers).
for id, p := range tombstones { for id, p := range tombstones {
if el.Failure() != nil { if el.Failure() != nil {
return el.Failure() return nil, el.Failure()
} }
ictx := clues.Add(ctx, "tombstone_id", id) ictx := clues.Add(ctx, "tombstone_id", id)
@ -223,12 +230,12 @@ func filterContainersAndFillCollections(
}, },
statusUpdater) statusUpdater)
if err != nil { if err != nil {
return clues.Wrap(err, "making metadata collection") return nil, clues.Wrap(err, "making metadata collection")
} }
collections["metadata"] = col collections["metadata"] = col
return el.Failure() return collections, el.Failure()
} }
// produces a set of id:path pairs from the deltapaths map. // produces a set of id:path pairs from the deltapaths map.
@ -238,7 +245,7 @@ func makeTombstones(dps DeltaPaths) map[string]string {
r := make(map[string]string, len(dps)) r := make(map[string]string, len(dps))
for id, v := range dps { for id, v := range dps {
r[id] = v.path r[id] = v.Path
} }
return r return r

File diff suppressed because it is too large

View File

@ -4,15 +4,13 @@ import (
"bytes" "bytes"
"context" "context"
"fmt" "fmt"
"reflect"
"runtime/trace" "runtime/trace"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
@ -24,6 +22,7 @@ import (
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
// RestoreExchangeObject directs restore pipeline towards restore function // RestoreExchangeObject directs restore pipeline towards restore function
@ -74,7 +73,13 @@ func RestoreExchangeContact(
ctx = clues.Add(ctx, "item_id", ptr.Val(contact.GetId())) ctx = clues.Add(ctx, "item_id", ptr.Val(contact.GetId()))
response, err := service.Client().UsersById(user).ContactFoldersById(destination).Contacts().Post(ctx, contact, nil) response, err := service.Client().
Users().
ByUserId(user).
ContactFolders().
ByContactFolderId(destination).
Contacts().
Post(ctx, contact, nil)
if err != nil { if err != nil {
return nil, graph.Wrap(ctx, err, "uploading Contact") return nil, graph.Wrap(ctx, err, "uploading Contact")
} }
@ -122,7 +127,13 @@ func RestoreExchangeEvent(
transformedEvent.SetAttachments([]models.Attachmentable{}) transformedEvent.SetAttachments([]models.Attachmentable{})
} }
response, err := service.Client().UsersById(user).CalendarsById(destination).Events().Post(ctx, transformedEvent, nil) response, err := service.Client().
Users().
ByUserId(user).
Calendars().
ByCalendarId(destination).
Events().
Post(ctx, transformedEvent, nil)
if err != nil { if err != nil {
return nil, graph.Wrap(ctx, err, "uploading event") return nil, graph.Wrap(ctx, err, "uploading event")
} }
@ -194,7 +205,7 @@ func RestoreMailMessage(
if clone.GetSentDateTime() != nil { if clone.GetSentDateTime() != nil {
sv2 := models.NewSingleValueLegacyExtendedProperty() sv2 := models.NewSingleValueLegacyExtendedProperty()
sendPropertyValue := common.FormatLegacyTime(ptr.Val(clone.GetSentDateTime())) sendPropertyValue := dttm.FormatToLegacy(ptr.Val(clone.GetSentDateTime()))
sendPropertyTag := MailSendDateTimeOverrideProperty sendPropertyTag := MailSendDateTimeOverrideProperty
sv2.SetId(&sendPropertyTag) sv2.SetId(&sendPropertyTag)
sv2.SetValue(&sendPropertyValue) sv2.SetValue(&sendPropertyValue)
@ -204,7 +215,7 @@ func RestoreMailMessage(
if clone.GetReceivedDateTime() != nil { if clone.GetReceivedDateTime() != nil {
sv3 := models.NewSingleValueLegacyExtendedProperty() sv3 := models.NewSingleValueLegacyExtendedProperty()
recvPropertyValue := common.FormatLegacyTime(ptr.Val(clone.GetReceivedDateTime())) recvPropertyValue := dttm.FormatToLegacy(ptr.Val(clone.GetReceivedDateTime()))
recvPropertyTag := MailReceiveDateTimeOverriveProperty recvPropertyTag := MailReceiveDateTimeOverriveProperty
sv3.SetId(&recvPropertyTag) sv3.SetId(&recvPropertyTag)
sv3.SetValue(&recvPropertyValue) sv3.SetValue(&recvPropertyValue)
@ -218,16 +229,24 @@ func RestoreMailMessage(
return nil, err return nil, err
} }
info := api.MailInfo(clone) info := api.MailInfo(clone, int64(len(bits)))
info.Size = int64(len(bits))
return info, nil return info, nil
} }
// attachmentBytes is a helper to retrieve the attachment content from a models.Attachmentable // GetAttachmentBytes is a helper to retrieve the attachment content from a models.Attachmentable
// TODO: Revisit how we retrieve/persist attachment content during backup so this is not needed func GetAttachmentBytes(attachment models.Attachmentable) ([]byte, error) {
func attachmentBytes(attachment models.Attachmentable) []byte { bi, err := attachment.GetBackingStore().Get("contentBytes")
return reflect.Indirect(reflect.ValueOf(attachment)).FieldByName("contentBytes").Bytes() if err != nil {
return nil, err
}
bts, ok := bi.([]byte)
if !ok {
return nil, clues.New(fmt.Sprintf("unexpected type for attachment content: %T", bi))
}
return bts, nil
} }
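A hedged round-trip sketch for the new helper: it assumes msgraph-sdk-go's generated SetContentBytes writes the value into the model's backing store under the "contentBytes" key, which is what the lookup above depends on; treat it as illustrative rather than a verified contract.

package main

import (
	"fmt"

	"github.com/microsoftgraph/msgraph-sdk-go/models"
)

func main() {
	attachment := models.NewFileAttachment()
	attachment.SetContentBytes([]byte("hello")) // setter stores under "contentBytes" (assumed key)

	// Same lookup GetAttachmentBytes performs: read the raw value from
	// the backing store, then assert it back to []byte.
	bi, err := attachment.GetBackingStore().Get("contentBytes")
	if err != nil {
		fmt.Println("backing store lookup failed:", err)
		return
	}

	bts, ok := bi.([]byte)
	if !ok {
		fmt.Printf("unexpected type for attachment content: %T\n", bi)
		return
	}

	fmt.Println(string(bts)) // hello
}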
// SendMailToBackStore function for transporting in-memory messageable item to M365 backstore // SendMailToBackStore function for transporting in-memory messageable item to M365 backstore
@ -246,7 +265,13 @@ func SendMailToBackStore(
// Item.Attachments --> HasAttachments doesn't always have a value populated when deserialized // Item.Attachments --> HasAttachments doesn't always have a value populated when deserialized
message.SetAttachments([]models.Attachmentable{}) message.SetAttachments([]models.Attachmentable{})
response, err := service.Client().UsersById(user).MailFoldersById(destination).Messages().Post(ctx, message, nil) response, err := service.Client().
Users().
ByUserId(user).
MailFolders().
ByMailFolderId(destination).
Messages().
Post(ctx, message, nil)
if err != nil { if err != nil {
return graph.Wrap(ctx, err, "restoring mail") return graph.Wrap(ctx, err, "restoring mail")
} }
@ -436,16 +461,13 @@ func restoreCollection(
metrics.Bytes += int64(len(byteArray)) metrics.Bytes += int64(len(byteArray))
metrics.Successes++ metrics.Successes++
itemPath, err := dc.FullPath().Append(itemData.UUID(), true) itemPath, err := dc.FullPath().AppendItem(itemData.UUID())
if err != nil { if err != nil {
errs.AddRecoverable(clues.Wrap(err, "building full path with item").WithClues(ctx)) errs.AddRecoverable(clues.Wrap(err, "building full path with item").WithClues(ctx))
continue continue
} }
locationRef := &path.Builder{} locationRef := path.Builder{}.Append(itemPath.Folders()...)
if category == path.ContactsCategory {
locationRef = locationRef.Append(itemPath.Folders()...)
}
err = deets.Add( err = deets.Add(
itemPath, itemPath,
@ -689,10 +711,20 @@ func establishEventsRestoreLocation(
ctx = clues.Add(ctx, "is_new_cache", isNewCache) ctx = clues.Add(ctx, "is_new_cache", isNewCache)
temp, err := ac.Events().CreateCalendar(ctx, user, folders[0]) temp, err := ac.Events().CreateCalendar(ctx, user, folders[0])
if err != nil { if err != nil && !graph.IsErrFolderExists(err) {
return "", err return "", err
} }
// 409 handling: Fetch folder if it exists and add to cache.
// This is rare, but may happen if CreateCalendar() POST fails with 5xx,
// potentially leaving dirty state in graph.
if graph.IsErrFolderExists(err) {
temp, err = ac.Events().GetContainerByName(ctx, user, folders[0])
if err != nil {
return "", err
}
}
folderID := ptr.Val(temp.GetId()) folderID := ptr.Val(temp.GetId())
if isNewCache { if isNewCache {
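The 409 handling above is a create-then-fetch-on-conflict pattern: attempt the create, and if the container already exists (possibly dirty state left by an earlier failed POST), look it up by name instead. A generic sketch of the same flow; ensureContainer, its callbacks, and errFolderExists are all hypothetical stand-ins:

package main

import (
	"errors"
	"fmt"
)

// errFolderExists stands in for whatever sentinel graph.IsErrFolderExists matches.
var errFolderExists = errors.New("folder exists")

// ensureContainer tries the create first; on a folder-exists conflict it
// falls back to fetching the existing container by name.
func ensureContainer(
	create, getByName func(name string) (string, error),
	name string,
) (string, error) {
	id, err := create(name)
	if err != nil && !errors.Is(err, errFolderExists) {
		return "", err
	}

	if errors.Is(err, errFolderExists) {
		return getByName(name)
	}

	return id, nil
}

func main() {
	create := func(string) (string, error) { return "", errFolderExists }
	getByName := func(string) (string, error) { return "existing-id", nil }

	id, err := ensureContainer(create, getByName, "Corso_Restore")
	fmt.Println(id, err) // existing-id <nil>
}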

View File

@ -27,3 +27,25 @@ func NextLink(pl PageLinker) string {
func NextAndDeltaLink(pl DeltaPageLinker) (string, string) { func NextAndDeltaLink(pl DeltaPageLinker) (string, string) {
return NextLink(pl), ptr.Val(pl.GetOdataDeltaLink()) return NextLink(pl), ptr.Val(pl.GetOdataDeltaLink())
} }
type Valuer[T any] interface {
GetValue() []T
}
type PageLinkValuer[T any] interface {
PageLinker
Valuer[T]
}
// EmptyDeltaLinker is used to convert PageLinker to DeltaPageLinker
type EmptyDeltaLinker[T any] struct {
PageLinkValuer[T]
}
func (EmptyDeltaLinker[T]) GetOdataDeltaLink() *string {
return ptr.To("")
}
func (e EmptyDeltaLinker[T]) GetValue() []T {
return e.PageLinkValuer.GetValue()
}
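A small usage sketch for the adapter above: wrap any plain paged response so delta-aware callers can consume it and receive an empty delta link. fakePage is hypothetical, and this assumes PageLinker (defined elsewhere in this package) exposes GetOdataNextLink() *string; the package name is likewise assumed.

package api

// fakePage is a hypothetical stand-in for a non-delta graph response page.
type fakePage struct {
	next  *string
	items []string
}

func (p fakePage) GetOdataNextLink() *string { return p.next }
func (p fakePage) GetValue() []string        { return p.items }

// asDeltaPage shows the intent: any PageLinkValuer satisfies the delta
// interfaces once wrapped, with the delta link forced to "".
func asDeltaPage(p PageLinkValuer[string]) (next, delta string, items []string) {
	wrapped := EmptyDeltaLinker[string]{PageLinkValuer: p}
	next, delta = NextAndDeltaLink(wrapped) // delta is always ""

	return next, delta, wrapped.GetValue()
}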

View File

@ -11,14 +11,19 @@ import (
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
var _ data.BackupCollection = emptyCollection{} var _ data.BackupCollection = prefixCollection{}
type emptyCollection struct { // TODO: move this out of graph. /data would be a much better owner
p path.Path // for a generic struct like this. However, support.StatusUpdater makes
su support.StatusUpdater // it difficult to extract from this package in a generic way.
type prefixCollection struct {
full path.Path
prev path.Path
su support.StatusUpdater
state data.CollectionState
} }
func (c emptyCollection) Items(ctx context.Context, _ *fault.Bus) <-chan data.Stream { func (c prefixCollection) Items(ctx context.Context, _ *fault.Bus) <-chan data.Stream {
res := make(chan data.Stream) res := make(chan data.Stream)
close(res) close(res)
@ -28,26 +33,29 @@ func (c emptyCollection) Items(ctx context.Context, _ *fault.Bus) <-chan data.St
return res return res
} }
func (c emptyCollection) FullPath() path.Path { func (c prefixCollection) FullPath() path.Path {
return c.p return c.full
} }
func (c emptyCollection) PreviousPath() path.Path { func (c prefixCollection) PreviousPath() path.Path {
return c.p return c.prev
} }
func (c emptyCollection) State() data.CollectionState { func (c prefixCollection) State() data.CollectionState {
// This assumes we won't change the prefix path. Could probably use MovedState return c.state
// as well if we do need to change things around.
return data.NotMovedState
} }
func (c emptyCollection) DoNotMergeItems() bool { func (c prefixCollection) DoNotMergeItems() bool {
return false return false
} }
// ---------------------------------------------------------------------------
// base collections
// ---------------------------------------------------------------------------
func BaseCollections( func BaseCollections(
ctx context.Context, ctx context.Context,
colls []data.BackupCollection,
tenant, rOwner string, tenant, rOwner string,
service path.ServiceType, service path.ServiceType,
categories map[path.CategoryType]struct{}, categories map[path.CategoryType]struct{},
@ -55,15 +63,23 @@ func BaseCollections(
errs *fault.Bus, errs *fault.Bus,
) ([]data.BackupCollection, error) { ) ([]data.BackupCollection, error) {
var ( var (
res = []data.BackupCollection{} res = []data.BackupCollection{}
el = errs.Local() el = errs.Local()
lastErr error lastErr error
collKeys = map[string]struct{}{}
) )
// won't catch deleted collections, since they have no FullPath
for _, c := range colls {
if c.FullPath() != nil {
collKeys[c.FullPath().String()] = struct{}{}
}
}
for cat := range categories { for cat := range categories {
ictx := clues.Add(ctx, "base_service", service, "base_category", cat) ictx := clues.Add(ctx, "base_service", service, "base_category", cat)
p, err := path.Build(tenant, rOwner, service, cat, false, "tmp") full, err := path.ServicePrefix(tenant, rOwner, service, cat)
if err != nil { if err != nil {
// Shouldn't happen. // Shouldn't happen.
err = clues.Wrap(err, "making path").WithClues(ictx) err = clues.Wrap(err, "making path").WithClues(ictx)
@ -73,19 +89,63 @@ func BaseCollections(
continue continue
} }
// Pop off the last path element because we just want the prefix. // only add this collection if it doesn't already exist in the set.
p, err = p.Dir() if _, ok := collKeys[full.String()]; !ok {
if err != nil { res = append(res, &prefixCollection{
// Shouldn't happen. prev: full,
err = clues.Wrap(err, "getting base prefix").WithClues(ictx) full: full,
el.AddRecoverable(err) su: su,
lastErr = err state: data.StateOf(full, full),
})
continue
} }
res = append(res, emptyCollection{p: p, su: su})
} }
return res, lastErr return res, lastErr
} }
// ---------------------------------------------------------------------------
// prefix migration
// ---------------------------------------------------------------------------
// Creates a new collection that only handles prefix pathing.
func NewPrefixCollection(
prev, full path.Path,
su support.StatusUpdater,
) (*prefixCollection, error) {
if prev != nil {
if len(prev.Item()) > 0 {
return nil, clues.New("prefix collection previous path contains an item")
}
if len(prev.Folders()) > 0 {
return nil, clues.New("prefix collection previous path contains folders")
}
}
if full != nil {
if len(full.Item()) > 0 {
return nil, clues.New("prefix collection full path contains an item")
}
if len(full.Folders()) > 0 {
return nil, clues.New("prefix collection full path contains folders")
}
}
pc := &prefixCollection{
prev: prev,
full: full,
su: su,
state: data.StateOf(prev, full),
}
if pc.state == data.DeletedState {
return nil, clues.New("collection attempted to delete prefix")
}
if pc.state == data.NewState {
return nil, clues.New("collection attempted to create a new prefix")
}
return pc, nil
}

View File

@ -0,0 +1,100 @@
package graph
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path"
)
type CollectionsUnitSuite struct {
tester.Suite
}
func TestCollectionsUnitSuite(t *testing.T) {
suite.Run(t, &CollectionsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *CollectionsUnitSuite) TestNewPrefixCollection() {
t := suite.T()
serv := path.OneDriveService
cat := path.FilesCategory
p1, err := path.ServicePrefix("t", "ro1", serv, cat)
require.NoError(t, err, clues.ToCore(err))
p2, err := path.ServicePrefix("t", "ro2", serv, cat)
require.NoError(t, err, clues.ToCore(err))
items, err := path.Build("t", "ro", serv, cat, true, "fld", "itm")
require.NoError(t, err, clues.ToCore(err))
folders, err := path.Build("t", "ro", serv, cat, false, "fld")
require.NoError(t, err, clues.ToCore(err))
table := []struct {
name string
prev path.Path
full path.Path
expectErr require.ErrorAssertionFunc
}{
{
name: "not moved",
prev: p1,
full: p1,
expectErr: require.NoError,
},
{
name: "moved",
prev: p1,
full: p2,
expectErr: require.NoError,
},
{
name: "deleted",
prev: p1,
full: nil,
expectErr: require.Error,
},
{
name: "new",
prev: nil,
full: p2,
expectErr: require.Error,
},
{
name: "prev has items",
prev: items,
full: p1,
expectErr: require.Error,
},
{
name: "prev has folders",
prev: folders,
full: p1,
expectErr: require.Error,
},
{
name: "full has items",
prev: p1,
full: items,
expectErr: require.Error,
},
{
name: "full has folders",
prev: p1,
full: folders,
expectErr: require.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
_, err := NewPrefixCollection(test.prev, test.full, nil)
test.expectErr(suite.T(), err, clues.ToCore(err))
})
}
}

View File

@ -0,0 +1,202 @@
package graph
import (
"context"
"net/http"
"sync"
"github.com/alcionai/clues"
khttp "github.com/microsoft/kiota-http-go"
"golang.org/x/time/rate"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
)
// ---------------------------------------------------------------------------
// Concurrency Limiter
// "how many calls at one time"
// ---------------------------------------------------------------------------
// concurrencyLimiter is a middleware that limits the number of concurrent requests to the Graph API
type concurrencyLimiter struct {
semaphore chan struct{}
}
var (
once sync.Once
concurrencyLim *concurrencyLimiter
maxConcurrentRequests = 4
)
func generateConcurrencyLimiter(capacity int) *concurrencyLimiter {
if capacity < 1 || capacity > maxConcurrentRequests {
capacity = maxConcurrentRequests
}
return &concurrencyLimiter{
semaphore: make(chan struct{}, capacity),
}
}
func InitializeConcurrencyLimiter(capacity int) {
once.Do(func() {
concurrencyLim = generateConcurrencyLimiter(capacity)
})
}
func (cl *concurrencyLimiter) Intercept(
pipeline khttp.Pipeline,
middlewareIndex int,
req *http.Request,
) (*http.Response, error) {
if cl == nil || cl.semaphore == nil {
return nil, clues.New("nil concurrency limiter")
}
cl.semaphore <- struct{}{}
defer func() {
<-cl.semaphore
}()
return pipeline.Next(req, middlewareIndex)
}
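The buffered channel here is the standard Go counting-semaphore idiom: a send acquires one of the capacity slots and blocks once all are taken, and a receive releases a slot. A standalone sketch of the same pattern follows; fetchAll, urls, and fetch are hypothetical names for illustration (import "sync"), not part of this package.

	// fetchAll runs fetch over urls with at most four concurrent requests.
	func fetchAll(urls []string, fetch func(string)) {
		sem := make(chan struct{}, 4)

		var wg sync.WaitGroup

		for _, u := range urls {
			u := u // capture the loop variable (needed before Go 1.22)

			wg.Add(1)
			sem <- struct{}{} // acquire: blocks while four calls are in flight

			go func() {
				defer wg.Done()
				defer func() { <-sem }() // release the slot

				fetch(u)
			}()
		}

		wg.Wait()
	}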
//nolint:lll
// ---------------------------------------------------------------------------
// Rate Limiter
// "how many calls in a minute"
// https://learn.microsoft.com/en-us/sharepoint/dev/general-development/how-to-avoid-getting-throttled-or-blocked-in-sharepoint-online
// ---------------------------------------------------------------------------
const (
// Default goal is to keep calls below the 10k-per-10-minute threshold.
// 14 tokens every second nets 840 per minute. That's 8400 every 10 minutes,
// which is a bit below the mark.
// But suppose we have a minute-long dry spell followed by a 10 minute tsunami.
// We'll have built up 750 tokens in reserve, so the first 750 calls go through
// immediately. Over the next 10 minutes, we'll partition out the other calls
// at a rate of 840-per-minute, ending at a total of 9150. Theoretically, if
// the volume keeps up after that, we'll always stay between 8400 and 9150 out
// of 10k. Worst case scenario, we have an extra minute of padding to allow
// up to 9990.
defaultPerSecond = 14 // 14 * 60 = 840
defaultMaxCap = 750 // real cap is 10k-per-10-minutes
// since drive runs on a per-minute, rather than per-10-minute bucket, we have
// to keep the max cap equal to the per-second cap. A large maxCap pool (say,
// 1200, similar to the per-minute cap) would allow us to make a flood of 2400
// calls in the first minute, putting us over the per-minute limit. Keeping
// the cap at the per-second burst means we only dole out a max of 1240 in one
// minute (20 cap + 1200 per minute + one burst of padding).
drivePerSecond = 20 // 20 * 60 = 1200
driveMaxCap = 20 // real cap is 1250-per-minute
)
var (
driveLimiter = rate.NewLimiter(drivePerSecond, driveMaxCap)
// also used as the exchange service limiter
defaultLimiter = rate.NewLimiter(defaultPerSecond, defaultMaxCap)
)
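As a sanity check on the token-bucket arithmetic in the comments above, here is a self-contained sketch using golang.org/x/time/rate with the default limiter's numbers. The limiter starts with a full reserve, so the first 750 waits return immediately and the remaining 250 are spaced at 14 per second, roughly 18 seconds in total.

	package main

	import (
		"context"
		"fmt"
		"time"

		"golang.org/x/time/rate"
	)

	func main() {
		// mirrors defaultLimiter: 14 tokens/sec refill, 750-token reserve.
		lim := rate.NewLimiter(14, 750)

		start := time.Now()

		for i := 0; i < 1000; i++ {
			if err := lim.Wait(context.Background()); err != nil {
				fmt.Println(err)
				return
			}
		}

		fmt.Printf("1000 waits took %v\n", time.Since(start))
	}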
type LimiterCfg struct {
Service path.ServiceType
}
type limiterCfgKey string
const limiterCfgCtxKey limiterCfgKey = "corsoGraphRateLimiterCfg"
func BindRateLimiterConfig(ctx context.Context, lc LimiterCfg) context.Context {
return context.WithValue(ctx, limiterCfgCtxKey, lc)
}
func ctxLimiter(ctx context.Context) *rate.Limiter {
lc, ok := extractRateLimiterConfig(ctx)
if !ok {
return defaultLimiter
}
switch lc.Service {
case path.OneDriveService, path.SharePointService:
return driveLimiter
default:
return defaultLimiter
}
}
func extractRateLimiterConfig(ctx context.Context) (LimiterCfg, bool) {
l := ctx.Value(limiterCfgCtxKey)
if l == nil {
return LimiterCfg{}, false
}
lc, ok := l.(LimiterCfg)
return lc, ok
}
type limiterConsumptionKey string
const limiterConsumptionCtxKey limiterConsumptionKey = "corsoGraphRateLimiterConsumption"
const (
defaultLC = 1
driveDefaultLC = 2
// limit consumption rate for single-item GET requests,
// or delta-based multi-item GETs.
SingleGetOrDeltaLC = 1
// limit consumption rate for anything permissions related
PermissionsLC = 5
)
// ConsumeNTokens ensures any calls using this context will consume
// n rate-limiter tokens. Default is 1, and this value does not need
// to be established in the context to consume the default tokens.
// This should only get used on a per-call basis, to avoid cross-pollination.
func ConsumeNTokens(ctx context.Context, n int) context.Context {
return context.WithValue(ctx, limiterConsumptionCtxKey, n)
}
func ctxLimiterConsumption(ctx context.Context, defaultConsumption int) int {
l := ctx.Value(limiterConsumptionCtxKey)
if l == nil {
return defaultConsumption
}
lc, ok := l.(int)
if !ok || lc < 1 {
return defaultConsumption
}
return lc
}
// QueueRequest will allow the request to occur immediately if we're under the
// calls-per-minute rate. Otherwise, the call will wait in a queue until
// the next token set is available.
func QueueRequest(ctx context.Context) {
limiter := ctxLimiter(ctx)
defaultConsumed := defaultLC
if limiter == driveLimiter {
defaultConsumed = driveDefaultLC
}
consume := ctxLimiterConsumption(ctx, defaultConsumed)
if err := limiter.WaitN(ctx, consume); err != nil {
logger.CtxErr(ctx, err).Error("graph middleware waiting on the limiter")
}
}
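Tying the context helpers together, a hedged sketch of a call site outside this package: the request is routed to the drive limiter via BindRateLimiterConfig, and the permissions call is budgeted at PermissionsLC tokens instead of the drive default of 2. getItemPermissions, driveID, and itemID are hypothetical; assume the usual graph and path imports.

	func getPermissionsWithBudget(ctx context.Context, driveID, itemID string) error {
		// route this request tree to the drive limiter.
		ctx = graph.BindRateLimiterConfig(ctx, graph.LimiterCfg{Service: path.OneDriveService})

		// the consumption value applies only to calls made with ictx,
		// keeping the per-call scoping the comment above asks for.
		ictx := graph.ConsumeNTokens(ctx, graph.PermissionsLC)

		return getItemPermissions(ictx, driveID, itemID) // hypothetical Graph call
	}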
// RateLimiterMiddleware is used to ensure we don't overstep per-min request limits.
type RateLimiterMiddleware struct{}
func (mw *RateLimiterMiddleware) Intercept(
pipeline khttp.Pipeline,
middlewareIndex int,
req *http.Request,
) (*http.Response, error) {
QueueRequest(req.Context())
return pipeline.Next(req, middlewareIndex)
}
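For completeness, one plausible way to wire both middlewares into a kiota HTTP client, assuming kiota-http-go's GetDefaultMiddlewares and GetDefaultClient helpers; this is a sketch inside package graph, and the real client assembly in corso composes more middleware than shown here.

	// build a client whose pipeline throttles both concurrency and rate.
	InitializeConcurrencyLimiter(4)

	middlewares := append(
		khttp.GetDefaultMiddlewares(),
		concurrencyLim,
		&RateLimiterMiddleware{},
	)

	httpClient := khttp.GetDefaultClient(middlewares...)
	_ = httpClient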

Some files were not shown because too many files have changed in this diff.