Merge branch 'main' of https://github.com/alcionai/corso into updateKopiaPassword

commit 76fbbb19ea
neha-Gupta1 2023-09-29 19:46:36 +05:30
105 changed files with 2393 additions and 2630 deletions

View File

@@ -45,6 +45,9 @@ runs:
     shell: bash
     working-directory: src
     run: |
+      echo "---------------------------"
+      echo Backup ${{ inputs.service }} ${{ inputs.kind }}
+      echo "---------------------------"
       set -euo pipefail
       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-${{ inputs.service }}-${{inputs.kind }}.log
       ./corso backup create '${{ inputs.service }}' \
@@ -61,6 +64,9 @@ runs:
     shell: bash
     working-directory: src
     run: |
+      echo "---------------------------"
+      echo Restore ${{ inputs.service }} ${{ inputs.kind }}
+      echo "---------------------------"
       set -euo pipefail
       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-restore-${{ inputs.service }}-${{inputs.kind }}.log
       ./corso restore '${{ inputs.service }}' \
@@ -85,11 +91,14 @@ runs:
       SANITY_TEST_KIND: restore
       SANITY_TEST_FOLDER: ${{ steps.restore.outputs.result }}
       SANITY_TEST_SERVICE: ${{ inputs.service }}
-      TEST_DATA: ${{ inputs.test-folder }}
-      BASE_BACKUP: ${{ inputs.base-backup }}
+      SANITY_TEST_DATA: ${{ inputs.test-folder }}
+      SANITY_BASE_BACKUP: ${{ inputs.base-backup }}
     run: |
+      echo "---------------------------"
+      echo Sanity Test Restore ${{ inputs.service }} ${{ inputs.kind }}
+      echo "---------------------------"
       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-validate-${{ inputs.service }}-${{inputs.kind }}.log
-      ./sanity-test
+      ./sanity-test restore ${{ inputs.service }}

   - name: Export ${{ inputs.service }} ${{ inputs.kind }}
     if: inputs.with-export == true
@@ -97,6 +106,9 @@ runs:
     shell: bash
     working-directory: src
     run: |
+      echo "---------------------------"
+      echo Export ${{ inputs.service }} ${{ inputs.kind }}
+      echo "---------------------------"
       set -euo pipefail
       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-restore-${{ inputs.service }}-${{inputs.kind }}.log
       ./corso export '${{ inputs.service }}' \
@@ -116,11 +128,14 @@ runs:
       SANITY_TEST_KIND: export
       SANITY_TEST_FOLDER: /tmp/export-${{ inputs.service }}-${{inputs.kind }}
       SANITY_TEST_SERVICE: ${{ inputs.service }}
-      TEST_DATA: ${{ inputs.test-folder }}
-      BASE_BACKUP: ${{ inputs.base-backup }}
+      SANITY_TEST_DATA: ${{ inputs.test-folder }}
+      SANITY_BASE_BACKUP: ${{ inputs.base-backup }}
     run: |
+      echo "---------------------------"
+      echo Sanity-Test Export ${{ inputs.service }} ${{ inputs.kind }}
+      echo "---------------------------"
       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-validate-${{ inputs.service }}-${{inputs.kind }}.log
-      ./sanity-test
+      ./sanity-test export ${{ inputs.service }}

   - name: Export archive ${{ inputs.service }} ${{ inputs.kind }}
     if: inputs.with-export == true
@@ -128,6 +143,9 @@ runs:
     shell: bash
     working-directory: src
     run: |
+      echo "---------------------------"
+      echo Export Archive ${{ inputs.service }} ${{ inputs.kind }}
+      echo "---------------------------"
       set -euo pipefail
       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-restore-${{ inputs.service }}-${{inputs.kind }}.log
       ./corso export '${{ inputs.service }}' \
@@ -150,16 +168,22 @@ runs:
       SANITY_TEST_KIND: export
       SANITY_TEST_FOLDER: /tmp/export-${{ inputs.service }}-${{inputs.kind }}-unzipped
       SANITY_TEST_SERVICE: ${{ inputs.service }}
-      TEST_DATA: ${{ inputs.test-folder }}
-      BASE_BACKUP: ${{ inputs.base-backup }}
+      SANITY_TEST_DATA: ${{ inputs.test-folder }}
+      SANITY_BASE_BACKUP: ${{ inputs.base-backup }}
     run: |
+      echo "---------------------------"
+      echo Sanity-Test Export Archive ${{ inputs.service }} ${{ inputs.kind }}
+      echo "---------------------------"
       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-validate-${{ inputs.service }}-${{inputs.kind }}.log
-      ./sanity-test
+      ./sanity-test export ${{ inputs.service }}

   - name: List ${{ inputs.service }} ${{ inputs.kind }}
     shell: bash
     working-directory: src
     run: |
+      echo "---------------------------"
+      echo Backup list ${{ inputs.service }} ${{ inputs.kind }}
+      echo "---------------------------"
       set -euo pipefail
       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-${{ inputs.service }}-${{inputs.kind }}.log
       ./corso backup list ${{ inputs.service }} \
@@ -178,6 +202,9 @@ runs:
     shell: bash
     working-directory: src
     run: |
+      echo "---------------------------"
+      echo Backup List w/ Backup ${{ inputs.service }} ${{ inputs.kind }}
+      echo "---------------------------"
       set -euo pipefail
       CORSO_LOG_FILE=${{ inputs.log-dir }}/gotest-backup-list-single-${{ inputs.service }}-${{inputs.kind }}.log
       ./corso backup list ${{ inputs.service }} \
@@ -193,7 +220,13 @@ runs:
         exit 1
       fi

-  # Upload the original go test output as an artifact for later review.
+  - if: always()
+    shell: bash
+    run: |
+      echo "---------------------------"
+      echo Logging Results
+      echo "---------------------------"
   - name: Upload test log
     if: always()
     uses: actions/upload-artifact@v3
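Reviewer note: the env renames above suggest the sanity-test binary now reads all of its inputs under a SANITY_ prefix, alongside the new positional subcommand (restore | export) and service argument. A minimal Go sketch of the consuming side, assuming plain environment lookups (the variable names come from this diff; the code is illustrative, not the actual sanity-test source):

package main

import (
	"fmt"
	"os"
)

func main() {
	// Env names taken from the action's env block above; how the real
	// sanity-test binary reads them is an assumption for illustration.
	kind := os.Getenv("SANITY_TEST_KIND")         // restore | export
	folder := os.Getenv("SANITY_TEST_FOLDER")     // folder to validate
	testData := os.Getenv("SANITY_TEST_DATA")     // was TEST_DATA
	baseBackup := os.Getenv("SANITY_BASE_BACKUP") // was BASE_BACKUP

	fmt.Println(kind, folder, testData, baseBackup)
}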

View File

@@ -31,7 +31,7 @@ runs:
   - name: use url or blank val
     shell: bash
     run: |
-      echo "STEP=${{ github.action || '' }}" >> $GITHUB_ENV
+      echo "STEP=${{ env.trimmed_ref || '' }}" >> $GITHUB_ENV
       echo "JOB=${{ github.job || '' }}" >> $GITHUB_ENV
       echo "LOGS=${{ github.run_id && env.logurl || '-' }}" >> $GITHUB_ENV
       echo "COMMIT=${{ github.sha && env.commiturl || '-' }}" >> $GITHUB_ENV
@@ -51,7 +51,7 @@ runs:
             "type": "section",
             "text": {
               "type": "mrkdwn",
-              "text": "${{ inputs.msg }} :: ${{ env.JOB }} - ${{ env.STEP }}\n${{ env.LOGS }} ${{ env.COMMIT }} ${{ env.REF }}"
+              "text": "${{ inputs.msg }}\n${{ env.JOB }} :: ${{ env.STEP }}\n${{ env.LOGS }} ${{ env.COMMIT }} ${{ env.REF }}"
             }
           }
         ]

View File

@@ -181,7 +181,7 @@ jobs:
         uses: ./.github/actions/backup-restore-test
         with:
           service: exchange
-          kind: initial
+          kind: first-backup
           backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"'
           restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
           test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}'
@@ -249,7 +249,7 @@ jobs:
        uses: ./.github/actions/backup-restore-test
        with:
          service: onedrive
-          kind: initial
+          kind: first-backup
          backup-args: '--user "${{ env.TEST_USER }}"'
          restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
          test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}'
@@ -305,7 +305,7 @@ jobs:
        uses: ./.github/actions/backup-restore-test
        with:
          service: sharepoint
-          kind: initial
+          kind: first-backup
          backup-args: '--site "${{ secrets.CORSO_M365_TEST_SITE_URL }}"'
          restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
          test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}'
@@ -362,12 +362,34 @@ jobs:
        uses: ./.github/actions/backup-restore-test
        with:
          service: groups
-          kind: initial
+          kind: first-backup
          backup-args: '--group "${{ vars.CORSO_M365_TEST_TEAM_ID }}"'
          test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
          log-dir: ${{ env.CORSO_LOG_DIR }}

-      # TODO: incrementals
+      # generate some more enteries for incremental check
+      # - name: Groups - Create new data (for incremental)
+      #   working-directory: ./src/cmd/factory
+      #   run: |
+      #     go run . sharepoint files \
+      #       --site ${{ secrets.CORSO_M365_TEST_GROUPS_SITE_URL }} \
+      #       --user ${{ env.TEST_USER }} \
+      #       --secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \
+      #       --tenant ${{ secrets.TENANT_ID }} \
+      #       --destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }} \
+      #       --count 4
+
+      # - name: Groups - Incremental backup
+      #   id: groups-incremental
+      #   uses: ./.github/actions/backup-restore-test
+      #   with:
+      #     service: groups
+      #     kind: incremental
+      #     backup-args: '--site "${{ secrets.CORSO_M365_TEST_GROUPS_SITE_URL }}"'
+      #     restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
+      #     test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-groups.outputs.result }}'
+      #     log-dir: ${{ env.CORSO_LOG_DIR }}
+      #     with-export: true

      ##########################################################################################################################################

View File

@@ -48,12 +48,12 @@ func AddCommands(cmd *cobra.Command) {
 	for _, sc := range subCommandFuncs {
 		subCommand := sc()
-		flags.AddAllProviderFlags(subCommand)
-		flags.AddAllStorageFlags(subCommand)
 		backupC.AddCommand(subCommand)

 		for _, addBackupTo := range serviceCommands {
-			addBackupTo(subCommand)
+			sc := addBackupTo(subCommand)
+			flags.AddAllProviderFlags(sc)
+			flags.AddAllStorageFlags(sc)
 		}
 	}
 }
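Reviewer note: this hunk moves provider/storage flag registration off the shared backup subcommand and onto each service command returned by addBackupTo. A small cobra sketch of why placement matters, using hypothetical parent/child commands (not from this diff): non-persistent flags registered on a parent are not inherited by its children, so each leaf command needs its own registration.

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

func main() {
	parent := &cobra.Command{Use: "backup"}
	child := &cobra.Command{Use: "create"}
	parent.AddCommand(child)

	// A non-persistent flag on the parent is invisible to the child...
	parent.Flags().String("bucket", "", "storage bucket")
	fmt.Println(child.Flags().Lookup("bucket")) // <nil>

	// ...so the refactor registers flags on the leaf command itself.
	child.Flags().String("bucket", "", "storage bucket")
	fmt.Println(child.Flags().Lookup("bucket") != nil) // true
}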

View File

@@ -1,7 +1,6 @@
 package backup

 import (
-	"bytes"
 	"fmt"
 	"strconv"
 	"testing"
@@ -14,6 +13,7 @@ import (
 	"github.com/alcionai/corso/src/cli/flags"
 	flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
+	cliTD "github.com/alcionai/corso/src/cli/testdata"
 	"github.com/alcionai/corso/src/cli/utils"
 	utilsTD "github.com/alcionai/corso/src/cli/utils/testdata"
 	"github.com/alcionai/corso/src/internal/tester"
@@ -92,76 +92,46 @@ func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
 func (suite *ExchangeUnitSuite) TestBackupCreateFlags() {
 	t := suite.T()

-	cmd := &cobra.Command{Use: createCommand}
-
-	// persistent flags not added by addCommands
-	flags.AddRunModeFlag(cmd, true)
-
-	c := addExchangeCommands(cmd)
-	require.NotNil(t, c)
-
-	// non-persistent flags not added by addCommands
-	flags.AddAllProviderFlags(c)
-	flags.AddAllStorageFlags(c)
-
-	flagsTD.WithFlags(
-		cmd,
-		exchangeServiceCommand,
-		[]string{
-			"--" + flags.RunModeFN, flags.RunModeFlagTest,
-			"--" + flags.BackupFN, flagsTD.BackupInput,
-		},
-		flagsTD.PreparedProviderFlags(),
-		flagsTD.PreparedStorageFlags())
-
-	// Test arg parsing for few args
-	args := []string{
-		exchangeServiceCommand,
-		"--" + flags.RunModeFN, flags.RunModeFlagTest,
-		"--" + flags.MailBoxFN, flagsTD.FlgInputs(flagsTD.MailboxInput),
-		"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.ExchangeCategoryDataInput),
-		"--" + flags.FetchParallelismFN, flagsTD.FetchParallelism,
-		"--" + flags.DeltaPageSizeFN, flagsTD.DeltaPageSize,
-		// bool flags
-		"--" + flags.FailFastFN,
-		"--" + flags.DisableIncrementalsFN,
-		"--" + flags.ForceItemDataDownloadFN,
-		"--" + flags.DisableDeltaFN,
-		"--" + flags.EnableImmutableIDFN,
-		"--" + flags.DisableConcurrencyLimiterFN,
-	}
-
-	args = append(args, flagsTD.PreparedProviderFlags()...)
-	args = append(args, flagsTD.PreparedStorageFlags()...)
-
-	cmd.SetArgs(args)
-
-	cmd.SetOut(new(bytes.Buffer)) // drop output
-	cmd.SetErr(new(bytes.Buffer)) // drop output
-
-	err := cmd.Execute()
-	assert.NoError(t, err, clues.ToCore(err))
+	cmd := cliTD.SetUpCmdHasFlags(
+		t,
+		&cobra.Command{Use: createCommand},
+		addExchangeCommands,
+		[]cliTD.UseCobraCommandFn{
+			flags.AddAllProviderFlags,
+			flags.AddAllStorageFlags,
+		},
+		flagsTD.WithFlags(
+			exchangeServiceCommand,
+			[]string{
+				"--" + flags.RunModeFN, flags.RunModeFlagTest,
+				"--" + flags.MailBoxFN, flagsTD.FlgInputs(flagsTD.MailboxInput),
+				"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.ExchangeCategoryDataInput),
+				"--" + flags.FetchParallelismFN, flagsTD.FetchParallelism,
+				"--" + flags.DeltaPageSizeFN, flagsTD.DeltaPageSize,
+				// bool flags
+				"--" + flags.FailFastFN,
+				"--" + flags.DisableIncrementalsFN,
+				"--" + flags.ForceItemDataDownloadFN,
+				"--" + flags.DisableDeltaFN,
+				"--" + flags.EnableImmutableIDFN,
+				"--" + flags.DisableConcurrencyLimiterFN,
+			},
+			flagsTD.PreparedProviderFlags(),
+			flagsTD.PreparedStorageFlags()))

 	opts := utils.MakeExchangeOpts(cmd)
 	co := utils.Control()

 	assert.ElementsMatch(t, flagsTD.MailboxInput, opts.Users)
-	// no assertion for category data input
 	assert.Equal(t, flagsTD.FetchParallelism, strconv.Itoa(co.Parallelism.ItemFetch))
 	assert.Equal(t, flagsTD.DeltaPageSize, strconv.Itoa(int(co.DeltaPageSize)))
-	// bool flags
 	assert.Equal(t, control.FailFast, co.FailureHandling)
 	assert.True(t, co.ToggleFeatures.DisableIncrementals)
 	assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
 	assert.True(t, co.ToggleFeatures.DisableDelta)
 	assert.True(t, co.ToggleFeatures.ExchangeImmutableIDs)
 	assert.True(t, co.ToggleFeatures.DisableConcurrencyLimiter)

 	flagsTD.AssertProviderFlags(t, cmd)
 	flagsTD.AssertStorageFlags(t, cmd)
 }
@@ -169,36 +139,25 @@ func (suite *ExchangeUnitSuite) TestBackupCreateFlags() {
 func (suite *ExchangeUnitSuite) TestBackupListFlags() {
 	t := suite.T()

-	cmd := &cobra.Command{Use: listCommand}
-
-	// persistent flags not added by addCommands
-	flags.AddRunModeFlag(cmd, true)
-
-	c := addExchangeCommands(cmd)
-	require.NotNil(t, c)
-
-	// non-persistent flags not added by addCommands
-	flags.AddAllProviderFlags(c)
-	flags.AddAllStorageFlags(c)
-
-	flagsTD.WithFlags(
-		cmd,
-		exchangeServiceCommand,
-		[]string{
-			"--" + flags.RunModeFN, flags.RunModeFlagTest,
-			"--" + flags.BackupFN, flagsTD.BackupInput,
-		},
-		flagsTD.PreparedBackupListFlags(),
-		flagsTD.PreparedProviderFlags(),
-		flagsTD.PreparedStorageFlags())
-
-	cmd.SetOut(new(bytes.Buffer)) // drop output
-	cmd.SetErr(new(bytes.Buffer)) // drop output
-
-	err := cmd.Execute()
-	assert.NoError(t, err, clues.ToCore(err))
+	cmd := cliTD.SetUpCmdHasFlags(
+		t,
+		&cobra.Command{Use: listCommand},
+		addExchangeCommands,
+		[]cliTD.UseCobraCommandFn{
+			flags.AddAllProviderFlags,
+			flags.AddAllStorageFlags,
+		},
+		flagsTD.WithFlags(
+			exchangeServiceCommand,
+			[]string{
+				"--" + flags.RunModeFN, flags.RunModeFlagTest,
+				"--" + flags.BackupFN, flagsTD.BackupInput,
+			},
+			flagsTD.PreparedBackupListFlags(),
+			flagsTD.PreparedProviderFlags(),
+			flagsTD.PreparedStorageFlags()))

 	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
 	flagsTD.AssertBackupListFlags(t, cmd)
 	flagsTD.AssertProviderFlags(t, cmd)
 	flagsTD.AssertStorageFlags(t, cmd)
@@ -207,41 +166,28 @@ func (suite *ExchangeUnitSuite) TestBackupListFlags() {
 func (suite *ExchangeUnitSuite) TestBackupDetailsFlags() {
 	t := suite.T()

-	cmd := &cobra.Command{Use: detailsCommand}
-
-	// persistent flags not added by addCommands
-	flags.AddRunModeFlag(cmd, true)
-
-	c := addExchangeCommands(cmd)
-	require.NotNil(t, c)
-
-	// non-persistent flags not added by addCommands
-	flags.AddAllProviderFlags(c)
-	flags.AddAllStorageFlags(c)
-
-	flagsTD.WithFlags(
-		cmd,
-		exchangeServiceCommand,
-		[]string{
-			"--" + flags.RunModeFN, flags.RunModeFlagTest,
-			"--" + flags.BackupFN, flagsTD.BackupInput,
-			"--" + flags.SkipReduceFN,
-		},
-		flagsTD.PreparedProviderFlags(),
-		flagsTD.PreparedStorageFlags())
-
-	cmd.SetOut(new(bytes.Buffer)) // drop output
-	cmd.SetErr(new(bytes.Buffer)) // drop output
-
-	err := cmd.Execute()
-	assert.NoError(t, err, clues.ToCore(err))
+	cmd := cliTD.SetUpCmdHasFlags(
+		t,
+		&cobra.Command{Use: detailsCommand},
+		addExchangeCommands,
+		[]cliTD.UseCobraCommandFn{
+			flags.AddAllProviderFlags,
+			flags.AddAllStorageFlags,
+		},
+		flagsTD.WithFlags(
+			exchangeServiceCommand,
+			[]string{
+				"--" + flags.RunModeFN, flags.RunModeFlagTest,
+				"--" + flags.BackupFN, flagsTD.BackupInput,
+				"--" + flags.SkipReduceFN,
+			},
+			flagsTD.PreparedProviderFlags(),
+			flagsTD.PreparedStorageFlags()))

 	co := utils.Control()

 	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
 	assert.True(t, co.SkipReduce)
 	flagsTD.AssertProviderFlags(t, cmd)
 	flagsTD.AssertStorageFlags(t, cmd)
 }
@@ -249,36 +195,24 @@ func (suite *ExchangeUnitSuite) TestBackupDetailsFlags() {
 func (suite *ExchangeUnitSuite) TestBackupDeleteFlags() {
 	t := suite.T()

-	cmd := &cobra.Command{Use: deleteCommand}
-
-	// persistent flags not added by addCommands
-	flags.AddRunModeFlag(cmd, true)
-
-	c := addExchangeCommands(cmd)
-	require.NotNil(t, c)
-
-	// non-persistent flags not added by addCommands
-	flags.AddAllProviderFlags(c)
-	flags.AddAllStorageFlags(c)
-
-	flagsTD.WithFlags(
-		cmd,
-		exchangeServiceCommand,
-		[]string{
-			"--" + flags.RunModeFN, flags.RunModeFlagTest,
-			"--" + flags.BackupFN, flagsTD.BackupInput,
-		},
-		flagsTD.PreparedProviderFlags(),
-		flagsTD.PreparedStorageFlags())
-
-	cmd.SetOut(new(bytes.Buffer)) // drop output
-	cmd.SetErr(new(bytes.Buffer)) // drop output
-
-	err := cmd.Execute()
-	assert.NoError(t, err, clues.ToCore(err))
+	cmd := cliTD.SetUpCmdHasFlags(
+		t,
+		&cobra.Command{Use: deleteCommand},
+		addExchangeCommands,
+		[]cliTD.UseCobraCommandFn{
+			flags.AddAllProviderFlags,
+			flags.AddAllStorageFlags,
+		},
+		flagsTD.WithFlags(
+			exchangeServiceCommand,
+			[]string{
+				"--" + flags.RunModeFN, flags.RunModeFlagTest,
+				"--" + flags.BackupFN, flagsTD.BackupInput,
+			},
+			flagsTD.PreparedProviderFlags(),
+			flagsTD.PreparedStorageFlags()))

 	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
 	flagsTD.AssertProviderFlags(t, cmd)
 	flagsTD.AssertStorageFlags(t, cmd)
 }
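Reviewer note: the repeated build/execute boilerplate in these tests collapses into cliTD.SetUpCmdHasFlags. The helper's definition is not part of this diff; a plausible shape inferred from the call sites (the signature, the UseCobraCommandFn name, and the body are assumptions, not the actual cli/testdata source). Under this reading, flagsTD.WithFlags now returns the assembled argument slice instead of applying it to a command.

package testdata

import (
	"bytes"
	"testing"

	"github.com/alcionai/clues"
	"github.com/spf13/cobra"
	"github.com/stretchr/testify/require"

	"github.com/alcionai/corso/src/cli/flags"
)

// UseCobraCommandFn mirrors helpers like flags.AddAllProviderFlags,
// which decorate the command they are given; the exact type is assumed.
type UseCobraCommandFn func(cmd *cobra.Command)

// SetUpCmdHasFlags is a guess at the helper: it registers persistent and
// non-persistent flags, sets args, silences output, and executes once.
func SetUpCmdHasFlags(
	t *testing.T,
	cmd *cobra.Command,
	addCommands func(*cobra.Command) *cobra.Command,
	addFlags []UseCobraCommandFn,
	args []string,
) *cobra.Command {
	// persistent flags not added by addCommands
	flags.AddRunModeFlag(cmd, true)

	c := addCommands(cmd)
	require.NotNil(t, c)

	// non-persistent flags not added by addCommands
	for _, addFlagsTo := range addFlags {
		addFlagsTo(c)
	}

	cmd.SetArgs(args)
	cmd.SetOut(new(bytes.Buffer)) // drop output
	cmd.SetErr(new(bytes.Buffer)) // drop output

	err := cmd.Execute()
	require.NoError(t, err, clues.ToCore(err))

	return cmd
}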

View File

@@ -1,7 +1,6 @@
 package backup

 import (
-	"bytes"
 	"strconv"
 	"testing"
@@ -13,6 +12,7 @@ import (
 	"github.com/alcionai/corso/src/cli/flags"
 	flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
+	cliTD "github.com/alcionai/corso/src/cli/testdata"
 	"github.com/alcionai/corso/src/cli/utils"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/control"
@@ -128,70 +128,38 @@ func (suite *GroupsUnitSuite) TestValidateGroupsBackupCreateFlags() {
 func (suite *GroupsUnitSuite) TestBackupCreateFlags() {
 	t := suite.T()

-	cmd := &cobra.Command{Use: createCommand}
-
-	// persistent flags not added by addCommands
-	flags.AddRunModeFlag(cmd, true)
-
-	c := addGroupsCommands(cmd)
-	require.NotNil(t, c)
-
-	// non-persistent flags not added by addCommands
-	flags.AddAllProviderFlags(c)
-	flags.AddAllStorageFlags(c)
-
-	flagsTD.WithFlags(
-		cmd,
-		groupsServiceCommand,
-		[]string{
-			"--" + flags.RunModeFN, flags.RunModeFlagTest,
-			"--" + flags.BackupFN, flagsTD.BackupInput,
-		},
-		flagsTD.PreparedProviderFlags(),
-		flagsTD.PreparedStorageFlags())
-
-	// Test arg parsing for few args
-	args := []string{
-		groupsServiceCommand,
-		"--" + flags.RunModeFN, flags.RunModeFlagTest,
-		"--" + flags.GroupFN, flagsTD.FlgInputs(flagsTD.GroupsInput),
-		"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.GroupsCategoryDataInput),
-		"--" + flags.FetchParallelismFN, flagsTD.FetchParallelism,
-		// bool flags
-		"--" + flags.FailFastFN,
-		"--" + flags.DisableIncrementalsFN,
-		"--" + flags.ForceItemDataDownloadFN,
-		"--" + flags.DisableDeltaFN,
-	}
-
-	args = append(args, flagsTD.PreparedProviderFlags()...)
-	args = append(args, flagsTD.PreparedStorageFlags()...)
-
-	cmd.SetArgs(args)
-
-	cmd.SetOut(new(bytes.Buffer)) // drop output
-	cmd.SetErr(new(bytes.Buffer)) // drop output
-
-	err := cmd.Execute()
-	assert.NoError(t, err, clues.ToCore(err))
+	cmd := cliTD.SetUpCmdHasFlags(
+		t,
+		&cobra.Command{Use: createCommand},
+		addGroupsCommands,
+		[]cliTD.UseCobraCommandFn{
+			flags.AddAllProviderFlags,
+			flags.AddAllStorageFlags,
+		},
+		flagsTD.WithFlags(
+			groupsServiceCommand,
+			[]string{
+				"--" + flags.RunModeFN, flags.RunModeFlagTest,
+				"--" + flags.GroupFN, flagsTD.FlgInputs(flagsTD.GroupsInput),
+				"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.GroupsCategoryDataInput),
+				"--" + flags.FetchParallelismFN, flagsTD.FetchParallelism,
+				"--" + flags.FailFastFN,
+				"--" + flags.DisableIncrementalsFN,
+				"--" + flags.ForceItemDataDownloadFN,
+				"--" + flags.DisableDeltaFN,
+			},
+			flagsTD.PreparedProviderFlags(),
+			flagsTD.PreparedStorageFlags()))

 	opts := utils.MakeGroupsOpts(cmd)
 	co := utils.Control()

 	assert.ElementsMatch(t, flagsTD.GroupsInput, opts.Groups)
-	// no assertion for category data input
 	assert.Equal(t, flagsTD.FetchParallelism, strconv.Itoa(co.Parallelism.ItemFetch))
-	// bool flags
 	assert.Equal(t, control.FailFast, co.FailureHandling)
 	assert.True(t, co.ToggleFeatures.DisableIncrementals)
 	assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
 	assert.True(t, co.ToggleFeatures.DisableDelta)
 	flagsTD.AssertProviderFlags(t, cmd)
 	flagsTD.AssertStorageFlags(t, cmd)
 }
@@ -199,37 +167,25 @@ func (suite *GroupsUnitSuite) TestBackupCreateFlags() {
 func (suite *GroupsUnitSuite) TestBackupListFlags() {
 	t := suite.T()

-	cmd := &cobra.Command{Use: listCommand}
-
-	// persistent flags not added by addCommands
-	flags.AddRunModeFlag(cmd, true)
-
-	c := addGroupsCommands(cmd)
-	require.NotNil(t, c)
-
-	// non-persistent flags not added by addCommands
-	flags.AddAllProviderFlags(c)
-	flags.AddAllStorageFlags(c)
-
-	flagsTD.WithFlags(
-		cmd,
-		groupsServiceCommand,
-		[]string{
-			"--" + flags.RunModeFN, flags.RunModeFlagTest,
-			"--" + flags.BackupFN, flagsTD.BackupInput,
-		},
-		flagsTD.PreparedBackupListFlags(),
-		flagsTD.PreparedProviderFlags(),
-		flagsTD.PreparedStorageFlags())
-
-	cmd.SetOut(new(bytes.Buffer)) // drop output
-	cmd.SetErr(new(bytes.Buffer)) // drop output
-
-	err := cmd.Execute()
-	assert.NoError(t, err, clues.ToCore(err))
+	cmd := cliTD.SetUpCmdHasFlags(
+		t,
+		&cobra.Command{Use: listCommand},
+		addGroupsCommands,
+		[]cliTD.UseCobraCommandFn{
+			flags.AddAllProviderFlags,
+			flags.AddAllStorageFlags,
+		},
+		flagsTD.WithFlags(
+			groupsServiceCommand,
+			[]string{
+				"--" + flags.RunModeFN, flags.RunModeFlagTest,
+				"--" + flags.BackupFN, flagsTD.BackupInput,
+			},
+			flagsTD.PreparedBackupListFlags(),
+			flagsTD.PreparedProviderFlags(),
+			flagsTD.PreparedStorageFlags()))

 	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
 	flagsTD.AssertBackupListFlags(t, cmd)
 	flagsTD.AssertProviderFlags(t, cmd)
 	flagsTD.AssertStorageFlags(t, cmd)
@@ -238,41 +194,28 @@ func (suite *GroupsUnitSuite) TestBackupListFlags() {
 func (suite *GroupsUnitSuite) TestBackupDetailsFlags() {
 	t := suite.T()

-	cmd := &cobra.Command{Use: detailsCommand}
-
-	// persistent flags not added by addCommands
-	flags.AddRunModeFlag(cmd, true)
-
-	c := addGroupsCommands(cmd)
-	require.NotNil(t, c)
-
-	// non-persistent flags not added by addCommands
-	flags.AddAllProviderFlags(c)
-	flags.AddAllStorageFlags(c)
-
-	flagsTD.WithFlags(
-		cmd,
-		groupsServiceCommand,
-		[]string{
-			"--" + flags.RunModeFN, flags.RunModeFlagTest,
-			"--" + flags.BackupFN, flagsTD.BackupInput,
-			"--" + flags.SkipReduceFN,
-		},
-		flagsTD.PreparedProviderFlags(),
-		flagsTD.PreparedStorageFlags())
-
-	cmd.SetOut(new(bytes.Buffer)) // drop output
-	cmd.SetErr(new(bytes.Buffer)) // drop output
-
-	err := cmd.Execute()
-	assert.NoError(t, err, clues.ToCore(err))
+	cmd := cliTD.SetUpCmdHasFlags(
+		t,
+		&cobra.Command{Use: detailsCommand},
+		addGroupsCommands,
+		[]cliTD.UseCobraCommandFn{
+			flags.AddAllProviderFlags,
+			flags.AddAllStorageFlags,
+		},
+		flagsTD.WithFlags(
+			groupsServiceCommand,
+			[]string{
+				"--" + flags.RunModeFN, flags.RunModeFlagTest,
+				"--" + flags.BackupFN, flagsTD.BackupInput,
+				"--" + flags.SkipReduceFN,
+			},
+			flagsTD.PreparedProviderFlags(),
+			flagsTD.PreparedStorageFlags()))

 	co := utils.Control()

 	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
 	assert.True(t, co.SkipReduce)
 	flagsTD.AssertProviderFlags(t, cmd)
 	flagsTD.AssertStorageFlags(t, cmd)
 }
@@ -280,48 +223,24 @@ func (suite *GroupsUnitSuite) TestBackupDetailsFlags() {
 func (suite *GroupsUnitSuite) TestBackupDeleteFlags() {
 	t := suite.T()

-	cmd := &cobra.Command{Use: deleteCommand}
-
-	// persistent flags not added by addCommands
-	flags.AddRunModeFlag(cmd, true)
-
-	c := addGroupsCommands(cmd)
-	require.NotNil(t, c)
-
-	// non-persistent flags not added by addCommands
-	flags.AddAllProviderFlags(c)
-	flags.AddAllStorageFlags(c)
-
-	flagsTD.WithFlags(
-		cmd,
-		groupsServiceCommand,
-		[]string{
-			"--" + flags.RunModeFN, flags.RunModeFlagTest,
-			"--" + flags.BackupFN, flagsTD.BackupInput,
-		},
-		flagsTD.PreparedProviderFlags(),
-		flagsTD.PreparedStorageFlags())
-
-	// Test arg parsing for few args
-	args := []string{
-		groupsServiceCommand,
-		"--" + flags.RunModeFN, flags.RunModeFlagTest,
-		"--" + flags.BackupFN, flagsTD.BackupInput,
-	}
-
-	args = append(args, flagsTD.PreparedProviderFlags()...)
-	args = append(args, flagsTD.PreparedStorageFlags()...)
-
-	cmd.SetArgs(args)
-
-	cmd.SetOut(new(bytes.Buffer)) // drop output
-	cmd.SetErr(new(bytes.Buffer)) // drop output
-
-	err := cmd.Execute()
-	assert.NoError(t, err, clues.ToCore(err))
+	cmd := cliTD.SetUpCmdHasFlags(
+		t,
+		&cobra.Command{Use: deleteCommand},
+		addGroupsCommands,
+		[]cliTD.UseCobraCommandFn{
+			flags.AddAllProviderFlags,
+			flags.AddAllStorageFlags,
+		},
+		flagsTD.WithFlags(
+			groupsServiceCommand,
+			[]string{
+				"--" + flags.RunModeFN, flags.RunModeFlagTest,
+				"--" + flags.BackupFN, flagsTD.BackupInput,
+			},
+			flagsTD.PreparedProviderFlags(),
+			flagsTD.PreparedStorageFlags()))

 	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
 	flagsTD.AssertProviderFlags(t, cmd)
 	flagsTD.AssertStorageFlags(t, cmd)
 }

View File

@@ -140,11 +140,9 @@ func prepM365Test(
 		recorder = strings.Builder{}
 	)

-	sc, err := st.StorageConfig()
+	cfg, err := st.ToS3Config()
 	require.NoError(t, err, clues.ToCore(err))

-	cfg := sc.(*storage.S3Config)
-
 	force := map[string]string{
 		tconfig.TestCfgAccountProvider: account.ProviderM365.String(),
 		tconfig.TestCfgStorageProvider: storage.ProviderS3.String(),
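Reviewer note: st.ToS3Config() replaces the StorageConfig() call plus *storage.S3Config type assertion that each caller previously repeated. The new method's body is not shown in this diff; a plausible sketch of the wrapper it adds (receiver shape and error text are assumptions):

// ToS3Config is a guess at the new accessor on storage.Storage: it
// centralizes the type assertion the call sites used to perform inline.
func (s Storage) ToS3Config() (*S3Config, error) {
	sc, err := s.StorageConfig()
	if err != nil {
		return nil, err
	}

	cfg, ok := sc.(*S3Config)
	if !ok {
		// hypothetical error; the real message may differ
		return nil, clues.New("storage config is not an s3 config")
	}

	return cfg, nil
}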

View File

@@ -1,7 +1,6 @@
 package backup

 import (
-	"bytes"
 	"fmt"
 	"testing"
@@ -13,6 +12,7 @@ import (
 	"github.com/alcionai/corso/src/cli/flags"
 	flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
+	cliTD "github.com/alcionai/corso/src/cli/testdata"
 	"github.com/alcionai/corso/src/cli/utils"
 	utilsTD "github.com/alcionai/corso/src/cli/utils/testdata"
 	"github.com/alcionai/corso/src/internal/tester"
@@ -92,48 +92,33 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
 func (suite *OneDriveUnitSuite) TestBackupCreateFlags() {
 	t := suite.T()

-	cmd := &cobra.Command{Use: createCommand}
-
-	// persistent flags not added by addCommands
-	flags.AddRunModeFlag(cmd, true)
-
-	c := addOneDriveCommands(cmd)
-	require.NotNil(t, c)
-
-	// non-persistent flags not added by addCommands
-	flags.AddAllProviderFlags(c)
-	flags.AddAllStorageFlags(c)
-
-	flagsTD.WithFlags(
-		cmd,
-		oneDriveServiceCommand,
-		[]string{
-			"--" + flags.RunModeFN, flags.RunModeFlagTest,
-			"--" + flags.UserFN, flagsTD.FlgInputs(flagsTD.UsersInput),
-			"--" + flags.FailFastFN,
-			"--" + flags.DisableIncrementalsFN,
-			"--" + flags.ForceItemDataDownloadFN,
-		},
-		flagsTD.PreparedProviderFlags(),
-		flagsTD.PreparedStorageFlags())
-
-	cmd.SetOut(new(bytes.Buffer)) // drop output
-	cmd.SetErr(new(bytes.Buffer)) // drop output
-
-	err := cmd.Execute()
-	assert.NoError(t, err, clues.ToCore(err))
+	cmd := cliTD.SetUpCmdHasFlags(
+		t,
+		&cobra.Command{Use: createCommand},
+		addOneDriveCommands,
+		[]cliTD.UseCobraCommandFn{
+			flags.AddAllProviderFlags,
+			flags.AddAllStorageFlags,
+		},
+		flagsTD.WithFlags(
+			oneDriveServiceCommand,
+			[]string{
+				"--" + flags.RunModeFN, flags.RunModeFlagTest,
+				"--" + flags.UserFN, flagsTD.FlgInputs(flagsTD.UsersInput),
+				"--" + flags.FailFastFN,
+				"--" + flags.DisableIncrementalsFN,
+				"--" + flags.ForceItemDataDownloadFN,
+			},
+			flagsTD.PreparedProviderFlags(),
+			flagsTD.PreparedStorageFlags()))

 	opts := utils.MakeOneDriveOpts(cmd)
 	co := utils.Control()

 	assert.ElementsMatch(t, flagsTD.UsersInput, opts.Users)
-	// no assertion for category data input
-
-	// bool flags
 	assert.Equal(t, control.FailFast, co.FailureHandling)
 	assert.True(t, co.ToggleFeatures.DisableIncrementals)
 	assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
 	flagsTD.AssertProviderFlags(t, cmd)
 	flagsTD.AssertStorageFlags(t, cmd)
 }
@@ -141,37 +126,25 @@ func (suite *OneDriveUnitSuite) TestBackupCreateFlags() {
 func (suite *OneDriveUnitSuite) TestBackupListFlags() {
 	t := suite.T()

-	cmd := &cobra.Command{Use: listCommand}
-
-	// persistent flags not added by addCommands
-	flags.AddRunModeFlag(cmd, true)
-
-	c := addOneDriveCommands(cmd)
-	require.NotNil(t, c)
-
-	// non-persistent flags not added by addCommands
-	flags.AddAllProviderFlags(c)
-	flags.AddAllStorageFlags(c)
-
-	flagsTD.WithFlags(
-		cmd,
-		oneDriveServiceCommand,
-		[]string{
-			"--" + flags.RunModeFN, flags.RunModeFlagTest,
-			"--" + flags.BackupFN, flagsTD.BackupInput,
-		},
-		flagsTD.PreparedBackupListFlags(),
-		flagsTD.PreparedProviderFlags(),
-		flagsTD.PreparedStorageFlags())
-
-	cmd.SetOut(new(bytes.Buffer)) // drop output
-	cmd.SetErr(new(bytes.Buffer)) // drop output
-
-	err := cmd.Execute()
-	assert.NoError(t, err, clues.ToCore(err))
+	cmd := cliTD.SetUpCmdHasFlags(
+		t,
+		&cobra.Command{Use: listCommand},
+		addOneDriveCommands,
+		[]cliTD.UseCobraCommandFn{
+			flags.AddAllProviderFlags,
+			flags.AddAllStorageFlags,
+		},
+		flagsTD.WithFlags(
+			oneDriveServiceCommand,
+			[]string{
+				"--" + flags.RunModeFN, flags.RunModeFlagTest,
+				"--" + flags.BackupFN, flagsTD.BackupInput,
+			},
+			flagsTD.PreparedBackupListFlags(),
+			flagsTD.PreparedProviderFlags(),
+			flagsTD.PreparedStorageFlags()))

 	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
 	flagsTD.AssertBackupListFlags(t, cmd)
 	flagsTD.AssertProviderFlags(t, cmd)
 	flagsTD.AssertStorageFlags(t, cmd)
@@ -180,41 +153,28 @@ func (suite *OneDriveUnitSuite) TestBackupListFlags() {
 func (suite *OneDriveUnitSuite) TestBackupDetailsFlags() {
 	t := suite.T()

-	cmd := &cobra.Command{Use: detailsCommand}
-
-	// persistent flags not added by addCommands
-	flags.AddRunModeFlag(cmd, true)
-
-	c := addOneDriveCommands(cmd)
-	require.NotNil(t, c)
-
-	// non-persistent flags not added by addCommands
-	flags.AddAllProviderFlags(c)
-	flags.AddAllStorageFlags(c)
-
-	flagsTD.WithFlags(
-		cmd,
-		oneDriveServiceCommand,
-		[]string{
-			"--" + flags.RunModeFN, flags.RunModeFlagTest,
-			"--" + flags.BackupFN, flagsTD.BackupInput,
-			"--" + flags.SkipReduceFN,
-		},
-		flagsTD.PreparedProviderFlags(),
-		flagsTD.PreparedStorageFlags())
-
-	cmd.SetOut(new(bytes.Buffer)) // drop output
-	cmd.SetErr(new(bytes.Buffer)) // drop output
-
-	err := cmd.Execute()
-	assert.NoError(t, err, clues.ToCore(err))
+	cmd := cliTD.SetUpCmdHasFlags(
+		t,
+		&cobra.Command{Use: detailsCommand},
+		addOneDriveCommands,
+		[]cliTD.UseCobraCommandFn{
+			flags.AddAllProviderFlags,
+			flags.AddAllStorageFlags,
+		},
+		flagsTD.WithFlags(
+			oneDriveServiceCommand,
+			[]string{
+				"--" + flags.RunModeFN, flags.RunModeFlagTest,
+				"--" + flags.BackupFN, flagsTD.BackupInput,
+				"--" + flags.SkipReduceFN,
+			},
+			flagsTD.PreparedProviderFlags(),
+			flagsTD.PreparedStorageFlags()))

 	co := utils.Control()

-	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
 	assert.True(t, co.SkipReduce)
+	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
 	flagsTD.AssertProviderFlags(t, cmd)
 	flagsTD.AssertStorageFlags(t, cmd)
 }
@@ -222,36 +182,24 @@ func (suite *OneDriveUnitSuite) TestBackupDetailsFlags() {
 func (suite *OneDriveUnitSuite) TestBackupDeleteFlags() {
 	t := suite.T()

-	cmd := &cobra.Command{Use: deleteCommand}
-
-	// persistent flags not added by addCommands
-	flags.AddRunModeFlag(cmd, true)
-
-	c := addOneDriveCommands(cmd)
-	require.NotNil(t, c)
-
-	// non-persistent flags not added by addCommands
-	flags.AddAllProviderFlags(c)
-	flags.AddAllStorageFlags(c)
-
-	flagsTD.WithFlags(
-		cmd,
-		oneDriveServiceCommand,
-		[]string{
-			"--" + flags.RunModeFN, flags.RunModeFlagTest,
-			"--" + flags.BackupFN, flagsTD.BackupInput,
-		},
-		flagsTD.PreparedProviderFlags(),
-		flagsTD.PreparedStorageFlags())
-
-	cmd.SetOut(new(bytes.Buffer)) // drop output
-	cmd.SetErr(new(bytes.Buffer)) // drop output
-
-	err := cmd.Execute()
-	assert.NoError(t, err, clues.ToCore(err))
+	cmd := cliTD.SetUpCmdHasFlags(
+		t,
+		&cobra.Command{Use: deleteCommand},
+		addOneDriveCommands,
+		[]cliTD.UseCobraCommandFn{
+			flags.AddAllProviderFlags,
+			flags.AddAllStorageFlags,
+		},
+		flagsTD.WithFlags(
+			oneDriveServiceCommand,
+			[]string{
+				"--" + flags.RunModeFN, flags.RunModeFlagTest,
+				"--" + flags.BackupFN, flagsTD.BackupInput,
+			},
+			flagsTD.PreparedProviderFlags(),
+			flagsTD.PreparedStorageFlags()))

 	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
 	flagsTD.AssertProviderFlags(t, cmd)
 	flagsTD.AssertStorageFlags(t, cmd)
 }

View File

@@ -1,7 +1,6 @@
 package backup

 import (
-	"bytes"
 	"fmt"
 	"strings"
 	"testing"
@@ -14,6 +13,7 @@ import (
 	"github.com/alcionai/corso/src/cli/flags"
 	flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
+	cliTD "github.com/alcionai/corso/src/cli/testdata"
 	"github.com/alcionai/corso/src/cli/utils"
 	utilsTD "github.com/alcionai/corso/src/cli/utils/testdata"
 	"github.com/alcionai/corso/src/internal/common/idname"
@@ -94,51 +94,36 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
 func (suite *SharePointUnitSuite) TestBackupCreateFlags() {
 	t := suite.T()

-	cmd := &cobra.Command{Use: createCommand}
-
-	// persistent flags not added by addCommands
-	flags.AddRunModeFlag(cmd, true)
-
-	c := addSharePointCommands(cmd)
-	require.NotNil(t, c)
-
-	// non-persistent flags not added by addCommands
-	flags.AddAllProviderFlags(c)
-	flags.AddAllStorageFlags(c)
-
-	flagsTD.WithFlags(
-		cmd,
-		sharePointServiceCommand,
-		[]string{
-			"--" + flags.RunModeFN, flags.RunModeFlagTest,
-			"--" + flags.SiteIDFN, flagsTD.FlgInputs(flagsTD.SiteIDInput),
-			"--" + flags.SiteFN, flagsTD.FlgInputs(flagsTD.WebURLInput),
-			"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.SharepointCategoryDataInput),
-			"--" + flags.FailFastFN,
-			"--" + flags.DisableIncrementalsFN,
-			"--" + flags.ForceItemDataDownloadFN,
-		},
-		flagsTD.PreparedProviderFlags(),
-		flagsTD.PreparedStorageFlags())
-
-	cmd.SetOut(new(bytes.Buffer)) // drop output
-	cmd.SetErr(new(bytes.Buffer)) // drop output
-
-	err := cmd.Execute()
-	assert.NoError(t, err, clues.ToCore(err))
+	cmd := cliTD.SetUpCmdHasFlags(
+		t,
+		&cobra.Command{Use: createCommand},
+		addSharePointCommands,
+		[]cliTD.UseCobraCommandFn{
+			flags.AddAllProviderFlags,
+			flags.AddAllStorageFlags,
+		},
+		flagsTD.WithFlags(
+			sharePointServiceCommand,
+			[]string{
+				"--" + flags.RunModeFN, flags.RunModeFlagTest,
+				"--" + flags.SiteIDFN, flagsTD.FlgInputs(flagsTD.SiteIDInput),
+				"--" + flags.SiteFN, flagsTD.FlgInputs(flagsTD.WebURLInput),
+				"--" + flags.CategoryDataFN, flagsTD.FlgInputs(flagsTD.SharepointCategoryDataInput),
+				"--" + flags.FailFastFN,
+				"--" + flags.DisableIncrementalsFN,
+				"--" + flags.ForceItemDataDownloadFN,
+			},
+			flagsTD.PreparedProviderFlags(),
+			flagsTD.PreparedStorageFlags()))

 	opts := utils.MakeSharePointOpts(cmd)
 	co := utils.Control()

 	assert.ElementsMatch(t, []string{strings.Join(flagsTD.SiteIDInput, ",")}, opts.SiteID)
 	assert.ElementsMatch(t, flagsTD.WebURLInput, opts.WebURL)
-	// no assertion for category data input
-
-	// bool flags
 	assert.Equal(t, control.FailFast, co.FailureHandling)
 	assert.True(t, co.ToggleFeatures.DisableIncrementals)
 	assert.True(t, co.ToggleFeatures.ForceItemDataDownload)
 	flagsTD.AssertProviderFlags(t, cmd)
 	flagsTD.AssertStorageFlags(t, cmd)
 }
@@ -146,37 +131,25 @@ func (suite *SharePointUnitSuite) TestBackupCreateFlags() {
 func (suite *SharePointUnitSuite) TestBackupListFlags() {
 	t := suite.T()

-	cmd := &cobra.Command{Use: listCommand}
-
-	// persistent flags not added by addCommands
-	flags.AddRunModeFlag(cmd, true)
-
-	c := addSharePointCommands(cmd)
-	require.NotNil(t, c)
-
-	// non-persistent flags not added by addCommands
-	flags.AddAllProviderFlags(c)
-	flags.AddAllStorageFlags(c)
-
-	flagsTD.WithFlags(
-		cmd,
-		sharePointServiceCommand,
-		[]string{
-			"--" + flags.RunModeFN, flags.RunModeFlagTest,
-			"--" + flags.BackupFN, flagsTD.BackupInput,
-		},
-		flagsTD.PreparedBackupListFlags(),
-		flagsTD.PreparedProviderFlags(),
-		flagsTD.PreparedStorageFlags())
-
-	cmd.SetOut(new(bytes.Buffer)) // drop output
-	cmd.SetErr(new(bytes.Buffer)) // drop output
-
-	err := cmd.Execute()
-	assert.NoError(t, err, clues.ToCore(err))
+	cmd := cliTD.SetUpCmdHasFlags(
+		t,
+		&cobra.Command{Use: listCommand},
+		addSharePointCommands,
+		[]cliTD.UseCobraCommandFn{
+			flags.AddAllProviderFlags,
+			flags.AddAllStorageFlags,
+		},
+		flagsTD.WithFlags(
+			sharePointServiceCommand,
+			[]string{
+				"--" + flags.RunModeFN, flags.RunModeFlagTest,
+				"--" + flags.BackupFN, flagsTD.BackupInput,
+			},
+			flagsTD.PreparedBackupListFlags(),
+			flagsTD.PreparedProviderFlags(),
+			flagsTD.PreparedStorageFlags()))

 	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
 	flagsTD.AssertBackupListFlags(t, cmd)
 	flagsTD.AssertProviderFlags(t, cmd)
 	flagsTD.AssertStorageFlags(t, cmd)
@@ -185,41 +158,28 @@ func (suite *SharePointUnitSuite) TestBackupListFlags() {
 func (suite *SharePointUnitSuite) TestBackupDetailsFlags() {
 	t := suite.T()

-	cmd := &cobra.Command{Use: detailsCommand}
-
-	// persistent flags not added by addCommands
-	flags.AddRunModeFlag(cmd, true)
-
-	c := addSharePointCommands(cmd)
-	require.NotNil(t, c)
-
-	// non-persistent flags not added by addCommands
-	flags.AddAllProviderFlags(c)
-	flags.AddAllStorageFlags(c)
-
-	flagsTD.WithFlags(
-		cmd,
-		sharePointServiceCommand,
-		[]string{
-			"--" + flags.RunModeFN, flags.RunModeFlagTest,
-			"--" + flags.BackupFN, flagsTD.BackupInput,
-			"--" + flags.SkipReduceFN,
-		},
-		flagsTD.PreparedProviderFlags(),
-		flagsTD.PreparedStorageFlags())
-
-	cmd.SetOut(new(bytes.Buffer)) // drop output
-	cmd.SetErr(new(bytes.Buffer)) // drop output
-
-	err := cmd.Execute()
-	assert.NoError(t, err, clues.ToCore(err))
+	cmd := cliTD.SetUpCmdHasFlags(
+		t,
+		&cobra.Command{Use: detailsCommand},
+		addSharePointCommands,
+		[]cliTD.UseCobraCommandFn{
+			flags.AddAllProviderFlags,
+			flags.AddAllStorageFlags,
+		},
+		flagsTD.WithFlags(
+			sharePointServiceCommand,
+			[]string{
+				"--" + flags.RunModeFN, flags.RunModeFlagTest,
+				"--" + flags.BackupFN, flagsTD.BackupInput,
+				"--" + flags.SkipReduceFN,
+			},
+			flagsTD.PreparedProviderFlags(),
+			flagsTD.PreparedStorageFlags()))

 	co := utils.Control()

 	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
 	assert.True(t, co.SkipReduce)
 	flagsTD.AssertProviderFlags(t, cmd)
 	flagsTD.AssertStorageFlags(t, cmd)
 }
@@ -227,36 +187,24 @@ func (suite *SharePointUnitSuite) TestBackupDetailsFlags() {
 func (suite *SharePointUnitSuite) TestBackupDeleteFlags() {
 	t := suite.T()

-	cmd := &cobra.Command{Use: deleteCommand}
-
-	// persistent flags not added by addCommands
-	flags.AddRunModeFlag(cmd, true)
-
-	c := addSharePointCommands(cmd)
-	require.NotNil(t, c)
-
-	// non-persistent flags not added by addCommands
-	flags.AddAllProviderFlags(c)
-	flags.AddAllStorageFlags(c)
-
-	flagsTD.WithFlags(
-		cmd,
-		sharePointServiceCommand,
-		[]string{
-			"--" + flags.RunModeFN, flags.RunModeFlagTest,
-			"--" + flags.BackupFN, flagsTD.BackupInput,
-		},
-		flagsTD.PreparedProviderFlags(),
-		flagsTD.PreparedStorageFlags())
-
-	cmd.SetOut(new(bytes.Buffer)) // drop output
-	cmd.SetErr(new(bytes.Buffer)) // drop output
-
-	err := cmd.Execute()
-	assert.NoError(t, err, clues.ToCore(err))
+	cmd := cliTD.SetUpCmdHasFlags(
+		t,
+		&cobra.Command{Use: deleteCommand},
+		addSharePointCommands,
+		[]cliTD.UseCobraCommandFn{
+			flags.AddAllProviderFlags,
+			flags.AddAllStorageFlags,
+		},
+		flagsTD.WithFlags(
+			sharePointServiceCommand,
+			[]string{
+				"--" + flags.RunModeFN, flags.RunModeFlagTest,
+				"--" + flags.BackupFN, flagsTD.BackupInput,
+			},
+			flagsTD.PreparedProviderFlags(),
+			flagsTD.PreparedStorageFlags()))

 	assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
 	flagsTD.AssertProviderFlags(t, cmd)
 	flagsTD.AssertStorageFlags(t, cmd)
 }

View File

@@ -54,7 +54,7 @@ func configureAccount(
 	if matchFromConfig {
 		providerType := vpr.GetString(account.AccountProviderTypeKey)

 		if providerType != account.ProviderM365.String() {
-			return acct, clues.New("unsupported account provider: " + providerType)
+			return acct, clues.New("unsupported account provider: [" + providerType + "]")
 		}

 		if err := mustMatchConfig(vpr, m365Overrides(overrides)); err != nil {

View File

@@ -279,8 +279,7 @@ func getStorageAndAccountWithViper(
 	// possibly read the prior config from a .corso file
 	if readFromFile {
-		err = vpr.ReadInConfig()
-		if err != nil {
+		if err := vpr.ReadInConfig(); err != nil {
 			if _, ok := err.(viper.ConfigFileNotFoundError); !ok {
 				return config, clues.Wrap(err, "reading corso config file: "+vpr.ConfigFileUsed())
 			}
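Reviewer note: the rewrite folds the call and its error check into a single if statement, which scopes err to the check and keeps it from shadowing or clobbering the function-level err used elsewhere in getStorageAndAccountWithViper. The idiom in isolation, with a hypothetical loadConfig standing in for vpr.ReadInConfig:

package main

import "errors"

func loadConfig() error { return errors.New("config file not found") }

func main() {
	// err lives only inside this statement; code after the block
	// cannot accidentally test a stale value.
	if err := loadConfig(); err != nil {
		// tolerate or handle the failure here
		_ = err
	}
}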

View File

@@ -356,10 +356,9 @@ func (suite *ConfigSuite) TestReadFromFlags() {
 	m365Config, _ := repoDetails.Account.M365Config()

-	sc, err := repoDetails.Storage.StorageConfig()
+	s3Cfg, err := repoDetails.Storage.ToS3Config()
 	require.NoError(t, err, "reading s3 config from storage", clues.ToCore(err))

-	s3Cfg := sc.(*storage.S3Config)
-
 	commonConfig, _ := repoDetails.Storage.CommonConfig()
 	pass := commonConfig.Corso.CorsoPassphrase
@@ -425,17 +424,21 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount() {
 	err = writeRepoConfigWithViper(vpr, s3Cfg, m365, repository.Options{}, "repoid")
 	require.NoError(t, err, "writing repo config", clues.ToCore(err))

+	require.Equal(
+		t,
+		account.ProviderM365.String(),
+		vpr.GetString(account.AccountProviderTypeKey),
+		"viper should have m365 as the account provider")
+
 	err = vpr.ReadInConfig()
 	require.NoError(t, err, "reading repo config", clues.ToCore(err))

 	cfg, err := getStorageAndAccountWithViper(vpr, storage.ProviderS3, true, true, nil)
 	require.NoError(t, err, "getting storage and account from config", clues.ToCore(err))

-	sc, err := cfg.Storage.StorageConfig()
+	readS3Cfg, err := cfg.Storage.ToS3Config()
 	require.NoError(t, err, "reading s3 config from storage", clues.ToCore(err))

-	readS3Cfg := sc.(*storage.S3Config)
-
 	assert.Equal(t, readS3Cfg.Bucket, s3Cfg.Bucket)
 	assert.Equal(t, readS3Cfg.Endpoint, s3Cfg.Endpoint)
 	assert.Equal(t, readS3Cfg.Prefix, s3Cfg.Prefix)
@@ -482,11 +485,9 @@ func (suite *ConfigIntegrationSuite) TestGetStorageAndAccount_noFileOnlyOverride
 	cfg, err := getStorageAndAccountWithViper(vpr, storage.ProviderS3, false, true, overrides)
 	require.NoError(t, err, "getting storage and account from config", clues.ToCore(err))

-	sc, err := cfg.Storage.StorageConfig()
+	readS3Cfg, err := cfg.Storage.ToS3Config()
 	require.NoError(t, err, "reading s3 config from storage", clues.ToCore(err))

-	readS3Cfg := sc.(*storage.S3Config)
-
 	assert.Equal(t, readS3Cfg.Bucket, bkt)
 	assert.Equal(t, cfg.RepoID, "")
 	assert.Equal(t, readS3Cfg.Endpoint, end)

View File

@@ -27,11 +27,11 @@ var exportCommands = []func(cmd *cobra.Command) *cobra.Command{
 // AddCommands attaches all `corso export * *` commands to the parent.
 func AddCommands(cmd *cobra.Command) {
 	subCommand := exportCmd()
-	flags.AddAllStorageFlags(subCommand)
 	cmd.AddCommand(subCommand)

 	for _, addExportTo := range exportCommands {
-		addExportTo(subCommand)
+		sc := addExportTo(subCommand)
+		flags.AddAllStorageFlags(sc)
 	}
 }

View File

@@ -1,17 +1,15 @@
 package export

 import (
-	"bytes"
 	"testing"

-	"github.com/alcionai/clues"
 	"github.com/spf13/cobra"
 	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"

 	"github.com/alcionai/corso/src/cli/flags"
 	flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
+	cliTD "github.com/alcionai/corso/src/cli/testdata"
 	"github.com/alcionai/corso/src/cli/utils"
 	"github.com/alcionai/corso/src/internal/tester"
 )
@@ -39,55 +37,41 @@ func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
 	for _, test := range table {
 		suite.Run(test.name, func() {
 			t := suite.T()

-			cmd := &cobra.Command{Use: test.use}
-
-			// persistent flags not added by addCommands
-			flags.AddRunModeFlag(cmd, true)
-
-			c := addGroupsCommands(cmd)
-			require.NotNil(t, c)
-
-			// non-persistent flags not added by addCommands
-			flags.AddAllProviderFlags(c)
-			flags.AddAllStorageFlags(c)
-
-			cmds := cmd.Commands()
-			require.Len(t, cmds, 1)
-
-			child := cmds[0]
-			assert.Equal(t, test.expectUse, child.Use)
-			assert.Equal(t, test.expectShort, child.Short)
-			tester.AreSameFunc(t, test.expectRunE, child.RunE)
-
-			flagsTD.WithFlags(
-				cmd,
-				groupsServiceCommand,
-				[]string{
-					flagsTD.RestoreDestination,
-					"--" + flags.RunModeFN, flags.RunModeFlagTest,
-					"--" + flags.BackupFN, flagsTD.BackupInput,
-					"--" + flags.FormatFN, flagsTD.FormatType,
-					// bool flags
-					"--" + flags.ArchiveFN,
-				},
-				flagsTD.PreparedProviderFlags(),
-				flagsTD.PreparedStorageFlags())
-
-			cmd.SetOut(new(bytes.Buffer)) // drop output
-			cmd.SetErr(new(bytes.Buffer)) // drop output
-
-			err := cmd.Execute()
-			assert.NoError(t, err, clues.ToCore(err))
+			parent := &cobra.Command{Use: exportCommand}
+
+			cmd := cliTD.SetUpCmdHasFlags(
+				t,
+				parent,
+				addGroupsCommands,
+				[]cliTD.UseCobraCommandFn{
+					flags.AddAllProviderFlags,
+					flags.AddAllStorageFlags,
+				},
+				flagsTD.WithFlags(
+					groupsServiceCommand,
+					[]string{
+						flagsTD.RestoreDestination,
+						"--" + flags.RunModeFN, flags.RunModeFlagTest,
+						"--" + flags.BackupFN, flagsTD.BackupInput,
+						"--" + flags.FormatFN, flagsTD.FormatType,
+						"--" + flags.ArchiveFN,
+					},
+					flagsTD.PreparedProviderFlags(),
+					flagsTD.PreparedStorageFlags()))
+
+			cliTD.CheckCmdChild(
+				t,
+				parent,
+				3,
+				test.expectUse,
+				test.expectShort,
+				test.expectRunE)

 			opts := utils.MakeGroupsOpts(cmd)

 			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
 			assert.Equal(t, flagsTD.Archive, opts.ExportCfg.Archive)
 			assert.Equal(t, flagsTD.FormatType, opts.ExportCfg.Format)
 			flagsTD.AssertStorageFlags(t, cmd)
 		})
 	}

View File

@@ -1,17 +1,15 @@
 package export

 import (
-	"bytes"
 	"testing"

-	"github.com/alcionai/clues"
 	"github.com/spf13/cobra"
 	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"

 	"github.com/alcionai/corso/src/cli/flags"
 	flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
+	cliTD "github.com/alcionai/corso/src/cli/testdata"
 	"github.com/alcionai/corso/src/cli/utils"
 	"github.com/alcionai/corso/src/internal/tester"
 )
@@ -39,67 +37,55 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
 	for _, test := range table {
 		suite.Run(test.name, func() {
 			t := suite.T()

-			cmd := &cobra.Command{Use: test.use}
-
-			// persistent flags not added by addCommands
-			flags.AddRunModeFlag(cmd, true)
-
-			c := addOneDriveCommands(cmd)
-			require.NotNil(t, c)
-
-			// non-persistent flags not added by addCommands
-			flags.AddAllProviderFlags(c)
-			flags.AddAllStorageFlags(c)
-
-			cmds := cmd.Commands()
-			require.Len(t, cmds, 1)
-
-			child := cmds[0]
-			assert.Equal(t, test.expectUse, child.Use)
-			assert.Equal(t, test.expectShort, child.Short)
-			tester.AreSameFunc(t, test.expectRunE, child.RunE)
-
-			flagsTD.WithFlags(
-				cmd,
-				oneDriveServiceCommand,
-				[]string{
-					flagsTD.RestoreDestination,
-					"--" + flags.RunModeFN, flags.RunModeFlagTest,
-					"--" + flags.BackupFN, flagsTD.BackupInput,
-					"--" + flags.FileFN, flagsTD.FlgInputs(flagsTD.FileNameInput),
-					"--" + flags.FolderFN, flagsTD.FlgInputs(flagsTD.FolderPathInput),
-					"--" + flags.FileCreatedAfterFN, flagsTD.FileCreatedAfterInput,
-					"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
-					"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
-					"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
-					"--" + flags.FormatFN, flagsTD.FormatType,
-					// bool flags
-					"--" + flags.ArchiveFN,
-				},
-				flagsTD.PreparedProviderFlags(),
-				flagsTD.PreparedStorageFlags())
-
-			cmd.SetOut(new(bytes.Buffer)) // drop output
-			cmd.SetErr(new(bytes.Buffer)) // drop output
-
-			err := cmd.Execute()
-			assert.NoError(t, err, clues.ToCore(err))
+			parent := &cobra.Command{Use: exportCommand}
+
+			cmd := cliTD.SetUpCmdHasFlags(
+				t,
+				parent,
+				addOneDriveCommands,
+				[]cliTD.UseCobraCommandFn{
+					flags.AddAllProviderFlags,
+					flags.AddAllStorageFlags,
+				},
+				flagsTD.WithFlags(
+					oneDriveServiceCommand,
+					[]string{
+						flagsTD.RestoreDestination,
+						"--" + flags.RunModeFN, flags.RunModeFlagTest,
+						"--" + flags.BackupFN, flagsTD.BackupInput,
+						"--" + flags.FileFN, flagsTD.FlgInputs(flagsTD.FileNameInput),
+						"--" + flags.FolderFN, flagsTD.FlgInputs(flagsTD.FolderPathInput),
+						"--" + flags.FileCreatedAfterFN, flagsTD.FileCreatedAfterInput,
+						"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
+						"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
+						"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
+						"--" + flags.FormatFN, flagsTD.FormatType,
+						// bool flags
+						"--" + flags.ArchiveFN,
+					},
+					flagsTD.PreparedProviderFlags(),
+					flagsTD.PreparedStorageFlags()))
+
+			cliTD.CheckCmdChild(
+				t,
+				parent,
+				3,
+				test.expectUse,
+				test.expectShort,
+				test.expectRunE)

 			opts := utils.MakeOneDriveOpts(cmd)

 			assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
 			assert.ElementsMatch(t, flagsTD.FileNameInput, opts.FileName)
 			assert.ElementsMatch(t, flagsTD.FolderPathInput, opts.FolderPath)
 			assert.Equal(t, flagsTD.FileCreatedAfterInput, opts.FileCreatedAfter)
 			assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore)
 			assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter)
 			assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
 			assert.Equal(t, flagsTD.CorsoPassphrase, flags.CorsoPassphraseFV)
 			flagsTD.AssertStorageFlags(t, cmd)
 		})
 	}
@ -1,17 +1,15 @@
package export package export
import ( import (
"bytes"
"testing" "testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata" flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
@ -39,63 +37,50 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
parent := &cobra.Command{Use: exportCommand}
cmd := &cobra.Command{Use: test.use} cmd := cliTD.SetUpCmdHasFlags(
t,
// persistent flags not added by addCommands parent,
flags.AddRunModeFlag(cmd, true) addSharePointCommands,
[]cliTD.UseCobraCommandFn{
c := addSharePointCommands(cmd) flags.AddAllProviderFlags,
require.NotNil(t, c) flags.AddAllStorageFlags,
// non-persistent flags not added by addCommands
flags.AddAllProviderFlags(c)
flags.AddAllStorageFlags(c)
cmds := cmd.Commands()
require.Len(t, cmds, 1)
child := cmds[0]
assert.Equal(t, test.expectUse, child.Use)
assert.Equal(t, test.expectShort, child.Short)
tester.AreSameFunc(t, test.expectRunE, child.RunE)
flagsTD.WithFlags(
cmd,
sharePointServiceCommand,
[]string{
flagsTD.RestoreDestination,
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.LibraryFN, flagsTD.LibraryInput,
"--" + flags.FileFN, flagsTD.FlgInputs(flagsTD.FileNameInput),
"--" + flags.FolderFN, flagsTD.FlgInputs(flagsTD.FolderPathInput),
"--" + flags.FileCreatedAfterFN, flagsTD.FileCreatedAfterInput,
"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
"--" + flags.ListItemFN, flagsTD.FlgInputs(flagsTD.ListItemInput),
"--" + flags.ListFolderFN, flagsTD.FlgInputs(flagsTD.ListFolderInput),
"--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput),
"--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
"--" + flags.FormatFN, flagsTD.FormatType,
// bool flags
"--" + flags.ArchiveFN,
}, },
flagsTD.PreparedProviderFlags(), flagsTD.WithFlags(
flagsTD.PreparedStorageFlags()) sharePointServiceCommand,
[]string{
flagsTD.RestoreDestination,
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.LibraryFN, flagsTD.LibraryInput,
"--" + flags.FileFN, flagsTD.FlgInputs(flagsTD.FileNameInput),
"--" + flags.FolderFN, flagsTD.FlgInputs(flagsTD.FolderPathInput),
"--" + flags.FileCreatedAfterFN, flagsTD.FileCreatedAfterInput,
"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
"--" + flags.ListItemFN, flagsTD.FlgInputs(flagsTD.ListItemInput),
"--" + flags.ListFolderFN, flagsTD.FlgInputs(flagsTD.ListFolderInput),
"--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput),
"--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
"--" + flags.FormatFN, flagsTD.FormatType,
"--" + flags.ArchiveFN,
},
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
cmd.SetOut(new(bytes.Buffer)) // drop output cliTD.CheckCmdChild(
cmd.SetErr(new(bytes.Buffer)) // drop output t,
parent,
err := cmd.Execute() 3,
assert.NoError(t, err, clues.ToCore(err)) test.expectUse,
test.expectShort,
test.expectRunE)
opts := utils.MakeSharePointOpts(cmd) opts := utils.MakeSharePointOpts(cmd)
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
assert.Equal(t, flagsTD.LibraryInput, opts.Library) assert.Equal(t, flagsTD.LibraryInput, opts.Library)
assert.ElementsMatch(t, flagsTD.FileNameInput, opts.FileName) assert.ElementsMatch(t, flagsTD.FileNameInput, opts.FileName)
assert.ElementsMatch(t, flagsTD.FolderPathInput, opts.FolderPath) assert.ElementsMatch(t, flagsTD.FolderPathInput, opts.FolderPath)
@ -103,16 +88,12 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore) assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore)
assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter) assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter)
assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore) assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
assert.ElementsMatch(t, flagsTD.ListItemInput, opts.ListItem) assert.ElementsMatch(t, flagsTD.ListItemInput, opts.ListItem)
assert.ElementsMatch(t, flagsTD.ListFolderInput, opts.ListFolder) assert.ElementsMatch(t, flagsTD.ListFolderInput, opts.ListFolder)
assert.ElementsMatch(t, flagsTD.PageInput, opts.Page) assert.ElementsMatch(t, flagsTD.PageInput, opts.Page)
assert.ElementsMatch(t, flagsTD.PageFolderInput, opts.PageFolder) assert.ElementsMatch(t, flagsTD.PageFolderInput, opts.PageFolder)
assert.Equal(t, flagsTD.Archive, opts.ExportCfg.Archive) assert.Equal(t, flagsTD.Archive, opts.ExportCfg.Archive)
assert.Equal(t, flagsTD.FormatType, opts.ExportCfg.Format) assert.Equal(t, flagsTD.FormatType, opts.ExportCfg.Format)
flagsTD.AssertStorageFlags(t, cmd) flagsTD.AssertStorageFlags(t, cmd)
}) })
} }
@ -86,7 +86,7 @@ var (
DisableConcurrencyLimiter = true DisableConcurrencyLimiter = true
) )
func WithFlags( func WithFlags2(
cc *cobra.Command, cc *cobra.Command,
command string, command string,
flagSets ...[]string, flagSets ...[]string,
@ -99,3 +99,18 @@ func WithFlags(
cc.SetArgs(args) cc.SetArgs(args)
} }
func WithFlags(
command string,
flagSets ...[]string,
) func(*cobra.Command) {
return func(cc *cobra.Command) {
args := []string{command}
for _, sl := range flagSets {
args = append(args, sl...)
}
cc.SetArgs(args)
}
}
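For reference, a minimal sketch of how the curried variant is used: the call captures the command name and flag sets, and the returned closure is applied to whichever *cobra.Command should receive the args. The flag values below are illustrative, and PreparedStorageFlags is assumed to return a []string of prepared arguments.

	// deferred form: nothing touches the command until the closure runs.
	setArgs := WithFlags(
		"backup",
		[]string{"--" + flags.RunModeFN, flags.RunModeFlagTest},
		PreparedStorageFlags())

	// later, typically from inside cliTD.SetUpCmdHasFlags:
	setArgs(parentCmd) // equivalent to parentCmd.SetArgs([]string{"backup", ...})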
@ -96,13 +96,11 @@ func initFilesystemCmd(cmd *cobra.Command, args []string) error {
cfg.Account.ID(), cfg.Account.ID(),
opt) opt)
sc, err := cfg.Storage.StorageConfig() storageCfg, err := cfg.Storage.ToFilesystemConfig()
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Retrieving filesystem configuration")) return Only(ctx, clues.Wrap(err, "Retrieving filesystem configuration"))
} }
storageCfg := sc.(*storage.FilesystemConfig)
m365, err := cfg.Account.M365Config() m365, err := cfg.Account.M365Config()
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to parse m365 account config")) return Only(ctx, clues.Wrap(err, "Failed to parse m365 account config"))
@ -123,14 +121,20 @@ func initFilesystemCmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
return Only(ctx, clues.Wrap(err, "Failed to initialize a new filesystem repository")) return Only(ctx, clues.Stack(ErrInitializingRepo, err))
} }
defer utils.CloseRepo(ctx, r) defer utils.CloseRepo(ctx, r)
Infof(ctx, "Initialized a repository at path %s", storageCfg.Path) Infof(ctx, "Initialized a repository at path %s", storageCfg.Path)
if err = config.WriteRepoConfig(ctx, sc, m365, opt.Repo, r.GetID()); err != nil { err = config.WriteRepoConfig(
ctx,
storageCfg,
m365,
opt.Repo,
r.GetID())
if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to write repository configuration")) return Only(ctx, clues.Wrap(err, "Failed to write repository configuration"))
} }
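The same mechanical refactor recurs across the repo commands in this commit: the generic StorageConfig() accessor plus a caller-side type assertion gives way to a typed accessor that surfaces a mismatch as an ordinary error instead of a panic. A self-contained sketch of the shape, using simplified stand-in types rather than the real storage package:

	package main

	import (
		"errors"
		"fmt"
	)

	type FilesystemConfig struct{ Path string }

	type Storage struct{ cfg any }

	// old shape: callers must assert sc.(*FilesystemConfig) themselves,
	// and a wrong concrete type panics at the call site.
	func (s Storage) StorageConfig() (any, error) { return s.cfg, nil }

	// new shape: the assertion lives behind the accessor, and a wrong
	// concrete type comes back as an error the caller can handle.
	func (s Storage) ToFilesystemConfig() (*FilesystemConfig, error) {
		fc, ok := s.cfg.(*FilesystemConfig)
		if !ok {
			return nil, errors.New("storage is not a filesystem config")
		}

		return fc, nil
	}

	func main() {
		st := Storage{cfg: &FilesystemConfig{Path: "/tmp/repo"}}

		cfg, err := st.ToFilesystemConfig()
		fmt.Println(cfg.Path, err) // /tmp/repo <nil>
	}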
@ -181,13 +185,11 @@ func connectFilesystemCmd(cmd *cobra.Command, args []string) error {
repoID = events.RepoIDNotFound repoID = events.RepoIDNotFound
} }
sc, err := cfg.Storage.StorageConfig() storageCfg, err := cfg.Storage.ToFilesystemConfig()
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Retrieving filesystem configuration")) return Only(ctx, clues.Wrap(err, "Retrieving filesystem configuration"))
} }
storageCfg := sc.(*storage.FilesystemConfig)
m365, err := cfg.Account.M365Config() m365, err := cfg.Account.M365Config()
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to parse m365 account config")) return Only(ctx, clues.Wrap(err, "Failed to parse m365 account config"))
@ -206,14 +208,20 @@ func connectFilesystemCmd(cmd *cobra.Command, args []string) error {
} }
if err := r.Connect(ctx); err != nil { if err := r.Connect(ctx); err != nil {
return Only(ctx, clues.Wrap(err, "Failed to connect to the filesystem repository")) return Only(ctx, clues.Stack(ErrConnectingRepo, err))
} }
defer utils.CloseRepo(ctx, r) defer utils.CloseRepo(ctx, r)
Infof(ctx, "Connected to repository at path %s", storageCfg.Path) Infof(ctx, "Connected to repository at path %s", storageCfg.Path)
if err = config.WriteRepoConfig(ctx, sc, m365, opts.Repo, r.GetID()); err != nil { err = config.WriteRepoConfig(
ctx,
storageCfg,
m365,
opts.Repo,
r.GetID())
if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to write repository configuration")) return Only(ctx, clues.Wrap(err, "Failed to write repository configuration"))
} }
@ -56,9 +56,8 @@ func (suite *FilesystemE2ESuite) TestInitFilesystemCmd() {
st := storeTD.NewFilesystemStorage(t) st := storeTD.NewFilesystemStorage(t)
sc, err := st.StorageConfig() cfg, err := st.ToFilesystemConfig()
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
cfg := sc.(*storage.FilesystemConfig)
force := map[string]string{ force := map[string]string{
tconfig.TestCfgStorageProvider: storage.ProviderFilesystem.String(), tconfig.TestCfgStorageProvider: storage.ProviderFilesystem.String(),
@ -113,9 +112,8 @@ func (suite *FilesystemE2ESuite) TestConnectFilesystemCmd() {
defer flush() defer flush()
st := storeTD.NewFilesystemStorage(t) st := storeTD.NewFilesystemStorage(t)
sc, err := st.StorageConfig() cfg, err := st.ToFilesystemConfig()
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
cfg := sc.(*storage.FilesystemConfig)
force := map[string]string{ force := map[string]string{
tconfig.TestCfgAccountProvider: account.ProviderM365.String(), tconfig.TestCfgAccountProvider: account.ProviderM365.String(),
@ -21,6 +21,11 @@ const (
maintenanceCommand = "maintenance" maintenanceCommand = "maintenance"
) )
var (
ErrConnectingRepo = clues.New("connecting repository")
ErrInitializingRepo = clues.New("initializing repository")
)
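These sentinels let the repo commands stack a stable, matchable error onto whatever the underlying failure was, so callers and tests (like the S3 table test below asserting storage.ErrVerifyingConfigStorage) can match with errors.Is. A self-contained sketch, assuming clues.Stack preserves the unwrap chain as those assertions rely on:

	package main

	import (
		"errors"
		"fmt"

		"github.com/alcionai/clues"
	)

	var ErrConnectingRepo = clues.New("connecting repository")

	func main() {
		// stand-in for a lower-level repo connection failure.
		cause := errors.New("dial tcp 127.0.0.1:9000: connection refused")
		err := clues.Stack(ErrConnectingRepo, cause)

		// both the sentinel and the original cause remain matchable.
		fmt.Println(errors.Is(err, ErrConnectingRepo)) // true
		fmt.Println(errors.Is(err, cause))             // true
	}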
var repoCommands = []func(cmd *cobra.Command) *cobra.Command{ var repoCommands = []func(cmd *cobra.Command) *cobra.Command{
addS3Commands, addS3Commands,
addFilesystemCommands, addFilesystemCommands,
@ -116,13 +116,11 @@ func initS3Cmd(cmd *cobra.Command, args []string) error {
cfg.Account.ID(), cfg.Account.ID(),
opt) opt)
sc, err := cfg.Storage.StorageConfig() s3Cfg, err := cfg.Storage.ToS3Config()
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Retrieving s3 configuration")) return Only(ctx, clues.Wrap(err, "Retrieving s3 configuration"))
} }
s3Cfg := sc.(*storage.S3Config)
if strings.HasPrefix(s3Cfg.Endpoint, "http://") || strings.HasPrefix(s3Cfg.Endpoint, "https://") { if strings.HasPrefix(s3Cfg.Endpoint, "http://") || strings.HasPrefix(s3Cfg.Endpoint, "https://") {
invalidEndpointErr := "endpoint doesn't support specifying protocol. " + invalidEndpointErr := "endpoint doesn't support specifying protocol. " +
"pass --disable-tls flag to use http:// instead of default https://" "pass --disable-tls flag to use http:// instead of default https://"
@ -150,7 +148,7 @@ func initS3Cmd(cmd *cobra.Command, args []string) error {
return nil return nil
} }
return Only(ctx, clues.Wrap(err, "Failed to initialize a new S3 repository")) return Only(ctx, clues.Stack(ErrInitializingRepo, err))
} }
defer utils.CloseRepo(ctx, r) defer utils.CloseRepo(ctx, r)
@ -199,13 +197,11 @@ func connectS3Cmd(cmd *cobra.Command, args []string) error {
repoID = events.RepoIDNotFound repoID = events.RepoIDNotFound
} }
sc, err := cfg.Storage.StorageConfig() s3Cfg, err := cfg.Storage.ToS3Config()
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Retrieving s3 configuration")) return Only(ctx, clues.Wrap(err, "Retrieving s3 configuration"))
} }
s3Cfg := sc.(*storage.S3Config)
m365, err := cfg.Account.M365Config() m365, err := cfg.Account.M365Config()
if err != nil { if err != nil {
return Only(ctx, clues.Wrap(err, "Failed to parse m365 account config")) return Only(ctx, clues.Wrap(err, "Failed to parse m365 account config"))
@ -231,7 +227,7 @@ func connectS3Cmd(cmd *cobra.Command, args []string) error {
} }
if err := r.Connect(ctx); err != nil { if err := r.Connect(ctx); err != nil {
return Only(ctx, clues.Wrap(err, "Failed to connect to the S3 repository")) return Only(ctx, clues.Stack(ErrConnectingRepo, err))
} }
defer utils.CloseRepo(ctx, r) defer utils.CloseRepo(ctx, r)
@ -8,10 +8,12 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/cli" "github.com/alcionai/corso/src/cli"
"github.com/alcionai/corso/src/cli/config" "github.com/alcionai/corso/src/cli/config"
cliTD "github.com/alcionai/corso/src/cli/testdata" cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
@ -64,9 +66,8 @@ func (suite *S3E2ESuite) TestInitS3Cmd() {
st := storeTD.NewPrefixedS3Storage(t) st := storeTD.NewPrefixedS3Storage(t)
sc, err := st.StorageConfig() cfg, err := st.ToS3Config()
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
cfg := sc.(*storage.S3Config)
vpr, configFP := tconfig.MakeTempTestConfigClone(t, nil) vpr, configFP := tconfig.MakeTempTestConfigClone(t, nil)
if !test.hasConfigFile { if !test.hasConfigFile {
@ -102,10 +103,9 @@ func (suite *S3E2ESuite) TestInitMultipleTimes() {
defer flush() defer flush()
st := storeTD.NewPrefixedS3Storage(t) st := storeTD.NewPrefixedS3Storage(t)
sc, err := st.StorageConfig()
require.NoError(t, err, clues.ToCore(err))
cfg := sc.(*storage.S3Config) cfg, err := st.ToS3Config()
require.NoError(t, err, clues.ToCore(err))
vpr, configFP := tconfig.MakeTempTestConfigClone(t, nil) vpr, configFP := tconfig.MakeTempTestConfigClone(t, nil)
@ -134,11 +134,9 @@ func (suite *S3E2ESuite) TestInitS3Cmd_missingBucket() {
st := storeTD.NewPrefixedS3Storage(t) st := storeTD.NewPrefixedS3Storage(t)
sc, err := st.StorageConfig() cfg, err := st.ToS3Config()
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
cfg := sc.(*storage.S3Config)
force := map[string]string{ force := map[string]string{
tconfig.TestCfgBucket: "", tconfig.TestCfgBucket: "",
} }
@ -189,9 +187,9 @@ func (suite *S3E2ESuite) TestConnectS3Cmd() {
defer flush() defer flush()
st := storeTD.NewPrefixedS3Storage(t) st := storeTD.NewPrefixedS3Storage(t)
sc, err := st.StorageConfig()
cfg, err := st.ToS3Config()
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
cfg := sc.(*storage.S3Config)
force := map[string]string{ force := map[string]string{
tconfig.TestCfgAccountProvider: account.ProviderM365.String(), tconfig.TestCfgAccountProvider: account.ProviderM365.String(),
@ -234,60 +232,65 @@ func (suite *S3E2ESuite) TestConnectS3Cmd() {
} }
} }
func (suite *S3E2ESuite) TestConnectS3Cmd_BadBucket() { func (suite *S3E2ESuite) TestConnectS3Cmd_badInputs() {
t := suite.T() table := []struct {
ctx, flush := tester.NewContext(t) name string
bucket string
prefix string
expectErr func(t *testing.T, err error)
}{
{
name: "bucket",
bucket: "wrong",
expectErr: func(t *testing.T, err error) {
assert.ErrorIs(t, err, storage.ErrVerifyingConfigStorage, clues.ToCore(err))
},
},
{
name: "prefix",
prefix: "wrong",
expectErr: func(t *testing.T, err error) {
assert.ErrorIs(t, err, storage.ErrVerifyingConfigStorage, clues.ToCore(err))
},
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
defer flush() ctx, flush := tester.NewContext(t)
defer flush()
st := storeTD.NewPrefixedS3Storage(t) st := storeTD.NewPrefixedS3Storage(t)
sc, err := st.StorageConfig() cfg, err := st.ToS3Config()
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
cfg := sc.(*storage.S3Config) bucket := str.First(test.bucket, cfg.Bucket)
prefix := str.First(test.prefix, cfg.Prefix)
vpr, configFP := tconfig.MakeTempTestConfigClone(t, nil) over := map[string]string{}
acct := tconfig.NewM365Account(t)
ctx = config.SetViper(ctx, vpr) maps.Copy(over, acct.Config)
over[account.AccountProviderTypeKey] = account.ProviderM365.String()
over[storage.StorageProviderTypeKey] = storage.ProviderS3.String()
cmd := cliTD.StubRootCmd( vpr, configFP := tconfig.MakeTempTestConfigClone(t, over)
"repo", "connect", "s3", ctx = config.SetViper(ctx, vpr)
"--config-file", configFP,
"--bucket", "wrong",
"--prefix", cfg.Prefix)
cli.BuildCommandTree(cmd)
// run the command cmd := cliTD.StubRootCmd(
err = cmd.ExecuteContext(ctx) "repo", "connect", "s3",
require.Error(t, err, clues.ToCore(err)) "--config-file", configFP,
} "--bucket", bucket,
"--prefix", prefix)
cli.BuildCommandTree(cmd)
func (suite *S3E2ESuite) TestConnectS3Cmd_BadPrefix() { // run the command
t := suite.T() err = cmd.ExecuteContext(ctx)
ctx, flush := tester.NewContext(t) require.Error(t, err, clues.ToCore(err))
test.expectErr(t, err)
defer flush() })
}
st := storeTD.NewPrefixedS3Storage(t)
sc, err := st.StorageConfig()
require.NoError(t, err, clues.ToCore(err))
cfg := sc.(*storage.S3Config)
vpr, configFP := tconfig.MakeTempTestConfigClone(t, nil)
ctx = config.SetViper(ctx, vpr)
cmd := cliTD.StubRootCmd(
"repo", "connect", "s3",
"--config-file", configFP,
"--bucket", cfg.Bucket,
"--prefix", "wrong")
cli.BuildCommandTree(cmd)
// run the command
err = cmd.ExecuteContext(ctx)
require.Error(t, err, clues.ToCore(err))
} }
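The merged table drives both failure modes through a single body; str.First is assumed here to behave as a first-non-empty selector, so an unset table field falls back to the generated test config:

	// assumed semantics of str.First, with illustrative values:
	bucket := str.First("", "prefixed-test-bucket")  // "prefixed-test-bucket"
	prefix := str.First("wrong", "corso-e2e-prefix") // "wrong"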
func (suite *S3E2ESuite) TestUpdateS3Cmd() { func (suite *S3E2ESuite) TestUpdateS3Cmd() {
@ -66,11 +66,9 @@ func (suite *RestoreExchangeE2ESuite) SetupSuite() {
suite.acct = tconfig.NewM365Account(t) suite.acct = tconfig.NewM365Account(t)
suite.st = storeTD.NewPrefixedS3Storage(t) suite.st = storeTD.NewPrefixedS3Storage(t)
sc, err := suite.st.StorageConfig() cfg, err := suite.st.ToS3Config()
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
cfg := sc.(*storage.S3Config)
force := map[string]string{ force := map[string]string{
tconfig.TestCfgAccountProvider: account.ProviderM365.String(), tconfig.TestCfgAccountProvider: account.ProviderM365.String(),
tconfig.TestCfgStorageProvider: storage.ProviderS3.String(), tconfig.TestCfgStorageProvider: storage.ProviderS3.String(),
@ -1,17 +1,15 @@
package restore package restore
import ( import (
"bytes"
"testing" "testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata" flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
@ -39,80 +37,64 @@ func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
parent := &cobra.Command{Use: restoreCommand}
cmd := &cobra.Command{Use: test.use} cmd := cliTD.SetUpCmdHasFlags(
t,
// persistent flags not added by addCommands parent,
flags.AddRunModeFlag(cmd, true) addExchangeCommands,
[]cliTD.UseCobraCommandFn{
c := addExchangeCommands(cmd) flags.AddAllProviderFlags,
require.NotNil(t, c) flags.AddAllStorageFlags,
// non-persistent flags not added by addCommands
flags.AddAllProviderFlags(c)
flags.AddAllStorageFlags(c)
cmds := cmd.Commands()
require.Len(t, cmds, 1)
child := cmds[0]
assert.Equal(t, test.expectUse, child.Use)
assert.Equal(t, test.expectShort, child.Short)
tester.AreSameFunc(t, test.expectRunE, child.RunE)
flagsTD.WithFlags(
cmd,
exchangeServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.ContactFN, flagsTD.FlgInputs(flagsTD.ContactInput),
"--" + flags.ContactFolderFN, flagsTD.FlgInputs(flagsTD.ContactFldInput),
"--" + flags.ContactNameFN, flagsTD.ContactNameInput,
"--" + flags.EmailFN, flagsTD.FlgInputs(flagsTD.EmailInput),
"--" + flags.EmailFolderFN, flagsTD.FlgInputs(flagsTD.EmailFldInput),
"--" + flags.EmailReceivedAfterFN, flagsTD.EmailReceivedAfterInput,
"--" + flags.EmailReceivedBeforeFN, flagsTD.EmailReceivedBeforeInput,
"--" + flags.EmailSenderFN, flagsTD.EmailSenderInput,
"--" + flags.EmailSubjectFN, flagsTD.EmailSubjectInput,
"--" + flags.EventFN, flagsTD.FlgInputs(flagsTD.EventInput),
"--" + flags.EventCalendarFN, flagsTD.FlgInputs(flagsTD.EventCalInput),
"--" + flags.EventOrganizerFN, flagsTD.EventOrganizerInput,
"--" + flags.EventRecursFN, flagsTD.EventRecursInput,
"--" + flags.EventStartsAfterFN, flagsTD.EventStartsAfterInput,
"--" + flags.EventStartsBeforeFN, flagsTD.EventStartsBeforeInput,
"--" + flags.EventSubjectFN, flagsTD.EventSubjectInput,
"--" + flags.CollisionsFN, flagsTD.Collisions,
"--" + flags.DestinationFN, flagsTD.Destination,
"--" + flags.ToResourceFN, flagsTD.ToResource,
}, },
flagsTD.PreparedProviderFlags(), flagsTD.WithFlags(
flagsTD.PreparedStorageFlags()) exchangeServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.ContactFN, flagsTD.FlgInputs(flagsTD.ContactInput),
"--" + flags.ContactFolderFN, flagsTD.FlgInputs(flagsTD.ContactFldInput),
"--" + flags.ContactNameFN, flagsTD.ContactNameInput,
"--" + flags.EmailFN, flagsTD.FlgInputs(flagsTD.EmailInput),
"--" + flags.EmailFolderFN, flagsTD.FlgInputs(flagsTD.EmailFldInput),
"--" + flags.EmailReceivedAfterFN, flagsTD.EmailReceivedAfterInput,
"--" + flags.EmailReceivedBeforeFN, flagsTD.EmailReceivedBeforeInput,
"--" + flags.EmailSenderFN, flagsTD.EmailSenderInput,
"--" + flags.EmailSubjectFN, flagsTD.EmailSubjectInput,
"--" + flags.EventFN, flagsTD.FlgInputs(flagsTD.EventInput),
"--" + flags.EventCalendarFN, flagsTD.FlgInputs(flagsTD.EventCalInput),
"--" + flags.EventOrganizerFN, flagsTD.EventOrganizerInput,
"--" + flags.EventRecursFN, flagsTD.EventRecursInput,
"--" + flags.EventStartsAfterFN, flagsTD.EventStartsAfterInput,
"--" + flags.EventStartsBeforeFN, flagsTD.EventStartsBeforeInput,
"--" + flags.EventSubjectFN, flagsTD.EventSubjectInput,
"--" + flags.CollisionsFN, flagsTD.Collisions,
"--" + flags.DestinationFN, flagsTD.Destination,
"--" + flags.ToResourceFN, flagsTD.ToResource,
},
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
cmd.SetOut(new(bytes.Buffer)) // drop output cliTD.CheckCmdChild(
cmd.SetErr(new(bytes.Buffer)) // drop output t,
parent,
err := cmd.Execute() 3,
assert.NoError(t, err, clues.ToCore(err)) test.expectUse,
test.expectShort,
test.expectRunE)
opts := utils.MakeExchangeOpts(cmd) opts := utils.MakeExchangeOpts(cmd)
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
assert.ElementsMatch(t, flagsTD.ContactInput, opts.Contact) assert.ElementsMatch(t, flagsTD.ContactInput, opts.Contact)
assert.ElementsMatch(t, flagsTD.ContactFldInput, opts.ContactFolder) assert.ElementsMatch(t, flagsTD.ContactFldInput, opts.ContactFolder)
assert.Equal(t, flagsTD.ContactNameInput, opts.ContactName) assert.Equal(t, flagsTD.ContactNameInput, opts.ContactName)
assert.ElementsMatch(t, flagsTD.EmailInput, opts.Email) assert.ElementsMatch(t, flagsTD.EmailInput, opts.Email)
assert.ElementsMatch(t, flagsTD.EmailFldInput, opts.EmailFolder) assert.ElementsMatch(t, flagsTD.EmailFldInput, opts.EmailFolder)
assert.Equal(t, flagsTD.EmailReceivedAfterInput, opts.EmailReceivedAfter) assert.Equal(t, flagsTD.EmailReceivedAfterInput, opts.EmailReceivedAfter)
assert.Equal(t, flagsTD.EmailReceivedBeforeInput, opts.EmailReceivedBefore) assert.Equal(t, flagsTD.EmailReceivedBeforeInput, opts.EmailReceivedBefore)
assert.Equal(t, flagsTD.EmailSenderInput, opts.EmailSender) assert.Equal(t, flagsTD.EmailSenderInput, opts.EmailSender)
assert.Equal(t, flagsTD.EmailSubjectInput, opts.EmailSubject) assert.Equal(t, flagsTD.EmailSubjectInput, opts.EmailSubject)
assert.ElementsMatch(t, flagsTD.EventInput, opts.Event) assert.ElementsMatch(t, flagsTD.EventInput, opts.Event)
assert.ElementsMatch(t, flagsTD.EventCalInput, opts.EventCalendar) assert.ElementsMatch(t, flagsTD.EventCalInput, opts.EventCalendar)
assert.Equal(t, flagsTD.EventOrganizerInput, opts.EventOrganizer) assert.Equal(t, flagsTD.EventOrganizerInput, opts.EventOrganizer)
@ -120,11 +102,9 @@ func (suite *ExchangeUnitSuite) TestAddExchangeCommands() {
assert.Equal(t, flagsTD.EventStartsAfterInput, opts.EventStartsAfter) assert.Equal(t, flagsTD.EventStartsAfterInput, opts.EventStartsAfter)
assert.Equal(t, flagsTD.EventStartsBeforeInput, opts.EventStartsBefore) assert.Equal(t, flagsTD.EventStartsBeforeInput, opts.EventStartsBefore)
assert.Equal(t, flagsTD.EventSubjectInput, opts.EventSubject) assert.Equal(t, flagsTD.EventSubjectInput, opts.EventSubject)
assert.Equal(t, flagsTD.Collisions, opts.RestoreCfg.Collisions) assert.Equal(t, flagsTD.Collisions, opts.RestoreCfg.Collisions)
assert.Equal(t, flagsTD.Destination, opts.RestoreCfg.Destination) assert.Equal(t, flagsTD.Destination, opts.RestoreCfg.Destination)
assert.Equal(t, flagsTD.ToResource, opts.RestoreCfg.ProtectedResource) assert.Equal(t, flagsTD.ToResource, opts.RestoreCfg.ProtectedResource)
flagsTD.AssertProviderFlags(t, cmd) flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd) flagsTD.AssertStorageFlags(t, cmd)
}) })
@ -1,17 +1,15 @@
package restore package restore
import ( import (
"bytes"
"testing" "testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata" flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
@ -39,65 +37,51 @@ func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
parent := &cobra.Command{Use: restoreCommand}
cmd := &cobra.Command{Use: test.use} cmd := cliTD.SetUpCmdHasFlags(
t,
// persistent flags not added by addCommands parent,
flags.AddRunModeFlag(cmd, true) addGroupsCommands,
[]cliTD.UseCobraCommandFn{
c := addGroupsCommands(cmd) flags.AddAllProviderFlags,
require.NotNil(t, c) flags.AddAllStorageFlags,
// non-persistent flags not added by addCommands
flags.AddAllProviderFlags(c)
flags.AddAllStorageFlags(c)
cmds := cmd.Commands()
require.Len(t, cmds, 1)
child := cmds[0]
assert.Equal(t, test.expectUse, child.Use)
assert.Equal(t, test.expectShort, child.Short)
tester.AreSameFunc(t, test.expectRunE, child.RunE)
flagsTD.WithFlags(
cmd,
groupsServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.LibraryFN, flagsTD.LibraryInput,
"--" + flags.FileFN, flagsTD.FlgInputs(flagsTD.FileNameInput),
"--" + flags.FolderFN, flagsTD.FlgInputs(flagsTD.FolderPathInput),
"--" + flags.FileCreatedAfterFN, flagsTD.FileCreatedAfterInput,
"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
"--" + flags.ListItemFN, flagsTD.FlgInputs(flagsTD.ListItemInput),
"--" + flags.ListFolderFN, flagsTD.FlgInputs(flagsTD.ListFolderInput),
"--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput),
"--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
"--" + flags.CollisionsFN, flagsTD.Collisions,
"--" + flags.DestinationFN, flagsTD.Destination,
"--" + flags.ToResourceFN, flagsTD.ToResource,
// bool flags
"--" + flags.NoPermissionsFN,
}, },
flagsTD.PreparedProviderFlags(), flagsTD.WithFlags(
flagsTD.PreparedStorageFlags()) groupsServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.LibraryFN, flagsTD.LibraryInput,
"--" + flags.FileFN, flagsTD.FlgInputs(flagsTD.FileNameInput),
"--" + flags.FolderFN, flagsTD.FlgInputs(flagsTD.FolderPathInput),
"--" + flags.FileCreatedAfterFN, flagsTD.FileCreatedAfterInput,
"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
"--" + flags.ListItemFN, flagsTD.FlgInputs(flagsTD.ListItemInput),
"--" + flags.ListFolderFN, flagsTD.FlgInputs(flagsTD.ListFolderInput),
"--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput),
"--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
"--" + flags.CollisionsFN, flagsTD.Collisions,
"--" + flags.DestinationFN, flagsTD.Destination,
"--" + flags.ToResourceFN, flagsTD.ToResource,
"--" + flags.NoPermissionsFN,
},
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
cmd.SetOut(new(bytes.Buffer)) // drop output cliTD.CheckCmdChild(
cmd.SetErr(new(bytes.Buffer)) // drop output t,
parent,
err := cmd.Execute() 3,
assert.NoError(t, err, clues.ToCore(err)) test.expectUse,
test.expectShort,
test.expectRunE)
opts := utils.MakeGroupsOpts(cmd) opts := utils.MakeGroupsOpts(cmd)
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
assert.Equal(t, flagsTD.LibraryInput, opts.Library) assert.Equal(t, flagsTD.LibraryInput, opts.Library)
assert.ElementsMatch(t, flagsTD.FileNameInput, opts.FileName) assert.ElementsMatch(t, flagsTD.FileNameInput, opts.FileName)
assert.ElementsMatch(t, flagsTD.FolderPathInput, opts.FolderPath) assert.ElementsMatch(t, flagsTD.FolderPathInput, opts.FolderPath)
@ -105,14 +89,10 @@ func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore) assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore)
assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter) assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter)
assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore) assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
assert.Equal(t, flagsTD.Collisions, opts.RestoreCfg.Collisions) assert.Equal(t, flagsTD.Collisions, opts.RestoreCfg.Collisions)
assert.Equal(t, flagsTD.Destination, opts.RestoreCfg.Destination) assert.Equal(t, flagsTD.Destination, opts.RestoreCfg.Destination)
assert.Equal(t, flagsTD.ToResource, opts.RestoreCfg.ProtectedResource) assert.Equal(t, flagsTD.ToResource, opts.RestoreCfg.ProtectedResource)
// bool flags
assert.True(t, flags.NoPermissionsFV) assert.True(t, flags.NoPermissionsFV)
flagsTD.AssertProviderFlags(t, cmd) flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd) flagsTD.AssertStorageFlags(t, cmd)
}) })
@ -1,17 +1,15 @@
package restore package restore
import ( import (
"bytes"
"testing" "testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata" flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
@ -39,73 +37,56 @@ func (suite *OneDriveUnitSuite) TestAddOneDriveCommands() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
parent := &cobra.Command{Use: restoreCommand}
cmd := &cobra.Command{Use: test.use} cmd := cliTD.SetUpCmdHasFlags(
t,
// persistent flags not added by addCommands parent,
flags.AddRunModeFlag(cmd, true) addOneDriveCommands,
[]cliTD.UseCobraCommandFn{
c := addOneDriveCommands(cmd) flags.AddAllProviderFlags,
require.NotNil(t, c) flags.AddAllStorageFlags,
// non-persistent flags not added by addCommands
flags.AddAllProviderFlags(c)
flags.AddAllStorageFlags(c)
cmds := cmd.Commands()
require.Len(t, cmds, 1)
child := cmds[0]
assert.Equal(t, test.expectUse, child.Use)
assert.Equal(t, test.expectShort, child.Short)
tester.AreSameFunc(t, test.expectRunE, child.RunE)
flagsTD.WithFlags(
cmd,
oneDriveServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.FileFN, flagsTD.FlgInputs(flagsTD.FileNameInput),
"--" + flags.FolderFN, flagsTD.FlgInputs(flagsTD.FolderPathInput),
"--" + flags.FileCreatedAfterFN, flagsTD.FileCreatedAfterInput,
"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
"--" + flags.CollisionsFN, flagsTD.Collisions,
"--" + flags.DestinationFN, flagsTD.Destination,
"--" + flags.ToResourceFN, flagsTD.ToResource,
// bool flags
"--" + flags.NoPermissionsFN,
}, },
flagsTD.PreparedProviderFlags(), flagsTD.WithFlags(
flagsTD.PreparedStorageFlags()) oneDriveServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.FileFN, flagsTD.FlgInputs(flagsTD.FileNameInput),
"--" + flags.FolderFN, flagsTD.FlgInputs(flagsTD.FolderPathInput),
"--" + flags.FileCreatedAfterFN, flagsTD.FileCreatedAfterInput,
"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
"--" + flags.CollisionsFN, flagsTD.Collisions,
"--" + flags.DestinationFN, flagsTD.Destination,
"--" + flags.ToResourceFN, flagsTD.ToResource,
"--" + flags.NoPermissionsFN,
},
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
cmd.SetOut(new(bytes.Buffer)) // drop output cliTD.CheckCmdChild(
cmd.SetErr(new(bytes.Buffer)) // drop output t,
parent,
err := cmd.Execute() 3,
assert.NoError(t, err, clues.ToCore(err)) test.expectUse,
test.expectShort,
test.expectRunE)
opts := utils.MakeOneDriveOpts(cmd) opts := utils.MakeOneDriveOpts(cmd)
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
assert.ElementsMatch(t, flagsTD.FileNameInput, opts.FileName) assert.ElementsMatch(t, flagsTD.FileNameInput, opts.FileName)
assert.ElementsMatch(t, flagsTD.FolderPathInput, opts.FolderPath) assert.ElementsMatch(t, flagsTD.FolderPathInput, opts.FolderPath)
assert.Equal(t, flagsTD.FileCreatedAfterInput, opts.FileCreatedAfter) assert.Equal(t, flagsTD.FileCreatedAfterInput, opts.FileCreatedAfter)
assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore) assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore)
assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter) assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter)
assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore) assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
assert.Equal(t, flagsTD.Collisions, opts.RestoreCfg.Collisions) assert.Equal(t, flagsTD.Collisions, opts.RestoreCfg.Collisions)
assert.Equal(t, flagsTD.Destination, opts.RestoreCfg.Destination) assert.Equal(t, flagsTD.Destination, opts.RestoreCfg.Destination)
assert.Equal(t, flagsTD.ToResource, opts.RestoreCfg.ProtectedResource) assert.Equal(t, flagsTD.ToResource, opts.RestoreCfg.ProtectedResource)
// bool flags
assert.True(t, flags.NoPermissionsFV) assert.True(t, flags.NoPermissionsFV)
flagsTD.AssertProviderFlags(t, cmd) flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd) flagsTD.AssertStorageFlags(t, cmd)
}) })
@ -25,12 +25,12 @@ var restoreCommands = []func(cmd *cobra.Command) *cobra.Command{
// AddCommands attaches all `corso restore * *` commands to the parent. // AddCommands attaches all `corso restore * *` commands to the parent.
func AddCommands(cmd *cobra.Command) { func AddCommands(cmd *cobra.Command) {
subCommand := restoreCmd() subCommand := restoreCmd()
flags.AddAllProviderFlags(subCommand)
flags.AddAllStorageFlags(subCommand)
cmd.AddCommand(subCommand) cmd.AddCommand(subCommand)
for _, addRestoreTo := range restoreCommands { for _, addRestoreTo := range restoreCommands {
addRestoreTo(subCommand) sc := addRestoreTo(subCommand)
flags.AddAllProviderFlags(sc)
flags.AddAllStorageFlags(sc)
} }
} }
@ -1,17 +1,15 @@
package restore package restore
import ( import (
"bytes"
"testing" "testing"
"github.com/alcionai/clues"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
flagsTD "github.com/alcionai/corso/src/cli/flags/testdata" flagsTD "github.com/alcionai/corso/src/cli/flags/testdata"
cliTD "github.com/alcionai/corso/src/cli/testdata"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
) )
@ -39,64 +37,51 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
parent := &cobra.Command{Use: restoreCommand}
cmd := &cobra.Command{Use: test.use} cmd := cliTD.SetUpCmdHasFlags(
t,
// persistent flags not added by addCommands parent,
flags.AddRunModeFlag(cmd, true) addSharePointCommands,
[]cliTD.UseCobraCommandFn{
c := addSharePointCommands(cmd) flags.AddAllProviderFlags,
require.NotNil(t, c) flags.AddAllStorageFlags,
// non-persistent flags not added by addCommands
flags.AddAllProviderFlags(c)
flags.AddAllStorageFlags(c)
cmds := cmd.Commands()
require.Len(t, cmds, 1)
child := cmds[0]
assert.Equal(t, test.expectUse, child.Use)
assert.Equal(t, test.expectShort, child.Short)
tester.AreSameFunc(t, test.expectRunE, child.RunE)
flagsTD.WithFlags(
cmd,
sharePointServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.LibraryFN, flagsTD.LibraryInput,
"--" + flags.FileFN, flagsTD.FlgInputs(flagsTD.FileNameInput),
"--" + flags.FolderFN, flagsTD.FlgInputs(flagsTD.FolderPathInput),
"--" + flags.FileCreatedAfterFN, flagsTD.FileCreatedAfterInput,
"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
"--" + flags.ListItemFN, flagsTD.FlgInputs(flagsTD.ListItemInput),
"--" + flags.ListFolderFN, flagsTD.FlgInputs(flagsTD.ListFolderInput),
"--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput),
"--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
"--" + flags.CollisionsFN, flagsTD.Collisions,
"--" + flags.DestinationFN, flagsTD.Destination,
"--" + flags.ToResourceFN, flagsTD.ToResource,
// bool flags
"--" + flags.NoPermissionsFN,
}, },
flagsTD.PreparedProviderFlags(), flagsTD.WithFlags(
flagsTD.PreparedStorageFlags()) sharePointServiceCommand,
[]string{
"--" + flags.RunModeFN, flags.RunModeFlagTest,
"--" + flags.BackupFN, flagsTD.BackupInput,
"--" + flags.LibraryFN, flagsTD.LibraryInput,
"--" + flags.FileFN, flagsTD.FlgInputs(flagsTD.FileNameInput),
"--" + flags.FolderFN, flagsTD.FlgInputs(flagsTD.FolderPathInput),
"--" + flags.FileCreatedAfterFN, flagsTD.FileCreatedAfterInput,
"--" + flags.FileCreatedBeforeFN, flagsTD.FileCreatedBeforeInput,
"--" + flags.FileModifiedAfterFN, flagsTD.FileModifiedAfterInput,
"--" + flags.FileModifiedBeforeFN, flagsTD.FileModifiedBeforeInput,
"--" + flags.ListItemFN, flagsTD.FlgInputs(flagsTD.ListItemInput),
"--" + flags.ListFolderFN, flagsTD.FlgInputs(flagsTD.ListFolderInput),
"--" + flags.PageFN, flagsTD.FlgInputs(flagsTD.PageInput),
"--" + flags.PageFolderFN, flagsTD.FlgInputs(flagsTD.PageFolderInput),
"--" + flags.CollisionsFN, flagsTD.Collisions,
"--" + flags.DestinationFN, flagsTD.Destination,
"--" + flags.ToResourceFN, flagsTD.ToResource,
"--" + flags.NoPermissionsFN,
},
flagsTD.PreparedProviderFlags(),
flagsTD.PreparedStorageFlags()))
cmd.SetOut(new(bytes.Buffer)) // drop output cliTD.CheckCmdChild(
cmd.SetErr(new(bytes.Buffer)) // drop output t,
parent,
err := cmd.Execute() 3,
assert.NoError(t, err, clues.ToCore(err)) test.expectUse,
test.expectShort,
test.expectRunE)
opts := utils.MakeSharePointOpts(cmd) opts := utils.MakeSharePointOpts(cmd)
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
assert.Equal(t, flagsTD.BackupInput, flags.BackupIDFV)
assert.Equal(t, flagsTD.LibraryInput, opts.Library) assert.Equal(t, flagsTD.LibraryInput, opts.Library)
assert.ElementsMatch(t, flagsTD.FileNameInput, opts.FileName) assert.ElementsMatch(t, flagsTD.FileNameInput, opts.FileName)
assert.ElementsMatch(t, flagsTD.FolderPathInput, opts.FolderPath) assert.ElementsMatch(t, flagsTD.FolderPathInput, opts.FolderPath)
@ -104,20 +89,14 @@ func (suite *SharePointUnitSuite) TestAddSharePointCommands() {
assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore) assert.Equal(t, flagsTD.FileCreatedBeforeInput, opts.FileCreatedBefore)
assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter) assert.Equal(t, flagsTD.FileModifiedAfterInput, opts.FileModifiedAfter)
assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore) assert.Equal(t, flagsTD.FileModifiedBeforeInput, opts.FileModifiedBefore)
assert.ElementsMatch(t, flagsTD.ListItemInput, opts.ListItem) assert.ElementsMatch(t, flagsTD.ListItemInput, opts.ListItem)
assert.ElementsMatch(t, flagsTD.ListFolderInput, opts.ListFolder) assert.ElementsMatch(t, flagsTD.ListFolderInput, opts.ListFolder)
assert.ElementsMatch(t, flagsTD.PageInput, opts.Page) assert.ElementsMatch(t, flagsTD.PageInput, opts.Page)
assert.ElementsMatch(t, flagsTD.PageFolderInput, opts.PageFolder) assert.ElementsMatch(t, flagsTD.PageFolderInput, opts.PageFolder)
assert.Equal(t, flagsTD.Collisions, opts.RestoreCfg.Collisions) assert.Equal(t, flagsTD.Collisions, opts.RestoreCfg.Collisions)
assert.Equal(t, flagsTD.Destination, opts.RestoreCfg.Destination) assert.Equal(t, flagsTD.Destination, opts.RestoreCfg.Destination)
assert.Equal(t, flagsTD.ToResource, opts.RestoreCfg.ProtectedResource) assert.Equal(t, flagsTD.ToResource, opts.RestoreCfg.ProtectedResource)
// bool flags
assert.True(t, flags.NoPermissionsFV) assert.True(t, flags.NoPermissionsFV)
flagsTD.AssertProviderFlags(t, cmd) flagsTD.AssertProviderFlags(t, cmd)
flagsTD.AssertStorageFlags(t, cmd) flagsTD.AssertStorageFlags(t, cmd)
}) })
@ -1,11 +1,20 @@
package testdata package testdata
import ( import (
"bytes"
"fmt" "fmt"
"strings"
"testing"
"time" "time"
"github.com/alcionai/clues"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/cli/flags"
"github.com/alcionai/corso/src/internal/tester"
) )
// StubRootCmd builds a stub cobra command to be used as // StubRootCmd builds a stub cobra command to be used as
@ -27,3 +36,82 @@ func StubRootCmd(args ...string) *cobra.Command {
return c return c
} }
type UseCobraCommandFn func(*cobra.Command)
func SetUpCmdHasFlags(
t *testing.T,
parentCmd *cobra.Command,
addChildCommand func(*cobra.Command) *cobra.Command,
addFlags []UseCobraCommandFn,
setArgs UseCobraCommandFn,
) *cobra.Command {
parentCmd.PersistentPreRun = func(c *cobra.Command, args []string) {
t.Log("testing args:")
for _, arg := range args {
t.Log(arg)
}
}
// persistent flags not added by addCommands
flags.AddRunModeFlag(parentCmd, true)
cmd := addChildCommand(parentCmd)
require.NotNil(t, cmd)
cul := cmd.UseLine()
require.Truef(
t,
strings.HasPrefix(cul, parentCmd.Use+" "+cmd.Use),
"child command has expected usage format 'parent child', got %q",
cul)
for _, af := range addFlags {
af(cmd)
}
setArgs(parentCmd)
parentCmd.SetOut(new(bytes.Buffer)) // drop output
parentCmd.SetErr(new(bytes.Buffer)) // drop output
err := parentCmd.Execute()
assert.NoError(t, err, clues.ToCore(err))
return cmd
}
type CobraRunEFn func(cmd *cobra.Command, args []string) error
func CheckCmdChild(
t *testing.T,
cmd *cobra.Command,
expectChildCount int,
expectUse string,
expectShort string,
expectRunE CobraRunEFn,
) {
var (
cmds = cmd.Commands()
child *cobra.Command
)
for _, cc := range cmds {
if cc.Use == expectUse {
child = cc
break
}
}
require.Len(
t,
cmds,
expectChildCount,
"parent command should have the correct child command count")
require.NotNil(t, child, "should have found expected child command")
assert.Equal(t, expectShort, child.Short)
tester.AreSameFunc(t, expectRunE, child.RunE)
}
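Taken together, these two helpers plus the curried flagsTD.WithFlags reduce each service unit test above to roughly the following fragment. It is illustrative rather than lifted from a real test: addServiceCommands, the use/short strings, and serviceRunE are hypothetical stand-ins, and package qualifiers are omitted.

	parent := &cobra.Command{Use: "restore"}

	cmd := SetUpCmdHasFlags(
		t,
		parent,
		addServiceCommands, // hypothetical func(*cobra.Command) *cobra.Command
		[]UseCobraCommandFn{
			flags.AddAllProviderFlags,
			flags.AddAllStorageFlags,
		},
		WithFlags( // the curried variant from the flags testdata package
			"service",
			[]string{"--" + flags.RunModeFN, flags.RunModeFlagTest}))

	CheckCmdChild(
		t,
		parent,
		1, // expected child command count
		"service",
		"short description of the hypothetical service command",
		serviceRunE)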
@ -229,7 +229,7 @@ elseif (![string]::IsNullOrEmpty($Site)) {
} }
} }
else { else {
Write-Host "User (for OneDrvie) or Site (for Sharpeoint) is required" Write-Host "User (for OneDrive) or Site (for Sharepoint) is required"
Exit Exit
} }
@ -197,13 +197,11 @@ func handleCheckerCommand(cmd *cobra.Command, args []string, f flags) error {
return clues.Wrap(err, "getting storage config") return clues.Wrap(err, "getting storage config")
} }
sc, err := repoDetails.Storage.StorageConfig() cfg, err := repoDetails.Storage.ToS3Config()
if err != nil { if err != nil {
return clues.Wrap(err, "getting S3 config") return clues.Wrap(err, "getting S3 config")
} }
cfg := sc.(*storage.S3Config)
endpoint := defaultS3Endpoint endpoint := defaultS3Endpoint
if len(cfg.Endpoint) > 0 { if len(cfg.Endpoint) > 0 {
endpoint = cfg.Endpoint endpoint = cfg.Endpoint
@ -1,6 +1,68 @@
package common package common
import (
"context"
"fmt"
"os"
"strings"
"time"
"github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/credentials"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type PermissionInfo struct { type PermissionInfo struct {
EntityID string EntityID string
Roles []string Roles []string
} }
const (
sanityBaseBackup = "SANITY_BASE_BACKUP"
sanityTestData = "SANITY_TEST_DATA"
sanityTestFolder = "SANITY_TEST_FOLDER"
sanityTestService = "SANITY_TEST_SERVICE"
)
type Envs struct {
BaseBackupFolder string
DataFolder string
FolderName string
Service string
SiteID string
StartTime time.Time
UserID string
}
func EnvVars(ctx context.Context) Envs {
folder := strings.TrimSpace(os.Getenv(sanityTestFolder))
startTime, _ := MustGetTimeFromName(ctx, folder)
e := Envs{
BaseBackupFolder: os.Getenv(sanityBaseBackup),
DataFolder: os.Getenv(sanityTestData),
FolderName: folder,
SiteID: tconfig.GetM365SiteID(ctx),
Service: os.Getenv(sanityTestService),
StartTime: startTime,
UserID: tconfig.GetM365UserID(ctx),
}
fmt.Printf("\n-----\nenvs %+v\n-----\n", e)
logger.Ctx(ctx).Info("envs", e)
return e
}
func GetAC() (api.Client, error) {
creds := account.M365Config{
M365: credentials.GetM365(),
AzureTenantID: os.Getenv(account.AzureTenantID),
}
return api.NewClient(creds, control.DefaultOptions())
}
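A sketch of how a sanity-test entry point can stitch these helpers together; the service dispatch is illustrative (the real routing lives in the sanity-test command wiring, not shown in this diff):

	ctx := context.Background()

	envs := common.EnvVars(ctx) // reads the SANITY_* variables above
	ac, err := common.GetAC()   // Graph client from ambient M365 creds
	if err != nil {
		common.Fatal(ctx, "building api client", err)
	}

	switch envs.Service {
	case "onedrive":
		export.CheckOneDriveExport(ctx, ac, envs)
	case "sharepoint":
		export.CheckSharePointExport(ctx, ac, envs)
	}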
@ -0,0 +1,38 @@
package common
import (
"os"
"path/filepath"
"time"
"github.com/alcionai/clues"
)
func FilepathWalker(
folderName string,
exportFileSizes map[string]int64,
startTime time.Time,
) filepath.WalkFunc {
return func(path string, info os.FileInfo, err error) error {
if err != nil {
return clues.Stack(err)
}
if info.IsDir() {
return nil
}
relPath, err := filepath.Rel(folderName, path)
if err != nil {
return clues.Stack(err)
}
exportFileSizes[relPath] = info.Size()
if startTime.After(info.ModTime()) {
startTime = info.ModTime()
}
return nil
}
}
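One caveat worth flagging: startTime is received by value, so the info.ModTime() assignment inside the closure updates only the closure's copy; the caller never sees it. The inline versions this replaces mutated a variable local to the calling function. If the earliest mod time must flow back to the caller, a pointer-taking variant would be needed; a hypothetical sketch, not what this commit ships:

	// hypothetical variant; NOT the signature added in this commit.
	func FilepathWalkerEarliest(
		folderName string,
		exportFileSizes map[string]int64,
		earliest *time.Time,
	) filepath.WalkFunc {
		return func(path string, info os.FileInfo, err error) error {
			if err != nil {
				return clues.Stack(err)
			}

			if info.IsDir() {
				return nil
			}

			relPath, err := filepath.Rel(folderName, path)
			if err != nil {
				return clues.Stack(err)
			}

			exportFileSizes[relPath] = info.Size()

			if earliest.After(info.ModTime()) {
				*earliest = info.ModTime() // now visible to the caller
			}

			return nil
		}
	}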
@ -0,0 +1,69 @@
package common
import (
"context"
"golang.org/x/exp/maps"
)
// Sanitree is used to build out a hierarchical tree of items
// for comparison against each other. Primarily so that a restore
// can compare two subtrees easily.
type Sanitree[T any] struct {
Container T
ContainerID string
ContainerName string
// count of non-container items held directly in this container
ContainsItems int
// name -> node
Children map[string]*Sanitree[T]
}
func AssertEqualTrees[T any](
ctx context.Context,
expect, other *Sanitree[T],
) {
if expect == nil && other == nil {
return
}
Assert(
ctx,
func() bool { return expect != nil && other != nil },
"non nil nodes",
expect,
other)
Assert(
ctx,
func() bool { return expect.ContainerName == other.ContainerName },
"container names match",
expect.ContainerName,
other.ContainerName)
Assert(
ctx,
func() bool { return expect.ContainsItems == other.ContainsItems },
"count of items in container matches",
expect.ContainsItems,
other.ContainsItems)
Assert(
ctx,
func() bool { return len(expect.Children) == len(other.Children) },
"count of child containers matches",
len(expect.Children),
len(other.Children))
for name, s := range expect.Children {
ch, ok := other.Children[name]
Assert(
ctx,
func() bool { return ok },
"found matching child container",
name,
maps.Keys(other.Children))
AssertEqualTrees(ctx, s, ch)
}
}
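A minimal illustration of the comparison in use: a small tree checked against itself satisfies every Assert (Container stays at its zero value; any mismatch would be reported as a TEST FAILURE by Assert):

	ctx := context.Background()

	inbox := &Sanitree[string]{
		ContainerID:   "id-inbox",
		ContainerName: "inbox",
		ContainsItems: 3,
	}

	root := &Sanitree[string]{
		ContainerID:   "id-root",
		ContainerName: "root",
		Children:      map[string]*Sanitree[string]{"inbox": inbox},
	}

	AssertEqualTrees(ctx, root, root) // names, item counts, and child counts all match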
@ -22,7 +22,7 @@ func Assert(
return return
} }
header = "Error: " + header header = "TEST FAILURE: " + header
expected := fmt.Sprintf("* Expected: %+v", expect) expected := fmt.Sprintf("* Expected: %+v", expect)
got := fmt.Sprintf("* Current: %+v", current) got := fmt.Sprintf("* Current: %+v", current)
@ -37,7 +37,7 @@ func Assert(
func Fatal(ctx context.Context, msg string, err error) { func Fatal(ctx context.Context, msg string, err error) {
logger.CtxErr(ctx, err).Error("test failure: " + msg) logger.CtxErr(ctx, err).Error("test failure: " + msg)
fmt.Println(msg+": ", err) fmt.Println("TEST FAILURE: "+msg+": ", err)
os.Exit(1) os.Exit(1)
} }
@ -0,0 +1,16 @@
package export
import (
"context"
"github.com/alcionai/corso/src/cmd/sanity_test/common"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
func CheckGroupsExport(
ctx context.Context,
ac api.Client,
envs common.Envs,
) {
// TODO
}
@ -3,28 +3,21 @@ package export
 import (
 	"context"
 	"fmt"
-	"os"
 	"path/filepath"
 	"time"
 
-	"github.com/alcionai/clues"
-	msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
-
 	"github.com/alcionai/corso/src/cmd/sanity_test/common"
 	"github.com/alcionai/corso/src/cmd/sanity_test/restore"
 	"github.com/alcionai/corso/src/internal/common/ptr"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
 
 func CheckOneDriveExport(
 	ctx context.Context,
-	client *msgraphsdk.GraphServiceClient,
-	userID, folderName, dataFolder string,
+	ac api.Client,
+	envs common.Envs,
 ) {
-	drive, err := client.
-		Users().
-		ByUserId(userID).
-		Drive().
-		Get(ctx, nil)
+	drive, err := ac.Users().GetDefaultDrive(ctx, envs.UserID)
 	if err != nil {
 		common.Fatal(ctx, "getting the drive:", err)
 	}

@ -36,37 +29,19 @@ func CheckOneDriveExport(
 		startTime = time.Now()
 	)
 
-	err = filepath.Walk(folderName, func(path string, info os.FileInfo, err error) error {
-		if err != nil {
-			return clues.Stack(err)
-		}
-
-		if info.IsDir() {
-			return nil
-		}
-
-		relPath, err := filepath.Rel(folderName, path)
-		if err != nil {
-			return clues.Stack(err)
-		}
-
-		exportFileSizes[relPath] = info.Size()
-		if startTime.After(info.ModTime()) {
-			startTime = info.ModTime()
-		}
-
-		return nil
-	})
+	err = filepath.Walk(
+		envs.FolderName,
+		common.FilepathWalker(envs.FolderName, exportFileSizes, startTime))
 	if err != nil {
 		fmt.Println("Error walking the path:", err)
 	}
 
 	_ = restore.PopulateDriveDetails(
 		ctx,
-		client,
+		ac,
 		ptr.Val(drive.GetId()),
-		folderName,
-		dataFolder,
+		envs.FolderName,
+		envs.DataFolder,
 		fileSizes,
 		map[string][]common.PermissionInfo{},
 		startTime)
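
The inlined walk closure deleted above moves into a shared common.FilepathWalker helper whose implementation isn't part of this diff. A minimal sketch, assuming it simply packages the old closure's size-recording behavior (the signature is inferred from the call site; since startTime is passed by value here, the old mod-time tracking is presumably handled elsewhere):

// Hypothetical sketch of the shared helper; the real version lives in
// cmd/sanity_test/common. Assumes imports of os, path/filepath, time,
// and github.com/alcionai/clues.
func FilepathWalker(
	root string,
	fileSizes map[string]int64,
	startTime time.Time,
) filepath.WalkFunc {
	return func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return clues.Stack(err)
		}

		// Skip directories; only record file sizes.
		if info.IsDir() {
			return nil
		}

		relPath, err := filepath.Rel(root, path)
		if err != nil {
			return clues.Stack(err)
		}

		fileSizes[relPath] = info.Size()

		return nil
	}
}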

View File

@ -3,28 +3,21 @@ package export
 import (
 	"context"
 	"fmt"
-	"os"
 	"path/filepath"
 	"time"
 
-	"github.com/alcionai/clues"
-	msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
-
 	"github.com/alcionai/corso/src/cmd/sanity_test/common"
 	"github.com/alcionai/corso/src/cmd/sanity_test/restore"
 	"github.com/alcionai/corso/src/internal/common/ptr"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
 
 func CheckSharePointExport(
 	ctx context.Context,
-	client *msgraphsdk.GraphServiceClient,
-	siteID, folderName, dataFolder string,
+	ac api.Client,
+	envs common.Envs,
 ) {
-	drive, err := client.
-		Sites().
-		BySiteId(siteID).
-		Drive().
-		Get(ctx, nil)
+	drive, err := ac.Sites().GetDefaultDrive(ctx, envs.SiteID)
 	if err != nil {
 		common.Fatal(ctx, "getting the drive:", err)
 	}

@ -36,37 +29,19 @@ func CheckSharePointExport(
 		startTime = time.Now()
 	)
 
-	err = filepath.Walk(folderName, func(path string, info os.FileInfo, err error) error {
-		if err != nil {
-			return clues.Stack(err)
-		}
-
-		if info.IsDir() {
-			return nil
-		}
-
-		relPath, err := filepath.Rel(folderName, path)
-		if err != nil {
-			return clues.Stack(err)
-		}
-
-		exportFileSizes[relPath] = info.Size()
-		if startTime.After(info.ModTime()) {
-			startTime = info.ModTime()
-		}
-
-		return nil
-	})
+	err = filepath.Walk(
+		envs.FolderName,
+		common.FilepathWalker(envs.FolderName, exportFileSizes, startTime))
 	if err != nil {
 		fmt.Println("Error walking the path:", err)
 	}
 
 	_ = restore.PopulateDriveDetails(
 		ctx,
-		client,
+		ac,
 		ptr.Val(drive.GetId()),
-		folderName,
-		dataFolder,
+		envs.FolderName,
+		envs.DataFolder,
 		fileSizes,
 		map[string][]common.PermissionInfo{},
 		startTime)

View File

@ -3,99 +3,43 @@ package restore
 import (
 	"context"
 	"fmt"
-	stdpath "path"
-	"strings"
-	"time"
 
 	"github.com/alcionai/clues"
-	msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
-	"github.com/microsoftgraph/msgraph-sdk-go/users"
 
 	"github.com/alcionai/corso/src/cmd/sanity_test/common"
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/pkg/filters"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
 
 // CheckEmailRestoration verifies that the emails count in restored folder is equivalent to
 // emails in actual m365 account
 func CheckEmailRestoration(
 	ctx context.Context,
-	client *msgraphsdk.GraphServiceClient,
-	testUser, folderName, dataFolder, baseBackupFolder string,
-	startTime time.Time,
+	ac api.Client,
+	envs common.Envs,
 ) {
 	var (
-		restoreFolder    models.MailFolderable
-		itemCount        = make(map[string]int32)
-		restoreItemCount = make(map[string]int32)
-		builder          = client.Users().ByUserId(testUser).MailFolders()
+		folderNameToItemCount        = make(map[string]int32)
+		folderNameToRestoreItemCount = make(map[string]int32)
 	)
 
-	for {
-		result, err := builder.Get(ctx, nil)
-		if err != nil {
-			common.Fatal(ctx, "getting mail folders", err)
-		}
-
-		values := result.GetValue()
-
-		for _, v := range values {
-			itemName := ptr.Val(v.GetDisplayName())
-
-			if itemName == folderName {
-				restoreFolder = v
-				continue
-			}
-
-			if itemName == dataFolder || itemName == baseBackupFolder {
-				// otherwise, recursively aggregate all child folders.
-				getAllMailSubFolders(ctx, client, testUser, v, itemName, dataFolder, itemCount)
-				itemCount[itemName] = ptr.Val(v.GetTotalItemCount())
-			}
-		}
-
-		link, ok := ptr.ValOK(result.GetOdataNextLink())
-		if !ok {
-			break
-		}
-
-		builder = users.NewItemMailFoldersRequestBuilder(link, client.GetAdapter())
-	}
-
-	folderID := ptr.Val(restoreFolder.GetId())
-	folderName = ptr.Val(restoreFolder.GetDisplayName())
+	restoredTree := buildSanitree(ctx, ac, envs.UserID, envs.FolderName)
+	dataTree := buildSanitree(ctx, ac, envs.UserID, envs.DataFolder)
 
 	ctx = clues.Add(
 		ctx,
-		"restore_folder_id", folderID,
-		"restore_folder_name", folderName)
+		"restore_folder_id", restoredTree.ContainerID,
+		"restore_folder_name", restoredTree.ContainerName,
+		"original_folder_id", dataTree.ContainerID,
+		"original_folder_name", dataTree.ContainerName)
 
-	childFolder, err := client.
-		Users().
-		ByUserId(testUser).
-		MailFolders().
-		ByMailFolderId(folderID).
-		ChildFolders().
-		Get(ctx, nil)
-	if err != nil {
-		common.Fatal(ctx, "getting restore folder child folders", err)
-	}
+	verifyEmailData(ctx, folderNameToRestoreItemCount, folderNameToItemCount)
 
-	for _, fld := range childFolder.GetValue() {
-		restoreDisplayName := ptr.Val(fld.GetDisplayName())
-
-		// check if folder is the data folder we loaded or the base backup to verify
-		// the incremental backup worked fine
-		if strings.EqualFold(restoreDisplayName, dataFolder) || strings.EqualFold(restoreDisplayName, baseBackupFolder) {
-			count, _ := ptr.ValOK(fld.GetTotalItemCount())
-			restoreItemCount[restoreDisplayName] = count
-			checkAllSubFolder(ctx, client, fld, testUser, restoreDisplayName, dataFolder, restoreItemCount)
-		}
-	}
-
-	verifyEmailData(ctx, restoreItemCount, itemCount)
+	common.AssertEqualTrees[models.MailFolderable](
+		ctx,
+		dataTree,
+		restoredTree.Children[envs.DataFolder])
 }
 
 func verifyEmailData(ctx context.Context, restoreMessageCount, messageCount map[string]int32) {

@ -111,109 +55,71 @@ func verifyEmailData(ctx context.Context, restoreMessageCount, messageCount map[string]int32) {
 	}
 }
 
-// getAllSubFolder will recursively check for all subfolders and get the corresponding
-// email count.
-func getAllMailSubFolders(
+func buildSanitree(
 	ctx context.Context,
-	client *msgraphsdk.GraphServiceClient,
-	testUser string,
-	r models.MailFolderable,
-	parentFolder,
-	dataFolder string,
-	messageCount map[string]int32,
-) {
-	var (
-		folderID       = ptr.Val(r.GetId())
-		count    int32 = 99
-		options        = &users.ItemMailFoldersItemChildFoldersRequestBuilderGetRequestConfiguration{
-			QueryParameters: &users.ItemMailFoldersItemChildFoldersRequestBuilderGetQueryParameters{
-				Top: &count,
-			},
-		}
-	)
-
-	ctx = clues.Add(ctx, "parent_folder_id", folderID)
-
-	childFolder, err := client.
-		Users().
-		ByUserId(testUser).
-		MailFolders().
-		ByMailFolderId(folderID).
-		ChildFolders().
-		Get(ctx, options)
+	ac api.Client,
+	userID, folderName string,
+) *common.Sanitree[models.MailFolderable] {
+	gcc, err := ac.Mail().GetContainerByName(
+		ctx,
+		userID,
+		api.MsgFolderRoot,
+		folderName)
 	if err != nil {
-		common.Fatal(ctx, "getting mail subfolders", err)
+		common.Fatal(
+			ctx,
+			fmt.Sprintf("finding folder by name %q", folderName),
+			err)
 	}
 
-	for _, child := range childFolder.GetValue() {
-		var (
-			childDisplayName = ptr.Val(child.GetDisplayName())
-			childFolderCount = ptr.Val(child.GetChildFolderCount())
-			//nolint:forbidigo
-			fullFolderName = stdpath.Join(parentFolder, childDisplayName)
-		)
-
-		if filters.PathContains([]string{dataFolder}).Compare(fullFolderName) {
-			messageCount[fullFolderName] = ptr.Val(child.GetTotalItemCount())
-			// recursively check for subfolders
-			if childFolderCount > 0 {
-				parentFolder := fullFolderName
-
-				getAllMailSubFolders(ctx, client, testUser, child, parentFolder, dataFolder, messageCount)
-			}
-		}
+	mmf, ok := gcc.(models.MailFolderable)
+	if !ok {
+		common.Fatal(
+			ctx,
+			"mail folderable required",
+			clues.New("casting "+*gcc.GetDisplayName()+" to models.MailFolderable"))
 	}
-}
 
-// checkAllSubFolder will recursively traverse inside the restore folder and
-// verify that data matched in all subfolders
-func checkAllSubFolder(
-	ctx context.Context,
-	client *msgraphsdk.GraphServiceClient,
-	r models.MailFolderable,
-	testUser,
-	parentFolder,
-	dataFolder string,
-	restoreMessageCount map[string]int32,
-) {
-	var (
-		folderID       = ptr.Val(r.GetId())
-		count    int32 = 99
-		options        = &users.ItemMailFoldersItemChildFoldersRequestBuilderGetRequestConfiguration{
-			QueryParameters: &users.ItemMailFoldersItemChildFoldersRequestBuilderGetQueryParameters{
-				Top: &count,
-			},
-		}
-	)
+	root := &common.Sanitree[models.MailFolderable]{
+		Container:     mmf,
+		ContainerID:   ptr.Val(mmf.GetId()),
+		ContainerName: ptr.Val(mmf.GetDisplayName()),
+		ContainsItems: int(ptr.Val(mmf.GetTotalItemCount())),
+		Children:      map[string]*common.Sanitree[models.MailFolderable]{},
+	}
 
-	childFolder, err := client.
-		Users().
-		ByUserId(testUser).
-		MailFolders().
-		ByMailFolderId(folderID).
-		ChildFolders().
-		Get(ctx, options)
-	if err != nil {
-		common.Fatal(ctx, "getting mail subfolders", err)
-	}
+	recurseSubfolders(ctx, ac, root, userID)
 
-	for _, child := range childFolder.GetValue() {
-		var (
-			childDisplayName = ptr.Val(child.GetDisplayName())
-			//nolint:forbidigo
-			fullFolderName = stdpath.Join(parentFolder, childDisplayName)
-		)
+	return root
+}
 
-		if filters.PathContains([]string{dataFolder}).Compare(fullFolderName) {
-			childTotalCount, _ := ptr.ValOK(child.GetTotalItemCount())
-			restoreMessageCount[fullFolderName] = childTotalCount
-		}
+func recurseSubfolders(
+	ctx context.Context,
+	ac api.Client,
+	parent *common.Sanitree[models.MailFolderable],
+	userID string,
+) {
+	childFolders, err := ac.Mail().GetContainerChildren(
+		ctx,
+		userID,
+		parent.ContainerID)
+	if err != nil {
+		common.Fatal(ctx, "getting subfolders", err)
+	}
 
-		childFolderCount := ptr.Val(child.GetChildFolderCount())
-		if childFolderCount > 0 {
-			parentFolder := fullFolderName
+	for _, child := range childFolders {
+		c := &common.Sanitree[models.MailFolderable]{
+			Container:     child,
+			ContainerID:   ptr.Val(child.GetId()),
+			ContainerName: ptr.Val(child.GetDisplayName()),
+			ContainsItems: int(ptr.Val(child.GetTotalItemCount())),
+			Children:      map[string]*common.Sanitree[models.MailFolderable]{},
+		}
 
-			checkAllSubFolder(ctx, client, child, testUser, parentFolder, dataFolder, restoreMessageCount)
+		parent.Children[c.ContainerName] = c
+
+		if ptr.Val(child.GetChildFolderCount()) > 0 {
+			recurseSubfolders(ctx, ac, c, userID)
 		}
 	}
 }

View File

@ -0,0 +1,16 @@
package restore
import (
"context"
"github.com/alcionai/corso/src/cmd/sanity_test/common"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
func CheckGroupsRestoration(
ctx context.Context,
ac api.Client,
envs common.Envs,
) {
// TODO
}

View File

@ -7,12 +7,12 @@ import (
"time" "time"
"github.com/alcionai/clues" "github.com/alcionai/clues"
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
"github.com/alcionai/corso/src/cmd/sanity_test/common" "github.com/alcionai/corso/src/cmd/sanity_test/common"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
const ( const (
@ -21,34 +21,29 @@ const (
func CheckOneDriveRestoration( func CheckOneDriveRestoration(
ctx context.Context, ctx context.Context,
client *msgraphsdk.GraphServiceClient, ac api.Client,
userID, folderName, dataFolder string, envs common.Envs,
startTime time.Time,
) { ) {
drive, err := client. drive, err := ac.Users().GetDefaultDrive(ctx, envs.UserID)
Users().
ByUserId(userID).
Drive().
Get(ctx, nil)
if err != nil { if err != nil {
common.Fatal(ctx, "getting the drive:", err) common.Fatal(ctx, "getting the drive:", err)
} }
checkDriveRestoration( checkDriveRestoration(
ctx, ctx,
client, ac,
path.OneDriveService, path.OneDriveService,
folderName, envs.FolderName,
ptr.Val(drive.GetId()), ptr.Val(drive.GetId()),
ptr.Val(drive.GetName()), ptr.Val(drive.GetName()),
dataFolder, envs.DataFolder,
startTime, envs.StartTime,
false) false)
} }
func checkDriveRestoration( func checkDriveRestoration(
ctx context.Context, ctx context.Context,
client *msgraphsdk.GraphServiceClient, ac api.Client,
service path.ServiceType, service path.ServiceType,
folderName, folderName,
driveID, driveID,
@ -70,7 +65,7 @@ func checkDriveRestoration(
restoreFolderID := PopulateDriveDetails( restoreFolderID := PopulateDriveDetails(
ctx, ctx,
client, ac,
driveID, driveID,
folderName, folderName,
dataFolder, dataFolder,
@ -78,7 +73,14 @@ func checkDriveRestoration(
folderPermissions, folderPermissions,
startTime) startTime)
getRestoredDrive(ctx, client, driveID, restoreFolderID, restoreFile, restoredFolderPermissions, startTime) getRestoredDrive(
ctx,
ac,
driveID,
restoreFolderID,
restoreFile,
restoredFolderPermissions,
startTime)
checkRestoredDriveItemPermissions( checkRestoredDriveItemPermissions(
ctx, ctx,
@ -105,7 +107,7 @@ func checkDriveRestoration(
func PopulateDriveDetails( func PopulateDriveDetails(
ctx context.Context, ctx context.Context,
client *msgraphsdk.GraphServiceClient, ac api.Client,
driveID, folderName, dataFolder string, driveID, folderName, dataFolder string,
fileSizes map[string]int64, fileSizes map[string]int64,
folderPermissions map[string][]common.PermissionInfo, folderPermissions map[string][]common.PermissionInfo,
@ -113,18 +115,12 @@ func PopulateDriveDetails(
) string { ) string {
var restoreFolderID string var restoreFolderID string
response, err := client. children, err := ac.Drives().GetFolderChildren(ctx, driveID, "root")
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId("root").
Children().
Get(ctx, nil)
if err != nil { if err != nil {
common.Fatal(ctx, "getting drive by id", err) common.Fatal(ctx, "getting drive by id", err)
} }
for _, driveItem := range response.GetValue() { for _, driveItem := range children {
var ( var (
itemID = ptr.Val(driveItem.GetId()) itemID = ptr.Val(driveItem.GetId())
itemName = ptr.Val(driveItem.GetName()) itemName = ptr.Val(driveItem.GetName())
@ -156,8 +152,17 @@ func PopulateDriveDetails(
continue continue
} }
folderPermissions[itemName] = permissionIn(ctx, client, driveID, itemID) folderPermissions[itemName] = permissionIn(ctx, ac, driveID, itemID)
getOneDriveChildFolder(ctx, client, driveID, itemID, itemName, fileSizes, folderPermissions, startTime)
getOneDriveChildFolder(
ctx,
ac,
driveID,
itemID,
itemName,
fileSizes,
folderPermissions,
startTime)
} }
return restoreFolderID return restoreFolderID
@ -228,18 +233,18 @@ func checkRestoredDriveItemPermissions(
func getOneDriveChildFolder( func getOneDriveChildFolder(
ctx context.Context, ctx context.Context,
client *msgraphsdk.GraphServiceClient, ac api.Client,
driveID, itemID, parentName string, driveID, itemID, parentName string,
fileSizes map[string]int64, fileSizes map[string]int64,
folderPermission map[string][]common.PermissionInfo, folderPermission map[string][]common.PermissionInfo,
startTime time.Time, startTime time.Time,
) { ) {
response, err := client.Drives().ByDriveId(driveID).Items().ByDriveItemId(itemID).Children().Get(ctx, nil) children, err := ac.Drives().GetFolderChildren(ctx, driveID, itemID)
if err != nil { if err != nil {
common.Fatal(ctx, "getting child folder", err) common.Fatal(ctx, "getting child folder", err)
} }
for _, driveItem := range response.GetValue() { for _, driveItem := range children {
var ( var (
itemID = ptr.Val(driveItem.GetId()) itemID = ptr.Val(driveItem.GetId())
itemName = ptr.Val(driveItem.GetName()) itemName = ptr.Val(driveItem.GetName())
@ -268,31 +273,33 @@ func getOneDriveChildFolder(
continue continue
} }
folderPermission[fullName] = permissionIn(ctx, client, driveID, itemID) folderPermission[fullName] = permissionIn(ctx, ac, driveID, itemID)
getOneDriveChildFolder(ctx, client, driveID, itemID, fullName, fileSizes, folderPermission, startTime) getOneDriveChildFolder(
ctx,
ac,
driveID,
itemID,
fullName,
fileSizes,
folderPermission,
startTime)
} }
} }
func getRestoredDrive( func getRestoredDrive(
ctx context.Context, ctx context.Context,
client *msgraphsdk.GraphServiceClient, ac api.Client,
driveID, restoreFolderID string, driveID, restoreFolderID string,
restoreFile map[string]int64, restoreFile map[string]int64,
restoreFolder map[string][]common.PermissionInfo, restoreFolder map[string][]common.PermissionInfo,
startTime time.Time, startTime time.Time,
) { ) {
restored, err := client. children, err := ac.Drives().GetFolderChildren(ctx, driveID, restoreFolderID)
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(restoreFolderID).
Children().
Get(ctx, nil)
if err != nil { if err != nil {
common.Fatal(ctx, "getting child folder", err) common.Fatal(ctx, "getting child folder", err)
} }
for _, item := range restored.GetValue() { for _, item := range children {
var ( var (
itemID = ptr.Val(item.GetId()) itemID = ptr.Val(item.GetId())
itemName = ptr.Val(item.GetName()) itemName = ptr.Val(item.GetName())
@ -308,8 +315,16 @@ func getRestoredDrive(
continue continue
} }
restoreFolder[itemName] = permissionIn(ctx, client, driveID, itemID) restoreFolder[itemName] = permissionIn(ctx, ac, driveID, itemID)
getOneDriveChildFolder(ctx, client, driveID, itemID, itemName, restoreFile, restoreFolder, startTime) getOneDriveChildFolder(
ctx,
ac,
driveID,
itemID,
itemName,
restoreFile,
restoreFolder,
startTime)
} }
} }
@ -319,18 +334,12 @@ func getRestoredDrive(
func permissionIn( func permissionIn(
ctx context.Context, ctx context.Context,
client *msgraphsdk.GraphServiceClient, ac api.Client,
driveID, itemID string, driveID, itemID string,
) []common.PermissionInfo { ) []common.PermissionInfo {
pi := []common.PermissionInfo{} pi := []common.PermissionInfo{}
pcr, err := client. pcr, err := ac.Drives().GetItemPermission(ctx, driveID, itemID)
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(itemID).
Permissions().
Get(ctx, nil)
if err != nil { if err != nil {
common.Fatal(ctx, "getting permission", err) common.Fatal(ctx, "getting permission", err)
} }
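
The repeated Graph SDK builder chains above collapse into named helpers on corso's api.Client. The helper bodies aren't part of this diff; as a purely hypothetical summary, the subset these drive checks rely on could be captured in an interface like the following (method names come from the call sites, and both return types are guesses):

// Hypothetical sketch of the api.Client surface used by the drive sanity
// tests; the concrete implementations live in pkg/services/m365/api.
type drivesAPI interface {
	// replaces client.Drives().ByDriveId(driveID).Items().
	//   ByDriveItemId(itemID).Children().Get(ctx, nil)
	GetFolderChildren(
		ctx context.Context,
		driveID, itemID string,
	) ([]models.DriveItemable, error)

	// replaces the equivalent ...Permissions().Get(ctx, nil) chain;
	// the return type here is an assumption.
	GetItemPermission(
		ctx context.Context,
		driveID, itemID string,
	) (models.PermissionCollectionResponseable, error)
}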

View File

@ -2,38 +2,31 @@ package restore
 import (
 	"context"
-	"time"
-
-	msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
 
 	"github.com/alcionai/corso/src/cmd/sanity_test/common"
 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/pkg/path"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
 
 func CheckSharePointRestoration(
 	ctx context.Context,
-	client *msgraphsdk.GraphServiceClient,
-	siteID, userID, folderName, dataFolder string,
-	startTime time.Time,
+	ac api.Client,
+	envs common.Envs,
 ) {
-	drive, err := client.
-		Sites().
-		BySiteId(siteID).
-		Drive().
-		Get(ctx, nil)
+	drive, err := ac.Sites().GetDefaultDrive(ctx, envs.SiteID)
 	if err != nil {
 		common.Fatal(ctx, "getting the drive:", err)
 	}
 
 	checkDriveRestoration(
 		ctx,
-		client,
+		ac,
 		path.SharePointService,
-		folderName,
+		envs.FolderName,
 		ptr.Val(drive.GetId()),
 		ptr.Val(drive.GetName()),
-		dataFolder,
-		startTime,
+		envs.DataFolder,
+		envs.StartTime,
 		true)
 }

View File

@ -2,21 +2,40 @@ package main
 import (
 	"context"
+	"fmt"
 	"os"
-	"strings"
-	"time"
 
 	"github.com/alcionai/clues"
-	msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
+	"github.com/spf13/cobra"
 
+	"github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cmd/sanity_test/common"
 	"github.com/alcionai/corso/src/cmd/sanity_test/export"
 	"github.com/alcionai/corso/src/cmd/sanity_test/restore"
 	"github.com/alcionai/corso/src/internal/m365/graph"
-	"github.com/alcionai/corso/src/internal/tester/tconfig"
 	"github.com/alcionai/corso/src/pkg/logger"
 )
 
+// ---------------------------------------------------------------------------
+// root command
+// ---------------------------------------------------------------------------
+
+func rootCMD() *cobra.Command {
+	return &cobra.Command{
+		Use:               "sanity-test",
+		Short:             "run the sanity tests",
+		DisableAutoGenTag: true,
+		RunE:              sanityTestRoot,
+		PersistentPreRun: func(cmd *cobra.Command, args []string) {
+			fmt.Println("running", cmd.UseLine())
+		},
+	}
+}
+
+func sanityTestRoot(cmd *cobra.Command, args []string) error {
+	return print.Only(cmd.Context(), clues.New("must specify a kind of test"))
+}
+
 func main() {
 	ls := logger.Settings{
 		File: logger.GetLogFile(""),

@ -29,60 +48,226 @@ func main() {
 		_ = log.Sync() // flush all logs in the buffer
 	}()
 
+	// TODO: only needed for exchange
 	graph.InitializeConcurrencyLimiter(ctx, true, 4)
 
-	adapter, err := graph.CreateAdapter(
-		tconfig.GetM365TenantID(ctx),
-		os.Getenv("AZURE_CLIENT_ID"),
-		os.Getenv("AZURE_CLIENT_SECRET"))
-	if err != nil {
-		common.Fatal(ctx, "creating adapter", err)
-	}
+	root := rootCMD()
 
-	var (
-		client           = msgraphsdk.NewGraphServiceClient(adapter)
-		testUser         = tconfig.GetM365UserID(ctx)
-		testSite         = tconfig.GetM365SiteID(ctx)
-		testKind         = os.Getenv("SANITY_TEST_KIND") // restore or export (cli arg?)
-		testService      = os.Getenv("SANITY_TEST_SERVICE")
-		folder           = strings.TrimSpace(os.Getenv("SANITY_TEST_FOLDER"))
-		dataFolder       = os.Getenv("TEST_DATA")
-		baseBackupFolder = os.Getenv("BASE_BACKUP")
-	)
+	restCMD := restoreCMD()
 
-	ctx = clues.Add(
-		ctx,
-		"resource_owner", testUser,
-		"service", testService,
-		"sanity_restore_folder", folder)
+	restCMD.AddCommand(restoreExchangeCMD())
+	restCMD.AddCommand(restoreOneDriveCMD())
+	restCMD.AddCommand(restoreSharePointCMD())
+	restCMD.AddCommand(restoreGroupsCMD())
+	root.AddCommand(restCMD)
 
-	logger.Ctx(ctx).Info("starting sanity test check")
+	expCMD := exportCMD()
 
-	switch testKind {
-	case "restore":
-		startTime, _ := common.MustGetTimeFromName(ctx, folder)
-		clues.Add(ctx, "sanity_restore_start_time", startTime.Format(time.RFC3339))
+	expCMD.AddCommand(exportOneDriveCMD())
+	expCMD.AddCommand(exportSharePointCMD())
+	expCMD.AddCommand(exportGroupsCMD())
+	root.AddCommand(expCMD)
 
-		switch testService {
-		case "exchange":
-			restore.CheckEmailRestoration(ctx, client, testUser, folder, dataFolder, baseBackupFolder, startTime)
-		case "onedrive":
-			restore.CheckOneDriveRestoration(ctx, client, testUser, folder, dataFolder, startTime)
-		case "sharepoint":
-			restore.CheckSharePointRestoration(ctx, client, testSite, testUser, folder, dataFolder, startTime)
-		default:
-			common.Fatal(ctx, "unknown service for restore sanity tests", nil)
-		}
-	case "export":
-		switch testService {
-		case "onedrive":
-			export.CheckOneDriveExport(ctx, client, testUser, folder, dataFolder)
-		case "sharepoint":
-			export.CheckSharePointExport(ctx, client, testSite, folder, dataFolder)
-		default:
-			common.Fatal(ctx, "unknown service for export sanity tests", nil)
-		}
-	default:
-		common.Fatal(ctx, "unknown test kind (expected restore or export)", nil)
+	if err := root.Execute(); err != nil {
+		os.Exit(1)
 	}
 }
// ---------------------------------------------------------------------------
// restore/export command
// ---------------------------------------------------------------------------
func exportCMD() *cobra.Command {
return &cobra.Command{
Use:               "export",
Short: "run the post-export sanity tests",
DisableAutoGenTag: true,
RunE: sanityTestExport,
}
}
func sanityTestExport(cmd *cobra.Command, args []string) error {
return print.Only(cmd.Context(), clues.New("must specify a service"))
}
func restoreCMD() *cobra.Command {
return &cobra.Command{
Use: "restore",
Short: "run the post-restore sanity tests",
DisableAutoGenTag: true,
RunE: sanityTestRestore,
}
}
func sanityTestRestore(cmd *cobra.Command, args []string) error {
return print.Only(cmd.Context(), clues.New("must specify a service"))
}
// ---------------------------------------------------------------------------
// service commands - export
// ---------------------------------------------------------------------------
func exportGroupsCMD() *cobra.Command {
return &cobra.Command{
Use: "groups",
Short: "run the groups export sanity tests",
DisableAutoGenTag: true,
RunE: sanityTestExportGroups,
}
}
func sanityTestExportGroups(cmd *cobra.Command, args []string) error {
ctx := cmd.Context()
envs := common.EnvVars(ctx)
ac, err := common.GetAC()
if err != nil {
return print.Only(ctx, err)
}
export.CheckGroupsExport(ctx, ac, envs)
return nil
}
func exportOneDriveCMD() *cobra.Command {
return &cobra.Command{
Use: "onedrive",
Short: "run the onedrive export sanity tests",
DisableAutoGenTag: true,
RunE: sanityTestExportOneDrive,
}
}
func sanityTestExportOneDrive(cmd *cobra.Command, args []string) error {
ctx := cmd.Context()
envs := common.EnvVars(ctx)
ac, err := common.GetAC()
if err != nil {
return print.Only(ctx, err)
}
export.CheckOneDriveExport(ctx, ac, envs)
return nil
}
func exportSharePointCMD() *cobra.Command {
return &cobra.Command{
Use: "sharepoint",
Short: "run the sharepoint export sanity tests",
DisableAutoGenTag: true,
RunE: sanityTestExportSharePoint,
}
}
func sanityTestExportSharePoint(cmd *cobra.Command, args []string) error {
ctx := cmd.Context()
envs := common.EnvVars(ctx)
ac, err := common.GetAC()
if err != nil {
return print.Only(ctx, err)
}
export.CheckSharePointExport(ctx, ac, envs)
return nil
}
// ---------------------------------------------------------------------------
// service commands - restore
// ---------------------------------------------------------------------------
func restoreExchangeCMD() *cobra.Command {
return &cobra.Command{
Use: "exchange",
Short: "run the exchange restore sanity tests",
DisableAutoGenTag: true,
RunE: sanityTestRestoreExchange,
}
}
func sanityTestRestoreExchange(cmd *cobra.Command, args []string) error {
ctx := cmd.Context()
envs := common.EnvVars(ctx)
ac, err := common.GetAC()
if err != nil {
return print.Only(ctx, err)
}
restore.CheckEmailRestoration(ctx, ac, envs)
return nil
}
func restoreOneDriveCMD() *cobra.Command {
return &cobra.Command{
Use: "onedrive",
Short: "run the onedrive restore sanity tests",
DisableAutoGenTag: true,
RunE: sanityTestRestoreOneDrive,
}
}
func sanityTestRestoreOneDrive(cmd *cobra.Command, args []string) error {
ctx := cmd.Context()
envs := common.EnvVars(ctx)
ac, err := common.GetAC()
if err != nil {
return print.Only(ctx, err)
}
restore.CheckOneDriveRestoration(ctx, ac, envs)
return nil
}
func restoreSharePointCMD() *cobra.Command {
return &cobra.Command{
Use: "sharepoint",
Short: "run the sharepoint restore sanity tests",
DisableAutoGenTag: true,
RunE: sanityTestRestoreSharePoint,
}
}
func sanityTestRestoreSharePoint(cmd *cobra.Command, args []string) error {
ctx := cmd.Context()
envs := common.EnvVars(ctx)
ac, err := common.GetAC()
if err != nil {
return print.Only(ctx, err)
}
restore.CheckSharePointRestoration(ctx, ac, envs)
return nil
}
func restoreGroupsCMD() *cobra.Command {
return &cobra.Command{
Use: "groups",
Short: "run the groups restore sanity tests",
DisableAutoGenTag: true,
RunE: sanityTestRestoreGroups,
}
}
func sanityTestRestoreGroups(cmd *cobra.Command, args []string) error {
ctx := cmd.Context()
envs := common.EnvVars(ctx)
ac, err := common.GetAC()
if err != nil {
return print.Only(ctx, err)
}
restore.CheckGroupsRestoration(ctx, ac, envs)
return nil
}
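
Every per-service command above leans on common.EnvVars and common.GetAC, which replace the ad-hoc os.Getenv reads deleted from main(). Neither helper appears in this diff; a rough sketch of what EnvVars presumably gathers follows. Field names are taken from call sites in this diff (envs.UserID, envs.SiteID, envs.FolderName, envs.DataFolder, envs.StartTime); aside from SANITY_TEST_FOLDER, the env var names are assumptions:

// Hypothetical shape of the common.Envs bundle consumed by the check funcs.
type Envs struct {
	UserID     string
	SiteID     string
	FolderName string
	DataFolder string
	StartTime  time.Time
}

func EnvVars(ctx context.Context) Envs {
	envs := Envs{
		UserID:     tconfig.GetM365UserID(ctx),
		SiteID:     tconfig.GetM365SiteID(ctx),
		FolderName: strings.TrimSpace(os.Getenv("SANITY_TEST_FOLDER")),
		DataFolder: os.Getenv("SANITY_TEST_DATA"), // assumed name
	}

	// Restore checks derive their start time from the timestamped restore
	// folder name, as the old main() did via MustGetTimeFromName.
	envs.StartTime, _ = MustGetTimeFromName(ctx, envs.FolderName)

	return envs
}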

View File

@ -27,7 +27,7 @@ func NewPrefixMap(m map[string]map[string]struct{}) *PrefixMap {
 func (pm PrefixMap) AssertEqual(t *testing.T, r prefixmatcher.StringSetReader) {
 	if pm.Empty() {
-		require.True(t, r.Empty(), "both prefix maps are empty")
+		require.True(t, r.Empty(), "result prefixMap should be empty but contains keys: %+v", r.Keys())
 
 		return
 	}

View File

@ -4,6 +4,7 @@ import (
"bytes" "bytes"
"context" "context"
"io" "io"
"sync"
"time" "time"
"github.com/alcionai/clues" "github.com/alcionai/clues"
@ -15,16 +16,23 @@ import (
) )
var ( var (
_ Item = &unindexedPrefetchedItem{}
_ ItemModTime = &unindexedPrefetchedItem{}
_ Item = &prefetchedItem{} _ Item = &prefetchedItem{}
_ ItemInfo = &prefetchedItem{} _ ItemInfo = &prefetchedItem{}
_ ItemModTime = &prefetchedItem{} _ ItemModTime = &prefetchedItem{}
_ Item = &unindexedLazyItem{}
_ ItemModTime = &unindexedLazyItem{}
_ Item = &lazyItem{} _ Item = &lazyItem{}
_ ItemInfo = &lazyItem{} _ ItemInfo = &lazyItem{}
_ ItemModTime = &lazyItem{} _ ItemModTime = &lazyItem{}
) )
func NewDeletedItem(itemID string) Item { func NewDeletedItem(itemID string) Item {
return &prefetchedItem{ return &unindexedPrefetchedItem{
id: itemID, id: itemID,
deleted: true, deleted: true,
// TODO(ashmrtn): This really doesn't need to be set since deleted items are // TODO(ashmrtn): This really doesn't need to be set since deleted items are
@ -34,24 +42,26 @@ func NewDeletedItem(itemID string) Item {
} }
} }
func NewPrefetchedItem( func NewUnindexedPrefetchedItem(
reader io.ReadCloser, reader io.ReadCloser,
itemID string, itemID string,
info details.ItemInfo, modTime time.Time,
) Item { ) Item {
return &prefetchedItem{ return &unindexedPrefetchedItem{
id: itemID, id: itemID,
reader: reader, reader: reader,
info: info, modTime: modTime,
modTime: info.Modified(),
} }
} }
// prefetchedItem represents a single item retrieved from the remote service. // unindexedPrefetchedItem represents a single item retrieved from the remote
type prefetchedItem struct { // service.
//
// This item doesn't implement ItemInfo so it's safe to use for items like
// metadata that shouldn't appear in backup details.
type unindexedPrefetchedItem struct {
id string id string
reader io.ReadCloser reader io.ReadCloser
info details.ItemInfo
// modTime is the modified time of the item. It should match the modTime in // modTime is the modified time of the item. It should match the modTime in
// info if info is present. Here as a separate field so that deleted items // info if info is present. Here as a separate field so that deleted items
// don't error out by trying to source it from info. // don't error out by trying to source it from info.
@ -62,26 +72,50 @@ type prefetchedItem struct {
deleted bool deleted bool
} }
func (i prefetchedItem) ID() string { func (i unindexedPrefetchedItem) ID() string {
return i.id return i.id
} }
func (i *prefetchedItem) ToReader() io.ReadCloser { func (i *unindexedPrefetchedItem) ToReader() io.ReadCloser {
return i.reader return i.reader
} }
func (i prefetchedItem) Deleted() bool { func (i unindexedPrefetchedItem) Deleted() bool {
return i.deleted return i.deleted
} }
func (i unindexedPrefetchedItem) ModTime() time.Time {
return i.modTime
}
func NewPrefetchedItem(
reader io.ReadCloser,
itemID string,
info details.ItemInfo,
) Item {
return &prefetchedItem{
unindexedPrefetchedItem: unindexedPrefetchedItem{
id: itemID,
reader: reader,
modTime: info.Modified(),
},
info: info,
}
}
// prefetchedItem represents a single item retrieved from the remote service.
//
// This item implements ItemInfo so it should be used for things that need to
// appear in backup details.
type prefetchedItem struct {
unindexedPrefetchedItem
info details.ItemInfo
}
func (i prefetchedItem) Info() (details.ItemInfo, error) { func (i prefetchedItem) Info() (details.ItemInfo, error) {
return i.info, nil return i.info, nil
} }
func (i prefetchedItem) ModTime() time.Time {
return i.modTime
}
type ItemDataGetter interface { type ItemDataGetter interface {
GetData( GetData(
context.Context, context.Context,
@ -89,14 +123,14 @@ type ItemDataGetter interface {
) (io.ReadCloser, *details.ItemInfo, bool, error) ) (io.ReadCloser, *details.ItemInfo, bool, error)
} }
func NewLazyItem( func NewUnindexedLazyItem(
ctx context.Context, ctx context.Context,
itemGetter ItemDataGetter, itemGetter ItemDataGetter,
itemID string, itemID string,
modTime time.Time, modTime time.Time,
errs *fault.Bus, errs *fault.Bus,
) Item { ) Item {
return &lazyItem{ return &unindexedLazyItem{
ctx: ctx, ctx: ctx,
id: itemID, id: itemID,
itemGetter: itemGetter, itemGetter: itemGetter,
@ -105,11 +139,15 @@ func NewLazyItem(
} }
} }
// lazyItem represents a single item retrieved from the remote service. It // unindexedLazyItem represents a single item retrieved from the remote service.
// lazily fetches the item's data when the first call to ToReader().Read() is // It lazily fetches the item's data when the first call to ToReader().Read() is
// made. // made.
type lazyItem struct { //
// This item doesn't implement ItemInfo so it's safe to use for items like
// metadata that shouldn't appear in backup details.
type unindexedLazyItem struct {
ctx context.Context ctx context.Context
mu sync.Mutex
id string id string
errs *fault.Bus errs *fault.Bus
itemGetter ItemDataGetter itemGetter ItemDataGetter
@ -127,12 +165,18 @@ type lazyItem struct {
delInFlight bool delInFlight bool
} }
func (i lazyItem) ID() string { func (i *unindexedLazyItem) ID() string {
return i.id return i.id
} }
func (i *lazyItem) ToReader() io.ReadCloser { func (i *unindexedLazyItem) ToReader() io.ReadCloser {
return lazy.NewLazyReadCloser(func() (io.ReadCloser, error) { return lazy.NewLazyReadCloser(func() (io.ReadCloser, error) {
// Don't allow getting Item info while trying to initialize said info.
// GetData could be a long running call, but in theory nothing should happen
// with the item until a reader is returned anyway.
i.mu.Lock()
defer i.mu.Unlock()
reader, info, delInFlight, err := i.itemGetter.GetData(i.ctx, i.errs) reader, info, delInFlight, err := i.itemGetter.GetData(i.ctx, i.errs)
if err != nil { if err != nil {
return nil, clues.Stack(err) return nil, clues.Stack(err)
@ -159,11 +203,46 @@ func (i *lazyItem) ToReader() io.ReadCloser {
}) })
} }
func (i lazyItem) Deleted() bool { func (i *unindexedLazyItem) Deleted() bool {
return false return false
} }
func (i lazyItem) Info() (details.ItemInfo, error) { func (i *unindexedLazyItem) ModTime() time.Time {
return i.modTime
}
func NewLazyItem(
ctx context.Context,
itemGetter ItemDataGetter,
itemID string,
modTime time.Time,
errs *fault.Bus,
) Item {
return &lazyItem{
unindexedLazyItem: unindexedLazyItem{
ctx: ctx,
id: itemID,
itemGetter: itemGetter,
modTime: modTime,
errs: errs,
},
}
}
// lazyItem represents a single item retrieved from the remote service. It
// lazily fetches the item's data when the first call to ToReader().Read() is
// made.
//
// This item implements ItemInfo so it should be used for things that need to
// appear in backup details.
type lazyItem struct {
unindexedLazyItem
}
func (i *lazyItem) Info() (details.ItemInfo, error) {
i.mu.Lock()
defer i.mu.Unlock()
if i.delInFlight { if i.delInFlight {
return details.ItemInfo{}, clues.Stack(ErrNotFound).WithClues(i.ctx) return details.ItemInfo{}, clues.Stack(ErrNotFound).WithClues(i.ctx)
} else if i.info == nil { } else if i.info == nil {
@ -173,7 +252,3 @@ func (i lazyItem) Info() (details.ItemInfo, error) {
return *i.info, nil return *i.info, nil
} }
func (i lazyItem) ModTime() time.Time {
return i.modTime
}
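
The split above relies on Go interface satisfaction by method set: embedding the unindexed struct gives the indexed type all shared behavior, while only the type that declares Info() satisfies ItemInfo. A minimal standalone sketch of that pattern (all names hypothetical, unrelated to corso's types):

// Runnable illustration: the embedded base type never satisfies Infoer,
// so a type assertion cleanly separates indexed from unindexed values.
package main

import "fmt"

type Infoer interface{ Info() string }

type base struct{ id string } // declares no Info(); never matches Infoer

type indexed struct{ base } // embeds base and adds Info()

func (i indexed) Info() string { return "info for " + i.id }

func main() {
	items := []any{base{id: "meta"}, indexed{base{id: "file"}}}

	for _, it := range items {
		// Only the type that declares Info() passes this assertion,
		// mirroring the unindexed/indexed item split in the diff.
		if in, ok := it.(Infoer); ok {
			fmt.Println(in.Info())
		} else {
			fmt.Println("skipped: no Info()")
		}
	}
}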

View File

@ -49,6 +49,31 @@ func TestItemUnitSuite(t *testing.T) {
 	suite.Run(t, &ItemUnitSuite{Suite: tester.NewUnitSuite(t)})
 }
func (suite *ItemUnitSuite) TestUnindexedPrefetchedItem() {
prefetch := data.NewUnindexedPrefetchedItem(
io.NopCloser(bytes.NewReader([]byte{})),
"foo",
time.Time{})
_, ok := prefetch.(data.ItemInfo)
assert.False(suite.T(), ok, "unindexedPrefetchedItem implements Info()")
}
func (suite *ItemUnitSuite) TestUnindexedLazyItem() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
lazy := data.NewUnindexedLazyItem(
ctx,
nil,
"foo",
time.Time{},
fault.New(true))
_, ok := lazy.(data.ItemInfo)
assert.False(t, ok, "unindexedLazyItem implements Info()")
}
 func (suite *ItemUnitSuite) TestDeletedItem() {
 	var (
 		t = suite.T()

View File

@ -205,7 +205,7 @@ func (w *conn) commonConnect(
 		bst,
 		password,
 		kopiaOpts); err != nil {
-		return clues.Wrap(err, "connecting to repo").WithClues(ctx)
+		return clues.Wrap(err, "connecting to kopia repo").WithClues(ctx)
 	}
 
 	if err := w.open(ctx, cfgFile, password); err != nil {

View File

@ -16,12 +16,11 @@ func filesystemStorage(
 	repoOpts repository.Options,
 	s storage.Storage,
 ) (blob.Storage, error) {
-	cfg, err := s.StorageConfig()
+	fsCfg, err := s.ToFilesystemConfig()
 	if err != nil {
 		return nil, clues.Stack(err).WithClues(ctx)
 	}
 
-	fsCfg := cfg.(*storage.FilesystemConfig)
-
 	opts := filesystem.Options{
 		Path: fsCfg.Path,
 	}

View File

@ -20,13 +20,11 @@ func s3BlobStorage(
 	repoOpts repository.Options,
 	s storage.Storage,
 ) (blob.Storage, error) {
-	sc, err := s.StorageConfig()
+	cfg, err := s.ToS3Config()
 	if err != nil {
 		return nil, clues.Stack(err).WithClues(ctx)
 	}
 
-	cfg := sc.(*storage.S3Config)
-
 	endpoint := defaultS3Endpoint
 	if len(cfg.Endpoint) > 0 {
 		endpoint = cfg.Endpoint
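
ToS3Config and ToFilesystemConfig replace the StorageConfig-plus-type-assertion pattern at each call site, moving the unchecked cast behind a typed accessor. Their bodies aren't in this diff; a plausible sketch, assuming they wrap the same assertion with a proper error instead of a panic-prone cast:

// Hypothetical implementation of the typed accessor, inferred from the
// call sites above; the real version lives in pkg/storage.
func (s Storage) ToS3Config() (*S3Config, error) {
	cfg, err := s.StorageConfig()
	if err != nil {
		return nil, clues.Stack(err)
	}

	s3Cfg, ok := cfg.(*S3Config)
	if !ok {
		return nil, clues.New("storage config is not an S3 config")
	}

	return s3Cfg, nil
}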

View File

@ -33,11 +33,7 @@ const (
MaxOneNoteFileSize = 2 * 1024 * 1024 * 1024 MaxOneNoteFileSize = 2 * 1024 * 1024 * 1024
) )
-var (
-	_ data.BackupCollection = &Collection{}
-	_ data.Item             = &metadata.Item{}
-	_ data.ItemModTime      = &metadata.Item{}
-)
+var _ data.BackupCollection = &Collection{}
 
 // Collection represents a set of OneDrive objects retrieved from M365
 type Collection struct {

@ -588,13 +584,15 @@ func (oc *Collection) streamDriveItem(
 		return progReader, nil
 	})
 
-	oc.data <- &metadata.Item{
-		ItemID: metaFileName + metaSuffix,
-		Data:   metaReader,
+	// We wrap the reader with a lazy reader so that the progress bar is only
+	// initialized if the file is read. Since we're not actually lazily reading
+	// data just use the eager item implementation.
+	oc.data <- data.NewUnindexedPrefetchedItem(
+		metaReader,
+		metaFileName+metaSuffix,
 		// Metadata file should always use the latest time as
 		// permissions change does not update mod time.
-		Mod: time.Now(),
-	}
+		time.Now())
 
 	// Item read successfully, add to collection
 	if isFile {

View File

@ -230,16 +230,16 @@ func (c *Collections) Get(
ssmb *prefixmatcher.StringSetMatchBuilder, ssmb *prefixmatcher.StringSetMatchBuilder,
errs *fault.Bus, errs *fault.Bus,
) ([]data.BackupCollection, bool, error) { ) ([]data.BackupCollection, bool, error) {
prevDeltas, oldPathsByDriveID, canUsePreviousBackup, err := deserializeMetadata(ctx, prevMetadata) prevDriveIDToDelta, oldPrevPathsByDriveID, canUsePrevBackup, err := deserializeMetadata(ctx, prevMetadata)
if err != nil { if err != nil {
return nil, false, err return nil, false, err
} }
ctx = clues.Add(ctx, "can_use_previous_backup", canUsePreviousBackup) ctx = clues.Add(ctx, "can_use_previous_backup", canUsePrevBackup)
driveTombstones := map[string]struct{}{} driveTombstones := map[string]struct{}{}
for driveID := range oldPathsByDriveID { for driveID := range oldPrevPathsByDriveID {
driveTombstones[driveID] = struct{}{} driveTombstones[driveID] = struct{}{}
} }
@ -257,76 +257,88 @@ func (c *Collections) Get(
} }
var ( var (
// Drive ID -> delta URL for drive driveIDToDeltaLink = map[string]string{}
deltaURLs = map[string]string{} driveIDToPrevPaths = map[string]map[string]string{}
// Drive ID -> folder ID -> folder path numPrevItems = 0
folderPaths = map[string]map[string]string{}
numPrevItems = 0
) )
for _, d := range drives { for _, d := range drives {
var ( var (
driveID = ptr.Val(d.GetId()) driveID = ptr.Val(d.GetId())
driveName = ptr.Val(d.GetName()) driveName = ptr.Val(d.GetName())
prevDelta = prevDeltas[driveID] ictx = clues.Add(
oldPaths = oldPathsByDriveID[driveID] ctx,
numOldDelta = 0 "drive_id", driveID,
ictx = clues.Add(ctx, "drive_id", driveID, "drive_name", driveName) "drive_name", clues.Hide(driveName))
excludedItemIDs = map[string]struct{}{}
oldPrevPaths = oldPrevPathsByDriveID[driveID]
prevDeltaLink = prevDriveIDToDelta[driveID]
// itemCollection is used to identify which collection a
// file belongs to. This is useful to delete a file from the
// collection it was previously in, in case it was moved to a
// different collection within the same delta query
// item ID -> item ID
itemCollection = map[string]string{}
) )
delete(driveTombstones, driveID) delete(driveTombstones, driveID)
if _, ok := driveIDToPrevPaths[driveID]; !ok {
driveIDToPrevPaths[driveID] = map[string]string{}
}
if _, ok := c.CollectionMap[driveID]; !ok { if _, ok := c.CollectionMap[driveID]; !ok {
c.CollectionMap[driveID] = map[string]*Collection{} c.CollectionMap[driveID] = map[string]*Collection{}
} }
if len(prevDelta) > 0 {
numOldDelta++
}
logger.Ctx(ictx).Infow( logger.Ctx(ictx).Infow(
"previous metadata for drive", "previous metadata for drive",
"num_paths_entries", len(oldPaths), "num_paths_entries", len(oldPrevPaths))
"num_deltas_entries", numOldDelta)
delta, paths, excluded, err := collectItems( items, du, err := c.handler.EnumerateDriveItemsDelta(
ictx, ictx,
c.handler.NewItemPager(driveID, "", api.DriveItemSelectDefault()),
driveID, driveID,
driveName, prevDeltaLink)
c.UpdateCollections,
oldPaths,
prevDelta,
errs)
if err != nil { if err != nil {
return nil, false, err return nil, false, err
} }
// Used for logging below.
numDeltas := 0
// It's alright to have an empty folders map (i.e. no folders found) but not // It's alright to have an empty folders map (i.e. no folders found) but not
// an empty delta token. This is because when deserializing the metadata we // an empty delta token. This is because when deserializing the metadata we
// remove entries for which there is no corresponding delta token/folder. If // remove entries for which there is no corresponding delta token/folder. If
// we leave empty delta tokens then we may end up setting the State field // we leave empty delta tokens then we may end up setting the State field
// for collections when not actually getting delta results. // for collections when not actually getting delta results.
if len(delta.URL) > 0 { if len(du.URL) > 0 {
deltaURLs[driveID] = delta.URL driveIDToDeltaLink[driveID] = du.URL
numDeltas++ }
newPrevPaths, err := c.UpdateCollections(
ctx,
driveID,
driveName,
items,
oldPrevPaths,
itemCollection,
excludedItemIDs,
du.Reset,
errs)
if err != nil {
return nil, false, clues.Stack(err)
} }
// Avoid the edge case where there's no paths but we do have a valid delta // Avoid the edge case where there's no paths but we do have a valid delta
// token. We can accomplish this by adding an empty paths map for this // token. We can accomplish this by adding an empty paths map for this
// drive. If we don't have this then the next backup won't use the delta // drive. If we don't have this then the next backup won't use the delta
// token because it thinks the folder paths weren't persisted. // token because it thinks the folder paths weren't persisted.
folderPaths[driveID] = map[string]string{} driveIDToPrevPaths[driveID] = map[string]string{}
maps.Copy(folderPaths[driveID], paths) maps.Copy(driveIDToPrevPaths[driveID], newPrevPaths)
logger.Ctx(ictx).Infow( logger.Ctx(ictx).Infow(
"persisted metadata for drive", "persisted metadata for drive",
"num_paths_entries", len(paths), "num_new_paths_entries", len(newPrevPaths),
"num_deltas_entries", numDeltas, "delta_reset", du.Reset)
"delta_reset", delta.Reset)
numDriveItems := c.NumItems - numPrevItems numDriveItems := c.NumItems - numPrevItems
numPrevItems = c.NumItems numPrevItems = c.NumItems
@ -338,7 +350,7 @@ func (c *Collections) Get(
err = c.addURLCacheToDriveCollections( err = c.addURLCacheToDriveCollections(
ictx, ictx,
driveID, driveID,
prevDelta, prevDeltaLink,
errs) errs)
if err != nil { if err != nil {
return nil, false, err return nil, false, err
@ -347,8 +359,8 @@ func (c *Collections) Get(
// For both cases we don't need to do set difference on folder map if the // For both cases we don't need to do set difference on folder map if the
// delta token was valid because we should see all the changes. // delta token was valid because we should see all the changes.
if !delta.Reset { if !du.Reset {
if len(excluded) == 0 { if len(excludedItemIDs) == 0 {
continue continue
} }
@ -357,7 +369,7 @@ func (c *Collections) Get(
return nil, false, clues.Wrap(err, "making exclude prefix").WithClues(ictx) return nil, false, clues.Wrap(err, "making exclude prefix").WithClues(ictx)
} }
ssmb.Add(p.String(), excluded) ssmb.Add(p.String(), excludedItemIDs)
continue continue
} }
@ -372,13 +384,11 @@ func (c *Collections) Get(
foundFolders[id] = struct{}{} foundFolders[id] = struct{}{}
} }
for fldID, p := range oldPaths { for fldID, p := range oldPrevPaths {
if _, ok := foundFolders[fldID]; ok { if _, ok := foundFolders[fldID]; ok {
continue continue
} }
delete(paths, fldID)
prevPath, err := path.FromDataLayerPath(p, false) prevPath, err := path.FromDataLayerPath(p, false)
if err != nil { if err != nil {
err = clues.Wrap(err, "invalid previous path").WithClues(ictx).With("deleted_path", p) err = clues.Wrap(err, "invalid previous path").WithClues(ictx).With("deleted_path", p)
@ -446,14 +456,14 @@ func (c *Collections) Get(
// empty/missing and default to a full backup. // empty/missing and default to a full backup.
logger.CtxErr(ctx, err).Info("making metadata collection path prefixes") logger.CtxErr(ctx, err).Info("making metadata collection path prefixes")
return collections, canUsePreviousBackup, nil return collections, canUsePrevBackup, nil
} }
md, err := graph.MakeMetadataCollection( md, err := graph.MakeMetadataCollection(
pathPrefix, pathPrefix,
[]graph.MetadataCollectionEntry{ []graph.MetadataCollectionEntry{
graph.NewMetadataEntry(bupMD.PreviousPathFileName, folderPaths), graph.NewMetadataEntry(bupMD.PreviousPathFileName, driveIDToPrevPaths),
graph.NewMetadataEntry(bupMD.DeltaURLsFileName, deltaURLs), graph.NewMetadataEntry(bupMD.DeltaURLsFileName, driveIDToDeltaLink),
}, },
c.statusUpdater) c.statusUpdater)
@ -466,7 +476,7 @@ func (c *Collections) Get(
collections = append(collections, md) collections = append(collections, md)
} }
return collections, canUsePreviousBackup, nil return collections, canUsePrevBackup, nil
} }
// addURLCacheToDriveCollections adds an URL cache to all collections belonging to // addURLCacheToDriveCollections adds an URL cache to all collections belonging to
@ -480,7 +490,7 @@ func (c *Collections) addURLCacheToDriveCollections(
driveID, driveID,
prevDelta, prevDelta,
urlCacheRefreshInterval, urlCacheRefreshInterval,
c.handler.NewItemPager(driveID, "", api.DriveItemSelectURLCache()), c.handler,
errs) errs)
if err != nil { if err != nil {
return err return err
@ -536,22 +546,21 @@ func updateCollectionPaths(
func (c *Collections) handleDelete( func (c *Collections) handleDelete(
itemID, driveID string, itemID, driveID string,
oldPaths, newPaths map[string]string, oldPrevPaths, currPrevPaths, newPrevPaths map[string]string,
isFolder bool, isFolder bool,
excluded map[string]struct{}, excluded map[string]struct{},
itemCollection map[string]map[string]string,
invalidPrevDelta bool, invalidPrevDelta bool,
) error { ) error {
if !isFolder { if !isFolder {
// Try to remove the item from the Collection if an entry exists for this // Try to remove the item from the Collection if an entry exists for this
// item. This handles cases where an item was created and deleted during the // item. This handles cases where an item was created and deleted during the
// same delta query. // same delta query.
if parentID, ok := itemCollection[driveID][itemID]; ok { if parentID, ok := currPrevPaths[itemID]; ok {
if col := c.CollectionMap[driveID][parentID]; col != nil { if col := c.CollectionMap[driveID][parentID]; col != nil {
col.Remove(itemID) col.Remove(itemID)
} }
delete(itemCollection[driveID], itemID) delete(currPrevPaths, itemID)
} }
// Don't need to add to exclude list if the delta is invalid since the // Don't need to add to exclude list if the delta is invalid since the
@ -572,7 +581,7 @@ func (c *Collections) handleDelete(
var prevPath path.Path var prevPath path.Path
prevPathStr, ok := oldPaths[itemID] prevPathStr, ok := oldPrevPaths[itemID]
if ok { if ok {
var err error var err error
@ -589,7 +598,7 @@ func (c *Collections) handleDelete(
// Nested folders also return deleted delta results so we don't have to // Nested folders also return deleted delta results so we don't have to
// worry about doing a prefix search in the map to remove the subtree of // worry about doing a prefix search in the map to remove the subtree of
// the deleted folder/package. // the deleted folder/package.
delete(newPaths, itemID) delete(newPrevPaths, itemID)
if prevPath == nil || invalidPrevDelta { if prevPath == nil || invalidPrevDelta {
// It is possible that an item was created and deleted between two delta // It is possible that an item was created and deleted between two delta
@ -679,21 +688,29 @@ func (c *Collections) getCollectionPath(
// UpdateCollections initializes and adds the provided drive items to Collections // UpdateCollections initializes and adds the provided drive items to Collections
// A new collection is created for every drive folder (or package). // A new collection is created for every drive folder (or package).
// oldPaths is the unchanged data that was loaded from the metadata file. // oldPrevPaths is the unchanged data that was loaded from the metadata file.
// newPaths starts as a copy of oldPaths and is updated as changes are found in // This map is not modified during the call.
// the returned results. // currPrevPaths starts as a copy of oldPaths and is updated as changes are found in
// the returned results. Items are added to this collection throughout the call.
// newPrevPaths, ie: the items added during this call, get returned as a map.
func (c *Collections) UpdateCollections( func (c *Collections) UpdateCollections(
ctx context.Context, ctx context.Context,
driveID, driveName string, driveID, driveName string,
items []models.DriveItemable, items []models.DriveItemable,
oldPaths map[string]string, oldPrevPaths map[string]string,
newPaths map[string]string, currPrevPaths map[string]string,
excluded map[string]struct{}, excluded map[string]struct{},
itemCollection map[string]map[string]string,
invalidPrevDelta bool, invalidPrevDelta bool,
errs *fault.Bus, errs *fault.Bus,
) error { ) (map[string]string, error) {
el := errs.Local() var (
el = errs.Local()
newPrevPaths = map[string]string{}
)
if !invalidPrevDelta {
maps.Copy(newPrevPaths, oldPrevPaths)
}
for _, item := range items { for _, item := range items {
if el.Failure() != nil { if el.Failure() != nil {
@ -703,8 +720,12 @@ func (c *Collections) UpdateCollections(
var ( var (
itemID = ptr.Val(item.GetId()) itemID = ptr.Val(item.GetId())
itemName = ptr.Val(item.GetName()) itemName = ptr.Val(item.GetName())
ictx = clues.Add(ctx, "item_id", itemID, "item_name", clues.Hide(itemName))
isFolder = item.GetFolder() != nil || item.GetPackageEscaped() != nil isFolder = item.GetFolder() != nil || item.GetPackageEscaped() != nil
ictx = clues.Add(
ctx,
"item_id", itemID,
"item_name", clues.Hide(itemName),
"item_is_folder", isFolder)
) )
if item.GetMalware() != nil { if item.GetMalware() != nil {
@ -726,13 +747,13 @@ func (c *Collections) UpdateCollections(
if err := c.handleDelete( if err := c.handleDelete(
itemID, itemID,
driveID, driveID,
oldPaths, oldPrevPaths,
newPaths, currPrevPaths,
newPrevPaths,
isFolder, isFolder,
excluded, excluded,
itemCollection,
invalidPrevDelta); err != nil { invalidPrevDelta); err != nil {
return clues.Stack(err).WithClues(ictx) return nil, clues.Stack(err).WithClues(ictx)
} }
continue continue
@ -758,13 +779,13 @@ func (c *Collections) UpdateCollections(
// Deletions are handled above so this is just moves/renames. // Deletions are handled above so this is just moves/renames.
var prevPath path.Path var prevPath path.Path
prevPathStr, ok := oldPaths[itemID] prevPathStr, ok := oldPrevPaths[itemID]
if ok { if ok {
prevPath, err = path.FromDataLayerPath(prevPathStr, false) prevPath, err = path.FromDataLayerPath(prevPathStr, false)
if err != nil { if err != nil {
el.AddRecoverable(ctx, clues.Wrap(err, "invalid previous path"). el.AddRecoverable(ctx, clues.Wrap(err, "invalid previous path").
WithClues(ictx). WithClues(ictx).
With("path_string", prevPathStr)) With("prev_path_string", path.LoggableDir(prevPathStr)))
} }
} else if item.GetRoot() != nil { } else if item.GetRoot() != nil {
// Root doesn't move or get renamed. // Root doesn't move or get renamed.
@ -774,11 +795,11 @@ func (c *Collections) UpdateCollections(
// Moved folders don't cause delta results for any subfolders nested in // Moved folders don't cause delta results for any subfolders nested in
// them. We need to go through and update paths to handle that. We only // them. We need to go through and update paths to handle that. We only
// update newPaths so we don't accidentally clobber previous deletes. // update newPaths so we don't accidentally clobber previous deletes.
updatePath(newPaths, itemID, collectionPath.String()) updatePath(newPrevPaths, itemID, collectionPath.String())
found, err := updateCollectionPaths(driveID, itemID, c.CollectionMap, collectionPath) found, err := updateCollectionPaths(driveID, itemID, c.CollectionMap, collectionPath)
if err != nil { if err != nil {
return clues.Stack(err).WithClues(ictx) return nil, clues.Stack(err).WithClues(ictx)
} }
if found { if found {
@ -801,7 +822,7 @@ func (c *Collections) UpdateCollections(
invalidPrevDelta, invalidPrevDelta,
nil) nil)
if err != nil { if err != nil {
return clues.Stack(err).WithClues(ictx) return nil, clues.Stack(err).WithClues(ictx)
} }
col.driveName = driveName col.driveName = driveName
@ -823,35 +844,38 @@ func (c *Collections) UpdateCollections(
case item.GetFile() != nil: case item.GetFile() != nil:
// Deletions are handled above so this is just moves/renames. // Deletions are handled above so this is just moves/renames.
if len(ptr.Val(item.GetParentReference().GetId())) == 0 { if len(ptr.Val(item.GetParentReference().GetId())) == 0 {
return clues.New("file without parent ID").WithClues(ictx) return nil, clues.New("file without parent ID").WithClues(ictx)
} }
// Get the collection for this item. // Get the collection for this item.
parentID := ptr.Val(item.GetParentReference().GetId()) parentID := ptr.Val(item.GetParentReference().GetId())
ictx = clues.Add(ictx, "parent_id", parentID) ictx = clues.Add(ictx, "parent_id", parentID)
collection, found := c.CollectionMap[driveID][parentID] collection, ok := c.CollectionMap[driveID][parentID]
if !found { if !ok {
return clues.New("item seen before parent folder").WithClues(ictx) return nil, clues.New("item seen before parent folder").WithClues(ictx)
} }
// Delete the file from previous collection. This will // This will only kick in if the file was moved multiple times
// only kick in if the file was moved multiple times // within a single delta query. We delete the file from the previous
// within a single delta query // collection so that it doesn't appear in two places.
icID, found := itemCollection[driveID][itemID] prevParentContainerID, ok := currPrevPaths[itemID]
if found { if ok {
pcollection, found := c.CollectionMap[driveID][icID] prevColl, found := c.CollectionMap[driveID][prevParentContainerID]
if !found { if !found {
return clues.New("previous collection not found").WithClues(ictx) return nil, clues.New("previous collection not found").
With("prev_parent_container_id", prevParentContainerID).
WithClues(ictx)
} }
removed := pcollection.Remove(itemID) if ok := prevColl.Remove(itemID); !ok {
if !removed { return nil, clues.New("removing item from prev collection").
return clues.New("removing from prev collection").WithClues(ictx) With("prev_parent_container_id", prevParentContainerID).
WithClues(ictx)
} }
} }
itemCollection[driveID][itemID] = parentID currPrevPaths[itemID] = parentID
if collection.Add(item) { if collection.Add(item) {
c.NumItems++ c.NumItems++
@ -872,11 +896,13 @@ func (c *Collections) UpdateCollections(
} }
default: default:
return clues.New("item type not supported").WithClues(ictx) el.AddRecoverable(ictx, clues.New("item is neither folder nor file").
WithClues(ictx).
Label(fault.LabelForceNoBackupCreation))
} }
} }
return el.Failure() return newPrevPaths, el.Failure()
} }
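Reviewer note: the currPrevPaths bookkeeping above is subtle. It records which collection an item last landed in during this enumeration, so a file moved more than once within a single delta query is removed from its intermediate collection before being added to its final one. A minimal self-contained sketch of the pattern (stand-in types, not Corso's):

package main

import "fmt"

// collection is a stand-in for Corso's drive Collection.
type collection struct{ items map[string]struct{} }

func (c *collection) add(id string) { c.items[id] = struct{}{} }

func (c *collection) remove(id string) bool {
	if _, ok := c.items[id]; !ok {
		return false
	}
	delete(c.items, id)
	return true
}

func main() {
	colls := map[string]*collection{
		"folderA": {items: map[string]struct{}{}},
		"folderB": {items: map[string]struct{}{}},
	}

	// currParents mirrors currPrevPaths: item ID -> parent ID seen so far
	// within a single delta enumeration.
	currParents := map[string]string{}

	place := func(itemID, parentID string) error {
		// If the item already showed up under another parent in this delta,
		// drop it from that collection so it only appears in one place.
		if prev, ok := currParents[itemID]; ok && prev != parentID {
			if !colls[prev].remove(itemID) {
				return fmt.Errorf("removing %s from prev collection %s", itemID, prev)
			}
		}
		currParents[itemID] = parentID
		colls[parentID].add(itemID)
		return nil
	}

	place("file", "folderA") // first sighting
	place("file", "folderB") // moved again within the same delta query

	fmt.Println(len(colls["folderA"].items), len(colls["folderB"].items)) // 0 1
}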
type dirScopeChecker interface { type dirScopeChecker interface {
View File
@ -8,7 +8,6 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/google/uuid" "github.com/google/uuid"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
@ -136,7 +135,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedStatePath := getExpectedStatePathGenerator(suite.T(), bh, tenant, testBaseDrivePath) expectedStatePath := getExpectedStatePathGenerator(suite.T(), bh, tenant, testBaseDrivePath)
tests := []struct { tests := []struct {
testCase string name string
items []models.DriveItemable items []models.DriveItemable
inputFolderMap map[string]string inputFolderMap map[string]string
scope selectors.OneDriveScope scope selectors.OneDriveScope
@ -146,11 +145,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedContainerCount int expectedContainerCount int
expectedFileCount int expectedFileCount int
expectedSkippedCount int expectedSkippedCount int
expectedMetadataPaths map[string]string expectedPrevPaths map[string]string
expectedExcludes map[string]struct{} expectedExcludes map[string]struct{}
}{ }{
{ {
testCase: "Invalid item", name: "Invalid item",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("item", "item", testBaseDrivePath, "root", false, false, false), driveItem("item", "item", testBaseDrivePath, "root", false, false, false),
@ -162,13 +161,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
}, },
expectedContainerCount: 1, expectedContainerCount: 1,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
}, },
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "Single File", name: "Single File",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("file", "file", testBaseDrivePath, "root", true, false, false), driveItem("file", "file", testBaseDrivePath, "root", true, false, false),
@ -183,13 +182,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedFileCount: 1, expectedFileCount: 1,
expectedContainerCount: 1, expectedContainerCount: 1,
// Root folder is skipped since it's always present. // Root folder is skipped since it's always present.
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
}, },
expectedExcludes: getDelList("file"), expectedExcludes: getDelList("file"),
}, },
{ {
testCase: "Single Folder", name: "Single Folder",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -201,7 +200,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
"folder": expectedStatePath(data.NewState, folder), "folder": expectedStatePath(data.NewState, folder),
}, },
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath("/folder"), "folder": expectedPath("/folder"),
}, },
@ -210,7 +209,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "Single Package", name: "Single Package",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("package", "package", testBaseDrivePath, "root", false, false, true), driveItem("package", "package", testBaseDrivePath, "root", false, false, true),
@ -222,7 +221,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
"root": expectedStatePath(data.NotMovedState, ""), "root": expectedStatePath(data.NotMovedState, ""),
"package": expectedStatePath(data.NewState, pkg), "package": expectedStatePath(data.NewState, pkg),
}, },
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"package": expectedPath("/package"), "package": expectedPath("/package"),
}, },
@ -231,7 +230,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "1 root file, 1 folder, 1 package, 2 files, 3 collections", name: "1 root file, 1 folder, 1 package, 2 files, 3 collections",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
@ -251,7 +250,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 5, expectedItemCount: 5,
expectedFileCount: 3, expectedFileCount: 3,
expectedContainerCount: 3, expectedContainerCount: 3,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath("/folder"), "folder": expectedPath("/folder"),
"package": expectedPath("/package"), "package": expectedPath("/package"),
@ -259,7 +258,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: getDelList("fileInRoot", "fileInFolder", "fileInPackage"), expectedExcludes: getDelList("fileInRoot", "fileInFolder", "fileInPackage"),
}, },
{ {
testCase: "contains folder selector", name: "contains folder selector",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
@ -284,7 +283,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedContainerCount: 3, expectedContainerCount: 3,
// just "folder" isn't added here because the include check is done on the // just "folder" isn't added here because the include check is done on the
// parent path since we only check later if something is a folder or not. // parent path since we only check later if something is a folder or not.
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"folder": expectedPath(folder), "folder": expectedPath(folder),
"subfolder": expectedPath(folderSub), "subfolder": expectedPath(folderSub),
"folder2": expectedPath(folderSub + folder), "folder2": expectedPath(folderSub + folder),
@ -292,7 +291,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: getDelList("fileInFolder", "fileInFolder2"), expectedExcludes: getDelList("fileInFolder", "fileInFolder2"),
}, },
{ {
testCase: "prefix subfolder selector", name: "prefix subfolder selector",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
@ -315,14 +314,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 3, expectedItemCount: 3,
expectedFileCount: 1, expectedFileCount: 1,
expectedContainerCount: 2, expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"subfolder": expectedPath(folderSub), "subfolder": expectedPath(folderSub),
"folder2": expectedPath(folderSub + folder), "folder2": expectedPath(folderSub + folder),
}, },
expectedExcludes: getDelList("fileInFolder2"), expectedExcludes: getDelList("fileInFolder2"),
}, },
{ {
testCase: "match subfolder selector", name: "match subfolder selector",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
@ -343,13 +342,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedFileCount: 1, expectedFileCount: 1,
expectedContainerCount: 1, expectedContainerCount: 1,
// No child folders for subfolder so nothing here. // No child folders for subfolder so nothing here.
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"subfolder": expectedPath(folderSub), "subfolder": expectedPath(folderSub),
}, },
expectedExcludes: getDelList("fileInSubfolder"), expectedExcludes: getDelList("fileInSubfolder"),
}, },
{ {
testCase: "not moved folder tree", name: "not moved folder tree",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -367,7 +366,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 1, expectedItemCount: 1,
expectedFileCount: 0, expectedFileCount: 0,
expectedContainerCount: 2, expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath(folder), "folder": expectedPath(folder),
"subfolder": expectedPath(folderSub), "subfolder": expectedPath(folderSub),
@ -375,7 +374,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "moved folder tree", name: "moved folder tree",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -393,7 +392,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 1, expectedItemCount: 1,
expectedFileCount: 0, expectedFileCount: 0,
expectedContainerCount: 2, expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath(folder), "folder": expectedPath(folder),
"subfolder": expectedPath(folderSub), "subfolder": expectedPath(folderSub),
@ -401,7 +400,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "moved folder tree with file no previous", name: "moved folder tree with file no previous",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -418,14 +417,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 2, expectedItemCount: 2,
expectedFileCount: 1, expectedFileCount: 1,
expectedContainerCount: 2, expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath("/folder2"), "folder": expectedPath("/folder2"),
}, },
expectedExcludes: getDelList("file"), expectedExcludes: getDelList("file"),
}, },
{ {
testCase: "moved folder tree with file no previous 1", name: "moved folder tree with file no previous 1",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -441,14 +440,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 2, expectedItemCount: 2,
expectedFileCount: 1, expectedFileCount: 1,
expectedContainerCount: 2, expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath(folder), "folder": expectedPath(folder),
}, },
expectedExcludes: getDelList("file"), expectedExcludes: getDelList("file"),
}, },
{ {
testCase: "moved folder tree and subfolder 1", name: "moved folder tree and subfolder 1",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -468,7 +467,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 2, expectedItemCount: 2,
expectedFileCount: 0, expectedFileCount: 0,
expectedContainerCount: 3, expectedContainerCount: 3,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath(folder), "folder": expectedPath(folder),
"subfolder": expectedPath("/subfolder"), "subfolder": expectedPath("/subfolder"),
@ -476,7 +475,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "moved folder tree and subfolder 2", name: "moved folder tree and subfolder 2",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("subfolder", "subfolder", testBaseDrivePath, "root", false, true, false), driveItem("subfolder", "subfolder", testBaseDrivePath, "root", false, true, false),
@ -496,7 +495,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 2, expectedItemCount: 2,
expectedFileCount: 0, expectedFileCount: 0,
expectedContainerCount: 3, expectedContainerCount: 3,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath(folder), "folder": expectedPath(folder),
"subfolder": expectedPath("/subfolder"), "subfolder": expectedPath("/subfolder"),
@ -504,7 +503,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "move subfolder when moving parent", name: "move subfolder when moving parent",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder2", "folder2", testBaseDrivePath, "root", false, true, false), driveItem("folder2", "folder2", testBaseDrivePath, "root", false, true, false),
@ -538,7 +537,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 5, expectedItemCount: 5,
expectedFileCount: 2, expectedFileCount: 2,
expectedContainerCount: 4, expectedContainerCount: 4,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath("/folder"), "folder": expectedPath("/folder"),
"folder2": expectedPath("/folder2"), "folder2": expectedPath("/folder2"),
@ -547,7 +546,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: getDelList("itemInSubfolder", "itemInFolder2"), expectedExcludes: getDelList("itemInSubfolder", "itemInFolder2"),
}, },
{ {
testCase: "moved folder tree multiple times", name: "moved folder tree multiple times",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false), driveItem("folder", "folder", testBaseDrivePath, "root", false, true, false),
@ -567,7 +566,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 2, expectedItemCount: 2,
expectedFileCount: 1, expectedFileCount: 1,
expectedContainerCount: 2, expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath("/folder2"), "folder": expectedPath("/folder2"),
"subfolder": expectedPath("/folder2/subfolder"), "subfolder": expectedPath("/folder2/subfolder"),
@ -575,7 +574,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedExcludes: getDelList("file"), expectedExcludes: getDelList("file"),
}, },
{ {
testCase: "deleted folder and package", name: "deleted folder and package",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), // root is always present, but not necessary here driveRootItem("root"), // root is always present, but not necessary here
delItem("folder", testBaseDrivePath, "root", false, true, false), delItem("folder", testBaseDrivePath, "root", false, true, false),
@ -596,13 +595,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 0, expectedItemCount: 0,
expectedFileCount: 0, expectedFileCount: 0,
expectedContainerCount: 1, expectedContainerCount: 1,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
}, },
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "delete folder without previous", name: "delete folder without previous",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
delItem("folder", testBaseDrivePath, "root", false, true, false), delItem("folder", testBaseDrivePath, "root", false, true, false),
@ -618,13 +617,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 0, expectedItemCount: 0,
expectedFileCount: 0, expectedFileCount: 0,
expectedContainerCount: 1, expectedContainerCount: 1,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
}, },
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "delete folder tree move subfolder", name: "delete folder tree move subfolder",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
delItem("folder", testBaseDrivePath, "root", false, true, false), delItem("folder", testBaseDrivePath, "root", false, true, false),
@ -645,14 +644,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 1, expectedItemCount: 1,
expectedFileCount: 0, expectedFileCount: 0,
expectedContainerCount: 2, expectedContainerCount: 2,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"subfolder": expectedPath("/subfolder"), "subfolder": expectedPath("/subfolder"),
}, },
expectedExcludes: map[string]struct{}{}, expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "delete file", name: "delete file",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
delItem("item", testBaseDrivePath, "root", true, false, false), delItem("item", testBaseDrivePath, "root", true, false, false),
@ -668,13 +667,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 1, expectedItemCount: 1,
expectedFileCount: 1, expectedFileCount: 1,
expectedContainerCount: 1, expectedContainerCount: 1,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
}, },
expectedExcludes: getDelList("item"), expectedExcludes: getDelList("item"),
}, },
{ {
testCase: "item before parent errors", name: "item before parent errors",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("file", "file", testBaseDrivePath+"/folder", "folder", true, false, false), driveItem("file", "file", testBaseDrivePath+"/folder", "folder", true, false, false),
@ -689,13 +688,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedItemCount: 0, expectedItemCount: 0,
expectedFileCount: 0, expectedFileCount: 0,
expectedContainerCount: 1, expectedContainerCount: 1,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: nil,
"root": expectedPath(""), expectedExcludes: map[string]struct{}{},
},
expectedExcludes: map[string]struct{}{},
}, },
{ {
testCase: "1 root file, 1 folder, 1 package, 1 good file, 1 malware", name: "1 root file, 1 folder, 1 package, 1 good file, 1 malware",
items: []models.DriveItemable{ items: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false), driveItem("fileInRoot", "fileInRoot", testBaseDrivePath, "root", true, false, false),
@ -716,7 +713,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
expectedFileCount: 2, expectedFileCount: 2,
expectedContainerCount: 3, expectedContainerCount: 3,
expectedSkippedCount: 1, expectedSkippedCount: 1,
expectedMetadataPaths: map[string]string{ expectedPrevPaths: map[string]string{
"root": expectedPath(""), "root": expectedPath(""),
"folder": expectedPath("/folder"), "folder": expectedPath("/folder"),
"package": expectedPath("/package"), "package": expectedPath("/package"),
@ -725,26 +722,23 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
}, },
} }
for _, tt := range tests { for _, test := range tests {
suite.Run(tt.testCase, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
var ( var (
excludes = map[string]struct{}{} excludes = map[string]struct{}{}
outputFolderMap = map[string]string{} currPrevPaths = map[string]string{}
itemCollection = map[string]map[string]string{ errs = fault.New(true)
driveID: {},
}
errs = fault.New(true)
) )
maps.Copy(outputFolderMap, tt.inputFolderMap) maps.Copy(currPrevPaths, test.inputFolderMap)
c := NewCollections( c := NewCollections(
&itemBackupHandler{api.Drives{}, user, tt.scope}, &itemBackupHandler{api.Drives{}, user, test.scope},
tenant, tenant,
user, user,
nil, nil,
@ -752,25 +746,24 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
c.CollectionMap[driveID] = map[string]*Collection{} c.CollectionMap[driveID] = map[string]*Collection{}
err := c.UpdateCollections( newPrevPaths, err := c.UpdateCollections(
ctx, ctx,
driveID, driveID,
"General", "General",
tt.items, test.items,
tt.inputFolderMap, test.inputFolderMap,
outputFolderMap, currPrevPaths,
excludes, excludes,
itemCollection,
false, false,
errs) errs)
tt.expect(t, err, clues.ToCore(err)) test.expect(t, err, clues.ToCore(err))
assert.Equal(t, len(tt.expectedCollectionIDs), len(c.CollectionMap[driveID]), "total collections") assert.Equal(t, len(test.expectedCollectionIDs), len(c.CollectionMap[driveID]), "total collections")
assert.Equal(t, tt.expectedItemCount, c.NumItems, "item count") assert.Equal(t, test.expectedItemCount, c.NumItems, "item count")
assert.Equal(t, tt.expectedFileCount, c.NumFiles, "file count") assert.Equal(t, test.expectedFileCount, c.NumFiles, "file count")
assert.Equal(t, tt.expectedContainerCount, c.NumContainers, "container count") assert.Equal(t, test.expectedContainerCount, c.NumContainers, "container count")
assert.Equal(t, tt.expectedSkippedCount, len(errs.Skipped()), "skipped items") assert.Equal(t, test.expectedSkippedCount, len(errs.Skipped()), "skipped items")
for id, sp := range tt.expectedCollectionIDs { for id, sp := range test.expectedCollectionIDs {
if !assert.Containsf(t, c.CollectionMap[driveID], id, "missing collection with id %s", id) { if !assert.Containsf(t, c.CollectionMap[driveID], id, "missing collection with id %s", id) {
// Skip collections we don't find so we don't get an NPE. // Skip collections we don't find so we don't get an NPE.
continue continue
@ -781,8 +774,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
assert.Equalf(t, sp.prevPath, c.CollectionMap[driveID][id].PreviousPath(), "prev path for collection %s", id) assert.Equalf(t, sp.prevPath, c.CollectionMap[driveID][id].PreviousPath(), "prev path for collection %s", id)
} }
assert.Equal(t, tt.expectedMetadataPaths, outputFolderMap, "metadata paths") assert.Equal(t, test.expectedPrevPaths, newPrevPaths, "metadata paths")
assert.Equal(t, tt.expectedExcludes, excludes, "exclude list") assert.Equal(t, test.expectedExcludes, excludes, "exclude list")
}) })
} }
} }
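Reviewer note: the test loop copies each fixture map before handing it to the code under test; without the copy, mutations would leak back into the shared table entry and poison later subtests. A tiny sketch of the idiom (stdlib maps here, Go 1.21+; the diff uses a maps.Copy with the same signature, likely golang.org/x/exp/maps as elsewhere in the repo):

package main

import (
	"fmt"
	"maps"
)

func main() {
	// fixture from a test-table entry
	inputFolderMap := map[string]string{"root": "/drive/root:"}

	// copy before passing to the code under test, so mutations there
	// cannot contaminate the shared table entry
	currPrevPaths := map[string]string{}
	maps.Copy(currPrevPaths, inputFolderMap)

	currPrevPaths["folder"] = "/drive/root:/folder"

	fmt.Println(len(inputFolderMap), len(currPrevPaths)) // 1 2
}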
@ -1300,7 +1293,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("folder", "folder", driveBasePath1, "root", false, true, false), driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -1338,7 +1332,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
driveItem("file", "file2", driveBasePath1+"/folder", "folder", true, false, false), driveItem("file", "file2", driveBasePath1+"/folder", "folder", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -1415,7 +1410,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("folder", "folder", driveBasePath1, "root", false, true, false), driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
}, },
DeltaLink: &empty, // probably will never happen with graph DeltaLink: &empty, // probably will never happen with graph
ResetDelta: true,
}, },
}, },
}, },
@ -1452,7 +1448,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("folder", "folder", driveBasePath1, "root", false, true, false), driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
}, },
NextLink: &next, NextLink: &next,
ResetDelta: true,
}, },
{ {
Values: []models.DriveItemable{ Values: []models.DriveItemable{
@ -1460,7 +1457,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("folder", "folder", driveBasePath1, "root", false, true, false), driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false), driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -1502,7 +1500,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("folder", "folder", driveBasePath1, "root", false, true, false), driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
driveID2: { driveID2: {
@ -1512,7 +1511,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("folder2", "folder", driveBasePath2, "root2", false, true, false), driveItem("folder2", "folder", driveBasePath2, "root2", false, true, false),
driveItem("file2", "file", driveBasePath2+"/folder", "folder2", true, false, false), driveItem("file2", "file", driveBasePath2+"/folder", "folder2", true, false, false),
}, },
DeltaLink: &delta2, DeltaLink: &delta2,
ResetDelta: true,
}, },
}, },
}, },
@ -1564,7 +1564,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("folder", "folder", driveBasePath1, "root", false, true, false), driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false), driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
driveID2: { driveID2: {
@ -1574,7 +1575,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("folder", "folder", driveBasePath2, "root", false, true, false), driveItem("folder", "folder", driveBasePath2, "root", false, true, false),
driveItem("file2", "file", driveBasePath2+"/folder", "folder", true, false, false), driveItem("file2", "file", driveBasePath2+"/folder", "folder", true, false, false),
}, },
DeltaLink: &delta2, DeltaLink: &delta2,
ResetDelta: true,
}, },
}, },
}, },
@ -1632,87 +1634,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
expectedFolderPaths: nil, expectedFolderPaths: nil,
expectedDelList: nil, expectedDelList: nil,
}, },
{
name: "OneDrive_OneItemPage_DeltaError",
drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
Err: getDeltaError(),
},
{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("file", "file", driveBasePath1, "root", true, false, false),
},
DeltaLink: &delta,
},
},
},
canUsePreviousBackup: true,
errCheck: assert.NoError,
expectedCollections: map[string]map[data.CollectionState][]string{
rootFolderPath1: {data.NotMovedState: {"file"}},
},
expectedDeltaURLs: map[string]string{
driveID1: delta,
},
expectedFolderPaths: map[string]map[string]string{
driveID1: {
"root": rootFolderPath1,
},
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
rootFolderPath1: true,
},
},
{
name: "OneDrive_TwoItemPage_DeltaError",
drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
Err: getDeltaError(),
},
{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("file", "file", driveBasePath1, "root", true, false, false),
},
NextLink: &next,
},
{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file2", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
DeltaLink: &delta,
},
},
},
canUsePreviousBackup: true,
errCheck: assert.NoError,
expectedCollections: map[string]map[data.CollectionState][]string{
rootFolderPath1: {data.NotMovedState: {"file"}},
expectedPath1("/folder"): {data.NewState: {"folder", "file2"}},
},
expectedDeltaURLs: map[string]string{
driveID1: delta,
},
expectedFolderPaths: map[string]map[string]string{
driveID1: {
"root": rootFolderPath1,
"folder": folderPath1,
},
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
rootFolderPath1: true,
folderPath1: true,
},
},
{ {
name: "OneDrive_TwoItemPage_NoDeltaError", name: "OneDrive_TwoItemPage_NoDeltaError",
drives: []models.Driveable{drive1}, drives: []models.Driveable{drive1},
@ -1765,16 +1686,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
drives: []models.Driveable{drive1}, drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{ items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: { driveID1: {
{
Err: getDeltaError(),
},
{ {
Values: []models.DriveItemable{ Values: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder2", "folder2", driveBasePath1, "root", false, true, false), driveItem("folder2", "folder2", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder2", "folder2", true, false, false), driveItem("file", "file", driveBasePath1+"/folder2", "folder2", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -1812,16 +1731,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
drives: []models.Driveable{drive1}, drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{ items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: { driveID1: {
{
Err: getDeltaError(),
},
{ {
Values: []models.DriveItemable{ Values: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
driveItem("folder2", "folder", driveBasePath1, "root", false, true, false), driveItem("folder2", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder2", true, false, false), driveItem("file", "file", driveBasePath1+"/folder", "folder2", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -1878,7 +1795,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false), driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false),
malwareItem("malware2", "malware2", driveBasePath1+"/folder", "folder", true, false, false), malwareItem("malware2", "malware2", driveBasePath1+"/folder", "folder", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -1908,13 +1826,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
expectedSkippedCount: 2, expectedSkippedCount: 2,
}, },
{ {
name: "One Drive Delta Error Deleted Folder In New Results", name: "One Drive Deleted Folder In New Results",
drives: []models.Driveable{drive1}, drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{ items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: { driveID1: {
{
Err: getDeltaError(),
},
{ {
Values: []models.DriveItemable{ Values: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
@ -1931,7 +1846,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
delItem("folder2", driveBasePath1, "root", false, true, false), delItem("folder2", driveBasePath1, "root", false, true, false),
delItem("file2", driveBasePath1, "root", true, false, false), delItem("file2", driveBasePath1, "root", true, false, false),
}, },
DeltaLink: &delta2, DeltaLink: &delta2,
ResetDelta: true,
}, },
}, },
}, },
@ -1966,19 +1882,17 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
}, },
}, },
{ {
name: "One Drive Delta Error Random Folder Delete", name: "One Drive Random Folder Delete",
drives: []models.Driveable{drive1}, drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{ items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: { driveID1: {
{
Err: getDeltaError(),
},
{ {
Values: []models.DriveItemable{ Values: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
delItem("folder", driveBasePath1, "root", false, true, false), delItem("folder", driveBasePath1, "root", false, true, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -2009,19 +1923,17 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
}, },
}, },
{ {
name: "One Drive Delta Error Random Item Delete", name: "One Drive Random Item Delete",
drives: []models.Driveable{drive1}, drives: []models.Driveable{drive1},
items: map[string][]apiMock.PagerResult[models.DriveItemable]{ items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: { driveID1: {
{
Err: getDeltaError(),
},
{ {
Values: []models.DriveItemable{ Values: []models.DriveItemable{
driveRootItem("root"), driveRootItem("root"),
delItem("file", driveBasePath1, "root", true, false, false), delItem("file", driveBasePath1, "root", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -2067,7 +1979,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
delItem("folder", driveBasePath1, "root", false, true, false), delItem("folder", driveBasePath1, "root", false, true, false),
delItem("file", driveBasePath1, "root", true, false, false), delItem("file", driveBasePath1, "root", true, false, false),
}, },
DeltaLink: &delta2, DeltaLink: &delta2,
ResetDelta: true,
}, },
}, },
}, },
@ -2110,7 +2023,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveRootItem("root"), driveRootItem("root"),
delItem("file", driveBasePath1, "root", true, false, false), delItem("file", driveBasePath1, "root", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -2148,7 +2062,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveRootItem("root"), driveRootItem("root"),
delItem("folder", driveBasePath1, "root", false, true, false), delItem("folder", driveBasePath1, "root", false, true, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -2183,7 +2098,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
driveRootItem("root"), driveRootItem("root"),
delItem("file", driveBasePath1, "root", true, false, false), delItem("file", driveBasePath1, "root", true, false, false),
}, },
DeltaLink: &delta, DeltaLink: &delta,
ResetDelta: true,
}, },
}, },
}, },
@ -2265,6 +2181,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
mbh := mock.DefaultOneDriveBH("a-user") mbh := mock.DefaultOneDriveBH("a-user")
mbh.DrivePagerV = mockDrivePager mbh.DrivePagerV = mockDrivePager
mbh.ItemPagerV = itemPagers mbh.ItemPagerV = itemPagers
mbh.DriveItemEnumeration = mock.PagerResultToEDID(test.items)
c := NewCollections( c := NewCollections(
mbh, mbh,
@ -2491,121 +2408,6 @@ func delItem(
return item return item
} }
func getDeltaError() error {
syncStateNotFound := "SyncStateNotFound"
me := odataerrors.NewMainError()
me.SetCode(&syncStateNotFound)
deltaError := odataerrors.NewODataError()
deltaError.SetErrorEscaped(me)
return deltaError
}
func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
next := "next"
delta := "delta"
prevDelta := "prev-delta"
table := []struct {
name string
items []apiMock.PagerResult[models.DriveItemable]
deltaURL string
prevDeltaSuccess bool
prevDelta string
err error
}{
{
name: "delta on first run",
deltaURL: delta,
items: []apiMock.PagerResult[models.DriveItemable]{
{DeltaLink: &delta},
},
prevDeltaSuccess: true,
prevDelta: prevDelta,
},
{
name: "empty prev delta",
deltaURL: delta,
items: []apiMock.PagerResult[models.DriveItemable]{
{DeltaLink: &delta},
},
prevDeltaSuccess: false,
prevDelta: "",
},
{
name: "next then delta",
deltaURL: delta,
items: []apiMock.PagerResult[models.DriveItemable]{
{NextLink: &next},
{DeltaLink: &delta},
},
prevDeltaSuccess: true,
prevDelta: prevDelta,
},
{
name: "invalid prev delta",
deltaURL: delta,
items: []apiMock.PagerResult[models.DriveItemable]{
{Err: getDeltaError()},
{DeltaLink: &delta}, // works on retry
},
prevDelta: prevDelta,
prevDeltaSuccess: false,
},
{
name: "fail a normal delta query",
items: []apiMock.PagerResult[models.DriveItemable]{
{NextLink: &next},
{Err: assert.AnError},
},
prevDelta: prevDelta,
prevDeltaSuccess: true,
err: assert.AnError,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
itemPager := &apiMock.DeltaPager[models.DriveItemable]{
ToReturn: test.items,
}
collectorFunc := func(
ctx context.Context,
driveID, driveName string,
driveItems []models.DriveItemable,
oldPaths map[string]string,
newPaths map[string]string,
excluded map[string]struct{},
itemCollection map[string]map[string]string,
doNotMergeItems bool,
errs *fault.Bus,
) error {
return nil
}
delta, _, _, err := collectItems(
ctx,
itemPager,
"",
"General",
collectorFunc,
map[string]string{},
test.prevDelta,
fault.New(true))
require.ErrorIs(t, err, test.err, "delta fetch err", clues.ToCore(err))
require.Equal(t, test.deltaURL, delta.URL, "delta url")
require.Equal(t, !test.prevDeltaSuccess, delta.Reset, "delta reset")
})
}
}
func (suite *OneDriveCollectionsUnitSuite) TestAddURLCacheToDriveCollections() { func (suite *OneDriveCollectionsUnitSuite) TestAddURLCacheToDriveCollections() {
driveID := "test-drive" driveID := "test-drive"
collCount := 3 collCount := 3
View File
@ -36,6 +36,7 @@ type BackupHandler interface {
GetItemPermissioner GetItemPermissioner
GetItemer GetItemer
NewDrivePagerer NewDrivePagerer
EnumerateDriveItemsDeltaer
// PathPrefix constructs the service and category specific path prefix for // PathPrefix constructs the service and category specific path prefix for
// the given values. // the given values.
@ -50,7 +51,7 @@ type BackupHandler interface {
// ServiceCat returns the service and category used by this implementation. // ServiceCat returns the service and category used by this implementation.
ServiceCat() (path.ServiceType, path.CategoryType) ServiceCat() (path.ServiceType, path.CategoryType)
NewItemPager(driveID, link string, fields []string) api.DeltaPager[models.DriveItemable]
// FormatDisplayPath creates a human-readable string to represent the // FormatDisplayPath creates a human-readable string to represent the
// provided path. // provided path.
FormatDisplayPath(driveName string, parentPath *path.Builder) string FormatDisplayPath(driveName string, parentPath *path.Builder) string
@ -79,6 +80,17 @@ type GetItemer interface {
) (models.DriveItemable, error) ) (models.DriveItemable, error)
} }
type EnumerateDriveItemsDeltaer interface {
EnumerateDriveItemsDelta(
ctx context.Context,
driveID, prevDeltaLink string,
) (
[]models.DriveItemable,
api.DeltaUpdate,
error,
)
}
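Reviewer note: the new interface collapses the old pager-plus-collectItems dance into one call that returns the full delta result set plus the updated token. A sketch of a consumer, using stand-in types for models.DriveItemable and api.DeltaUpdate (the real types live in the Graph SDK and Corso's api package); the fake implementation mirrors what mock.PagerResultToEDID provides for tests:

package main

import (
	"context"
	"fmt"
)

// driveItem and deltaUpdate stand in for models.DriveItemable and
// api.DeltaUpdate.
type driveItem struct{ id string }

type deltaUpdate struct {
	URL   string // the new delta link
	Reset bool   // true if the previous delta link was invalidated
}

type enumerateDriveItemsDeltaer interface {
	enumerateDriveItemsDelta(ctx context.Context, driveID, prevDeltaLink string) ([]driveItem, deltaUpdate, error)
}

// fakeEnumerator shows why the interface exists: tests can satisfy it
// without standing up a Graph client.
type fakeEnumerator struct{}

func (fakeEnumerator) enumerateDriveItemsDelta(
	ctx context.Context,
	driveID, prevDeltaLink string,
) ([]driveItem, deltaUpdate, error) {
	return []driveItem{{id: "file"}}, deltaUpdate{URL: "delta-2", Reset: prevDeltaLink == ""}, nil
}

func backupDrive(ctx context.Context, e enumerateDriveItemsDeltaer, driveID, prevDelta string) (string, error) {
	items, du, err := e.enumerateDriveItemsDelta(ctx, driveID, prevDelta)
	if err != nil {
		return "", err
	}
	if du.Reset {
		// the previous token was rejected: cached path state must be rebuilt
	}
	fmt.Println(len(items), "items enumerated")
	return du.URL, nil
}

func main() {
	next, _ := backupDrive(context.Background(), fakeEnumerator{}, "drive-1", "")
	fmt.Println("next delta:", next)
}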
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// restore // restore
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
View File
@ -1,142 +0,0 @@
package drive
import (
"context"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// DeltaUpdate holds the result of a delta-token enumeration: the new
// delta link, plus whether the previous link was marked invalid. It
// normally gets produced when aggregating the addition and removal of
// items in a delta-queryable folder.
// FIXME: This is the same as exchange.api.DeltaUpdate
type DeltaUpdate struct {
// the deltaLink itself
URL string
// true if the old delta was marked as invalid
Reset bool
}
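Reviewer note: the Reset flag drives the ResetDelta/doNotMergeItems behavior exercised throughout TestGet above: once the old token is rejected, everything derived from it must be discarded. A simplified sketch of that propagation (an assumed simplification, not the exact Corso flow):

package main

import "fmt"

type deltaUpdate struct {
	URL   string
	Reset bool
}

func main() {
	du := deltaUpdate{URL: "https://graph.example/delta?token=2", Reset: true}

	prevPaths := map[string]string{"folder": "/root:/folder"}

	if du.Reset {
		// the old token (and state derived from it) is void: restart path
		// state from scratch and flag collections so stale items in the
		// previous snapshot aren't merged forward
		prevPaths = map[string]string{}
	}

	doNotMergeItems := du.Reset

	fmt.Println(len(prevPaths), doNotMergeItems) // 0 true
}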
// itemCollector functions collect the items found in a drive
type itemCollector func(
ctx context.Context,
driveID, driveName string,
driveItems []models.DriveItemable,
oldPaths map[string]string,
newPaths map[string]string,
excluded map[string]struct{},
itemCollections map[string]map[string]string,
validPrevDelta bool,
errs *fault.Bus,
) error
// collectItems will enumerate all items in the specified drive and hand them to the
// provided `collector` method
func collectItems(
ctx context.Context,
pager api.DeltaPager[models.DriveItemable],
driveID, driveName string,
collector itemCollector,
oldPaths map[string]string,
prevDelta string,
errs *fault.Bus,
) (
DeltaUpdate,
map[string]string, // newPaths
map[string]struct{}, // excluded
error,
) {
var (
newDeltaURL = ""
newPaths = map[string]string{}
excluded = map[string]struct{}{}
invalidPrevDelta = len(prevDelta) == 0
// itemCollection is used to identify which collection a
// file belongs to. This is useful to delete a file from the
// collection it was previously in, in case it was moved to a
// different collection within the same delta query
// drive ID -> item ID -> parent collection ID
itemCollection = map[string]map[string]string{
driveID: {},
}
)
if !invalidPrevDelta {
maps.Copy(newPaths, oldPaths)
pager.SetNextLink(prevDelta)
}
for {
// assume delta urls here, which allows single-token consumption
page, err := pager.GetPage(graph.ConsumeNTokens(ctx, graph.SingleGetOrDeltaLC))
if graph.IsErrInvalidDelta(err) {
logger.Ctx(ctx).Infow("Invalid previous delta link", "link", prevDelta)
invalidPrevDelta = true
newPaths = map[string]string{}
pager.Reset(ctx)
continue
}
if err != nil {
return DeltaUpdate{}, nil, nil, graph.Wrap(ctx, err, "getting page")
}
vals := page.GetValue()
err = collector(
ctx,
driveID,
driveName,
vals,
oldPaths,
newPaths,
excluded,
itemCollection,
invalidPrevDelta,
errs)
if err != nil {
return DeltaUpdate{}, nil, nil, err
}
nextLink, deltaLink := api.NextAndDeltaLink(page)
if len(deltaLink) > 0 {
newDeltaURL = deltaLink
}
// Check if there are more items
if len(nextLink) == 0 {
break
}
logger.Ctx(ctx).Debugw("Found nextLink", "link", nextLink)
pager.SetNextLink(nextLink)
}
return DeltaUpdate{URL: newDeltaURL, Reset: invalidPrevDelta}, newPaths, excluded, nil
}
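Reviewer note: although collectItems is deleted here in favor of EnumerateDriveItemsDelta, the loop it implemented is the canonical delta-enumeration shape: follow nextLinks until a deltaLink appears, and on an invalid previous token, reset accumulated state and re-enumerate from scratch. A self-contained sketch (stand-in types, not the Graph SDK):

package main

import (
	"errors"
	"fmt"
)

var errInvalidDelta = errors.New("SyncStateNotFound")

// page stands in for one response page of a Graph delta query.
type page struct {
	values    []string
	nextLink  string
	deltaLink string
}

type pager interface {
	getPage(link string) (page, error)
}

// enumerate mirrors the loop collectItems used: walk nextLinks, capture the
// deltaLink, and restart with a full enumeration if the token is stale.
func enumerate(p pager, prevDelta string) (items []string, newDelta string, reset bool, err error) {
	link := prevDelta

	for {
		pg, err := p.getPage(link)
		if errors.Is(err, errInvalidDelta) {
			// stale token: drop accumulated state, restart from scratch
			items, reset, link = nil, true, ""
			continue
		}
		if err != nil {
			return nil, "", false, fmt.Errorf("getting page: %w", err)
		}

		items = append(items, pg.values...)

		if pg.deltaLink != "" {
			newDelta = pg.deltaLink
		}
		if pg.nextLink == "" {
			return items, newDelta, reset, nil
		}
		link = pg.nextLink
	}
}

// fakePager fails the first (stale-token) call, then serves a full page.
type fakePager struct{ calls int }

func (f *fakePager) getPage(link string) (page, error) {
	f.calls++
	if f.calls == 1 {
		return page{}, errInvalidDelta
	}
	return page{values: []string{"root", "file"}, deltaLink: "delta-2"}, nil
}

func main() {
	items, delta, reset, err := enumerate(&fakePager{}, "stale-token")
	fmt.Println(items, delta, reset, err) // [root file] delta-2 true <nil>
}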
// newItem initializes a `models.DriveItemable` that can be used as input to `createItem`
func newItem(name string, folder bool) *models.DriveItem {
itemToCreate := models.NewDriveItem()
itemToCreate.SetName(&name)
if folder {
itemToCreate.SetFolder(models.NewFolder())
} else {
itemToCreate.SetFile(models.NewFile())
}
return itemToCreate
}
View File
@ -87,13 +87,6 @@ func (h itemBackupHandler) NewDrivePager(
return h.ac.NewUserDrivePager(resourceOwner, fields) return h.ac.NewUserDrivePager(resourceOwner, fields)
} }
func (h itemBackupHandler) NewItemPager(
driveID, link string,
fields []string,
) api.DeltaPager[models.DriveItemable] {
return h.ac.NewDriveItemDeltaPager(driveID, link, fields)
}
func (h itemBackupHandler) AugmentItemInfo( func (h itemBackupHandler) AugmentItemInfo(
dii details.ItemInfo, dii details.ItemInfo,
item models.DriveItemable, item models.DriveItemable,
@ -139,6 +132,13 @@ func (h itemBackupHandler) IncludesDir(dir string) bool {
return h.scope.Matches(selectors.OneDriveFolder, dir) return h.scope.Matches(selectors.OneDriveFolder, dir)
} }
func (h itemBackupHandler) EnumerateDriveItemsDelta(
ctx context.Context,
driveID, prevDeltaLink string,
) ([]models.DriveItemable, api.DeltaUpdate, error) {
return h.ac.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink)
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Restore // Restore
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
View File
@ -20,8 +20,6 @@ import (
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
) )
@ -60,83 +58,6 @@ func (suite *ItemIntegrationSuite) SetupSuite() {
suite.userDriveID = ptr.Val(odDrives[0].GetId()) suite.userDriveID = ptr.Val(odDrives[0].GetId())
} }
// TestItemReader is an integration test that makes a few assumptions
// about the test environment
// 1) It assumes the test user has a drive
// 2) It assumes the drive has a file it can use to test `driveItemReader`
// The test verifies these assumptions below.
func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var driveItem models.DriveItemable
// This item collector tries to find "a" drive item that is a non-empty
// file to test the reader function
itemCollector := func(
_ context.Context,
_, _ string,
items []models.DriveItemable,
_ map[string]string,
_ map[string]string,
_ map[string]struct{},
_ map[string]map[string]string,
_ bool,
_ *fault.Bus,
) error {
if driveItem != nil {
return nil
}
for _, item := range items {
if item.GetFile() != nil && ptr.Val(item.GetSize()) > 0 {
driveItem = item
break
}
}
return nil
}
ip := suite.service.ac.
Drives().
NewDriveItemDeltaPager(suite.userDriveID, "", api.DriveItemSelectDefault())
_, _, _, err := collectItems(
ctx,
ip,
suite.userDriveID,
"General",
itemCollector,
map[string]string{},
"",
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
// Test Requirement 2: Need a file
require.NotEmpty(
t,
driveItem,
"no file item found for user %s drive %s",
suite.user,
suite.userDriveID)
bh := itemBackupHandler{
suite.service.ac.Drives(),
suite.user,
(&selectors.OneDriveBackup{}).Folders(selectors.Any())[0],
}
// Read data for the file
itemData, err := downloadItem(ctx, bh, driveItem)
require.NoError(t, err, clues.ToCore(err))
size, err := io.Copy(io.Discard, itemData)
require.NoError(t, err, clues.ToCore(err))
require.NotZero(t, size)
}
// TestItemWriter is an integration test for uploading data to OneDrive // TestItemWriter is an integration test for uploading data to OneDrive
// It creates a new folder with a new item and writes data to it // It creates a new folder with a new item and writes data to it
func (suite *ItemIntegrationSuite) TestItemWriter() { func (suite *ItemIntegrationSuite) TestItemWriter() {
@ -171,7 +92,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
ctx, ctx,
test.driveID, test.driveID,
ptr.Val(root.GetId()), ptr.Val(root.GetId()),
newItem(newFolderName, true), api.NewDriveItem(newFolderName, true),
control.Copy) control.Copy)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, newFolder.GetId()) require.NotNil(t, newFolder.GetId())
@ -183,7 +104,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
ctx, ctx,
test.driveID, test.driveID,
ptr.Val(newFolder.GetId()), ptr.Val(newFolder.GetId()),
newItem(newItemName, false), api.NewDriveItem(newItemName, false),
control.Copy) control.Copy)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, newItem.GetId()) require.NotNil(t, newItem.GetId())
@ -317,7 +238,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
{ {
name: "success", name: "success",
itemFunc: func() models.DriveItemable { itemFunc: func() models.DriveItemable {
di := newItem("test", false) di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{ di.SetAdditionalData(map[string]any{
"@microsoft.graph.downloadUrl": url, "@microsoft.graph.downloadUrl": url,
}) })
@ -336,7 +257,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
{ {
name: "success, content url set instead of download url", name: "success, content url set instead of download url",
itemFunc: func() models.DriveItemable { itemFunc: func() models.DriveItemable {
di := newItem("test", false) di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{ di.SetAdditionalData(map[string]any{
"@content.downloadUrl": url, "@content.downloadUrl": url,
}) })
@ -355,7 +276,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
{ {
name: "api getter returns error", name: "api getter returns error",
itemFunc: func() models.DriveItemable { itemFunc: func() models.DriveItemable {
di := newItem("test", false) di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{ di.SetAdditionalData(map[string]any{
"@microsoft.graph.downloadUrl": url, "@microsoft.graph.downloadUrl": url,
}) })
@ -371,7 +292,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
{ {
name: "download url is empty", name: "download url is empty",
itemFunc: func() models.DriveItemable { itemFunc: func() models.DriveItemable {
di := newItem("test", false) di := api.NewDriveItem("test", false)
return di return di
}, },
GetFunc: func(ctx context.Context, url string) (*http.Response, error) { GetFunc: func(ctx context.Context, url string) (*http.Response, error) {
@ -386,7 +307,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
{ {
name: "malware", name: "malware",
itemFunc: func() models.DriveItemable { itemFunc: func() models.DriveItemable {
di := newItem("test", false) di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{ di.SetAdditionalData(map[string]any{
"@microsoft.graph.downloadUrl": url, "@microsoft.graph.downloadUrl": url,
}) })
@ -408,7 +329,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem() {
{ {
name: "non-2xx http response", name: "non-2xx http response",
itemFunc: func() models.DriveItemable { itemFunc: func() models.DriveItemable {
di := newItem("test", false) di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{ di.SetAdditionalData(map[string]any{
"@microsoft.graph.downloadUrl": url, "@microsoft.graph.downloadUrl": url,
}) })
@ -457,7 +378,7 @@ func (suite *ItemUnitTestSuite) TestDownloadItem_ConnectionResetErrorOnFirstRead
url = "https://example.com" url = "https://example.com"
itemFunc = func() models.DriveItemable { itemFunc = func() models.DriveItemable {
di := newItem("test", false) di := api.NewDriveItem("test", false)
di.SetAdditionalData(map[string]any{ di.SetAdditionalData(map[string]any{
"@microsoft.graph.downloadUrl": url, "@microsoft.graph.downloadUrl": url,
}) })
View File
@ -92,13 +92,6 @@ func (h libraryBackupHandler) NewDrivePager(
return h.ac.NewSiteDrivePager(resourceOwner, fields) return h.ac.NewSiteDrivePager(resourceOwner, fields)
} }
func (h libraryBackupHandler) NewItemPager(
driveID, link string,
fields []string,
) api.DeltaPager[models.DriveItemable] {
return h.ac.NewDriveItemDeltaPager(driveID, link, fields)
}
func (h libraryBackupHandler) AugmentItemInfo( func (h libraryBackupHandler) AugmentItemInfo(
dii details.ItemInfo, dii details.ItemInfo,
item models.DriveItemable, item models.DriveItemable,
@ -177,6 +170,13 @@ func (h libraryBackupHandler) IncludesDir(dir string) bool {
return h.scope.Matches(selectors.SharePointLibraryFolder, dir) return h.scope.Matches(selectors.SharePointLibraryFolder, dir)
} }
func (h libraryBackupHandler) EnumerateDriveItemsDelta(
ctx context.Context,
driveID, prevDeltaLink string,
) ([]models.DriveItemable, api.DeltaUpdate, error) {
return h.ac.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink)
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Restore // Restore
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------

View File

@ -1,7 +1,6 @@
package metadata package metadata
import ( import (
"io"
"time" "time"
) )
@ -41,17 +40,3 @@ type Metadata struct {
Permissions []Permission `json:"permissions,omitempty"` Permissions []Permission `json:"permissions,omitempty"`
LinkShares []LinkShare `json:"linkShares,omitempty"` LinkShares []LinkShare `json:"linkShares,omitempty"`
} }
type Item struct {
ItemID string
Data io.ReadCloser
Mod time.Time
}
// Deleted implements an interface function. However, OneDrive items are marked
// as deleted by adding them to the exclude list so this can always return
// false.
func (i *Item) Deleted() bool { return false }
func (i *Item) ID() string { return i.ItemID }
func (i *Item) ToReader() io.ReadCloser { return i.Data }
func (i *Item) ModTime() time.Time { return i.Mod }
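
For context, the deleted type existed only to satisfy the shared item contract, which data.NewPrefetchedItem now covers generically. An approximate sketch of that contract, inferred from the `_ data.Item = ...` assertions elsewhere in this diff:

// Approximate shape of the data.Item contract the deleted type implemented.
type Item interface {
	ID() string
	ToReader() io.ReadCloser
	Deleted() bool
}

// ItemModTime is the optional extension the Mod field existed to serve.
type ItemModTime interface {
	ModTime() time.Time
}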

View File

@ -671,7 +671,7 @@ func createFolder(
ctx, ctx,
driveID, driveID,
parentFolderID, parentFolderID,
newItem(folderName, true), api.NewDriveItem(folderName, true),
control.Replace) control.Replace)
// ErrItemAlreadyExistsConflict can only occur for folders if the // ErrItemAlreadyExistsConflict can only occur for folders if the
@ -692,7 +692,7 @@ func createFolder(
ctx, ctx,
driveID, driveID,
parentFolderID, parentFolderID,
newItem(folderName, true), api.NewDriveItem(folderName, true),
control.Copy) control.Copy)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "creating folder") return nil, clues.Wrap(err, "creating folder")
@ -733,7 +733,7 @@ func restoreFile(
} }
var ( var (
item = newItem(name, false) item = api.NewDriveItem(name, false)
collisionKey = api.DriveItemCollisionKey(item) collisionKey = api.DriveItemCollisionKey(item)
collision api.DriveItemIDType collision api.DriveItemIDType
shouldDeleteOriginal bool shouldDeleteOriginal bool

View File

@ -12,7 +12,6 @@ import (
"github.com/alcionai/corso/src/internal/common/str" "github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
const ( const (
@ -47,7 +46,7 @@ type urlCache struct {
refreshMu sync.Mutex refreshMu sync.Mutex
deltaQueryCount int deltaQueryCount int
itemPager api.DeltaPager[models.DriveItemable] edid EnumerateDriveItemsDeltaer
errs *fault.Bus errs *fault.Bus
} }
@ -56,13 +55,10 @@ type urlCache struct {
func newURLCache( func newURLCache(
driveID, prevDelta string, driveID, prevDelta string,
refreshInterval time.Duration, refreshInterval time.Duration,
itemPager api.DeltaPager[models.DriveItemable], edid EnumerateDriveItemsDeltaer,
errs *fault.Bus, errs *fault.Bus,
) (*urlCache, error) { ) (*urlCache, error) {
err := validateCacheParams( err := validateCacheParams(driveID, refreshInterval, edid)
driveID,
refreshInterval,
itemPager)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "cache params") return nil, clues.Wrap(err, "cache params")
} }
@ -71,9 +67,9 @@ func newURLCache(
idToProps: make(map[string]itemProps), idToProps: make(map[string]itemProps),
lastRefreshTime: time.Time{}, lastRefreshTime: time.Time{},
driveID: driveID, driveID: driveID,
edid: edid,
prevDelta: prevDelta, prevDelta: prevDelta,
refreshInterval: refreshInterval, refreshInterval: refreshInterval,
itemPager: itemPager,
errs: errs, errs: errs,
}, },
nil nil
@ -83,7 +79,7 @@ func newURLCache(
func validateCacheParams( func validateCacheParams(
driveID string, driveID string,
refreshInterval time.Duration, refreshInterval time.Duration,
itemPager api.DeltaPager[models.DriveItemable], edid EnumerateDriveItemsDeltaer,
) error { ) error {
if len(driveID) == 0 { if len(driveID) == 0 {
return clues.New("drive id is empty") return clues.New("drive id is empty")
@ -93,8 +89,8 @@ func validateCacheParams(
return clues.New("invalid refresh interval") return clues.New("invalid refresh interval")
} }
if itemPager == nil { if edid == nil {
return clues.New("nil item pager") return clues.New("nil item enumerator")
} }
return nil return nil
@ -160,44 +156,23 @@ func (uc *urlCache) refreshCache(
// Issue a delta query to graph // Issue a delta query to graph
logger.Ctx(ctx).Info("refreshing url cache") logger.Ctx(ctx).Info("refreshing url cache")
err := uc.deltaQuery(ctx) items, du, err := uc.edid.EnumerateDriveItemsDelta(ctx, uc.driveID, uc.prevDelta)
if err != nil { if err != nil {
// clear cache
uc.idToProps = make(map[string]itemProps) uc.idToProps = make(map[string]itemProps)
return clues.Stack(err)
}
return err uc.deltaQueryCount++
if err := uc.updateCache(ctx, items, uc.errs); err != nil {
return clues.Stack(err)
} }
logger.Ctx(ctx).Info("url cache refreshed") logger.Ctx(ctx).Info("url cache refreshed")
// Update last refresh time // Update last refresh time
uc.lastRefreshTime = time.Now() uc.lastRefreshTime = time.Now()
uc.prevDelta = du.URL
return nil
}
// deltaQuery performs a delta query on the drive and updates the cache
func (uc *urlCache) deltaQuery(
ctx context.Context,
) error {
logger.Ctx(ctx).Debug("starting delta query")
// Reset item pager to remove any previous state
uc.itemPager.Reset(ctx)
_, _, _, err := collectItems(
ctx,
uc.itemPager,
uc.driveID,
"",
uc.updateCache,
map[string]string{},
uc.prevDelta,
uc.errs)
if err != nil {
return clues.Wrap(err, "delta query")
}
uc.deltaQueryCount++
return nil return nil
} }
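
Net effect: a single enumerator call replaces the pager, collector callback, and reset plumbing. A hedged construction sketch within package drive, matching the call sites in the tests below (newCacheForDrive is a hypothetical wrapper):

// ac.Drives() satisfies EnumerateDriveItemsDeltaer, as do the unit-test mocks.
func newCacheForDrive(
	driveID, prevDeltaLink string,
	ac api.Client,
) (*urlCache, error) {
	return newURLCache(
		driveID,
		prevDeltaLink, // "" starts from a fresh delta enumeration
		time.Hour,
		ac.Drives(),
		fault.New(true))
}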
@ -224,13 +199,7 @@ func (uc *urlCache) readCache(
// It assumes that cacheMu is held by caller in write mode // It assumes that cacheMu is held by caller in write mode
func (uc *urlCache) updateCache( func (uc *urlCache) updateCache(
ctx context.Context, ctx context.Context,
_, _ string,
items []models.DriveItemable, items []models.DriveItemable,
_ map[string]string,
_ map[string]string,
_ map[string]struct{},
_ map[string]map[string]string,
_ bool,
errs *fault.Bus, errs *fault.Bus,
) error { ) error {
el := errs.Local() el := errs.Local()

View File

@ -1,7 +1,6 @@
package drive package drive
import ( import (
"context"
"errors" "errors"
"io" "io"
"math/rand" "math/rand"
@ -18,15 +17,19 @@ import (
"github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/service/onedrive/mock"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
) )
// ---------------------------------------------------------------------------
// integration
// ---------------------------------------------------------------------------
type URLCacheIntegrationSuite struct { type URLCacheIntegrationSuite struct {
tester.Suite tester.Suite
ac api.Client ac api.Client
@ -68,11 +71,10 @@ func (suite *URLCacheIntegrationSuite) SetupSuite() {
// url cache // url cache
func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() { func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
var ( var (
t = suite.T() t = suite.T()
ac = suite.ac.Drives() ac = suite.ac.Drives()
driveID = suite.driveID driveID = suite.driveID
newFolderName = testdata.DefaultRestoreConfig("folder").Location newFolderName = testdata.DefaultRestoreConfig("folder").Location
driveItemPager = suite.ac.Drives().NewDriveItemDeltaPager(driveID, "", api.DriveItemSelectDefault())
) )
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
@ -82,11 +84,11 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
root, err := ac.GetRootFolder(ctx, driveID) root, err := ac.GetRootFolder(ctx, driveID)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
newFolder, err := ac.Drives().PostItemInContainer( newFolder, err := ac.PostItemInContainer(
ctx, ctx,
driveID, driveID,
ptr.Val(root.GetId()), ptr.Val(root.GetId()),
newItem(newFolderName, true), api.NewDriveItem(newFolderName, true),
control.Copy) control.Copy)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -94,33 +96,10 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
nfid := ptr.Val(newFolder.GetId()) nfid := ptr.Val(newFolder.GetId())
collectorFunc := func(
context.Context,
string,
string,
[]models.DriveItemable,
map[string]string,
map[string]string,
map[string]struct{},
map[string]map[string]string,
bool,
*fault.Bus,
) error {
return nil
}
// Get the previous delta to feed into url cache // Get the previous delta to feed into url cache
prevDelta, _, _, err := collectItems( _, du, err := ac.EnumerateDriveItemsDelta(ctx, suite.driveID, "")
ctx,
suite.ac.Drives().NewDriveItemDeltaPager(driveID, "", api.DriveItemSelectURLCache()),
suite.driveID,
"drive-name",
collectorFunc,
map[string]string{},
"",
fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, prevDelta.URL) require.NotEmpty(t, du.URL)
// Create a bunch of files in the new folder // Create a bunch of files in the new folder
var items []models.DriveItemable var items []models.DriveItemable
@ -128,11 +107,11 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
for i := 0; i < 5; i++ { for i := 0; i < 5; i++ {
newItemName := "test_url_cache_basic_" + dttm.FormatNow(dttm.SafeForTesting) newItemName := "test_url_cache_basic_" + dttm.FormatNow(dttm.SafeForTesting)
item, err := ac.Drives().PostItemInContainer( item, err := ac.PostItemInContainer(
ctx, ctx,
driveID, driveID,
nfid, nfid,
newItem(newItemName, false), api.NewDriveItem(newItemName, false),
control.Copy) control.Copy)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -142,9 +121,9 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
// Create a new URL cache with a long TTL // Create a new URL cache with a long TTL
uc, err := newURLCache( uc, err := newURLCache(
suite.driveID, suite.driveID,
prevDelta.URL, du.URL,
1*time.Hour, 1*time.Hour,
driveItemPager, suite.ac.Drives(),
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -195,6 +174,10 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
require.Equal(t, 1, uc.deltaQueryCount) require.Equal(t, 1, uc.deltaQueryCount)
} }
// ---------------------------------------------------------------------------
// unit
// ---------------------------------------------------------------------------
type URLCacheUnitSuite struct { type URLCacheUnitSuite struct {
tester.Suite tester.Suite
} }
@ -205,27 +188,20 @@ func TestURLCacheUnitSuite(t *testing.T) {
func (suite *URLCacheUnitSuite) TestGetItemProperties() { func (suite *URLCacheUnitSuite) TestGetItemProperties() {
deltaString := "delta" deltaString := "delta"
next := "next"
driveID := "drive1" driveID := "drive1"
table := []struct { table := []struct {
name string name string
pagerResult map[string][]apiMock.PagerResult[models.DriveItemable] pagerItems map[string][]models.DriveItemable
pagerErr map[string]error
expectedItemProps map[string]itemProps expectedItemProps map[string]itemProps
expectedErr require.ErrorAssertionFunc expectedErr require.ErrorAssertionFunc
cacheAssert func(*urlCache, time.Time) cacheAssert func(*urlCache, time.Time)
}{ }{
{ {
name: "single item in cache", name: "single item in cache",
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{ pagerItems: map[string][]models.DriveItemable{
driveID: { driveID: {fileItem("1", "file1", "root", "root", "https://dummy1.com", false)},
{
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
},
DeltaLink: &deltaString,
},
},
}, },
expectedItemProps: map[string]itemProps{ expectedItemProps: map[string]itemProps{
"1": { "1": {
@ -242,18 +218,13 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
}, },
{ {
name: "multiple items in cache", name: "multiple items in cache",
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{ pagerItems: map[string][]models.DriveItemable{
driveID: { driveID: {
{ fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
Values: []models.DriveItemable{ fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("1", "file1", "root", "root", "https://dummy1.com", false), fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false), fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
fileItem("3", "file3", "root", "root", "https://dummy3.com", false), fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
},
DeltaLink: &deltaString,
},
}, },
}, },
expectedItemProps: map[string]itemProps{ expectedItemProps: map[string]itemProps{
@ -287,18 +258,13 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
}, },
{ {
name: "duplicate items with potentially new urls", name: "duplicate items with potentially new urls",
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{ pagerItems: map[string][]models.DriveItemable{
driveID: { driveID: {
{ fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
Values: []models.DriveItemable{ fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("1", "file1", "root", "root", "https://dummy1.com", false), fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false), fileItem("1", "file1", "root", "root", "https://test1.com", false),
fileItem("3", "file3", "root", "root", "https://dummy3.com", false), fileItem("2", "file2", "root", "root", "https://test2.com", false),
fileItem("1", "file1", "root", "root", "https://test1.com", false),
fileItem("2", "file2", "root", "root", "https://test2.com", false),
},
DeltaLink: &deltaString,
},
}, },
}, },
expectedItemProps: map[string]itemProps{ expectedItemProps: map[string]itemProps{
@ -324,16 +290,11 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
}, },
{ {
name: "deleted items", name: "deleted items",
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{ pagerItems: map[string][]models.DriveItemable{
driveID: { driveID: {
{ fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
Values: []models.DriveItemable{ fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("1", "file1", "root", "root", "https://dummy1.com", false), fileItem("1", "file1", "root", "root", "https://dummy1.com", true),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("1", "file1", "root", "root", "https://dummy1.com", true),
},
DeltaLink: &deltaString,
},
}, },
}, },
expectedItemProps: map[string]itemProps{ expectedItemProps: map[string]itemProps{
@ -355,15 +316,8 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
}, },
{ {
name: "item not found in cache", name: "item not found in cache",
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{ pagerItems: map[string][]models.DriveItemable{
driveID: { driveID: {fileItem("1", "file1", "root", "root", "https://dummy1.com", false)},
{
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
},
DeltaLink: &deltaString,
},
},
}, },
expectedItemProps: map[string]itemProps{ expectedItemProps: map[string]itemProps{
"2": {}, "2": {},
@ -376,23 +330,10 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
}, },
}, },
{ {
name: "multi-page delta query error", name: "delta query error",
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{ pagerItems: map[string][]models.DriveItemable{},
driveID: { pagerErr: map[string]error{
{ driveID: errors.New("delta query error"),
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
},
NextLink: &next,
},
{
Values: []models.DriveItemable{
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
},
DeltaLink: &deltaString,
Err: errors.New("delta query error"),
},
},
}, },
expectedItemProps: map[string]itemProps{ expectedItemProps: map[string]itemProps{
"1": {}, "1": {},
@ -408,15 +349,10 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
{ {
name: "folder item", name: "folder item",
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{ pagerItems: map[string][]models.DriveItemable{
driveID: { driveID: {
{ fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
Values: []models.DriveItemable{ driveItem("2", "folder2", "root", "root", false, true, false),
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
driveItem("2", "folder2", "root", "root", false, true, false),
},
DeltaLink: &deltaString,
},
}, },
}, },
expectedItemProps: map[string]itemProps{ expectedItemProps: map[string]itemProps{
@ -437,15 +373,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
itemPager := &apiMock.DeltaPager[models.DriveItemable]{ medi := mock.EnumeratesDriveItemsDelta{
ToReturn: test.pagerResult[driveID], Items: test.pagerItems,
Err: test.pagerErr,
DeltaUpdate: map[string]api.DeltaUpdate{driveID: {URL: deltaString}},
} }
cache, err := newURLCache( cache, err := newURLCache(
driveID, driveID,
"", "",
1*time.Hour, 1*time.Hour,
itemPager, &medi,
fault.New(true)) fault.New(true))
require.NoError(suite.T(), err, clues.ToCore(err)) require.NoError(suite.T(), err, clues.ToCore(err))
@ -480,15 +418,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
// Test needsRefresh // Test needsRefresh
func (suite *URLCacheUnitSuite) TestNeedsRefresh() { func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
driveID := "drive1" var (
t := suite.T() t = suite.T()
refreshInterval := 1 * time.Second driveID = "drive1"
refreshInterval = 1 * time.Second
)
cache, err := newURLCache( cache, err := newURLCache(
driveID, driveID,
"", "",
refreshInterval, refreshInterval,
&apiMock.DeltaPager[models.DriveItemable]{}, &mock.EnumeratesDriveItemsDelta{},
fault.New(true)) fault.New(true))
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -510,14 +450,12 @@ func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
require.False(t, cache.needsRefresh()) require.False(t, cache.needsRefresh())
} }
// Test newURLCache
func (suite *URLCacheUnitSuite) TestNewURLCache() { func (suite *URLCacheUnitSuite) TestNewURLCache() {
// table driven tests
table := []struct { table := []struct {
name string name string
driveID string driveID string
refreshInt time.Duration refreshInt time.Duration
itemPager api.DeltaPager[models.DriveItemable] itemPager EnumerateDriveItemsDeltaer
errors *fault.Bus errors *fault.Bus
expectedErr require.ErrorAssertionFunc expectedErr require.ErrorAssertionFunc
}{ }{
@ -525,7 +463,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
name: "invalid driveID", name: "invalid driveID",
driveID: "", driveID: "",
refreshInt: 1 * time.Hour, refreshInt: 1 * time.Hour,
itemPager: &apiMock.DeltaPager[models.DriveItemable]{}, itemPager: &mock.EnumeratesDriveItemsDelta{},
errors: fault.New(true), errors: fault.New(true),
expectedErr: require.Error, expectedErr: require.Error,
}, },
@ -533,12 +471,12 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
name: "invalid refresh interval", name: "invalid refresh interval",
driveID: "drive1", driveID: "drive1",
refreshInt: 100 * time.Millisecond, refreshInt: 100 * time.Millisecond,
itemPager: &apiMock.DeltaPager[models.DriveItemable]{}, itemPager: &mock.EnumeratesDriveItemsDelta{},
errors: fault.New(true), errors: fault.New(true),
expectedErr: require.Error, expectedErr: require.Error,
}, },
{ {
name: "invalid itemPager", name: "invalid item enumerator",
driveID: "drive1", driveID: "drive1",
refreshInt: 1 * time.Hour, refreshInt: 1 * time.Hour,
itemPager: nil, itemPager: nil,
@ -549,7 +487,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
name: "valid", name: "valid",
driveID: "drive1", driveID: "drive1",
refreshInt: 1 * time.Hour, refreshInt: 1 * time.Hour,
itemPager: &apiMock.DeltaPager[models.DriveItemable]{}, itemPager: &mock.EnumeratesDriveItemsDelta{},
errors: fault.New(true), errors: fault.New(true),
expectedErr: require.NoError, expectedErr: require.NoError,
}, },

View File

@ -2,7 +2,6 @@ package groups
import ( import (
"context" "context"
"fmt"
"testing" "testing"
"time" "time"
@ -527,8 +526,6 @@ func (suite *BackupIntgSuite) TestCreateCollections() {
require.NotEmpty(t, c.FullPath().Folder(false)) require.NotEmpty(t, c.FullPath().Folder(false))
fmt.Printf("\n-----\nfolder %+v\n-----\n", c.FullPath().Folder(false))
// TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection // TODO(ashmrtn): Remove when LocationPath is made part of BackupCollection
// interface. // interface.
if !assert.Implements(t, (*data.LocationPather)(nil), c) { if !assert.Implements(t, (*data.LocationPather)(nil), c) {
@ -537,8 +534,6 @@ func (suite *BackupIntgSuite) TestCreateCollections() {
loc := c.(data.LocationPather).LocationPath().String() loc := c.(data.LocationPather).LocationPath().String()
fmt.Printf("\n-----\nloc %+v\n-----\n", c.(data.LocationPather).LocationPath().String())
require.NotEmpty(t, loc) require.NotEmpty(t, loc)
delete(test.channelNames, loc) delete(test.channelNames, loc)

View File

@ -4,7 +4,6 @@ import (
"bytes" "bytes"
"context" "context"
"io" "io"
"time"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/microsoft/kiota-abstractions-go/serialization" "github.com/microsoft/kiota-abstractions-go/serialization"
@ -40,12 +39,7 @@ const (
Pages DataCategory = 2 Pages DataCategory = 2
) )
var ( var _ data.BackupCollection = &Collection{}
_ data.BackupCollection = &Collection{}
_ data.Item = &Item{}
_ data.ItemInfo = &Item{}
_ data.ItemModTime = &Item{}
)
// Collection is the SharePoint.List implementation of data.Collection. SharePoint.Libraries collections are supported // Collection is the SharePoint.List implementation of data.Collection. SharePoint.Libraries collections are supported
// by the oneDrive.Collection as the calls are identical for populating the Collection // by the oneDrive.Collection as the calls are identical for populating the Collection
@ -120,43 +114,6 @@ func (sc *Collection) Items(
return sc.data return sc.data
} }
type Item struct {
id string
data io.ReadCloser
info *details.SharePointInfo
modTime time.Time
// true if the item was marked by graph as deleted.
deleted bool
}
func NewItem(name string, d io.ReadCloser) *Item {
return &Item{
id: name,
data: d,
}
}
func (sd *Item) ID() string {
return sd.id
}
func (sd *Item) ToReader() io.ReadCloser {
return sd.data
}
func (sd Item) Deleted() bool {
return sd.deleted
}
func (sd *Item) Info() (details.ItemInfo, error) {
return details.ItemInfo{SharePoint: sd.info}, nil
}
func (sd *Item) ModTime() time.Time {
return sd.modTime
}
func (sc *Collection) finishPopulation( func (sc *Collection) finishPopulation(
ctx context.Context, ctx context.Context,
metrics support.CollectionMetrics, metrics support.CollectionMetrics,
@ -251,20 +208,13 @@ func (sc *Collection) retrieveLists(
size := int64(len(byteArray)) size := int64(len(byteArray))
if size > 0 { if size > 0 {
t := time.Now()
if t1 := lst.GetLastModifiedDateTime(); t1 != nil {
t = *t1
}
metrics.Bytes += size metrics.Bytes += size
metrics.Successes++ metrics.Successes++
sc.data <- &Item{ sc.data <- data.NewPrefetchedItem(
id: ptr.Val(lst.GetId()), io.NopCloser(bytes.NewReader(byteArray)),
data: io.NopCloser(bytes.NewReader(byteArray)), ptr.Val(lst.GetId()),
info: ListToSPInfo(lst, size), details.ItemInfo{SharePoint: ListToSPInfo(lst, size)})
modTime: t,
}
progress <- struct{}{} progress <- struct{}{}
} }
@ -322,12 +272,10 @@ func (sc *Collection) retrievePages(
if size > 0 { if size > 0 {
metrics.Bytes += size metrics.Bytes += size
metrics.Successes++ metrics.Successes++
sc.data <- &Item{ sc.data <- data.NewPrefetchedItem(
id: ptr.Val(pg.GetId()), io.NopCloser(bytes.NewReader(byteArray)),
data: io.NopCloser(bytes.NewReader(byteArray)), ptr.Val(pg.GetId()),
info: pageToSPInfo(pg, root, size), details.ItemInfo{SharePoint: pageToSPInfo(pg, root, size)})
modTime: ptr.OrNow(pg.GetLastModifiedDateTime()),
}
progress <- struct{}{} progress <- struct{}{}
} }

View File

@ -19,6 +19,7 @@ import (
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -58,21 +59,6 @@ func TestSharePointCollectionSuite(t *testing.T) {
}) })
} }
func (suite *SharePointCollectionSuite) TestCollection_Item_Read() {
t := suite.T()
m := []byte("test message")
name := "aFile"
sc := &Item{
id: name,
data: io.NopCloser(bytes.NewReader(m)),
}
readData, err := io.ReadAll(sc.ToReader())
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, name, sc.id)
assert.Equal(t, readData, m)
}
// TestListCollection tests basic functionality to create // TestListCollection tests basic functionality to create
// SharePoint collection and to use the data stream channel. // SharePoint collection and to use the data stream channel.
func (suite *SharePointCollectionSuite) TestCollection_Items() { func (suite *SharePointCollectionSuite) TestCollection_Items() {
@ -88,7 +74,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
name, itemName string name, itemName string
scope selectors.SharePointScope scope selectors.SharePointScope
getDir func(t *testing.T) path.Path getDir func(t *testing.T) path.Path
getItem func(t *testing.T, itemName string) *Item getItem func(t *testing.T, itemName string) data.Item
}{ }{
{ {
name: "List", name: "List",
@ -106,7 +92,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
return dir return dir
}, },
getItem: func(t *testing.T, name string) *Item { getItem: func(t *testing.T, name string) data.Item {
ow := kioser.NewJsonSerializationWriter() ow := kioser.NewJsonSerializationWriter()
listing := spMock.ListDefault(name) listing := spMock.ListDefault(name)
listing.SetDisplayName(&name) listing.SetDisplayName(&name)
@ -117,11 +103,10 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
byteArray, err := ow.GetSerializedContent() byteArray, err := ow.GetSerializedContent()
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
data := &Item{ data := data.NewPrefetchedItem(
id: name, io.NopCloser(bytes.NewReader(byteArray)),
data: io.NopCloser(bytes.NewReader(byteArray)), name,
info: ListToSPInfo(listing, int64(len(byteArray))), details.ItemInfo{SharePoint: ListToSPInfo(listing, int64(len(byteArray)))})
}
return data return data
}, },
@ -142,16 +127,15 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
return dir return dir
}, },
getItem: func(t *testing.T, itemName string) *Item { getItem: func(t *testing.T, itemName string) data.Item {
byteArray := spMock.Page(itemName) byteArray := spMock.Page(itemName)
page, err := betaAPI.CreatePageFromBytes(byteArray) page, err := betaAPI.CreatePageFromBytes(byteArray)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
data := &Item{ data := data.NewPrefetchedItem(
id: itemName, io.NopCloser(bytes.NewReader(byteArray)),
data: io.NopCloser(bytes.NewReader(byteArray)), itemName,
info: betaAPI.PageInfo(page, int64(len(byteArray))), details.ItemInfo{SharePoint: betaAPI.PageInfo(page, int64(len(byteArray)))})
}
return data return data
}, },
@ -210,11 +194,10 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
byteArray, err := service.Serialize(listing) byteArray, err := service.Serialize(listing)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
listData := &Item{ listData := data.NewPrefetchedItem(
id: testName, io.NopCloser(bytes.NewReader(byteArray)),
data: io.NopCloser(bytes.NewReader(byteArray)), testName,
info: ListToSPInfo(listing, int64(len(byteArray))), details.ItemInfo{SharePoint: ListToSPInfo(listing, int64(len(byteArray)))})
}
destName := testdata.DefaultRestoreConfig("").Location destName := testdata.DefaultRestoreConfig("").Location

View File

@ -5,6 +5,7 @@ import (
"context" "context"
"encoding/json" "encoding/json"
"io" "io"
"time"
"github.com/alcionai/clues" "github.com/alcionai/clues"
@ -16,7 +17,7 @@ import (
var ( var (
_ data.BackupCollection = &MetadataCollection{} _ data.BackupCollection = &MetadataCollection{}
_ data.Item = &MetadataItem{} _ data.Item = &metadataItem{}
) )
// MetadataCollection in a simple collection that assumes all items to be // MetadataCollection in a simple collection that assumes all items to be
@ -24,7 +25,7 @@ var (
// created. This collection has no logic for lazily fetching item data. // created. This collection has no logic for lazily fetching item data.
type MetadataCollection struct { type MetadataCollection struct {
fullPath path.Path fullPath path.Path
items []MetadataItem items []metadataItem
statusUpdater support.StatusUpdater statusUpdater support.StatusUpdater
} }
@ -40,23 +41,29 @@ func NewMetadataEntry(fileName string, mData any) MetadataCollectionEntry {
return MetadataCollectionEntry{fileName, mData} return MetadataCollectionEntry{fileName, mData}
} }
func (mce MetadataCollectionEntry) toMetadataItem() (MetadataItem, error) { func (mce MetadataCollectionEntry) toMetadataItem() (metadataItem, error) {
if len(mce.fileName) == 0 { if len(mce.fileName) == 0 {
return MetadataItem{}, clues.New("missing metadata filename") return metadataItem{}, clues.New("missing metadata filename")
} }
if mce.data == nil { if mce.data == nil {
return MetadataItem{}, clues.New("missing metadata") return metadataItem{}, clues.New("missing metadata")
} }
buf := &bytes.Buffer{} buf := &bytes.Buffer{}
encoder := json.NewEncoder(buf) encoder := json.NewEncoder(buf)
if err := encoder.Encode(mce.data); err != nil { if err := encoder.Encode(mce.data); err != nil {
return MetadataItem{}, clues.Wrap(err, "serializing metadata") return metadataItem{}, clues.Wrap(err, "serializing metadata")
} }
return NewMetadataItem(mce.fileName, buf.Bytes()), nil return metadataItem{
Item: data.NewUnindexedPrefetchedItem(
io.NopCloser(buf),
mce.fileName,
time.Now()),
size: int64(buf.Len()),
}, nil
} }
// MakeMetadataCollection creates a metadata collection that has a file // MakeMetadataCollection creates a metadata collection that has a file
@ -71,7 +78,7 @@ func MakeMetadataCollection(
return nil, nil return nil, nil
} }
items := make([]MetadataItem, 0, len(metadata)) items := make([]metadataItem, 0, len(metadata))
for _, md := range metadata { for _, md := range metadata {
item, err := md.toMetadataItem() item, err := md.toMetadataItem()
@ -89,7 +96,7 @@ func MakeMetadataCollection(
func NewMetadataCollection( func NewMetadataCollection(
p path.Path, p path.Path,
items []MetadataItem, items []metadataItem,
statusUpdater support.StatusUpdater, statusUpdater support.StatusUpdater,
) *MetadataCollection { ) *MetadataCollection {
return &MetadataCollection{ return &MetadataCollection{
@ -148,7 +155,7 @@ func (md MetadataCollection) Items(
defer close(res) defer close(res)
for _, item := range md.items { for _, item := range md.items {
totalBytes += int64(len(item.data)) totalBytes += item.size
res <- item res <- item
} }
}() }()
@ -156,36 +163,7 @@ func (md MetadataCollection) Items(
return res return res
} }
// MetadataItem is an in-memory data.Item implementation. MetadataItem does type metadataItem struct {
// not implement additional interfaces like data.ItemInfo, so it should only data.Item
// be used for items with a small amount of content that don't need to be added size int64
// to backup details.
//
// Currently the expected use-case for this struct are storing metadata for a
// backup like delta tokens or a mapping of container IDs to container paths.
type MetadataItem struct {
// uuid is an ID that can be used to refer to the item.
uuid string
// data is a buffer of data that the item refers to.
data []byte
}
func NewMetadataItem(uuid string, itemData []byte) MetadataItem {
return MetadataItem{
uuid: uuid,
data: itemData,
}
}
func (mi MetadataItem) ID() string {
return mi.uuid
}
// TODO(ashmrtn): Fill in once we know how to handle this.
func (mi MetadataItem) Deleted() bool {
return false
}
func (mi MetadataItem) ToReader() io.ReadCloser {
return io.NopCloser(bytes.NewReader(mi.data))
} }
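
A hedged usage sketch of the collection builder, assuming the (path, entries, statusUpdater) parameters implied by the body above; the helper, entry name, and payload are illustrative:

// makeDeltaMetadata is hypothetical; the return type is assumed to be a
// data.BackupCollection per the interface assertion at the top of the file.
func makeDeltaMetadata(
	p path.Path,
	deltaTokens map[string]string,
	su support.StatusUpdater,
) (data.BackupCollection, error) {
	// Each entry is JSON-encoded into one metadataItem by toMetadataItem.
	coll, err := MakeMetadataCollection(
		p,
		[]MetadataCollectionEntry{NewMetadataEntry("delta", deltaTokens)},
		su)
	if err != nil {
		return nil, clues.Stack(err)
	}

	return coll, nil
}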

View File

@ -1,9 +1,11 @@
package graph package graph
import ( import (
"bytes"
"encoding/json" "encoding/json"
"io" "io"
"testing" "testing"
"time"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/google/uuid" "github.com/google/uuid"
@ -11,6 +13,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/support" "github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -63,10 +66,18 @@ func (suite *MetadataCollectionUnitSuite) TestItems() {
len(itemData), len(itemData),
"Requires same number of items and data") "Requires same number of items and data")
items := []MetadataItem{} items := []metadataItem{}
for i := 0; i < len(itemNames); i++ { for i := 0; i < len(itemNames); i++ {
items = append(items, NewMetadataItem(itemNames[i], itemData[i])) items = append(
items,
metadataItem{
Item: data.NewUnindexedPrefetchedItem(
io.NopCloser(bytes.NewReader(itemData[i])),
itemNames[i],
time.Time{}),
size: int64(len(itemData[i])),
})
} }
p, err := path.Build( p, err := path.Build(

View File

@ -751,10 +751,6 @@ func compareDriveItem(
} }
if isMeta { if isMeta {
var itemType *metadata.Item
assert.IsType(t, itemType, item)
var ( var (
itemMeta metadata.Metadata itemMeta metadata.Metadata
expectedMeta metadata.Metadata expectedMeta metadata.Metadata

View File

@ -55,7 +55,10 @@ func ProduceBackupCollections(
"group_id", clues.Hide(bpc.ProtectedResource.ID()), "group_id", clues.Hide(bpc.ProtectedResource.ID()),
"group_name", clues.Hide(bpc.ProtectedResource.Name())) "group_name", clues.Hide(bpc.ProtectedResource.Name()))
group, err := ac.Groups().GetByID(ctx, bpc.ProtectedResource.ID()) group, err := ac.Groups().GetByID(
ctx,
bpc.ProtectedResource.ID(),
api.CallConfig{})
if err != nil { if err != nil {
return nil, nil, false, clues.Wrap(err, "getting group").WithClues(ctx) return nil, nil, false, clues.Wrap(err, "getting group").WithClues(ctx)
} }

View File

@ -7,18 +7,15 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/pkg/filters" "github.com/alcionai/corso/src/pkg/filters"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
type getByIDer interface {
GetByID(ctx context.Context, identifier string) (models.Groupable, error)
}
func IsServiceEnabled( func IsServiceEnabled(
ctx context.Context, ctx context.Context,
gbi getByIDer, gbi api.GetByIDer[models.Groupable],
resource string, resource string,
) (bool, error) { ) (bool, error) {
resp, err := gbi.GetByID(ctx, resource) resp, err := gbi.GetByID(ctx, resource, api.CallConfig{})
if err != nil { if err != nil {
return false, clues.Wrap(err, "getting group").WithClues(ctx) return false, clues.Wrap(err, "getting group").WithClues(ctx)
} }

View File

@ -12,6 +12,7 @@ import (
"github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
type EnabledUnitSuite struct { type EnabledUnitSuite struct {
@ -22,14 +23,18 @@ func TestEnabledUnitSuite(t *testing.T) {
suite.Run(t, &EnabledUnitSuite{Suite: tester.NewUnitSuite(t)}) suite.Run(t, &EnabledUnitSuite{Suite: tester.NewUnitSuite(t)})
} }
var _ getByIDer = mockGBI{} var _ api.GetByIDer[models.Groupable] = mockGBI{}
type mockGBI struct { type mockGBI struct {
group models.Groupable group models.Groupable
err error err error
} }
func (m mockGBI) GetByID(ctx context.Context, identifier string) (models.Groupable, error) { func (m mockGBI) GetByID(
ctx context.Context,
identifier string,
_ api.CallConfig,
) (models.Groupable, error) {
return m.group, m.err return m.group, m.err
} }
@ -56,13 +61,13 @@ func (suite *EnabledUnitSuite) TestIsServiceEnabled() {
table := []struct { table := []struct {
name string name string
mock func(context.Context) getByIDer mock func(context.Context) api.GetByIDer[models.Groupable]
expect assert.BoolAssertionFunc expect assert.BoolAssertionFunc
expectErr assert.ErrorAssertionFunc expectErr assert.ErrorAssertionFunc
}{ }{
{ {
name: "ok", name: "ok",
mock: func(ctx context.Context) getByIDer { mock: func(ctx context.Context) api.GetByIDer[models.Groupable] {
return mockGBI{ return mockGBI{
group: unified, group: unified,
} }
@ -72,7 +77,7 @@ func (suite *EnabledUnitSuite) TestIsServiceEnabled() {
}, },
{ {
name: "non-unified group", name: "non-unified group",
mock: func(ctx context.Context) getByIDer { mock: func(ctx context.Context) api.GetByIDer[models.Groupable] {
return mockGBI{ return mockGBI{
group: nonUnified, group: nonUnified,
} }
@ -82,7 +87,7 @@ func (suite *EnabledUnitSuite) TestIsServiceEnabled() {
}, },
{ {
name: "group not found", name: "group not found",
mock: func(ctx context.Context) getByIDer { mock: func(ctx context.Context) api.GetByIDer[models.Groupable] {
return mockGBI{ return mockGBI{
err: graph.Stack(ctx, odErrMsg(string(graph.RequestResourceNotFound), "message")), err: graph.Stack(ctx, odErrMsg(string(graph.RequestResourceNotFound), "message")),
} }
@ -92,7 +97,7 @@ func (suite *EnabledUnitSuite) TestIsServiceEnabled() {
}, },
{ {
name: "arbitrary error", name: "arbitrary error",
mock: func(ctx context.Context) getByIDer { mock: func(ctx context.Context) api.GetByIDer[models.Groupable] {
return mockGBI{ return mockGBI{
err: assert.AnError, err: assert.AnError,
} }

View File

@ -8,11 +8,13 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/drives" "github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts" odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -22,6 +24,8 @@ import (
type BackupHandler struct { type BackupHandler struct {
ItemInfo details.ItemInfo ItemInfo details.ItemInfo
DriveItemEnumeration EnumeratesDriveItemsDelta
GI GetsItem GI GetsItem
GIP GetsItemPermission GIP GetsItemPermission
@ -55,6 +59,7 @@ func DefaultOneDriveBH(resourceOwner string) *BackupHandler {
OneDrive: &details.OneDriveInfo{}, OneDrive: &details.OneDriveInfo{},
Extension: &details.ExtensionData{}, Extension: &details.ExtensionData{},
}, },
DriveItemEnumeration: EnumeratesDriveItemsDelta{},
GI: GetsItem{Err: clues.New("not defined")}, GI: GetsItem{Err: clues.New("not defined")},
GIP: GetsItemPermission{Err: clues.New("not defined")}, GIP: GetsItemPermission{Err: clues.New("not defined")},
PathPrefixFn: defaultOneDrivePathPrefixer, PathPrefixFn: defaultOneDrivePathPrefixer,
@ -124,10 +129,6 @@ func (h BackupHandler) NewDrivePager(string, []string) api.Pager[models.Driveabl
return h.DrivePagerV return h.DrivePagerV
} }
func (h BackupHandler) NewItemPager(driveID string, _ string, _ []string) api.DeltaPager[models.DriveItemable] {
return h.ItemPagerV[driveID]
}
func (h BackupHandler) FormatDisplayPath(_ string, pb *path.Builder) string { func (h BackupHandler) FormatDisplayPath(_ string, pb *path.Builder) string {
return "/" + pb.String() return "/" + pb.String()
} }
@ -152,6 +153,13 @@ func (h *BackupHandler) Get(context.Context, string, map[string]string) (*http.R
return h.GetResps[c], h.GetErrs[c] return h.GetResps[c], h.GetErrs[c]
} }
func (h BackupHandler) EnumerateDriveItemsDelta(
ctx context.Context,
driveID, prevDeltaLink string,
) ([]models.DriveItemable, api.DeltaUpdate, error) {
return h.DriveItemEnumeration.EnumerateDriveItemsDelta(ctx, driveID, prevDeltaLink)
}
func (h BackupHandler) GetItem(ctx context.Context, _, _ string) (models.DriveItemable, error) { func (h BackupHandler) GetItem(ctx context.Context, _, _ string) (models.DriveItemable, error) {
return h.GI.GetItem(ctx, "", "") return h.GI.GetItem(ctx, "", "")
} }
@ -254,6 +262,65 @@ func (m GetsItem) GetItem(
return m.Item, m.Err return m.Item, m.Err
} }
// ---------------------------------------------------------------------------
// Enumerates Drive Items
// ---------------------------------------------------------------------------
type EnumeratesDriveItemsDelta struct {
Items map[string][]models.DriveItemable
DeltaUpdate map[string]api.DeltaUpdate
Err map[string]error
}
func (edi EnumeratesDriveItemsDelta) EnumerateDriveItemsDelta(
_ context.Context,
driveID, _ string,
) (
[]models.DriveItemable,
api.DeltaUpdate,
error,
) {
return edi.Items[driveID], edi.DeltaUpdate[driveID], edi.Err[driveID]
}
func PagerResultToEDID(
m map[string][]apiMock.PagerResult[models.DriveItemable],
) EnumeratesDriveItemsDelta {
edi := EnumeratesDriveItemsDelta{
Items: map[string][]models.DriveItemable{},
DeltaUpdate: map[string]api.DeltaUpdate{},
Err: map[string]error{},
}
for driveID, results := range m {
var (
err error
items = []models.DriveItemable{}
deltaUpdate api.DeltaUpdate
)
for _, pr := range results {
items = append(items, pr.Values...)
if pr.DeltaLink != nil {
deltaUpdate = api.DeltaUpdate{URL: ptr.Val(pr.DeltaLink)}
}
if pr.Err != nil {
err = pr.Err
}
deltaUpdate.Reset = deltaUpdate.Reset || pr.ResetDelta
}
edi.Items[driveID] = items
edi.Err[driveID] = err
edi.DeltaUpdate[driveID] = deltaUpdate
}
return edi
}
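
A short example of the adapter in use, for fixtures still written as pager results (the drive ID, item, and token are illustrative):

// Returns a mock enumerator equivalent to a single one-page delta result.
func exampleEDID() EnumeratesDriveItemsDelta {
	delta := "delta-token"

	return PagerResultToEDID(map[string][]apiMock.PagerResult[models.DriveItemable]{
		"drive1": {{
			Values:    []models.DriveItemable{api.NewDriveItem("file1", false)},
			DeltaLink: &delta,
		}},
	})
}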
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Get Item Permissioner // Get Item Permissioner
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------

View File

@ -4,13 +4,14 @@ import (
"bytes" "bytes"
"io" "io"
"testing" "testing"
"time"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/m365/collection/site" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/service/sharepoint/api" "github.com/alcionai/corso/src/internal/m365/service/sharepoint/api"
spMock "github.com/alcionai/corso/src/internal/m365/service/sharepoint/mock" spMock "github.com/alcionai/corso/src/internal/m365/service/sharepoint/mock"
@ -108,9 +109,10 @@ func (suite *SharePointPageSuite) TestRestoreSinglePage() {
//nolint:lll //nolint:lll
byteArray := spMock.Page("Byte Test") byteArray := spMock.Page("Byte Test")
pageData := site.NewItem( pageData := data.NewUnindexedPrefetchedItem(
io.NopCloser(bytes.NewReader(byteArray)),
testName, testName,
io.NopCloser(bytes.NewReader(byteArray))) time.Now())
info, err := api.RestoreSitePage( info, err := api.RestoreSitePage(
ctx, ctx,

View File

@ -90,12 +90,9 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
var ( var (
paths = map[string]string{} paths = map[string]string{}
newPaths = map[string]string{} currPaths = map[string]string{}
excluded = map[string]struct{}{} excluded = map[string]struct{}{}
itemColls = map[string]map[string]string{ collMap = map[string]map[string]*drive.Collection{
driveID: {},
}
collMap = map[string]map[string]*drive.Collection{
driveID: {}, driveID: {},
} }
) )
@ -109,15 +106,14 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
c.CollectionMap = collMap c.CollectionMap = collMap
err := c.UpdateCollections( _, err := c.UpdateCollections(
ctx, ctx,
driveID, driveID,
"General", "General",
test.items, test.items,
paths, paths,
newPaths, currPaths,
excluded, excluded,
itemColls,
true, true,
fault.New(true)) fault.New(true))

View File

@ -6,6 +6,7 @@ import (
"bytes" "bytes"
"context" "context"
"io" "io"
"time"
"github.com/alcionai/clues" "github.com/alcionai/clues"
@ -128,7 +129,7 @@ type streamCollection struct {
// folderPath indicates what level in the hierarchy this collection // folderPath indicates what level in the hierarchy this collection
// represents // represents
folderPath path.Path folderPath path.Path
item *streamItem item data.Item
} }
func (dc *streamCollection) FullPath() path.Path { func (dc *streamCollection) FullPath() path.Path {
@ -157,27 +158,6 @@ func (dc *streamCollection) Items(context.Context, *fault.Bus) <-chan data.Item
return items return items
} }
// ---------------------------------------------------------------------------
// item
// ---------------------------------------------------------------------------
type streamItem struct {
name string
data []byte
}
func (di *streamItem) ID() string {
return di.name
}
func (di *streamItem) ToReader() io.ReadCloser {
return io.NopCloser(bytes.NewReader(di.data))
}
func (di *streamItem) Deleted() bool {
return false
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// common reader/writer/deleter // common reader/writer/deleter
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -204,10 +184,10 @@ func collect(
dc := streamCollection{ dc := streamCollection{
folderPath: p, folderPath: p,
item: &streamItem{ item: data.NewUnindexedPrefetchedItem(
name: col.itemName, io.NopCloser(bytes.NewReader(bs)),
data: bs, col.itemName,
}, time.Now()),
} }
return &dc, nil return &dc, nil

View File

@ -384,20 +384,20 @@ func (pec printableErrCore) Values() []string {
// funcs, and the function that spawned the local bus should always // funcs, and the function that spawned the local bus should always
// return `local.Failure()` to ensure that hard failures are propagated // return `local.Failure()` to ensure that hard failures are propagated
// back upstream. // back upstream.
func (e *Bus) Local() *localBus { func (e *Bus) Local() *LocalBus {
return &localBus{ return &LocalBus{
mu: &sync.Mutex{}, mu: &sync.Mutex{},
bus: e, bus: e,
} }
} }
type localBus struct { type LocalBus struct {
mu *sync.Mutex mu *sync.Mutex
bus *Bus bus *Bus
current error current error
} }
func (e *localBus) AddRecoverable(ctx context.Context, err error) { func (e *LocalBus) AddRecoverable(ctx context.Context, err error) {
if err == nil { if err == nil {
return return
} }
@ -422,7 +422,7 @@ func (e *localBus) AddRecoverable(ctx context.Context, err error) {
// 2. Skipping avoids a permanent and consistent failure. If // 2. Skipping avoids a permanent and consistent failure. If
// the underlying reason is transient or otherwise recoverable, // the underlying reason is transient or otherwise recoverable,
// the item should not be skipped. // the item should not be skipped.
func (e *localBus) AddSkip(ctx context.Context, s *Skipped) { func (e *LocalBus) AddSkip(ctx context.Context, s *Skipped) {
if s == nil { if s == nil {
return return
} }
@ -437,7 +437,7 @@ func (e *localBus) AddSkip(ctx context.Context, s *Skipped) {
// It does not return the underlying bus.Failure(), only the failure // It does not return the underlying bus.Failure(), only the failure
// that was recorded within the local bus instance. This error should // that was recorded within the local bus instance. This error should
// get returned by any func which created a local bus. // get returned by any func which created a local bus.
func (e *localBus) Failure() error { func (e *LocalBus) Failure() error {
return e.current return e.current
} }
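
The rename keeps the documented pattern intact; a sketch of that pattern, where handle is a hypothetical per-item worker:

func processAll(ctx context.Context, ids []string, errs *fault.Bus) error {
	el := errs.Local()

	for _, id := range ids {
		if el.Failure() != nil {
			break // a hard failure ends the local work early
		}

		if err := handle(ctx, id); err != nil {
			// Recoverable here; promoted to the bus failure when fail-fast.
			el.AddRecoverable(ctx, err)
		}
	}

	// Return only the local failure, per the comment above.
	return el.Failure()
}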

View File

@ -187,6 +187,8 @@ func (r *repository) Initialize(
} }
}() }()
observe.Message(ctx, "Initializing repository")
kopiaRef := kopia.NewConn(r.Storage) kopiaRef := kopia.NewConn(r.Storage)
if err := kopiaRef.Initialize(ctx, r.Opts.Repo, retentionOpts); err != nil { if err := kopiaRef.Initialize(ctx, r.Opts.Repo, retentionOpts); err != nil {
// replace common internal errors so that sdk users can check results with errors.Is() // replace common internal errors so that sdk users can check results with errors.Is()
@ -237,8 +239,7 @@ func (r *repository) Connect(ctx context.Context) (err error) {
} }
}() }()
progressBar := observe.MessageWithCompletion(ctx, "Connecting to repository") observe.Message(ctx, "Connecting to repository")
defer close(progressBar)
kopiaRef := kopia.NewConn(r.Storage) kopiaRef := kopia.NewConn(r.Storage)
if err := kopiaRef.Connect(ctx, r.Opts.Repo); err != nil { if err := kopiaRef.Connect(ctx, r.Opts.Repo); err != nil {

View File

@ -697,7 +697,7 @@ func (s ExchangeScope) IncludesCategory(cat exchangeCategory) bool {
// returns true if the category is included in the scope's data type, // returns true if the category is included in the scope's data type,
// and the value is set to Any(). // and the value is set to Any().
func (s ExchangeScope) IsAny(cat exchangeCategory) bool { func (s ExchangeScope) IsAny(cat exchangeCategory) bool {
return isAnyTarget(s, cat) return IsAnyTarget(s, cat)
} }
// Get returns the data category in the scope. If the scope // Get returns the data category in the scope. If the scope

View File

@ -699,7 +699,7 @@ func (s GroupsScope) IncludesCategory(cat groupsCategory) bool {
// returns true if the category is included in the scope's data type, // returns true if the category is included in the scope's data type,
// and the value is set to Any(). // and the value is set to Any().
func (s GroupsScope) IsAny(cat groupsCategory) bool { func (s GroupsScope) IsAny(cat groupsCategory) bool {
return isAnyTarget(s, cat) return IsAnyTarget(s, cat)
} }
// Get returns the data category in the scope. If the scope // Get returns the data category in the scope. If the scope

View File

@ -484,7 +484,7 @@ func (s OneDriveScope) Matches(cat oneDriveCategory, target string) bool {
// returns true if the category is included in the scope's data type, // returns true if the category is included in the scope's data type,
// and the value is set to Any(). // and the value is set to Any().
func (s OneDriveScope) IsAny(cat oneDriveCategory) bool { func (s OneDriveScope) IsAny(cat oneDriveCategory) bool {
return isAnyTarget(s, cat) return IsAnyTarget(s, cat)
} }
// Get returns the data category in the scope. If the scope // Get returns the data category in the scope. If the scope

View File

@ -694,7 +694,7 @@ func matchesPathValues[T scopeT, C categoryT](
return false return false
} }
if isAnyTarget(sc, cc) { if IsAnyTarget(sc, cc) {
// continue, not return: all path keys must match the entry to succeed // continue, not return: all path keys must match the entry to succeed
continue continue
} }
@ -795,7 +795,7 @@ func isNoneTarget[T scopeT, C categoryT](s T, cat C) bool {
// returns true if the category is included in the scope's category type, // returns true if the category is included in the scope's category type,
// and the value is set to Any(). // and the value is set to Any().
func isAnyTarget[T scopeT, C categoryT](s T, cat C) bool { func IsAnyTarget[T scopeT, C categoryT](s T, cat C) bool {
if !typeAndCategoryMatches(cat, s.categorizer()) { if !typeAndCategoryMatches(cat, s.categorizer()) {
return false return false
} }

View File

@ -125,14 +125,14 @@ func (suite *SelectorScopesSuite) TestGetCatValue() {
func (suite *SelectorScopesSuite) TestIsAnyTarget() { func (suite *SelectorScopesSuite) TestIsAnyTarget() {
t := suite.T() t := suite.T()
stub := stubScope("") stub := stubScope("")
assert.True(t, isAnyTarget(stub, rootCatStub)) assert.True(t, IsAnyTarget(stub, rootCatStub))
assert.True(t, isAnyTarget(stub, leafCatStub)) assert.True(t, IsAnyTarget(stub, leafCatStub))
assert.False(t, isAnyTarget(stub, mockCategorizer("smarf"))) assert.False(t, IsAnyTarget(stub, mockCategorizer("smarf")))
stub = stubScope("none") stub = stubScope("none")
assert.False(t, isAnyTarget(stub, rootCatStub)) assert.False(t, IsAnyTarget(stub, rootCatStub))
assert.False(t, isAnyTarget(stub, leafCatStub)) assert.False(t, IsAnyTarget(stub, leafCatStub))
assert.False(t, isAnyTarget(stub, mockCategorizer("smarf"))) assert.False(t, IsAnyTarget(stub, mockCategorizer("smarf")))
} }
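
With the export, code outside the selectors package can make the same check the IsAny methods wrap; a hedged sketch (the ExchangeMailFolder constant is assumed from the exported exchange categories, not confirmed by this diff):

func scopeTargetsAllMailFolders(s selectors.ExchangeScope) bool {
	// Equivalent to s.IsAny(selectors.ExchangeMailFolder).
	return selectors.IsAnyTarget(s, selectors.ExchangeMailFolder)
}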
var reduceTestTable = []struct { var reduceTestTable = []struct {

View File

@ -625,7 +625,7 @@ func (s SharePointScope) IncludesCategory(cat sharePointCategory) bool {
// returns true if the category is included in the scope's data type, // returns true if the category is included in the scope's data type,
// and the value is set to Any(). // and the value is set to Any().
func (s SharePointScope) IsAny(cat sharePointCategory) bool { func (s SharePointScope) IsAny(cat sharePointCategory) bool {
return isAnyTarget(s, cat) return IsAnyTarget(s, cat)
} }
// Get returns the data category in the scope. If the scope // Get returns the data category in the scope. If the scope

View File

@ -24,7 +24,7 @@ import (
type Client struct { type Client struct {
Credentials account.M365Config Credentials account.M365Config
// The Stable service is re-usable for any non-paged request. // The Stable service is re-usable for any request.
// This allows us to maintain performance across async requests. // This allows us to maintain performance across async requests.
Stable graph.Servicer Stable graph.Servicer
@ -126,3 +126,15 @@ func (c Client) Get(
type CallConfig struct { type CallConfig struct {
Expand []string Expand []string
} }
// ---------------------------------------------------------------------------
// common interfaces
// ---------------------------------------------------------------------------
type GetByIDer[T any] interface {
GetByID(
ctx context.Context,
identifier string,
cc CallConfig,
) (T, error)
}
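
Anything with a matching method set satisfies the interface; a compile-checked sketch mirroring the groups-service mock later in this diff (mockGroupGetter is hypothetical):

type mockGroupGetter struct {
	group models.Groupable
	err   error
}

func (m mockGroupGetter) GetByID(
	_ context.Context,
	_ string,
	_ CallConfig,
) (models.Groupable, error) {
	return m.group, m.err
}

// Compile-time interface check.
var _ GetByIDer[models.Groupable] = mockGroupGetter{}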

View File

@ -101,7 +101,7 @@ func idAnd(ss ...string) []string {
// exported // exported
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
func DriveItemSelectDefault() []string { func DefaultDriveItemProps() []string {
return idAnd( return idAnd(
"content.downloadUrl", "content.downloadUrl",
"createdBy", "createdBy",

View File

@ -1,11 +0,0 @@
package api
// DeltaUpdate holds the current delta token. It is normally
// produced when aggregating the addition and removal of items in
// a delta-queryable folder.

type DeltaUpdate struct {
// the deltaLink itself
URL string
// true if the old delta was marked as invalid
Reset bool
}

View File

@ -84,6 +84,26 @@ func (c Drives) GetRootFolder(
return root, nil return root, nil
} }
// TODO: pagination controller needed for completion.
func (c Drives) GetFolderChildren(
ctx context.Context,
driveID, folderID string,
) ([]models.DriveItemable, error) {
response, err := c.Stable.
Client().
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(folderID).
Children().
Get(ctx, nil)
if err != nil {
return nil, graph.Wrap(ctx, err, "getting folder children")
}
return response.GetValue(), nil
}
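
Until the TODO'd pagination controller lands, only the first page of children is returned. A hedged sketch of a one-level walk over the result; the childLister interface is an assumption introduced here so the snippet stays self-contained (api.Drives is expected to satisfy it):

package example

import (
	"context"

	"github.com/microsoftgraph/msgraph-sdk-go/models"
)

// childLister abstracts Drives.GetFolderChildren for this sketch.
type childLister interface {
	GetFolderChildren(ctx context.Context, driveID, folderID string) ([]models.DriveItemable, error)
}

// countSubfolders tallies folder-typed children one level down.
func countSubfolders(ctx context.Context, cl childLister, driveID, folderID string) (int, error) {
	children, err := cl.GetFolderChildren(ctx, driveID, folderID)
	if err != nil {
		return 0, err
	}

	n := 0

	for _, c := range children {
		// items carrying a folder facet are containers
		if c.GetFolder() != nil {
			n++
		}
	}

	return n, nil
}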
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Items // Items
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -331,6 +351,10 @@ func (c Drives) PostItemLinkShareUpdate(
return itm, nil return itm, nil
} }
// ---------------------------------------------------------------------------
// helper funcs
// ---------------------------------------------------------------------------
// DriveItemCollisionKey constructs a key from the item name. // DriveItemCollisionKey constructs a key from the item name.
// collision keys are used to identify duplicate item conflicts for handling advanced restoration config. // collision keys are used to identify duplicate item conflicts for handling advanced restoration config.
func DriveItemCollisionKey(item models.DriveItemable) string { func DriveItemCollisionKey(item models.DriveItemable) string {
@ -340,3 +364,17 @@ func DriveItemCollisionKey(item models.DriveItemable) string {
return ptr.Val(item.GetName()) return ptr.Val(item.GetName())
} }
// NewDriveItem initializes a `models.DriveItemable` with either a folder or file entry.
func NewDriveItem(name string, folder bool) *models.DriveItem {
itemToCreate := models.NewDriveItem()
itemToCreate.SetName(&name)
if folder {
itemToCreate.SetFolder(models.NewFolder())
} else {
itemToCreate.SetFile(models.NewFile())
}
return itemToCreate
}
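
With the facet chosen by the boolean, callers build folder and file entries the same way. Illustrative usage (names are placeholders):

package example

import (
	"github.com/alcionai/corso/src/pkg/services/m365/api"
)

func buildRestoreItems() {
	// a container to restore into: folder=true attaches a Folder facet
	folder := api.NewDriveItem("restored-folder", true)

	// a file entry for collision testing: folder=false attaches a File facet
	file := api.NewDriveItem("report.txt", false)

	_ = folder
	_ = file
}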

View File

@ -15,6 +15,11 @@ import (
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
) )
type DriveItemIDType struct {
ItemID string
IsFolder bool
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// non-delta item pager // non-delta item pager
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -65,11 +70,6 @@ func (p *driveItemPageCtrl) ValidModTimes() bool {
return true return true
} }
type DriveItemIDType struct {
ItemID string
IsFolder bool
}
func (c Drives) GetItemsInContainerByCollisionKey( func (c Drives) GetItemsInContainerByCollisionKey(
ctx context.Context, ctx context.Context,
driveID, containerID string, driveID, containerID string,
@ -131,9 +131,9 @@ type DriveItemDeltaPageCtrl struct {
options *drives.ItemItemsItemDeltaRequestBuilderGetRequestConfiguration options *drives.ItemItemsItemDeltaRequestBuilderGetRequestConfiguration
} }
func (c Drives) NewDriveItemDeltaPager( func (c Drives) newDriveItemDeltaPager(
driveID, link string, driveID, prevDeltaLink string,
selectFields []string, selectProps ...string,
) *DriveItemDeltaPageCtrl { ) *DriveItemDeltaPageCtrl {
preferHeaderItems := []string{ preferHeaderItems := []string{
"deltashowremovedasdeleted", "deltashowremovedasdeleted",
@ -142,28 +142,32 @@ func (c Drives) NewDriveItemDeltaPager(
"hierarchicalsharing", "hierarchicalsharing",
} }
requestConfig := &drives.ItemItemsItemDeltaRequestBuilderGetRequestConfiguration{ options := &drives.ItemItemsItemDeltaRequestBuilderGetRequestConfiguration{
Headers: newPreferHeaders(preferHeaderItems...), Headers: newPreferHeaders(preferHeaderItems...),
QueryParameters: &drives.ItemItemsItemDeltaRequestBuilderGetQueryParameters{ QueryParameters: &drives.ItemItemsItemDeltaRequestBuilderGetQueryParameters{},
Select: selectFields, }
},
if len(selectProps) > 0 {
options.QueryParameters.Select = selectProps
}
builder := c.Stable.
Client().
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(onedrive.RootID).
Delta()
if len(prevDeltaLink) > 0 {
builder = drives.NewItemItemsItemDeltaRequestBuilder(prevDeltaLink, c.Stable.Adapter())
} }
res := &DriveItemDeltaPageCtrl{ res := &DriveItemDeltaPageCtrl{
gs: c.Stable, gs: c.Stable,
driveID: driveID, driveID: driveID,
options: requestConfig, options: options,
builder: c.Stable. builder: builder,
Client().
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(onedrive.RootID).
Delta(),
}
if len(link) > 0 {
res.builder = drives.NewItemItemsItemDeltaRequestBuilder(link, c.Stable.Adapter())
} }
return res return res
@ -193,6 +197,27 @@ func (p *DriveItemDeltaPageCtrl) ValidModTimes() bool {
return true return true
} }
// EnumerateDriveItemsDelta enumerates all items in the specified drive
// using delta queries, returning the items and the updated delta link.
func (c Drives) EnumerateDriveItemsDelta(
ctx context.Context,
driveID string,
prevDeltaLink string,
) (
[]models.DriveItemable,
DeltaUpdate,
error,
) {
pager := c.newDriveItemDeltaPager(driveID, prevDeltaLink, DefaultDriveItemProps()...)
items, du, err := deltaEnumerateItems[models.DriveItemable](ctx, pager, prevDeltaLink)
if err != nil {
return nil, du, clues.Stack(err)
}
return items, du, nil
}
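
The returned DeltaUpdate carries the link to resume from and whether the old token was invalidated. A rough sketch of the intended round-trip, using local stand-ins (enumerateFn and deltaUpdate mirror the real api types but are not them):

package main

import (
	"context"
	"fmt"
)

// deltaUpdate mirrors the api.DeltaUpdate shape for this sketch.
type deltaUpdate struct {
	URL   string
	Reset bool
}

// enumerateFn stands in for api.Drives.EnumerateDriveItemsDelta.
type enumerateFn func(ctx context.Context, driveID, prevDeltaLink string) ([]string, deltaUpdate, error)

// syncDrive shows the intended token round-trip: pass the previously
// stored delta link, and rebuild local state when the token was reset.
func syncDrive(ctx context.Context, enumerate enumerateFn, driveID, storedLink string) (string, error) {
	items, du, err := enumerate(ctx, driveID, storedLink)
	if err != nil {
		return "", err
	}

	if du.Reset {
		// the old token was invalidated server-side; cached state is stale
		fmt.Println("delta reset: discarding previously cached items")
	}

	fmt.Printf("enumerated %d items\n", len(items))

	// persist du.URL and pass it back in on the next incremental run
	return du.URL, nil
}

func main() {
	fake := func(ctx context.Context, driveID, prev string) ([]string, deltaUpdate, error) {
		return []string{"a", "b"}, deltaUpdate{URL: "next-link"}, nil
	}

	next, err := syncDrive(context.Background(), fake, "drive-id", "")
	fmt.Println(next, err)
}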
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// user's drives pager // user's drives pager
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------

View File

@ -178,3 +178,18 @@ func (suite *DrivePagerIntgSuite) TestDrives_GetItemIDsInContainer() {
}) })
} }
} }
func (suite *DrivePagerIntgSuite) TestEnumerateDriveItems() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
items, du, err := suite.its.
ac.
Drives().
EnumerateDriveItemsDelta(ctx, suite.its.user.driveID, "")
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, items, "no items found in user's drive")
assert.NotEmpty(t, du.URL, "should have a delta link")
}

View File

@ -17,6 +17,7 @@ import (
"github.com/alcionai/corso/src/internal/tester/tconfig" "github.com/alcionai/corso/src/internal/tester/tconfig"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/testdata" "github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/services/m365/api"
) )
type DriveAPIIntgSuite struct { type DriveAPIIntgSuite struct {
@ -50,20 +51,6 @@ func (suite *DriveAPIIntgSuite) TestDrives_CreatePagerAndGetPage() {
assert.NotNil(t, a) assert.NotNil(t, a)
} }
// newItem initializes a `models.DriveItemable` that can be used as input to `createItem`
func newItem(name string, folder bool) *models.DriveItem {
itemToCreate := models.NewDriveItem()
itemToCreate.SetName(&name)
if folder {
itemToCreate.SetFolder(models.NewFolder())
} else {
itemToCreate.SetFile(models.NewFile())
}
return itemToCreate
}
func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() { func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
t := suite.T() t := suite.T()
@ -78,12 +65,12 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
ctx, ctx,
suite.its.user.driveID, suite.its.user.driveID,
suite.its.user.driveRootFolderID, suite.its.user.driveRootFolderID,
newItem(rc.Location, true), api.NewDriveItem(rc.Location, true),
control.Replace) control.Replace)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
// generate a folder to use for collision testing // generate a folder to use for collision testing
folder := newItem("collision", true) folder := api.NewDriveItem("collision", true)
origFolder, err := acd.PostItemInContainer( origFolder, err := acd.PostItemInContainer(
ctx, ctx,
suite.its.user.driveID, suite.its.user.driveID,
@ -93,7 +80,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
// generate an item to use for collision testing // generate an item to use for collision testing
file := newItem("collision.txt", false) file := api.NewDriveItem("collision.txt", false)
origFile, err := acd.PostItemInContainer( origFile, err := acd.PostItemInContainer(
ctx, ctx,
suite.its.user.driveID, suite.its.user.driveID,
@ -241,7 +228,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr
ctx, ctx,
suite.its.user.driveID, suite.its.user.driveID,
suite.its.user.driveRootFolderID, suite.its.user.driveRootFolderID,
newItem(rc.Location, true), api.NewDriveItem(rc.Location, true),
// skip instead of replace here to get // skip instead of replace here to get
// an ErrItemAlreadyExistsConflict, just in case. // an ErrItemAlreadyExistsConflict, just in case.
control.Skip) control.Skip)
@ -249,7 +236,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr
// generate items within that folder // generate items within that folder
for i := 0; i < 5; i++ { for i := 0; i < 5; i++ {
file := newItem(fmt.Sprintf("collision_%d.txt", i), false) file := api.NewDriveItem(fmt.Sprintf("collision_%d.txt", i), false)
f, err := acd.PostItemInContainer( f, err := acd.PostItemInContainer(
ctx, ctx,
suite.its.user.driveID, suite.its.user.driveID,
@ -265,7 +252,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr
ctx, ctx,
suite.its.user.driveID, suite.its.user.driveID,
ptr.Val(folder.GetParentReference().GetId()), ptr.Val(folder.GetParentReference().GetId()),
newItem(rc.Location, true), api.NewDriveItem(rc.Location, true),
control.Replace) control.Replace)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, ptr.Val(resultFolder.GetId())) require.NotEmpty(t, ptr.Val(resultFolder.GetId()))

View File

@ -102,6 +102,7 @@ const filterGroupByDisplayNameQueryTmpl = "displayName eq '%s'"
func (c Groups) GetByID( func (c Groups) GetByID(
ctx context.Context, ctx context.Context,
identifier string, identifier string,
_ CallConfig, // unused; keeps the standard client call signature
) (models.Groupable, error) { ) (models.Groupable, error) {
service, err := c.Service() service, err := c.Service()
if err != nil { if err != nil {
@ -234,9 +235,9 @@ func IsTeam(ctx context.Context, mg models.Groupable) bool {
func (c Groups) GetIDAndName( func (c Groups) GetIDAndName(
ctx context.Context, ctx context.Context,
groupID string, groupID string,
_ CallConfig, // not currently supported cc CallConfig,
) (string, string, error) { ) (string, string, error) {
s, err := c.GetByID(ctx, groupID) s, err := c.GetByID(ctx, groupID, cc)
if err != nil { if err != nil {
return "", "", err return "", "", err
} }

View File

@ -121,7 +121,7 @@ func (suite *GroupsIntgSuite) TestGroups_GetByID() {
groupsAPI = suite.its.ac.Groups() groupsAPI = suite.its.ac.Groups()
) )
grp, err := groupsAPI.GetByID(ctx, groupID) grp, err := groupsAPI.GetByID(ctx, groupID, api.CallConfig{})
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
table := []struct { table := []struct {
@ -157,7 +157,7 @@ func (suite *GroupsIntgSuite) TestGroups_GetByID() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
_, err := groupsAPI.GetByID(ctx, test.id) _, err := groupsAPI.GetByID(ctx, test.id, api.CallConfig{})
test.expectErr(t, err, clues.ToCore(err)) test.expectErr(t, err, clues.ToCore(err))
}) })
} }

View File

@ -13,6 +13,20 @@ import (
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
) )
// ---------------------------------------------------------------------------
// common structs
// ---------------------------------------------------------------------------
// DeltaUpdate holds the current delta token. It is normally
// produced when aggregating the addition and removal of items in
// a delta-queryable folder.
type DeltaUpdate struct {
// the deltaLink itself
URL string
// true if the old delta was marked as invalid
Reset bool
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// common interfaces // common interfaces
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------

View File

@ -223,6 +223,26 @@ func (c Mail) PatchFolder(
return nil return nil
} }
// TODO: needs pager implementation for completion
func (c Mail) GetContainerChildren(
ctx context.Context,
userID, containerID string,
) ([]models.MailFolderable, error) {
resp, err := c.Stable.
Client().
Users().
ByUserId(userID).
MailFolders().
ByMailFolderId(containerID).
ChildFolders().
Get(ctx, nil)
if err != nil {
return nil, graph.Wrap(ctx, err, "getting container child folders")
}
return resp.GetValue(), nil
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// items // items
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------

View File

@ -32,10 +32,11 @@ func (dnl *DeltaNextLinkValues[T]) GetOdataDeltaLink() *string {
} }
type PagerResult[T any] struct { type PagerResult[T any] struct {
Values []T Values []T
NextLink *string NextLink *string
DeltaLink *string DeltaLink *string
Err error ResetDelta bool
Err error
} }
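
The new ResetDelta flag lets a pager signal mid-stream that the prior delta token was invalidated. A sketch of how an aggregator might honor it; pagerResult here is a local mirror of the struct above, and the channel wiring is illustrative:

package main

import "fmt"

// pagerResult mirrors the PagerResult shape for this sketch.
type pagerResult[T any] struct {
	Values     []T
	NextLink   *string
	DeltaLink  *string
	ResetDelta bool
	Err        error
}

// drain aggregates pages, surfacing the final delta link and whether
// any page invalidated the prior token.
func drain[T any](pages <-chan pagerResult[T]) ([]T, string, bool, error) {
	var (
		items []T
		delta string
		reset bool
	)

	for page := range pages {
		if page.Err != nil {
			return nil, "", reset, page.Err
		}

		// a reset on any page means previously aggregated state is stale
		reset = reset || page.ResetDelta
		items = append(items, page.Values...)

		if page.DeltaLink != nil {
			delta = *page.DeltaLink
		}
	}

	return items, delta, reset, nil
}

func main() {
	link := "delta-token"

	ch := make(chan pagerResult[string], 2)
	ch <- pagerResult[string]{Values: []string{"x"}}
	ch <- pagerResult[string]{Values: []string{"y"}, DeltaLink: &link}
	close(ch)

	items, delta, reset, err := drain(ch)
	fmt.Println(items, delta, reset, err)
}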
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------

View File

@ -28,6 +28,27 @@ type Group struct {
IsTeam bool IsTeam bool
} }
// GroupByID retrieves a specific group.
func GroupByID(
ctx context.Context,
acct account.Account,
id string,
) (*Group, error) {
ac, err := makeAC(ctx, acct, path.GroupsService)
if err != nil {
return nil, clues.Stack(err).WithClues(ctx)
}
cc := api.CallConfig{}
g, err := ac.Groups().GetByID(ctx, id, cc)
if err != nil {
return nil, clues.Stack(err)
}
return parseGroup(ctx, g)
}
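
A minimal illustrative caller, assuming an already-initialized account.Account (error handling condensed):

package example

import (
	"context"
	"fmt"

	"github.com/alcionai/corso/src/pkg/account"
	"github.com/alcionai/corso/src/pkg/services/m365"
)

// printGroup resolves a group by ID and prints its identity fields.
func printGroup(ctx context.Context, acct account.Account, id string) error {
	g, err := m365.GroupByID(ctx, acct, id)
	if err != nil {
		return err
	}

	fmt.Println(g.ID, g.DisplayName, g.IsTeam)

	return nil
}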
// GroupsCompat returns a list of groups in the specified M365 tenant. // GroupsCompat returns a list of groups in the specified M365 tenant.
func GroupsCompat(ctx context.Context, acct account.Account) ([]*Group, error) { func GroupsCompat(ctx context.Context, acct account.Account) ([]*Group, error) {
errs := fault.New(true) errs := fault.New(true)

View File

@ -41,6 +41,24 @@ func (suite *GroupsIntgSuite) SetupSuite() {
suite.acct = tconfig.NewM365Account(t) suite.acct = tconfig.NewM365Account(t)
} }
func (suite *GroupsIntgSuite) TestGroupByID() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
graph.InitializeConcurrencyLimiter(ctx, true, 4)
gid := tconfig.M365TeamID(t)
group, err := m365.GroupByID(ctx, suite.acct, gid)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, group)
assert.Equal(t, gid, group.ID, "must match expected id")
assert.NotEmpty(t, group.DisplayName)
}
func (suite *GroupsIntgSuite) TestGroups() { func (suite *GroupsIntgSuite) TestGroups() {
t := suite.T() t := suite.T()

View File

@ -7,16 +7,17 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/credentials"
"github.com/alcionai/corso/src/pkg/storage" "github.com/alcionai/corso/src/pkg/storage"
) )
type CommonCfgSuite struct { type CommonCfgUnitSuite struct {
suite.Suite tester.Suite
} }
func TestCommonCfgSuite(t *testing.T) { func TestCommonCfgUnitSuite(t *testing.T) {
suite.Run(t, new(CommonCfgSuite)) suite.Run(t, &CommonCfgUnitSuite{Suite: tester.NewUnitSuite(t)})
} }
var goodCommonConfig = storage.CommonConfig{ var goodCommonConfig = storage.CommonConfig{
@ -25,7 +26,7 @@ var goodCommonConfig = storage.CommonConfig{
}, },
} }
func (suite *CommonCfgSuite) TestCommonConfig_Config() { func (suite *CommonCfgUnitSuite) TestCommonConfig_Config() {
cfg := goodCommonConfig cfg := goodCommonConfig
c, err := cfg.StringConfig() c, err := cfg.StringConfig()
assert.NoError(suite.T(), err, clues.ToCore(err)) assert.NoError(suite.T(), err, clues.ToCore(err))
@ -43,7 +44,7 @@ func (suite *CommonCfgSuite) TestCommonConfig_Config() {
} }
} }
func (suite *CommonCfgSuite) TestStorage_CommonConfig() { func (suite *CommonCfgUnitSuite) TestStorage_CommonConfig() {
t := suite.T() t := suite.T()
in := goodCommonConfig in := goodCommonConfig
@ -55,7 +56,7 @@ func (suite *CommonCfgSuite) TestStorage_CommonConfig() {
assert.Equal(t, in.CorsoPassphrase, out.CorsoPassphrase) assert.Equal(t, in.CorsoPassphrase, out.CorsoPassphrase)
} }
func (suite *CommonCfgSuite) TestStorage_CommonConfig_InvalidCases() { func (suite *CommonCfgUnitSuite) TestStorage_CommonConfig_InvalidCases() {
// missing required properties // missing required properties
table := []struct { table := []struct {
name string name string

View File

@ -20,6 +20,10 @@ type FilesystemConfig struct {
Path string Path string
} }
func (s Storage) ToFilesystemConfig() (*FilesystemConfig, error) {
return buildFilesystemConfigFromMap(s.Config)
}
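
The accessor returns the typed config rebuilt from the storage value's string map. Illustrative usage, assuming the Storage value was built with ProviderFilesystem config:

package example

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/storage"
)

// describeFilesystem pulls the typed filesystem config back out of a
// generic Storage value.
func describeFilesystem(s storage.Storage) error {
	fc, err := s.ToFilesystemConfig()
	if err != nil {
		return err
	}

	fmt.Println("repo path:", fc.Path)

	return nil
}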
func buildFilesystemConfigFromMap(config map[string]string) (*FilesystemConfig, error) { func buildFilesystemConfigFromMap(config map[string]string) (*FilesystemConfig, error) {
c := &FilesystemConfig{} c := &FilesystemConfig{}
@ -69,7 +73,7 @@ func (c *FilesystemConfig) ApplyConfigOverrides(
if matchFromConfig { if matchFromConfig {
providerType := cast.ToString(g.Get(StorageProviderTypeKey)) providerType := cast.ToString(g.Get(StorageProviderTypeKey))
if providerType != ProviderFilesystem.String() { if providerType != ProviderFilesystem.String() {
return clues.New("unsupported storage provider in config file: " + providerType) return clues.New("unsupported storage provider in config file: [" + providerType + "]")
} }
// This is matching override values from config file. // This is matching override values from config file.

View File

@ -62,6 +62,28 @@ var s3constToTomlKeyMap = map[string]string{
StorageProviderTypeKey: StorageProviderTypeKey, StorageProviderTypeKey: StorageProviderTypeKey,
} }
func (s Storage) ToS3Config() (*S3Config, error) {
return buildS3ConfigFromMap(s.Config)
}
func buildS3ConfigFromMap(config map[string]string) (*S3Config, error) {
c := &S3Config{}
if len(config) > 0 {
c.AccessKey = orEmptyString(config[keyS3AccessKey])
c.SecretKey = orEmptyString(config[keyS3SecretKey])
c.SessionToken = orEmptyString(config[keyS3SessionToken])
c.Bucket = orEmptyString(config[keyS3Bucket])
c.Endpoint = orEmptyString(config[keyS3Endpoint])
c.Prefix = orEmptyString(config[keyS3Prefix])
c.DoNotUseTLS = str.ParseBool(config[keyS3DoNotUseTLS])
c.DoNotVerifyTLS = str.ParseBool(config[keyS3DoNotVerifyTLS])
}
return c, c.validate()
}
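
ToS3Config rebuilds the typed struct from the same string map that StringConfig produces, and validate() rejects maps missing required keys such as the bucket. Illustrative usage:

package example

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/storage"
)

// describeS3 sketches the round-trip: Storage carries a string map
// internally, and ToS3Config rebuilds and validates the typed struct.
func describeS3(s storage.Storage) error {
	cfg, err := s.ToS3Config()
	if err != nil {
		return err // validation failed: a required key is missing
	}

	fmt.Printf("bucket %q endpoint %q prefix %q\n", cfg.Bucket, cfg.Endpoint, cfg.Prefix)

	return nil
}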
func (c *S3Config) normalize() S3Config { func (c *S3Config) normalize() S3Config {
return S3Config{ return S3Config{
Bucket: common.NormalizeBucket(c.Bucket), Bucket: common.NormalizeBucket(c.Bucket),
@ -91,24 +113,6 @@ func (c *S3Config) StringConfig() (map[string]string, error) {
return cfg, cn.validate() return cfg, cn.validate()
} }
func buildS3ConfigFromMap(config map[string]string) (*S3Config, error) {
c := &S3Config{}
if len(config) > 0 {
c.AccessKey = orEmptyString(config[keyS3AccessKey])
c.SecretKey = orEmptyString(config[keyS3SecretKey])
c.SessionToken = orEmptyString(config[keyS3SessionToken])
c.Bucket = orEmptyString(config[keyS3Bucket])
c.Endpoint = orEmptyString(config[keyS3Endpoint])
c.Prefix = orEmptyString(config[keyS3Prefix])
c.DoNotUseTLS = str.ParseBool(config[keyS3DoNotUseTLS])
c.DoNotVerifyTLS = str.ParseBool(config[keyS3DoNotVerifyTLS])
}
return c, c.validate()
}
func (c S3Config) validate() error { func (c S3Config) validate() error {
check := map[string]string{ check := map[string]string{
Bucket: c.Bucket, Bucket: c.Bucket,
@ -169,11 +173,11 @@ func (c *S3Config) ApplyConfigOverrides(
if matchFromConfig { if matchFromConfig {
providerType := cast.ToString(kvg.Get(StorageProviderTypeKey)) providerType := cast.ToString(kvg.Get(StorageProviderTypeKey))
if providerType != ProviderS3.String() { if providerType != ProviderS3.String() {
return clues.New("unsupported storage provider: " + providerType) return clues.New("unsupported storage provider: [" + providerType + "]")
} }
if err := mustMatchConfig(kvg, s3constToTomlKeyMap, s3Overrides(overrides)); err != nil { if err := mustMatchConfig(kvg, s3constToTomlKeyMap, s3Overrides(overrides)); err != nil {
return clues.Wrap(err, "verifying s3 configs in corso config file") return clues.Stack(err)
} }
} }
} }

View File

@ -8,15 +8,16 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/credentials"
) )
type S3CfgSuite struct { type S3CfgUnitSuite struct {
suite.Suite tester.Suite
} }
func TestS3CfgSuite(t *testing.T) { func TestS3CfgUnitSuite(t *testing.T) {
suite.Run(t, new(S3CfgSuite)) suite.Run(t, &S3CfgUnitSuite{Suite: tester.NewUnitSuite(t)})
} }
var ( var (
@ -41,7 +42,7 @@ var (
} }
) )
func (suite *S3CfgSuite) TestS3Config_Config() { func (suite *S3CfgUnitSuite) TestS3Config_Config() {
s3 := goodS3Config s3 := goodS3Config
c, err := s3.StringConfig() c, err := s3.StringConfig()
@ -60,16 +61,16 @@ func (suite *S3CfgSuite) TestS3Config_Config() {
} }
} }
func (suite *S3CfgSuite) TestStorage_S3Config() { func (suite *S3CfgUnitSuite) TestStorage_S3Config() {
t := suite.T() t := suite.T()
in := goodS3Config in := goodS3Config
s, err := NewStorage(ProviderS3, &in) s, err := NewStorage(ProviderS3, &in)
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))
sc, err := s.StorageConfig()
out, err := s.ToS3Config()
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))
out := sc.(*S3Config)
assert.Equal(t, in.Bucket, out.Bucket) assert.Equal(t, in.Bucket, out.Bucket)
assert.Equal(t, in.Endpoint, out.Endpoint) assert.Equal(t, in.Endpoint, out.Endpoint)
assert.Equal(t, in.Prefix, out.Prefix) assert.Equal(t, in.Prefix, out.Prefix)
@ -84,7 +85,7 @@ func makeTestS3Cfg(bkt, end, pre, access, secret, session string) S3Config {
} }
} }
func (suite *S3CfgSuite) TestStorage_S3Config_invalidCases() { func (suite *S3CfgUnitSuite) TestStorage_S3Config_invalidCases() {
// missing required properties // missing required properties
table := []struct { table := []struct {
name string name string
@ -118,13 +119,14 @@ func (suite *S3CfgSuite) TestStorage_S3Config_invalidCases() {
st, err := NewStorage(ProviderUnknown, &goodS3Config) st, err := NewStorage(ProviderUnknown, &goodS3Config)
assert.NoError(t, err, clues.ToCore(err)) assert.NoError(t, err, clues.ToCore(err))
test.amend(st) test.amend(st)
_, err = st.StorageConfig() _, err = st.ToS3Config()
assert.Error(t, err) assert.Error(t, err, clues.ToCore(err))
}) })
} }
} }
func (suite *S3CfgSuite) TestStorage_S3Config_StringConfig() { func (suite *S3CfgUnitSuite) TestStorage_S3Config_StringConfig() {
table := []struct { table := []struct {
name string name string
input S3Config input S3Config
@ -178,7 +180,7 @@ func (suite *S3CfgSuite) TestStorage_S3Config_StringConfig() {
} }
} }
func (suite *S3CfgSuite) TestStorage_S3Config_Normalize() { func (suite *S3CfgUnitSuite) TestStorage_S3Config_Normalize() {
const ( const (
prefixedBkt = "s3://bkt" prefixedBkt = "s3://bkt"
normalBkt = "bkt" normalBkt = "bkt"

Some files were not shown because too many files have changed in this diff.