diff --git a/.github/actions/backup-restore-test/action.yml b/.github/actions/backup-restore-test/action.yml index 83bee4c72..10f81cbb0 100644 --- a/.github/actions/backup-restore-test/action.yml +++ b/.github/actions/backup-restore-test/action.yml @@ -50,9 +50,11 @@ runs: run: | set -euo pipefail ./corso restore '${{ inputs.service }}' \ - --no-stats --hide-progress \ + --no-stats \ + --hide-progress \ ${{ inputs.restore-args }} \ - --backup '${{ steps.backup.outputs.result }}' 2>&1 | + --backup '${{ steps.backup.outputs.result }}' \ + 2>&1 | tee /tmp/corsologs | grep -i -e 'Restoring to folder ' | sed "s/Restoring to folder /result=/" | @@ -77,7 +79,9 @@ runs: run: | set -euo pipefail ./corso backup list ${{ inputs.service }} \ - --no-stats --hide-progress 2>&1 | + --no-stats \ + --hide-progress \ + 2>&1 | tee /tmp/corso-backup-list.log if ! grep -q ${{ steps.backup.outputs.result }} /tmp/corso-backup-list.log @@ -92,8 +96,10 @@ runs: run: | set -euo pipefail ./corso backup list ${{ inputs.service }} \ - --no-stats --hide-progress \ - --backup "${{ steps.backup.outputs.result }}" 2>&1 | + --no-stats \ + --hide-progress \ + --backup "${{ steps.backup.outputs.result }}" \ + 2>&1 | tee /tmp/corso-backup-list-item.log if ! grep -q ${{ steps.backup.outputs.result }} /tmp/corso-backup-list-item.log diff --git a/.github/workflows/sanity-test.yaml b/.github/workflows/sanity-test.yaml index c793986e7..a32ffb5fc 100644 --- a/.github/workflows/sanity-test.yaml +++ b/.github/workflows/sanity-test.yaml @@ -27,20 +27,20 @@ jobs: environment: Testing runs-on: ubuntu-latest env: + # Need these in the local env so that corso can read them AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY_SECRET }} AZURE_CLIENT_ID: ${{ secrets[needs.SetM365App.outputs.client_id_env] }} AZURE_CLIENT_SECRET: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }} AZURE_TENANT_ID: ${{ secrets.TENANT_ID }} - CORSO_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }} + CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }} + # re-used values + # don't forget: return to Corso_Test_Sanity_ CORSO_LOG_DIR: testlog CORSO_LOG_FILE: testlog/testlogging.log - CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }} RESTORE_DEST_PFX: Corso_Test_Sanity_ TEST_RESULT: test_results TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || secrets.CORSO_M365_TEST_USER_ID }} - TEST_SITE: ${{ secrets.CORSO_M365_TEST_SITE_URL }} - SECONDARY_TEST_USER : ${{ secrets.CORSO_SECONDARY_M365_TEST_USER_ID }} # The default working directory doesn't seem to apply to things without # the 'run' directive. 
https://stackoverflow.com/a/67845456 WORKING_DIR: src @@ -87,9 +87,9 @@ jobs: user: ${{ env.TEST_USER }} folder-prefix: ${{ env.RESTORE_DEST_PFX }} older-than: ${{ env.NOW }} - azure-client-id: ${{ env.AZURE_CLIENT_ID }} - azure-client-secret: ${{ env.AZURE_CLIENT_SECRET }} - azure-tenant-id: ${{ env.AZURE_TENANT_ID }} + azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }} + azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }} + azure-tenant-id: ${{ secrets.TENANT_ID }} m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }} m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }} @@ -97,13 +97,13 @@ jobs: if: always() uses: ./.github/actions/purge-m365-data with: - site: ${{ env.TEST_SITE }} + site: ${{ secrets.CORSO_M365_TEST_SITE_URL }} folder-prefix: ${{ env.RESTORE_DEST_PFX }} libraries: ${{ vars.CORSO_M365_TEST_SITE_LIBRARIES }} older-than: ${{ env.NOW }} - azure-client-id: ${{ env.AZURE_CLIENT_ID }} - azure-client-secret: ${{ env.AZURE_CLIENT_SECRET }} - azure-tenant-id: ${{ env.AZURE_TENANT_ID }} + azure-client-id: ${{ secrets[needs.SetM365App.outputs.client_id_env] }} + azure-client-secret: ${{ secrets[needs.SetM365App.outputs.client_secret_env] }} + azure-tenant-id: ${{ secrets.TENANT_ID }} m365-admin-user: ${{ secrets.M365_TENANT_ADMIN_USER }} m365-admin-password: ${{ secrets.M365_TENANT_ADMIN_PASSWORD }} @@ -122,10 +122,13 @@ jobs: run: | set -euo pipefail prefix=$(date +"%Y-%m-%d-%T") - echo -e "\nRepo init test\n" >> ${CORSO_LOG_FILE} + echo -e "\nRepo init test\n" >> ${{ env.CORSO_LOG_FILE }} ./corso repo init s3 \ - --no-stats --hide-progress --prefix $prefix \ - --bucket ${CORSO_BUCKET} 2>&1 | tee $TEST_RESULT/initrepo.txt + --no-stats \ + --hide-progress \ + --prefix $prefix \ + --bucket ${{ secrets.CI_TESTS_S3_BUCKET }} \ + 2>&1 | tee $TEST_RESULT/initrepo.txt if ! grep -q 'Initialized a S3 repository within bucket' $TEST_RESULT/initrepo.txt then @@ -138,10 +141,13 @@ jobs: - name: Repo connect test run: | set -euo pipefail - echo -e "\nRepo connect test\n" >> ${CORSO_LOG_FILE} + echo -e "\nRepo connect test\n" >> ${{ env.CORSO_LOG_FILE }} ./corso repo connect s3 \ - --no-stats --hide-progress --prefix ${{ steps.repo-init.outputs.result }} \ - --bucket ${CORSO_BUCKET} 2>&1 | tee $TEST_RESULT/connect.txt + --no-stats \ + --hide-progress \ + --prefix ${{ steps.repo-init.outputs.result }} \ + --bucket ${{ secrets.CI_TESTS_S3_BUCKET }} \ + 2>&1 | tee $TEST_RESULT/connect.txt if ! grep -q 'Connected to S3 bucket' $TEST_RESULT/connect.txt then @@ -159,9 +165,9 @@ jobs: working-directory: ./src/cmd/factory run: | go run . 
exchange emails \ - --user ${TEST_USER} \ - --tenant ${AZURE_TENANT_ID} \ - --destination ${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }} \ + --user ${{ env.TEST_USER }} \ + --tenant ${{ secrets.TENANT_ID }} \ + --destination ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }} \ --count 4 - name: Exchange - Backup @@ -170,9 +176,9 @@ jobs: with: service: exchange kind: backup - backup-args: '--mailbox "${TEST_USER}" --data "email"' - restore-args: '--email-folder ${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }}' - test-folder: '${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }}' + backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"' + restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}' + test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}' - name: Exchange - Incremental backup id: exchange-backup-incremental @@ -180,9 +186,9 @@ jobs: with: service: exchange kind: backup-incremental - backup-args: '--mailbox "${TEST_USER}" --data "email"' - restore-args: '--email-folder ${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }}' - test-folder: '${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }}' + backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"' + restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}' + test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}' base-backup: ${{ steps.exchange-backup.outputs.backup-id }} - name: Exchange - Non delta backup @@ -191,9 +197,9 @@ jobs: with: service: exchange kind: backup-non-delta - backup-args: '--mailbox "${TEST_USER}" --data "email" --disable-delta' - restore-args: '--email-folder ${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }}' - test-folder: '${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }}' + backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email" --disable-delta' + restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}' + test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}' base-backup: ${{ steps.exchange-backup.outputs.backup-id }} - name: Exchange - Incremental backup after non-delta @@ -202,9 +208,9 @@ jobs: with: service: exchange kind: backup-incremental-after-non-delta - backup-args: '--mailbox "${TEST_USER}" --data "email"' - restore-args: '--email-folder ${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }}' - test-folder: '${RESTORE_DEST_PFX}${{ steps.repo-init.outputs.result }}' + backup-args: '--mailbox "${{ env.TEST_USER }}" --data "email"' + restore-args: '--email-folder ${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}' + test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.repo-init.outputs.result }}' base-backup: ${{ steps.exchange-backup.outputs.backup-id }} @@ -220,10 +226,10 @@ jobs: suffix=$(date +"%Y-%m-%d_%H-%M-%S") go run . 
onedrive files \ - --user ${TEST_USER} \ - --secondaryuser ${SECONDARY_TEST_USER} \ - --tenant ${AZURE_TENANT_ID} \ - --destination ${RESTORE_DEST_PFX}$suffix \ + --user ${{ env.TEST_USER }} \ + --secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \ + --tenant ${{ secrets.TENANT_ID }} \ + --destination ${{ env.RESTORE_DEST_PFX }}$suffix \ --count 4 echo result="${suffix}" >> $GITHUB_OUTPUT @@ -234,19 +240,19 @@ jobs: with: service: onedrive kind: backup - backup-args: '--user "${TEST_USER}"' - restore-args: '--folder ${RESTORE_DEST_PFX}${{ steps.new-data-creation-onedrive.outputs.result }} --restore-permissions' - test-folder: '${RESTORE_DEST_PFX}${{ steps.new-data-creation-onedrive.outputs.result }}' + backup-args: '--user "${{ env.TEST_USER }}"' + restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }} --restore-permissions' + test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}' # generate some more enteries for incremental check - name: OneDrive - Create new data (for incremental) working-directory: ./src/cmd/factory run: | go run . onedrive files \ - --user ${TEST_USER} \ - --secondaryuser ${SECONDARY_TEST_USER} \ - --tenant ${AZURE_TENANT_ID} \ - --destination ${RESTORE_DEST_PFX}${{ steps.new-data-creation-onedrive.outputs.result }} \ + --user ${{ env.TEST_USER }} \ + --secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \ + --tenant ${{ secrets.TENANT_ID }} \ + --destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }} \ --count 4 - name: OneDrive - Incremental backup @@ -255,9 +261,9 @@ jobs: with: service: onedrive kind: incremental - backup-args: '--user "${TEST_USER}"' - restore-args: '--folder ${RESTORE_DEST_PFX}${{ steps.new-data-creation-onedrive.outputs.result }} --restore-permissions' - test-folder: '${RESTORE_DEST_PFX}${{ steps.new-data-creation-onedrive.outputs.result }}' + backup-args: '--user "${{ env.TEST_USER }}"' + restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }} --restore-permissions' + test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-onedrive.outputs.result }}' ########################################################################################################################################## @@ -271,11 +277,11 @@ jobs: suffix=$(date +"%Y-%m-%d_%H-%M-%S") go run . 
sharepoint files \ - --site ${TEST_SITE} \ - --user ${TEST_USER} \ - --secondaryuser ${SECONDARY_TEST_USER} \ - --tenant ${AZURE_TENANT_ID} \ - --destination ${RESTORE_DEST_PFX}$suffix \ + --site ${{ secrets.CORSO_M365_TEST_SITE_URL }} \ + --user ${{ env.TEST_USER }} \ + --secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \ + --tenant ${{ secrets.TENANT_ID }} \ + --destination ${{ env.RESTORE_DEST_PFX }}$suffix \ --count 4 echo result="${suffix}" >> $GITHUB_OUTPUT @@ -286,20 +292,20 @@ jobs: with: service: sharepoint kind: backup - backup-args: '--site "${TEST_SITE}"' - restore-args: '--folder ${RESTORE_DEST_PFX}${{ steps.new-data-creation-sharepoint.outputs.result }} --restore-permissions' - test-folder: '${RESTORE_DEST_PFX}${{ steps.new-data-creation-sharepoint.outputs.result }}' + backup-args: '--site "${{ secrets.CORSO_M365_TEST_SITE_URL }}"' + restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }} --restore-permissions' + test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}' # generate some more enteries for incremental check - name: SharePoint - Create new data (for incremental) working-directory: ./src/cmd/factory run: | go run . sharepoint files \ - --site ${TEST_SITE} \ - --user ${TEST_USER} \ - --secondaryuser ${SECONDARY_TEST_USER} \ - --tenant ${AZURE_TENANT_ID} \ - --destination ${RESTORE_DEST_PFX}${{ steps.new-data-creation-sharepoint.outputs.result }} \ + --site ${{ secrets.CORSO_M365_TEST_SITE_URL }} \ + --user ${{ env.TEST_USER }} \ + --secondaryuser ${{ env.CORSO_SECONDARY_M365_TEST_USER_ID }} \ + --tenant ${{ secrets.TENANT_ID }} \ + --destination ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }} \ --count 4 - name: SharePoint - Incremental backup @@ -308,9 +314,9 @@ jobs: with: service: sharepoint kind: incremental - backup-args: '--site "${TEST_SITE}"' - restore-args: '--folder ${RESTORE_DEST_PFX}${{ steps.new-data-creation-sharepoint.outputs.result }} --restore-permissions' - test-folder: '${RESTORE_DEST_PFX}${{ steps.new-data-creation-sharepoint.outputs.result }}' + backup-args: '--site "${{ secrets.CORSO_M365_TEST_SITE_URL }}"' + restore-args: '--folder ${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }} --restore-permissions' + test-folder: '${{ env.RESTORE_DEST_PFX }}${{ steps.new-data-creation-sharepoint.outputs.result }}' ########################################################################################################################################## @@ -338,7 +344,7 @@ jobs: - name: Send Github Action failure to Slack id: slack-notification if: failure() - uses: slackapi/slack-github-action@v1.23.0 + uses: slackapi/slack-github-action@v1.24.0 with: payload: | { diff --git a/CHANGELOG.md b/CHANGELOG.md index 475c98407..b36c38580 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added ### Fixed +- Fix Exchange folder cache population error when parent folder isn't found. 
+ ### Known Issues ## [v0.8.0] (beta) - 2023-05-15 diff --git a/src/cli/backup/exchange_test.go b/src/cli/backup/exchange_test.go index d8d4f9e68..dd3d12766 100644 --- a/src/cli/backup/exchange_test.go +++ b/src/cli/backup/exchange_test.go @@ -1,6 +1,7 @@ package backup import ( + "fmt" "testing" "github.com/alcionai/clues" @@ -13,6 +14,8 @@ import ( "github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils/testdata" "github.com/alcionai/corso/src/internal/tester" + "github.com/alcionai/corso/src/internal/version" + dtd "github.com/alcionai/corso/src/pkg/backup/details/testdata" ) type ExchangeUnitSuite struct { @@ -275,18 +278,26 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupDetailsSelectors() { ctx, flush := tester.NewContext() defer flush() - for _, test := range testdata.ExchangeOptionDetailLookups { - suite.Run(test.Name, func() { - t := suite.T() + for v := 0; v <= version.Backup; v++ { + suite.Run(fmt.Sprintf("version%d", v), func() { + for _, test := range testdata.ExchangeOptionDetailLookups { + suite.Run(test.Name, func() { + t := suite.T() - output, err := runDetailsExchangeCmd( - ctx, - test.BackupGetter, - "backup-ID", - test.Opts, - false) - assert.NoError(t, err, clues.ToCore(err)) - assert.ElementsMatch(t, test.Expected, output.Entries) + bg := testdata.VersionedBackupGetter{ + Details: dtd.GetDetailsSetForVersion(t, v), + } + + output, err := runDetailsExchangeCmd( + ctx, + bg, + "backup-ID", + test.Opts(t, v), + false) + assert.NoError(t, err, clues.ToCore(err)) + assert.ElementsMatch(t, test.Expected(t, v), output.Entries) + }) + } }) } } @@ -303,7 +314,7 @@ func (suite *ExchangeUnitSuite) TestExchangeBackupDetailsSelectorsBadFormats() { ctx, test.BackupGetter, "backup-ID", - test.Opts, + test.Opts(t, version.Backup), false) assert.Error(t, err, clues.ToCore(err)) assert.Empty(t, output) diff --git a/src/cli/backup/onedrive_test.go b/src/cli/backup/onedrive_test.go index dd9d6586b..27720fa74 100644 --- a/src/cli/backup/onedrive_test.go +++ b/src/cli/backup/onedrive_test.go @@ -1,6 +1,7 @@ package backup import ( + "fmt" "testing" "github.com/alcionai/clues" @@ -13,6 +14,8 @@ import ( "github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils/testdata" "github.com/alcionai/corso/src/internal/tester" + "github.com/alcionai/corso/src/internal/version" + dtd "github.com/alcionai/corso/src/pkg/backup/details/testdata" ) type OneDriveUnitSuite struct { @@ -137,18 +140,26 @@ func (suite *OneDriveUnitSuite) TestOneDriveBackupDetailsSelectors() { ctx, flush := tester.NewContext() defer flush() - for _, test := range testdata.OneDriveOptionDetailLookups { - suite.Run(test.Name, func() { - t := suite.T() + for v := 0; v <= version.Backup; v++ { + suite.Run(fmt.Sprintf("version%d", v), func() { + for _, test := range testdata.OneDriveOptionDetailLookups { + suite.Run(test.Name, func() { + t := suite.T() - output, err := runDetailsOneDriveCmd( - ctx, - test.BackupGetter, - "backup-ID", - test.Opts, - false) - assert.NoError(t, err, clues.ToCore(err)) - assert.ElementsMatch(t, test.Expected, output.Entries) + bg := testdata.VersionedBackupGetter{ + Details: dtd.GetDetailsSetForVersion(t, v), + } + + output, err := runDetailsOneDriveCmd( + ctx, + bg, + "backup-ID", + test.Opts(t, v), + false) + assert.NoError(t, err, clues.ToCore(err)) + assert.ElementsMatch(t, test.Expected(t, v), output.Entries) + }) + } }) } } @@ -165,7 +176,7 @@ func (suite *OneDriveUnitSuite) TestOneDriveBackupDetailsSelectorsBadFormats() { ctx, 
test.BackupGetter, "backup-ID", - test.Opts, + test.Opts(t, version.Backup), false) assert.Error(t, err, clues.ToCore(err)) assert.Empty(t, output) diff --git a/src/cli/backup/sharepoint_test.go b/src/cli/backup/sharepoint_test.go index 70b132897..ba355da82 100644 --- a/src/cli/backup/sharepoint_test.go +++ b/src/cli/backup/sharepoint_test.go @@ -1,6 +1,7 @@ package backup import ( + "fmt" "testing" "github.com/alcionai/clues" @@ -14,6 +15,8 @@ import ( "github.com/alcionai/corso/src/cli/utils/testdata" "github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/tester" + "github.com/alcionai/corso/src/internal/version" + dtd "github.com/alcionai/corso/src/pkg/backup/details/testdata" "github.com/alcionai/corso/src/pkg/selectors" ) @@ -256,18 +259,26 @@ func (suite *SharePointUnitSuite) TestSharePointBackupDetailsSelectors() { ctx, flush := tester.NewContext() defer flush() - for _, test := range testdata.SharePointOptionDetailLookups { - suite.Run(test.Name, func() { - t := suite.T() + for v := 0; v <= version.Backup; v++ { + suite.Run(fmt.Sprintf("version%d", v), func() { + for _, test := range testdata.SharePointOptionDetailLookups { + suite.Run(test.Name, func() { + t := suite.T() - output, err := runDetailsSharePointCmd( - ctx, - test.BackupGetter, - "backup-ID", - test.Opts, - false) - assert.NoError(t, err, clues.ToCore(err)) - assert.ElementsMatch(t, test.Expected, output.Entries) + bg := testdata.VersionedBackupGetter{ + Details: dtd.GetDetailsSetForVersion(t, v), + } + + output, err := runDetailsSharePointCmd( + ctx, + bg, + "backup-ID", + test.Opts(t, v), + false) + assert.NoError(t, err, clues.ToCore(err)) + assert.ElementsMatch(t, test.Expected(t, v), output.Entries) + }) + } }) } } @@ -284,7 +295,7 @@ func (suite *SharePointUnitSuite) TestSharePointBackupDetailsSelectorsBadFormats ctx, test.BackupGetter, "backup-ID", - test.Opts, + test.Opts(t, version.Backup), false) assert.Error(t, err, clues.ToCore(err)) assert.Empty(t, output) diff --git a/src/cli/config/account.go b/src/cli/config/account.go index 310ac97c3..45fd50058 100644 --- a/src/cli/config/account.go +++ b/src/cli/config/account.go @@ -6,7 +6,7 @@ import ( "github.com/alcionai/clues" "github.com/spf13/viper" - "github.com/alcionai/corso/src/internal/common" + "github.com/alcionai/corso/src/internal/common/str" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/credentials" ) @@ -64,7 +64,7 @@ func configureAccount( m365Cfg = account.M365Config{ M365: m365, - AzureTenantID: common.First( + AzureTenantID: str.First( overrides[account.AzureTenantID], m365Cfg.AzureTenantID, os.Getenv(account.AzureTenantID)), diff --git a/src/cli/config/storage.go b/src/cli/config/storage.go index 9aba1e5d9..af8dff397 100644 --- a/src/cli/config/storage.go +++ b/src/cli/config/storage.go @@ -10,6 +10,7 @@ import ( "github.com/spf13/viper" "github.com/alcionai/corso/src/internal/common" + "github.com/alcionai/corso/src/internal/common/str" "github.com/alcionai/corso/src/pkg/credentials" "github.com/alcionai/corso/src/pkg/storage" ) @@ -80,14 +81,14 @@ func configureStorage( } s3Cfg = storage.S3Config{ - Bucket: common.First(overrides[storage.Bucket], s3Cfg.Bucket, os.Getenv(storage.BucketKey)), - Endpoint: common.First(overrides[storage.Endpoint], s3Cfg.Endpoint, os.Getenv(storage.EndpointKey)), - Prefix: common.First(overrides[storage.Prefix], s3Cfg.Prefix, os.Getenv(storage.PrefixKey)), - DoNotUseTLS: common.ParseBool(common.First( + Bucket: 
str.First(overrides[storage.Bucket], s3Cfg.Bucket, os.Getenv(storage.BucketKey)), + Endpoint: str.First(overrides[storage.Endpoint], s3Cfg.Endpoint, os.Getenv(storage.EndpointKey)), + Prefix: str.First(overrides[storage.Prefix], s3Cfg.Prefix, os.Getenv(storage.PrefixKey)), + DoNotUseTLS: str.ParseBool(str.First( overrides[storage.DoNotUseTLS], strconv.FormatBool(s3Cfg.DoNotUseTLS), os.Getenv(storage.PrefixKey))), - DoNotVerifyTLS: common.ParseBool(common.First( + DoNotVerifyTLS: str.ParseBool(str.First( overrides[storage.DoNotVerifyTLS], strconv.FormatBool(s3Cfg.DoNotVerifyTLS), os.Getenv(storage.PrefixKey))), diff --git a/src/cli/utils/testdata/opts.go b/src/cli/utils/testdata/opts.go index 8bbb35a58..614434c11 100644 --- a/src/cli/utils/testdata/opts.go +++ b/src/cli/utils/testdata/opts.go @@ -2,6 +2,7 @@ package testdata import ( "context" + "testing" "time" "github.com/alcionai/clues" @@ -13,15 +14,16 @@ import ( "github.com/alcionai/corso/src/pkg/backup/details/testdata" "github.com/alcionai/corso/src/pkg/fault" ftd "github.com/alcionai/corso/src/pkg/fault/testdata" + "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/store" ) type ExchangeOptionsTest struct { Name string - Opts utils.ExchangeOpts + Opts func(t *testing.T, wantedVersion int) utils.ExchangeOpts BackupGetter *MockBackupGetter - Expected []details.Entry + Expected func(t *testing.T, wantedVersion int) []details.Entry } var ( @@ -32,92 +34,112 @@ var ( BadExchangeOptionsFormats = []ExchangeOptionsTest{ { Name: "BadEmailReceiveAfter", - Opts: utils.ExchangeOpts{ - EmailReceivedAfter: "foo", - Populated: utils.PopulatedFlags{ - utils.EmailReceivedAfterFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EmailReceivedAfter: "foo", + Populated: utils.PopulatedFlags{ + utils.EmailReceivedAfterFN: struct{}{}, + }, + } }, }, { Name: "EmptyEmailReceiveAfter", - Opts: utils.ExchangeOpts{ - EmailReceivedAfter: "", - Populated: utils.PopulatedFlags{ - utils.EmailReceivedAfterFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EmailReceivedAfter: "", + Populated: utils.PopulatedFlags{ + utils.EmailReceivedAfterFN: struct{}{}, + }, + } }, }, { Name: "BadEmailReceiveBefore", - Opts: utils.ExchangeOpts{ - EmailReceivedBefore: "foo", - Populated: utils.PopulatedFlags{ - utils.EmailReceivedBeforeFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EmailReceivedBefore: "foo", + Populated: utils.PopulatedFlags{ + utils.EmailReceivedBeforeFN: struct{}{}, + }, + } }, }, { Name: "EmptyEmailReceiveBefore", - Opts: utils.ExchangeOpts{ - EmailReceivedBefore: "", - Populated: utils.PopulatedFlags{ - utils.EmailReceivedBeforeFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EmailReceivedBefore: "", + Populated: utils.PopulatedFlags{ + utils.EmailReceivedBeforeFN: struct{}{}, + }, + } }, }, { Name: "BadEventRecurs", - Opts: utils.ExchangeOpts{ - EventRecurs: "foo", - Populated: utils.PopulatedFlags{ - utils.EventRecursFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EventRecurs: "foo", + Populated: utils.PopulatedFlags{ + utils.EventRecursFN: struct{}{}, + }, + } }, }, { Name: "EmptyEventRecurs", - Opts: utils.ExchangeOpts{ - EventRecurs: "", - 
Populated: utils.PopulatedFlags{ - utils.EventRecursFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EventRecurs: "", + Populated: utils.PopulatedFlags{ + utils.EventRecursFN: struct{}{}, + }, + } }, }, { Name: "BadEventStartsAfter", - Opts: utils.ExchangeOpts{ - EventStartsAfter: "foo", - Populated: utils.PopulatedFlags{ - utils.EventStartsAfterFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EventStartsAfter: "foo", + Populated: utils.PopulatedFlags{ + utils.EventStartsAfterFN: struct{}{}, + }, + } }, }, { Name: "EmptyEventStartsAfter", - Opts: utils.ExchangeOpts{ - EventStartsAfter: "", - Populated: utils.PopulatedFlags{ - utils.EventStartsAfterFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EventStartsAfter: "", + Populated: utils.PopulatedFlags{ + utils.EventStartsAfterFN: struct{}{}, + }, + } }, }, { Name: "BadEventStartsBefore", - Opts: utils.ExchangeOpts{ - EventStartsBefore: "foo", - Populated: utils.PopulatedFlags{ - utils.EventStartsBeforeFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EventStartsBefore: "foo", + Populated: utils.PopulatedFlags{ + utils.EventStartsBeforeFN: struct{}{}, + }, + } }, }, { Name: "EmptyEventStartsBefore", - Opts: utils.ExchangeOpts{ - EventStartsBefore: "", - Populated: utils.PopulatedFlags{ - utils.EventStartsBeforeFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EventStartsBefore: "", + Populated: utils.PopulatedFlags{ + utils.EventStartsBeforeFN: struct{}{}, + }, + } }, }, } @@ -128,130 +150,274 @@ var ( // configured to return the full dataset listed in selectors/testdata. 
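
The rewritten lookup tables below defer fixture construction to two helpers from pkg/backup/details/testdata that are not part of this diff: GetDeetsForVersion(t, service, category, wantedVersion), whose entries feed the ShortRef/ItemRef/ItemName values used inside the Opts funcs, and GetItemsForVersion(t, service, category, wantedVersion, indices...), which builds the Expected entries. Judging from the call sites, the trailing indices pick individual entries out of the per-version fixture set and a lone -1 picks the whole set; a hypothetical sketch of just that index handling, under those assumptions:

package testdatasketch // hypothetical package, for orientation only

import "github.com/alcionai/corso/src/pkg/backup/details"

// pickEntries mimics how the index arguments passed to GetItemsForVersion
// appear to behave at the call sites below; the real helper may differ.
func pickEntries(all []details.Entry, indices ...int) []details.Entry {
	// A single -1 is used below to mean "every entry in this service/category".
	if len(indices) == 1 && indices[0] == -1 {
		return all
	}

	picked := make([]details.Entry, 0, len(indices))
	for _, i := range indices {
		// Assumes the per-version fixtures keep the ordering the old static
		// slices (for example ExchangeEmailItems) had.
		picked = append(picked, all[i])
	}

	return picked
}
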
ExchangeOptionDetailLookups = []ExchangeOptionsTest{ { - Name: "Emails", - Expected: testdata.ExchangeEmailItems, - Opts: utils.ExchangeOpts{ - Email: selectors.Any(), + Name: "Emails", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantedVersion, + -1) + }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + Email: selectors.Any(), + } }, }, { - Name: "EmailsFolderPrefixMatch", - Expected: testdata.ExchangeEmailItems, - Opts: utils.ExchangeOpts{ - EmailFolder: []string{testdata.ExchangeEmailInboxPath.FolderLocation()}, + Name: "EmailsFolderPrefixMatch", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantedVersion, + -1) + }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EmailFolder: []string{testdata.ExchangeEmailInboxPath.FolderLocation()}, + } }, }, { - Name: "EmailsFolderPrefixMatchTrailingSlash", - Expected: testdata.ExchangeEmailItems, - Opts: utils.ExchangeOpts{ - EmailFolder: []string{testdata.ExchangeEmailInboxPath.FolderLocation() + "/"}, + Name: "EmailsFolderPrefixMatchTrailingSlash", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantedVersion, + -1) + }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EmailFolder: []string{testdata.ExchangeEmailInboxPath.FolderLocation() + "/"}, + } }, }, { Name: "EmailsFolderWithSlashPrefixMatch", - Expected: []details.Entry{ - testdata.ExchangeEmailItems[1], - testdata.ExchangeEmailItems[2], + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantedVersion, + 1, 2) }, - Opts: utils.ExchangeOpts{ - EmailFolder: []string{testdata.ExchangeEmailBasePath2.FolderLocation()}, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EmailFolder: []string{testdata.ExchangeEmailBasePath2.FolderLocation()}, + } }, }, { Name: "EmailsFolderWithSlashPrefixMatchTrailingSlash", - Expected: []details.Entry{ - testdata.ExchangeEmailItems[1], - testdata.ExchangeEmailItems[2], + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantedVersion, + 1, 2) }, - Opts: utils.ExchangeOpts{ - EmailFolder: []string{testdata.ExchangeEmailBasePath2.FolderLocation() + "/"}, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EmailFolder: []string{testdata.ExchangeEmailBasePath2.FolderLocation() + "/"}, + } }, }, { Name: "EmailsBySubject", - Expected: []details.Entry{ - testdata.ExchangeEmailItems[0], - testdata.ExchangeEmailItems[1], + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantedVersion, + 0, 1) }, - Opts: utils.ExchangeOpts{ - EmailSender: "a-person", + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EmailSender: "a-person", + } }, }, { Name: "AllExchange", - Expected: append( - append( + Opts: func(t *testing.T, wantedVersion int) 
utils.ExchangeOpts { + return utils.ExchangeOpts{} + }, + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return append( append( - []details.Entry{}, - testdata.ExchangeEmailItems..., - ), - testdata.ExchangeContactsItems..., - ), - testdata.ExchangeEventsItems..., - ), - }, - { - Name: "MailReceivedTime", - Expected: []details.Entry{testdata.ExchangeEmailItems[0]}, - Opts: utils.ExchangeOpts{ - EmailReceivedBefore: dttm.Format(testdata.Time1.Add(time.Second)), + testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantedVersion, + -1), + testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EventsCategory, + wantedVersion, + -1)...), + testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.ContactsCategory, + wantedVersion, + -1)...) }, }, { - Name: "MailShortRef", - Expected: []details.Entry{testdata.ExchangeEmailItems[0]}, - Opts: utils.ExchangeOpts{ - Email: []string{testdata.ExchangeEmailItemPath1.RR.ShortRef()}, + Name: "MailReceivedTime", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantedVersion, + 0) + }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EmailReceivedBefore: dttm.Format(testdata.Time1.Add(time.Second)), + } + }, + }, + { + Name: "MailShortRef", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantedVersion, + 0) + }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + deets := testdata.GetDeetsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantedVersion) + + return utils.ExchangeOpts{ + Email: []string{deets[0].ShortRef}, + } }, }, { Name: "BadMailItemRef", // no matches are expected, since exchange ItemRefs // are not matched when using the CLI's selectors. 
- Expected: []details.Entry{}, - Opts: utils.ExchangeOpts{ - Email: []string{testdata.ExchangeEmailItems[0].ItemRef}, + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return []details.Entry{} + }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + deets := testdata.GetDeetsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantedVersion) + + return utils.ExchangeOpts{ + Email: []string{deets[0].ItemRef}, + } }, }, { Name: "MultipleMailShortRef", - Expected: []details.Entry{ - testdata.ExchangeEmailItems[0], - testdata.ExchangeEmailItems[1], + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantedVersion, + 0, 1) }, - Opts: utils.ExchangeOpts{ - Email: []string{ - testdata.ExchangeEmailItemPath1.RR.ShortRef(), - testdata.ExchangeEmailItemPath2.RR.ShortRef(), - }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + deets := testdata.GetDeetsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantedVersion) + + return utils.ExchangeOpts{ + Email: []string{ + deets[0].ShortRef, + deets[1].ShortRef, + }, + } }, }, { - Name: "AllEventsAndMailWithSubject", - Expected: []details.Entry{testdata.ExchangeEmailItems[0]}, - Opts: utils.ExchangeOpts{ - EmailSubject: "foo", - Event: selectors.Any(), + Name: "AllEventsAndMailWithSubject", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantedVersion, + 0) + }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EmailSubject: "foo", + Event: selectors.Any(), + } }, }, { - Name: "EventsAndMailWithSubject", - Expected: []details.Entry{}, - Opts: utils.ExchangeOpts{ - EmailSubject: "foo", - EventSubject: "foo", + Name: "EventsAndMailWithSubject", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return []details.Entry{} + }, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + return utils.ExchangeOpts{ + EmailSubject: "foo", + EventSubject: "foo", + } }, }, { Name: "EventsAndMailByShortRef", - Expected: []details.Entry{ - testdata.ExchangeEmailItems[0], - testdata.ExchangeEventsItems[0], + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return append( + testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantedVersion, + 0), + testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EventsCategory, + wantedVersion, + 0)...) 
}, - Opts: utils.ExchangeOpts{ - Email: []string{testdata.ExchangeEmailItemPath1.RR.ShortRef()}, - Event: []string{testdata.ExchangeEventsItemPath1.RR.ShortRef()}, + Opts: func(t *testing.T, wantedVersion int) utils.ExchangeOpts { + emailDeets := testdata.GetDeetsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantedVersion) + + eventDeets := testdata.GetDeetsForVersion( + t, + path.ExchangeService, + path.EventsCategory, + wantedVersion) + + return utils.ExchangeOpts{ + Email: []string{emailDeets[0].ShortRef}, + Event: []string{eventDeets[0].ShortRef}, + } }, }, } @@ -259,9 +425,9 @@ var ( type OneDriveOptionsTest struct { Name string - Opts utils.OneDriveOpts + Opts func(t *testing.T, wantedVersion int) utils.OneDriveOpts BackupGetter *MockBackupGetter - Expected []details.Entry + Expected func(t *testing.T, wantedVersion int) []details.Entry } var ( @@ -271,75 +437,91 @@ var ( BadOneDriveOptionsFormats = []OneDriveOptionsTest{ { Name: "BadFileCreatedAfter", - Opts: utils.OneDriveOpts{ - Users: selectors.Any(), - FileCreatedAfter: "foo", - Populated: utils.PopulatedFlags{ - utils.FileCreatedAfterFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + return utils.OneDriveOpts{ + Users: selectors.Any(), + FileCreatedAfter: "foo", + Populated: utils.PopulatedFlags{ + utils.FileCreatedAfterFN: struct{}{}, + }, + } }, }, { Name: "EmptyFileCreatedAfter", - Opts: utils.OneDriveOpts{ - FileCreatedAfter: "", - Populated: utils.PopulatedFlags{ - utils.FileCreatedAfterFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + return utils.OneDriveOpts{ + FileCreatedAfter: "", + Populated: utils.PopulatedFlags{ + utils.FileCreatedAfterFN: struct{}{}, + }, + } }, }, { Name: "BadFileCreatedBefore", - Opts: utils.OneDriveOpts{ - FileCreatedBefore: "foo", - Populated: utils.PopulatedFlags{ - utils.FileCreatedBeforeFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + return utils.OneDriveOpts{ + FileCreatedBefore: "foo", + Populated: utils.PopulatedFlags{ + utils.FileCreatedBeforeFN: struct{}{}, + }, + } }, }, { Name: "EmptyFileCreatedBefore", - Opts: utils.OneDriveOpts{ - FileCreatedBefore: "", - Populated: utils.PopulatedFlags{ - utils.FileCreatedBeforeFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + return utils.OneDriveOpts{ + FileCreatedBefore: "", + Populated: utils.PopulatedFlags{ + utils.FileCreatedBeforeFN: struct{}{}, + }, + } }, }, { Name: "BadFileModifiedAfter", - Opts: utils.OneDriveOpts{ - FileModifiedAfter: "foo", - Populated: utils.PopulatedFlags{ - utils.FileModifiedAfterFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + return utils.OneDriveOpts{ + FileModifiedAfter: "foo", + Populated: utils.PopulatedFlags{ + utils.FileModifiedAfterFN: struct{}{}, + }, + } }, }, { Name: "EmptyFileModifiedAfter", - Opts: utils.OneDriveOpts{ - FileModifiedAfter: "", - Populated: utils.PopulatedFlags{ - utils.FileModifiedAfterFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + return utils.OneDriveOpts{ + FileModifiedAfter: "", + Populated: utils.PopulatedFlags{ + utils.FileModifiedAfterFN: struct{}{}, + }, + } }, }, { Name: "BadFileModifiedBefore", - Opts: utils.OneDriveOpts{ - FileModifiedBefore: "foo", - Populated: utils.PopulatedFlags{ - utils.FileModifiedBeforeFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + return 
utils.OneDriveOpts{ + FileModifiedBefore: "foo", + Populated: utils.PopulatedFlags{ + utils.FileModifiedBeforeFN: struct{}{}, + }, + } }, }, { Name: "EmptyFileModifiedBefore", - Opts: utils.OneDriveOpts{ - FileModifiedBefore: "", - Populated: utils.PopulatedFlags{ - utils.FileModifiedBeforeFN: struct{}{}, - }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + return utils.OneDriveOpts{ + FileModifiedBefore: "", + Populated: utils.PopulatedFlags{ + utils.FileModifiedBeforeFN: struct{}{}, + }, + } }, }, } @@ -350,96 +532,203 @@ var ( // configured to return the full dataset listed in selectors/testdata. OneDriveOptionDetailLookups = []OneDriveOptionsTest{ { - Name: "AllFiles", - Expected: testdata.OneDriveItems, - Opts: utils.OneDriveOpts{ - FolderPath: selectors.Any(), + Name: "AllFiles", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.OneDriveService, + path.FilesCategory, + wantedVersion, + -1) + }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + return utils.OneDriveOpts{ + FolderPath: selectors.Any(), + } }, }, { - Name: "FilesWithSingleSlash", - Expected: testdata.OneDriveItems, - Opts: utils.OneDriveOpts{ - FolderPath: []string{"/"}, + Name: "FilesWithSingleSlash", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.OneDriveService, + path.FilesCategory, + wantedVersion, + -1) + }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + return utils.OneDriveOpts{ + FolderPath: []string{"/"}, + } }, }, { - Name: "FolderPrefixMatch", - Expected: testdata.OneDriveItems, - Opts: utils.OneDriveOpts{ - FolderPath: []string{testdata.OneDriveFolderFolder}, + Name: "FolderPrefixMatch", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.OneDriveService, + path.FilesCategory, + wantedVersion, + -1) + }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + return utils.OneDriveOpts{ + FolderPath: []string{testdata.OneDriveFolderFolder}, + } }, }, { - Name: "FolderPrefixMatchTrailingSlash", - Expected: testdata.OneDriveItems, - Opts: utils.OneDriveOpts{ - FolderPath: []string{testdata.OneDriveFolderFolder + "/"}, + Name: "FolderPrefixMatchTrailingSlash", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.OneDriveService, + path.FilesCategory, + wantedVersion, + -1) + }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + return utils.OneDriveOpts{ + FolderPath: []string{testdata.OneDriveFolderFolder + "/"}, + } }, }, { - Name: "FolderPrefixMatchTrailingSlash", - Expected: testdata.OneDriveItems, - Opts: utils.OneDriveOpts{ - FolderPath: []string{testdata.OneDriveFolderFolder + "/"}, + Name: "FolderPrefixMatchTrailingSlash", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.OneDriveService, + path.FilesCategory, + wantedVersion, + -1) + }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + return utils.OneDriveOpts{ + FolderPath: []string{testdata.OneDriveFolderFolder + "/"}, + } }, }, { - Name: "FolderRepoRefMatchesNothing", - Expected: []details.Entry{}, - Opts: utils.OneDriveOpts{ - FolderPath: []string{testdata.OneDriveFolderPath.RR.Folder(true)}, + Name: "FolderRepoRefMatchesNothing", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return 
[]details.Entry{} + }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + return utils.OneDriveOpts{ + FolderPath: []string{testdata.OneDriveFolderPath.RR.Folder(true)}, + } }, }, { Name: "ShortRef", - Expected: []details.Entry{ - testdata.OneDriveItems[0], - testdata.OneDriveItems[1], + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.OneDriveService, + path.FilesCategory, + wantedVersion, + 0, 1) }, - Opts: utils.OneDriveOpts{ - FileName: []string{ - testdata.OneDriveItems[0].ShortRef, - testdata.OneDriveItems[1].ShortRef, - }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + deets := testdata.GetDeetsForVersion( + t, + path.OneDriveService, + path.FilesCategory, + wantedVersion) + + return utils.OneDriveOpts{ + FileName: []string{ + deets[0].ShortRef, + deets[1].ShortRef, + }, + } }, }, { - Name: "SingleItem", - Expected: []details.Entry{testdata.OneDriveItems[0]}, - Opts: utils.OneDriveOpts{ - FileName: []string{ - testdata.OneDriveItems[0].OneDrive.ItemName, - }, + Name: "SingleItem", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.OneDriveService, + path.FilesCategory, + wantedVersion, + 0) + }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + deets := testdata.GetDeetsForVersion( + t, + path.OneDriveService, + path.FilesCategory, + wantedVersion) + + return utils.OneDriveOpts{ + FileName: []string{ + deets[0].OneDrive.ItemName, + }, + } }, }, { Name: "MultipleItems", - Expected: []details.Entry{ - testdata.OneDriveItems[0], - testdata.OneDriveItems[1], + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.OneDriveService, + path.FilesCategory, + wantedVersion, + 0, 1) }, - Opts: utils.OneDriveOpts{ - FileName: []string{ - testdata.OneDriveItems[0].OneDrive.ItemName, - testdata.OneDriveItems[1].OneDrive.ItemName, - }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + deets := testdata.GetDeetsForVersion( + t, + path.OneDriveService, + path.FilesCategory, + wantedVersion) + + return utils.OneDriveOpts{ + FileName: []string{ + deets[0].OneDrive.ItemName, + deets[1].OneDrive.ItemName, + }, + } }, }, { - Name: "ItemRefMatchesNothing", - Expected: []details.Entry{}, - Opts: utils.OneDriveOpts{ - FileName: []string{ - testdata.OneDriveItems[0].ItemRef, - }, + Name: "ItemRefMatchesNothing", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return []details.Entry{} + }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + deets := testdata.GetDeetsForVersion( + t, + path.OneDriveService, + path.FilesCategory, + wantedVersion) + + return utils.OneDriveOpts{ + FileName: []string{ + deets[0].ItemRef, + }, + } }, }, { - Name: "CreatedBefore", - Expected: []details.Entry{testdata.OneDriveItems[1]}, - Opts: utils.OneDriveOpts{ - FileCreatedBefore: dttm.Format(testdata.Time1.Add(time.Second)), + Name: "CreatedBefore", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.OneDriveService, + path.FilesCategory, + wantedVersion, + 1) + }, + Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + return utils.OneDriveOpts{ + FileCreatedBefore: dttm.Format(testdata.Time1.Add(time.Second)), + } }, }, } @@ -447,9 +736,9 @@ var ( type SharePointOptionsTest struct { Name string - Opts utils.SharePointOpts + Opts func(t 
*testing.T, wantedVersion int) utils.SharePointOpts BackupGetter *MockBackupGetter - Expected []details.Entry + Expected func(t *testing.T, wantedVersion int) []details.Entry } var ( @@ -457,24 +746,28 @@ var ( // cause errors about the format of the input flag. Mocks are configured to // allow the system to run if it doesn't throw an error on formatting. BadSharePointOptionsFormats = []SharePointOptionsTest{ - // { - // Name: "BadFileCreatedBefore", - // Opts: utils.OneDriveOpts{ - // FileCreatedBefore: "foo", - // Populated: utils.PopulatedFlags{ - // utils.FileCreatedBeforeFN: struct{}{}, - // }, - // }, - // }, - // { - // Name: "EmptyFileCreatedBefore", - // Opts: utils.OneDriveOpts{ - // FileCreatedBefore: "", - // Populated: utils.PopulatedFlags{ - // utils.FileCreatedBeforeFN: struct{}{}, - // }, - // }, - // }, + //{ + // Name: "BadFileCreatedBefore", + // Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + // return utils.SharePointOpts{ + // FileCreatedBefore: "foo", + // Populated: utils.PopulatedFlags{ + // utils.FileCreatedBeforeFN: struct{}{}, + // }, + // } + // }, + //}, + //{ + // Name: "EmptyFileCreatedBefore", + // Opts: func(t *testing.T, wantedVersion int) utils.OneDriveOpts { + // return utils.SharePointOpts{ + // FileCreatedBefore: "", + // Populated: utils.PopulatedFlags{ + // utils.FileCreatedBeforeFN: struct{}{}, + // }, + // } + // }, + //}, } // SharePointOptionDetailLookups contains flag inputs and expected results for @@ -483,98 +776,205 @@ var ( // configured to return the full dataset listed in selectors/testdata. SharePointOptionDetailLookups = []SharePointOptionsTest{ { - Name: "AllLibraryItems", - Expected: testdata.SharePointLibraryItems, - Opts: utils.SharePointOpts{ - FolderPath: selectors.Any(), + Name: "AllLibraryItems", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.SharePointService, + path.LibrariesCategory, + wantedVersion, + -1) + }, + Opts: func(t *testing.T, wantedVersion int) utils.SharePointOpts { + return utils.SharePointOpts{ + FolderPath: selectors.Any(), + } }, }, { - Name: "LibraryItemsWithSingleSlash", - Expected: testdata.SharePointLibraryItems, - Opts: utils.SharePointOpts{ - FolderPath: []string{"/"}, + Name: "LibraryItemsWithSingleSlash", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.SharePointService, + path.LibrariesCategory, + wantedVersion, + -1) + }, + Opts: func(t *testing.T, wantedVersion int) utils.SharePointOpts { + return utils.SharePointOpts{ + FolderPath: []string{"/"}, + } }, }, { - Name: "FolderPrefixMatch", - Expected: testdata.SharePointLibraryItems, - Opts: utils.SharePointOpts{ - FolderPath: []string{testdata.SharePointLibraryFolder}, + Name: "FolderPrefixMatch", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.SharePointService, + path.LibrariesCategory, + wantedVersion, + -1) + }, + Opts: func(t *testing.T, wantedVersion int) utils.SharePointOpts { + return utils.SharePointOpts{ + FolderPath: []string{testdata.SharePointLibraryFolder}, + } }, }, { - Name: "FolderPrefixMatchTrailingSlash", - Expected: testdata.SharePointLibraryItems, - Opts: utils.SharePointOpts{ - FolderPath: []string{testdata.SharePointLibraryFolder + "/"}, + Name: "FolderPrefixMatchTrailingSlash", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + 
path.SharePointService, + path.LibrariesCategory, + wantedVersion, + -1) + }, + Opts: func(t *testing.T, wantedVersion int) utils.SharePointOpts { + return utils.SharePointOpts{ + FolderPath: []string{testdata.SharePointLibraryFolder + "/"}, + } }, }, { - Name: "FolderPrefixMatchTrailingSlash", - Expected: testdata.SharePointLibraryItems, - Opts: utils.SharePointOpts{ - FolderPath: []string{testdata.SharePointLibraryFolder + "/"}, + Name: "FolderPrefixMatchTrailingSlash", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.SharePointService, + path.LibrariesCategory, + wantedVersion, + -1) + }, + Opts: func(t *testing.T, wantedVersion int) utils.SharePointOpts { + return utils.SharePointOpts{ + FolderPath: []string{testdata.SharePointLibraryFolder + "/"}, + } }, }, { - Name: "FolderRepoRefMatchesNothing", - Expected: []details.Entry{}, - Opts: utils.SharePointOpts{ - FolderPath: []string{testdata.SharePointLibraryPath.RR.Folder(true)}, + Name: "FolderRepoRefMatchesNothing", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return []details.Entry{} + }, + Opts: func(t *testing.T, wantedVersion int) utils.SharePointOpts { + return utils.SharePointOpts{ + FolderPath: []string{testdata.SharePointLibraryPath.RR.Folder(true)}, + } }, }, { Name: "ShortRef", - Expected: []details.Entry{ - testdata.SharePointLibraryItems[0], - testdata.SharePointLibraryItems[1], + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.SharePointService, + path.LibrariesCategory, + wantedVersion, + 0, 1) }, - Opts: utils.SharePointOpts{ - FileName: []string{ - testdata.SharePointLibraryItems[0].ShortRef, - testdata.SharePointLibraryItems[1].ShortRef, - }, + Opts: func(t *testing.T, wantedVersion int) utils.SharePointOpts { + deets := testdata.GetDeetsForVersion( + t, + path.SharePointService, + path.LibrariesCategory, + wantedVersion) + + return utils.SharePointOpts{ + FileName: []string{ + deets[0].ShortRef, + deets[1].ShortRef, + }, + } }, }, { - Name: "SingleItem", - Expected: []details.Entry{testdata.SharePointLibraryItems[0]}, - Opts: utils.SharePointOpts{ - FileName: []string{ - testdata.SharePointLibraryItems[0].SharePoint.ItemName, - }, + Name: "SingleItem", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.SharePointService, + path.LibrariesCategory, + wantedVersion, + 0) + }, + Opts: func(t *testing.T, wantedVersion int) utils.SharePointOpts { + deets := testdata.GetDeetsForVersion( + t, + path.SharePointService, + path.LibrariesCategory, + wantedVersion) + + return utils.SharePointOpts{ + FileName: []string{ + deets[0].SharePoint.ItemName, + }, + } }, }, { Name: "MultipleItems", - Expected: []details.Entry{ - testdata.SharePointLibraryItems[0], - testdata.SharePointLibraryItems[1], + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.SharePointService, + path.LibrariesCategory, + wantedVersion, + 0, 1) }, - Opts: utils.SharePointOpts{ - FileName: []string{ - testdata.SharePointLibraryItems[0].SharePoint.ItemName, - testdata.SharePointLibraryItems[1].SharePoint.ItemName, - }, + Opts: func(t *testing.T, wantedVersion int) utils.SharePointOpts { + deets := testdata.GetDeetsForVersion( + t, + path.SharePointService, + path.LibrariesCategory, + wantedVersion) + + return utils.SharePointOpts{ + FileName: []string{ + 
deets[0].SharePoint.ItemName, + deets[1].SharePoint.ItemName, + }, + } }, }, { - Name: "ItemRefMatchesNothing", - Expected: []details.Entry{}, - Opts: utils.SharePointOpts{ - FileName: []string{ - testdata.SharePointLibraryItems[0].ItemRef, - }, + Name: "ItemRefMatchesNothing", + Expected: func(t *testing.T, wantedVersion int) []details.Entry { + return []details.Entry{} + }, + Opts: func(t *testing.T, wantedVersion int) utils.SharePointOpts { + deets := testdata.GetDeetsForVersion( + t, + path.SharePointService, + path.LibrariesCategory, + wantedVersion) + + return utils.SharePointOpts{ + FileName: []string{ + deets[0].ItemRef, + }, + } }, }, - // { - // Name: "CreatedBefore", - // Expected: []details.DetailsEntry{testdata.SharePointLibraryItems[1]}, - // Opts: utils.SharePointOpts{ - // FileCreatedBefore: dttm.Format(testdata.Time1.Add(time.Second)), - // }, - // }, + //{ + // Name: "CreatedBefore", + // Expected: func(t *testing.T, wantedVersion int) []details.DetailsEntry { + // return testdata.GetItemsForVersion( + // t, + // path.SharePointService, + // path.LibrariesCategory, + // wantedVersion, + // 1) + // }, + // Opts: func(t *testing.T, wantedVersion int) utils.SharePointOpts { + // return utils.SharePointOpts{ + // FileCreatedBefore: dttm.Format(testdata.Time1.Add(time.Second)), + // } + // }, + //}, } ) @@ -611,10 +1011,6 @@ func (bg *MockBackupGetter) GetBackupDetails( ctx context.Context, backupID string, ) (*details.Details, *backup.Backup, *fault.Bus) { - if bg == nil { - return testdata.GetDetailsSet(), nil, fault.New(true) - } - return nil, nil, fault.New(false).Fail(clues.New("unexpected call to mock")) } @@ -629,3 +1025,15 @@ func (bg *MockBackupGetter) GetBackupErrors( return nil, nil, fault.New(false).Fail(clues.New("unexpected call to mock")) } + +type VersionedBackupGetter struct { + *MockBackupGetter + Details *details.Details +} + +func (bg VersionedBackupGetter) GetBackupDetails( + ctx context.Context, + backupID string, +) (*details.Details, *backup.Backup, *fault.Bus) { + return bg.Details, nil, fault.New(true) +} diff --git a/src/cmd/factory/impl/common.go b/src/cmd/factory/impl/common.go index c267a828a..de5e61915 100644 --- a/src/cmd/factory/impl/common.go +++ b/src/cmd/factory/impl/common.go @@ -11,10 +11,10 @@ import ( "github.com/google/uuid" "github.com/alcionai/corso/src/cli/print" - "github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/ptr" + "github.com/alcionai/corso/src/internal/common/str" "github.com/alcionai/corso/src/internal/connector" exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock" "github.com/alcionai/corso/src/internal/data" @@ -116,7 +116,7 @@ func getGCAndVerifyResourceOwner( idname.Provider, error, ) { - tid := common.First(Tenant, os.Getenv(account.AzureTenantID)) + tid := str.First(Tenant, os.Getenv(account.AzureTenantID)) if len(Tenant) == 0 { Tenant = tid diff --git a/src/cmd/getM365/exchange/get_item.go b/src/cmd/getM365/exchange/get_item.go index 1d644f97e..cc6e8cd6a 100644 --- a/src/cmd/getM365/exchange/get_item.go +++ b/src/cmd/getM365/exchange/get_item.go @@ -15,7 +15,8 @@ import ( "github.com/spf13/cobra" "github.com/alcionai/corso/src/cli/utils" - "github.com/alcionai/corso/src/internal/common" + "github.com/alcionai/corso/src/internal/common/ptr" + "github.com/alcionai/corso/src/internal/common/str" "github.com/alcionai/corso/src/pkg/account" 
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/credentials" @@ -54,7 +55,7 @@ func handleExchangeCmd(cmd *cobra.Command, args []string) error { return nil } - tid := common.First(tenant, os.Getenv(account.AzureTenantID)) + tid := str.First(tenant, os.Getenv(account.AzureTenantID)) ctx := clues.Add( cmd.Context(), @@ -111,9 +112,7 @@ func runDisplayM365JSON( return err } - str := string(bs) - - err = sw.WriteStringValue("", &str) + err = sw.WriteStringValue("", ptr.To(string(bs))) if err != nil { return clues.Wrap(err, "Error writing string value: "+itemID) } diff --git a/src/cmd/getM365/onedrive/get_item.go b/src/cmd/getM365/onedrive/get_item.go index ab1378ab9..414f50694 100644 --- a/src/cmd/getM365/onedrive/get_item.go +++ b/src/cmd/getM365/onedrive/get_item.go @@ -19,8 +19,8 @@ import ( . "github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/utils" - "github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common/ptr" + "github.com/alcionai/corso/src/internal/common/str" "github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/credentials" @@ -57,7 +57,7 @@ func handleOneDriveCmd(cmd *cobra.Command, args []string) error { return nil } - tid := common.First(tenant, os.Getenv(account.AzureTenantID)) + tid := str.First(tenant, os.Getenv(account.AzureTenantID)) ctx := clues.Add( cmd.Context(), diff --git a/src/cmd/purge/purge.go b/src/cmd/purge/purge.go index d9f1133c1..337ea6f46 100644 --- a/src/cmd/purge/purge.go +++ b/src/cmd/purge/purge.go @@ -11,8 +11,8 @@ import ( . "github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/utils" - "github.com/alcionai/corso/src/internal/common" "github.com/alcionai/corso/src/internal/common/dttm" + "github.com/alcionai/corso/src/internal/common/str" "github.com/alcionai/corso/src/internal/connector" "github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/internal/connector/onedrive" @@ -263,7 +263,7 @@ func getGC(ctx context.Context) (account.Account, *connector.GraphConnector, err // get account info m365Cfg := account.M365Config{ M365: credentials.GetM365(), - AzureTenantID: common.First(tenant, os.Getenv(account.AzureTenantID)), + AzureTenantID: str.First(tenant, os.Getenv(account.AzureTenantID)), } acct, err := account.NewAccount(account.ProviderM365, m365Cfg) diff --git a/src/go.mod b/src/go.mod index 94777caa6..b64004f3d 100644 --- a/src/go.mod +++ b/src/go.mod @@ -8,7 +8,7 @@ require ( github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0 github.com/alcionai/clues v0.0.0-20230406223931-f48777f4773c github.com/armon/go-metrics v0.4.1 - github.com/aws/aws-sdk-go v1.44.264 + github.com/aws/aws-sdk-go v1.44.266 github.com/aws/aws-xray-sdk-go v1.8.1 github.com/cenkalti/backoff/v4 v4.2.1 github.com/google/uuid v1.3.0 @@ -19,7 +19,7 @@ require ( github.com/microsoft/kiota-http-go v1.0.0 github.com/microsoft/kiota-serialization-form-go v1.0.0 github.com/microsoft/kiota-serialization-json-go v1.0.0 - github.com/microsoftgraph/msgraph-sdk-go v1.1.0 + github.com/microsoftgraph/msgraph-sdk-go v1.2.0 github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0 github.com/pkg/errors v0.9.1 github.com/rudderlabs/analytics-go v3.3.3+incompatible @@ -27,7 +27,7 @@ require ( github.com/spf13/cobra v1.7.0 github.com/spf13/pflag v1.0.5 github.com/spf13/viper v1.15.0 - github.com/stretchr/testify v1.8.2 + github.com/stretchr/testify v1.8.3 
github.com/tidwall/pretty v1.2.1 github.com/tomlazar/table v0.1.2 github.com/vbauerster/mpb/v8 v8.1.6 @@ -117,7 +117,7 @@ require ( go.uber.org/multierr v1.11.0 // indirect golang.org/x/crypto v0.8.0 // indirect golang.org/x/mod v0.10.0 // indirect - golang.org/x/net v0.10.0 // indirect + golang.org/x/net v0.10.0 golang.org/x/sync v0.2.0 // indirect golang.org/x/sys v0.8.0 // indirect golang.org/x/text v0.9.0 // indirect diff --git a/src/go.sum b/src/go.sum index 407ab2941..03d94a7b9 100644 --- a/src/go.sum +++ b/src/go.sum @@ -66,8 +66,8 @@ github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA= github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4= -github.com/aws/aws-sdk-go v1.44.264 h1:5klL62ebn6uv3oJ0ixF7K12hKItj8lV3QqWeQPlkFSs= -github.com/aws/aws-sdk-go v1.44.264/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= +github.com/aws/aws-sdk-go v1.44.266 h1:MWd775dcYf7NrwgcHLtlsIbWoWkX8p4vomfNHr88zH0= +github.com/aws/aws-sdk-go v1.44.266/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo= github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A= github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= @@ -285,8 +285,8 @@ github.com/microsoft/kiota-serialization-json-go v1.0.0 h1:snT+SwS/R4CMjkmj7mjCH github.com/microsoft/kiota-serialization-json-go v1.0.0/go.mod h1:psfgIfqWm/9P1JAdl2cxHHIg9SdEtYHOetfDLIQ5/dw= github.com/microsoft/kiota-serialization-text-go v1.0.0 h1:XOaRhAXy+g8ZVpcq7x7a0jlETWnWrEum0RhmbYrTFnA= github.com/microsoft/kiota-serialization-text-go v1.0.0/go.mod h1:sM1/C6ecnQ7IquQOGUrUldaO5wj+9+v7G2W3sQ3fy6M= -github.com/microsoftgraph/msgraph-sdk-go v1.1.0 h1:NtFsFVIt8lpXcTlRbLG1WuCOTzltzS5j+U8Fecqdnr4= -github.com/microsoftgraph/msgraph-sdk-go v1.1.0/go.mod h1:NIk9kSn7lQ5Hnhhn3FM4NrJWz54JfDHD0JvhJZky27g= +github.com/microsoftgraph/msgraph-sdk-go v1.2.0 h1:SZGcs6aoxUt5mSSNlgKe7j4N2BhKN1w2DzYBEMLtMCc= +github.com/microsoftgraph/msgraph-sdk-go v1.2.0/go.mod h1:NIk9kSn7lQ5Hnhhn3FM4NrJWz54JfDHD0JvhJZky27g= github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0 h1:7NWTfyXvOjoizW7PmxNp3+8wCKPgpODs/D1cUZ3fkAY= github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0/go.mod h1:tQb4q3YMIj2dWhhXhQSJ4ELpol931ANKzHSYK5kX1qE= github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34= @@ -387,8 +387,8 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= -github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY= +github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/subosito/gotenv v1.4.2 h1:X1TuBLAMDFbaTAChgCBLu3DU3UPyELpnF2jjJ2cz/S8= github.com/subosito/gotenv v1.4.2/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= github.com/tg123/go-htpasswd 
v1.2.1 h1:i4wfsX1KvvkyoMiHZzjS0VzbAPWfxzI8INcZAKtutoU= diff --git a/src/internal/common/slices.go b/src/internal/common/slices.go deleted file mode 100644 index 73c7c951e..000000000 --- a/src/internal/common/slices.go +++ /dev/null @@ -1,23 +0,0 @@ -package common - -// TODO: can be replaced with slices.Contains() -func ContainsString(super []string, sub string) bool { - for _, s := range super { - if s == sub { - return true - } - } - - return false -} - -// First returns the first non-zero valued string -func First(vs ...string) string { - for _, v := range vs { - if len(v) > 0 { - return v - } - } - - return "" -} diff --git a/src/internal/common/slices_test.go b/src/internal/common/slices_test.go deleted file mode 100644 index 9aae236a5..000000000 --- a/src/internal/common/slices_test.go +++ /dev/null @@ -1,30 +0,0 @@ -package common_test - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/suite" - - "github.com/alcionai/corso/src/internal/common" - "github.com/alcionai/corso/src/internal/tester" -) - -type CommonSlicesSuite struct { - tester.Suite -} - -func TestCommonSlicesSuite(t *testing.T) { - s := &CommonSlicesSuite{Suite: tester.NewUnitSuite(t)} - suite.Run(t, s) -} - -func (suite *CommonSlicesSuite) TestContainsString() { - t := suite.T() - target := "fnords" - good := []string{"fnords"} - bad := []string{"foo", "bar"} - - assert.True(t, common.ContainsString(good, target)) - assert.False(t, common.ContainsString(bad, target)) -} diff --git a/src/internal/common/str/str.go b/src/internal/common/str/str.go new file mode 100644 index 000000000..9dcd46af8 --- /dev/null +++ b/src/internal/common/str/str.go @@ -0,0 +1,58 @@ +package str + +import ( + "fmt" + "strconv" + + "github.com/alcionai/clues" + + "github.com/alcionai/corso/src/internal/common/ptr" +) + +// parseBool returns the bool value represented by the string +// or false on error +func ParseBool(v string) bool { + s, err := strconv.ParseBool(v) + if err != nil { + return false + } + + return s +} + +func FromMapToAny(k string, m map[string]any) (string, error) { + if len(m) == 0 { + return "", clues.New("missing entry").With("map_key", k) + } + + return FromAny(m[k]) +} + +func FromAny(a any) (string, error) { + if a == nil { + return "", clues.New("missing value") + } + + sp, ok := a.(*string) + if ok { + return ptr.Val(sp), nil + } + + s, ok := a.(string) + if ok { + return s, nil + } + + return "", clues.New(fmt.Sprintf("unexpected type: %T", a)) +} + +// First returns the first non-zero valued string +func First(vs ...string) string { + for _, v := range vs { + if len(v) > 0 { + return v + } + } + + return "" +} diff --git a/src/internal/common/strings.go b/src/internal/common/strings.go deleted file mode 100644 index e8db07c94..000000000 --- a/src/internal/common/strings.go +++ /dev/null @@ -1,14 +0,0 @@ -package common - -import "strconv" - -// parseBool returns the bool value represented by the string -// or false on error -func ParseBool(v string) bool { - s, err := strconv.ParseBool(v) - if err != nil { - return false - } - - return s -} diff --git a/src/internal/common/tform/tform.go b/src/internal/common/tform/tform.go new file mode 100644 index 000000000..64b43c316 --- /dev/null +++ b/src/internal/common/tform/tform.go @@ -0,0 +1,25 @@ +package tform + +import ( + "fmt" + + "github.com/alcionai/clues" +) + +func FromMapToAny[T any](k string, m map[string]any) (T, error) { + v, ok := m[k] + if !ok { + return *new(T), clues.New("entry not found") + } + + if v == nil 
{ + return *new(T), clues.New("nil entry") + } + + vt, ok := v.(T) + if !ok { + return *new(T), clues.New(fmt.Sprintf("unexpected type: %T", v)) + } + + return vt, nil +} diff --git a/src/internal/connector/exchange/contact_folder_cache.go b/src/internal/connector/exchange/contact_folder_cache.go index 79cee76b8..5526bf7b7 100644 --- a/src/internal/connector/exchange/contact_folder_cache.go +++ b/src/internal/connector/exchange/contact_folder_cache.go @@ -11,7 +11,29 @@ import ( "github.com/alcionai/corso/src/pkg/path" ) -var _ graph.ContainerResolver = &contactFolderCache{} +var ( + _ graph.ContainerResolver = &contactFolderCache{} + _ containerRefresher = &contactRefresher{} +) + +type contactRefresher struct { + getter containerGetter + userID string +} + +func (r *contactRefresher) refreshContainer( + ctx context.Context, + id string, +) (graph.CachedContainer, error) { + c, err := r.getter.GetContainerByID(ctx, r.userID, id) + if err != nil { + return nil, clues.Stack(err) + } + + f := graph.NewCacheFolder(c, nil, nil) + + return &f, nil +} type contactFolderCache struct { *containerResolver @@ -34,7 +56,7 @@ func (cfc *contactFolderCache) populateContactRoot( f, path.Builder{}.Append(ptr.Val(f.GetId())), // path of IDs path.Builder{}.Append(baseContainerPath...)) // display location - if err := cfc.addFolder(temp); err != nil { + if err := cfc.addFolder(&temp); err != nil { return clues.Wrap(err, "adding resolver dir").WithClues(ctx) } @@ -77,7 +99,10 @@ func (cfc *contactFolderCache) init( } if cfc.containerResolver == nil { - cfc.containerResolver = newContainerResolver() + cfc.containerResolver = newContainerResolver(&contactRefresher{ + userID: cfc.userID, + getter: cfc.getter, + }) } return cfc.populateContactRoot(ctx, baseNode, baseContainerPath) diff --git a/src/internal/connector/exchange/container_resolver.go b/src/internal/connector/exchange/container_resolver.go index 8ec4f02ff..0e2730449 100644 --- a/src/internal/connector/exchange/container_resolver.go +++ b/src/internal/connector/exchange/container_resolver.go @@ -8,6 +8,7 @@ import ( "github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/pkg/fault" + "github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/path" ) @@ -26,11 +27,18 @@ type containersEnumerator interface { EnumerateContainers( ctx context.Context, userID, baseDirID string, - fn func(graph.CacheFolder) error, + fn func(graph.CachedContainer) error, errs *fault.Bus, ) error } +type containerRefresher interface { + refreshContainer( + ctx context.Context, + dirID string, + ) (graph.CachedContainer, error) +} + // --------------------------------------------------------------------------- // controller // --------------------------------------------------------------------------- @@ -40,59 +48,243 @@ type containersEnumerator interface { // folders if each folder is only a single character. 
const maxIterations = 300 -func newContainerResolver() *containerResolver { +func newContainerResolver(refresher containerRefresher) *containerResolver { return &containerResolver{ - cache: map[string]graph.CachedContainer{}, + cache: map[string]graph.CachedContainer{}, + refresher: refresher, } } type containerResolver struct { - cache map[string]graph.CachedContainer + cache map[string]graph.CachedContainer + refresher containerRefresher } func (cr *containerResolver) IDToPath( ctx context.Context, folderID string, ) (*path.Builder, *path.Builder, error) { - return cr.idToPath(ctx, folderID, 0) + ctx = clues.Add(ctx, "container_id", folderID) + + c, ok := cr.cache[folderID] + if !ok { + return nil, nil, clues.New("container not cached").WithClues(ctx) + } + + p := c.Path() + if p == nil { + return nil, nil, clues.New("cached container has no path").WithClues(ctx) + } + + return p, c.Location(), nil +} + +// refreshContainer attempts to fetch the container with the given ID from Graph +// API. Returns a graph.CachedContainer if the container was found. If the +// container was deleted, returns nil, true, nil to note the container should +// be removed from the cache. +func (cr *containerResolver) refreshContainer( + ctx context.Context, + id string, +) (graph.CachedContainer, bool, error) { + ctx = clues.Add(ctx, "refresh_container_id", id) + logger.Ctx(ctx).Debug("refreshing container") + + if cr.refresher == nil { + return nil, false, clues.New("nil refresher").WithClues(ctx) + } + + c, err := cr.refresher.refreshContainer(ctx, id) + if err != nil && graph.IsErrDeletedInFlight(err) { + logger.Ctx(ctx).Debug("container deleted") + return nil, true, nil + } else if err != nil { + // This is some other error, just return it. + return nil, false, clues.Wrap(err, "refreshing container").WithClues(ctx) + } + + return c, false, nil +} + +// recoverContainer attempts to fetch a missing container from Graph API and +// populate the path for it. It returns +// - the ID path for the folder +// - the display name path for the folder +// - if the folder was deleted +// - any error that occurred +// +// If the folder is marked as deleted, child folders of this folder should be +// deleted if they haven't been moved to another folder. +func (cr *containerResolver) recoverContainer( + ctx context.Context, + folderID string, + depth int, +) (*path.Builder, *path.Builder, bool, error) { + c, deleted, err := cr.refreshContainer(ctx, folderID) + if err != nil { + return nil, nil, false, clues.Wrap(err, "fetching uncached container") + } + + if deleted { + logger.Ctx(ctx).Debug("fetching uncached container showed it was deleted") + return nil, nil, deleted, err + } + + if err := cr.addFolder(c); err != nil { + return nil, nil, false, clues.Wrap(err, "adding new container").WithClues(ctx) + } + + // Retry populating this container's paths. + // + // TODO(ashmrtn): May want to bump the depth here just so we don't get stuck + // retrying too much if for some reason things keep moving around? 
+ resolved, err := cr.idToPath(ctx, folderID, depth) + if err != nil { + err = clues.Wrap(err, "repopulating uncached container") + } + + return resolved.idPath, resolved.locPath, resolved.deleted, err +} + +type resolvedPath struct { + idPath *path.Builder + locPath *path.Builder + cached bool + deleted bool } func (cr *containerResolver) idToPath( ctx context.Context, folderID string, depth int, -) (*path.Builder, *path.Builder, error) { +) (resolvedPath, error) { ctx = clues.Add(ctx, "container_id", folderID) if depth >= maxIterations { - return nil, nil, clues.New("path contains cycle or is too tall").WithClues(ctx) + return resolvedPath{ + idPath: nil, + locPath: nil, + cached: false, + deleted: false, + }, clues.New("path contains cycle or is too tall").WithClues(ctx) } c, ok := cr.cache[folderID] if !ok { - return nil, nil, clues.New("folder not cached").WithClues(ctx) + pth, loc, deleted, err := cr.recoverContainer(ctx, folderID, depth) + if err != nil { + err = clues.Stack(err) + } + + return resolvedPath{ + idPath: pth, + locPath: loc, + cached: false, + deleted: deleted, + }, err } p := c.Path() if p != nil { - return p, c.Location(), nil + return resolvedPath{ + idPath: p, + locPath: c.Location(), + cached: true, + deleted: false, + }, nil } - parentPath, parentLoc, err := cr.idToPath( + resolved, err := cr.idToPath( ctx, ptr.Val(c.GetParentFolderId()), depth+1) if err != nil { - return nil, nil, clues.Wrap(err, "retrieving parent folder") + return resolvedPath{ + idPath: nil, + locPath: nil, + cached: true, + deleted: false, + }, clues.Wrap(err, "retrieving parent container") } - fullPath := parentPath.Append(ptr.Val(c.GetId())) + if !resolved.cached { + logger.Ctx(ctx).Debug("parent container was refreshed") + + newContainer, shouldDelete, err := cr.refreshContainer(ctx, folderID) + if err != nil { + return resolvedPath{ + idPath: nil, + locPath: nil, + cached: true, + deleted: false, + }, clues.Wrap(err, "refreshing container").WithClues(ctx) + } + + if shouldDelete { + logger.Ctx(ctx).Debug("refreshing container showed it was deleted") + delete(cr.cache, folderID) + + return resolvedPath{ + idPath: nil, + locPath: nil, + cached: true, + deleted: true, + }, nil + } + + // See if the newer version of the current container we got back has + // changed. If it has then it could be that the container was moved prior to + // deleting the parent and we just hit some eventual consistency case in + // Graph. + // + // TODO(ashmrtn): May want to bump the depth here just so we don't get stuck + // retrying too much if for some reason things keep moving around? + if ptr.Val(newContainer.GetParentFolderId()) != ptr.Val(c.GetParentFolderId()) || + ptr.Val(newContainer.GetDisplayName()) != ptr.Val(c.GetDisplayName()) { + delete(cr.cache, folderID) + + if err := cr.addFolder(newContainer); err != nil { + return resolvedPath{ + idPath: nil, + locPath: nil, + cached: false, + deleted: false, + }, clues.Wrap(err, "updating cached container").WithClues(ctx) + } + + return cr.idToPath(ctx, folderID, depth) + } + } + + // If the parent wasn't found and refreshing the current container produced no + // diffs then delete the current container on the assumption that the parent + // was deleted and the current container will later get deleted via eventual + // consistency. If we're wrong then the container will get picked up again on + // the next backup.
+ if resolved.deleted { + logger.Ctx(ctx).Debug("deleting container since parent was deleted") + delete(cr.cache, folderID) + + return resolvedPath{ + idPath: nil, + locPath: nil, + cached: true, + deleted: true, + }, nil + } + + fullPath := resolved.idPath.Append(ptr.Val(c.GetId())) c.SetPath(fullPath) - locPath := parentLoc.Append(ptr.Val(c.GetDisplayName())) + locPath := resolved.locPath.Append(ptr.Val(c.GetDisplayName())) c.SetLocation(locPath) - return fullPath, locPath, nil + return resolvedPath{ + idPath: fullPath, + locPath: locPath, + cached: true, + deleted: false, + }, nil } // PathInCache is a utility function to return m365ID of a folder if the @@ -139,14 +331,14 @@ func (cr *containerResolver) LocationInCache(pathString string) (string, bool) { // addFolder adds a folder to the cache with the given ID. If the item is // already in the cache does nothing. The path for the item is not modified. -func (cr *containerResolver) addFolder(cf graph.CacheFolder) error { +func (cr *containerResolver) addFolder(cf graph.CachedContainer) error { // Only require a non-nil non-empty parent if the path isn't already populated. if cf.Path() != nil { - if err := checkIDAndName(cf.Container); err != nil { + if err := checkIDAndName(cf); err != nil { return clues.Wrap(err, "adding item to cache") } } else { - if err := checkRequiredValues(cf.Container); err != nil { + if err := checkRequiredValues(cf); err != nil { return clues.Wrap(err, "adding item to cache") } } @@ -155,7 +347,7 @@ func (cr *containerResolver) addFolder(cf graph.CacheFolder) error { return nil } - cr.cache[ptr.Val(cf.GetId())] = &cf + cr.cache[ptr.Val(cf.GetId())] = cf return nil } @@ -176,7 +368,7 @@ func (cr *containerResolver) AddToCache( ctx context.Context, f graph.Container, ) error { - temp := graph.CacheFolder{ + temp := &graph.CacheFolder{ Container: f, } if err := cr.addFolder(temp); err != nil { @@ -185,7 +377,7 @@ func (cr *containerResolver) AddToCache( // Populate the path for this entry so calls to PathInCache succeed no matter // when they're made. 
- _, _, err := cr.IDToPath(ctx, ptr.Val(f.GetId())) + _, err := cr.idToPath(ctx, ptr.Val(f.GetId()), 0) if err != nil { return clues.Wrap(err, "adding cache entry") } @@ -208,7 +400,7 @@ func (cr *containerResolver) populatePaths( return el.Failure() } - _, _, err := cr.IDToPath(ctx, ptr.Val(f.GetId())) + _, err := cr.idToPath(ctx, ptr.Val(f.GetId()), 0) if err != nil { err = clues.Wrap(err, "populating path") el.AddRecoverable(err) diff --git a/src/internal/connector/exchange/container_resolver_test.go b/src/internal/connector/exchange/container_resolver_test.go index de050d25a..a79daa58f 100644 --- a/src/internal/connector/exchange/container_resolver_test.go +++ b/src/internal/connector/exchange/container_resolver_test.go @@ -1,6 +1,7 @@ package exchange import ( + "context" "fmt" stdpath "path" "testing" @@ -232,8 +233,8 @@ func (suite *FolderCacheUnitSuite) TestAddFolder() { for _, test := range table { suite.Run(test.name, func() { - fc := newContainerResolver() - err := fc.addFolder(test.cf) + fc := newContainerResolver(nil) + err := fc.addFolder(&test.cf) test.check(suite.T(), err, clues.ToCore(err)) }) } @@ -293,7 +294,7 @@ func resolverWithContainers(numContainers int, useIDInPath bool) (*containerReso containers[i].expectedLocation = stdpath.Join(containers[i-1].expectedLocation, dn) } - resolver := newContainerResolver() + resolver := newContainerResolver(nil) for _, c := range containers { resolver.cache[c.id] = c @@ -302,6 +303,37 @@ func resolverWithContainers(numContainers int, useIDInPath bool) (*containerReso return resolver, containers } +// --------------------------------------------------------------------------- +// mock container refresher +// --------------------------------------------------------------------------- + +type refreshResult struct { + err error + c graph.CachedContainer +} + +type mockContainerRefresher struct { + // Folder ID -> result + entries map[string]refreshResult +} + +func (r mockContainerRefresher) refreshContainer( + ctx context.Context, + id string, +) (graph.CachedContainer, error) { + rr, ok := r.entries[id] + if !ok { + // May not be this precise error, but it's easy to get a handle on. 
+ return nil, graph.ErrDeletedInFlight + } + + if rr.err != nil { + return nil, rr.err + } + + return rr.c, nil +} + // --------------------------------------------------------------------------- // configured unit suite // --------------------------------------------------------------------------- @@ -326,6 +358,160 @@ func TestConfiguredFolderCacheUnitSuite(t *testing.T) { suite.Run(t, &ConfiguredFolderCacheUnitSuite{Suite: tester.NewUnitSuite(t)}) } +func (suite *ConfiguredFolderCacheUnitSuite) TestRefreshContainer_RefreshParent() { + ctx, flush := tester.NewContext() + defer flush() + + t := suite.T() + + resolver, containers := resolverWithContainers(4, true) + almostLast := containers[len(containers)-2] + last := containers[len(containers)-1] + + refresher := mockContainerRefresher{ + entries: map[string]refreshResult{ + almostLast.id: {c: almostLast}, + last.id: {c: last}, + }, + } + + resolver.refresher = refresher + + delete(resolver.cache, almostLast.id) + + ferrs := fault.New(true) + err := resolver.populatePaths(ctx, ferrs) + require.NoError(t, err, "populating paths", clues.ToCore(err)) + + p, l, err := resolver.IDToPath(ctx, last.id) + require.NoError(t, err, "getting paths", clues.ToCore(err)) + + assert.Equal(t, last.expectedPath, p.String()) + assert.Equal(t, last.expectedLocation, l.String()) +} + +func (suite *ConfiguredFolderCacheUnitSuite) TestRefreshContainer_RefreshParent_NotFoundDeletes() { + ctx, flush := tester.NewContext() + defer flush() + + t := suite.T() + + resolver, containers := resolverWithContainers(4, true) + almostLast := containers[len(containers)-2] + last := containers[len(containers)-1] + + refresher := mockContainerRefresher{ + entries: map[string]refreshResult{ + last.id: {c: last}, + }, + } + + resolver.refresher = refresher + + delete(resolver.cache, almostLast.id) + + ferrs := fault.New(true) + err := resolver.populatePaths(ctx, ferrs) + require.NoError(t, err, "populating paths", clues.ToCore(err)) + + _, _, err = resolver.IDToPath(ctx, last.id) + assert.Error(t, err, "getting paths", clues.ToCore(err)) +} + +func (suite *ConfiguredFolderCacheUnitSuite) TestRefreshContainer_RefreshAncestor_NotFoundDeletes() { + ctx, flush := tester.NewContext() + defer flush() + + t := suite.T() + + resolver, containers := resolverWithContainers(4, true) + gone := containers[0] + child := containers[1] + last := containers[len(containers)-1] + + refresher := mockContainerRefresher{ + entries: map[string]refreshResult{ + child.id: {c: child}, + }, + } + + resolver.refresher = refresher + + delete(resolver.cache, gone.id) + + ferrs := fault.New(true) + err := resolver.populatePaths(ctx, ferrs) + require.NoError(t, err, "populating paths", clues.ToCore(err)) + + _, _, err = resolver.IDToPath(ctx, last.id) + assert.Error(t, err, "getting paths", clues.ToCore(err)) +} + +func (suite *ConfiguredFolderCacheUnitSuite) TestRefreshContainer_RefreshAncestor_NewParent() { + ctx, flush := tester.NewContext() + defer flush() + + t := suite.T() + + resolver, containers := resolverWithContainers(4, true) + other := containers[len(containers)-3] + gone := containers[len(containers)-2] + last := containers[len(containers)-1] + + expected := *last + expected.parentID = other.id + expected.expectedPath = stdpath.Join(other.expectedPath, expected.id) + expected.expectedLocation = stdpath.Join(other.expectedLocation, expected.displayName) + + refresher := mockContainerRefresher{ + entries: map[string]refreshResult{ + last.id: {c: &expected}, + }, + } + + resolver.refresher = 
refresher + + delete(resolver.cache, gone.id) + + ferrs := fault.New(true) + err := resolver.populatePaths(ctx, ferrs) + require.NoError(t, err, "populating paths", clues.ToCore(err)) + + p, l, err := resolver.IDToPath(ctx, last.id) + require.NoError(t, err, "getting paths", clues.ToCore(err)) + + assert.Equal(t, expected.expectedPath, p.String()) + assert.Equal(t, expected.expectedLocation, l.String()) +} + +func (suite *ConfiguredFolderCacheUnitSuite) TestRefreshContainer_RefreshFolder_FolderDeleted() { + ctx, flush := tester.NewContext() + defer flush() + + t := suite.T() + + resolver, containers := resolverWithContainers(4, true) + parent := containers[len(containers)-2] + last := containers[len(containers)-1] + + refresher := mockContainerRefresher{ + entries: map[string]refreshResult{ + parent.id: {c: parent}, + }, + } + + resolver.refresher = refresher + + delete(resolver.cache, parent.id) + + ferrs := fault.New(true) + err := resolver.populatePaths(ctx, ferrs) + require.NoError(t, err, "populating paths", clues.ToCore(err)) + + _, _, err = resolver.IDToPath(ctx, last.id) + assert.Error(t, err, "getting paths", clues.ToCore(err)) +} + func (suite *ConfiguredFolderCacheUnitSuite) TestDepthLimit() { ctx, flush := tester.NewContext() defer flush() @@ -350,7 +536,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestDepthLimit() { for _, test := range table { suite.Run(test.name, func() { resolver, containers := resolverWithContainers(test.numContainers, false) - _, _, err := resolver.IDToPath(ctx, containers[len(containers)-1].id) + _, err := resolver.idToPath(ctx, containers[len(containers)-1].id, 0) test.check(suite.T(), err, clues.ToCore(err)) }) } @@ -384,6 +570,9 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached ctx, flush := tester.NewContext() defer flush() + err := suite.fc.populatePaths(ctx, fault.New(true)) + require.NoError(suite.T(), err, clues.ToCore(err)) + for _, c := range suite.allContainers { suite.Run(ptr.Val(c.GetDisplayName()), func() { t := suite.T() @@ -396,10 +585,14 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached } } +// TODO(ashmrtn): Remove this since the same cache can do IDs or locations. func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderNoPathsCached_useID() { ctx, flush := tester.NewContext() defer flush() + err := suite.fcWithID.populatePaths(ctx, fault.New(true)) + require.NoError(suite.T(), err, clues.ToCore(err)) + for _, c := range suite.containersWithID { suite.Run(ptr.Val(c.GetDisplayName()), func() { t := suite.T() @@ -419,6 +612,9 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths() t := suite.T() c := suite.allContainers[len(suite.allContainers)-1] + err := suite.fc.populatePaths(ctx, fault.New(true)) + require.NoError(t, err, clues.ToCore(err)) + p, l, err := suite.fc.IDToPath(ctx, c.id) require.NoError(t, err, clues.ToCore(err)) assert.Equal(t, c.expectedPath, p.String()) @@ -432,6 +628,7 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths() assert.Equal(t, c.expectedLocation, l.String()) } +// TODO(ashmrtn): Remove this since the same cache can do IDs or locations. 
func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths_useID() { ctx, flush := tester.NewContext() defer flush() @@ -439,6 +636,9 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderCachesPaths_u t := suite.T() c := suite.containersWithID[len(suite.containersWithID)-1] + err := suite.fcWithID.populatePaths(ctx, fault.New(true)) + require.NoError(t, err, clues.ToCore(err)) + p, l, err := suite.fcWithID.IDToPath(ctx, c.id) require.NoError(t, err, clues.ToCore(err)) assert.Equal(t, c.expectedPath, p.String()) @@ -457,12 +657,21 @@ func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolderErrorsParentN defer flush() t := suite.T() - last := suite.allContainers[len(suite.allContainers)-1] almostLast := suite.allContainers[len(suite.allContainers)-2] delete(suite.fc.cache, almostLast.id) - _, _, err := suite.fc.IDToPath(ctx, last.id) + err := suite.fc.populatePaths(ctx, fault.New(true)) + assert.Error(t, err, clues.ToCore(err)) +} + +func (suite *ConfiguredFolderCacheUnitSuite) TestLookupCachedFolder_Errors_PathsNotBuilt() { + ctx, flush := tester.NewContext() + defer flush() + + t := suite.T() + + _, _, err := suite.fc.IDToPath(ctx, suite.allContainers[len(suite.allContainers)-1].id) assert.Error(t, err, clues.ToCore(err)) } diff --git a/src/internal/connector/exchange/data_collections_test.go b/src/internal/connector/exchange/data_collections_test.go index ef34de5ff..557df264f 100644 --- a/src/internal/connector/exchange/data_collections_test.go +++ b/src/internal/connector/exchange/data_collections_test.go @@ -597,7 +597,7 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression( bdayID string ) - fn := func(gcf graph.CacheFolder) error { + fn := func(gcf graph.CachedContainer) error { if ptr.Val(gcf.GetDisplayName()) == DefaultCalendar { calID = ptr.Val(gcf.GetId()) } diff --git a/src/internal/connector/exchange/event_calendar_cache.go b/src/internal/connector/exchange/event_calendar_cache.go index ac8e59548..5e99b4b39 100644 --- a/src/internal/connector/exchange/event_calendar_cache.go +++ b/src/internal/connector/exchange/event_calendar_cache.go @@ -27,7 +27,7 @@ func (ecc *eventCalendarCache) init( ctx context.Context, ) error { if ecc.containerResolver == nil { - ecc.containerResolver = newContainerResolver() + ecc.containerResolver = newContainerResolver(nil) } return ecc.populateEventRoot(ctx) @@ -49,7 +49,7 @@ func (ecc *eventCalendarCache) populateEventRoot(ctx context.Context) error { f, path.Builder{}.Append(ptr.Val(f.GetId())), // storage path path.Builder{}.Append(ptr.Val(f.GetDisplayName()))) // display location - if err := ecc.addFolder(temp); err != nil { + if err := ecc.addFolder(&temp); err != nil { return clues.Wrap(err, "initializing calendar resolver").WithClues(ctx) } @@ -98,7 +98,7 @@ func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container path.Builder{}.Append(ptr.Val(f.GetId())), // storage path path.Builder{}.Append(ptr.Val(f.GetDisplayName()))) // display location - if err := ecc.addFolder(temp); err != nil { + if err := ecc.addFolder(&temp); err != nil { return clues.Wrap(err, "adding container").WithClues(ctx) } diff --git a/src/internal/connector/exchange/mail_folder_cache.go b/src/internal/connector/exchange/mail_folder_cache.go index c2630a29a..062f91a23 100644 --- a/src/internal/connector/exchange/mail_folder_cache.go +++ b/src/internal/connector/exchange/mail_folder_cache.go @@ -10,7 +10,29 @@ import ( "github.com/alcionai/corso/src/pkg/path" ) -var _ 
graph.ContainerResolver = &mailFolderCache{} +var ( + _ graph.ContainerResolver = &mailFolderCache{} + _ containerRefresher = &mailRefresher{} +) + +type mailRefresher struct { + getter containerGetter + userID string +} + +func (r *mailRefresher) refreshContainer( + ctx context.Context, + id string, +) (graph.CachedContainer, error) { + c, err := r.getter.GetContainerByID(ctx, r.userID, id) + if err != nil { + return nil, clues.Stack(err) + } + + f := graph.NewCacheFolder(c, nil, nil) + + return &f, nil +} // mailFolderCache struct used to improve lookup of directories within exchange.Mail // cache map of cachedContainers where the key = M365ID @@ -29,7 +51,10 @@ func (mc *mailFolderCache) init( ctx context.Context, ) error { if mc.containerResolver == nil { - mc.containerResolver = newContainerResolver() + mc.containerResolver = newContainerResolver(&mailRefresher{ + userID: mc.userID, + getter: mc.getter, + }) } return mc.populateMailRoot(ctx) @@ -52,7 +77,7 @@ func (mc *mailFolderCache) populateMailRoot(ctx context.Context) error { // the user doesn't see in the regular UI for Exchange. path.Builder{}.Append(), // path of IDs path.Builder{}.Append()) // display location - if err := mc.addFolder(temp); err != nil { + if err := mc.addFolder(&temp); err != nil { return clues.Wrap(err, "adding resolver dir").WithClues(ctx) } diff --git a/src/internal/connector/graph/cache_container.go b/src/internal/connector/graph/cache_container.go index fd9c06aac..1e3467639 100644 --- a/src/internal/connector/graph/cache_container.go +++ b/src/internal/connector/graph/cache_container.go @@ -169,21 +169,22 @@ func CreateCalendarDisplayable(entry any, parentID string) *CalendarDisplayable // helper funcs // ========================================= -// checkRequiredValues is a helper function to ensure that -// all the pointers are set prior to being called. -func CheckRequiredValues(c Container) error { - id, ok := ptr.ValOK(c.GetId()) - if !ok { +// CheckIDAndName is a validator that ensures the ID +// and name are populated and not zero valued. +func CheckIDAndName(c Container) error { + if c == nil { + return clues.New("nil container") + } + + id := ptr.Val(c.GetId()) + if len(id) == 0 { return clues.New("container missing ID") } - if _, ok := ptr.ValOK(c.GetDisplayName()); !ok { + dn := ptr.Val(c.GetDisplayName()) + if len(dn) == 0 { return clues.New("container missing display name").With("container_id", id) } - if _, ok := ptr.ValOK(c.GetParentFolderId()); !ok { - return clues.New("container missing parent ID").With("container_id", id) - } - return nil } diff --git a/src/internal/connector/graph/concurrency_middleware.go b/src/internal/connector/graph/concurrency_middleware.go index 2756a60c6..ba2a08fa6 100644 --- a/src/internal/connector/graph/concurrency_middleware.go +++ b/src/internal/connector/graph/concurrency_middleware.go @@ -3,7 +3,9 @@ package graph import ( "context" "net/http" + "strconv" "sync" + "time" "github.com/alcionai/clues" khttp "github.com/microsoft/kiota-http-go" @@ -200,3 +202,144 @@ func (mw *RateLimiterMiddleware) Intercept( QueueRequest(req.Context()) return pipeline.Next(req, middlewareIndex) } + +// --------------------------------------------------------------------------- +// global throttle fencing +// --------------------------------------------------------------------------- + +// timedFence sets up a fence for a certain amount of time. +// the time can be extended arbitrarily. 
All processes blocked at + the fence will be let through when all timer extensions conclude. +type timedFence struct { + mu sync.Mutex + c chan struct{} + timers map[int64]*time.Timer +} + +func newTimedFence() *timedFence { + return &timedFence{ + mu: sync.Mutex{}, + c: nil, + timers: map[int64]*time.Timer{}, + } +} + +// Block until the fence is let down. +// if no fence is up, return immediately. +// returns if the ctx deadlines before the fence is let down. +func (tf *timedFence) Block(ctx context.Context) error { + // set to a local var to avoid race panics from tf.c + // getting set to nil between the conditional check and + // the read case. If c gets closed between those two + // points then the select case will exit immediately, + // as if we didn't block at all. + c := tf.c + + if c != nil { + select { + case <-ctx.Done(): + return clues.Wrap(ctx.Err(), "blocked on throttling fence") + case <-c: + } + } + + return nil +} + +// RaiseFence puts up a fence to block requests for the provided +// duration of time. Seconds are always added to the current time. +// Multiple calls to RaiseFence are not additive. ie: calling +// `RaiseFence(5); RaiseFence(1)` will keep the fence up until +// now+5 seconds, not now+6 seconds. When the last remaining fence +// is dropped, all currently blocked calls are allowed through. +func (tf *timedFence) RaiseFence(seconds time.Duration) { + tf.mu.Lock() + defer tf.mu.Unlock() + + if seconds < 1 { + return + } + + if tf.c == nil { + tf.c = make(chan struct{}) + } + + timer := time.NewTimer(seconds) + tid := time.Now().Add(seconds).UnixMilli() + tf.timers[tid] = timer + + go func(c <-chan time.Time, id int64) { + // wait for the timeout + <-c + + tf.mu.Lock() + defer tf.mu.Unlock() + + // remove the timer + delete(tf.timers, id) + + // if no timers remain, close the channel to drop the fence + // and set the fence channel to nil + if len(tf.timers) == 0 && tf.c != nil { + close(tf.c) + tf.c = nil + } + }(timer.C, tid) +} + +// throttlingMiddleware is used to ensure we don't overstep per-min request limits. +type throttlingMiddleware struct { + tf *timedFence +} + +func (mw *throttlingMiddleware) Intercept( + pipeline khttp.Pipeline, + middlewareIndex int, + req *http.Request, +) (*http.Response, error) { + err := mw.tf.Block(req.Context()) + if err != nil { + return nil, err + } + + resp, err := pipeline.Next(req, middlewareIndex) + if resp == nil || err != nil { + return resp, err + } + + seconds := getRetryAfterHeader(resp) + if seconds < 1 { + return resp, nil + } + + // if all prior conditions pass, we need to add a fence that blocks + // calls, globally, from progressing until the timeout retry-after + // passes. + mw.tf.RaiseFence(time.Duration(seconds) * time.Second) + + return resp, nil +} + +func getRetryAfterHeader(resp *http.Response) int { + if resp == nil || len(resp.Header) == 0 { + return -1 + } + + if resp.StatusCode != http.StatusTooManyRequests { + return -1 + } + + rah := resp.Header.Get(retryAfterHeader) + if len(rah) == 0 { + return -1 + } + + seconds, err := strconv.Atoi(rah) + if err != nil { + // the error itself is irrelevant, we only want + // to wait if we have a clear length of time to wait until.
+ return -1 + } + + return seconds +} diff --git a/src/internal/connector/graph/concurrency_middleware_test.go b/src/internal/connector/graph/concurrency_middleware_test.go index c5734a665..5e19f791a 100644 --- a/src/internal/connector/graph/concurrency_middleware_test.go +++ b/src/internal/connector/graph/concurrency_middleware_test.go @@ -8,23 +8,25 @@ import ( "testing" "time" + "github.com/alcionai/clues" khttp "github.com/microsoft/kiota-http-go" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" + "golang.org/x/net/context" "github.com/alcionai/corso/src/internal/tester" ) -type ConcurrencyLimiterUnitTestSuite struct { +type ConcurrencyMWUnitTestSuite struct { tester.Suite } func TestConcurrencyLimiterSuite(t *testing.T) { - suite.Run(t, &ConcurrencyLimiterUnitTestSuite{Suite: tester.NewUnitSuite(t)}) + suite.Run(t, &ConcurrencyMWUnitTestSuite{Suite: tester.NewUnitSuite(t)}) } -func (suite *ConcurrencyLimiterUnitTestSuite) TestConcurrencyLimiter() { +func (suite *ConcurrencyMWUnitTestSuite) TestConcurrencyLimiter() { t := suite.T() maxConcurrentRequests := 4 @@ -71,7 +73,7 @@ func (suite *ConcurrencyLimiterUnitTestSuite) TestConcurrencyLimiter() { wg.Wait() } -func (suite *ConcurrencyLimiterUnitTestSuite) TestInitializeConcurrencyLimiter() { +func (suite *ConcurrencyMWUnitTestSuite) TestInitializeConcurrencyLimiter() { t := suite.T() InitializeConcurrencyLimiter(2) @@ -80,7 +82,7 @@ func (suite *ConcurrencyLimiterUnitTestSuite) TestInitializeConcurrencyLimiter() assert.Equal(t, cap(concurrencyLim.semaphore), 2, "singleton semaphore capacity changed") } -func (suite *ConcurrencyLimiterUnitTestSuite) TestGenerateConcurrencyLimiter() { +func (suite *ConcurrencyMWUnitTestSuite) TestGenerateConcurrencyLimiter() { tests := []struct { name string cap int @@ -118,3 +120,201 @@ func (suite *ConcurrencyLimiterUnitTestSuite) TestGenerateConcurrencyLimiter() { }) } } + +func (suite *ConcurrencyMWUnitTestSuite) TestTimedFence_Block() { + ctx, flush := tester.NewContext() + defer flush() + + t := suite.T() + tf := newTimedFence() + + // raise multiple fences, the longest at 5 seconds + for i := -5; i < 6; i++ { + tf.RaiseFence(time.Duration(i) * time.Second) + } + + // -5..0 get dropped, 1..5 get added + assert.Len(t, tf.timers, 5) + + start := time.Now() + + var wg sync.WaitGroup + + wg.Add(5) + + for i := 0; i < 5; i++ { + go func(i int) { + defer wg.Done() + + err := tf.Block(ctx) + require.NoError(t, err, clues.ToCore(err)) + }(i) + } + + wg.Wait() + + // should block for 5 seconds. comparing to 4 to avoid + // race condition flakes. + assert.Less(t, 4.0, time.Since(start).Seconds()) +} + +func (suite *ConcurrencyMWUnitTestSuite) TestTimedFence_Block_ctxDeadline() { + ctx, flush := tester.NewContext() + defer flush() + + ctx, _ = context.WithDeadline(ctx, time.Now().Add(2*time.Second)) + + t := suite.T() + tf := newTimedFence() + + // raise multiple fences, the longest at 10 seconds + for i := 1; i < 6; i++ { + tf.RaiseFence(time.Duration(i*2) * time.Second) + } + + start := time.Now() + + var wg sync.WaitGroup + + wg.Add(5) + + for i := 0; i < 5; i++ { + go func(i int) { + defer wg.Done() + + err := tf.Block(ctx) + // should error from ctx deadline + require.Error(t, err, clues.ToCore(err)) + }(i) + } + + wg.Wait() + + // should block for 2 seconds. comparing to 3 to avoid + // race condition flakes. 
+ assert.Greater(t, 3.0, time.Since(start).Seconds()) +} + +type mockPipeline struct { + resp *http.Response + err error +} + +func (mp mockPipeline) Next(*http.Request, int) (*http.Response, error) { + return mp.resp, mp.err +} + +func (suite *ConcurrencyMWUnitTestSuite) TestThrottlingMiddleware() { + retryAfterNan := http.Header{} + retryAfterNan.Set(retryAfterHeader, "brunhuldi") + + retryAfterNeg1 := http.Header{} + retryAfterNeg1.Set(retryAfterHeader, "-1") + + retryAfter0 := http.Header{} + retryAfter0.Set(retryAfterHeader, "0") + + retryAfter5 := http.Header{} + retryAfter5.Set(retryAfterHeader, "5") + + goodPipe := mockPipeline{ + resp: &http.Response{ + StatusCode: http.StatusOK, + Header: http.Header{}, + }, + } + + table := []struct { + name string + pipeline mockPipeline + expectMinWait float64 + }{ + { + name: "2xx response", + pipeline: goodPipe, + expectMinWait: 0, + }, + { + name: "non-429 response", + pipeline: mockPipeline{ + resp: &http.Response{ + StatusCode: http.StatusBadGateway, + Header: retryAfter5, + }, + }, + expectMinWait: 0, + }, + { + name: "429 response w/out retry header", + pipeline: mockPipeline{ + resp: &http.Response{ + StatusCode: http.StatusTooManyRequests, + Header: http.Header{}, + }, + }, + expectMinWait: 0, + }, + { + name: "429 response w/ nan retry-after", + pipeline: mockPipeline{ + resp: &http.Response{ + StatusCode: http.StatusTooManyRequests, + Header: retryAfterNan, + }, + }, + expectMinWait: 0, + }, + { + name: "429 response w/ negative retry-after", + pipeline: mockPipeline{ + resp: &http.Response{ + StatusCode: http.StatusTooManyRequests, + Header: retryAfterNeg1, + }, + }, + expectMinWait: 0, + }, + { + name: "429 response w/ zero retry-after", + pipeline: mockPipeline{ + resp: &http.Response{ + StatusCode: http.StatusTooManyRequests, + Header: retryAfter0, + }, + }, + expectMinWait: 0, + }, + { + name: "429 response w/ positive retry-after", + pipeline: mockPipeline{ + resp: &http.Response{ + StatusCode: http.StatusTooManyRequests, + Header: retryAfter5, + }, + }, + expectMinWait: 4, + }, + } + for _, test := range table { + suite.Run(test.name, func() { + ctx, flush := tester.NewContext() + defer flush() + + t := suite.T() + tm := throttlingMiddleware{newTimedFence()} + + req := &http.Request{} + req = req.WithContext(ctx) + + start := time.Now() + + _, err := tm.Intercept(test.pipeline, 0, req) + require.NoError(t, err, clues.ToCore(err)) + + _, err = tm.Intercept(goodPipe, 0, req) + require.NoError(t, err, clues.ToCore(err)) + + assert.Less(t, test.expectMinWait, time.Since(start).Seconds()) + }) + } +} diff --git a/src/internal/connector/graph/http_wrapper.go b/src/internal/connector/graph/http_wrapper.go index b0bca76e2..bc30e88e6 100644 --- a/src/internal/connector/graph/http_wrapper.go +++ b/src/internal/connector/graph/http_wrapper.go @@ -147,6 +147,7 @@ func internalMiddleware(cc *clientConfig) []khttp.Middleware { }, khttp.NewRedirectHandler(), &LoggingMiddleware{}, + &throttlingMiddleware{newTimedFence()}, &RateLimiterMiddleware{}, &MetricsMiddleware{}, } diff --git a/src/internal/connector/graph/service.go b/src/internal/connector/graph/service.go index f0aa71f08..4335972a4 100644 --- a/src/internal/connector/graph/service.go +++ b/src/internal/connector/graph/service.go @@ -268,6 +268,7 @@ func kiotaMiddlewares( mw = append( mw, + &throttlingMiddleware{newTimedFence()}, &RateLimiterMiddleware{}, &MetricsMiddleware{}) diff --git a/src/pkg/backup/details/testdata/testdata.go b/src/pkg/backup/details/testdata/testdata.go index 
0d98ec7df..a929c141c 100644 --- a/src/pkg/backup/details/testdata/testdata.go +++ b/src/pkg/backup/details/testdata/testdata.go @@ -2,8 +2,13 @@ package testdata import ( "strings" + "testing" "time" + "github.com/stretchr/testify/require" + "golang.org/x/exp/slices" + + "github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/path" ) @@ -88,6 +93,28 @@ func (p repoRefAndLocRef) FolderLocation() string { return p.Loc.Append(strings.TrimSuffix(lastElem, folderSuffix)).String() } +// locationAsRepoRef returns a path.Path where the LocationRef is used for the +// folder path instead of the id-based path elements. This is useful for +// generating paths for older versions of Corso. +func (p repoRefAndLocRef) locationAsRepoRef() path.Path { + tmp := p.Loc + if len(p.ItemLocation()) > 0 { + tmp = tmp.Append(p.ItemLocation()) + } + + res, err := tmp.ToDataLayerPath( + p.RR.Tenant(), + p.RR.ResourceOwner(), + p.RR.Service(), + p.RR.Category(), + len(p.ItemLocation()) > 0) + if err != nil { + panic(err) + } + + return res +} + func mustPathRep(ref string, isItem bool) repoRefAndLocRef { res := repoRefAndLocRef{} tmp := mustParsePath(ref, isItem) @@ -145,49 +172,141 @@ var ( ExchangeEmailItemPath2 = ExchangeEmailBasePath2.MustAppend(ItemName2, true) ExchangeEmailItemPath3 = ExchangeEmailBasePath3.MustAppend(ItemName3, true) - ExchangeEmailItems = []details.Entry{ - { - RepoRef: ExchangeEmailItemPath1.RR.String(), - ShortRef: ExchangeEmailItemPath1.RR.ShortRef(), - ParentRef: ExchangeEmailItemPath1.RR.ToBuilder().Dir().ShortRef(), - ItemRef: ExchangeEmailItemPath1.ItemLocation(), - LocationRef: ExchangeEmailItemPath1.Loc.String(), - ItemInfo: details.ItemInfo{ - Exchange: &details.ExchangeInfo{ - ItemType: details.ExchangeMail, - Sender: "a-person", - Subject: "foo", - Received: Time1, + // These all represent the same set of items however, the different versions + // have varying amounts of information. 
+ exchangeEmailItemsByVersion = map[int][]details.Entry{ + version.All8MigrateUserPNToID: { + { + RepoRef: ExchangeEmailItemPath1.RR.String(), + ShortRef: ExchangeEmailItemPath1.RR.ShortRef(), + ParentRef: ExchangeEmailItemPath1.RR.ToBuilder().Dir().ShortRef(), + ItemRef: ExchangeEmailItemPath1.ItemLocation(), + LocationRef: ExchangeEmailItemPath1.Loc.String(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeMail, + Sender: "a-person", + Subject: "foo", + Received: Time1, + }, + }, + }, + { + RepoRef: ExchangeEmailItemPath2.RR.String(), + ShortRef: ExchangeEmailItemPath2.RR.ShortRef(), + ParentRef: ExchangeEmailItemPath2.RR.ToBuilder().Dir().ShortRef(), + ItemRef: ExchangeEmailItemPath2.ItemLocation(), + LocationRef: ExchangeEmailItemPath2.Loc.String(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeMail, + Sender: "a-person", + Subject: "bar", + Received: Time2, + }, + }, + }, + { + RepoRef: ExchangeEmailItemPath3.RR.String(), + ShortRef: ExchangeEmailItemPath3.RR.ShortRef(), + ParentRef: ExchangeEmailItemPath3.RR.ToBuilder().Dir().ShortRef(), + ItemRef: ExchangeEmailItemPath3.ItemLocation(), + LocationRef: ExchangeEmailItemPath3.Loc.String(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeMail, + Sender: "another-person", + Subject: "baz", + Received: Time2, + }, }, }, }, - { - RepoRef: ExchangeEmailItemPath2.RR.String(), - ShortRef: ExchangeEmailItemPath2.RR.ShortRef(), - ParentRef: ExchangeEmailItemPath2.RR.ToBuilder().Dir().ShortRef(), - ItemRef: ExchangeEmailItemPath2.ItemLocation(), - LocationRef: ExchangeEmailItemPath2.Loc.String(), - ItemInfo: details.ItemInfo{ - Exchange: &details.ExchangeInfo{ - ItemType: details.ExchangeMail, - Sender: "a-person", - Subject: "bar", - Received: Time2, + version.OneDrive7LocationRef: { + { + RepoRef: ExchangeEmailItemPath1.locationAsRepoRef().String(), + ShortRef: ExchangeEmailItemPath1.locationAsRepoRef().ShortRef(), + ParentRef: ExchangeEmailItemPath1.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemRef: ExchangeEmailItemPath1.ItemLocation(), + LocationRef: ExchangeEmailItemPath1.Loc.String(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeMail, + Sender: "a-person", + Subject: "foo", + Received: Time1, + }, + }, + }, + { + RepoRef: ExchangeEmailItemPath2.locationAsRepoRef().String(), + ShortRef: ExchangeEmailItemPath2.locationAsRepoRef().ShortRef(), + ParentRef: ExchangeEmailItemPath2.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemRef: ExchangeEmailItemPath2.ItemLocation(), + LocationRef: ExchangeEmailItemPath2.Loc.String(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeMail, + Sender: "a-person", + Subject: "bar", + Received: Time2, + }, + }, + }, + { + RepoRef: ExchangeEmailItemPath3.locationAsRepoRef().String(), + ShortRef: ExchangeEmailItemPath3.locationAsRepoRef().ShortRef(), + ParentRef: ExchangeEmailItemPath3.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemRef: ExchangeEmailItemPath3.ItemLocation(), + LocationRef: ExchangeEmailItemPath3.Loc.String(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeMail, + Sender: "another-person", + Subject: "baz", + Received: Time2, + }, }, }, }, - { - RepoRef: ExchangeEmailItemPath3.RR.String(), - ShortRef: ExchangeEmailItemPath3.RR.ShortRef(), - ParentRef: 
ExchangeEmailItemPath3.RR.ToBuilder().Dir().ShortRef(), - ItemRef: ExchangeEmailItemPath3.ItemLocation(), - LocationRef: ExchangeEmailItemPath3.Loc.String(), - ItemInfo: details.ItemInfo{ - Exchange: &details.ExchangeInfo{ - ItemType: details.ExchangeMail, - Sender: "another-person", - Subject: "baz", - Received: Time2, + 0: { + { + RepoRef: ExchangeEmailItemPath1.locationAsRepoRef().String(), + ShortRef: ExchangeEmailItemPath1.locationAsRepoRef().ShortRef(), + ParentRef: ExchangeEmailItemPath1.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeMail, + Sender: "a-person", + Subject: "foo", + Received: Time1, + }, + }, + }, + { + RepoRef: ExchangeEmailItemPath2.locationAsRepoRef().String(), + ShortRef: ExchangeEmailItemPath2.locationAsRepoRef().ShortRef(), + ParentRef: ExchangeEmailItemPath2.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeMail, + Sender: "a-person", + Subject: "bar", + Received: Time2, + }, + }, + }, + { + RepoRef: ExchangeEmailItemPath3.locationAsRepoRef().String(), + ShortRef: ExchangeEmailItemPath3.locationAsRepoRef().ShortRef(), + ParentRef: ExchangeEmailItemPath3.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeMail, + Sender: "another-person", + Subject: "baz", + Received: Time2, + }, }, }, }, @@ -199,30 +318,84 @@ var ( ExchangeContactsItemPath1 = ExchangeContactsBasePath.MustAppend(ItemName1, true) ExchangeContactsItemPath2 = ExchangeContactsBasePath2.MustAppend(ItemName2, true) - ExchangeContactsItems = []details.Entry{ - { - RepoRef: ExchangeContactsItemPath1.RR.String(), - ShortRef: ExchangeContactsItemPath1.RR.ShortRef(), - ParentRef: ExchangeContactsItemPath1.RR.ToBuilder().Dir().ShortRef(), - ItemRef: ExchangeContactsItemPath1.ItemLocation(), - LocationRef: ExchangeContactsItemPath1.Loc.String(), - ItemInfo: details.ItemInfo{ - Exchange: &details.ExchangeInfo{ - ItemType: details.ExchangeContact, - ContactName: "a-person", + exchangeContactsItemsByVersion = map[int][]details.Entry{ + version.All8MigrateUserPNToID: { + { + RepoRef: ExchangeContactsItemPath1.RR.String(), + ShortRef: ExchangeContactsItemPath1.RR.ShortRef(), + ParentRef: ExchangeContactsItemPath1.RR.ToBuilder().Dir().ShortRef(), + ItemRef: ExchangeContactsItemPath1.ItemLocation(), + LocationRef: ExchangeContactsItemPath1.Loc.String(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeContact, + ContactName: "a-person", + }, + }, + }, + { + RepoRef: ExchangeContactsItemPath2.RR.String(), + ShortRef: ExchangeContactsItemPath2.RR.ShortRef(), + ParentRef: ExchangeContactsItemPath2.RR.ToBuilder().Dir().ShortRef(), + ItemRef: ExchangeContactsItemPath2.ItemLocation(), + LocationRef: ExchangeContactsItemPath2.Loc.String(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeContact, + ContactName: "another-person", + }, }, }, }, - { - RepoRef: ExchangeContactsItemPath2.RR.String(), - ShortRef: ExchangeContactsItemPath2.RR.ShortRef(), - ParentRef: ExchangeContactsItemPath2.RR.ToBuilder().Dir().ShortRef(), - ItemRef: ExchangeContactsItemPath2.ItemLocation(), - LocationRef: ExchangeContactsItemPath2.Loc.String(), - ItemInfo: details.ItemInfo{ - Exchange: &details.ExchangeInfo{ - ItemType: details.ExchangeContact, - ContactName: "another-person", + 
version.OneDrive7LocationRef: { + { + RepoRef: ExchangeContactsItemPath1.locationAsRepoRef().String(), + ShortRef: ExchangeContactsItemPath1.locationAsRepoRef().ShortRef(), + ParentRef: ExchangeContactsItemPath1.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemRef: ExchangeContactsItemPath1.ItemLocation(), + LocationRef: ExchangeContactsItemPath1.Loc.String(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeContact, + ContactName: "a-person", + }, + }, + }, + { + RepoRef: ExchangeContactsItemPath2.locationAsRepoRef().String(), + ShortRef: ExchangeContactsItemPath2.locationAsRepoRef().ShortRef(), + ParentRef: ExchangeContactsItemPath2.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemRef: ExchangeContactsItemPath2.ItemLocation(), + LocationRef: ExchangeContactsItemPath2.Loc.String(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeContact, + ContactName: "another-person", + }, + }, + }, + }, + 0: { + { + RepoRef: ExchangeContactsItemPath1.locationAsRepoRef().String(), + ShortRef: ExchangeContactsItemPath1.locationAsRepoRef().ShortRef(), + ParentRef: ExchangeContactsItemPath1.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeContact, + ContactName: "a-person", + }, + }, + }, + { + RepoRef: ExchangeContactsItemPath2.locationAsRepoRef().String(), + ShortRef: ExchangeContactsItemPath2.locationAsRepoRef().ShortRef(), + ParentRef: ExchangeContactsItemPath2.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeContact, + ContactName: "another-person", + }, }, }, }, @@ -233,36 +406,100 @@ var ( ExchangeEventsItemPath1 = ExchangeEventsBasePath.MustAppend(ItemName1, true) ExchangeEventsItemPath2 = ExchangeEventsBasePath2.MustAppend(ItemName2, true) - ExchangeEventsItems = []details.Entry{ - { - RepoRef: ExchangeEventsItemPath1.RR.String(), - ShortRef: ExchangeEventsItemPath1.RR.ShortRef(), - ParentRef: ExchangeEventsItemPath1.RR.ToBuilder().Dir().ShortRef(), - ItemRef: ExchangeEventsItemPath1.ItemLocation(), - LocationRef: ExchangeEventsItemPath1.Loc.String(), - ItemInfo: details.ItemInfo{ - Exchange: &details.ExchangeInfo{ - ItemType: details.ExchangeEvent, - Organizer: "a-person", - Subject: "foo", - EventStart: Time1, - EventRecurs: false, + exchangeEventsItemsByVersion = map[int][]details.Entry{ + version.All8MigrateUserPNToID: { + { + RepoRef: ExchangeEventsItemPath1.RR.String(), + ShortRef: ExchangeEventsItemPath1.RR.ShortRef(), + ParentRef: ExchangeEventsItemPath1.RR.ToBuilder().Dir().ShortRef(), + ItemRef: ExchangeEventsItemPath1.ItemLocation(), + LocationRef: ExchangeEventsItemPath1.Loc.String(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeEvent, + Organizer: "a-person", + Subject: "foo", + EventStart: Time1, + EventRecurs: false, + }, + }, + }, + { + RepoRef: ExchangeEventsItemPath2.RR.String(), + ShortRef: ExchangeEventsItemPath2.RR.ShortRef(), + ParentRef: ExchangeEventsItemPath2.RR.ToBuilder().Dir().ShortRef(), + ItemRef: ExchangeEventsItemPath2.ItemLocation(), + LocationRef: ExchangeEventsItemPath2.Loc.String(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeEvent, + Organizer: "a-person", + Subject: "foo", + EventStart: Time2, + EventRecurs: true, + }, }, }, }, - { - RepoRef: ExchangeEventsItemPath2.RR.String(), 
- ShortRef: ExchangeEventsItemPath2.RR.ShortRef(), - ParentRef: ExchangeEventsItemPath2.RR.ToBuilder().Dir().ShortRef(), - ItemRef: ExchangeEventsItemPath2.ItemLocation(), - LocationRef: ExchangeEventsItemPath2.Loc.String(), - ItemInfo: details.ItemInfo{ - Exchange: &details.ExchangeInfo{ - ItemType: details.ExchangeEvent, - Organizer: "a-person", - Subject: "foo", - EventStart: Time2, - EventRecurs: true, + 2: { + { + RepoRef: ExchangeEventsItemPath1.RR.String(), + ShortRef: ExchangeEventsItemPath1.RR.ShortRef(), + ParentRef: ExchangeEventsItemPath1.RR.ToBuilder().Dir().ShortRef(), + LocationRef: ExchangeEventsItemPath1.Loc.String(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeEvent, + Organizer: "a-person", + Subject: "foo", + EventStart: Time1, + EventRecurs: false, + }, + }, + }, + { + RepoRef: ExchangeEventsItemPath2.RR.String(), + ShortRef: ExchangeEventsItemPath2.RR.ShortRef(), + ParentRef: ExchangeEventsItemPath2.RR.ToBuilder().Dir().ShortRef(), + LocationRef: ExchangeEventsItemPath2.Loc.String(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeEvent, + Organizer: "a-person", + Subject: "foo", + EventStart: Time2, + EventRecurs: true, + }, + }, + }, + }, + 0: { + { + RepoRef: ExchangeEventsItemPath1.locationAsRepoRef().String(), + ShortRef: ExchangeEventsItemPath1.locationAsRepoRef().ShortRef(), + ParentRef: ExchangeEventsItemPath1.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeEvent, + Organizer: "a-person", + Subject: "foo", + EventStart: Time1, + EventRecurs: false, + }, + }, + }, + { + RepoRef: ExchangeEventsItemPath2.locationAsRepoRef().String(), + ShortRef: ExchangeEventsItemPath2.locationAsRepoRef().ShortRef(), + ParentRef: ExchangeEventsItemPath2.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + Exchange: &details.ExchangeInfo{ + ItemType: details.ExchangeEvent, + Organizer: "a-person", + Subject: "foo", + EventStart: Time2, + EventRecurs: true, + }, }, }, }, @@ -281,58 +518,213 @@ var ( OneDriveParentFolder1 = OneDriveBasePath1.Loc.PopFront().String() OneDriveParentFolder2 = OneDriveBasePath2.Loc.PopFront().String() - OneDriveItems = []details.Entry{ - { - RepoRef: OneDriveItemPath1.RR.String(), - ShortRef: OneDriveItemPath1.RR.ShortRef(), - ParentRef: OneDriveItemPath1.RR.ToBuilder().Dir().ShortRef(), - ItemRef: OneDriveItemPath1.ItemLocation(), - LocationRef: OneDriveItemPath1.Loc.String(), - ItemInfo: details.ItemInfo{ - OneDrive: &details.OneDriveInfo{ - ItemType: details.OneDriveItem, - ParentPath: OneDriveFolderFolder, - ItemName: OneDriveItemPath1.ItemLocation() + "name", - Size: int64(23), - Owner: UserEmail1, - Created: Time2, - Modified: Time4, + oneDriveItemsByVersion = map[int][]details.Entry{ + version.All8MigrateUserPNToID: { + { + RepoRef: OneDriveItemPath1.locationAsRepoRef().String(), + ShortRef: OneDriveItemPath1.locationAsRepoRef().ShortRef(), + ParentRef: OneDriveItemPath1.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemRef: OneDriveItemPath1.ItemLocation(), + LocationRef: OneDriveItemPath1.Loc.String(), + ItemInfo: details.ItemInfo{ + OneDrive: &details.OneDriveInfo{ + ItemType: details.OneDriveItem, + ParentPath: OneDriveFolderFolder, + ItemName: OneDriveItemPath1.ItemLocation() + "name", + Size: int64(23), + Owner: UserEmail1, + Created: Time2, + Modified: Time4, + }, + }, + }, + { + RepoRef: 
OneDriveItemPath2.locationAsRepoRef().String(), + ShortRef: OneDriveItemPath2.locationAsRepoRef().ShortRef(), + ParentRef: OneDriveItemPath2.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemRef: OneDriveItemPath2.ItemLocation(), + LocationRef: OneDriveItemPath2.Loc.String(), + ItemInfo: details.ItemInfo{ + OneDrive: &details.OneDriveInfo{ + ItemType: details.OneDriveItem, + ParentPath: OneDriveParentFolder1, + ItemName: OneDriveItemPath2.ItemLocation() + "name", + Size: int64(42), + Owner: UserEmail1, + Created: Time1, + Modified: Time3, + }, + }, + }, + { + RepoRef: OneDriveItemPath3.locationAsRepoRef().String(), + ShortRef: OneDriveItemPath3.locationAsRepoRef().ShortRef(), + ParentRef: OneDriveItemPath3.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemRef: OneDriveItemPath3.ItemLocation(), + LocationRef: OneDriveItemPath3.Loc.String(), + ItemInfo: details.ItemInfo{ + OneDrive: &details.OneDriveInfo{ + ItemType: details.OneDriveItem, + ParentPath: OneDriveParentFolder2, + ItemName: OneDriveItemPath3.ItemLocation() + "name", + Size: int64(19), + Owner: UserEmail2, + Created: Time2, + Modified: Time4, + }, }, }, }, - { - RepoRef: OneDriveItemPath2.RR.String(), - ShortRef: OneDriveItemPath2.RR.ShortRef(), - ParentRef: OneDriveItemPath2.RR.ToBuilder().Dir().ShortRef(), - ItemRef: OneDriveItemPath2.ItemLocation(), - LocationRef: OneDriveItemPath2.Loc.String(), - ItemInfo: details.ItemInfo{ - OneDrive: &details.OneDriveInfo{ - ItemType: details.OneDriveItem, - ParentPath: OneDriveParentFolder1, - ItemName: OneDriveItemPath2.ItemLocation() + "name", - Size: int64(42), - Owner: UserEmail1, - Created: Time1, - Modified: Time3, + version.OneDrive7LocationRef: { + { + RepoRef: OneDriveItemPath1.locationAsRepoRef().String(), + ShortRef: OneDriveItemPath1.locationAsRepoRef().ShortRef(), + ParentRef: OneDriveItemPath1.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + LocationRef: OneDriveItemPath1.Loc.String(), + ItemInfo: details.ItemInfo{ + OneDrive: &details.OneDriveInfo{ + ItemType: details.OneDriveItem, + ParentPath: OneDriveFolderFolder, + ItemName: OneDriveItemPath1.ItemLocation() + "name", + Size: int64(23), + Owner: UserEmail1, + Created: Time2, + Modified: Time4, + }, + }, + }, + { + RepoRef: OneDriveItemPath2.locationAsRepoRef().String(), + ShortRef: OneDriveItemPath2.locationAsRepoRef().ShortRef(), + ParentRef: OneDriveItemPath2.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + LocationRef: OneDriveItemPath2.Loc.String(), + ItemInfo: details.ItemInfo{ + OneDrive: &details.OneDriveInfo{ + ItemType: details.OneDriveItem, + ParentPath: OneDriveParentFolder1, + ItemName: OneDriveItemPath2.ItemLocation() + "name", + Size: int64(42), + Owner: UserEmail1, + Created: Time1, + Modified: Time3, + }, + }, + }, + { + RepoRef: OneDriveItemPath3.locationAsRepoRef().String(), + ShortRef: OneDriveItemPath3.locationAsRepoRef().ShortRef(), + ParentRef: OneDriveItemPath3.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + LocationRef: OneDriveItemPath3.Loc.String(), + ItemInfo: details.ItemInfo{ + OneDrive: &details.OneDriveInfo{ + ItemType: details.OneDriveItem, + ParentPath: OneDriveParentFolder2, + ItemName: OneDriveItemPath3.ItemLocation() + "name", + Size: int64(19), + Owner: UserEmail2, + Created: Time2, + Modified: Time4, + }, }, }, }, - { - RepoRef: OneDriveItemPath3.RR.String(), - ShortRef: OneDriveItemPath3.RR.ShortRef(), - ParentRef: OneDriveItemPath3.RR.ToBuilder().Dir().ShortRef(), - ItemRef: OneDriveItemPath3.ItemLocation(), - LocationRef: OneDriveItemPath3.Loc.String(), - 
ItemInfo: details.ItemInfo{ - OneDrive: &details.OneDriveInfo{ - ItemType: details.OneDriveItem, - ParentPath: OneDriveParentFolder2, - ItemName: OneDriveItemPath3.ItemLocation() + "name", - Size: int64(19), - Owner: UserEmail2, - Created: Time2, - Modified: Time4, + version.OneDrive6NameInMeta: { + { + RepoRef: OneDriveItemPath1.locationAsRepoRef().String(), + ShortRef: OneDriveItemPath1.locationAsRepoRef().ShortRef(), + ParentRef: OneDriveItemPath1.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + OneDrive: &details.OneDriveInfo{ + ItemType: details.OneDriveItem, + ParentPath: OneDriveFolderFolder, + ItemName: OneDriveItemPath1.ItemLocation() + "name", + Size: int64(23), + Owner: UserEmail1, + Created: Time2, + Modified: Time4, + }, + }, + }, + { + RepoRef: OneDriveItemPath2.locationAsRepoRef().String(), + ShortRef: OneDriveItemPath2.locationAsRepoRef().ShortRef(), + ParentRef: OneDriveItemPath2.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + OneDrive: &details.OneDriveInfo{ + ItemType: details.OneDriveItem, + ParentPath: OneDriveParentFolder1, + ItemName: OneDriveItemPath2.ItemLocation() + "name", + Size: int64(42), + Owner: UserEmail1, + Created: Time1, + Modified: Time3, + }, + }, + }, + { + RepoRef: OneDriveItemPath3.locationAsRepoRef().String(), + ShortRef: OneDriveItemPath3.locationAsRepoRef().ShortRef(), + ParentRef: OneDriveItemPath3.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + OneDrive: &details.OneDriveInfo{ + ItemType: details.OneDriveItem, + ParentPath: OneDriveParentFolder2, + ItemName: OneDriveItemPath3.ItemLocation() + "name", + Size: int64(19), + Owner: UserEmail2, + Created: Time2, + Modified: Time4, + }, + }, + }, + }, + 0: { + { + RepoRef: OneDriveItemPath1.locationAsRepoRef().String() + "name", + ShortRef: OneDriveItemPath1.locationAsRepoRef().ShortRef(), + ParentRef: OneDriveItemPath1.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + OneDrive: &details.OneDriveInfo{ + ItemType: details.OneDriveItem, + ParentPath: OneDriveFolderFolder, + ItemName: OneDriveItemPath1.ItemLocation() + "name", + Size: int64(23), + Owner: UserEmail1, + Created: Time2, + Modified: Time4, + }, + }, + }, + { + RepoRef: OneDriveItemPath2.locationAsRepoRef().String() + "name", + ShortRef: OneDriveItemPath2.locationAsRepoRef().ShortRef(), + ParentRef: OneDriveItemPath2.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + OneDrive: &details.OneDriveInfo{ + ItemType: details.OneDriveItem, + ParentPath: OneDriveParentFolder1, + ItemName: OneDriveItemPath2.ItemLocation() + "name", + Size: int64(42), + Owner: UserEmail1, + Created: Time1, + Modified: Time3, + }, + }, + }, + { + RepoRef: OneDriveItemPath3.locationAsRepoRef().String() + "name", + ShortRef: OneDriveItemPath3.locationAsRepoRef().ShortRef(), + ParentRef: OneDriveItemPath3.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + OneDrive: &details.OneDriveInfo{ + ItemType: details.OneDriveItem, + ParentPath: OneDriveParentFolder2, + ItemName: OneDriveItemPath3.ItemLocation() + "name", + Size: int64(19), + Owner: UserEmail2, + Created: Time2, + Modified: Time4, + }, }, }, }, @@ -351,85 +743,241 @@ var ( SharePointParentLibrary1 = SharePointBasePath1.Loc.PopFront().String() SharePointParentLibrary2 = SharePointBasePath2.Loc.PopFront().String() - SharePointLibraryItems = []details.Entry{ - { - RepoRef: SharePointLibraryItemPath1.RR.String(), - ShortRef: 
SharePointLibraryItemPath1.RR.ShortRef(), - ParentRef: SharePointLibraryItemPath1.RR.ToBuilder().Dir().ShortRef(), - ItemRef: SharePointLibraryItemPath1.ItemLocation(), - LocationRef: SharePointLibraryItemPath1.Loc.String(), - ItemInfo: details.ItemInfo{ - SharePoint: &details.SharePointInfo{ - ItemType: details.SharePointLibrary, - ParentPath: SharePointLibraryFolder, - ItemName: SharePointLibraryItemPath1.ItemLocation() + "name", - Size: int64(23), - Owner: UserEmail1, - Created: Time2, - Modified: Time4, + sharePointLibraryItemsByVersion = map[int][]details.Entry{ + version.All8MigrateUserPNToID: { + { + RepoRef: SharePointLibraryItemPath1.locationAsRepoRef().String(), + ShortRef: SharePointLibraryItemPath1.locationAsRepoRef().ShortRef(), + ParentRef: SharePointLibraryItemPath1.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemRef: SharePointLibraryItemPath1.ItemLocation(), + LocationRef: SharePointLibraryItemPath1.Loc.String(), + ItemInfo: details.ItemInfo{ + SharePoint: &details.SharePointInfo{ + ItemType: details.SharePointLibrary, + ParentPath: SharePointLibraryFolder, + ItemName: SharePointLibraryItemPath1.ItemLocation() + "name", + Size: int64(23), + Owner: UserEmail1, + Created: Time2, + Modified: Time4, + }, + }, + }, + { + RepoRef: SharePointLibraryItemPath2.locationAsRepoRef().String(), + ShortRef: SharePointLibraryItemPath2.locationAsRepoRef().ShortRef(), + ParentRef: SharePointLibraryItemPath2.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemRef: SharePointLibraryItemPath2.ItemLocation(), + LocationRef: SharePointLibraryItemPath2.Loc.String(), + ItemInfo: details.ItemInfo{ + SharePoint: &details.SharePointInfo{ + ItemType: details.SharePointLibrary, + ParentPath: SharePointParentLibrary1, + ItemName: SharePointLibraryItemPath2.ItemLocation() + "name", + Size: int64(42), + Owner: UserEmail1, + Created: Time1, + Modified: Time3, + }, + }, + }, + { + RepoRef: SharePointLibraryItemPath3.locationAsRepoRef().String(), + ShortRef: SharePointLibraryItemPath3.locationAsRepoRef().ShortRef(), + ParentRef: SharePointLibraryItemPath3.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemRef: SharePointLibraryItemPath3.ItemLocation(), + LocationRef: SharePointLibraryItemPath3.Loc.String(), + ItemInfo: details.ItemInfo{ + SharePoint: &details.SharePointInfo{ + ItemType: details.SharePointLibrary, + ParentPath: SharePointParentLibrary2, + ItemName: SharePointLibraryItemPath3.ItemLocation() + "name", + Size: int64(19), + Owner: UserEmail2, + Created: Time2, + Modified: Time4, + }, }, }, }, - { - RepoRef: SharePointLibraryItemPath2.RR.String(), - ShortRef: SharePointLibraryItemPath2.RR.ShortRef(), - ParentRef: SharePointLibraryItemPath2.RR.ToBuilder().Dir().ShortRef(), - ItemRef: SharePointLibraryItemPath2.ItemLocation(), - LocationRef: SharePointLibraryItemPath2.Loc.String(), - ItemInfo: details.ItemInfo{ - SharePoint: &details.SharePointInfo{ - ItemType: details.SharePointLibrary, - ParentPath: SharePointParentLibrary1, - ItemName: SharePointLibraryItemPath2.ItemLocation() + "name", - Size: int64(42), - Owner: UserEmail1, - Created: Time1, - Modified: Time3, + version.OneDrive7LocationRef: { + { + RepoRef: SharePointLibraryItemPath1.locationAsRepoRef().String(), + ShortRef: SharePointLibraryItemPath1.locationAsRepoRef().ShortRef(), + ParentRef: SharePointLibraryItemPath1.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + LocationRef: SharePointLibraryItemPath1.Loc.String(), + ItemInfo: details.ItemInfo{ + SharePoint: &details.SharePointInfo{ + ItemType: details.OneDriveItem, 
+ ParentPath: SharePointLibraryFolder, + ItemName: SharePointLibraryItemPath1.ItemLocation() + "name", + Size: int64(23), + Owner: UserEmail1, + Created: Time2, + Modified: Time4, + }, + }, + }, + { + RepoRef: SharePointLibraryItemPath2.locationAsRepoRef().String(), + ShortRef: SharePointLibraryItemPath2.locationAsRepoRef().ShortRef(), + ParentRef: SharePointLibraryItemPath2.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + LocationRef: SharePointLibraryItemPath2.Loc.String(), + ItemInfo: details.ItemInfo{ + SharePoint: &details.SharePointInfo{ + ItemType: details.OneDriveItem, + ParentPath: SharePointParentLibrary1, + ItemName: SharePointLibraryItemPath2.ItemLocation() + "name", + Size: int64(42), + Owner: UserEmail1, + Created: Time1, + Modified: Time3, + }, + }, + }, + { + RepoRef: SharePointLibraryItemPath3.locationAsRepoRef().String(), + ShortRef: SharePointLibraryItemPath3.locationAsRepoRef().ShortRef(), + ParentRef: SharePointLibraryItemPath3.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + LocationRef: SharePointLibraryItemPath3.Loc.String(), + ItemInfo: details.ItemInfo{ + SharePoint: &details.SharePointInfo{ + ItemType: details.OneDriveItem, + ParentPath: SharePointParentLibrary2, + ItemName: SharePointLibraryItemPath3.ItemLocation() + "name", + Size: int64(19), + Owner: UserEmail2, + Created: Time2, + Modified: Time4, + }, }, }, }, - { - RepoRef: SharePointLibraryItemPath3.RR.String(), - ShortRef: SharePointLibraryItemPath3.RR.ShortRef(), - ParentRef: SharePointLibraryItemPath3.RR.ToBuilder().Dir().ShortRef(), - ItemRef: SharePointLibraryItemPath3.ItemLocation(), - LocationRef: SharePointLibraryItemPath3.Loc.String(), - ItemInfo: details.ItemInfo{ - SharePoint: &details.SharePointInfo{ - ItemType: details.SharePointLibrary, - ParentPath: SharePointParentLibrary2, - ItemName: SharePointLibraryItemPath3.ItemLocation() + "name", - Size: int64(19), - Owner: UserEmail2, - Created: Time2, - Modified: Time4, + version.OneDrive6NameInMeta: { + { + RepoRef: SharePointLibraryItemPath1.locationAsRepoRef().String(), + ShortRef: SharePointLibraryItemPath1.locationAsRepoRef().ShortRef(), + ParentRef: SharePointLibraryItemPath1.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + SharePoint: &details.SharePointInfo{ + ItemType: details.OneDriveItem, + ParentPath: SharePointLibraryFolder, + ItemName: SharePointLibraryItemPath1.ItemLocation() + "name", + Size: int64(23), + Owner: UserEmail1, + Created: Time2, + Modified: Time4, + }, + }, + }, + { + RepoRef: SharePointLibraryItemPath2.locationAsRepoRef().String(), + ShortRef: SharePointLibraryItemPath2.locationAsRepoRef().ShortRef(), + ParentRef: SharePointLibraryItemPath2.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + SharePoint: &details.SharePointInfo{ + ItemType: details.OneDriveItem, + ParentPath: SharePointParentLibrary1, + ItemName: SharePointLibraryItemPath2.ItemLocation() + "name", + Size: int64(42), + Owner: UserEmail1, + Created: Time1, + Modified: Time3, + }, + }, + }, + { + RepoRef: SharePointLibraryItemPath3.locationAsRepoRef().String(), + ShortRef: SharePointLibraryItemPath3.locationAsRepoRef().ShortRef(), + ParentRef: SharePointLibraryItemPath3.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + SharePoint: &details.SharePointInfo{ + ItemType: details.OneDriveItem, + ParentPath: SharePointParentLibrary2, + ItemName: SharePointLibraryItemPath3.ItemLocation() + "name", + Size: int64(19), + Owner: UserEmail2, + Created: Time2, + 
Modified: Time4, + }, + }, + }, + }, + 0: { + { + RepoRef: SharePointLibraryItemPath1.locationAsRepoRef().String() + "name", + ShortRef: SharePointLibraryItemPath1.locationAsRepoRef().ShortRef(), + ParentRef: SharePointLibraryItemPath1.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + SharePoint: &details.SharePointInfo{ + ItemType: details.OneDriveItem, + ParentPath: SharePointLibraryFolder, + ItemName: SharePointLibraryItemPath1.ItemLocation() + "name", + Size: int64(23), + Owner: UserEmail1, + Created: Time2, + Modified: Time4, + }, + }, + }, + { + RepoRef: SharePointLibraryItemPath2.locationAsRepoRef().String() + "name", + ShortRef: SharePointLibraryItemPath2.locationAsRepoRef().ShortRef(), + ParentRef: SharePointLibraryItemPath2.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + SharePoint: &details.SharePointInfo{ + ItemType: details.OneDriveItem, + ParentPath: SharePointParentLibrary1, + ItemName: SharePointLibraryItemPath2.ItemLocation() + "name", + Size: int64(42), + Owner: UserEmail1, + Created: Time1, + Modified: Time3, + }, + }, + }, + { + RepoRef: SharePointLibraryItemPath3.locationAsRepoRef().String() + "name", + ShortRef: SharePointLibraryItemPath3.locationAsRepoRef().ShortRef(), + ParentRef: SharePointLibraryItemPath3.locationAsRepoRef().ToBuilder().Dir().ShortRef(), + ItemInfo: details.ItemInfo{ + SharePoint: &details.SharePointInfo{ + ItemType: details.OneDriveItem, + ParentPath: SharePointParentLibrary2, + ItemName: SharePointLibraryItemPath3.ItemLocation() + "name", + Size: int64(19), + Owner: UserEmail2, + Created: Time2, + Modified: Time4, + }, }, }, }, } ) -func GetDetailsSet() *details.Details { +func GetDetailsSetForVersion(t *testing.T, wantedVersion int) *details.Details { entries := []details.Entry{} - - for _, e := range ExchangeEmailItems { - entries = append(entries, e) + // TODO(ashmrtn): At some point make an exported variable somewhere that has + // all the valid service/category pairs. + dataTypes := map[path.ServiceType][]path.CategoryType{ + path.ExchangeService: { + path.EmailCategory, + path.EventsCategory, + path.ContactsCategory, + }, + path.OneDriveService: { + path.FilesCategory, + }, + path.SharePointService: { + path.LibrariesCategory, + }, } - for _, e := range ExchangeContactsItems { - entries = append(entries, e) - } - - for _, e := range ExchangeEventsItems { - entries = append(entries, e) - } - - for _, e := range OneDriveItems { - entries = append(entries, e) - } - - for _, e := range SharePointLibraryItems { - entries = append(entries, e) + for s, cats := range dataTypes { + for _, cat := range cats { + entries = append(entries, GetDeetsForVersion(t, s, cat, wantedVersion)...) + } } return &details.Details{ @@ -438,3 +986,95 @@ func GetDetailsSet() *details.Details { }, } } + +// GetItemsForVersion returns the set of items for the requested +// (service, category, version) tuple that reside at the indicated indices. If +// -1 is the only index provided then returns all items. 
+func GetItemsForVersion( + t *testing.T, + service path.ServiceType, + cat path.CategoryType, + wantVersion int, + indices ...int, +) []details.Entry { + deets := GetDeetsForVersion(t, service, cat, wantVersion) + + if len(indices) == 1 && indices[0] == -1 { + return deets + } + + var res []details.Entry + + for _, i := range indices { + require.Less(t, i, len(deets), "requested index out of bounds", i, len(deets)) + res = append(res, deets[i]) + } + + return res +} + +// GetDeetsForVersion returns the set of details with the highest +// version <= the requested version. +func GetDeetsForVersion( + t *testing.T, + service path.ServiceType, + cat path.CategoryType, + wantVersion int, +) []details.Entry { + var input map[int][]details.Entry + + switch service { + case path.ExchangeService: + switch cat { + case path.EmailCategory: + input = exchangeEmailItemsByVersion + + case path.EventsCategory: + input = exchangeEventsItemsByVersion + + case path.ContactsCategory: + input = exchangeContactsItemsByVersion + } + + case path.OneDriveService: + if cat == path.FilesCategory { + input = oneDriveItemsByVersion + } + + case path.SharePointService: + if cat == path.LibrariesCategory { + input = sharePointLibraryItemsByVersion + } + } + + require.NotNil( + t, + input, + "unsupported (service, category)", + service.String(), + cat.String()) + + return getDeetsForVersion(t, wantVersion, input) +} + +func getDeetsForVersion( + t *testing.T, + wantVersion int, + deetsSet map[int][]details.Entry, +) []details.Entry { + var ( + res []details.Entry + resVersion = version.NoBackup + ) + + for v, deets := range deetsSet { + if v <= wantVersion && v > resVersion { + resVersion = v + res = deets + } + } + + require.NotEmpty(t, res, "unable to find details for version", wantVersion) + + return slices.Clone(res) +} diff --git a/src/pkg/logger/logger.go b/src/pkg/logger/logger.go index 39636a99c..f6af8ffce 100644 --- a/src/pkg/logger/logger.go +++ b/src/pkg/logger/logger.go @@ -14,7 +14,7 @@ import ( "go.uber.org/zap/zapcore" "golang.org/x/exp/slices" - "github.com/alcionai/corso/src/internal/common" + "github.com/alcionai/corso/src/internal/common/str" ) // Default location for writing logs, initialized in platform specific files @@ -256,7 +256,7 @@ func (s Settings) EnsureDefaults() Settings { algs := []piiAlg{PIIPlainText, PIIMask, PIIHash} if len(set.PIIHandling) == 0 || !slices.Contains(algs, set.PIIHandling) { - set.PIIHandling = piiAlg(common.First(piiHandling, string(PIIPlainText))) + set.PIIHandling = piiAlg(str.First(piiHandling, string(PIIPlainText))) } if len(set.File) == 0 { diff --git a/src/pkg/selectors/selectors_reduce_test.go b/src/pkg/selectors/selectors_reduce_test.go index c57cde409..51540ce48 100644 --- a/src/pkg/selectors/selectors_reduce_test.go +++ b/src/pkg/selectors/selectors_reduce_test.go @@ -1,6 +1,7 @@ package selectors_test import ( + "fmt" "testing" "time" @@ -9,9 +10,11 @@ import ( "github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/tester" + "github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details/testdata" "github.com/alcionai/corso/src/pkg/fault" + "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/selectors" ) @@ -27,25 +30,31 @@ func (suite *SelectorReduceSuite) TestReduce() { ctx, flush := tester.NewContext() defer flush() - allDetails := testdata.GetDetailsSet() table := []struct { name string - selFunc func() 
selectors.Reducer - expected []details.Entry + selFunc func(t *testing.T, wantVersion int) selectors.Reducer + expected func(t *testing.T, wantVersion int) []details.Entry }{ { name: "ExchangeAllMail", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { sel := selectors.NewExchangeRestore(selectors.Any()) sel.Include(sel.Mails(selectors.Any(), selectors.Any())) return sel }, - expected: testdata.ExchangeEmailItems, + expected: func(t *testing.T, wantVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantVersion, + -1) + }, }, { name: "ExchangeMailFolderPrefixMatch", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { sel := selectors.NewExchangeRestore(selectors.Any()) sel.Include(sel.MailFolders( []string{testdata.ExchangeEmailInboxPath.FolderLocation()}, @@ -53,48 +62,79 @@ func (suite *SelectorReduceSuite) TestReduce() { return sel }, - expected: testdata.ExchangeEmailItems, + expected: func(t *testing.T, wantVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantVersion, + -1) + }, }, { name: "ExchangeMailSubject", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { sel := selectors.NewExchangeRestore(selectors.Any()) sel.Filter(sel.MailSubject("foo")) return sel }, - expected: []details.Entry{testdata.ExchangeEmailItems[0]}, + expected: func(t *testing.T, wantVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantVersion, + 0) + }, }, { name: "ExchangeMailSubjectExcludeItem", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { + deets := testdata.GetDeetsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantVersion) + sel := selectors.NewExchangeRestore(selectors.Any()) sel.Filter(sel.MailSender("a-person")) sel.Exclude(sel.Mails( selectors.Any(), - []string{testdata.ExchangeEmailItemPath2.RR.ShortRef()}, + []string{deets[1].ShortRef}, )) return sel }, - expected: []details.Entry{testdata.ExchangeEmailItems[0]}, + expected: func(t *testing.T, wantVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantVersion, + 0) + }, }, { name: "ExchangeMailSender", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { sel := selectors.NewExchangeRestore(selectors.Any()) sel.Filter(sel.MailSender("a-person")) return sel }, - expected: []details.Entry{ - testdata.ExchangeEmailItems[0], - testdata.ExchangeEmailItems[1], + expected: func(t *testing.T, wantVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantVersion, + 0, 1) }, }, { name: "ExchangeMailReceivedTime", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { sel := selectors.NewExchangeRestore(selectors.Any()) sel.Filter(sel.MailReceivedBefore( dttm.Format(testdata.Time1.Add(time.Second)), @@ -102,11 +142,18 @@ func (suite *SelectorReduceSuite) TestReduce() { return sel }, - expected: []details.Entry{testdata.ExchangeEmailItems[0]}, + expected: func(t *testing.T, wantVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + 
path.EmailCategory, + wantVersion, + 0) + }, }, { name: "ExchangeMailID", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { sel := selectors.NewExchangeRestore(selectors.Any()) sel.Include(sel.Mails( selectors.Any(), @@ -115,24 +162,44 @@ func (suite *SelectorReduceSuite) TestReduce() { return sel }, - expected: []details.Entry{testdata.ExchangeEmailItems[0]}, + expected: func(t *testing.T, wantVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantVersion, + 0) + }, }, { name: "ExchangeMailShortRef", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { + deets := testdata.GetDeetsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantVersion) + sel := selectors.NewExchangeRestore(selectors.Any()) sel.Include(sel.Mails( selectors.Any(), - []string{testdata.ExchangeEmailItemPath1.RR.ShortRef()}, + []string{deets[0].ShortRef}, )) return sel }, - expected: []details.Entry{testdata.ExchangeEmailItems[0]}, + expected: func(t *testing.T, wantVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantVersion, + 0) + }, }, { name: "ExchangeAllEventsAndMailWithSubject", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { sel := selectors.NewExchangeRestore(selectors.Any()) sel.Include(sel.Events( selectors.Any(), @@ -142,39 +209,62 @@ func (suite *SelectorReduceSuite) TestReduce() { return sel }, - expected: []details.Entry{testdata.ExchangeEmailItems[0]}, + expected: func(t *testing.T, wantVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantVersion, + 0) + }, }, { name: "ExchangeEventsAndMailWithSubject", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { sel := selectors.NewExchangeRestore(selectors.Any()) sel.Filter(sel.EventSubject("foo")) sel.Filter(sel.MailSubject("foo")) return sel }, - expected: []details.Entry{}, + expected: func(t *testing.T, wantVersion int) []details.Entry { + return []details.Entry{} + }, }, { name: "ExchangeAll", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { sel := selectors.NewExchangeRestore(selectors.Any()) sel.Include(sel.AllData()) return sel }, - expected: append( - append( + expected: func(t *testing.T, wantVersion int) []details.Entry { + return append( append( - []details.Entry{}, - testdata.ExchangeEmailItems...), - testdata.ExchangeContactsItems...), - testdata.ExchangeEventsItems..., - ), + testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantVersion, + -1), + testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EventsCategory, + wantVersion, + -1)...), + testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.ContactsCategory, + wantVersion, + -1)...) 
+ }, }, { name: "ExchangeMailByFolder", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { sel := selectors.NewExchangeRestore(selectors.Any()) sel.Include(sel.MailFolders( []string{testdata.ExchangeEmailBasePath.FolderLocation()}, @@ -182,14 +272,21 @@ func (suite *SelectorReduceSuite) TestReduce() { return sel }, - expected: []details.Entry{testdata.ExchangeEmailItems[0]}, + expected: func(t *testing.T, wantVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantVersion, + 0) + }, }, // TODO (keepers): all folders are treated as prefix-matches at this time. // so this test actually does nothing different. In the future, we'll // need to amend the non-prefix folder tests to expect non-prefix matches. { name: "ExchangeMailByFolderPrefix", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { sel := selectors.NewExchangeRestore(selectors.Any()) sel.Include(sel.MailFolders( []string{testdata.ExchangeEmailBasePath.FolderLocation()}, @@ -198,11 +295,18 @@ func (suite *SelectorReduceSuite) TestReduce() { return sel }, - expected: []details.Entry{testdata.ExchangeEmailItems[0]}, + expected: func(t *testing.T, wantVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantVersion, + 0) + }, }, { name: "ExchangeMailByFolderRoot", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { sel := selectors.NewExchangeRestore(selectors.Any()) sel.Include(sel.MailFolders( []string{testdata.ExchangeEmailInboxPath.FolderLocation()}, @@ -210,11 +314,18 @@ func (suite *SelectorReduceSuite) TestReduce() { return sel }, - expected: testdata.ExchangeEmailItems, + expected: func(t *testing.T, wantVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EmailCategory, + wantVersion, + -1) + }, }, { name: "ExchangeContactByFolder", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { sel := selectors.NewExchangeRestore(selectors.Any()) sel.Include(sel.ContactFolders( []string{testdata.ExchangeContactsBasePath.FolderLocation()}, @@ -222,11 +333,18 @@ func (suite *SelectorReduceSuite) TestReduce() { return sel }, - expected: []details.Entry{testdata.ExchangeContactsItems[0]}, + expected: func(t *testing.T, wantVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.ContactsCategory, + wantVersion, + 0) + }, }, { name: "ExchangeContactByFolderRoot", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { sel := selectors.NewExchangeRestore(selectors.Any()) sel.Include(sel.ContactFolders( []string{testdata.ExchangeContactsRootPath.FolderLocation()}, @@ -234,12 +352,19 @@ func (suite *SelectorReduceSuite) TestReduce() { return sel }, - expected: testdata.ExchangeContactsItems, + expected: func(t *testing.T, wantVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.ContactsCategory, + wantVersion, + -1) + }, }, { name: "ExchangeEventsByFolder", - selFunc: func() selectors.Reducer { + selFunc: func(t *testing.T, wantVersion int) selectors.Reducer { sel := selectors.NewExchangeRestore(selectors.Any()) sel.Include(sel.EventCalendars( []string{testdata.ExchangeEventsBasePath.FolderLocation()}, 
@@ -247,16 +372,28 @@ func (suite *SelectorReduceSuite) TestReduce() { return sel }, - expected: []details.Entry{testdata.ExchangeEventsItems[0]}, + expected: func(t *testing.T, wantVersion int) []details.Entry { + return testdata.GetItemsForVersion( + t, + path.ExchangeService, + path.EventsCategory, + wantVersion, + 0) + }, }, } - for _, test := range table { - suite.Run(test.name, func() { - t := suite.T() + for v := 0; v <= version.Backup; v++ { + suite.Run(fmt.Sprintf("version%d", v), func() { + for _, test := range table { + suite.Run(test.name, func() { + t := suite.T() - output := test.selFunc().Reduce(ctx, allDetails, fault.New(true)) - assert.ElementsMatch(t, test.expected, output.Entries) + allDetails := testdata.GetDetailsSetForVersion(t, v) + output := test.selFunc(t, v).Reduce(ctx, allDetails, fault.New(true)) + assert.ElementsMatch(t, test.expected(t, v), output.Entries) + }) + } }) } } diff --git a/src/pkg/services/m365/api/exchange_common.go b/src/pkg/services/m365/api/attachments.go similarity index 50% rename from src/pkg/services/m365/api/exchange_common.go rename to src/pkg/services/m365/api/attachments.go index 7f4f6afe2..e5125a64a 100644 --- a/src/pkg/services/m365/api/exchange_common.go +++ b/src/pkg/services/m365/api/attachments.go @@ -3,29 +3,11 @@ package api import ( "strings" - "github.com/alcionai/clues" "github.com/microsoftgraph/msgraph-sdk-go/models" "github.com/alcionai/corso/src/internal/common/ptr" - "github.com/alcionai/corso/src/internal/connector/graph" ) -// checkIDAndName is a helper function to ensure that -// the ID and name pointers are set prior to being called. -func checkIDAndName(c graph.Container) error { - id := ptr.Val(c.GetId()) - if len(id) == 0 { - return clues.New("container missing ID") - } - - dn := ptr.Val(c.GetDisplayName()) - if len(dn) == 0 { - return clues.New("container missing display name").With("container_id", id) - } - - return nil -} - func HasAttachments(body models.ItemBodyable) bool { if body == nil { return false diff --git a/src/pkg/services/m365/api/api.go b/src/pkg/services/m365/api/client.go similarity index 65% rename from src/pkg/services/m365/api/api.go rename to src/pkg/services/m365/api/client.go index 1500840fe..cf7930664 100644 --- a/src/pkg/services/m365/api/api.go +++ b/src/pkg/services/m365/api/client.go @@ -1,10 +1,7 @@ package api import ( - "context" - "github.com/alcionai/clues" - "github.com/microsoft/kiota-abstractions-go/serialization" "github.com/alcionai/corso/src/internal/connector/graph" "github.com/alcionai/corso/src/pkg/account" @@ -78,31 +75,3 @@ func newLargeItemService(creds account.M365Config) (*graph.Service, error) { return a, nil } - -// --------------------------------------------------------------------------- -// common types and consts -// --------------------------------------------------------------------------- - -// DeltaUpdate holds the results of a current delta token. It normally -// gets produced when aggregating the addition and removal of items in -// a delta-queryable folder. -type DeltaUpdate struct { - // the deltaLink itself - URL string - // true if the old delta was marked as invalid - Reset bool -} - -// GraphQuery represents functions which perform exchange-specific queries -// into M365 backstore. 
Responses -> returned items will only contain the information -// that is included in the options -// TODO: use selector or path for granularity into specific folders or specific date ranges -type GraphQuery func(ctx context.Context, userID string) (serialization.Parsable, error) - -// GraphRetrievalFunctions are functions from the Microsoft Graph API that retrieve -// the default associated data of a M365 object. This varies by object. Additional -// Queries must be run to obtain the omitted fields. -type GraphRetrievalFunc func( - ctx context.Context, - user, m365ID string, -) (serialization.Parsable, error) diff --git a/src/pkg/services/m365/api/api_test.go b/src/pkg/services/m365/api/client_test.go similarity index 86% rename from src/pkg/services/m365/api/api_test.go rename to src/pkg/services/m365/api/client_test.go index cfaf8976f..196cc2322 100644 --- a/src/pkg/services/m365/api/api_test.go +++ b/src/pkg/services/m365/api/client_test.go @@ -49,41 +49,6 @@ func (suite *ExchangeServiceSuite) SetupSuite() { suite.gs = graph.NewService(adpt) } -func (suite *ExchangeServiceSuite) TestOptionsForCalendars() { - tests := []struct { - name string - params []string - checkError assert.ErrorAssertionFunc - }{ - { - name: "Empty Literal", - params: []string{}, - checkError: assert.NoError, - }, - { - name: "Invalid Parameter", - params: []string{"status"}, - checkError: assert.Error, - }, - { - name: "Invalid Parameters", - params: []string{"status", "height", "month"}, - checkError: assert.Error, - }, - { - name: "Valid Parameters", - params: []string{"changeKey", "events", "owner"}, - checkError: assert.NoError, - }, - } - for _, test := range tests { - suite.Run(test.name, func() { - _, err := optionsForCalendars(test.params) - test.checkError(suite.T(), err, clues.ToCore(err)) - }) - } -} - //nolint:lll var stubHTMLContent = "
\r\n
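
The testdata refactor above replaces the flat Exchange/OneDrive/SharePoint item slices with maps keyed by backup-format version, and callers resolve an entry set by taking the map key with the highest version that does not exceed the one requested (see getDeetsForVersion). As a minimal, self-contained sketch of that resolution rule only — using a hypothetical map of description strings instead of the details.Entry fixtures, and a plain -1 sentinel in place of version.NoBackup:

package main

import "fmt"

// pickForVersion returns the value registered under the largest key that is
// still <= want, mirroring the "highest version not above the requested one"
// rule the version-keyed fixtures use. The boolean reports whether any key
// qualified. Illustrative only; the real helper works on
// map[int][]details.Entry and fails the test via require when nothing matches.
func pickForVersion(byVersion map[int]string, want int) (string, bool) {
	best := -1
	var res string

	for v, val := range byVersion {
		if v <= want && v > best {
			best = v
			res = val
		}
	}

	return res, best >= 0
}

func main() {
	// Hypothetical fixture shapes, loosely following the pattern above:
	// the oldest entries carry no LocationRef/ItemRef, a later version adds
	// LocationRef, and the newest adds ItemRef.
	byVersion := map[int]string{
		0: "location-derived RepoRef only",
		7: "adds LocationRef",
		8: "adds ItemRef",
	}

	for _, want := range []int{0, 3, 7, 8, 12} {
		if desc, ok := pickForVersion(byVersion, want); ok {
			fmt.Printf("requested version %d -> %s\n", want, desc)
		}
	}
}

Because the lookup falls back to the newest older entry set, a fixture map only needs a new key for versions where the recorded details actually changed shape; intermediate versions reuse the previous set, which is why the maps above skip most version numbers.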