commit fbd62c8b0c: merge main

.github/workflows/ci.yml (vendored, 10 changes)
@@ -155,7 +155,7 @@ jobs:
 
       # AWS creds
       - name: Configure AWS credentials from Test account
-        uses: aws-actions/configure-aws-credentials@v2
+        uses: aws-actions/configure-aws-credentials@v3
         with:
           role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
           role-session-name: integration-testing
@@ -172,6 +172,7 @@ jobs:
          CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
          CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
          S3_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }}
+         CORSO_ENABLE_GROUPS: true
        run: |
          set -euo pipefail
          go test \
@@ -225,7 +226,7 @@ jobs:
 
       # AWS creds
       - name: Configure AWS credentials from Test account
-        uses: aws-actions/configure-aws-credentials@v2
+        uses: aws-actions/configure-aws-credentials@v3
         with:
           role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
           role-session-name: integration-testing
@@ -242,6 +243,7 @@ jobs:
          CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
          CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
          S3_BUCKET: ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }}
+         CORSO_ENABLE_GROUPS: true
        run: |
          set -euo pipefail
          go test \
@@ -275,6 +277,7 @@ jobs:
     env:
       CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-unit.log
       LOG_GRAPH_REQUESTS: true
+      CORSO_ENABLE_GROUPS: true
     steps:
       - uses: actions/checkout@v3
 
@@ -329,6 +332,7 @@ jobs:
     env:
       CORSO_LOG_FILE: ${{ github.workspace }}/testlog/run-fork.log
       LOG_GRAPH_REQUESTS: true
+      CORSO_ENABLE_GROUPS: true
     steps:
       - name: Fail check if not repository_dispatch
         if: github.event_name != 'repository_dispatch'
@@ -373,7 +377,7 @@ jobs:
 
       # AWS creds
       - name: Configure AWS credentials from Test account
-        uses: aws-actions/configure-aws-credentials@v2
+        uses: aws-actions/configure-aws-credentials@v3
         with:
           role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
           role-session-name: integration-testing
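
The `CORSO_ENABLE_GROUPS: true` values added to these jobs drive a temporary feature gate in the CLI (see the `package backup`, `package export`, and `package restore` hunks later in this commit): any non-empty value enables the unreleased Groups/Teams commands. A minimal runnable sketch of that contract, assuming only the environment-variable name taken from this commit:

package main

import (
    "fmt"
    "os"
)

// groupsEnabled mirrors the gate used in this commit: any non-empty value
// of CORSO_ENABLE_GROUPS (not just "true") switches the commands on.
func groupsEnabled() bool {
    return len(os.Getenv("CORSO_ENABLE_GROUPS")) > 0
}

func main() {
    fmt.Println("groups/teams commands enabled:", groupsEnabled())
}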

.github/workflows/longevity_test.yml (vendored, 156 changes)

@@ -47,8 +47,8 @@ jobs:
       run:
         working-directory: src
 
-    ##########################################################################################################################################
+    ############################################################################
     # setup
     steps:
       - uses: actions/checkout@v3
         with:
@@ -83,15 +83,14 @@ jobs:
       # Use shorter-lived credentials obtained from assume-role since these
       # runs haven't been taking long.
       - name: Configure AWS credentials from Test account
-        uses: aws-actions/configure-aws-credentials@v2
+        uses: aws-actions/configure-aws-credentials@v3
         with:
           role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
           role-session-name: integration-testing
           aws-region: us-east-1
 
-      ##########################################################################################################################################
-
-      # Repository commands
+      ##########################################################################
+      # Repository commands
 
       - name: Version Test
         run: |
@@ -105,6 +104,9 @@ jobs:
           ./corso repo init s3 \
             --no-stats \
             --hide-progress \
+            --retention-mode $(echo "${{ env.RETENTION_MODE }}" | tr '[:upper:]' '[:lower:]') \
+            --retention-duration "${{ env.RETENTION_DURATION }}h" \
+            --extend-retention \
             --prefix ${{ env.PREFIX }} \
             --bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
             --succeed-if-exists \
@@ -133,9 +135,8 @@ jobs:
             exit 1
           fi
 
-      ##########################################################################################################################################
-
-      # Exchange
+      ##########################################################################
+      # Exchange
 
       - name: Backup exchange test
         id: exchange-test
@@ -158,8 +159,8 @@ jobs:
           data=$( echo $resultjson | jq -r '.[0] | .id' )
           echo result=$data >> $GITHUB_OUTPUT
 
-      ##########################################################################################################################################
+      ##########################################################################
       # Onedrive
 
       - name: Backup onedrive test
         id: onedrive-test
@@ -183,9 +184,8 @@ jobs:
           data=$( echo $resultjson | jq -r '.[0] | .id' )
           echo result=$data >> $GITHUB_OUTPUT
 
-      ##########################################################################################################################################
-
-      # Sharepoint test
+      ##########################################################################
+      # Sharepoint test
       - name: Backup sharepoint test
         id: sharepoint-test
         run: |
@@ -209,9 +209,8 @@ jobs:
           data=$( echo $resultjson | jq -r '.[0] | .id' )
           echo result=$data >> $GITHUB_OUTPUT
 
-      ##########################################################################################################################################
-
-      # Backup Exchange Deletion test
+      ##########################################################################
+      # Backup Exchange Deletion test
       - name: Backup Delete exchange test
         id: delete-exchange-test
         env:
@@ -222,9 +221,8 @@ jobs:
           echo -e "\nDelete Backup exchange \n" >> ${CORSO_LOG_FILE}
           ./longevity-test
 
-      ##########################################################################################################################################
-
-      # Backup Onedrive Deletion test
+      ##########################################################################
+      # Backup Onedrive Deletion test
       - name: Backup Delete onedrive test
         id: delete-onedrive-test
         env:
@@ -235,9 +233,8 @@ jobs:
           echo -e "\nDelete Backup onedrive \n" >> ${CORSO_LOG_FILE}
           ./longevity-test
 
-      ##########################################################################################################################################
-
-      # Backup Sharepoint Deletion test
+      ##########################################################################
+      # Backup Sharepoint Deletion test
       - name: Backup Delete Sharepoint test
         id: delete-sharepoint-test
         env:
@@ -248,67 +245,62 @@ jobs:
           echo -e "\nDelete Backup sharepoint \n" >> ${CORSO_LOG_FILE}
           ./longevity-test
 
-      ##########################################################################################################################################
-      # skipped until supported
-      # Export OneDrive Test
-      # - name: OneDrive Export test
-      #   run: |
-      #     set -euo pipefail
-      #     echo -e "\Export OneDrive test\n" >> ${CORSO_LOG_FILE}
-
-      #     echo -e "\Export OneDrive test - first entry\n" >> ${CORSO_LOG_FILE}
-      #     ./corso backup list onedrive 2>/dev/null | tail -n+2 | head -n1 | awk '{print $1}' |
-      #     while read -r line; do
-      #       ./corso export onedrive \
-      #         "/tmp/corso-export--$line" \
-      #         --no-stats \
-      #         --backup "$line" \
-      #         2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_onedrive_first.txt
-      #     done
-
-      #     echo -e "\Export OneDrive test - last entry\n" >> ${CORSO_LOG_FILE}
-      #     ./corso backup list onedrive 2>/dev/null | tail -n1 | awk '{print $1}' |
-      #     while read -r line; do
-      #       ./corso export onedrive \
-      #         "/tmp/corso-export--$line" \
-      #         --no-stats \
-      #         --backup "$line" \
-      #         2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_onedrive_last.txt
-      #     done
-
-      ##########################################################################################################################################
-      # skipped until supported
-      # Export SharePoint Test
-      # - name: SharePoint Export test
-      #   run: |
-      #     set -euo pipefail
-      #     echo -e "\Export SharePoint test\n" >> ${CORSO_LOG_FILE}
-
-      #     echo -e "\Export SharePoint test - first entry\n" >> ${CORSO_LOG_FILE}
-      #     ./corso backup list sharepoint 2>/dev/null | tail -n+2 | head -n1 | awk '{print $1}' |
-      #     while read -r line; do
-      #       ./corso export sharepoint \
-      #         "/tmp/corso-export--$line" \
-      #         --no-stats \
-      #         --backup "$line" \
-      #         2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_sharepoint_first.txt
-      #     done
-
-      #     echo -e "\Export SharePoint test - last entry\n" >> ${CORSO_LOG_FILE}
-      #     ./corso backup list sharepoint 2>/dev/null | tail -n1 | awk '{print $1}' |
-      #     while read -r line; do
-      #       ./corso export sharepoint \
-      #         "/tmp/corso-export--$line" \
-      #         --no-stats \
-      #         --backup "$line" \
-      #         2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_sharepoint_last.txt
-      #     done
-
-      ##########################################################################################################################################
-
-      # Maintenance test
+      ##########################################################################
+      # Export OneDrive Test
+      - name: OneDrive Export test
+        run: |
+          set -euo pipefail
+          echo -e "\Export OneDrive test\n" >> ${CORSO_LOG_FILE}
+
+          echo -e "\Export OneDrive test - first entry\n" >> ${CORSO_LOG_FILE}
+          ./corso backup list onedrive 2>/dev/null | tail -n+2 | head -n1 | awk '{print $1}' |
+          while read -r line; do
+            ./corso export onedrive \
+              "/tmp/corso-export--$line" \
+              --no-stats \
+              --backup "$line" \
+              2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_onedrive_first.txt
+          done
+
+          echo -e "\Export OneDrive test - last entry\n" >> ${CORSO_LOG_FILE}
+          ./corso backup list onedrive 2>/dev/null | tail -n1 | awk '{print $1}' |
+          while read -r line; do
+            ./corso export onedrive \
+              "/tmp/corso-export--$line" \
+              --no-stats \
+              --backup "$line" \
+              2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_onedrive_last.txt
+          done
+
+      ##########################################################################
+      # Export SharePoint Test
+      - name: SharePoint Export test
+        run: |
+          set -euo pipefail
+          echo -e "\Export SharePoint test\n" >> ${CORSO_LOG_FILE}
+
+          echo -e "\Export SharePoint test - first entry\n" >> ${CORSO_LOG_FILE}
+          ./corso backup list sharepoint 2>/dev/null | tail -n+2 | head -n1 | awk '{print $1}' |
+          while read -r line; do
+            ./corso export sharepoint \
+              "/tmp/corso-export--$line" \
+              --no-stats \
+              --backup "$line" \
+              2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_sharepoint_first.txt
+          done
+
+          echo -e "\Export SharePoint test - last entry\n" >> ${CORSO_LOG_FILE}
+          ./corso backup list sharepoint 2>/dev/null | tail -n1 | awk '{print $1}' |
+          while read -r line; do
+            ./corso export sharepoint \
+              "/tmp/corso-export--$line" \
+              --no-stats \
+              --backup "$line" \
+              2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_sharepoint_last.txt
+          done
+
+      ##########################################################################
+      # Maintenance test
 
       - name: Maintenance test Daily
         id: maintenance-test-daily
         run: |
@@ -362,7 +354,7 @@ jobs:
             --bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
             --bucket-prefix ${{ env.PREFIX }} \
             --retention-mode ${{ env.RETENTION_MODE }} \
-            --live-retention-duration "$((${{ env.RETENTION_DURATION}}-1))h" \
+            --live-retention-duration "$((${{ env.RETENTION_DURATION }}-1))h" \
             --prefix "kopia.blobcfg" \
             --prefix "kopia.repository" \
             --prefix "p" \
@@ -370,10 +362,8 @@ jobs:
             --prefix "x"
           fi
 
-      ##########################################################################################################################################
-
-      # Logging & Notifications
-
+      ##########################################################################
+      # Logging & Notifications
 
       # Upload the original go test output as an artifact for later review.
       - name: Upload test log
         if: always()
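
The new export steps select the first and the last backup from `./corso backup list <service>`: `tail -n+2 | head -n1` skips the header row and keeps the first listed backup ID, while `tail -n1` keeps the final one. The same selection expressed as a small runnable Go sketch, over an invented listing (the real column layout may differ):

package main

import (
    "fmt"
    "strings"
)

func main() {
    // Invented stand-in for `./corso backup list onedrive` output: a header
    // row followed by one backup per line, ID in the first column.
    listing := `ID      Started At
bkp-001 2023-08-01T01:00:00Z
bkp-002 2023-08-02T01:00:00Z
bkp-003 2023-08-03T01:00:00Z`

    lines := strings.Split(strings.TrimSpace(listing), "\n")

    first := strings.Fields(lines[1])[0]           // tail -n+2 | head -n1
    last := strings.Fields(lines[len(lines)-1])[0] // tail -n1

    fmt.Println("export first entry:", first)
    fmt.Println("export last entry:", last)
}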

.github/workflows/nightly_test.yml (vendored, 3 changes)

@@ -59,6 +59,7 @@ jobs:
       AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }}
       AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
       CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
+      CORSO_ENABLE_GROUPS: true
     steps:
       - uses: actions/checkout@v3
 
@@ -75,7 +76,7 @@ jobs:
 
       # AWS creds
       - name: Configure AWS credentials from Test account
-        uses: aws-actions/configure-aws-credentials@v2
+        uses: aws-actions/configure-aws-credentials@v3
         with:
           role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
           role-session-name: integration-testing

.github/workflows/sanity-test.yaml (vendored, 1 change)

@@ -39,6 +39,7 @@ jobs:
       CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-sanity.log
       RESTORE_DEST_PFX: Corso_Test_Sanity_
       TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || secrets.CORSO_M365_TEST_USER_ID }}
+      CORSO_ENABLE_GROUPS: true
 
     defaults:
       run:

CHANGELOG.md (18 changes)

@@ -7,15 +7,29 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased] (beta)
 
+### Changed
+- SharePoint exported libraries are now exported with a `Libraries` prefix.
+
+## [v0.12.0] (beta) - 2023-08-29
+
 ### Added
+- Added `export` command to export data from OneDrive and SharePoint backups as individual files or as a single zip file.
 - Restore commands now accept an optional resource override with the `--to-resource` flag. This allows restores to recreate backup data within different mailboxes, sites, and users.
+- Improve `--mask-sensitive-data` logging mode.
+- Reliability: Handle connection cancellation and resets observed when backing up or restoring large data sets.
+- Reliability: Recover from Graph SDK panics when the Graph API returns incomplete responses.
+- Performance: Improve backup delete performance by batching multiple storage operations into a single operation.
 
 ### Fixed
 - SharePoint document libraries deleted after the last backup can now be restored.
 - Restore requires the protected resource to have access to the service being restored.
+- SharePoint data from multiple document libraries are not merged in exports
+- `corso backup delete` was not removing the backup details data associated with that snapshot
+- Fix OneDrive restores could fail with a concurrent map write error
+- Fix backup list displaying backups that had errors
+- Fix OneDrive backup could fail if item was deleted during backup
+- Exchange backups would fail attempting to use delta tokens even if the user was over quota
 
-### Added
-
-- Added option to export data from OneDrive and SharePoint backups as individual files or as a single zip file.
-
 ## [v0.11.1] (beta) - 2023-07-20

Dockerfile

@@ -1,4 +1,4 @@
-FROM golang:1.19-alpine as builder
+FROM golang:1.20-alpine as builder
 
 WORKDIR /go/src/app
 COPY src .

@@ -3,6 +3,7 @@ package backup
 import (
     "context"
     "fmt"
+    "os"
     "strings"
 
     "github.com/alcionai/clues"
@@ -39,8 +40,9 @@ var serviceCommands = []func(cmd *cobra.Command) *cobra.Command{
     addExchangeCommands,
     addOneDriveCommands,
     addSharePointCommands,
-    addGroupsCommands,
-    addTeamsCommands,
+    // awaiting release
+    // addGroupsCommands,
+    // addTeamsCommands,
 }
 
 // AddCommands attaches all `corso backup * *` commands to the parent.
@@ -55,6 +57,12 @@ func AddCommands(cmd *cobra.Command) {
     for _, addBackupTo := range serviceCommands {
         addBackupTo(subCommand)
     }
+
+    // delete after release
+    if len(os.Getenv("CORSO_ENABLE_GROUPS")) > 0 {
+        addGroupsCommands(subCommand)
+        addTeamsCommands(subCommand)
+    }
 }
 }
 
@@ -247,7 +255,10 @@ func runBackups(
         return Only(ctx, clues.Wrap(berrs.Failure(), "Unable to retrieve backup results from storage"))
     }
 
-    Info(ctx, "Completed Backups:")
+    if len(bups) > 0 {
+        Info(ctx, "Completed Backups:")
+    }
 
     backup.PrintAll(ctx, bups)
 
    if len(errs) > 0 {
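
The same gated registration repeats in the export and restore packages below. A self-contained sketch of the pattern using cobra; the `addGroupsCommands`/`addTeamsCommands` bodies here are illustrative stand-ins, not Corso's real constructors:

package main

import (
    "os"

    "github.com/spf13/cobra"
)

// Stand-in registrars: each attaches one service subcommand to the parent,
// mirroring the shape of Corso's add*Commands helpers.
func addGroupsCommands(parent *cobra.Command) *cobra.Command {
    c := &cobra.Command{Use: "groups", Short: "Groups support (pre-release)"}
    parent.AddCommand(c)
    return c
}

func addTeamsCommands(parent *cobra.Command) *cobra.Command {
    c := &cobra.Command{Use: "teams", Short: "Teams support (pre-release)"}
    parent.AddCommand(c)
    return c
}

func main() {
    backupCmd := &cobra.Command{Use: "backup"}

    // Temporary gate from this commit ("delete after release"): register the
    // unreleased commands only when CORSO_ENABLE_GROUPS is non-empty.
    if len(os.Getenv("CORSO_ENABLE_GROUPS")) > 0 {
        addGroupsCommands(backupCmd)
        addTeamsCommands(backupCmd)
    }

    _ = backupCmd.Execute()
}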

@@ -3,6 +3,7 @@ package export
 import (
     "context"
     "errors"
+    "os"
 
     "github.com/alcionai/clues"
     "github.com/spf13/cobra"
@@ -21,8 +22,9 @@ import (
 var exportCommands = []func(cmd *cobra.Command) *cobra.Command{
     addOneDriveCommands,
     addSharePointCommands,
-    addGroupsCommands,
-    addTeamsCommands,
+    // awaiting release
+    // addGroupsCommands,
+    // addTeamsCommands,
 }
 
 // AddCommands attaches all `corso export * *` commands to the parent.
@@ -33,6 +35,12 @@ func AddCommands(cmd *cobra.Command) {
     for _, addExportTo := range exportCommands {
         addExportTo(exportC)
     }
+
+    // delete after release
+    if len(os.Getenv("CORSO_ENABLE_GROUPS")) > 0 {
+        addGroupsCommands(exportC)
+        addTeamsCommands(exportC)
+    }
 }
 
 const exportCommand = "export"

@@ -2,6 +2,7 @@ package restore
 
 import (
     "context"
+    "os"
 
     "github.com/alcionai/clues"
     "github.com/pkg/errors"
@@ -20,6 +21,9 @@ var restoreCommands = []func(cmd *cobra.Command) *cobra.Command{
     addExchangeCommands,
     addOneDriveCommands,
     addSharePointCommands,
+    // awaiting release
+    // addGroupsCommands,
+    // addTeamsCommands,
 }
 
 // AddCommands attaches all `corso restore * *` commands to the parent.
@@ -30,6 +34,12 @@ func AddCommands(cmd *cobra.Command) {
     for _, addRestoreTo := range restoreCommands {
         addRestoreTo(restoreC)
     }
+
+    // delete after release
+    if len(os.Getenv("CORSO_ENABLE_GROUPS")) > 0 {
+        addGroupsCommands(restoreC)
+        addTeamsCommands(restoreC)
+    }
 }
 
 const restoreCommand = "restore"

go.mod

@@ -8,7 +8,7 @@ require (
     github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.1
     github.com/alcionai/clues v0.0.0-20230728164842-7dc4795a43e4
     github.com/armon/go-metrics v0.4.1
-    github.com/aws/aws-sdk-go v1.44.331
+    github.com/aws/aws-sdk-go v1.44.334
     github.com/aws/aws-xray-sdk-go v1.8.1
     github.com/cenkalti/backoff/v4 v4.2.1
     github.com/google/uuid v1.3.1

go.sum

@@ -66,8 +66,8 @@ github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/
 github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
 github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
 github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
-github.com/aws/aws-sdk-go v1.44.331 h1:hEwdOTv6973uegCUY2EY8jyyq0OUg9INc0HOzcu2bjw=
-github.com/aws/aws-sdk-go v1.44.331/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
+github.com/aws/aws-sdk-go v1.44.334 h1:h2bdbGb//fez6Sv6PaYv868s9liDeoYM6hYsAqTB4MU=
+github.com/aws/aws-sdk-go v1.44.334/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
 github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo=
 github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A=
 github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A=

@@ -3,12 +3,14 @@ package kopia
 import (
     "context"
     "errors"
+    "strings"
     "time"
 
     "github.com/alcionai/clues"
     "github.com/kopia/kopia/repo/manifest"
     "github.com/kopia/kopia/snapshot"
     "golang.org/x/exp/maps"
+    "golang.org/x/exp/slices"
 
     "github.com/alcionai/corso/src/internal/data"
     "github.com/alcionai/corso/src/internal/model"
@@ -17,6 +19,12 @@ import (
     "github.com/alcionai/corso/src/pkg/store"
 )
 
+const (
+    serviceCatTagPrefix = "sc-"
+    kopiaPathLabel      = "path"
+    tenantTag           = "tenant"
+)
+
 // cleanupOrphanedData uses bs and mf to lookup all models/snapshots for backups
 // and deletes items that are older than nowFunc() - gcBuffer (cutoff) that are
 // not "complete" backups with:
@@ -67,8 +75,11 @@ func cleanupOrphanedData(
         // 1. check if there's a corresponding backup for them
         // 2. delete the details if they're orphaned
         deets = map[manifest.ID]struct{}{}
-        // dataSnaps is a hash set of the snapshot IDs for item data snapshots.
-        dataSnaps = map[manifest.ID]struct{}{}
+        // dataSnaps is a hash map of the snapshot IDs for item data snapshots.
+        dataSnaps = map[manifest.ID]*manifest.EntryMetadata{}
+        // toDelete is the set of objects to delete from kopia. It starts out with
+        // all items and has ineligible items removed from it.
+        toDelete = map[manifest.ID]struct{}{}
     )
 
     cutoff := nowFunc().Add(-gcBuffer)
@@ -81,9 +92,11 @@ func cleanupOrphanedData(
             continue
         }
 
+        toDelete[snap.ID] = struct{}{}
+
         k, _ := makeTagKV(TagBackupCategory)
         if _, ok := snap.Labels[k]; ok {
-            dataSnaps[snap.ID] = struct{}{}
+            dataSnaps[snap.ID] = snap
             continue
         }
 
@@ -106,6 +119,7 @@ func cleanupOrphanedData(
         }
 
         deets[d.ModelStoreID] = struct{}{}
+        toDelete[d.ModelStoreID] = struct{}{}
     }
 
     // Get all backup models.
@@ -114,8 +128,16 @@ func cleanupOrphanedData(
         return clues.Wrap(err, "getting all backup models")
     }
 
-    toDelete := maps.Clone(deets)
-    maps.Copy(toDelete, dataSnaps)
+    var (
+        // assistBackups is the set of backups that have a
+        // * label denoting they're an assist backup
+        // * item data snapshot
+        // * details snapshot
+        assistBackups []*backup.Backup
+        // mostRecentMergeBase maps the reason to its most recent merge base's
+        // creation time. The map key is created using keysForBackup.
+        mostRecentMergeBase = map[string]time.Time{}
+    )
 
     for _, bup := range bups {
         // Don't even try to see if this needs garbage collected because it's not
@@ -150,7 +172,7 @@ func cleanupOrphanedData(
             // This isn't expected to really pop up, but it's possible if this
             // function is run concurrently with either a backup delete or another
             // instance of this function.
-            logger.Ctx(ctx).Debugw(
+            logger.Ctx(ctx).Infow(
                 "backup model not found",
                 "search_backup_id", bup.ModelStoreID)
 
@@ -162,7 +184,7 @@ func cleanupOrphanedData(
             ssid = bm.DetailsID
         }
 
-        _, dataOK := dataSnaps[manifest.ID(bm.SnapshotID)]
+        d, dataOK := dataSnaps[manifest.ID(bm.SnapshotID)]
         _, deetsOK := deets[manifest.ID(ssid)]
 
         // All data is present, we shouldn't garbage collect this backup.
@@ -170,6 +192,59 @@ func cleanupOrphanedData(
             delete(toDelete, bup.ModelStoreID)
             delete(toDelete, manifest.ID(bm.SnapshotID))
             delete(toDelete, manifest.ID(ssid))
+
+            // This is a little messy to have, but can simplify the logic below.
+            // The state of tagging in corso isn't all that great right now and we'd
+            // really like to consolidate tags and clean them up. For now, we're
+            // going to copy tags that are related to Reasons for a backup from the
+            // item data snapshot to the backup model. This makes the function
+            // checking if assist backups should be garbage collected a bit easier
+            // because now they only have to source data from backup models.
+            if err := transferTags(d, &bm); err != nil {
+                logger.CtxErr(ctx, err).Infow(
+                    "transferring legacy tags to backup model",
+                    "snapshot_id", d.ID,
+                    "backup_id", bup.ID)
+
+                // Continuing here means the base won't be eligible for old assist
+                // base garbage collection or as a newer merge base timestamp.
+                //
+                // We could add more logic to eventually delete the base if it's an
+                // assist base. If it's a merge base then it should be mostly harmless
+                // as a newer merge base should cause older assist bases to be garbage
+                // collected.
+                //
+                // Either way, I don't really expect to see failures when transferring
+                // tags so not worth adding extra code for unless we see it become a
+                // problem.
+                continue
+            }
+
+            // Add to the assist backup set so that we can attempt to garbage collect
+            // older assist backups below.
+            if bup.Tags[model.BackupTypeTag] == model.AssistBackup {
+                assistBackups = append(assistBackups, &bm)
+                continue
+            }
+
+            // If it's a merge base track the time it was created so we can check
+            // later if we should remove all assist bases or not.
+            tags, err := keysForBackup(&bm)
+            if err != nil {
+                logger.CtxErr(ctx, err).
+                    Info("getting Reason keys for merge base. May keep an additional assist base")
+            }
+
+            for _, tag := range tags {
+                t := mostRecentMergeBase[tag]
+                if t.After(bm.CreationTime) {
+                    // Don't update the merge base time if we've already seen a newer
+                    // merge base.
+                    continue
+                }
+
+                mostRecentMergeBase[tag] = bm.CreationTime
+            }
         }
     }
 
@@ -178,14 +253,200 @@ func cleanupOrphanedData(
         "num_items", len(toDelete),
         "kopia_ids", maps.Keys(toDelete))
 
+    // This will technically save a superset of the assist bases we should keep.
+    // The reason for that is that we only add something to the set of assist
+    // bases after we've excluded backups in the buffer time zone. For example
+    // we could discover that of the set of assist bases we have, something is
+    // the youngest and exclude it from gabage collection. However, when looking
+    // at the set of all assist bases, including those in the buffer zone, it's
+    // possible the one we thought was the youngest actually isn't and could be
+    // garbage collected.
+    //
+    // This sort of edge case will ideally happen only for a few assist bases at
+    // a time. Assuming this function is run somewhat periodically, missing these
+    // edge cases is alright because they'll get picked up on a subsequent run.
+    assistItems := collectOldAssistBases(ctx, mostRecentMergeBase, assistBackups)
+
+    logger.Ctx(ctx).Debugw(
+        "garbage collecting old assist bases",
+        "assist_num_items", len(assistItems),
+        "assist_kopia_ids", assistItems)
+
+    assistItems = append(assistItems, maps.Keys(toDelete)...)
+
     // Use single atomic batch delete operation to cleanup to keep from making a
     // bunch of manifest content blobs.
-    if err := bs.DeleteWithModelStoreIDs(ctx, maps.Keys(toDelete)...); err != nil {
+    if err := bs.DeleteWithModelStoreIDs(ctx, assistItems...); err != nil {
         return clues.Wrap(err, "deleting orphaned data")
     }
 
-    // TODO(ashmrtn): Do some pruning of assist backup models so we don't keep
-    // them around forever.
     return nil
 }
+
+var skipKeys = []string{
+    TagBackupID,
+    TagBackupCategory,
+}
+
+func transferTags(snap *manifest.EntryMetadata, bup *backup.Backup) error {
+    tenant, err := decodeElement(snap.Labels[kopiaPathLabel])
+    if err != nil {
+        return clues.Wrap(err, "decoding tenant from label")
+    }
+
+    if bup.Tags == nil {
+        bup.Tags = map[string]string{}
+    }
+
+    bup.Tags[tenantTag] = tenant
+
+    skipTags := map[string]struct{}{}
+
+    for _, k := range skipKeys {
+        key, _ := makeTagKV(k)
+        skipTags[key] = struct{}{}
+    }
+
+    // Safe to check only this because the old field was deprecated prior to the
+    // tagging of assist backups and this function only deals with assist
+    // backups.
+    roid := bup.ProtectedResourceID
+
+    roidK, _ := makeTagKV(roid)
+    skipTags[roidK] = struct{}{}
+
+    // This is hacky, but right now we don't have a good way to get only the
+    // Reason tags for something. We can however, find them by searching for all
+    // the "normalized" tags and then discarding the ones we know aren't
+    // reasons. Unfortunately this won't work if custom tags are added to the
+    // backup that we don't know about.
+    //
+    // Convert them to the newer format that we'd like to have where the
+    // service/category tags have the form "sc-<service><category>".
+    for tag := range snap.Labels {
+        if _, ok := skipTags[tag]; ok || !strings.HasPrefix(tag, userTagPrefix) {
+            continue
+        }
+
+        bup.Tags[strings.Replace(tag, userTagPrefix, serviceCatTagPrefix, 1)] = "0"
+    }
+
+    return nil
+}
+
+// keysForBackup returns a slice of string keys representing the Reasons for this
+// backup. If there's a problem creating the keys an error is returned.
+func keysForBackup(bup *backup.Backup) ([]string, error) {
+    var (
+        res []string
+        // Safe to pull from this field since assist backups came after we switched
+        // to using ProtectedResourceID.
+        roid = bup.ProtectedResourceID
+    )
+
+    tenant := bup.Tags[tenantTag]
+    if len(tenant) == 0 {
+        // We can skip this backup. It won't get garbage collected, but it also
+        // won't result in incorrect behavior overall.
+        return nil, clues.New("missing tenant tag in backup").
+            With("backup_id", bup.ID)
+    }
+
+    for tag := range bup.Tags {
+        if strings.HasPrefix(tag, serviceCatTagPrefix) {
+            // Precise way we concatenate all this info doesn't really matter as
+            // long as it's consistent for all backups in the set and includes all
+            // the pieces we need to ensure uniqueness across.
+            fullTag := tenant + roid + tag
+            res = append(res, fullTag)
+        }
+    }
+
+    return res, nil
+}
+
+func collectOldAssistBases(
+    ctx context.Context,
+    mostRecentMergeBase map[string]time.Time,
+    bups []*backup.Backup,
+) []manifest.ID {
+    // maybeDelete is the set of backups that could be deleted. It starts out as
+    // the set of all backups and has ineligible backups removed from it.
+    maybeDelete := map[manifest.ID]*backup.Backup{}
+    // Figure out which backups have overlapping reasons. A single backup can
+    // appear in multiple slices in the map, one for each Reason associated with
+    // it.
+    bupsByReason := map[string][]*backup.Backup{}
+
+    for _, bup := range bups {
+        tags, err := keysForBackup(bup)
+        if err != nil {
+            logger.CtxErr(ctx, err).Error("not checking backup for garbage collection")
+            continue
+        }
+
+        maybeDelete[manifest.ID(bup.ModelStoreID)] = bup
+
+        for _, tag := range tags {
+            bupsByReason[tag] = append(bupsByReason[tag], bup)
+        }
+    }
+
+    // For each set of backups we found, sort them by time. Mark all but the
+    // youngest backup in each group as eligible for garbage collection.
+    //
+    // We implement this process as removing backups from the set of potential
+    // backups to delete because it's possible for a backup to to not be the
+    // youngest for one Reason but be the youngest for a different Reason (i.e.
+    // most recent exchange mail backup but not the most recent exchange
+    // contacts backup). A simple delete operation in the map is sufficient to
+    // remove a backup even if it's only the youngest for a single Reason.
+    // Otherwise we'd need to do another pass after this to determine the
+    // isYoungest status for all Reasons in the backup.
+    //
+    // TODO(ashmrtn): Handle concurrent backups somehow? Right now backups that
+    // have overlapping start and end times aren't explicitly handled.
+    for tag, bupSet := range bupsByReason {
+        if len(bupSet) == 0 {
+            continue
+        }
+
+        // Sort in reverse chronological order so that we can just remove the zeroth
+        // item from the delete set instead of getting the slice length.
+        // Unfortunately this could also put us in the pathologic case where almost
+        // all items need swapped since in theory kopia returns results in
+        // chronologic order and we're processing them in the order kopia returns
+        // them.
+        slices.SortStableFunc(bupSet, func(a, b *backup.Backup) int {
+            return -a.CreationTime.Compare(b.CreationTime)
+        })
+
+        // Only remove the youngest assist base from the deletion set if we don't
+        // have a merge base that's younger than it. We don't need to check if the
+        // value is in the map here because the zero time is always at least as old
+        // as the times we'll see in our backups (if we see the zero time in our
+        // backup it's a bug but will still pass the check to keep the backup).
+        if t := mostRecentMergeBase[tag]; !bupSet[0].CreationTime.Before(t) {
+            delete(maybeDelete, manifest.ID(bupSet[0].ModelStoreID))
+        }
+    }
+
+    res := make([]manifest.ID, 0, 3*len(maybeDelete))
+
+    // For all items remaining in the delete set, generate the final set of items
+    // to delete. This set includes the data snapshot ID, details snapshot ID, and
+    // backup model ID to delete for each backup.
+    for bupID, bup := range maybeDelete {
+        // Don't need to check if we use StreamStoreID or DetailsID because
+        // DetailsID was deprecated prior to tagging backups as assist backups.
+        // Since the input set is only assist backups there's no overlap between the
+        // two implementations.
+        res = append(
+            res,
+            bupID,
+            manifest.ID(bup.SnapshotID),
+            manifest.ID(bup.StreamStoreID))
+    }
+
+    return res
+}
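
The selection logic above, reduced to a runnable miniature: group assist backups by Reason key, keep only the youngest per key, and keep even that one only when no merge base for the same key is newer. The types and keys here are simplified stand-ins for `backup.Backup` and the tag-derived keys; the unit tests that follow exercise the real implementation over the same scenarios.

package main

import (
    "fmt"
    "time"

    "golang.org/x/exp/slices"
)

type assistBase struct {
    id      string
    reasons []string // stand-ins for tenant+resource+service/category keys
    created time.Time
}

// oldAssistBases returns the IDs eligible for garbage collection: every
// assist base except the youngest per Reason key, unless an even younger
// merge base exists for that key.
func oldAssistBases(mostRecentMerge map[string]time.Time, bases []assistBase) []string {
    maybeDelete := map[string]assistBase{}
    byReason := map[string][]assistBase{}

    for _, b := range bases {
        maybeDelete[b.id] = b
        for _, r := range b.reasons {
            byReason[r] = append(byReason[r], b)
        }
    }

    for reason, set := range byReason {
        // Reverse chronological order: set[0] is the youngest.
        slices.SortStableFunc(set, func(a, b assistBase) int {
            return -a.created.Compare(b.created)
        })

        // Keep the youngest only if no merge base for this reason is newer.
        if t := mostRecentMerge[reason]; !set[0].created.Before(t) {
            delete(maybeDelete, set[0].id)
        }
    }

    ids := make([]string, 0, len(maybeDelete))
    for id := range maybeDelete {
        ids = append(ids, id)
    }

    return ids
}

func main() {
    t0 := time.Date(2023, 8, 1, 0, 0, 0, 0, time.UTC)
    bases := []assistBase{
        {id: "assist-old", reasons: []string{"tenant1/ro/email"}, created: t0},
        {id: "assist-new", reasons: []string{"tenant1/ro/email"}, created: t0.Add(time.Hour)},
    }

    // No newer merge base exists, so only "assist-old" is collected.
    fmt.Println(oldAssistBases(map[string]time.Time{}, bases))
}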

@@ -15,6 +15,8 @@ import (
     "github.com/alcionai/corso/src/internal/model"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/pkg/backup"
+    "github.com/alcionai/corso/src/pkg/backup/identity"
+    "github.com/alcionai/corso/src/pkg/path"
 )
 
 type BackupCleanupUnitSuite struct {
@@ -163,6 +165,58 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
         }
     }
 
+    bupCurrent2 := func() *backup.Backup {
+        return &backup.Backup{
+            BaseModel: model.BaseModel{
+                ID:           model.StableID("current-bup-id-2"),
+                ModelStoreID: manifest.ID("current-bup-msid-2"),
+            },
+            SnapshotID:    "current-snap-msid-2",
+            StreamStoreID: "current-deets-msid-2",
+        }
+    }
+
+    snapCurrent2 := func() *manifest.EntryMetadata {
+        return &manifest.EntryMetadata{
+            ID: "current-snap-msid-2",
+            Labels: map[string]string{
+                backupTag: "0",
+            },
+        }
+    }
+
+    deetsCurrent2 := func() *manifest.EntryMetadata {
+        return &manifest.EntryMetadata{
+            ID: "current-deets-msid-2",
+        }
+    }
+
+    bupCurrent3 := func() *backup.Backup {
+        return &backup.Backup{
+            BaseModel: model.BaseModel{
+                ID:           model.StableID("current-bup-id-3"),
+                ModelStoreID: manifest.ID("current-bup-msid-3"),
+            },
+            SnapshotID:    "current-snap-msid-3",
+            StreamStoreID: "current-deets-msid-3",
+        }
+    }
+
+    snapCurrent3 := func() *manifest.EntryMetadata {
+        return &manifest.EntryMetadata{
+            ID: "current-snap-msid-3",
+            Labels: map[string]string{
+                backupTag: "0",
+            },
+        }
+    }
+
+    deetsCurrent3 := func() *manifest.EntryMetadata {
+        return &manifest.EntryMetadata{
+            ID: "current-deets-msid-3",
+        }
+    }
+
     // Legacy backup with details in separate model.
     bupLegacy := func() *backup.Backup {
         return &backup.Backup{
@@ -261,9 +315,53 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
         return &res
     }
 
+    manifestWithReasons := func(
+        m *manifest.EntryMetadata,
+        tenantID string,
+        reasons ...identity.Reasoner,
+    ) *manifest.EntryMetadata {
+        res := *m
+
+        if res.Labels == nil {
+            res.Labels = map[string]string{}
+        }
+
+        res.Labels[kopiaPathLabel] = encodeAsPath(tenantID)
+
+        // Add the given reasons.
+        for _, r := range reasons {
+            for _, k := range tagKeys(r) {
+                key, _ := makeTagKV(k)
+                res.Labels[key] = "0"
+            }
+        }
+
+        // Also add other common reasons on item data snapshots.
+        k, _ := makeTagKV(TagBackupCategory)
+        res.Labels[k] = "0"
+
+        return &res
+    }
+
     backupWithTime := func(mt time.Time, b *backup.Backup) *backup.Backup {
         res := *b
         res.ModTime = mt
+        res.CreationTime = mt
+
+        return &res
+    }
+
+    backupWithResource := func(protectedResource string, isAssist bool, b *backup.Backup) *backup.Backup {
+        res := *b
+        res.ProtectedResourceID = protectedResource
+
+        if isAssist {
+            if res.Tags == nil {
+                res.Tags = map[string]string{}
+            }
+
+            res.Tags[model.BackupTypeTag] = model.AssistBackup
+        }
+
         return &res
     }
@@ -529,6 +627,230 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
             buffer:    24 * time.Hour,
             expectErr: assert.NoError,
         },
+        // Tests dealing with assist base cleanup.
+        {
+            // Test that even if we have multiple assist bases with the same
+            // Reason(s), none of them are garbage collected if they are within the
+            // buffer period used to exclude recently created backups from garbage
+            // collection.
+            name: "AssistBase NotYoungest InBufferTime Noops",
+            snapshots: []*manifest.EntryMetadata{
+                manifestWithReasons(
+                    manifestWithTime(baseTime, snapCurrent()),
+                    "tenant1",
+                    NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
+                manifestWithTime(baseTime, deetsCurrent()),
+
+                manifestWithReasons(
+                    manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
+                    "tenant1",
+                    NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
+                manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
+            },
+            backups: []backupRes{
+                {bup: backupWithResource("ro", true, backupWithTime(baseTime, bupCurrent()))},
+                {bup: backupWithResource("ro", true, backupWithTime(baseTime.Add(time.Second), bupCurrent2()))},
+            },
+            time:      baseTime,
+            buffer:    24 * time.Hour,
+            expectErr: assert.NoError,
+        },
+        {
+            // Test that an assist base that has the same Reasons as a newer assist
+            // base is garbage collected when it's outside the buffer period.
+            name: "AssistBases NotYoungest CausesCleanup",
+            snapshots: []*manifest.EntryMetadata{
+                manifestWithReasons(
+                    manifestWithTime(baseTime, snapCurrent()),
+                    "tenant1",
+                    NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
+                manifestWithTime(baseTime, deetsCurrent()),
+
+                manifestWithReasons(
+                    manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
+                    "tenant1",
+                    NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
+                manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
+
+                manifestWithReasons(
+                    manifestWithTime(baseTime.Add(time.Minute), snapCurrent3()),
+                    "tenant1",
+                    NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
+                manifestWithTime(baseTime.Add(time.Minute), deetsCurrent3()),
+            },
+            backups: []backupRes{
+                {bup: backupWithResource("ro", true, backupWithTime(baseTime, bupCurrent()))},
+                {bup: backupWithResource("ro", true, backupWithTime(baseTime.Add(time.Second), bupCurrent2()))},
+                {bup: backupWithResource("ro", true, backupWithTime(baseTime.Add(time.Minute), bupCurrent3()))},
+            },
+            expectDeleteIDs: []manifest.ID{
+                snapCurrent().ID,
+                deetsCurrent().ID,
+                manifest.ID(bupCurrent().ModelStoreID),
+                snapCurrent2().ID,
+                deetsCurrent2().ID,
+                manifest.ID(bupCurrent2().ModelStoreID),
+            },
+            time:      baseTime.Add(48 * time.Hour),
+            buffer:    24 * time.Hour,
+            expectErr: assert.NoError,
+        },
+        {
+            // Test that the most recent assist base is garbage collected if there's a
+            // newer merge base that has the same Reasons as the assist base.
+            name: "AssistBasesAndMergeBases NotYoungest CausesCleanupForAssistBase",
+            snapshots: []*manifest.EntryMetadata{
+                manifestWithReasons(
+                    manifestWithTime(baseTime, snapCurrent()),
+                    "tenant1",
+                    NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
+                manifestWithTime(baseTime, deetsCurrent()),
+
+                manifestWithReasons(
+                    manifestWithTime(baseTime.Add(time.Minute), snapCurrent2()),
+                    "tenant1",
+                    NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
+                manifestWithTime(baseTime.Add(time.Minute), deetsCurrent2()),
+
+                manifestWithReasons(
+                    manifestWithTime(baseTime.Add(time.Second), snapCurrent3()),
+                    "tenant1",
+                    NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
+                manifestWithTime(baseTime.Add(time.Second), deetsCurrent3()),
+            },
+            backups: []backupRes{
+                {bup: backupWithResource("ro", true, backupWithTime(baseTime, bupCurrent()))},
+                {bup: backupWithResource("ro", false, backupWithTime(baseTime.Add(time.Minute), bupCurrent2()))},
+                {bup: backupWithResource("ro", false, backupWithTime(baseTime.Add(-time.Second), bupCurrent3()))},
+            },
+            expectDeleteIDs: []manifest.ID{
+                snapCurrent().ID,
+                deetsCurrent().ID,
+                manifest.ID(bupCurrent().ModelStoreID),
+            },
+            time:      baseTime.Add(48 * time.Hour),
+            buffer:    24 * time.Hour,
+            expectErr: assert.NoError,
+        },
+        {
+            // Test that an assist base that is not the most recent for Reason A but
+            // is the most recent for Reason B is not garbage collected.
+            name: "AssistBases YoungestInOneReason Noops",
+            snapshots: []*manifest.EntryMetadata{
+                manifestWithReasons(
+                    manifestWithTime(baseTime, snapCurrent()),
+                    "tenant1",
+                    NewReason("", "ro", path.ExchangeService, path.EmailCategory),
+                    NewReason("", "ro", path.ExchangeService, path.ContactsCategory)),
+                manifestWithTime(baseTime, deetsCurrent()),
+
+                manifestWithReasons(
+                    manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
+                    "tenant1",
+                    NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
+                manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
+            },
+            backups: []backupRes{
+                {bup: backupWithResource("ro", true, backupWithTime(baseTime, bupCurrent()))},
+                {bup: backupWithResource("ro", true, backupWithTime(baseTime.Add(time.Second), bupCurrent2()))},
+            },
+            time:      baseTime.Add(48 * time.Hour),
+            buffer:    24 * time.Hour,
+            expectErr: assert.NoError,
+        },
+        {
+            // Test that assist bases that have the same tenant, service, and category
+            // but different protected resources are not garbage collected. This is
+            // a test to ensure the Reason field is properly handled when finding the
+            // most recent assist base.
+            name: "AssistBases DifferentProtectedResources Noops",
+            snapshots: []*manifest.EntryMetadata{
+                manifestWithReasons(
+                    manifestWithTime(baseTime, snapCurrent()),
+                    "tenant1",
+                    NewReason("", "ro1", path.ExchangeService, path.EmailCategory)),
+                manifestWithTime(baseTime, deetsCurrent()),
+
+                manifestWithReasons(
+                    manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
+                    "tenant1",
+                    NewReason("", "ro2", path.ExchangeService, path.EmailCategory)),
+                manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
+            },
+            backups: []backupRes{
+                {bup: backupWithResource("ro1", true, backupWithTime(baseTime, bupCurrent()))},
+                {bup: backupWithResource("ro2", true, backupWithTime(baseTime.Add(time.Second), bupCurrent2()))},
+            },
+            time:      baseTime.Add(48 * time.Hour),
+            buffer:    24 * time.Hour,
+            expectErr: assert.NoError,
+        },
+        {
+            // Test that assist bases that have the same protected resource, service,
+            // and category but different tenants are not garbage collected. This is a
+            // test to ensure the Reason field is properly handled when finding the
+            // most recent assist base.
+            name: "AssistBases DifferentTenants Noops",
+            snapshots: []*manifest.EntryMetadata{
+                manifestWithReasons(
+                    manifestWithTime(baseTime, snapCurrent()),
+                    "tenant1",
+                    NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
+                manifestWithTime(baseTime, deetsCurrent()),
+
+                manifestWithReasons(
+                    manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
+                    "tenant2",
+                    NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
+                manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
+            },
+            backups: []backupRes{
+                {bup: backupWithResource("ro", true, backupWithTime(baseTime, bupCurrent()))},
+                {bup: backupWithResource("ro", true, backupWithTime(baseTime.Add(time.Second), bupCurrent2()))},
+            },
+            time:      baseTime.Add(48 * time.Hour),
+            buffer:    24 * time.Hour,
+            expectErr: assert.NoError,
+        },
+        {
+            // Test that if the tenant is not available for a given assist base that
+            // it's excluded from the garbage collection set. This behavior is
+            // conservative because it's quite likely that we could garbage collect
+            // the base without issue.
+            name: "AssistBases NoTenant SkipsBackup",
+            snapshots: []*manifest.EntryMetadata{
+                manifestWithReasons(
+                    manifestWithTime(baseTime, snapCurrent()),
+                    "",
+                    NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
+                manifestWithTime(baseTime, deetsCurrent()),
+
+                manifestWithReasons(
+                    manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
+                    "tenant1",
+                    NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
+                manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
|
||||||
|
|
||||||
|
manifestWithReasons(
|
||||||
|
manifestWithTime(baseTime.Add(time.Minute), snapCurrent3()),
|
||||||
|
"tenant1",
|
||||||
|
NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
|
||||||
|
manifestWithTime(baseTime.Add(time.Minute), deetsCurrent3()),
|
||||||
|
},
|
||||||
|
backups: []backupRes{
|
||||||
|
{bup: backupWithResource("ro", true, backupWithTime(baseTime, bupCurrent()))},
|
||||||
|
{bup: backupWithResource("ro", true, backupWithTime(baseTime.Add(time.Second), bupCurrent2()))},
|
||||||
|
{bup: backupWithResource("ro", true, backupWithTime(baseTime.Add(time.Minute), bupCurrent3()))},
|
||||||
|
},
|
||||||
|
time: baseTime.Add(48 * time.Hour),
|
||||||
|
buffer: 24 * time.Hour,
|
||||||
|
expectDeleteIDs: []manifest.ID{
|
||||||
|
snapCurrent2().ID,
|
||||||
|
deetsCurrent2().ID,
|
||||||
|
manifest.ID(bupCurrent2().ModelStoreID),
|
||||||
|
},
|
||||||
|
expectErr: assert.NoError,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, test := range table {
|
for _, test := range table {
|
||||||
|
|||||||
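Every case above pins time to baseTime.Add(48 * time.Hour) with a 24-hour buffer, so age never blocks cleanup and the outcomes are decided purely by the Reason, tenant, and recency checks. A minimal sketch of that age gate, inferred from the table's time/buffer fields rather than taken from the implementation:

	package cleanup

	import "time"

	// eligibleByAge reports whether a base's modification time falls outside
	// the safety buffer; a hypothetical helper mirroring the test parameters.
	func eligibleByAge(modTime, now time.Time, buffer time.Duration) bool {
		return modTime.Before(now.Add(-buffer))
	}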
@@ -23,7 +23,7 @@ import (
 )
 
 func openKopiaRepo(
-	t *testing.T,
+	t tester.TestT,
 	ctx context.Context, //revive:disable-line:context-as-argument
 ) (*conn, error) {
 	st := storeTD.NewPrefixedS3Storage(t)
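openKopiaRepo now accepts the package's tester.TestT instead of a concrete *testing.T. Its definition isn't shown in this diff, but an interface of roughly this shape would let tests and benchmarks share the helper (a sketch, not the actual declaration):

	package tester

	// TestT is assumed to cover the small testing.TB surface the helper
	// touches; both *testing.T and *testing.B would satisfy it.
	type TestT interface {
		Helper()
		Logf(format string, args ...any)
		Fatalf(format string, args ...any)
	}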
@@ -877,8 +877,8 @@ func traverseBaseDir(
 	stats *count.Bus,
 ) error {
 	ctx = clues.Add(ctx,
-		"old_dir_path", oldDirPath,
-		"expected_dir_path", expectedDirPath)
+		"old_parent_dir_path", oldDirPath,
+		"expected_parent_dir_path", expectedDirPath)
 
 	if depth >= maxInflateTraversalDepth {
 		return clues.New("base snapshot tree too tall").WithClues(ctx)
|||||||
@@ -182,9 +182,9 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
 	folderPath, err := pb.ToDataLayerOneDrivePath("tenant", "owner", false)
 	require.NoError(t, err, clues.ToCore(err))
 
-	mbh := mock.DefaultOneDriveBH()
+	mbh := mock.DefaultOneDriveBH("a-user")
 	if test.service == path.SharePointService {
-		mbh = mock.DefaultSharePointBH()
+		mbh = mock.DefaultSharePointBH("a-site")
 		mbh.ItemInfo.SharePoint.Modified = now
 		mbh.ItemInfo.SharePoint.ItemName = stubItemName
 	} else {
@@ -301,7 +301,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
 	folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
 	require.NoError(t, err, clues.ToCore(err))
 
-	mbh := mock.DefaultOneDriveBH()
+	mbh := mock.DefaultOneDriveBH("a-user")
 	mbh.GI = mock.GetsItem{Err: assert.AnError}
 	mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
 	mbh.GetResps = []*http.Response{
@@ -378,7 +378,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadUnauthorizedErrorRetry()
 	folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
 	require.NoError(t, err)
 
-	mbh := mock.DefaultOneDriveBH()
+	mbh := mock.DefaultOneDriveBH("a-user")
 	mbh.GI = mock.GetsItem{Item: stubItem}
 	mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
 	mbh.GetResps = []*http.Response{
@@ -436,7 +436,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTim
 	folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
 	require.NoError(t, err, clues.ToCore(err))
 
-	mbh := mock.DefaultOneDriveBH()
+	mbh := mock.DefaultOneDriveBH("a-user")
 	mbh.ItemInfo = details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: "fakeName", Modified: time.Now()}}
 	mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
 	mbh.GetResps = []*http.Response{{
@@ -587,7 +587,7 @@ func (suite *GetDriveItemUnitTestSuite) TestGetDriveItem_error() {
 		true,
 		false)
 
-	mbh := mock.DefaultOneDriveBH()
+	mbh := mock.DefaultOneDriveBH("a-user")
 	mbh.GI = mock.GetsItem{Item: stubItem}
 	mbh.GetResps = []*http.Response{{StatusCode: http.StatusOK}}
 	mbh.GetErrs = []error{test.err}
@@ -766,7 +766,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 		}
 	}
 
-	mbh := mock.DefaultOneDriveBH()
+	mbh := mock.DefaultOneDriveBH("a-user")
 	mbh.GI = test.mgi
 	mbh.ItemInfo = test.itemInfo
 	mbh.GetResps = resps
@@ -932,7 +932,7 @@ func (suite *CollectionUnitTestSuite) TestItemExtensions() {
 
 	wg.Add(1)
 
-	mbh := mock.DefaultOneDriveBH()
+	mbh := mock.DefaultOneDriveBH("a-user")
 	mbh.GI = mock.GetsItem{Err: assert.AnError}
 	mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
 	mbh.GetResps = []*http.Response{
@@ -349,7 +349,7 @@ func (c *Collections) Get(
 				continue
 			}
 
-			p, err := c.handler.CanonicalPath(odConsts.DriveFolderPrefixBuilder(driveID), c.tenantID, c.resourceOwner)
+			p, err := c.handler.CanonicalPath(odConsts.DriveFolderPrefixBuilder(driveID), c.tenantID)
 			if err != nil {
 				return nil, false, clues.Wrap(err, "making exclude prefix").WithClues(ictx)
 			}
@@ -413,7 +413,7 @@ func (c *Collections) Get(
 
 	// generate tombstones for drives that were removed.
 	for driveID := range driveTombstones {
-		prevDrivePath, err := c.handler.PathPrefix(c.tenantID, c.resourceOwner, driveID)
+		prevDrivePath, err := c.handler.PathPrefix(c.tenantID, driveID)
 		if err != nil {
 			return nil, false, clues.Wrap(err, "making drive tombstone for previous path").WithClues(ctx)
 		}
@@ -642,7 +642,7 @@ func (c *Collections) getCollectionPath(
 		pb = path.Builder{}.Append(path.Split(ptr.Val(item.GetParentReference().GetPath()))...)
 	}
 
-	collectionPath, err := c.handler.CanonicalPath(pb, c.tenantID, c.resourceOwner)
+	collectionPath, err := c.handler.CanonicalPath(pb, c.tenantID)
 	if err != nil {
 		return nil, clues.Wrap(err, "making item path")
 	}
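The hunks above drop the resource-owner argument from the path builders: CanonicalPath and PathPrefix now take only the tenant, and the owner travels with the handler itself (note itemBackupHandler{userID: user} further down in this diff). A toy illustration of the shape of this refactor, with hypothetical names:

	package main

	import "fmt"

	// pathHandler stands in for itemBackupHandler: the owner is captured
	// once at construction instead of being threaded through every call.
	type pathHandler struct {
		userID string
	}

	// canonicalPath needs only the tenant now; the owner comes from the struct.
	func (h pathHandler) canonicalPath(prefix, tenant string) string {
		return fmt.Sprintf("%s/onedrive/%s/files/%s", tenant, h.userID, prefix)
	}

	func main() {
		h := pathHandler{userID: "a-user"}
		fmt.Println(h.canonicalPath("drives/driveID1/root:", "a-tenant"))
	}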
@@ -40,7 +40,7 @@ type statePath struct {
 func getExpectedStatePathGenerator(
 	t *testing.T,
 	bh BackupHandler,
-	tenant, user, base string,
+	tenant, base string,
 ) func(data.CollectionState, ...string) statePath {
 	return func(state data.CollectionState, pths ...string) statePath {
 		var (
@@ -56,12 +56,12 @@ func getExpectedStatePathGenerator(
 		} else {
 			require.Len(t, pths, 2, "invalid number of paths to getExpectedStatePathGenerator")
 			pb := path.Builder{}.Append(path.Split(base + pths[1])...)
-			p2, err = bh.CanonicalPath(pb, tenant, user)
+			p2, err = bh.CanonicalPath(pb, tenant)
 			require.NoError(t, err, clues.ToCore(err))
 		}
 
 		pb := path.Builder{}.Append(path.Split(base + pths[0])...)
-		p1, err = bh.CanonicalPath(pb, tenant, user)
+		p1, err = bh.CanonicalPath(pb, tenant)
 		require.NoError(t, err, clues.ToCore(err))
 
 		switch state {
@@ -88,11 +88,11 @@ func getExpectedStatePathGenerator(
 func getExpectedPathGenerator(
 	t *testing.T,
 	bh BackupHandler,
-	tenant, user, base string,
+	tenant, base string,
 ) func(string) string {
 	return func(p string) string {
 		pb := path.Builder{}.Append(path.Split(base + p)...)
-		cp, err := bh.CanonicalPath(pb, tenant, user)
+		cp, err := bh.CanonicalPath(pb, tenant)
 		require.NoError(t, err, clues.ToCore(err))
 
 		return cp.String()
@@ -129,10 +129,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
 		pkg = "/package"
 	)
 
-	bh := itemBackupHandler{}
+	bh := itemBackupHandler{userID: user}
 	testBaseDrivePath := odConsts.DriveFolderPrefixBuilder("driveID1").String()
-	expectedPath := getExpectedPathGenerator(suite.T(), bh, tenant, user, testBaseDrivePath)
-	expectedStatePath := getExpectedStatePathGenerator(suite.T(), bh, tenant, user, testBaseDrivePath)
+	expectedPath := getExpectedPathGenerator(suite.T(), bh, tenant, testBaseDrivePath)
+	expectedStatePath := getExpectedStatePathGenerator(suite.T(), bh, tenant, testBaseDrivePath)
 
 	tests := []struct {
 		testCase string
@@ -744,7 +744,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
 			maps.Copy(outputFolderMap, tt.inputFolderMap)
 
 			c := NewCollections(
-				&itemBackupHandler{api.Drives{}, tt.scope},
+				&itemBackupHandler{api.Drives{}, user, tt.scope},
 				tenant,
 				user,
 				nil,
|||||||
require.False(t, canUsePreviousBackup)
|
require.False(t, canUsePreviousBackup)
|
||||||
}
|
}
|
||||||
|
|
||||||
type mockDeltaPageLinker struct {
|
|
||||||
link *string
|
|
||||||
delta *string
|
|
||||||
}
|
|
||||||
|
|
||||||
func (pl *mockDeltaPageLinker) GetOdataNextLink() *string {
|
|
||||||
return pl.link
|
|
||||||
}
|
|
||||||
|
|
||||||
func (pl *mockDeltaPageLinker) GetOdataDeltaLink() *string {
|
|
||||||
return pl.delta
|
|
||||||
}
|
|
||||||
|
|
||||||
type deltaPagerResult struct {
|
|
||||||
items []models.DriveItemable
|
|
||||||
nextLink *string
|
|
||||||
deltaLink *string
|
|
||||||
err error
|
|
||||||
}
|
|
||||||
|
|
||||||
type mockItemPager struct {
|
|
||||||
// DriveID -> set of return values for queries for that drive.
|
|
||||||
toReturn []deltaPagerResult
|
|
||||||
getIdx int
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *mockItemPager) GetPage(context.Context) (api.DeltaPageLinker, error) {
|
|
||||||
if len(p.toReturn) <= p.getIdx {
|
|
||||||
return nil, assert.AnError
|
|
||||||
}
|
|
||||||
|
|
||||||
idx := p.getIdx
|
|
||||||
p.getIdx++
|
|
||||||
|
|
||||||
return &mockDeltaPageLinker{
|
|
||||||
p.toReturn[idx].nextLink,
|
|
||||||
p.toReturn[idx].deltaLink,
|
|
||||||
}, p.toReturn[idx].err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p *mockItemPager) SetNext(string) {}
|
|
||||||
func (p *mockItemPager) Reset() {}
|
|
||||||
|
|
||||||
func (p *mockItemPager) ValuesIn(api.DeltaPageLinker) ([]models.DriveItemable, error) {
|
|
||||||
idx := p.getIdx
|
|
||||||
if idx > 0 {
|
|
||||||
// Return values lag by one since we increment in GetPage().
|
|
||||||
idx--
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(p.toReturn) <= idx {
|
|
||||||
return nil, assert.AnError
|
|
||||||
}
|
|
||||||
|
|
||||||
return p.toReturn[idx].items, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
func (suite *OneDriveCollectionsUnitSuite) TestGet() {
|
||||||
var (
|
var (
|
||||||
tenant = "a-tenant"
|
tenant = "a-tenant"
|
||||||
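The hand-rolled mockDeltaPageLinker/mockItemPager pair deleted above is replaced throughout the rest of this diff by generic mocks; the call sites show that apiMock.PagerResult[T] carries Values, NextLink, DeltaLink, and Err fields, and that apiMock.DeltaPager[T] takes a ToReturn slice. A minimal sketch of what such a generic scripted pager could look like (the real apiMock implementation is not part of this diff):

	package apimock

	// PagerResult is one scripted page of results.
	type PagerResult[T any] struct {
		Values    []T     // items on this page
		NextLink  *string // non-nil when another page follows
		DeltaLink *string // non-nil on the final page of a delta query
		Err       error   // returned in place of a page when non-nil
	}

	// DeltaPager replays its script page by page, like the old mockItemPager.
	type DeltaPager[T any] struct {
		ToReturn []PagerResult[T]
		idx      int
	}

	// nextPage pops the next scripted page; ok is false once the script ends.
	func (p *DeltaPager[T]) nextPage() (r PagerResult[T], ok bool) {
		if p.idx >= len(p.ToReturn) {
			return PagerResult[T]{}, false
		}

		r = p.ToReturn[p.idx]
		p.idx++

		return r, true
	}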
@@ -1265,13 +1208,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 	drive2.SetName(&driveID2)
 
 	var (
-		bh = itemBackupHandler{}
+		bh = itemBackupHandler{userID: user}
 
 		driveBasePath1 = odConsts.DriveFolderPrefixBuilder(driveID1).String()
 		driveBasePath2 = odConsts.DriveFolderPrefixBuilder(driveID2).String()
 
-		expectedPath1 = getExpectedPathGenerator(suite.T(), bh, tenant, user, driveBasePath1)
-		expectedPath2 = getExpectedPathGenerator(suite.T(), bh, tenant, user, driveBasePath2)
+		expectedPath1 = getExpectedPathGenerator(suite.T(), bh, tenant, driveBasePath1)
+		expectedPath2 = getExpectedPathGenerator(suite.T(), bh, tenant, driveBasePath2)
 
 		rootFolderPath1 = expectedPath1("")
 		folderPath1     = expectedPath1("/folder")
@@ -1283,7 +1226,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 	table := []struct {
 		name                 string
 		drives               []models.Driveable
-		items                map[string][]deltaPagerResult
+		items                map[string][]apiMock.PagerResult[models.DriveItemable]
 		canUsePreviousBackup bool
 		errCheck             assert.ErrorAssertionFunc
 		prevFolderPaths      map[string]map[string]string
@@ -1302,14 +1245,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "OneDrive_OneItemPage_DelFileOnly_NoFolders_NoErrors",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"), // will be present, not needed
 							delItem("file", driveBasePath1, "root", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -1334,14 +1277,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "OneDrive_OneItemPage_NoFolderDeltas_NoErrors",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("file", "file", driveBasePath1, "root", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -1366,15 +1309,15 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "OneDrive_OneItemPage_NoErrors",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
 							driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -1403,16 +1346,16 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "OneDrive_OneItemPage_NoErrors_FileRenamedMultiple",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
 							driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
 							driveItem("file", "file2", driveBasePath1+"/folder", "folder", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -1441,16 +1384,16 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "OneDrive_OneItemPage_NoErrors_FileMovedMultiple",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
 							driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
 							driveItem("file", "file2", driveBasePath1, "root", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -1481,15 +1424,15 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "OneDrive_OneItemPage_EmptyDelta_NoErrors",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
 							driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
 						},
-						deltaLink: &empty, // probably will never happen with graph
+						DeltaLink: &empty, // probably will never happen with graph
 					},
 				},
 			},
@@ -1518,23 +1461,23 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "OneDrive_TwoItemPages_NoErrors",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
 							driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
 						},
-						nextLink: &next,
+						NextLink: &next,
 					},
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
 							driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -1568,25 +1511,25 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 				drive1,
 				drive2,
 			},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
 							driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 				driveID2: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root2"),
 							driveItem("folder2", "folder", driveBasePath2, "root2", false, true, false),
 							driveItem("file2", "file", driveBasePath2+"/folder", "folder2", true, false, false),
 						},
-						deltaLink: &delta2,
+						DeltaLink: &delta2,
 					},
 				},
 			},
@@ -1630,25 +1573,25 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 				drive1,
 				drive2,
 			},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
 							driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 				driveID2: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder", "folder", driveBasePath2, "root", false, true, false),
 							driveItem("file2", "file", driveBasePath2+"/folder", "folder", true, false, false),
 						},
-						deltaLink: &delta2,
+						DeltaLink: &delta2,
 					},
 				},
 			},
@@ -1689,10 +1632,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "OneDrive_OneItemPage_Errors",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						err: assert.AnError,
+						Err: assert.AnError,
 					},
 				},
 			},
@@ -1709,17 +1652,17 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "OneDrive_OneItemPage_DeltaError",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						err: getDeltaError(),
+						Err: getDeltaError(),
 					},
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("file", "file", driveBasePath1, "root", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -1744,25 +1687,25 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "OneDrive_TwoItemPage_DeltaError",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						err: getDeltaError(),
+						Err: getDeltaError(),
 					},
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("file", "file", driveBasePath1, "root", true, false, false),
 						},
-						nextLink: &next,
+						NextLink: &next,
 					},
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
 							driveItem("file2", "file", driveBasePath1+"/folder", "folder", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -1790,22 +1733,22 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "OneDrive_TwoItemPage_NoDeltaError",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("file", "file", driveBasePath1, "root", true, false, false),
 						},
-						nextLink: &next,
+						NextLink: &next,
 					},
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
 							driveItem("file2", "file", driveBasePath1+"/folder", "folder", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -1837,18 +1780,18 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "OneDrive_OneItemPage_InvalidPrevDelta_DeleteNonExistentFolder",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						err: getDeltaError(),
+						Err: getDeltaError(),
 					},
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder2", "folder2", driveBasePath1, "root", false, true, false),
 							driveItem("file", "file", driveBasePath1+"/folder2", "folder2", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -1884,18 +1827,18 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtDeletedLocation",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						err: getDeltaError(),
+						Err: getDeltaError(),
 					},
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder2", "folder", driveBasePath1, "root", false, true, false),
 							driveItem("file", "file", driveBasePath1+"/folder", "folder2", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -1934,25 +1877,25 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "OneDrive Two Item Pages with Malware",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
 							driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
 							malwareItem("malware", "malware", driveBasePath1+"/folder", "folder", true, false, false),
 						},
-						nextLink: &next,
+						NextLink: &next,
 					},
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
 							driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false),
 							malwareItem("malware2", "malware2", driveBasePath1+"/folder", "folder", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -1984,28 +1927,28 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "One Drive Delta Error Deleted Folder In New Results",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						err: getDeltaError(),
+						Err: getDeltaError(),
 					},
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
 							driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
 							driveItem("folder2", "folder2", driveBasePath1, "root", false, true, false),
 							driveItem("file2", "file2", driveBasePath1+"/folder2", "folder2", true, false, false),
 						},
-						nextLink: &next,
+						NextLink: &next,
 					},
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							delItem("folder2", driveBasePath1, "root", false, true, false),
 							delItem("file2", driveBasePath1, "root", true, false, false),
 						},
-						deltaLink: &delta2,
+						DeltaLink: &delta2,
 					},
 				},
 			},
@@ -2042,17 +1985,17 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "One Drive Delta Error Random Folder Delete",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						err: getDeltaError(),
+						Err: getDeltaError(),
 					},
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							delItem("folder", driveBasePath1, "root", false, true, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -2085,17 +2028,17 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "One Drive Delta Error Random Item Delete",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						err: getDeltaError(),
+						Err: getDeltaError(),
 					},
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							delItem("file", driveBasePath1, "root", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -2125,23 +2068,23 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "One Drive Folder Made And Deleted",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
 							driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
 						},
-						nextLink: &next,
+						NextLink: &next,
 					},
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							delItem("folder", driveBasePath1, "root", false, true, false),
 							delItem("file", driveBasePath1, "root", true, false, false),
 						},
-						deltaLink: &delta2,
+						DeltaLink: &delta2,
 					},
 				},
 			},
@@ -2169,22 +2112,22 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "One Drive Item Made And Deleted",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
 							driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
 						},
-						nextLink: &next,
+						NextLink: &next,
 					},
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							delItem("file", driveBasePath1, "root", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -2215,14 +2158,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "One Drive Random Folder Delete",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							delItem("folder", driveBasePath1, "root", false, true, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -2250,14 +2193,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "One Drive Random Item Delete",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"),
 							delItem("file", driveBasePath1, "root", true, false, false),
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -2285,13 +2228,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 		{
 			name:   "TwoPriorDrives_OneTombstoned",
 			drives: []models.Driveable{drive1},
-			items: map[string][]deltaPagerResult{
+			items: map[string][]apiMock.PagerResult[models.DriveItemable]{
 				driveID1: {
 					{
-						items: []models.DriveItemable{
+						Values: []models.DriveItemable{
 							driveRootItem("root"), // will be present
 						},
-						deltaLink: &delta,
+						DeltaLink: &delta,
 					},
 				},
 			},
@@ -2322,21 +2265,21 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
 			ctx, flush := tester.NewContext(t)
 			defer flush()
 
-			mockDrivePager := &apiMock.DrivePager{
-				ToReturn: []apiMock.PagerResult{
-					{Drives: test.drives},
+			mockDrivePager := &apiMock.Pager[models.Driveable]{
+				ToReturn: []apiMock.PagerResult[models.Driveable]{
+					{Values: test.drives},
 				},
 			}
 
-			itemPagers := map[string]api.DriveItemDeltaEnumerator{}
+			itemPagers := map[string]api.DeltaPager[models.DriveItemable]{}
 
 			for driveID := range test.items {
-				itemPagers[driveID] = &mockItemPager{
-					toReturn: test.items[driveID],
+				itemPagers[driveID] = &apiMock.DeltaPager[models.DriveItemable]{
+					ToReturn: test.items[driveID],
 				}
 			}
 
-			mbh := mock.DefaultOneDriveBH()
+			mbh := mock.DefaultOneDriveBH("a-user")
 			mbh.DrivePagerV = mockDrivePager
 			mbh.ItemPagerV = itemPagers
 
@@ -2583,7 +2526,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
 
 	table := []struct {
 		name             string
-		items            []deltaPagerResult
+		items            []apiMock.PagerResult[models.DriveItemable]
 		deltaURL         string
 		prevDeltaSuccess bool
 		prevDelta        string
@@ -2592,8 +2535,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
 		{
 			name:     "delta on first run",
 			deltaURL: delta,
-			items: []deltaPagerResult{
-				{deltaLink: &delta},
+			items: []apiMock.PagerResult[models.DriveItemable]{
+				{DeltaLink: &delta},
 			},
 			prevDeltaSuccess: true,
 			prevDelta:        prevDelta,
@@ -2601,8 +2544,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
 		{
 			name:     "empty prev delta",
 			deltaURL: delta,
-			items: []deltaPagerResult{
-				{deltaLink: &delta},
+			items: []apiMock.PagerResult[models.DriveItemable]{
+				{DeltaLink: &delta},
 			},
 			prevDeltaSuccess: false,
 			prevDelta:        "",
@@ -2610,9 +2553,9 @@ func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
 		{
 			name:     "next then delta",
 			deltaURL: delta,
-			items: []deltaPagerResult{
-				{nextLink: &next},
-				{deltaLink: &delta},
+			items: []apiMock.PagerResult[models.DriveItemable]{
+				{NextLink: &next},
+				{DeltaLink: &delta},
 			},
 			prevDeltaSuccess: true,
 			prevDelta:        prevDelta,
@@ -2620,18 +2563,18 @@ func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
 		{
 			name:     "invalid prev delta",
 			deltaURL: delta,
-			items: []deltaPagerResult{
-				{err: getDeltaError()},
-				{deltaLink: &delta}, // works on retry
+			items: []apiMock.PagerResult[models.DriveItemable]{
+				{Err: getDeltaError()},
+				{DeltaLink: &delta}, // works on retry
 			},
 			prevDelta:        prevDelta,
 			prevDeltaSuccess: false,
 		},
 		{
 			name: "fail a normal delta query",
-			items: []deltaPagerResult{
-				{nextLink: &next},
-				{err: assert.AnError},
+			items: []apiMock.PagerResult[models.DriveItemable]{
+				{NextLink: &next},
+				{Err: assert.AnError},
 			},
 			prevDelta:        prevDelta,
 			prevDeltaSuccess: true,
@@ -2645,8 +2588,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
 			ctx, flush := tester.NewContext(t)
 			defer flush()
 
-			itemPager := &mockItemPager{
-				toReturn: test.items,
+			itemPager := &apiMock.DeltaPager[models.DriveItemable]{
+				ToReturn: test.items,
 			}
 
 			collectorFunc := func(
@@ -2687,7 +2630,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestAddURLCacheToDriveCollections() {
 
 	table := []struct {
 		name             string
-		items            []deltaPagerResult
+		items            []apiMock.PagerResult[any]
 		deltaURL         string
 		prevDeltaSuccess bool
 		prevDelta        string
@@ -2704,10 +2647,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestAddURLCacheToDriveCollections() {
 			ctx, flush := tester.NewContext(t)
 			defer flush()
 
-			itemPagers := map[string]api.DriveItemDeltaEnumerator{}
-			itemPagers[driveID] = &mockItemPager{}
+			itemPagers := map[string]api.DeltaPager[models.DriveItemable]{}
+			itemPagers[driveID] = &apiMock.DeltaPager[models.DriveItemable]{}
 
-			mbh := mock.DefaultOneDriveBH()
+			mbh := mock.DefaultOneDriveBH("test-user")
 			mbh.ItemPagerV = itemPagers
 
 			c := NewCollections(
@@ -2724,7 +2667,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestAddURLCacheToDriveCollections() {
 			// Add a few collections
 			for i := 0; i < collCount; i++ {
 				coll, err := NewCollection(
-					&itemBackupHandler{api.Drives{}, anyFolder},
+					&itemBackupHandler{api.Drives{}, "test-user", anyFolder},
 					nil,
 					nil,
 					driveID,
|
|||||||
139	src/internal/m365/collection/drive/export.go	Normal file
@@ -0,0 +1,139 @@
+package drive
+
+import (
+	"context"
+	"strings"
+
+	"github.com/alcionai/clues"
+
+	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
+	"github.com/alcionai/corso/src/internal/version"
+	"github.com/alcionai/corso/src/pkg/export"
+	"github.com/alcionai/corso/src/pkg/fault"
+)
+
+var _ export.Collection = &ExportCollection{}
+
+// ExportCollection is the implementation of export.Collection for OneDrive.
+type ExportCollection struct {
+	// baseDir contains the path of the collection
+	baseDir string
+
+	// backingCollection is the restore collection from which we will
+	// create the export collection.
+	backingCollection data.RestoreCollection
+
+	// backupVersion is the version of the backup this collection was part
+	// of. This is required to figure out how to get the name of the
+	// item.
+	backupVersion int
+}
+
+func NewExportCollection(
+	baseDir string,
+	backingCollection data.RestoreCollection,
+	backupVersion int,
+) ExportCollection {
+	return ExportCollection{
+		baseDir:           baseDir,
+		backingCollection: backingCollection,
+		backupVersion:     backupVersion,
+	}
+}
+
+func (ec ExportCollection) BasePath() string {
+	return ec.baseDir
+}
+
+func (ec ExportCollection) Items(ctx context.Context) <-chan export.Item {
+	ch := make(chan export.Item)
+	go items(ctx, ec, ch)
+
+	return ch
+}
+
+// items converts items in the backing collection to export items.
+func items(ctx context.Context, ec ExportCollection, ch chan<- export.Item) {
+	defer close(ch)
+
+	errs := fault.New(false)
+
+	for item := range ec.backingCollection.Items(ctx, errs) {
+		itemUUID := item.ID()
+		if isMetadataFile(itemUUID, ec.backupVersion) {
+			continue
+		}
+
+		name, err := getItemName(ctx, itemUUID, ec.backupVersion, ec.backingCollection)
+
+		ch <- export.Item{
+			ID: itemUUID,
+			Data: export.ItemData{
+				Name: name,
+				Body: item.ToReader(),
+			},
+			Error: err,
+		}
+	}
+
+	eitems, erecovereable := errs.ItemsAndRecovered()
+
+	// Return all the items that we failed to source from the persistence layer
+	for _, err := range eitems {
+		ch <- export.Item{
+			ID:    err.ID,
+			Error: &err,
+		}
+	}
+
+	for _, ec := range erecovereable {
+		ch <- export.Item{
+			Error: ec,
+		}
+	}
+}
// isMetadataFile is used to determine if a path corresponds to a
|
||||||
|
// metadata file. This is OneDrive specific logic and depends on the
|
||||||
|
// version of the backup unlike metadata.IsMetadataFile which only has
|
||||||
|
// to be concerned about the current version.
|
||||||
|
func isMetadataFile(id string, backupVersion int) bool {
|
||||||
|
if backupVersion < version.OneDrive1DataAndMetaFiles {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
return strings.HasSuffix(id, metadata.MetaFileSuffix) ||
|
||||||
|
strings.HasSuffix(id, metadata.DirMetaFileSuffix)
|
||||||
|
}
|
||||||
|
|
||||||
|
// getItemName is used to get the name of the item.
|
||||||
|
// How we get the name depends on the version of the backup.
|
||||||
|
func getItemName(
|
||||||
|
ctx context.Context,
|
||||||
|
id string,
|
||||||
|
backupVersion int,
|
||||||
|
fin data.FetchItemByNamer,
|
||||||
|
) (string, error) {
|
||||||
|
if backupVersion < version.OneDrive1DataAndMetaFiles {
|
||||||
|
return id, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if backupVersion < version.OneDrive5DirMetaNoName {
|
||||||
|
return strings.TrimSuffix(id, metadata.DataFileSuffix), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.HasSuffix(id, metadata.DataFileSuffix) {
|
||||||
|
trimmedName := strings.TrimSuffix(id, metadata.DataFileSuffix)
|
||||||
|
metaName := trimmedName + metadata.MetaFileSuffix
|
||||||
|
|
||||||
|
meta, err := FetchAndReadMetadata(ctx, fin, metaName)
|
||||||
|
if err != nil {
|
||||||
|
return "", clues.Wrap(err, "getting metadata").WithClues(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
return meta.FileName, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return "", clues.New("invalid item id").WithClues(ctx)
|
||||||
|
}
|
||||||
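For orientation, a caller of this collection drains the Items channel until it closes, handling per-item errors inline. The following stand-alone sketch mirrors the export.Item/ItemData shapes visible in the new file, but it is an illustration with stand-in types, not Corso's export package:

package main

import (
	"context"
	"fmt"
	"io"
	"strings"
)

type ItemData struct {
	Name string
	Body io.ReadCloser
}

type Item struct {
	ID    string
	Data  ItemData
	Error error
}

// produce mimics ExportCollection.Items: a goroutine feeds the channel
// and closes it once the backing collection is exhausted.
func produce(ctx context.Context) <-chan Item {
	ch := make(chan Item)

	go func() {
		defer close(ch)
		ch <- Item{
			ID:   "id1.data",
			Data: ItemData{Name: "report.docx", Body: io.NopCloser(strings.NewReader("content"))},
		}
	}()

	return ch
}

func main() {
	for item := range produce(context.Background()) {
		if item.Error != nil {
			fmt.Println("failed item:", item.ID, item.Error)
			continue
		}

		body, _ := io.ReadAll(item.Data.Body)
		item.Data.Body.Close()
		fmt.Printf("%s -> %q\n", item.Data.Name, body)
	}
}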
145  src/internal/m365/collection/drive/export_test.go  (new file)
@@ -0,0 +1,145 @@
+package drive
+
+import (
+    "bytes"
+    "context"
+    "io"
+    "testing"
+
+    "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/suite"
+
+    "github.com/alcionai/corso/src/internal/data"
+    dataMock "github.com/alcionai/corso/src/internal/data/mock"
+    "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
+    "github.com/alcionai/corso/src/internal/tester"
+    "github.com/alcionai/corso/src/internal/version"
+)
+
+type ExportUnitSuite struct {
+    tester.Suite
+}
+
+func TestExportUnitSuite(t *testing.T) {
+    suite.Run(t, &ExportUnitSuite{Suite: tester.NewUnitSuite(t)})
+}
+
+func (suite *ExportUnitSuite) TestIsMetadataFile() {
+    table := []struct {
+        name          string
+        id            string
+        backupVersion int
+        isMeta        bool
+    }{
+        {
+            name:          "legacy",
+            backupVersion: version.OneDrive1DataAndMetaFiles,
+            isMeta:        false,
+        },
+        {
+            name:          "metadata file",
+            backupVersion: version.OneDrive3IsMetaMarker,
+            id:            "name" + metadata.MetaFileSuffix,
+            isMeta:        true,
+        },
+        {
+            name:          "dir metadata file",
+            backupVersion: version.OneDrive3IsMetaMarker,
+            id:            "name" + metadata.DirMetaFileSuffix,
+            isMeta:        true,
+        },
+        {
+            name:          "non metadata file",
+            backupVersion: version.OneDrive3IsMetaMarker,
+            id:            "name" + metadata.DataFileSuffix,
+            isMeta:        false,
+        },
+    }
+
+    for _, test := range table {
+        suite.Run(test.name, func() {
+            assert.Equal(suite.T(), test.isMeta, isMetadataFile(test.id, test.backupVersion), "is metadata")
+        })
+    }
+}
+
+type finD struct {
+    id   string
+    name string
+    err  error
+}
+
+func (fd finD) FetchItemByName(ctx context.Context, name string) (data.Item, error) {
+    if fd.err != nil {
+        return nil, fd.err
+    }
+
+    if name == fd.id {
+        return &dataMock.Item{
+            ItemID: fd.id,
+            Reader: io.NopCloser(bytes.NewBufferString(`{"filename": "` + fd.name + `"}`)),
+        }, nil
+    }
+
+    return nil, assert.AnError
+}
+
+func (suite *ExportUnitSuite) TestGetItemName() {
+    table := []struct {
+        tname         string
+        id            string
+        backupVersion int
+        name          string
+        fin           data.FetchItemByNamer
+        errFunc       assert.ErrorAssertionFunc
+    }{
+        {
+            tname:         "legacy",
+            id:            "name",
+            backupVersion: version.OneDrive1DataAndMetaFiles,
+            name:          "name",
+            errFunc:       assert.NoError,
+        },
+        {
+            tname:         "name in filename",
+            id:            "name.data",
+            backupVersion: version.OneDrive4DirIncludesPermissions,
+            name:          "name",
+            errFunc:       assert.NoError,
+        },
+        {
+            tname:         "name in metadata",
+            id:            "id.data",
+            backupVersion: version.Backup,
+            name:          "name",
+            fin:           finD{id: "id.meta", name: "name"},
+            errFunc:       assert.NoError,
+        },
+        {
+            tname:         "name in metadata but error",
+            id:            "id.data",
+            backupVersion: version.Backup,
+            name:          "",
+            fin:           finD{err: assert.AnError},
+            errFunc:       assert.Error,
+        },
+    }
+
+    for _, test := range table {
+        suite.Run(test.tname, func() {
+            t := suite.T()
+
+            ctx, flush := tester.NewContext(t)
+            defer flush()
+
+            name, err := getItemName(
+                ctx,
+                test.id,
+                test.backupVersion,
+                test.fin)
+            test.errFunc(t, err)
+
+            assert.Equal(t, test.name, name, "name")
+        })
+    }
+}
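The finD fixture above feeds getItemName a JSON blob shaped like {"filename": "name"}, which FetchAndReadMetadata presumably decodes into a struct whose FileName field carries the display name. A stand-alone sketch of that decode step; the Metadata struct here is assumed, matching only the JSON shown in the test:

package main

import (
	"encoding/json"
	"fmt"
)

// Metadata models just the one field exercised by the test fixture.
type Metadata struct {
	FileName string `json:"filename"`
}

func main() {
	raw := []byte(`{"filename": "report.docx"}`)

	var meta Metadata
	if err := json.Unmarshal(raw, &meta); err != nil {
		panic(err)
	}

	fmt.Println(meta.FileName) // report.docx
}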
@@ -1,6 +1,7 @@
 package drive

 import (
+    odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
     "github.com/alcionai/corso/src/pkg/path"
     "github.com/alcionai/corso/src/pkg/selectors"
     "github.com/alcionai/corso/src/pkg/services/m365/api"

@@ -14,10 +15,15 @@ type groupBackupHandler struct {
     scope selectors.GroupsScope
 }

-func NewGroupBackupHandler(groupID string, ac api.Drives, scope selectors.GroupsScope) groupBackupHandler {
+func NewGroupBackupHandler(
+    groupID, siteID string,
+    ac api.Drives,
+    scope selectors.GroupsScope,
+) groupBackupHandler {
     return groupBackupHandler{
         libraryBackupHandler{
             ac: ac,
+            siteID: siteID,
             // Not adding scope here. Anything that needs scope has to
             // be from group handler
             service: path.GroupsService,

@@ -27,16 +33,36 @@ func NewGroupBackupHandler(groupID string, ac api.Drives, scope selectors.Groups
     }
 }

-func (h groupBackupHandler) CanonicalPath(
-    folders *path.Builder,
-    tenantID, resourceOwner string,
+func (h groupBackupHandler) PathPrefix(
+    tenantID, driveID string,
 ) (path.Path, error) {
-    // TODO(meain): path fixes
-    return folders.ToDataLayerPath(tenantID, h.groupID, h.service, path.LibrariesCategory, false)
+    // TODO: move tenantID to struct
+    return path.Build(
+        tenantID,
+        h.groupID,
+        h.service,
+        path.LibrariesCategory,
+        false,
+        odConsts.SitesPathDir,
+        h.siteID,
+        odConsts.DrivesPathDir,
+        driveID,
+        odConsts.RootPathDir)
 }

-func (h groupBackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
-    return path.GroupsService, path.LibrariesCategory
+func (h groupBackupHandler) CanonicalPath(
+    folders *path.Builder,
+    tenantID string,
+) (path.Path, error) {
+    return folders.ToDataLayerPath(
+        tenantID,
+        h.groupID,
+        h.service,
+        path.LibrariesCategory,
+        false,
+        odConsts.SitesPathDir,
+        h.siteID,
+    )
 }

 func (h groupBackupHandler) IsAllPass() bool {
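The new PathPrefix nests the site and drive segments under the group owner. A rough stand-alone illustration of the resulting layout: tenant / groupID / groups service / libraries / sites / siteID / drives / driveID / root. The real escaping and validation live in Corso's path package; this stand-in only joins the same elements, and the literal service/category strings in the comments are assumptions:

package main

import (
	"fmt"
	"strings"
)

func buildPrefix(tenantID, groupID, siteID, driveID string) string {
	elems := []string{
		tenantID,
		groupID,
		"groups",    // stand-in for path.GroupsService
		"libraries", // stand-in for path.LibrariesCategory
		"sites",     // odConsts.SitesPathDir
		siteID,
		"drives", // odConsts.DrivesPathDir
		driveID,
		"root", // odConsts.RootPathDir
	}

	return strings.Join(elems, "/")
}

func main() {
	fmt.Println(buildPrefix("tid", "gid", "sid", "did"))
	// tid/gid/groups/libraries/sites/sid/drives/did/root
}

Note that dropping the group handler's own ServiceCat also means it now inherits the embedded libraryBackupHandler's version, which returns h.service rather than a hardcoded SharePoint value (see the library handler hunks below).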
@@ -39,18 +39,15 @@ type BackupHandler interface {

     // PathPrefix constructs the service and category specific path prefix for
     // the given values.
-    PathPrefix(tenantID, resourceOwner, driveID string) (path.Path, error)
+    PathPrefix(tenantID, driveID string) (path.Path, error)

     // CanonicalPath constructs the service and category specific path for
     // the given values.
-    CanonicalPath(
-        folders *path.Builder,
-        tenantID, resourceOwner string,
-    ) (path.Path, error)
+    CanonicalPath(folders *path.Builder, tenantID string) (path.Path, error)

     // ServiceCat returns the service and category used by this implementation.
     ServiceCat() (path.ServiceType, path.CategoryType)
-    NewItemPager(driveID, link string, fields []string) api.DriveItemDeltaEnumerator
+    NewItemPager(driveID, link string, fields []string) api.DeltaPager[models.DriveItemable]
     // FormatDisplayPath creates a human-readable string to represent the
     // provided path.
     FormatDisplayPath(driveName string, parentPath *path.Builder) string

@@ -62,7 +59,7 @@ type BackupHandler interface {
 }

 type NewDrivePagerer interface {
-    NewDrivePager(resourceOwner string, fields []string) api.DrivePager
+    NewDrivePager(resourceOwner string, fields []string) api.Pager[models.Driveable]
 }

 type GetItemPermissioner interface {
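The interface changes above suggest that api.Pager[T] and api.DeltaPager[T] are generic interfaces roughly shaped like the following. The exact method sets live in Corso's api package and are not shown in this diff, so treat this as an inferred sketch, not the real definitions:

package main

import "context"

// PageLinker is a stand-in for whatever the SDK page type exposes.
type PageLinker interface {
	GetOdataNextLink() *string
}

// Pager walks a plain paginated Graph listing of T values.
// Method names and signatures here are assumptions.
type Pager[T any] interface {
	GetPage(context.Context) (PageLinker, error)
	ValuesIn(PageLinker) ([]T, error)
}

// DeltaPager additionally tracks a delta token and can be reset to a
// full enumeration, which is why Reset gains a context in this refactor.
type DeltaPager[T any] interface {
	Pager[T]
	Reset(context.Context)
}

func main() {}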
@@ -42,7 +42,7 @@ type itemCollector func(
 // provided `collector` method
 func collectItems(
     ctx context.Context,
-    pager api.DriveItemDeltaEnumerator,
+    pager api.DeltaPager[models.DriveItemable],
     driveID, driveName string,
     collector itemCollector,
     oldPaths map[string]string,

@@ -85,7 +85,7 @@ func collectItems(
             invalidPrevDelta = true
             newPaths = map[string]string{}

-            pager.Reset()
+            pager.Reset(ctx)

             continue
         }
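A sketch of the invalid-delta recovery pattern collectItems uses: when the previous delta token is rejected, accumulated state is dropped, the pager is reset (now with a context, so the reset can log or be cancelled), and enumeration restarts from scratch. The pager interface below is a simplified stand-in for api.DeltaPager[models.DriveItemable]:

package main

import (
	"context"
	"errors"
	"fmt"
)

var errInvalidDelta = errors.New("invalid delta token")

type deltaPager interface {
	NextPage(ctx context.Context) ([]string, error)
	Reset(ctx context.Context)
}

func collect(ctx context.Context, pager deltaPager) ([]string, error) {
	items := []string{}

	for {
		page, err := pager.NextPage(ctx)
		if errors.Is(err, errInvalidDelta) {
			// Previous delta is stale: clear state and start a full enumeration.
			items = items[:0]
			pager.Reset(ctx)

			continue
		}

		if err != nil {
			return nil, err
		}

		if page == nil { // enumeration complete
			return items, nil
		}

		items = append(items, page...)
	}
}

// fakePager fails with a stale delta until reset, then serves pages in order.
type fakePager struct {
	pages [][]string
	reset bool
	idx   int
}

func (f *fakePager) NextPage(context.Context) ([]string, error) {
	if !f.reset {
		return nil, errInvalidDelta
	}
	if f.idx >= len(f.pages) {
		return nil, nil
	}
	p := f.pages[f.idx]
	f.idx++
	return p, nil
}

func (f *fakePager) Reset(context.Context) { f.reset = true }

func main() {
	items, err := collect(context.Background(), &fakePager{pages: [][]string{{"a"}, {"b"}}})
	fmt.Println(items, err) // [a b] <nil>
}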
@@ -72,26 +72,26 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
         resultDrives = append(resultDrives, d)
     }

-    tooManyRetries := make([]mock.PagerResult, 0, maxDrivesRetries+1)
+    tooManyRetries := make([]mock.PagerResult[models.Driveable], 0, maxDrivesRetries+1)

     for i := 0; i < maxDrivesRetries+1; i++ {
-        tooManyRetries = append(tooManyRetries, mock.PagerResult{
+        tooManyRetries = append(tooManyRetries, mock.PagerResult[models.Driveable]{
             Err: context.DeadlineExceeded,
         })
     }

     table := []struct {
         name            string
-        pagerResults    []mock.PagerResult
+        pagerResults    []mock.PagerResult[models.Driveable]
         retry           bool
         expectedErr     assert.ErrorAssertionFunc
         expectedResults []models.Driveable
     }{
         {
             name: "AllOneResultNilNextLink",
-            pagerResults: []mock.PagerResult{
+            pagerResults: []mock.PagerResult[models.Driveable]{
                 {
-                    Drives:   resultDrives,
+                    Values:   resultDrives,
                     NextLink: nil,
                     Err:      nil,
                 },

@@ -102,9 +102,9 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
         },
         {
             name: "AllOneResultEmptyNextLink",
-            pagerResults: []mock.PagerResult{
+            pagerResults: []mock.PagerResult[models.Driveable]{
                 {
-                    Drives:   resultDrives,
+                    Values:   resultDrives,
                     NextLink: &emptyLink,
                     Err:      nil,
                 },

@@ -115,14 +115,14 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
         },
         {
             name: "SplitResultsNilNextLink",
-            pagerResults: []mock.PagerResult{
+            pagerResults: []mock.PagerResult[models.Driveable]{
                 {
-                    Drives:   resultDrives[:numDriveResults/2],
+                    Values:   resultDrives[:numDriveResults/2],
                     NextLink: &link,
                     Err:      nil,
                 },
                 {
-                    Drives:   resultDrives[numDriveResults/2:],
+                    Values:   resultDrives[numDriveResults/2:],
                     NextLink: nil,
                     Err:      nil,
                 },

@@ -133,14 +133,14 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
         },
         {
             name: "SplitResultsEmptyNextLink",
-            pagerResults: []mock.PagerResult{
+            pagerResults: []mock.PagerResult[models.Driveable]{
                 {
-                    Drives:   resultDrives[:numDriveResults/2],
+                    Values:   resultDrives[:numDriveResults/2],
                     NextLink: &link,
                     Err:      nil,
                 },
                 {
-                    Drives:   resultDrives[numDriveResults/2:],
+                    Values:   resultDrives[numDriveResults/2:],
                     NextLink: &emptyLink,
                     Err:      nil,
                 },

@@ -151,14 +151,14 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
         },
         {
             name: "NonRetryableError",
-            pagerResults: []mock.PagerResult{
+            pagerResults: []mock.PagerResult[models.Driveable]{
                 {
-                    Drives:   resultDrives,
+                    Values:   resultDrives,
                     NextLink: &link,
                     Err:      nil,
                 },
                 {
-                    Drives:   nil,
+                    Values:   nil,
                     NextLink: nil,
                     Err:      assert.AnError,
                 },

@@ -169,9 +169,9 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
         },
         {
             name: "MySiteURLNotFound",
-            pagerResults: []mock.PagerResult{
+            pagerResults: []mock.PagerResult[models.Driveable]{
                 {
-                    Drives:   nil,
+                    Values:   nil,
                     NextLink: nil,
                     Err:      graph.Stack(ctx, mySiteURLNotFound),
                 },

@@ -182,9 +182,9 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
         },
         {
             name: "MySiteNotFound",
-            pagerResults: []mock.PagerResult{
+            pagerResults: []mock.PagerResult[models.Driveable]{
                 {
-                    Drives:   nil,
+                    Values:   nil,
                     NextLink: nil,
                     Err:      graph.Stack(ctx, mySiteNotFound),
                 },

@@ -195,19 +195,19 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
         },
         {
             name: "SplitResultsContextTimeoutWithRetries",
-            pagerResults: []mock.PagerResult{
+            pagerResults: []mock.PagerResult[models.Driveable]{
                 {
-                    Drives:   resultDrives[:numDriveResults/2],
+                    Values:   resultDrives[:numDriveResults/2],
                     NextLink: &link,
                     Err:      nil,
                 },
                 {
-                    Drives:   nil,
+                    Values:   nil,
                     NextLink: nil,
                     Err:      context.DeadlineExceeded,
                 },
                 {
-                    Drives:   resultDrives[numDriveResults/2:],
+                    Values:   resultDrives[numDriveResults/2:],
                     NextLink: &emptyLink,
                     Err:      nil,
                 },

@@ -218,19 +218,19 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
         },
         {
             name: "SplitResultsContextTimeoutNoRetries",
-            pagerResults: []mock.PagerResult{
+            pagerResults: []mock.PagerResult[models.Driveable]{
                 {
-                    Drives:   resultDrives[:numDriveResults/2],
+                    Values:   resultDrives[:numDriveResults/2],
                     NextLink: &link,
                     Err:      nil,
                 },
                 {
-                    Drives:   nil,
+                    Values:   nil,
                     NextLink: nil,
                     Err:      context.DeadlineExceeded,
                 },
                 {
-                    Drives:   resultDrives[numDriveResults/2:],
+                    Values:   resultDrives[numDriveResults/2:],
                     NextLink: &emptyLink,
                     Err:      nil,
                 },

@@ -242,9 +242,9 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
         {
             name: "TooManyRetries",
             pagerResults: append(
-                []mock.PagerResult{
+                []mock.PagerResult[models.Driveable]{
                     {
-                        Drives:   resultDrives[:numDriveResults/2],
+                        Values:   resultDrives[:numDriveResults/2],
                         NextLink: &link,
                         Err:      nil,
                     },

@@ -263,7 +263,7 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
             ctx, flush := tester.NewContext(t)
             defer flush()

-            pager := &mock.DrivePager{
+            pager := &mock.Pager[models.Driveable]{
                 ToReturn: test.pagerResults,
             }

@@ -344,7 +344,7 @@ func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() {
     )

     colls := NewCollections(
-        &itemBackupHandler{suite.ac.Drives(), scope},
+        &itemBackupHandler{suite.ac.Drives(), test.user, scope},
         creds.AzureTenantID,
         test.user,
         service.updateStatus,
@@ -23,12 +23,13 @@ import (
 var _ BackupHandler = &itemBackupHandler{}

 type itemBackupHandler struct {
     ac     api.Drives
+    userID string
     scope  selectors.OneDriveScope
 }

-func NewItemBackupHandler(ac api.Drives, scope selectors.OneDriveScope) *itemBackupHandler {
-    return &itemBackupHandler{ac, scope}
+func NewItemBackupHandler(ac api.Drives, userID string, scope selectors.OneDriveScope) *itemBackupHandler {
+    return &itemBackupHandler{ac, userID, scope}
 }

 func (h itemBackupHandler) Get(

@@ -40,11 +41,11 @@ func (h itemBackupHandler) Get(
 }

 func (h itemBackupHandler) PathPrefix(
-    tenantID, resourceOwner, driveID string,
+    tenantID, driveID string,
 ) (path.Path, error) {
     return path.Build(
         tenantID,
-        resourceOwner,
+        h.userID,
         path.OneDriveService,
         path.FilesCategory,
         false,

@@ -55,9 +56,9 @@ func (h itemBackupHandler) PathPrefix(

 func (h itemBackupHandler) CanonicalPath(
     folders *path.Builder,
-    tenantID, resourceOwner string,
+    tenantID string,
 ) (path.Path, error) {
-    return folders.ToDataLayerOneDrivePath(tenantID, resourceOwner, false)
+    return folders.ToDataLayerOneDrivePath(tenantID, h.userID, false)
 }

 func (h itemBackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {

@@ -66,14 +67,14 @@ func (h itemBackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {

 func (h itemBackupHandler) NewDrivePager(
     resourceOwner string, fields []string,
-) api.DrivePager {
+) api.Pager[models.Driveable] {
     return h.ac.NewUserDrivePager(resourceOwner, fields)
 }

 func (h itemBackupHandler) NewItemPager(
     driveID, link string,
     fields []string,
-) api.DriveItemDeltaEnumerator {
+) api.DeltaPager[models.DriveItemable] {
     return h.ac.NewDriveItemDeltaPager(driveID, link, fields)
 }

@@ -145,7 +146,7 @@ func (h itemRestoreHandler) PostDrive(

 func (h itemRestoreHandler) NewDrivePager(
     resourceOwner string, fields []string,
-) api.DrivePager {
+) api.Pager[models.Driveable] {
     return h.ac.NewUserDrivePager(resourceOwner, fields)
 }
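The shape of this change: the protected resource (user ID here, site ID for libraries) is captured once at handler construction instead of being threaded through every path call, so PathPrefix and CanonicalPath lose a parameter. A stand-alone sketch with simplified stand-in types, not Corso's actual handler:

package main

import "fmt"

type itemBackupHandler struct {
	userID string
}

// PathPrefix no longer needs a resourceOwner argument; the handler
// already knows which user it serves.
func (h itemBackupHandler) PathPrefix(tenantID, driveID string) string {
	return fmt.Sprintf("%s/onedrive/%s/files/drives/%s/root", tenantID, h.userID, driveID)
}

func main() {
	h := itemBackupHandler{userID: "user@contoso.com"}
	fmt.Println(h.PathPrefix("tid", "drive1"))
	// tid/onedrive/user@contoso.com/files/drives/drive1/root
}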
@@ -36,10 +36,10 @@ func (suite *ItemBackupHandlerUnitSuite) TestCanonicalPath() {
     for _, test := range table {
         suite.Run(test.name, func() {
             t := suite.T()
-            h := itemBackupHandler{}
+            h := itemBackupHandler{userID: resourceOwner}
             p := path.Builder{}.Append("prefix")

-            result, err := h.CanonicalPath(p, tenantID, resourceOwner)
+            result, err := h.CanonicalPath(p, tenantID)
             test.expectErr(t, err, clues.ToCore(err))

             if result != nil {

@@ -124,6 +124,7 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {

     bh := itemBackupHandler{
         suite.service.ac.Drives(),
+        suite.user,
         (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0],
     }
@@ -21,16 +21,18 @@ var _ BackupHandler = &libraryBackupHandler{}

 type libraryBackupHandler struct {
     ac      api.Drives
+    siteID  string
     scope   selectors.SharePointScope
     service path.ServiceType
 }

 func NewLibraryBackupHandler(
     ac api.Drives,
+    siteID string,
     scope selectors.SharePointScope,
     service path.ServiceType,
 ) libraryBackupHandler {
-    return libraryBackupHandler{ac, scope, service}
+    return libraryBackupHandler{ac, siteID, scope, service}
 }

 func (h libraryBackupHandler) Get(

@@ -42,11 +44,11 @@ func (h libraryBackupHandler) Get(
 }

 func (h libraryBackupHandler) PathPrefix(
-    tenantID, resourceOwner, driveID string,
+    tenantID, driveID string,
 ) (path.Path, error) {
     return path.Build(
         tenantID,
-        resourceOwner,
+        h.siteID,
         h.service,
         path.LibrariesCategory,
         false,

@@ -57,26 +59,26 @@ func (h libraryBackupHandler) PathPrefix(

 func (h libraryBackupHandler) CanonicalPath(
     folders *path.Builder,
-    tenantID, resourceOwner string,
+    tenantID string,
 ) (path.Path, error) {
-    return folders.ToDataLayerPath(tenantID, resourceOwner, h.service, path.LibrariesCategory, false)
+    return folders.ToDataLayerPath(tenantID, h.siteID, h.service, path.LibrariesCategory, false)
 }

 func (h libraryBackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
-    return path.SharePointService, path.LibrariesCategory
+    return h.service, path.LibrariesCategory
 }

 func (h libraryBackupHandler) NewDrivePager(
     resourceOwner string,
     fields []string,
-) api.DrivePager {
+) api.Pager[models.Driveable] {
     return h.ac.NewSiteDrivePager(resourceOwner, fields)
 }

 func (h libraryBackupHandler) NewItemPager(
     driveID, link string,
     fields []string,
-) api.DriveItemDeltaEnumerator {
+) api.DeltaPager[models.DriveItemable] {
     return h.ac.NewDriveItemDeltaPager(driveID, link, fields)
 }

@@ -184,7 +186,7 @@ func (h libraryRestoreHandler) PostDrive(
 func (h libraryRestoreHandler) NewDrivePager(
     resourceOwner string,
     fields []string,
-) api.DrivePager {
+) api.Pager[models.Driveable] {
     return h.ac.Drives().NewSiteDrivePager(resourceOwner, fields)
 }

@@ -36,10 +36,10 @@ func (suite *LibraryBackupHandlerUnitSuite) TestCanonicalPath() {
     for _, test := range table {
         suite.Run(test.name, func() {
             t := suite.T()
-            h := libraryBackupHandler{service: path.SharePointService}
+            h := libraryBackupHandler{service: path.SharePointService, siteID: resourceOwner}
             p := path.Builder{}.Append("prefix")

-            result, err := h.CanonicalPath(p, tenantID, resourceOwner)
+            result, err := h.CanonicalPath(p, tenantID)
             test.expectErr(t, err, clues.ToCore(err))

             if result != nil {

@@ -52,7 +52,7 @@ func (suite *LibraryBackupHandlerUnitSuite) TestCanonicalPath() {
 func (suite *LibraryBackupHandlerUnitSuite) TestServiceCat() {
     t := suite.T()

-    s, c := libraryBackupHandler{}.ServiceCat()
+    s, c := libraryBackupHandler{service: path.SharePointService}.ServiceCat()
     assert.Equal(t, path.SharePointService, s)
     assert.Equal(t, path.LibrariesCategory, c)
 }
@@ -408,7 +408,7 @@ func (suite *RestoreUnitSuite) TestRestoreCaches_AddDrive() {
 type mockGDPARF struct {
     err        error
     rootFolder models.DriveItemable
-    pager      *apiMock.DrivePager
+    pager      *apiMock.Pager[models.Driveable]
 }

 func (m *mockGDPARF) GetRootFolder(

@@ -421,7 +421,7 @@ func (m *mockGDPARF) GetRootFolder(
 func (m *mockGDPARF) NewDrivePager(
     string,
     []string,
-) api.DrivePager {
+) api.Pager[models.Driveable] {
     return m.pager
 }

@@ -439,16 +439,16 @@ func (suite *RestoreUnitSuite) TestRestoreCaches_Populate() {

     table := []struct {
         name        string
-        mock        *apiMock.DrivePager
+        mock        *apiMock.Pager[models.Driveable]
         expectErr   require.ErrorAssertionFunc
         expectLen   int
         checkValues bool
     }{
         {
             name: "no results",
-            mock: &apiMock.DrivePager{
-                ToReturn: []apiMock.PagerResult{
-                    {Drives: []models.Driveable{}},
+            mock: &apiMock.Pager[models.Driveable]{
+                ToReturn: []apiMock.PagerResult[models.Driveable]{
+                    {Values: []models.Driveable{}},
                 },
             },
             expectErr: require.NoError,

@@ -456,9 +456,9 @@ func (suite *RestoreUnitSuite) TestRestoreCaches_Populate() {
         },
         {
             name: "one result",
-            mock: &apiMock.DrivePager{
-                ToReturn: []apiMock.PagerResult{
-                    {Drives: []models.Driveable{md}},
+            mock: &apiMock.Pager[models.Driveable]{
+                ToReturn: []apiMock.PagerResult[models.Driveable]{
+                    {Values: []models.Driveable{md}},
                 },
             },
             expectErr: require.NoError,

@@ -467,8 +467,8 @@ func (suite *RestoreUnitSuite) TestRestoreCaches_Populate() {
         },
         {
             name: "error",
-            mock: &apiMock.DrivePager{
-                ToReturn: []apiMock.PagerResult{
+            mock: &apiMock.Pager[models.Driveable]{
+                ToReturn: []apiMock.PagerResult[models.Driveable]{
                     {Err: assert.AnError},
                 },
             },
@@ -47,7 +47,7 @@ type urlCache struct {
     refreshMu       sync.Mutex
     deltaQueryCount int

-    itemPager api.DriveItemDeltaEnumerator
+    itemPager api.DeltaPager[models.DriveItemable]

     errs *fault.Bus
 }

@@ -56,7 +56,7 @@ type urlCache struct {
 func newURLCache(
     driveID, prevDelta string,
     refreshInterval time.Duration,
-    itemPager api.DriveItemDeltaEnumerator,
+    itemPager api.DeltaPager[models.DriveItemable],
     errs *fault.Bus,
 ) (*urlCache, error) {
     err := validateCacheParams(

@@ -83,7 +83,7 @@ func newURLCache(
 func validateCacheParams(
     driveID string,
     refreshInterval time.Duration,
-    itemPager api.DriveItemDeltaEnumerator,
+    itemPager api.DeltaPager[models.DriveItemable],
 ) error {
     if len(driveID) == 0 {
         return clues.New("drive id is empty")

@@ -182,7 +182,7 @@ func (uc *urlCache) deltaQuery(
 ) error {
     logger.Ctx(ctx).Debug("starting delta query")
     // Reset item pager to remove any previous state
-    uc.itemPager.Reset()
+    uc.itemPager.Reset(ctx)

     _, _, _, err := collectItems(
         ctx,
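For context, newURLCache validates its inputs before wiring the pager, and the tests below exercise exactly those branches (empty drive ID, too-small refresh interval). A minimal re-creation of that construction flow; the one-second floor here is an assumption for illustration only, chosen because the tests treat 100ms as invalid and 1 hour as valid:

package main

import (
	"errors"
	"fmt"
	"time"
)

type deltaPager interface{ Reset() }

type urlCache struct {
	driveID         string
	refreshInterval time.Duration
	pager           deltaPager
}

func newURLCache(driveID string, refreshInterval time.Duration, pager deltaPager) (*urlCache, error) {
	if len(driveID) == 0 {
		return nil, errors.New("drive id is empty")
	}

	if refreshInterval < time.Second { // assumed floor
		return nil, errors.New("refresh interval too small")
	}

	if pager == nil {
		return nil, errors.New("nil item pager")
	}

	return &urlCache{driveID: driveID, refreshInterval: refreshInterval, pager: pager}, nil
}

type noopPager struct{}

func (noopPager) Reset() {}

func main() {
	_, err := newURLCache("", time.Hour, noopPager{})
	fmt.Println(err) // drive id is empty

	uc, err := newURLCache("drive1", time.Hour, noopPager{})
	fmt.Println(uc.driveID, err) // drive1 <nil>
}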
@@ -24,6 +24,7 @@ import (
     "github.com/alcionai/corso/src/pkg/control/testdata"
     "github.com/alcionai/corso/src/pkg/fault"
     "github.com/alcionai/corso/src/pkg/services/m365/api"
+    apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
 )

 type URLCacheIntegrationSuite struct {

@@ -209,20 +210,20 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {

     table := []struct {
         name              string
-        pagerResult       map[string][]deltaPagerResult
+        pagerResult       map[string][]apiMock.PagerResult[models.DriveItemable]
         expectedItemProps map[string]itemProps
         expectedErr       require.ErrorAssertionFunc
         cacheAssert       func(*urlCache, time.Time)
     }{
         {
             name: "single item in cache",
-            pagerResult: map[string][]deltaPagerResult{
+            pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
                 driveID: {
                     {
-                        items: []models.DriveItemable{
+                        Values: []models.DriveItemable{
                             fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
                         },
-                        deltaLink: &deltaString,
+                        DeltaLink: &deltaString,
                     },
                 },
             },

@@ -241,17 +242,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
         },
         {
             name: "multiple items in cache",
-            pagerResult: map[string][]deltaPagerResult{
+            pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
                 driveID: {
                     {
-                        items: []models.DriveItemable{
+                        Values: []models.DriveItemable{
                             fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
                             fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
                             fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
                             fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
                             fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
                         },
-                        deltaLink: &deltaString,
+                        DeltaLink: &deltaString,
                     },
                 },
             },

@@ -286,17 +287,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
         },
         {
             name: "duplicate items with potentially new urls",
-            pagerResult: map[string][]deltaPagerResult{
+            pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
                 driveID: {
                     {
-                        items: []models.DriveItemable{
+                        Values: []models.DriveItemable{
                             fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
                             fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
                             fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
                             fileItem("1", "file1", "root", "root", "https://test1.com", false),
                             fileItem("2", "file2", "root", "root", "https://test2.com", false),
                         },
-                        deltaLink: &deltaString,
+                        DeltaLink: &deltaString,
                     },
                 },
             },

@@ -323,15 +324,15 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
         },
         {
             name: "deleted items",
-            pagerResult: map[string][]deltaPagerResult{
+            pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
                 driveID: {
                     {
-                        items: []models.DriveItemable{
+                        Values: []models.DriveItemable{
                             fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
                             fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
                             fileItem("1", "file1", "root", "root", "https://dummy1.com", true),
                         },
-                        deltaLink: &deltaString,
+                        DeltaLink: &deltaString,
                     },
                 },
             },

@@ -354,13 +355,13 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
         },
         {
             name: "item not found in cache",
-            pagerResult: map[string][]deltaPagerResult{
+            pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
                 driveID: {
                     {
-                        items: []models.DriveItemable{
+                        Values: []models.DriveItemable{
                             fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
                         },
-                        deltaLink: &deltaString,
+                        DeltaLink: &deltaString,
                     },
                 },
             },

@@ -376,20 +377,20 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
         },
         {
             name: "multi-page delta query error",
-            pagerResult: map[string][]deltaPagerResult{
+            pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
                 driveID: {
                     {
-                        items: []models.DriveItemable{
+                        Values: []models.DriveItemable{
                             fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
                         },
-                        nextLink: &next,
+                        NextLink: &next,
                     },
                     {
-                        items: []models.DriveItemable{
+                        Values: []models.DriveItemable{
                             fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
                         },
-                        deltaLink: &deltaString,
-                        err:       errors.New("delta query error"),
+                        DeltaLink: &deltaString,
+                        Err:       errors.New("delta query error"),
                     },
                 },
             },

@@ -407,14 +408,14 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {

         {
             name: "folder item",
-            pagerResult: map[string][]deltaPagerResult{
+            pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
                 driveID: {
                     {
-                        items: []models.DriveItemable{
+                        Values: []models.DriveItemable{
                             fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
                             driveItem("2", "folder2", "root", "root", false, true, false),
                         },
-                        deltaLink: &deltaString,
+                        DeltaLink: &deltaString,
                     },
                 },
             },

@@ -436,8 +437,8 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
             ctx, flush := tester.NewContext(t)
             defer flush()

-            itemPager := &mockItemPager{
-                toReturn: test.pagerResult[driveID],
+            itemPager := &apiMock.DeltaPager[models.DriveItemable]{
+                ToReturn: test.pagerResult[driveID],
             }

             cache, err := newURLCache(

@@ -487,7 +488,7 @@ func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
         driveID,
         "",
         refreshInterval,
-        &mockItemPager{},
+        &apiMock.DeltaPager[models.DriveItemable]{},
         fault.New(true))

     require.NoError(t, err, clues.ToCore(err))

@@ -516,7 +517,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
         name        string
         driveID     string
         refreshInt  time.Duration
-        itemPager   api.DriveItemDeltaEnumerator
+        itemPager   api.DeltaPager[models.DriveItemable]
         errors      *fault.Bus
         expectedErr require.ErrorAssertionFunc
     }{

@@ -524,7 +525,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
         {
             name:        "invalid driveID",
             driveID:     "",
             refreshInt:  1 * time.Hour,
-            itemPager:   &mockItemPager{},
+            itemPager:   &apiMock.DeltaPager[models.DriveItemable]{},
             errors:      fault.New(true),
             expectedErr: require.Error,
         },

@@ -532,7 +533,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
         {
             name:        "invalid refresh interval",
             driveID:     "drive1",
             refreshInt:  100 * time.Millisecond,
-            itemPager:   &mockItemPager{},
+            itemPager:   &apiMock.DeltaPager[models.DriveItemable]{},
             errors:      fault.New(true),
             expectedErr: require.Error,
         },

@@ -548,7 +549,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
         {
             name:        "valid",
             driveID:     "drive1",
             refreshInt:  1 * time.Hour,
-            itemPager:   &mockItemPager{},
+            itemPager:   &apiMock.DeltaPager[models.DriveItemable]{},
             errors:      fault.New(true),
             expectedErr: require.NoError,
         },
|
|||||||
|
|
||||||
func collectItems(
|
func collectItems(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
pager api.ChannelMessageDeltaEnumerator,
|
pager api.DeltaPager[models.ChatMessageable],
|
||||||
) ([]models.ChatMessageable, error) {
|
) ([]models.ChatMessageable, error) {
|
||||||
items := []models.ChatMessageable{}
|
items := []models.ChatMessageable{}
|
||||||
|
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ type BackupHandler interface {
     ) (models.Channelable, error)
     NewChannelsPager(
         teamID string,
-    ) api.ChannelEnumerator
+    ) api.Pager[models.Channelable]

     GetMessageByID(
         ctx context.Context,

@@ -24,7 +24,7 @@ type BackupHandler interface {
     ) (models.ChatMessageable, error)
     NewMessagePager(
         teamID, channelID string,
-    ) api.ChannelMessageDeltaEnumerator
+    ) api.DeltaPager[models.ChatMessageable]

     GetMessageReplies(
         ctx context.Context,

@@ -34,7 +34,7 @@ type BackupHandler interface {

 type BackupMessagesHandler interface {
     GetMessage(ctx context.Context, teamID, channelID, itemID string) (models.ChatMessageable, error)
-    NewMessagePager(teamID, channelID string) api.ChannelMessageDeltaEnumerator
+    NewMessagePager(teamID, channelID string) api.DeltaPager[models.ChatMessageable]
     GetChannel(ctx context.Context, teamID, channelID string) (models.Channelable, error)
     GetReply(ctx context.Context, teamID, channelID, messageID string) (serialization.Parsable, error)
 }
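These hunks show one payoff of the DeltaPager[T] migration: drive items and chat messages now share a single pager abstraction, so page-draining helpers can be written once over any element type. A stand-alone sketch with a toy pager interface, not Corso's api package:

package main

import (
	"context"
	"fmt"
)

type pager[T any] interface {
	NextPage(ctx context.Context) (values []T, done bool, err error)
}

// collectAll drains any pager regardless of element type.
func collectAll[T any](ctx context.Context, p pager[T]) ([]T, error) {
	var out []T

	for {
		vals, done, err := p.NextPage(ctx)
		if err != nil {
			return nil, err
		}

		out = append(out, vals...)

		if done {
			return out, nil
		}
	}
}

// slicePager serves pre-built pages in order.
type slicePager[T any] struct {
	pages [][]T
	idx   int
}

func (s *slicePager[T]) NextPage(context.Context) ([]T, bool, error) {
	if s.idx >= len(s.pages) {
		return nil, true, nil
	}

	p := s.pages[s.idx]
	s.idx++

	return p, s.idx >= len(s.pages), nil
}

func main() {
	msgs, _ := collectAll[string](context.Background(), &slicePager[string]{pages: [][]string{{"m1"}, {"m2"}}})
	fmt.Println(msgs) // [m1 m2]
}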
@@ -9,6 +9,7 @@ import (
     "github.com/alcionai/corso/src/internal/diagnostics"
     "github.com/alcionai/corso/src/internal/m365/graph"
     "github.com/alcionai/corso/src/internal/m365/service/onedrive"
+    "github.com/alcionai/corso/src/internal/m365/service/sharepoint"
     "github.com/alcionai/corso/src/internal/m365/support"
     "github.com/alcionai/corso/src/pkg/backup/details"
     "github.com/alcionai/corso/src/pkg/control"

@@ -41,8 +42,7 @@ func (ctrl *Controller) ProduceExportCollections(
     )

     switch sels.Service {
-    case selectors.ServiceOneDrive, selectors.ServiceSharePoint:
-        // OneDrive and SharePoint can share the code to create collections
+    case selectors.ServiceOneDrive:
         expCollections, err = onedrive.ProduceExportCollections(
             ctx,
             backupVersion,

@@ -51,6 +51,17 @@ func (ctrl *Controller) ProduceExportCollections(
             dcs,
             deets,
             errs)
+    case selectors.ServiceSharePoint:
+        expCollections, err = sharepoint.ProduceExportCollections(
+            ctx,
+            backupVersion,
+            exportCfg,
+            opts,
+            dcs,
+            ctrl.backupDriveIDNames,
+            deets,
+            errs)
+
     default:
         err = clues.Wrap(clues.New(sels.Service.String()), "service not supported")
     }
@@ -80,7 +80,12 @@ func ProduceBackupCollections(
         dbcs, canUsePreviousBackup, err = site.CollectLibraries(
             ctx,
             sbpc,
-            drive.NewGroupBackupHandler(bpc.ProtectedResource.ID(), ac.Drives(), scope),
+            drive.NewGroupBackupHandler(
+                bpc.ProtectedResource.ID(),
+                ptr.Val(resp.GetId()),
+                ac.Drives(),
+                scope,
+            ),
             creds.AzureTenantID,
             ssmb,
             su,
@@ -49,7 +49,7 @@ func ProduceBackupCollections(
     logger.Ctx(ctx).Debug("creating OneDrive collections")

     nc := drive.NewCollections(
-        drive.NewItemBackupHandler(ac.Drives(), scope),
+        drive.NewItemBackupHandler(ac.Drives(), bpc.ProtectedResource.ID(), scope),
         tenant,
         bpc.ProtectedResource.ID(),
         su,
@@ -3,6 +3,7 @@ package onedrive
 import "github.com/alcionai/corso/src/pkg/path"

 const (
+    SitesPathDir = "sites"
     // const used as the root dir for the drive portion of a path prefix.
     // eg: tid/onedrive/ro/files/drives/driveid/...
     DrivesPathDir = "drives"
@ -2,14 +2,11 @@ package onedrive
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/alcionai/clues"
|
"github.com/alcionai/clues"
|
||||||
|
|
||||||
"github.com/alcionai/corso/src/internal/data"
|
"github.com/alcionai/corso/src/internal/data"
|
||||||
"github.com/alcionai/corso/src/internal/m365/collection/drive"
|
"github.com/alcionai/corso/src/internal/m365/collection/drive"
|
||||||
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
|
|
||||||
"github.com/alcionai/corso/src/internal/version"
|
|
||||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||||
"github.com/alcionai/corso/src/pkg/control"
|
"github.com/alcionai/corso/src/pkg/control"
|
||||||
"github.com/alcionai/corso/src/pkg/export"
|
"github.com/alcionai/corso/src/pkg/export"
|
||||||
@ -17,121 +14,6 @@ import (
|
|||||||
"github.com/alcionai/corso/src/pkg/path"
|
"github.com/alcionai/corso/src/pkg/path"
|
||||||
)
|
)
|
||||||
|
|
||||||
var _ export.Collection = &exportCollection{}
|
|
||||||
|
|
||||||
// exportCollection is the implementation of export.ExportCollection for OneDrive
|
|
||||||
type exportCollection struct {
|
|
||||||
// baseDir contains the path of the collection
|
|
||||||
baseDir string
|
|
||||||
|
|
||||||
// backingCollection is the restore collection from which we will
|
|
||||||
// create the export collection.
|
|
||||||
backingCollection data.RestoreCollection
|
|
||||||
|
|
||||||
// backupVersion is the backupVersion of the backup this collection was part
|
|
||||||
// of. This is required to figure out how to get the name of the
|
|
||||||
// item.
|
|
||||||
backupVersion int
|
|
||||||
}
|
|
||||||
|
|
||||||
func (ec exportCollection) BasePath() string {
|
|
||||||
return ec.baseDir
|
|
||||||
}
|
|
||||||
|
|
||||||
func (ec exportCollection) Items(ctx context.Context) <-chan export.Item {
|
|
||||||
ch := make(chan export.Item)
|
|
||||||
go items(ctx, ec, ch)
|
|
||||||
|
|
||||||
return ch
|
|
||||||
}
|
|
||||||
|
|
||||||
// items converts items in backing collection to export items
|
|
||||||
func items(ctx context.Context, ec exportCollection, ch chan<- export.Item) {
|
|
||||||
defer close(ch)
|
|
||||||
|
|
||||||
errs := fault.New(false)
|
|
||||||
|
|
||||||
// There will only be a single item in the backingCollections
|
|
||||||
// for OneDrive
|
|
||||||
for item := range ec.backingCollection.Items(ctx, errs) {
|
|
||||||
itemUUID := item.ID()
|
|
||||||
if isMetadataFile(itemUUID, ec.backupVersion) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
name, err := getItemName(ctx, itemUUID, ec.backupVersion, ec.backingCollection)
|
|
||||||
|
|
||||||
ch <- export.Item{
|
|
||||||
ID: itemUUID,
|
|
||||||
Data: export.ItemData{
|
|
||||||
Name: name,
|
|
||||||
Body: item.ToReader(),
|
|
||||||
},
|
|
||||||
Error: err,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
eitems, erecovereable := errs.ItemsAndRecovered()
|
|
||||||
|
|
||||||
// Return all the items that we failed to get from kopia at the end
|
|
||||||
for _, err := range eitems {
|
|
||||||
ch <- export.Item{
|
|
||||||
ID: err.ID,
|
|
||||||
Error: &err,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, ec := range erecovereable {
|
|
||||||
ch <- export.Item{
|
|
||||||
Error: ec,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// isMetadataFile is used to determine if a path corresponds to a
|
|
||||||
// metadata file. This is OneDrive specific logic and depends on the
|
|
||||||
// version of the backup unlike metadata.IsMetadataFile which only has
|
|
||||||
// to be concerned about the current version.
|
|
||||||
func isMetadataFile(id string, backupVersion int) bool {
|
|
||||||
if backupVersion < version.OneDrive1DataAndMetaFiles {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
return strings.HasSuffix(id, metadata.MetaFileSuffix) ||
|
|
||||||
strings.HasSuffix(id, metadata.DirMetaFileSuffix)
|
|
||||||
}
|
|
||||||
|
|
||||||
// getItemName is used to get the name of the item.
|
|
||||||
// How we get the name depends on the version of the backup.
|
|
||||||
func getItemName(
|
|
||||||
ctx context.Context,
|
|
||||||
id string,
|
|
||||||
backupVersion int,
|
|
||||||
fin data.FetchItemByNamer,
|
|
||||||
) (string, error) {
|
|
||||||
if backupVersion < version.OneDrive1DataAndMetaFiles {
|
|
||||||
return id, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if backupVersion < version.OneDrive5DirMetaNoName {
|
|
||||||
return strings.TrimSuffix(id, metadata.DataFileSuffix), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if strings.HasSuffix(id, metadata.DataFileSuffix) {
|
|
||||||
trimmedName := strings.TrimSuffix(id, metadata.DataFileSuffix)
|
|
||||||
metaName := trimmedName + metadata.MetaFileSuffix
|
|
||||||
|
|
||||||
meta, err := drive.FetchAndReadMetadata(ctx, fin, metaName)
|
|
||||||
if err != nil {
|
|
||||||
return "", clues.Wrap(err, "getting metadata").WithClues(ctx)
|
|
||||||
}
|
|
||||||
|
|
||||||
return meta.FileName, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return "", clues.New("invalid item id").WithClues(ctx)
|
|
||||||
}
|
|
||||||
|
|
||||||
// ProduceExportCollections will create the export collections for the
|
// ProduceExportCollections will create the export collections for the
|
||||||
// given restore collections.
|
// given restore collections.
|
||||||
func ProduceExportCollections(
|
func ProduceExportCollections(
|
||||||
@ -156,11 +38,7 @@ func ProduceExportCollections(
|
|||||||
|
|
||||||
baseDir := path.Builder{}.Append(drivePath.Folders...)
|
baseDir := path.Builder{}.Append(drivePath.Folders...)
|
||||||
|
|
||||||
ec = append(ec, exportCollection{
|
ec = append(ec, drive.NewExportCollection(baseDir.String(), dc, backupVersion))
|
||||||
baseDir: baseDir.String(),
|
|
||||||
backingCollection: dc,
|
|
||||||
backupVersion: backupVersion,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return ec, el.Failure()
|
return ec, el.Failure()
|
||||||
|
|||||||
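For context on how these export collections get consumed downstream: each collection exposes a base directory plus a channel of items, and the channel closes itself once the backing collection drains. A minimal consumer sketch, assuming only the `export.Collection` and `export.Item` shapes visible in the hunks above — the `writeCollections` helper and output-root handling are illustrative, not part of Corso:

```go
package exportconsume

import (
	"context"
	"io"
	"os"
	"path/filepath"

	"github.com/alcionai/corso/src/pkg/export"
)

// writeCollections drains each collection's Items channel and writes item
// bodies under root/<BasePath()>/<item name>. Hypothetical helper; error
// handling is intentionally minimal.
func writeCollections(ctx context.Context, root string, ecs []export.Collection) error {
	for _, ec := range ecs {
		dir := filepath.Join(root, ec.BasePath())
		if err := os.MkdirAll(dir, 0o755); err != nil {
			return err
		}

		// Items() closes the channel when the backing collection is
		// exhausted, so ranging over it terminates on its own.
		for item := range ec.Items(ctx) {
			if item.Error != nil {
				return item.Error
			}

			f, err := os.Create(filepath.Join(dir, item.Data.Name))
			if err != nil {
				return err
			}

			_, err = io.Copy(f, item.Data.Body)
			f.Close()
			item.Data.Body.Close()

			if err != nil {
				return err
			}
		}
	}

	return nil
}
```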
@ -11,7 +11,7 @@ import (

    "github.com/alcionai/corso/src/internal/data"
    dataMock "github.com/alcionai/corso/src/internal/data/mock"
-    "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
+    "github.com/alcionai/corso/src/internal/m365/collection/drive"
    odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
    odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
    "github.com/alcionai/corso/src/internal/tester"
@ -30,45 +30,6 @@ func TestExportUnitSuite(t *testing.T) {
    suite.Run(t, &ExportUnitSuite{Suite: tester.NewUnitSuite(t)})
}

-func (suite *ExportUnitSuite) TestIsMetadataFile() {
-    table := []struct {
-        name          string
-        id            string
-        backupVersion int
-        isMeta        bool
-    }{
-        {
-            name:          "legacy",
-            backupVersion: version.OneDrive1DataAndMetaFiles,
-            isMeta:        false,
-        },
-        {
-            name:          "metadata file",
-            backupVersion: version.OneDrive3IsMetaMarker,
-            id:            "name" + metadata.MetaFileSuffix,
-            isMeta:        true,
-        },
-        {
-            name:          "dir metadata file",
-            backupVersion: version.OneDrive3IsMetaMarker,
-            id:            "name" + metadata.DirMetaFileSuffix,
-            isMeta:        true,
-        },
-        {
-            name:          "non metadata file",
-            backupVersion: version.OneDrive3IsMetaMarker,
-            id:            "name" + metadata.DataFileSuffix,
-            isMeta:        false,
-        },
-    }
-
-    for _, test := range table {
-        suite.Run(test.name, func() {
-            assert.Equal(suite.T(), test.isMeta, isMetadataFile(test.id, test.backupVersion), "is metadata")
-        })
-    }
-}
-
type finD struct {
    id   string
    name string
@ -90,66 +51,6 @@ func (fd finD) FetchItemByName(ctx context.Context, name string) (data.Item, err
    return nil, assert.AnError
}

-func (suite *ExportUnitSuite) TestGetItemName() {
-    table := []struct {
-        tname         string
-        id            string
-        backupVersion int
-        name          string
-        fin           data.FetchItemByNamer
-        errFunc       assert.ErrorAssertionFunc
-    }{
-        {
-            tname:         "legacy",
-            id:            "name",
-            backupVersion: version.OneDrive1DataAndMetaFiles,
-            name:          "name",
-            errFunc:       assert.NoError,
-        },
-        {
-            tname:         "name in filename",
-            id:            "name.data",
-            backupVersion: version.OneDrive4DirIncludesPermissions,
-            name:          "name",
-            errFunc:       assert.NoError,
-        },
-        {
-            tname:         "name in metadata",
-            id:            "id.data",
-            backupVersion: version.Backup,
-            name:          "name",
-            fin:           finD{id: "id.meta", name: "name"},
-            errFunc:       assert.NoError,
-        },
-        {
-            tname:         "name in metadata but error",
-            id:            "id.data",
-            backupVersion: version.Backup,
-            name:          "",
-            fin:           finD{err: assert.AnError},
-            errFunc:       assert.Error,
-        },
-    }
-
-    for _, test := range table {
-        suite.Run(test.tname, func() {
-            t := suite.T()
-
-            ctx, flush := tester.NewContext(t)
-            defer flush()
-
-            name, err := getItemName(
-                ctx,
-                test.id,
-                test.backupVersion,
-                test.fin)
-            test.errFunc(t, err)
-
-            assert.Equal(t, test.name, name, "name")
-        })
-    }
-}
-
type mockRestoreCollection struct {
    path  path.Path
    items []*dataMock.Item
@ -391,11 +292,7 @@ func (suite *ExportUnitSuite) TestGetItems() {
            ctx, flush := tester.NewContext(t)
            defer flush()

-            ec := exportCollection{
-                baseDir:           "",
-                backingCollection: test.backingCollection,
-                backupVersion:     test.version,
-            }
+            ec := drive.NewExportCollection("", test.backingCollection, test.version)

            items := ec.Items(ctx)
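The name-resolution rule these tests exercise: in current-version backups a OneDrive item is stored as an `<id>.data` file, and its display name lives in a sibling `<id>.meta` file as JSON. A standalone sketch of that lookup, assuming the `{"filename": ...}` payload shape shown in the `finD` mock above; the `Fetcher` interface here is a stand-in for `data.FetchItemByNamer`, not the real type:

```go
package namelookup

import (
	"context"
	"encoding/json"
	"errors"
	"io"
	"strings"
)

// Fetcher is a stand-in for data.FetchItemByNamer: it returns a readable
// item given its name within the collection.
type Fetcher interface {
	FetchByName(ctx context.Context, name string) (io.ReadCloser, error)
}

var errInvalidID = errors.New("invalid item id")

// resolveName trims the ".data" suffix from an item ID and reads the display
// name from the matching ".meta" file, mirroring the lookup that getItemName
// performs for recent backup versions.
func resolveName(ctx context.Context, id string, fin Fetcher) (string, error) {
	if !strings.HasSuffix(id, ".data") {
		return "", errInvalidID
	}

	metaName := strings.TrimSuffix(id, ".data") + ".meta"

	rc, err := fin.FetchByName(ctx, metaName)
	if err != nil {
		return "", err
	}
	defer rc.Close()

	// The metadata blob carries the user-facing filename.
	var meta struct {
		FileName string `json:"filename"`
	}

	if err := json.NewDecoder(rc).Decode(&meta); err != nil {
		return "", err
	}

	return meta.FileName, nil
}
```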
@ -31,12 +31,13 @@ type BackupHandler struct {
    CanonPathFn  canonPather
    CanonPathErr error

-    Service  path.ServiceType
-    Category path.CategoryType
+    ResourceOwner string
+    Service       path.ServiceType
+    Category      path.CategoryType

-    DrivePagerV api.DrivePager
+    DrivePagerV api.Pager[models.Driveable]
    // driveID -> itemPager
-    ItemPagerV map[string]api.DriveItemDeltaEnumerator
+    ItemPagerV map[string]api.DeltaPager[models.DriveItemable]

    LocationIDFn locationIDer

@ -45,44 +46,46 @@
    GetErrs []error
}

-func DefaultOneDriveBH() *BackupHandler {
+func DefaultOneDriveBH(resourceOwner string) *BackupHandler {
    return &BackupHandler{
        ItemInfo: details.ItemInfo{
            OneDrive:  &details.OneDriveInfo{},
            Extension: &details.ExtensionData{},
        },
        GI:           GetsItem{Err: clues.New("not defined")},
        GIP:          GetsItemPermission{Err: clues.New("not defined")},
        PathPrefixFn: defaultOneDrivePathPrefixer,
        CanonPathFn:  defaultOneDriveCanonPather,
-        Service:      path.OneDriveService,
-        Category:     path.FilesCategory,
-        LocationIDFn: defaultOneDriveLocationIDer,
-        GetResps:     []*http.Response{nil},
-        GetErrs:      []error{clues.New("not defined")},
+        ResourceOwner: resourceOwner,
+        Service:       path.OneDriveService,
+        Category:      path.FilesCategory,
+        LocationIDFn:  defaultOneDriveLocationIDer,
+        GetResps:      []*http.Response{nil},
+        GetErrs:       []error{clues.New("not defined")},
    }
}

-func DefaultSharePointBH() *BackupHandler {
+func DefaultSharePointBH(resourceOwner string) *BackupHandler {
    return &BackupHandler{
        ItemInfo: details.ItemInfo{
            SharePoint: &details.SharePointInfo{},
            Extension:  &details.ExtensionData{},
        },
        GI:           GetsItem{Err: clues.New("not defined")},
        GIP:          GetsItemPermission{Err: clues.New("not defined")},
        PathPrefixFn: defaultSharePointPathPrefixer,
        CanonPathFn:  defaultSharePointCanonPather,
-        Service:      path.SharePointService,
-        Category:     path.LibrariesCategory,
-        LocationIDFn: defaultSharePointLocationIDer,
-        GetResps:     []*http.Response{nil},
-        GetErrs:      []error{clues.New("not defined")},
+        ResourceOwner: resourceOwner,
+        Service:       path.SharePointService,
+        Category:      path.LibrariesCategory,
+        LocationIDFn:  defaultSharePointLocationIDer,
+        GetResps:      []*http.Response{nil},
+        GetErrs:       []error{clues.New("not defined")},
    }
}

-func (h BackupHandler) PathPrefix(tID, ro, driveID string) (path.Path, error) {
-    pp, err := h.PathPrefixFn(tID, ro, driveID)
+func (h BackupHandler) PathPrefix(tID, driveID string) (path.Path, error) {
+    pp, err := h.PathPrefixFn(tID, h.ResourceOwner, driveID)
    if err != nil {
        return nil, err
    }
@ -90,8 +93,8 @@ func (h BackupHandler) PathPrefix(tID, ro, driveID string) (path.Path, error) {
    return pp, h.PathPrefixErr
}

-func (h BackupHandler) CanonicalPath(pb *path.Builder, tID, ro string) (path.Path, error) {
-    cp, err := h.CanonPathFn(pb, tID, ro)
+func (h BackupHandler) CanonicalPath(pb *path.Builder, tID string) (path.Path, error) {
+    cp, err := h.CanonPathFn(pb, tID, h.ResourceOwner)
    if err != nil {
        return nil, err
    }
@ -103,11 +106,11 @@ func (h BackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
    return h.Service, h.Category
}

-func (h BackupHandler) NewDrivePager(string, []string) api.DrivePager {
+func (h BackupHandler) NewDrivePager(string, []string) api.Pager[models.Driveable] {
    return h.DrivePagerV
}

-func (h BackupHandler) NewItemPager(driveID string, _ string, _ []string) api.DriveItemDeltaEnumerator {
+func (h BackupHandler) NewItemPager(driveID string, _ string, _ []string) api.DeltaPager[models.DriveItemable] {
    return h.ItemPagerV[driveID]
}

@ -249,7 +252,7 @@ type RestoreHandler struct {
    PostItemResp models.DriveItemable
    PostItemErr  error

-    DrivePagerV api.DrivePager
+    DrivePagerV api.Pager[models.Driveable]

    PostDriveResp models.Driveable
    PostDriveErr  error
@ -264,7 +267,7 @@ func (h RestoreHandler) PostDrive(
    return h.PostDriveResp, h.PostDriveErr
}

-func (h RestoreHandler) NewDrivePager(string, []string) api.DrivePager {
+func (h RestoreHandler) NewDrivePager(string, []string) api.Pager[models.Driveable] {
    return h.DrivePagerV
}
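The concrete `api.DrivePager` / `api.DriveItemDeltaEnumerator` types give way here to generic `api.Pager[T]` / `api.DeltaPager[T]` instantiations. The interface bodies are not shown in this diff, so the shape below is purely an assumption, sketched to illustrate why a single generic mock field can now serve both drives and drive items:

```go
package pager

import "context"

// Pager is an assumed minimal shape for api.Pager[T]: walk pages of T until
// the service reports no next page. The real interface may differ.
type Pager[T any] interface {
	GetPage(ctx context.Context) ([]T, error)
	NextPage() bool
}

// DeltaPager is the assumed delta variant: the same paging, plus the delta
// token that lets the next backup enumerate only changes.
type DeltaPager[T any] interface {
	Pager[T]
	DeltaToken() string
}

// A mock can then hold one pager per drive, matching the
// map[string]api.DeltaPager[models.DriveItemable] field above.
type mockHandler[T any] struct {
	itemPagers map[string]DeltaPager[T]
}

func (m mockHandler[T]) NewItemPager(driveID string) DeltaPager[T] {
	return m.itemPagers[driveID]
}
```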
@ -80,7 +80,11 @@ func ProduceBackupCollections(
        spcs, canUsePreviousBackup, err = site.CollectLibraries(
            ctx,
            bpc,
-            drive.NewLibraryBackupHandler(ac.Drives(), scope, bpc.Selector.PathService()),
+            drive.NewLibraryBackupHandler(
+                ac.Drives(),
+                bpc.ProtectedResource.ID(),
+                scope,
+                bpc.Selector.PathService()),
            creds.AzureTenantID,
            ssmb,
            su,

@ -50,8 +50,8 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
    )

    pb := path.Builder{}.Append(testBaseDrivePath.Elements()...)
-    ep, err := drive.NewLibraryBackupHandler(api.Drives{}, nil, path.SharePointService).
-        CanonicalPath(pb, tenantID, siteID)
+    ep, err := drive.NewLibraryBackupHandler(api.Drives{}, siteID, nil, path.SharePointService).
+        CanonicalPath(pb, tenantID)
    require.NoError(suite.T(), err, clues.ToCore(err))

    tests := []struct {
@ -101,7 +101,7 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
    )

    c := drive.NewCollections(
-        drive.NewLibraryBackupHandler(api.Drives{}, test.scope, path.SharePointService),
+        drive.NewLibraryBackupHandler(api.Drives{}, siteID, test.scope, path.SharePointService),
        tenantID,
        siteID,
        nil,
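Threading `bpc.ProtectedResource.ID()` into `NewLibraryBackupHandler` is what lets the mock's `PathPrefix` and `CanonicalPath` signatures above drop their `ro` parameter: the owner now rides along inside the handler instead of being passed on every call. A small sketch of the pattern; the type and the path elements are assumed for illustration only:

```go
package handler

// libraryHandler sketches a handler that captures the protected resource
// (site or group) ID at construction time rather than per call.
type libraryHandler struct {
	resourceOwner string
}

func newLibraryHandler(resourceOwner string) libraryHandler {
	return libraryHandler{resourceOwner: resourceOwner}
}

// pathPrefix no longer needs the owner as an argument; callers pass only
// values that vary per invocation. The literal elements here are invented
// placeholders, not Corso's real path layout.
func (h libraryHandler) pathPrefix(tenantID, driveID string) []string {
	return []string{tenantID, "service", h.resourceOwner, "category", driveID}
}
```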
src/internal/m365/service/sharepoint/export.go (new file)
@ -0,0 +1,58 @@
+package sharepoint
+
+import (
+    "context"
+
+    "github.com/alcionai/clues"
+
+    "github.com/alcionai/corso/src/internal/common/idname"
+    "github.com/alcionai/corso/src/internal/data"
+    "github.com/alcionai/corso/src/internal/m365/collection/drive"
+    "github.com/alcionai/corso/src/pkg/backup/details"
+    "github.com/alcionai/corso/src/pkg/control"
+    "github.com/alcionai/corso/src/pkg/export"
+    "github.com/alcionai/corso/src/pkg/fault"
+    "github.com/alcionai/corso/src/pkg/logger"
+    "github.com/alcionai/corso/src/pkg/path"
+)
+
+// ProduceExportCollections will create the export collections for the
+// given restore collections.
+func ProduceExportCollections(
+    ctx context.Context,
+    backupVersion int,
+    exportCfg control.ExportConfig,
+    opts control.Options,
+    dcs []data.RestoreCollection,
+    backupDriveIDNames idname.CacheBuilder,
+    deets *details.Builder,
+    errs *fault.Bus,
+) ([]export.Collection, error) {
+    var (
+        el = errs.Local()
+        ec = make([]export.Collection, 0, len(dcs))
+    )
+
+    for _, dc := range dcs {
+        drivePath, err := path.ToDrivePath(dc.FullPath())
+        if err != nil {
+            return nil, clues.Wrap(err, "transforming path to drive path").WithClues(ctx)
+        }
+
+        driveName, ok := backupDriveIDNames.NameOf(drivePath.DriveID)
+        if !ok {
+            // This should not happen, but just in case
+            logger.Ctx(ctx).With("drive_id", drivePath.DriveID).Info("drive name not found, using drive id")
+            driveName = drivePath.DriveID
+        }
+
+        baseDir := path.Builder{}.
+            Append("Libraries").
+            Append(driveName).
+            Append(drivePath.Folders...)
+
+        ec = append(ec, drive.NewExportCollection(baseDir.String(), dc, backupVersion))
+    }
+
+    return ec, el.Failure()
+}
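The only SharePoint-specific twist relative to the OneDrive version is the `Libraries/<driveName>` prefix, with a fallback to the raw drive ID when the backup's id-to-name cache has no entry. That logic in isolation, as a self-contained sketch; the lowercased lookup follows the cache behavior noted in the test file below:

```go
package exportdir

import "strings"

// buildBaseDir mirrors the base-directory construction above: prefix with
// "Libraries", then the drive's display name (or its ID when the name is
// unknown), then the folder path within the drive.
func buildBaseDir(driveID string, folders []string, driveNames map[string]string) string {
	driveName, ok := driveNames[strings.ToLower(driveID)]
	if !ok {
		// Fall back to the ID so the export still lands somewhere stable.
		driveName = driveID
	}

	elems := append([]string{"Libraries", driveName}, folders...)

	return strings.Join(elems, "/")
}
```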
src/internal/m365/service/sharepoint/export_test.go (new file)
@ -0,0 +1,154 @@
+package sharepoint
+
+import (
+    "bytes"
+    "context"
+    "io"
+    "strings"
+    "testing"
+
+    "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/suite"
+
+    "github.com/alcionai/corso/src/internal/common/idname"
+    "github.com/alcionai/corso/src/internal/data"
+    dataMock "github.com/alcionai/corso/src/internal/data/mock"
+    odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
+    odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
+    "github.com/alcionai/corso/src/internal/tester"
+    "github.com/alcionai/corso/src/internal/version"
+    "github.com/alcionai/corso/src/pkg/control"
+    "github.com/alcionai/corso/src/pkg/export"
+    "github.com/alcionai/corso/src/pkg/fault"
+    "github.com/alcionai/corso/src/pkg/path"
+)
+
+type ExportUnitSuite struct {
+    tester.Suite
+}
+
+func TestExportUnitSuite(t *testing.T) {
+    suite.Run(t, &ExportUnitSuite{Suite: tester.NewUnitSuite(t)})
+}
+
+type finD struct {
+    id   string
+    name string
+    err  error
+}
+
+func (fd finD) FetchItemByName(ctx context.Context, name string) (data.Item, error) {
+    if fd.err != nil {
+        return nil, fd.err
+    }
+
+    if name == fd.id {
+        return &dataMock.Item{
+            ItemID: fd.id,
+            Reader: io.NopCloser(bytes.NewBufferString(`{"filename": "` + fd.name + `"}`)),
+        }, nil
+    }
+
+    return nil, assert.AnError
+}
+
+type mockRestoreCollection struct {
+    path  path.Path
+    items []*dataMock.Item
+}
+
+func (rc mockRestoreCollection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
+    ch := make(chan data.Item)
+
+    go func() {
+        defer close(ch)
+
+        el := errs.Local()
+
+        for _, item := range rc.items {
+            if item.ReadErr != nil {
+                el.AddRecoverable(ctx, item.ReadErr)
+                continue
+            }
+
+            ch <- item
+        }
+    }()
+
+    return ch
+}
+
+func (rc mockRestoreCollection) FullPath() path.Path {
+    return rc.path
+}
+
+func (suite *ExportUnitSuite) TestExportRestoreCollections() {
+    t := suite.T()
+
+    ctx, flush := tester.NewContext(t)
+    defer flush()
+
+    var (
+        driveID   = "driveID1"
+        driveName = "driveName1"
+        exportCfg = control.ExportConfig{}
+        dpb       = odConsts.DriveFolderPrefixBuilder(driveID)
+        cache     = idname.NewCache(
+            // Cache check with lowercased ids
+            map[string]string{strings.ToLower(driveID): driveName},
+        )
+        dii          = odStub.DriveItemInfo()
+        expectedPath = "Libraries/" + driveName
+        expectedItems = []export.Item{
+            {
+                ID: "id1.data",
+                Data: export.ItemData{
+                    Name: "name1",
+                    Body: io.NopCloser((bytes.NewBufferString("body1"))),
+                },
+            },
+        }
+    )
+
+    dii.OneDrive.ItemName = "name1"
+
+    p, err := dpb.ToDataLayerOneDrivePath("t", "u", false)
+    assert.NoError(t, err, "build path")
+
+    dcs := []data.RestoreCollection{
+        data.FetchRestoreCollection{
+            Collection: mockRestoreCollection{
+                path: p,
+                items: []*dataMock.Item{
+                    {
+                        ItemID:   "id1.data",
+                        Reader:   io.NopCloser(bytes.NewBufferString("body1")),
+                        ItemInfo: dii,
+                    },
+                },
+            },
+            FetchItemByNamer: finD{id: "id1.meta", name: "name1"},
+        },
+    }
+
+    ecs, err := ProduceExportCollections(
+        ctx,
+        int(version.Backup),
+        exportCfg,
+        control.DefaultOptions(),
+        dcs,
+        cache,
+        nil,
+        fault.New(true))
+    assert.NoError(t, err, "export collections error")
+    assert.Len(t, ecs, 1, "num of collections")
+
+    assert.Equal(t, expectedPath, ecs[0].BasePath(), "base dir")
+
+    fitems := []export.Item{}
+    for item := range ecs[0].Items(ctx) {
+        fitems = append(fitems, item)
+    }
+
+    assert.Equal(t, expectedItems, fitems, "items")
+}
@ -492,7 +492,10 @@ func consumeBackupCollections(
    isIncremental bool,
    errs *fault.Bus,
) (*kopia.BackupStats, *details.Builder, kopia.DetailsMergeInfoer, error) {
-    ctx = clues.Add(ctx, "collection_source", "operations")
+    ctx = clues.Add(
+        ctx,
+        "collection_source", "operations",
+        "snapshot_type", "item data")

    progressBar := observe.MessageWithCompletion(ctx, "Backing up data")
    defer close(progressBar)

@ -61,6 +61,8 @@ func (ss *storeStreamer) Collect(ctx context.Context, col Collectable) error {

// Write persists the collected objects in the stream store
func (ss *storeStreamer) Write(ctx context.Context, errs *fault.Bus) (string, error) {
+    ctx = clues.Add(ctx, "snapshot_type", "stream store")
+
    id, err := write(ctx, ss.kw, ss.dbcs, errs)
    if err != nil {
        return "", clues.Wrap(err, "writing to stream store")
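Both hunks lean on the same `clues` idiom: attach key/value pairs to the context once, then let wrapped errors pick them up. A compact example of the pattern; the `snapshot_type` value comes straight from the hunks above, while the failure message is invented for the demo:

```go
package main

import (
	"context"
	"errors"
	"fmt"

	"github.com/alcionai/clues"
)

func write(ctx context.Context) error {
	// Wrapping WithClues(ctx) attaches whatever key/values the context
	// carries at this point.
	return clues.Wrap(errors.New("demo failure"), "writing to stream store").
		WithClues(ctx)
}

func main() {
	ctx := context.Background()

	// Annotate once; every error wrapped WithClues(ctx) below this point
	// carries snapshot_type automatically (clues.ToCore(err) is how the
	// tests in this commit surface that structured data).
	ctx = clues.Add(ctx, "snapshot_type", "stream store")

	if err := write(ctx); err != nil {
		fmt.Println(err)
	}
}
```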
@ -3,7 +3,6 @@ package tester
import (
    "context"
    "os"
-    "testing"

    "github.com/alcionai/clues"
    "github.com/google/uuid"
@ -11,7 +10,7 @@ import (
    "github.com/alcionai/corso/src/pkg/logger"
)

-func NewContext(t *testing.T) (context.Context, func()) {
+func NewContext(t TestT) (context.Context, func()) {
    level := logger.LLInfo
    format := logger.LFText

@ -34,7 +33,7 @@ func NewContext(t *testing.T) (context.Context, func()) {
}

func WithContext(
-    t *testing.T,
+    t TestT,
    ctx context.Context, //revive:disable-line:context-as-argument
) (context.Context, func()) {
    ls := logger.Settings{
@ -48,7 +47,7 @@ func WithContext(
}

func enrichTestCtx(
-    t *testing.T,
+    t TestT,
    ctx context.Context, //revive:disable-line:context-as-argument
) context.Context {
    if t == nil {

@ -7,6 +7,7 @@ import (
    "time"

    "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/require"
)

// AreSameFunc asserts whether the two funcs are the same func.
@ -26,8 +27,15 @@ func AreSameFunc(t *testing.T, expect, have any) {
    )
}

+type TestT interface {
+    Logf(format string, args ...any)
+    Name() string
+    TempDir() string
+    require.TestingT
+}
+
// LogTimeOfTest logs the test name and the time that it was run.
-func LogTimeOfTest(t *testing.T) string {
+func LogTimeOfTest(t TestT) string {
    now := time.Now().UTC().Format(time.RFC3339Nano)
    name := t.Name()
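The point of `TestT` is that helpers like `NewContext` stop being tied to `*testing.T`. Anything with `Logf`, `Name`, `TempDir`, and testify's `require.TestingT` (`Errorf` plus `FailNow`) qualifies, which the standard test, benchmark, and fuzz types already do. A compile-time check, with the interface copied from the hunk above:

```go
package testercheck

import (
	"testing"

	"github.com/stretchr/testify/require"
)

// TestT is copied verbatim from the diff above.
type TestT interface {
	Logf(format string, args ...any)
	Name() string
	TempDir() string
	require.TestingT
}

// Compile-time assertions: tests, benchmarks, and fuzz targets all satisfy
// TestT, so the tester helpers now work from any of them.
var (
	_ TestT = (*testing.T)(nil)
	_ TestT = (*testing.B)(nil)
	_ TestT = (*testing.F)(nil)
)
```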
@ -49,7 +49,7 @@ type GroupsInfo struct {
    // Channels Specific
    ChannelName    string    `json:"channelName,omitempty"`
    ChannelID      string    `json:"channelID,omitempty"`
-    LastResponseAt time.Time `json:"lastResponseAt,omitempty"`
+    LastReplyAt    time.Time `json:"lastResponseAt,omitempty"`
    MessageCreator string    `json:"messageCreator,omitempty"`
    MessagePreview string    `json:"messagePreview,omitempty"`
    ReplyCount     int       `json:"replyCount,omitempty"`
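Note the rename is Go-side only: the struct tag still serializes as `lastResponseAt`, presumably so details written by older builds keep decoding into the new field. A quick check of that round-trip on a trimmed copy of the struct:

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// Trimmed copy of the GroupsInfo field touched in this hunk.
type groupsInfo struct {
	LastReplyAt time.Time `json:"lastResponseAt,omitempty"`
}

func main() {
	// Old payloads used the lastResponseAt key; the renamed field still
	// accepts them because only the Go identifier changed, not the tag.
	var gi groupsInfo

	_ = json.Unmarshal([]byte(`{"lastResponseAt":"2023-07-01T12:00:00Z"}`), &gi)
	fmt.Println(gi.LastReplyAt) // 2023-07-01 12:00:00 +0000 UTC

	out, _ := json.Marshal(gi)
	fmt.Println(string(out)) // {"lastResponseAt":"2023-07-01T12:00:00Z"}
}
```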
@ -306,6 +306,7 @@ func (pb Builder) ToDataLayerPath(
    service ServiceType,
    category CategoryType,
    isItem bool,
+    elems ...string,
) (Path, error) {
    if err := ValidateServiceAndCategory(service, category); err != nil {
        return nil, err
@ -315,12 +316,15 @@ func (pb Builder) ToDataLayerPath(
        return nil, err
    }

+    prefixItems := append([]string{
+        tenant,
+        service.String(),
+        user,
+        category.String(),
+    }, elems...)
+
    return &dataLayerResourcePath{
-        Builder: *pb.withPrefix(
-            tenant,
-            service.String(),
-            user,
-            category.String()),
+        Builder:  *pb.withPrefix(prefixItems...),
        service:  service,
        category: category,
        hasItem:  isItem,
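The subtle bit: the variadic `elems` are appended to the service prefix, not to the builder's own elements, so they land between `category` and whatever the builder already holds (the `TestToDataLayerPath` cases below pin this down). In isolation:

```go
package main

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/path"
)

func main() {
	pb := path.Builder{}.Append("foo", "bar")

	// elems ("oof") join the tenant/service/user/category prefix, so they
	// sort before the builder's own elements, not after them.
	p, err := pb.ToDataLayerPath("t", "u", path.OneDriveService, path.FilesCategory, false, "oof")
	if err != nil {
		panic(err)
	}

	fmt.Println(p.PlainString()) // t/onedrive/u/files/oof/foo/bar
}
```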
@ -367,3 +367,39 @@ func (suite *BuilderUnitSuite) TestPIIHandling() {
        })
    }
}
+
+func (suite *BuilderUnitSuite) TestToDataLayerPath() {
+    location := Builder{}.Append("foo", "bar")
+
+    table := []struct {
+        name   string
+        extra  []string
+        expect string
+    }{
+        {
+            name:   "no extra",
+            extra:  []string{},
+            expect: "t/onedrive/u/files/foo/bar",
+        },
+        {
+            name:   "single extra",
+            extra:  []string{"oof"},
+            expect: "t/onedrive/u/files/oof/foo/bar",
+        },
+        {
+            name:   "multi extra",
+            extra:  []string{"oof", "rab"},
+            expect: "t/onedrive/u/files/oof/rab/foo/bar",
+        },
+    }
+    for _, test := range table {
+        suite.Run(test.name, func() {
+            t := suite.T()
+
+            dlp, err := location.ToDataLayerPath("t", "u", OneDriveService, FilesCategory, false, test.extra...)
+            require.NoError(t, err, clues.ToCore(err))
+
+            assert.Equal(t, test.expect, dlp.PlainString())
+        })
+    }
+}
@ -126,7 +126,7 @@ func (suite *RepositoryIntegrationSuite) TestInitialize() {
    table := []struct {
        name     string
        account  account.Account
-        storage  func(*testing.T) storage.Storage
+        storage  func(tester.TestT) storage.Storage
        errCheck assert.ErrorAssertionFunc
    }{
        {
@ -335,7 +335,7 @@ func (s *exchange) AllData() []ExchangeScope {
}

// -------------------
-// Info Factories
+// ItemInfo Factories

// ContactName produces one or more exchange contact name info scopes.
// Matches any contact whose name contains the provided string.
@ -352,7 +352,7 @@ func (sr *ExchangeRestore) ContactName(senderID string) []ExchangeScope {
    }
}

-// EventSubject produces one or more exchange event subject info scopes.
+// EventOrganizer produces one or more exchange event subject info scopes.
// Matches any event where the event subject contains one of the provided strings.
// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
// If any slice contains selectors.None, that slice is reduced to [selectors.None]
@ -6,6 +6,7 @@ import (

    "github.com/alcionai/clues"

+    "github.com/alcionai/corso/src/internal/common/dttm"
    "github.com/alcionai/corso/src/pkg/backup/details"
    "github.com/alcionai/corso/src/pkg/backup/identity"
    "github.com/alcionai/corso/src/pkg/fault"
@ -242,7 +243,7 @@ func (s *groups) Channel(channel string) []GroupsScope {
// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
// If any slice contains selectors.None, that slice is reduced to [selectors.None]
// If any slice is empty, it defaults to [selectors.None]
-func (s *sharePoint) ChannelMessages(channels, messages []string, opts ...option) []GroupsScope {
+func (s *groups) ChannelMessages(channels, messages []string, opts ...option) []GroupsScope {
    var (
        scopes = []GroupsScope{}
        os     = append([]option{pathComparator()}, opts...)
@ -309,7 +310,76 @@ func (s *groups) LibraryItems(libraries, items []string, opts ...option) []Group
// -------------------
// ItemInfo Factories

-// TODO
+// MessageCreator produces one or more groups channelMessage info scopes.
+// Matches any channel message created by the specified user.
+// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
+// If any slice contains selectors.None, that slice is reduced to [selectors.None]
+// If any slice is empty, it defaults to [selectors.None]
+func (s *GroupsRestore) MessageCreator(creator string) []GroupsScope {
+    return []GroupsScope{
+        makeInfoScope[GroupsScope](
+            GroupsChannelMessage,
+            GroupsInfoChannelMessageCreator,
+            []string{creator},
+            filters.In),
+    }
+}
+
+// MessageCreatedAfter produces a channel message created-after info scope.
+// Matches any message where the creation time is after the timestring.
+// If the input equals selectors.Any, the scope will match all times.
+// If the input is empty or selectors.None, the scope will always fail comparisons.
+func (s *GroupsRestore) MessageCreatedAfter(timeStrings string) []GroupsScope {
+    return []GroupsScope{
+        makeInfoScope[GroupsScope](
+            GroupsChannelMessage,
+            GroupsInfoChannelMessageCreatedAfter,
+            []string{timeStrings},
+            filters.Less),
+    }
+}
+
+// MessageCreatedBefore produces a channel message created-before info scope.
+// Matches any message where the creation time is after the timestring.
+// If the input equals selectors.Any, the scope will match all times.
+// If the input is empty or selectors.None, the scope will always fail comparisons.
+func (s *GroupsRestore) MessageCreatedBefore(timeStrings string) []GroupsScope {
+    return []GroupsScope{
+        makeInfoScope[GroupsScope](
+            GroupsChannelMessage,
+            GroupsInfoChannelMessageCreatedBefore,
+            []string{timeStrings},
+            filters.Greater),
+    }
+}
+
+// MessageLastReplyAfter produces a channel message last-response-after info scope.
+// Matches any message where last response time is after the timestring.
+// If the input equals selectors.Any, the scope will match all times.
+// If the input is empty or selectors.None, the scope will always fail comparisons.
+func (s *GroupsRestore) MessageLastReplyAfter(timeStrings string) []GroupsScope {
+    return []GroupsScope{
+        makeInfoScope[GroupsScope](
+            GroupsChannelMessage,
+            GroupsInfoChannelMessageLastReplyAfter,
+            []string{timeStrings},
+            filters.Less),
+    }
+}
+
+// MessageLastReplyBefore produces a channel message last-response-before info scope.
+// Matches any message where last response time is after the timestring.
+// If the input equals selectors.Any, the scope will match all times.
+// If the input is empty or selectors.None, the scope will always fail comparisons.
+func (s *GroupsRestore) MessageLastReplyBefore(timeStrings string) []GroupsScope {
+    return []GroupsScope{
+        makeInfoScope[GroupsScope](
+            GroupsChannelMessage,
+            GroupsInfoChannelMessageLastReplyBefore,
+            []string{timeStrings},
+            filters.Greater),
+    }
+}

// ---------------------------------------------------------------------------
// Categories
@ -334,9 +404,16 @@ const (

    // details.itemInfo comparables

-    // channel drive selection
+    // channel and drive selection
    GroupsInfoSiteLibraryDrive groupsCategory = "GroupsInfoSiteLibraryDrive"
    GroupsInfoChannel          groupsCategory = "GroupsInfoChannel"
+
+    // data contained within details.ItemInfo
+    GroupsInfoChannelMessageCreatedAfter    groupsCategory = "GroupsInfoChannelMessageCreatedAfter"
+    GroupsInfoChannelMessageCreatedBefore   groupsCategory = "GroupsInfoChannelMessageCreatedBefore"
+    GroupsInfoChannelMessageCreator         groupsCategory = "GroupsInfoChannelMessageCreator"
+    GroupsInfoChannelMessageLastReplyAfter  groupsCategory = "GroupsInfoChannelMessageLastReplyAfter"
+    GroupsInfoChannelMessageLastReplyBefore groupsCategory = "GroupsInfoChannelMessageLastReplyBefore"
)

// groupsLeafProperties describes common metadata of the leaf categories
@ -368,7 +445,9 @@ func (c groupsCategory) leafCat() categorizer {
    switch c {
    // TODO: if channels ever contain more than one type of item,
    // we'll need to fix this up.
-    case GroupsChannel, GroupsChannelMessage:
+    case GroupsChannel, GroupsChannelMessage,
+        GroupsInfoChannelMessageCreatedAfter, GroupsInfoChannelMessageCreatedBefore, GroupsInfoChannelMessageCreator,
+        GroupsInfoChannelMessageLastReplyAfter, GroupsInfoChannelMessageLastReplyBefore:
        return GroupsChannelMessage
    case GroupsLibraryFolder, GroupsLibraryItem, GroupsInfoSiteLibraryDrive:
        return GroupsLibraryItem
@ -414,15 +493,15 @@ func (c groupsCategory) pathValues(
        rFld string
    )

+    if ent.Groups == nil {
+        return nil, clues.New("no Groups ItemInfo in details")
+    }
+
    switch c {
    case GroupsChannel, GroupsChannelMessage:
        folderCat, itemCat = GroupsChannel, GroupsChannelMessage
        rFld = ent.Groups.ParentPath
    case GroupsLibraryFolder, GroupsLibraryItem:
-        if ent.Groups == nil {
-            return nil, clues.New("no Groups ItemInfo in details")
-        }
-
        folderCat, itemCat = GroupsLibraryFolder, GroupsLibraryItem
        rFld = ent.Groups.ParentPath
    default:
@ -591,8 +670,23 @@ func (s GroupsScope) matchesInfo(dii details.ItemInfo) bool {

        return matchesAny(s, GroupsInfoSiteLibraryDrive, ds)
    case GroupsInfoChannel:
-        ds := Any()
+        ds := []string{}
+
+        if len(info.ChannelID) > 0 {
+            ds = append(ds, info.ChannelID)
+        }
+
+        if len(info.ChannelName) > 0 {
+            ds = append(ds, info.ChannelName)
+        }
+
        return matchesAny(s, GroupsInfoChannel, ds)
+    case GroupsInfoChannelMessageCreator:
+        i = info.MessageCreator
+    case GroupsInfoChannelMessageCreatedAfter, GroupsInfoChannelMessageCreatedBefore:
+        i = dttm.Format(info.Created)
+    case GroupsInfoChannelMessageLastReplyAfter, GroupsInfoChannelMessageLastReplyBefore:
+        i = dttm.Format(info.LastReplyAt)
    }

    return s.Matches(infoCat, i)
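Putting the new factories together: a hedged sketch of narrowing a Groups restore to one creator's recent messages. Whether info scopes are wired through `Filter` here (as they are for the other services) isn't shown in this diff, so treat that call, and the example address, as assumptions:

```go
package main

import (
	"fmt"
	"time"

	"github.com/alcionai/corso/src/internal/common/dttm"
	"github.com/alcionai/corso/src/pkg/selectors"
)

func main() {
	sel := selectors.NewGroupsRestore(selectors.Any())

	// Select all channel messages...
	sel.Include(sel.ChannelMessages(selectors.Any(), selectors.Any()))

	// ...then narrow by the ItemInfo comparables added in this diff.
	// Assumption: Groups selectors expose Filter like the other services.
	sel.Filter(sel.MessageCreator("user@example.com"))
	sel.Filter(sel.MessageCreatedAfter(dttm.Format(time.Now().AddDate(0, -1, 0))))

	fmt.Println(len(sel.Scopes()))
}
```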
@ -1,15 +1,21 @@
package selectors

import (
+    "strings"
    "testing"
+    "time"

    "github.com/alcionai/clues"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
    "github.com/stretchr/testify/suite"
+    "golang.org/x/exp/slices"

+    "github.com/alcionai/corso/src/internal/common/dttm"
+    odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/pkg/backup/details"
+    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/path"
)
@ -55,211 +61,236 @@ func (suite *GroupsSelectorSuite) TestToGroupsRestore() {
    assert.NotZero(t, or.Scopes())
}

-// TODO(rkeepers): implement
-// func (suite *GroupsSelectorSuite) TestGroupsRestore_Reduce() {
-// 	toRR := func(cat path.CategoryType, siteID string, folders []string, item string) string {
-// 		folderElems := make([]string, 0, len(folders))
-//
-// 		for _, f := range folders {
-// 			folderElems = append(folderElems, f+".d")
-// 		}
-//
-// 		return stubRepoRef(
-// 			path.GroupsService,
-// 			cat,
-// 			siteID,
-// 			strings.Join(folderElems, "/"),
-// 			item)
-// 	}
-
-// 	var (
-// 		prefixElems = []string{
-// 			odConsts.DrivesPathDir,
-// 			"drive!id",
-// 			odConsts.RootPathDir,
-// 		}
-// 		itemElems1 = []string{"folderA", "folderB"}
-// 		itemElems2 = []string{"folderA", "folderC"}
-// 		itemElems3 = []string{"folderD", "folderE"}
-// 		pairAC = "folderA/folderC"
-// 		pairGH = "folderG/folderH"
-// 		item = toRR(
-// 			path.LibrariesCategory,
-// 			"sid",
-// 			append(slices.Clone(prefixElems), itemElems1...),
-// 			"item")
-// 		item2 = toRR(
-// 			path.LibrariesCategory,
-// 			"sid",
-// 			append(slices.Clone(prefixElems), itemElems2...),
-// 			"item2")
-// 		item3 = toRR(
-// 			path.LibrariesCategory,
-// 			"sid",
-// 			append(slices.Clone(prefixElems), itemElems3...),
-// 			"item3")
-// 		item4 = stubRepoRef(path.GroupsService, path.PagesCategory, "sid", pairGH, "item4")
-// 		item5 = stubRepoRef(path.GroupsService, path.PagesCategory, "sid", pairGH, "item5")
-// 	)
-
-// 	deets := &details.Details{
-// 		DetailsModel: details.DetailsModel{
-// 			Entries: []details.Entry{
-// 				{
-// 					RepoRef: item,
-// 					ItemRef: "item",
-// 					LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems1...), "/"),
-// 					ItemInfo: details.ItemInfo{
-// 						Groups: &details.GroupsInfo{
-// 							ItemType: details.GroupsLibrary,
-// 							ItemName: "itemName",
-// 							ParentPath: strings.Join(itemElems1, "/"),
-// 						},
-// 					},
-// 				},
-// 				{
-// 					RepoRef: item2,
-// 					LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems2...), "/"),
-// 					// ItemRef intentionally blank to test fallback case
-// 					ItemInfo: details.ItemInfo{
-// 						Groups: &details.GroupsInfo{
-// 							ItemType: details.GroupsLibrary,
-// 							ItemName: "itemName2",
-// 							ParentPath: strings.Join(itemElems2, "/"),
-// 						},
-// 					},
-// 				},
-// 				{
-// 					RepoRef: item3,
-// 					ItemRef: "item3",
-// 					LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems3...), "/"),
-// 					ItemInfo: details.ItemInfo{
-// 						Groups: &details.GroupsInfo{
-// 							ItemType: details.GroupsLibrary,
-// 							ItemName: "itemName3",
-// 							ParentPath: strings.Join(itemElems3, "/"),
-// 						},
-// 					},
-// 				},
-// 				{
-// 					RepoRef: item4,
-// 					LocationRef: pairGH,
-// 					ItemRef: "item4",
-// 					ItemInfo: details.ItemInfo{
-// 						Groups: &details.GroupsInfo{
-// 							ItemType: details.GroupsPage,
-// 							ItemName: "itemName4",
-// 							ParentPath: pairGH,
-// 						},
-// 					},
-// 				},
-// 				{
-// 					RepoRef: item5,
-// 					LocationRef: pairGH,
-// 					// ItemRef intentionally blank to test fallback case
-// 					ItemInfo: details.ItemInfo{
-// 						Groups: &details.GroupsInfo{
-// 							ItemType: details.GroupsPage,
-// 							ItemName: "itemName5",
-// 							ParentPath: pairGH,
-// 						},
-// 					},
-// 				},
-// 			},
-// 		},
-// 	}
-
-// 	arr := func(s ...string) []string {
-// 		return s
-// 	}
-
-// 	table := []struct {
-// 		name string
-// 		makeSelector func() *GroupsRestore
-// 		expect []string
-// 		cfg Config
-// 	}{
-// 		{
-// 			name: "all",
-// 			makeSelector: func() *GroupsRestore {
-// 				odr := NewGroupsRestore(Any())
-// 				odr.Include(odr.AllData())
-// 				return odr
-// 			},
-// 			expect: arr(item, item2, item3, item4, item5),
-// 		},
-// 		{
-// 			name: "only match item",
-// 			makeSelector: func() *GroupsRestore {
-// 				odr := NewGroupsRestore(Any())
-// 				odr.Include(odr.LibraryItems(Any(), []string{"item2"}))
-// 				return odr
-// 			},
-// 			expect: arr(item2),
-// 		},
-// 		{
-// 			name: "id doesn't match name",
-// 			makeSelector: func() *GroupsRestore {
-// 				odr := NewGroupsRestore(Any())
-// 				odr.Include(odr.LibraryItems(Any(), []string{"item2"}))
-// 				return odr
-// 			},
-// 			expect: []string{},
-// 			cfg: Config{OnlyMatchItemNames: true},
-// 		},
-// 		{
-// 			name: "only match item name",
-// 			makeSelector: func() *GroupsRestore {
-// 				odr := NewGroupsRestore(Any())
-// 				odr.Include(odr.LibraryItems(Any(), []string{"itemName2"}))
-// 				return odr
-// 			},
-// 			expect: arr(item2),
-// 			cfg: Config{OnlyMatchItemNames: true},
-// 		},
-// 		{
-// 			name: "name doesn't match",
-// 			makeSelector: func() *GroupsRestore {
-// 				odr := NewGroupsRestore(Any())
-// 				odr.Include(odr.LibraryItems(Any(), []string{"itemName2"}))
-// 				return odr
-// 			},
-// 			expect: []string{},
-// 		},
-// 		{
-// 			name: "only match folder",
-// 			makeSelector: func() *GroupsRestore {
-// 				odr := NewGroupsRestore([]string{"sid"})
-// 				odr.Include(odr.LibraryFolders([]string{"folderA/folderB", pairAC}))
-// 				return odr
-// 			},
-// 			expect: arr(item, item2),
-// 		},
-// 		{
-// 			name: "pages match folder",
-// 			makeSelector: func() *GroupsRestore {
-// 				odr := NewGroupsRestore([]string{"sid"})
-// 				odr.Include(odr.Pages([]string{pairGH, pairAC}))
-// 				return odr
-// 			},
-// 			expect: arr(item4, item5),
-// 		},
-// 	}
-// 	for _, test := range table {
-// 		suite.Run(test.name, func() {
-// 			t := suite.T()
-
-// 			ctx, flush := tester.NewContext(t)
-// 			defer flush()
-// 			sel := test.makeSelector()
-// 			sel.Configure(test.cfg)
-// 			results := sel.Reduce(ctx, deets, fault.New(true))
-// 			paths := results.Paths()
-// 			assert.Equal(t, test.expect, paths)
-// 		})
-// 	}
-// }
+func (suite *GroupsSelectorSuite) TestGroupsRestore_Reduce() {
+    toRR := func(cat path.CategoryType, midID string, folders []string, item string) string {
+        var (
+            folderElems = make([]string, 0, len(folders))
+            isDrive     = cat == path.LibrariesCategory
+        )
+
+        for _, f := range folders {
+            if isDrive {
+                f = f + ".d"
+            }
+
+            folderElems = append(folderElems, f)
+        }
+
+        return stubRepoRef(
+            path.GroupsService,
+            cat,
+            midID,
+            strings.Join(folderElems, "/"),
+            item)
+    }
+
+    var (
+        drivePrefixElems = []string{
+            odConsts.DrivesPathDir,
+            "drive!id",
+            odConsts.RootPathDir,
+        }
+        itemElems1 = []string{"folderA", "folderB"}
+        itemElems2 = []string{"folderA", "folderC"}
+        itemElems3 = []string{"folderD", "folderE"}
+        pairAC     = "folderA/folderC"
+        libItem    = toRR(
+            path.LibrariesCategory,
+            "sid",
+            append(slices.Clone(drivePrefixElems), itemElems1...),
+            "item")
+        libItem2 = toRR(
+            path.LibrariesCategory,
+            "sid",
+            append(slices.Clone(drivePrefixElems), itemElems2...),
+            "item2")
+        libItem3 = toRR(
+            path.LibrariesCategory,
+            "sid",
+            append(slices.Clone(drivePrefixElems), itemElems3...),
+            "item3")
+        chanItem  = toRR(path.ChannelMessagesCategory, "gid", slices.Clone(itemElems1), "chitem")
+        chanItem2 = toRR(path.ChannelMessagesCategory, "gid", slices.Clone(itemElems2), "chitem2")
+        chanItem3 = toRR(path.ChannelMessagesCategory, "gid", slices.Clone(itemElems3), "chitem3")
+    )
+
+    deets := &details.Details{
+        DetailsModel: details.DetailsModel{
+            Entries: []details.Entry{
+                {
+                    RepoRef:     libItem,
+                    ItemRef:     "item",
+                    LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems1...), "/"),
+                    ItemInfo: details.ItemInfo{
+                        Groups: &details.GroupsInfo{
+                            ItemType:   details.SharePointLibrary,
+                            ItemName:   "itemName",
+                            ParentPath: strings.Join(itemElems1, "/"),
+                        },
+                    },
+                },
+                {
+                    RepoRef:     libItem2,
+                    LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems2...), "/"),
+                    // ItemRef intentionally blank to test fallback case
+                    ItemInfo: details.ItemInfo{
+                        Groups: &details.GroupsInfo{
+                            ItemType:   details.SharePointLibrary,
+                            ItemName:   "itemName2",
+                            ParentPath: strings.Join(itemElems2, "/"),
+                        },
+                    },
+                },
+                {
+                    RepoRef:     libItem3,
+                    ItemRef:     "item3",
+                    LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems3...), "/"),
+                    ItemInfo: details.ItemInfo{
+                        Groups: &details.GroupsInfo{
+                            ItemType:   details.SharePointLibrary,
+                            ItemName:   "itemName3",
+                            ParentPath: strings.Join(itemElems3, "/"),
+                        },
+                    },
+                },
+                {
+                    RepoRef:     chanItem,
+                    ItemRef:     "citem",
+                    LocationRef: strings.Join(itemElems1, "/"),
+                    ItemInfo: details.ItemInfo{
+                        Groups: &details.GroupsInfo{
+                            ItemType:   details.TeamsChannelMessage,
+                            ParentPath: strings.Join(itemElems1, "/"),
+                        },
+                    },
+                },
+                {
+                    RepoRef:     chanItem2,
+                    LocationRef: strings.Join(itemElems2, "/"),
+                    // ItemRef intentionally blank to test fallback case
+                    ItemInfo: details.ItemInfo{
+                        Groups: &details.GroupsInfo{
+                            ItemType:   details.TeamsChannelMessage,
+                            ParentPath: strings.Join(itemElems2, "/"),
+                        },
+                    },
+                },
+                {
+                    RepoRef:     chanItem3,
+                    ItemRef:     "citem3",
+                    LocationRef: strings.Join(itemElems3, "/"),
+                    ItemInfo: details.ItemInfo{
+                        Groups: &details.GroupsInfo{
+                            ItemType:   details.TeamsChannelMessage,
+                            ParentPath: strings.Join(itemElems3, "/"),
+                        },
+                    },
+                },
+            },
+        },
+    }
+
+    arr := func(s ...string) []string {
+        return s
+    }
+
+    table := []struct {
+        name         string
+        makeSelector func() *GroupsRestore
+        expect       []string
+        cfg          Config
+    }{
+        {
+            name: "all",
+            makeSelector: func() *GroupsRestore {
+                sel := NewGroupsRestore(Any())
+                sel.Include(sel.AllData())
+                return sel
+            },
+            expect: arr(libItem, libItem2, libItem3, chanItem, chanItem2, chanItem3),
+        },
+        {
+            name: "only match library item",
+            makeSelector: func() *GroupsRestore {
+                sel := NewGroupsRestore(Any())
+                sel.Include(sel.LibraryItems(Any(), []string{"item2"}))
+                return sel
+            },
+            expect: arr(libItem2),
+        },
+        {
+            name: "only match channel item",
+            makeSelector: func() *GroupsRestore {
+                sel := NewGroupsRestore(Any())
+                sel.Include(sel.ChannelMessages(Any(), []string{"chitem2"}))
+                return sel
+            },
+            expect: arr(chanItem2),
+        },
+        {
+            name: "library id doesn't match name",
+            makeSelector: func() *GroupsRestore {
+                sel := NewGroupsRestore(Any())
+                sel.Include(sel.LibraryItems(Any(), []string{"item2"}))
+                return sel
+            },
+            expect: []string{},
+            cfg:    Config{OnlyMatchItemNames: true},
+        },
+        {
+            name: "channel id doesn't match name",
+            makeSelector: func() *GroupsRestore {
+                sel := NewGroupsRestore(Any())
+                sel.Include(sel.ChannelMessages(Any(), []string{"item2"}))
+                return sel
+            },
+            expect: []string{},
+            cfg:    Config{OnlyMatchItemNames: true},
+        },
+        {
+            name: "library only match item name",
+            makeSelector: func() *GroupsRestore {
+                sel := NewGroupsRestore(Any())
+                sel.Include(sel.LibraryItems(Any(), []string{"itemName2"}))
+                return sel
+            },
+            expect: arr(libItem2),
+            cfg:    Config{OnlyMatchItemNames: true},
+        },
+        {
+            name: "name doesn't match",
+            makeSelector: func() *GroupsRestore {
+                sel := NewGroupsRestore(Any())
+                sel.Include(sel.LibraryItems(Any(), []string{"itemName2"}))
+                return sel
+            },
+            expect: []string{},
+        },
+        {
+            name: "only match folder",
+            makeSelector: func() *GroupsRestore {
+                sel := NewGroupsRestore([]string{"sid"})
+                sel.Include(sel.LibraryFolders([]string{"folderA/folderB", pairAC}))
+                return sel
+            },
+            expect: arr(libItem, libItem2),
+        },
+    }
+    for _, test := range table {
+        suite.Run(test.name, func() {
+            t := suite.T()
+
+            ctx, flush := tester.NewContext(t)
+            defer flush()
+
+            sel := test.makeSelector()
+            sel.Configure(test.cfg)
+            results := sel.Reduce(ctx, deets, fault.New(true))
+            paths := results.Paths()
+            assert.Equal(t, test.expect, paths)
+        })
+    }
+}

func (suite *GroupsSelectorSuite) TestGroupsCategory_PathValues() {
|
func (suite *GroupsSelectorSuite) TestGroupsCategory_PathValues() {
|
||||||
var (
|
var (
|
||||||
@ -324,91 +355,111 @@ func (suite *GroupsSelectorSuite) TestGroupsCategory_PathValues() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO(abin): implement
func (suite *GroupsSelectorSuite) TestGroupsScope_MatchesInfo() {
// func (suite *GroupsSelectorSuite) TestGroupsScope_MatchesInfo() {
var (
// var (
sel = NewGroupsRestore(Any())
// sel = NewGroupsRestore(Any())
user = "user@mail.com"
// host = "www.website.com"
host = "www.website.com"
// pth = "/foo"
// url = host + pth
// epoch = time.Time{}
epoch = time.Time{}
// now = time.Now()
now = time.Now()
// modification = now.Add(15 * time.Minute)
modification = now.Add(15 * time.Minute)
// future = now.Add(45 * time.Minute)
future = now.Add(45 * time.Minute)
// )
dtch = details.TeamsChannelMessage
)

// table := []struct {
table := []struct {
// name string
name string
// infoURL string
itemType details.ItemType
// scope []GroupsScope
creator string
// expect assert.BoolAssertionFunc
scope []GroupsScope
// }{
expect assert.BoolAssertionFunc
// {"host match", host, sel.WebURL([]string{host}), assert.True},
}{
// {"url match", url, sel.WebURL([]string{url}), assert.True},
// TODO(abin): implement
// {"host suffixes host", host, sel.WebURL([]string{host}, SuffixMatch()), assert.True},
// {"host match", host, sel.WebURL([]string{host}), assert.True},
// {"url does not suffix host", url, sel.WebURL([]string{host}, SuffixMatch()), assert.False},
// {"url match", url, sel.WebURL([]string{url}), assert.True},
// {"url has path suffix", url, sel.WebURL([]string{pth}, SuffixMatch()), assert.True},
// {"host suffixes host", host, sel.WebURL([]string{host}, SuffixMatch()), assert.True},
// {"host does not contain substring", host, sel.WebURL([]string{"website"}), assert.False},
// {"url does not suffix host", url, sel.WebURL([]string{host}, SuffixMatch()), assert.False},
// {"url does not suffix substring", url, sel.WebURL([]string{"oo"}, SuffixMatch()), assert.False},
// {"url has path suffix", url, sel.WebURL([]string{pth}, SuffixMatch()), assert.True},
// {"host mismatch", host, sel.WebURL([]string{"www.google.com"}), assert.False},
// {"host does not contain substring", host, sel.WebURL([]string{"website"}), assert.False},
// {"file create after the epoch", host, sel.CreatedAfter(dttm.Format(epoch)), assert.True},
// {"url does not suffix substring", url, sel.WebURL([]string{"oo"}, SuffixMatch()), assert.False},
// {"file create after now", host, sel.CreatedAfter(dttm.Format(now)), assert.False},
// {"host mismatch", host, sel.WebURL([]string{"www.google.com"}), assert.False},
// {"file create after later", url, sel.CreatedAfter(dttm.Format(future)), assert.False},
// {"file create after the epoch", host, sel.CreatedAfter(dttm.Format(epoch)), assert.True},
// {"file create before future", host, sel.CreatedBefore(dttm.Format(future)), assert.True},
// {"file create after now", host, sel.CreatedAfter(dttm.Format(now)), assert.False},
// {"file create before now", host, sel.CreatedBefore(dttm.Format(now)), assert.False},
// {"file create after later", url, sel.CreatedAfter(dttm.Format(future)), assert.False},
// {"file create before modification", host, sel.CreatedBefore(dttm.Format(modification)), assert.True},
// {"file create before future", host, sel.CreatedBefore(dttm.Format(future)), assert.True},
// {"file create before epoch", host, sel.CreatedBefore(dttm.Format(now)), assert.False},
// {"file create before now", host, sel.CreatedBefore(dttm.Format(now)), assert.False},
// {"file modified after the epoch", host, sel.ModifiedAfter(dttm.Format(epoch)), assert.True},
// {"file create before modification", host, sel.CreatedBefore(dttm.Format(modification)), assert.True},
// {"file modified after now", host, sel.ModifiedAfter(dttm.Format(now)), assert.True},
// {"file create before epoch", host, sel.CreatedBefore(dttm.Format(now)), assert.False},
// {"file modified after later", host, sel.ModifiedAfter(dttm.Format(future)), assert.False},
// {"file modified after the epoch", host, sel.ModifiedAfter(dttm.Format(epoch)), assert.True},
// {"file modified before future", host, sel.ModifiedBefore(dttm.Format(future)), assert.True},
// {"file modified after now", host, sel.ModifiedAfter(dttm.Format(now)), assert.True},
// {"file modified before now", host, sel.ModifiedBefore(dttm.Format(now)), assert.False},
// {"file modified after later", host, sel.ModifiedAfter(dttm.Format(future)), assert.False},
// {"file modified before epoch", host, sel.ModifiedBefore(dttm.Format(now)), assert.False},
// {"file modified before future", host, sel.ModifiedBefore(dttm.Format(future)), assert.True},
// {"in library", host, sel.Library("included-library"), assert.True},
// {"file modified before now", host, sel.ModifiedBefore(dttm.Format(now)), assert.False},
// {"not in library", host, sel.Library("not-included-library"), assert.False},
// {"file modified before epoch", host, sel.ModifiedBefore(dttm.Format(now)), assert.False},
// {"library id", host, sel.Library("1234"), assert.True},
// {"in library", host, sel.Library("included-library"), assert.True},
// {"not library id", host, sel.Library("abcd"), assert.False},
// {"not in library", host, sel.Library("not-included-library"), assert.False},
// }
// {"library id", host, sel.Library("1234"), assert.True},
// for _, test := range table {
// {"not library id", host, sel.Library("abcd"), assert.False},
// suite.Run(test.name, func() {
// t := suite.T()

// itemInfo := details.ItemInfo{
{"channel message created by", dtch, user, sel.MessageCreator(user), assert.True},
// Groups: &details.GroupsInfo{
{"channel message not created by", dtch, user, sel.MessageCreator(host), assert.False},
// ItemType: details.GroupsPage,
{"chan msg create after the epoch", dtch, user, sel.MessageCreatedAfter(dttm.Format(epoch)), assert.True},
// WebURL: test.infoURL,
{"chan msg create after now", dtch, user, sel.MessageCreatedAfter(dttm.Format(now)), assert.False},
// Created: now,
{"chan msg create after later", dtch, user, sel.MessageCreatedAfter(dttm.Format(future)), assert.False},
// Modified: modification,
{"chan msg create before future", dtch, user, sel.MessageCreatedBefore(dttm.Format(future)), assert.True},
// DriveName: "included-library",
{"chan msg create before now", dtch, user, sel.MessageCreatedBefore(dttm.Format(now)), assert.False},
// DriveID: "1234",
{"chan msg create before reply", dtch, user, sel.MessageCreatedBefore(dttm.Format(modification)), assert.True},
// },
{"chan msg create before epoch", dtch, user, sel.MessageCreatedBefore(dttm.Format(now)), assert.False},
// }
{"chan msg last reply after the epoch", dtch, user, sel.MessageLastReplyAfter(dttm.Format(epoch)), assert.True},
{"chan msg last reply after now", dtch, user, sel.MessageLastReplyAfter(dttm.Format(now)), assert.True},
{"chan msg last reply after later", dtch, user, sel.MessageLastReplyAfter(dttm.Format(future)), assert.False},
{"chan msg last reply before future", dtch, user, sel.MessageLastReplyBefore(dttm.Format(future)), assert.True},
{"chan msg last reply before now", dtch, user, sel.MessageLastReplyBefore(dttm.Format(now)), assert.False},
{"chan msg last reply before epoch", dtch, user, sel.MessageLastReplyBefore(dttm.Format(now)), assert.False},
}

for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()

// scopes := setScopesToDefault(test.scope)
itemInfo := details.ItemInfo{
// for _, scope := range scopes {
Groups: &details.GroupsInfo{
// test.expect(t, scope.matchesInfo(itemInfo))
ItemType: test.itemType,
// }
WebURL: test.creator,
// })
MessageCreator: test.creator,
// }
Created: now,
// }
Modified: modification,
LastReplyAt: modification,
DriveName: "included-library",
DriveID: "1234",
},
}

scopes := setScopesToDefault(test.scope)
for _, scope := range scopes {
test.expect(t, scope.matchesInfo(itemInfo))
}
})
}
}
func (suite *GroupsSelectorSuite) TestCategory_PathType() {
table := []struct {
cat groupsCategory
pathType path.CategoryType
}{
{
{GroupsCategoryUnknown, path.UnknownCategory},
cat: GroupsCategoryUnknown,
{GroupsChannel, path.ChannelMessagesCategory},
pathType: path.UnknownCategory,
{GroupsChannelMessage, path.ChannelMessagesCategory},
},
{GroupsInfoChannelMessageCreator, path.ChannelMessagesCategory},
{
{GroupsInfoChannelMessageCreatedAfter, path.ChannelMessagesCategory},
cat: GroupsChannel,
{GroupsInfoChannelMessageCreatedBefore, path.ChannelMessagesCategory},
pathType: path.ChannelMessagesCategory,
{GroupsInfoChannelMessageLastReplyAfter, path.ChannelMessagesCategory},
},
{GroupsInfoChannelMessageLastReplyBefore, path.ChannelMessagesCategory},
{
{GroupsLibraryFolder, path.LibrariesCategory},
cat: GroupsChannelMessage,
{GroupsLibraryItem, path.LibrariesCategory},
pathType: path.ChannelMessagesCategory,
{GroupsInfoSiteLibraryDrive, path.LibrariesCategory},
},
}
for _, test := range table {
suite.Run(test.cat.String(), func() {
@ -9,19 +9,79 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/logger"
)

// ---------------------------------------------------------------------------
// item pager
// channel message pager
// ---------------------------------------------------------------------------

type ChannelMessageDeltaEnumerator interface {
var _ DeltaPager[models.ChatMessageable] = &channelMessageDeltaPageCtrl{}
DeltaGetPager
ValuesInPageLinker[models.ChatMessageable]
type channelMessageDeltaPageCtrl struct {
SetNextLinker
resourceID, channelID string
gs graph.Servicer
builder *teams.ItemChannelsItemMessagesDeltaRequestBuilder
options *teams.ItemChannelsItemMessagesDeltaRequestBuilderGetRequestConfiguration
}

var _ ChannelMessageDeltaEnumerator = &ChannelMessageDeltaPageCtrl{}
func (p *channelMessageDeltaPageCtrl) SetNext(nextLink string) {
p.builder = teams.NewItemChannelsItemMessagesDeltaRequestBuilder(nextLink, p.gs.Adapter())
}

func (p *channelMessageDeltaPageCtrl) GetPage(
ctx context.Context,
) (DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options)
return resp, graph.Stack(ctx, err).OrNil()
}

func (p *channelMessageDeltaPageCtrl) Reset(context.Context) {
p.builder = p.gs.
Client().
Teams().
ByTeamId(p.resourceID).
Channels().
ByChannelId(p.channelID).
Messages().
Delta()
}

func (p *channelMessageDeltaPageCtrl) ValuesIn(l PageLinker) ([]models.ChatMessageable, error) {
return getValues[models.ChatMessageable](l)
}

func (c Channels) NewChannelMessageDeltaPager(
teamID, channelID, prevDelta string,
) *channelMessageDeltaPageCtrl {
builder := c.Stable.
Client().
Teams().
ByTeamId(teamID).
Channels().
ByChannelId(channelID).
Messages().
Delta()

if len(prevDelta) > 0 {
builder = teams.NewItemChannelsItemMessagesDeltaRequestBuilder(prevDelta, c.Stable.Adapter())
}

options := &teams.ItemChannelsItemMessagesDeltaRequestBuilderGetRequestConfiguration{
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize)),
}

return &channelMessageDeltaPageCtrl{
resourceID: teamID,
channelID: channelID,
builder: builder,
gs: c.Stable,
options: options,
}
}
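The concrete pagers above now assert conformance against generic pager contracts rather than the per-type enumerator interfaces they replace. The real definitions live elsewhere in the package; the following is a minimal sketch inferred from the method sets visible in this diff, so the exact names and signatures should be treated as assumptions:

// Inferred sketch of the generic contracts, not copied from the codebase.
type Pager[T any] interface {
	GetPage(context.Context) (PageLinker, error)
	SetNext(nextLink string)
	ValuesIn(PageLinker) ([]T, error)
}

type DeltaPager[T any] interface {
	GetPage(context.Context) (DeltaPageLinker, error)
	SetNext(nextLink string)
	Reset(context.Context)
	ValuesIn(PageLinker) ([]T, error)
}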
// var _ ChannelMessageDeltaEnumerator = &ChannelMessageDeltaPageCtrl{}
var _ Pager[models.Channelable] = &channelPageCtrl{}

type ChannelMessageDeltaPageCtrl struct {
gs graph.Servicer
@ -88,7 +148,7 @@ type channelMessagePageCtrl struct {
func (c Channels) GetItemIDsInContainer(
ctx context.Context,
teamID, channelID string,
) (map[string]MessageItemIDType, error) {
) (map[string]struct{}, error) {
ctx = clues.Add(ctx, "channel_id", channelID)
pager := c.NewChannelItemPager(teamID, channelID)

@ -97,12 +157,10 @@ func (c Channels) GetItemIDsInContainer(
return nil, graph.Wrap(ctx, err, "enumerating contacts")
}

m := map[string]MessageItemIDType{}
m := map[string]struct{}{}

for _, item := range items {
m[ptr.Val(item.GetId())] = MessageItemIDType{
m[ptr.Val(item.GetId())] = struct{}{}
ItemID: ptr.Val(item.GetId()),
}
}

return m, nil
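GetItemIDsInContainer now returns a bare set: the empty-struct values carry no data, so callers only test membership. A small caller-side sketch of consuming the new shape, with a hypothetical message ID:

ids, err := c.GetItemIDsInContainer(ctx, teamID, channelID)
if err != nil {
	return err
}

// membership check against the new map[string]struct{} result
if _, ok := ids["<some-message-id>"]; ok {
	// the message already exists in this channel
}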
@ -131,6 +189,7 @@ func (c Channels) NewChannelItemPager(
return &channelMessagePageCtrl{c.Stable, builder, options}
}

//lint:ignore U1000 False Positive
func (p *channelMessagePageCtrl) getPage(ctx context.Context) (PageLinkValuer[models.ChatMessageable], error) {
page, err := p.builder.Get(ctx, p.options)
if err != nil {
@ -145,17 +204,71 @@ func (p *channelMessagePageCtrl) setNext(nextLink string) {
p.builder = teams.NewItemChannelsItemMessagesRequestBuilder(nextLink, p.gs.Adapter())
}

// GetChannelMessagesDelta fetches a delta of all messages in the channel.
func (c Channels) GetChannelMessagesDelta(
ctx context.Context,
teamID, channelID, prevDelta string,
) ([]models.ChatMessageable, DeltaUpdate, error) {
var (
vs = []models.ChatMessageable{}
pager = c.NewChannelMessageDeltaPager(teamID, channelID, prevDelta)
invalidPrevDelta = len(prevDelta) == 0
newDeltaLink string
)

// Loop through all pages returned by Graph API.
for {
page, err := pager.GetPage(graph.ConsumeNTokens(ctx, graph.SingleGetOrDeltaLC))
if graph.IsErrInvalidDelta(err) {
logger.Ctx(ctx).Infow("Invalid previous delta", "delta_link", prevDelta)

invalidPrevDelta = true
vs = []models.ChatMessageable{}

pager.Reset(ctx)

continue
}

if err != nil {
return nil, DeltaUpdate{}, graph.Wrap(ctx, err, "retrieving page of channel messages")
}

vals, err := pager.ValuesIn(page)
if err != nil {
return nil, DeltaUpdate{}, graph.Wrap(ctx, err, "extracting channel messages from response")
}

vs = append(vs, vals...)

nextLink, deltaLink := NextAndDeltaLink(page)

if len(deltaLink) > 0 {
newDeltaLink = deltaLink
}

if len(nextLink) == 0 {
break
}

pager.SetNext(nextLink)
}

logger.Ctx(ctx).Debugf("retrieved %d channel messages", len(vs))

du := DeltaUpdate{
URL: newDeltaLink,
Reset: invalidPrevDelta,
}

return vs, du, nil
}
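A hedged usage sketch of GetChannelMessagesDelta as defined above: callers persist the returned delta link between runs, and drop any cached state when Reset comes back true. The wrapper function and its names are illustrative, not part of this commit:

func incrementalChannelMessages(
	ctx context.Context,
	c Channels, // assumed to be an initialized api Channels client
	teamID, channelID, prevDelta string,
) ([]models.ChatMessageable, string, error) {
	msgs, du, err := c.GetChannelMessagesDelta(ctx, teamID, channelID, prevDelta)
	if err != nil {
		return nil, "", err
	}

	if du.Reset {
		// the prior delta token was empty or rejected; any state keyed to
		// prevDelta must be rebuilt from msgs alone
	}

	// du.URL is the token to hand back on the next incremental pass
	return msgs, du.URL, nil
}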
// ---------------------------------------------------------------------------
// channel pager
// ---------------------------------------------------------------------------

type ChannelEnumerator interface {
var _ Pager[models.Channelable] = &channelPageCtrl{}
PageLinker
ValuesInPageLinker[models.Channelable]
SetNextLinker
}

var _ ChannelEnumerator = &channelPageCtrl{}

type channelPageCtrl struct {
gs graph.Servicer
@ -163,14 +276,31 @@ type channelPageCtrl struct {
options *teams.ItemChannelsRequestBuilderGetRequestConfiguration
}

func (p *channelPageCtrl) SetNext(nextLink string) {
p.builder = teams.NewItemChannelsRequestBuilder(nextLink, p.gs.Adapter())
}

func (p *channelPageCtrl) GetPage(
ctx context.Context,
) (PageLinker, error) {
resp, err := p.builder.Get(ctx, p.options)
return resp, graph.Stack(ctx, err).OrNil()
}

func (p *channelPageCtrl) ValuesIn(l PageLinker) ([]models.Channelable, error) {
return getValues[models.Channelable](l)
}

func (c Channels) NewChannelPager(
teamID,
teamID string,
channelID string,
fields []string,
) *channelPageCtrl {
requestConfig := &teams.ItemChannelsRequestBuilderGetRequestConfiguration{
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize)),
}

res := &channelPageCtrl{
gs: c.Stable,
options: nil,
options: requestConfig,
builder: c.Stable.
Client().
Teams().
@ -181,30 +311,39 @@ func (c Channels) NewChannelPager(
return res
}

func (p *channelPageCtrl) SetNext(nextLink string) {
// GetChannels fetches all channels in the team.
p.builder = teams.NewItemChannelsRequestBuilder(nextLink, p.gs.Adapter())
func (c Channels) GetChannels(
}
ctx context.Context,
teamID string,
func (p *channelPageCtrl) GetPage(ctx context.Context) (PageLinker, error) {
) ([]models.Channelable, error) {
var (
resp PageLinker
vs = []models.Channelable{}
err error
pager = c.NewChannelPager(teamID)
)

resp, err = p.builder.Get(ctx, p.options)
// Loop through all pages returned by Graph API.
if err != nil {
for {
return nil, graph.Stack(ctx, err)
page, err := pager.GetPage(ctx)
if err != nil {
return nil, graph.Wrap(ctx, err, "retrieving page of channels")
}

vals, err := pager.ValuesIn(page)
if err != nil {
return nil, graph.Wrap(ctx, err, "extracting channels from response")
}

vs = append(vs, vals...)

nextLink := ptr.Val(page.GetOdataNextLink())
if len(nextLink) == 0 {
break
}

pager.SetNext(nextLink)
}

return resp, nil
logger.Ctx(ctx).Debugf("retrieved %d channels", len(vs))
}

func (p *channelPageCtrl) ValuesIn(l PageLinker) ([]models.Channelable, error) {
return vs, nil
return getValues[models.Channelable](l)
}

func (p *channelPageCtrl) GetOdataNextLink() *string {
// No next link present in the API result
emptyString := ""
return &emptyString
}
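And a matching caller sketch for the non-delta GetChannels helper above (identifiers illustrative; assumes an initialized Channels client):

chans, err := c.GetChannels(ctx, teamID)
if err != nil {
	return graph.Wrap(ctx, err, "listing channels")
}

// collect the channel IDs, guarding nil pointers from the Graph SDK models
ids := make([]string, 0, len(chans))
for _, ch := range chans {
	ids = append(ids, ptr.Val(ch.GetId()))
}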
@ -4,64 +4,70 @@ import (
"testing"

"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"

"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
)

type ChannelPagerIntgSuite struct {
type ChannelsPagerIntgSuite struct {
tester.Suite
its intgTesterSetup
}

func TestChannelPagerIntgSuite(t *testing.T) {
suite.Run(t, &ChannelPagerIntgSuite{
suite.Run(t, &ChannelsPagerIntgSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs}),
})
}

func (suite *ChannelPagerIntgSuite) SetupSuite() {
func (suite *ChannelsPagerIntgSuite) SetupSuite() {
suite.its = newIntegrationTesterSetup(suite.T())
}

// This will be added once 'pager' is implemented
func (suite *ChannelsPagerIntgSuite) TestEnumerateChannels() {
// func (suite *ChannelPagerIntgSuite) TestChannels_GetPage() {
// t := suite.T()

// ctx, flush := tester.NewContext(t)
// defer flush()

// teamID := tconfig.M365TeamID(t)
// channelID := tconfig.M365ChannelID(t)
// pager := suite.its.ac.Channels().NewMessagePager(teamID, channelID, []string{})
// a, err := pager.GetPage(ctx)
// assert.NoError(t, err, clues.ToCore(err))
// assert.NotNil(t, a)
// }

func (suite *ChannelPagerIntgSuite) TestChannels_Get() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()

var (
containerName = "General"
t = suite.T()
teamID = tconfig.M365TeamID(t)
ac = suite.its.ac.Channels()
chanClient = suite.its.ac.Channels()
)

// GET channel -should be found
ctx, flush := tester.NewContext(t)
channel, err := chanClient.GetChannelByName(ctx, teamID, containerName)
defer flush()
assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, ptr.Val(channel.GetDisplayName()), containerName)

// GET channel -should be found
chans, err := ac.GetChannels(ctx, suite.its.group.id)
_, err = chanClient.GetChannel(ctx, teamID, ptr.Val(channel.GetId()))
require.NoError(t, err, clues.ToCore(err))
assert.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, chans)
}

func (suite *ChannelsPagerIntgSuite) TestEnumerateChannelMessages() {
var (
t = suite.T()
ac = suite.its.ac.Channels()
)

ctx, flush := tester.NewContext(t)
defer flush()

msgs, du, err := ac.GetChannelMessagesDelta(
ctx,
suite.its.group.id,
suite.its.group.testContainerID,
"")
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, msgs)
require.NotZero(t, du.URL, "delta link")
require.True(t, du.Reset, "reset due to empty prev delta link")

msgs, du, err = ac.GetChannelMessagesDelta(
ctx,
suite.its.group.id,
suite.its.group.testContainerID,
du.URL)
require.NoError(t, err, clues.ToCore(err))
require.Empty(t, msgs, "should have no new messages from delta")
require.NotZero(t, du.URL, "delta link")
require.False(t, du.Reset, "prev delta link should be valid")
}
@ -82,13 +82,10 @@ func (c Contacts) DeleteContainer(
return nil
}

// prefer GetContainerByID where possible.
func (c Contacts) GetContainerByID(
// use this only in cases where the models.ContactFolderable
// is required.
func (c Contacts) GetFolder(
ctx context.Context,
userID, containerID string,
) (models.ContactFolderable, error) {
) (graph.Container, error) {
config := &users.ItemContactFoldersContactFolderItemRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemContactFoldersContactFolderItemRequestBuilderGetQueryParameters{
Select: idAnd(displayName, parentFolderID),
@ -109,14 +106,6 @@ func (c Contacts) GetFolder(
return resp, nil
}

// interface-compliant wrapper of GetFolder
func (c Contacts) GetContainerByID(
ctx context.Context,
userID, containerID string,
) (graph.Container, error) {
return c.GetFolder(ctx, userID, containerID)
}

// GetContainerByName fetches a folder by name
func (c Contacts) GetContainerByName(
ctx context.Context,
@ -191,7 +191,7 @@ func (c Contacts) GetItemIDsInContainer(
// item ID pager
// ---------------------------------------------------------------------------

var _ itemIDPager = &contactIDPager{}
var _ DeltaPager[getIDAndAddtler] = &contactIDPager{}

type contactIDPager struct {
gs graph.Servicer
@ -203,7 +203,7 @@ func (c Contacts) NewContactIDsPager(
ctx context.Context,
userID, containerID string,
immutableIDs bool,
) itemIDPager {
) DeltaPager[getIDAndAddtler] {
config := &users.ItemContactFoldersItemContactsRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemContactFoldersItemContactsRequestBuilderGetQueryParameters{
Select: idAnd(parentFolderID),
@ -223,7 +223,7 @@ func (c Contacts) NewContactIDsPager(
return &contactIDPager{c.Stable, builder, config}
}

func (p *contactIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
func (p *contactIDPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
@ -232,14 +232,14 @@ func (p *contactIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
return EmptyDeltaLinker[models.Contactable]{PageLinkValuer: resp}, nil
}

func (p *contactIDPager) setNext(nextLink string) {
func (p *contactIDPager) SetNext(nextLink string) {
p.builder = users.NewItemContactFoldersItemContactsRequestBuilder(nextLink, p.gs.Adapter())
}

// non delta pagers don't need reset
func (p *contactIDPager) reset(context.Context) {}
func (p *contactIDPager) Reset(context.Context) {}

func (p *contactIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
func (p *contactIDPager) ValuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
return toValues[models.Contactable](pl)
}

@ -247,7 +247,7 @@ func (p *contactIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
// delta item ID pager
// ---------------------------------------------------------------------------

var _ itemIDPager = &contactDeltaIDPager{}
var _ DeltaPager[getIDAndAddtler] = &contactDeltaIDPager{}

type contactDeltaIDPager struct {
gs graph.Servicer
@ -271,7 +271,7 @@ func (c Contacts) NewContactDeltaIDsPager(
ctx context.Context,
userID, containerID, oldDelta string,
immutableIDs bool,
) itemIDPager {
) DeltaPager[getIDAndAddtler] {
options := &users.ItemContactFoldersItemContactsDeltaRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemContactFoldersItemContactsDeltaRequestBuilderGetQueryParameters{
Select: idAnd(parentFolderID),
@ -290,7 +290,7 @@ func (c Contacts) NewContactDeltaIDsPager(
return &contactDeltaIDPager{c.Stable, userID, containerID, builder, options}
}

func (p *contactDeltaIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
func (p *contactDeltaIDPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
@ -299,15 +299,15 @@ func (p *contactDeltaIDPager) getPage(ctx context.Context) (DeltaPageLinker, err
return resp, nil
}

func (p *contactDeltaIDPager) setNext(nextLink string) {
func (p *contactDeltaIDPager) SetNext(nextLink string) {
p.builder = users.NewItemContactFoldersItemContactsDeltaRequestBuilder(nextLink, p.gs.Adapter())
}

func (p *contactDeltaIDPager) reset(ctx context.Context) {
func (p *contactDeltaIDPager) Reset(ctx context.Context) {
p.builder = getContactDeltaBuilder(ctx, p.gs, p.userID, p.containerID, p.options)
}

func (p *contactDeltaIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
func (p *contactDeltaIDPager) ValuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
return toValues[models.Contactable](pl)
}
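The `var _ DeltaPager[getIDAndAddtler] = &contactIDPager{}` lines above are compile-time conformance checks: they cost nothing at runtime, but the build breaks if one of the renamed methods (getPage to GetPage, and so on) misses the interface. A self-contained illustration of the idiom, with toy types that are not from this codebase:

package main

type Sized interface{ Size() int }

type file struct{ bytes int }

func (f *file) Size() int { return f.bytes }

// compile-time assertion: *file must satisfy Sized, or compilation fails
var _ Sized = (*file)(nil)

func main() {}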
|||||||
@ -39,13 +39,13 @@ func (suite *ContactsPagerIntgSuite) TestContacts_GetItemsInContainerByCollision
|
|||||||
ctx, flush := tester.NewContext(t)
|
ctx, flush := tester.NewContext(t)
|
||||||
defer flush()
|
defer flush()
|
||||||
|
|
||||||
container, err := ac.GetContainerByID(ctx, suite.its.userID, "contacts")
|
container, err := ac.GetContainerByID(ctx, suite.its.user.id, "contacts")
|
||||||
require.NoError(t, err, clues.ToCore(err))
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
conts, err := ac.Stable.
|
conts, err := ac.Stable.
|
||||||
Client().
|
Client().
|
||||||
Users().
|
Users().
|
||||||
ByUserId(suite.its.userID).
|
ByUserId(suite.its.user.id).
|
||||||
ContactFolders().
|
ContactFolders().
|
||||||
ByContactFolderId(ptr.Val(container.GetId())).
|
ByContactFolderId(ptr.Val(container.GetId())).
|
||||||
Contacts().
|
Contacts().
|
||||||
@ -61,7 +61,7 @@ func (suite *ContactsPagerIntgSuite) TestContacts_GetItemsInContainerByCollision
|
|||||||
|
|
||||||
expect := maps.Keys(expectM)
|
expect := maps.Keys(expectM)
|
||||||
|
|
||||||
results, err := suite.its.ac.Contacts().GetItemsInContainerByCollisionKey(ctx, suite.its.userID, "contacts")
|
results, err := suite.its.ac.Contacts().GetItemsInContainerByCollisionKey(ctx, suite.its.user.id, "contacts")
|
||||||
require.NoError(t, err, clues.ToCore(err))
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
require.Less(t, 0, len(results), "requires at least one result")
|
require.Less(t, 0, len(results), "requires at least one result")
|
||||||
|
|
||||||
@ -91,13 +91,13 @@ func (suite *ContactsPagerIntgSuite) TestContacts_GetItemsIDsInContainer() {
|
|||||||
ctx, flush := tester.NewContext(t)
|
ctx, flush := tester.NewContext(t)
|
||||||
defer flush()
|
defer flush()
|
||||||
|
|
||||||
container, err := ac.GetContainerByID(ctx, suite.its.userID, api.DefaultContacts)
|
container, err := ac.GetContainerByID(ctx, suite.its.user.id, api.DefaultContacts)
|
||||||
require.NoError(t, err, clues.ToCore(err))
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
msgs, err := ac.Stable.
|
msgs, err := ac.Stable.
|
||||||
Client().
|
Client().
|
||||||
Users().
|
Users().
|
||||||
ByUserId(suite.its.userID).
|
ByUserId(suite.its.user.id).
|
||||||
ContactFolders().
|
ContactFolders().
|
||||||
ByContactFolderId(ptr.Val(container.GetId())).
|
ByContactFolderId(ptr.Val(container.GetId())).
|
||||||
Contacts().
|
Contacts().
|
||||||
@ -112,7 +112,7 @@ func (suite *ContactsPagerIntgSuite) TestContacts_GetItemsIDsInContainer() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
results, err := suite.its.ac.Contacts().
|
results, err := suite.its.ac.Contacts().
|
||||||
GetItemIDsInContainer(ctx, suite.its.userID, api.DefaultContacts)
|
GetItemIDsInContainer(ctx, suite.its.user.id, api.DefaultContacts)
|
||||||
require.NoError(t, err, clues.ToCore(err))
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
require.Less(t, 0, len(results), "requires at least one result")
|
require.Less(t, 0, len(results), "requires at least one result")
|
||||||
require.Equal(t, len(expect), len(results), "must have same count of items")
|
require.Equal(t, len(expect), len(results), "must have same count of items")
|
||||||
|
|||||||
@ -141,7 +141,7 @@ func (suite *ContactsAPIIntgSuite) TestContacts_GetContainerByName() {
|
|||||||
|
|
||||||
cc, err := suite.its.ac.Contacts().CreateContainer(
|
cc, err := suite.its.ac.Contacts().CreateContainer(
|
||||||
ctx,
|
ctx,
|
||||||
suite.its.userID,
|
suite.its.user.id,
|
||||||
"",
|
"",
|
||||||
rc.Location)
|
rc.Location)
|
||||||
require.NoError(t, err, clues.ToCore(err))
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
@ -168,7 +168,7 @@ func (suite *ContactsAPIIntgSuite) TestContacts_GetContainerByName() {
|
|||||||
|
|
||||||
_, err := suite.its.ac.
|
_, err := suite.its.ac.
|
||||||
Contacts().
|
Contacts().
|
||||||
GetContainerByName(ctx, suite.its.userID, "", test.name)
|
GetContainerByName(ctx, suite.its.user.id, "", test.name)
|
||||||
test.expectErr(t, err, clues.ToCore(err))
|
test.expectErr(t, err, clues.ToCore(err))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
@ -120,20 +120,11 @@ func (c Drives) GetItemIDsInContainer(
return m, nil
}

// ---------------------------------------------------------------------------
// ---------------------------------------------------------------------------
// delta item pager
// ---------------------------------------------------------------------------

type DriveItemDeltaEnumerator interface {
var _ DeltaPager[models.DriveItemable] = &DriveItemDeltaPageCtrl{}
GetPage(context.Context) (DeltaPageLinker, error)
SetNext(nextLink string)
Reset()
ValuesIn(DeltaPageLinker) ([]models.DriveItemable, error)
}

var _ DriveItemDeltaEnumerator = &DriveItemDeltaPageCtrl{}

type DriveItemDeltaPageCtrl struct {
gs graph.Servicer
@ -198,7 +189,7 @@ func (p *DriveItemDeltaPageCtrl) SetNext(link string) {
p.builder = drives.NewItemItemsItemDeltaRequestBuilder(link, p.gs.Adapter())
}

func (p *DriveItemDeltaPageCtrl) Reset() {
func (p *DriveItemDeltaPageCtrl) Reset(context.Context) {
p.builder = p.gs.Client().
Drives().
ByDriveId(p.driveID).
@ -207,7 +198,7 @@ func (p *DriveItemDeltaPageCtrl) Reset() {
Delta()
}

func (p *DriveItemDeltaPageCtrl) ValuesIn(l DeltaPageLinker) ([]models.DriveItemable, error) {
func (p *DriveItemDeltaPageCtrl) ValuesIn(l PageLinker) ([]models.DriveItemable, error) {
return getValues[models.DriveItemable](l)
}

@ -215,7 +206,7 @@ func (p *DriveItemDeltaPageCtrl) ValuesIn(l DeltaPageLinker) ([]models.DriveItem
// user's drives pager
// ---------------------------------------------------------------------------

var _ DrivePager = &userDrivePager{}
var _ Pager[models.Driveable] = &userDrivePager{}

type userDrivePager struct {
userID string
@ -305,7 +296,7 @@ func (p *userDrivePager) ValuesIn(l PageLinker) ([]models.Driveable, error) {
// site's libraries pager
// ---------------------------------------------------------------------------

var _ DrivePager = &siteDrivePager{}
var _ Pager[models.Driveable] = &siteDrivePager{}

type siteDrivePager struct {
gs graph.Servicer
@ -367,17 +358,10 @@ func (p *siteDrivePager) ValuesIn(l PageLinker) ([]models.Driveable, error) {
// drive pager
// ---------------------------------------------------------------------------

// DrivePager pages through different types of drive owners
type DrivePager interface {
GetPage(context.Context) (PageLinker, error)
SetNext(nextLink string)
ValuesIn(PageLinker) ([]models.Driveable, error)
}

// GetAllDrives fetches all drives for the given pager
func GetAllDrives(
ctx context.Context,
pager DrivePager,
pager Pager[models.Driveable],
retry bool,
maxRetryCount int,
) ([]models.Driveable, error) {
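With the DrivePager interface removed, GetAllDrives accepts any Pager[models.Driveable], so the user and site pagers above are interchangeable at the call site. A hedged sketch; the constructor name and retry values here are assumptions, not taken from this commit:

// pager may be either a user or a site drive pager; both satisfy
// Pager[models.Driveable] after this change
var pager Pager[models.Driveable] = newHypotheticalUserDrivePager(userID)

drives, err := GetAllDrives(ctx, pager, true, 3)
if err != nil {
	return graph.Wrap(ctx, err, "enumerating drives")
}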
@ -39,13 +39,13 @@ func (suite *DrivePagerIntgSuite) TestDrives_GetItemsInContainerByCollisionKey()
}{
{
name: "user drive",
driveID: suite.its.userDriveID,
driveID: suite.its.user.driveID,
rootFolderID: suite.its.userDriveRootFolderID,
rootFolderID: suite.its.user.driveRootFolderID,
},
{
name: "site drive",
driveID: suite.its.siteDriveID,
driveID: suite.its.site.driveID,
rootFolderID: suite.its.siteDriveRootFolderID,
rootFolderID: suite.its.site.driveRootFolderID,
},
}
for _, test := range table {
@ -75,7 +75,7 @@ func (suite *DrivePagerIntgSuite) TestDrives_GetItemsInContainerByCollisionKey()
t,
ims,
"need at least one item to compare in user %s drive %s folder %s",
suite.its.userID, test.driveID, test.rootFolderID)
suite.its.user.id, test.driveID, test.rootFolderID)

results, err := suite.its.ac.
Drives().
@ -113,13 +113,13 @@ func (suite *DrivePagerIntgSuite) TestDrives_GetItemIDsInContainer() {
}{
{
name: "user drive",
driveID: suite.its.userDriveID,
driveID: suite.its.user.driveID,
rootFolderID: suite.its.userDriveRootFolderID,
rootFolderID: suite.its.user.driveRootFolderID,
},
{
name: "site drive",
driveID: suite.its.siteDriveID,
driveID: suite.its.site.driveID,
rootFolderID: suite.its.siteDriveRootFolderID,
rootFolderID: suite.its.site.driveRootFolderID,
},
}
for _, test := range table {
@ -149,7 +149,7 @@ func (suite *DrivePagerIntgSuite) TestDrives_GetItemIDsInContainer() {
t,
igv,
"need at least one item to compare in user %s drive %s folder %s",
suite.its.userID, test.driveID, test.rootFolderID)
suite.its.user.id, test.driveID, test.rootFolderID)

for _, itm := range igv {
expect[ptr.Val(itm.GetId())] = api.DriveItemIDType{
@ -76,8 +76,8 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
// generate a parent for the test data
parent, err := acd.PostItemInContainer(
ctx,
suite.its.userDriveID,
suite.its.user.driveID,
suite.its.userDriveRootFolderID,
suite.its.user.driveRootFolderID,
newItem(rc.Location, true),
control.Replace)
require.NoError(t, err, clues.ToCore(err))
@ -86,7 +86,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
folder := newItem("collision", true)
origFolder, err := acd.PostItemInContainer(
ctx,
suite.its.userDriveID,
suite.its.user.driveID,
ptr.Val(parent.GetId()),
folder,
control.Copy)
@ -96,7 +96,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
file := newItem("collision.txt", false)
origFile, err := acd.PostItemInContainer(
ctx,
suite.its.userDriveID,
suite.its.user.driveID,
ptr.Val(parent.GetId()),
file,
control.Copy)
@ -211,7 +211,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
t := suite.T()
i, err := acd.PostItemInContainer(
ctx,
suite.its.userDriveID,
suite.its.user.driveID,
ptr.Val(parent.GetId()),
test.postItem,
test.onCollision)
@ -239,8 +239,8 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr
// generate a folder for the test data
folder, err := acd.PostItemInContainer(
ctx,
suite.its.userDriveID,
suite.its.user.driveID,
suite.its.userDriveRootFolderID,
suite.its.user.driveRootFolderID,
newItem(rc.Location, true),
// skip instead of replace here to get
// an ErrItemAlreadyExistsConflict, just in case.
@ -252,7 +252,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr
file := newItem(fmt.Sprintf("collision_%d.txt", i), false)
f, err := acd.PostItemInContainer(
ctx,
suite.its.userDriveID,
suite.its.user.driveID,
ptr.Val(folder.GetId()),
file,
control.Copy)
@ -263,7 +263,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr

resultFolder, err := acd.PostItemInContainer(
ctx,
suite.its.userDriveID,
suite.its.user.driveID,
ptr.Val(folder.GetParentReference().GetId()),
newItem(rc.Location, true),
control.Replace)
@ -274,7 +274,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr
resultFileColl, err := acd.Stable.
Client().
Drives().
ByDriveId(suite.its.userDriveID).
ByDriveId(suite.its.user.driveID).
Items().
ByDriveItemId(ptr.Val(resultFolder.GetId())).
Children().
@ -91,13 +91,10 @@ func (c Events) DeleteContainer(
return nil
}

// prefer GetContainerByID where possible.
func (c Events) GetContainerByID(
// use this only in cases where the models.Calendarable
// is required.
func (c Events) GetCalendar(
ctx context.Context,
userID, containerID string,
) (models.Calendarable, error) {
) (graph.Container, error) {
config := &users.ItemCalendarsCalendarItemRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemCalendarsCalendarItemRequestBuilderGetQueryParameters{
Select: idAnd("name", "owner"),
@ -115,20 +112,7 @@ func (c Events) GetCalendar(
return nil, graph.Stack(ctx, err)
}

return resp, nil
return graph.CalendarDisplayable{Calendarable: resp}, nil
}

// interface-compliant wrapper of GetCalendar
func (c Events) GetContainerByID(
ctx context.Context,
userID, containerID string,
) (graph.Container, error) {
cal, err := c.GetCalendar(ctx, userID, containerID)
if err != nil {
return nil, err
}

return graph.CalendarDisplayable{Calendarable: cal}, nil
}

// GetContainerByName fetches a calendar by name
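The merge above folds the old interface-compliant wrapper into GetContainerByID itself: models.Calendarable alone does not satisfy graph.Container, so the fetched calendar is wrapped before returning. A one-line sketch of the adaptation, as it appears in the diff:

// resp is the models.Calendarable fetched from Graph; the wrapper adapts it
// to the graph.Container interface that container callers expect
var container graph.Container = graph.CalendarDisplayable{Calendarable: resp}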
|||||||
@ -173,7 +173,7 @@ func (c Events) GetItemsInContainerByCollisionKey(
|
|||||||
// item ID pager
|
// item ID pager
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
var _ itemIDPager = &eventIDPager{}
|
var _ DeltaPager[getIDAndAddtler] = &eventIDPager{}
|
||||||
|
|
||||||
type eventIDPager struct {
|
type eventIDPager struct {
|
||||||
gs graph.Servicer
|
gs graph.Servicer
|
||||||
@ -185,7 +185,7 @@ func (c Events) NewEventIDsPager(
|
|||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
userID, containerID string,
|
userID, containerID string,
|
||||||
immutableIDs bool,
|
immutableIDs bool,
|
||||||
) (itemIDPager, error) {
|
) (DeltaPager[getIDAndAddtler], error) {
|
||||||
options := &users.ItemCalendarsItemEventsRequestBuilderGetRequestConfiguration{
|
options := &users.ItemCalendarsItemEventsRequestBuilderGetRequestConfiguration{
|
||||||
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)),
|
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)),
|
||||||
QueryParameters: &users.ItemCalendarsItemEventsRequestBuilderGetQueryParameters{
|
QueryParameters: &users.ItemCalendarsItemEventsRequestBuilderGetQueryParameters{
|
||||||
@ -204,7 +204,7 @@ func (c Events) NewEventIDsPager(
|
|||||||
return &eventIDPager{c.Stable, builder, options}, nil
|
return &eventIDPager{c.Stable, builder, options}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *eventIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
|
func (p *eventIDPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
|
||||||
resp, err := p.builder.Get(ctx, p.options)
|
resp, err := p.builder.Get(ctx, p.options)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
 	return nil, graph.Stack(ctx, err)
@@ -213,14 +213,14 @@ func (p *eventIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
 	return EmptyDeltaLinker[models.Eventable]{PageLinkValuer: resp}, nil
 }

-func (p *eventIDPager) setNext(nextLink string) {
+func (p *eventIDPager) SetNext(nextLink string) {
 	p.builder = users.NewItemCalendarsItemEventsRequestBuilder(nextLink, p.gs.Adapter())
 }

 // non delta pagers don't need reset
-func (p *eventIDPager) reset(context.Context) {}
+func (p *eventIDPager) Reset(context.Context) {}

-func (p *eventIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
+func (p *eventIDPager) ValuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
 	return toValues[models.Eventable](pl)
 }

@@ -228,7 +228,7 @@ func (p *eventIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
 // delta item ID pager
 // ---------------------------------------------------------------------------

-var _ itemIDPager = &eventDeltaIDPager{}
+var _ DeltaPager[getIDAndAddtler] = &eventDeltaIDPager{}

 type eventDeltaIDPager struct {
 	gs graph.Servicer
@@ -242,7 +242,7 @@ func (c Events) NewEventDeltaIDsPager(
 	ctx context.Context,
 	userID, containerID, oldDelta string,
 	immutableIDs bool,
-) (itemIDPager, error) {
+) (DeltaPager[getIDAndAddtler], error) {
 	options := &users.ItemCalendarsItemEventsDeltaRequestBuilderGetRequestConfiguration{
 		Headers: newPreferHeaders(preferPageSize(c.options.DeltaPageSize), preferImmutableIDs(immutableIDs)),
 		QueryParameters: &users.ItemCalendarsItemEventsDeltaRequestBuilderGetQueryParameters{
@@ -281,7 +281,7 @@ func getEventDeltaBuilder(
 	return builder
 }

-func (p *eventDeltaIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
+func (p *eventDeltaIDPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
 	resp, err := p.builder.Get(ctx, p.options)
 	if err != nil {
 		return nil, graph.Stack(ctx, err)
@@ -290,15 +290,15 @@ func (p *eventDeltaIDPager) getPage(ctx context.Context) (DeltaPageLinker, error
 	return resp, nil
 }

-func (p *eventDeltaIDPager) setNext(nextLink string) {
+func (p *eventDeltaIDPager) SetNext(nextLink string) {
 	p.builder = users.NewItemCalendarsItemEventsDeltaRequestBuilder(nextLink, p.gs.Adapter())
 }

-func (p *eventDeltaIDPager) reset(ctx context.Context) {
+func (p *eventDeltaIDPager) Reset(ctx context.Context) {
 	p.builder = getEventDeltaBuilder(ctx, p.gs, p.userID, p.containerID, p.options)
 }

-func (p *eventDeltaIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
+func (p *eventDeltaIDPager) ValuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
 	return toValues[models.Eventable](pl)
 }

@@ -39,13 +39,13 @@ func (suite *EventsPagerIntgSuite) TestEvents_GetItemsInContainerByCollisionKey(
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	container, err := ac.GetContainerByID(ctx, suite.its.userID, "calendar")
+	container, err := ac.GetContainerByID(ctx, suite.its.user.id, "calendar")
 	require.NoError(t, err, clues.ToCore(err))

 	evts, err := ac.Stable.
 		Client().
 		Users().
-		ByUserId(suite.its.userID).
+		ByUserId(suite.its.user.id).
 		Calendars().
 		ByCalendarId(ptr.Val(container.GetId())).
 		Events().
@@ -63,7 +63,7 @@ func (suite *EventsPagerIntgSuite) TestEvents_GetItemsInContainerByCollisionKey(

 	results, err := suite.its.ac.
 		Events().
-		GetItemsInContainerByCollisionKey(ctx, suite.its.userID, "calendar")
+		GetItemsInContainerByCollisionKey(ctx, suite.its.user.id, "calendar")
 	require.NoError(t, err, clues.ToCore(err))
 	require.Less(t, 0, len(results), "requires at least one result")

@@ -289,7 +289,7 @@ func (suite *EventsAPIIntgSuite) TestEvents_canFindNonStandardFolder() {
 	ac := suite.its.ac.Events()
 	rc := testdata.DefaultRestoreConfig("api_calendar_discovery")

-	cal, err := ac.CreateContainer(ctx, suite.its.userID, "", rc.Location)
+	cal, err := ac.CreateContainer(ctx, suite.its.user.id, "", rc.Location)
 	require.NoError(t, err, clues.ToCore(err))

 	var (
@@ -306,7 +306,7 @@ func (suite *EventsAPIIntgSuite) TestEvents_canFindNonStandardFolder() {

 	err = ac.EnumerateContainers(
 		ctx,
-		suite.its.userID,
+		suite.its.user.id,
 		"Calendar",
 		findContainer,
 		fault.New(true))
@@ -342,7 +342,7 @@ func (suite *EventsAPIIntgSuite) TestEvents_GetContainerByName() {

 			_, err := suite.its.ac.
 				Events().
-				GetContainerByName(ctx, suite.its.userID, "", test.name)
+				GetContainerByName(ctx, suite.its.user.id, "", test.name)
 			test.expectErr(t, err, clues.ToCore(err))
 		})
 	}
@@ -112,7 +112,7 @@ func (suite *GroupsIntgSuite) TestGetAll() {

 func (suite *GroupsIntgSuite) TestGroups_GetByID() {
 	var (
-		groupID   = suite.its.groupID
+		groupID   = suite.its.group.id
 		groupsAPI = suite.its.ac.Groups()
 	)

@@ -74,16 +74,19 @@ func parseableToMap(t *testing.T, thing serialization.Parsable) map[string]any {
 // Suite Setup
 // ---------------------------------------------------------------------------

+type ids struct {
+	id                string
+	driveID           string
+	driveRootFolderID string
+	testContainerID   string
+}
+
 type intgTesterSetup struct {
 	ac     api.Client
 	gockAC api.Client
-	userID                string
-	userDriveID           string
-	userDriveRootFolderID string
-	siteID                string
-	siteDriveID           string
-	siteDriveRootFolderID string
-	groupID               string
+	user   ids
+	site   ids
+	group  ids
 }

 func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
@@ -106,42 +109,47 @@ func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {

 	// user drive

-	its.userID = tconfig.M365UserID(t)
+	its.user.id = tconfig.M365UserID(t)

-	userDrive, err := its.ac.Users().GetDefaultDrive(ctx, its.userID)
+	userDrive, err := its.ac.Users().GetDefaultDrive(ctx, its.user.id)
 	require.NoError(t, err, clues.ToCore(err))

-	its.userDriveID = ptr.Val(userDrive.GetId())
+	its.user.driveID = ptr.Val(userDrive.GetId())

-	userDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, its.userDriveID)
+	userDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, its.user.driveID)
 	require.NoError(t, err, clues.ToCore(err))

-	its.userDriveRootFolderID = ptr.Val(userDriveRootFolder.GetId())
+	its.user.driveRootFolderID = ptr.Val(userDriveRootFolder.GetId())

-	its.siteID = tconfig.M365SiteID(t)

 	// site

-	siteDrive, err := its.ac.Sites().GetDefaultDrive(ctx, its.siteID)
+	its.site.id = tconfig.M365SiteID(t)
+
+	siteDrive, err := its.ac.Sites().GetDefaultDrive(ctx, its.site.id)
 	require.NoError(t, err, clues.ToCore(err))

-	its.siteDriveID = ptr.Val(siteDrive.GetId())
+	its.site.driveID = ptr.Val(siteDrive.GetId())

-	siteDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, its.siteDriveID)
+	siteDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, its.site.driveID)
 	require.NoError(t, err, clues.ToCore(err))

-	its.siteDriveRootFolderID = ptr.Val(siteDriveRootFolder.GetId())
+	its.site.driveRootFolderID = ptr.Val(siteDriveRootFolder.GetId())

-	// group
+	// groups/teams

 	// use of the TeamID is intentional here, so that we are assured
 	// the group has full usage of the teams api.
-	its.groupID = tconfig.M365TeamID(t)
+	its.group.id = tconfig.M365TeamID(t)

-	team, err := its.ac.Groups().GetByID(ctx, its.groupID)
+	channel, err := its.ac.Channels().
+		GetChannelByName(
+			ctx,
+			its.group.id,
+			"Test")
 	require.NoError(t, err, clues.ToCore(err))
+	require.Equal(t, "Test", ptr.Val(channel.GetDisplayName()))

-	its.groupID = ptr.Val(team.GetId())
+	its.group.testContainerID = ptr.Val(channel.GetId())

 	return its
 }
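Since every suite fixture now flows through the ids struct, reads that used to touch flat fields group by resource. A minimal sketch of a helper built on the fields above (hypothetical, not part of this commit):

	// fixtureIDs pulls the regrouped fixture values off the suite setup.
	// Field names come from the ids struct introduced above.
	func fixtureIDs(its intgTesterSetup) (userID, driveID, channelID string) {
		return its.user.id, its.user.driveID, its.group.testContainerID
	}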
@@ -16,11 +16,31 @@ import (
 // common interfaces
 // ---------------------------------------------------------------------------

-// TODO(keepers): replace all matching uses of GetPage with this.
+type DeltaPager[T any] interface {
+	DeltaGetPager
+	Resetter
+	SetNextLinker
+	ValuesInPageLinker[T]
+}
+
+type Pager[T any] interface {
+	GetPager
+	SetNextLinker
+	ValuesInPageLinker[T]
+}
+
 type DeltaGetPager interface {
 	GetPage(context.Context) (DeltaPageLinker, error)
 }

+type GetPager interface {
+	GetPage(context.Context) (PageLinker, error)
+}
+
+type Valuer[T any] interface {
+	GetValue() []T
+}
+
 type ValuesInPageLinker[T any] interface {
 	ValuesIn(PageLinker) ([]T, error)
 }
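The generic DeltaPager[T] and Pager[T] contracts compose the single-method interfaces declared here, so enumeration code can be written once per pager shape rather than once per Graph model. As a rough sketch of what that buys (not code from this commit; it assumes the package's NextLink helper and is illustrative only):

	// enumerate drains any Pager[T], whatever the underlying item type.
	func enumerate[T any](ctx context.Context, p Pager[T]) ([]T, error) {
		var all []T

		for {
			page, err := p.GetPage(ctx)
			if err != nil {
				return nil, err
			}

			vs, err := p.ValuesIn(page)
			if err != nil {
				return nil, err
			}

			all = append(all, vs...)

			// an empty next link marks the final page.
			next := NextLink(page)
			if len(next) == 0 {
				return all, nil
			}

			p.SetNext(next)
		}
	}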
@@ -34,10 +54,19 @@ type DeltaPageLinker interface {
 	GetOdataDeltaLink() *string
 }

+type PageLinkValuer[T any] interface {
+	PageLinker
+	Valuer[T]
+}
+
 type SetNextLinker interface {
 	SetNext(nextLink string)
 }

+type Resetter interface {
+	Reset(context.Context)
+}
+
 // ---------------------------------------------------------------------------
 // common funcs
 // ---------------------------------------------------------------------------
@@ -55,15 +84,6 @@ func NextAndDeltaLink(pl DeltaPageLinker) (string, string) {
 	return NextLink(pl), ptr.Val(pl.GetOdataDeltaLink())
 }

-type Valuer[T any] interface {
-	GetValue() []T
-}
-
-type PageLinkValuer[T any] interface {
-	PageLinker
-	Valuer[T]
-}
-
 // EmptyDeltaLinker is used to convert PageLinker to DeltaPageLinker
 type EmptyDeltaLinker[T any] struct {
 	PageLinkValuer[T]
@@ -148,19 +168,6 @@ func toValues[T any](a any) ([]getIDAndAddtler, error) {
 	return r, nil
 }

-type itemIDPager interface {
-	// getPage get a page with the specified options from graph
-	getPage(context.Context) (DeltaPageLinker, error)
-	// setNext is used to pass in the next url got from graph
-	setNext(string)
-	// reset is used to clear delta url in delta pagers. When
-	// reset is called, we reset the state(delta url) that we
-	// currently have and start a new delta query without the token.
-	reset(context.Context)
-	// valuesIn gets us the values in a page
-	valuesIn(PageLinker) ([]getIDAndAddtler, error)
-}
-
 type getIDAndAddtler interface {
 	GetId() *string
 	GetAdditionalData() map[string]any
@@ -169,13 +176,13 @@ type getIDAndAddtler interface {
 func getAddedAndRemovedItemIDs(
 	ctx context.Context,
 	service graph.Servicer,
-	pager itemIDPager,
-	deltaPager itemIDPager,
+	pager DeltaPager[getIDAndAddtler],
+	deltaPager DeltaPager[getIDAndAddtler],
 	oldDelta string,
 	canMakeDeltaQueries bool,
 ) ([]string, []string, DeltaUpdate, error) {
 	var (
-		pgr        itemIDPager
+		pgr        DeltaPager[getIDAndAddtler]
 		resetDelta bool
 	)

@@ -204,7 +211,7 @@ func getAddedAndRemovedItemIDs(
 	}

 	// reset deltaPager
-	pgr.reset(ctx)
+	pgr.Reset(ctx)

 	added, removed, deltaURL, err = getItemsAddedAndRemovedFromContainer(ctx, pgr)
 	if err != nil {
@@ -217,7 +224,7 @@ func getAddedAndRemovedItemIDs(
 // generic controller for retrieving all item ids in a container.
 func getItemsAddedAndRemovedFromContainer(
 	ctx context.Context,
-	pager itemIDPager,
+	pager DeltaPager[getIDAndAddtler],
 ) ([]string, []string, string, error) {
 	var (
 		addedIDs = []string{}
@@ -229,14 +236,14 @@ func getItemsAddedAndRemovedFromContainer(

 	for {
 		// get the next page of data, check for standard errors
-		resp, err := pager.getPage(ctx)
+		resp, err := pager.GetPage(ctx)
 		if err != nil {
 			return nil, nil, deltaURL, graph.Stack(ctx, err)
 		}

 		// each category type responds with a different interface, but all
 		// of them comply with GetValue, which is where we'll get our item data.
-		items, err := pager.valuesIn(resp)
+		items, err := pager.ValuesIn(resp)
 		if err != nil {
 			return nil, nil, "", graph.Stack(ctx, err)
 		}
@@ -278,7 +285,7 @@ func getItemsAddedAndRemovedFromContainer(
 			break
 		}

-		pager.setNext(nextLink)
+		pager.SetNext(nextLink)
 	}

 	logger.Ctx(ctx).Infow("completed enumeration", "count", itemCount)
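For orientation, the caller above picks the delta pager whenever delta queries are allowed, and on a rejected or stale delta token it clears the pager state and re-enumerates from scratch. A condensed sketch of that control flow (names follow the code above; the error classifier is an assumption shown for illustration, not a quote from this commit):

	pgr := deltaPager
	if !canMakeDeltaQueries {
		pgr = pager
	}

	added, removed, deltaURL, err := getItemsAddedAndRemovedFromContainer(ctx, pgr)
	if err != nil && graph.IsErrInvalidDelta(err) {
		// the stored delta token is no longer usable: drop the token
		// and restart the enumeration without it.
		pgr.Reset(ctx)

		added, removed, deltaURL, err = getItemsAddedAndRemovedFromContainer(ctx, pgr)
	}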
@@ -95,7 +95,7 @@ func (p *testPager) setNext(nextLink string) {}

 // mock id pager

-var _ itemIDPager = &testIDsPager{}
+var _ DeltaPager[getIDAndAddtler] = &testIDsPager{}

 type testIDsPager struct {
 	t *testing.T
@@ -105,7 +105,7 @@ type testIDsPager struct {
 	needsReset bool
 }

-func (p *testIDsPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
+func (p *testIDsPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
 	if p.errorCode != "" {
 		ierr := odataerrors.NewMainError()
 		ierr.SetCode(&p.errorCode)
@@ -118,8 +118,8 @@ func (p *testIDsPager) getPage(ctx context.Context) (DeltaPageLinker, error) {

 	return testPage{}, nil
 }
-func (p *testIDsPager) setNext(string) {}
-func (p *testIDsPager) reset(context.Context) {
+func (p *testIDsPager) SetNext(string) {}
+func (p *testIDsPager) Reset(context.Context) {
 	if !p.needsReset {
 		require.Fail(p.t, "reset should not be called")
 	}
@@ -128,7 +128,7 @@ func (p *testIDsPager) reset(context.Context) {
 	p.errorCode = ""
 }

-func (p *testIDsPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
+func (p *testIDsPager) ValuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
 	items := []getIDAndAddtler{}

 	for _, id := range p.added {
@@ -208,15 +208,21 @@ func (suite *ItemPagerUnitSuite) TestEnumerateItems() {

 func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
 	tests := []struct {
 		name        string
-		pagerGetter func(*testing.T, context.Context, graph.Servicer, string, string, bool) (itemIDPager, error)
+		pagerGetter func(
+			*testing.T,
+			context.Context,
+			graph.Servicer,
+			string, string,
+			bool,
+		) (DeltaPager[getIDAndAddtler], error)
 		deltaPagerGetter func(
 			*testing.T,
 			context.Context,
 			graph.Servicer,
 			string, string, string,
 			bool,
-		) (itemIDPager, error)
+		) (DeltaPager[getIDAndAddtler], error)
 		added       []string
 		removed     []string
 		deltaUpdate DeltaUpdate
@@ -232,7 +238,7 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
 				user string,
 				directory string,
 				immutableIDs bool,
-			) (itemIDPager, error) {
+			) (DeltaPager[getIDAndAddtler], error) {
 				// this should not be called
 				return nil, assert.AnError
 			},
@@ -244,7 +250,7 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
 				directory string,
 				delta string,
 				immutableIDs bool,
-			) (itemIDPager, error) {
+			) (DeltaPager[getIDAndAddtler], error) {
 				return &testIDsPager{
 					t:     t,
 					added: []string{"uno", "dos"},
@@ -265,7 +271,7 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
 				user string,
 				directory string,
 				immutableIDs bool,
-			) (itemIDPager, error) {
+			) (DeltaPager[getIDAndAddtler], error) {
 				// this should not be called
 				return nil, assert.AnError
 			},
@@ -277,7 +283,7 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
 				directory string,
 				delta string,
 				immutableIDs bool,
-			) (itemIDPager, error) {
+			) (DeltaPager[getIDAndAddtler], error) {
 				return &testIDsPager{
 					t:     t,
 					added: []string{"uno", "dos"},
@@ -299,7 +305,7 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
 				user string,
 				directory string,
 				immutableIDs bool,
-			) (itemIDPager, error) {
+			) (DeltaPager[getIDAndAddtler], error) {
 				// this should not be called
 				return nil, assert.AnError
 			},
@@ -311,7 +317,7 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
 				directory string,
 				delta string,
 				immutableIDs bool,
-			) (itemIDPager, error) {
+			) (DeltaPager[getIDAndAddtler], error) {
 				return &testIDsPager{
 					t:     t,
 					added: []string{"uno", "dos"},
@@ -335,7 +341,7 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
 				user string,
 				directory string,
 				immutableIDs bool,
-			) (itemIDPager, error) {
+			) (DeltaPager[getIDAndAddtler], error) {
 				return &testIDsPager{
 					t:     t,
 					added: []string{"uno", "dos"},
@@ -350,7 +356,7 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
 				directory string,
 				delta string,
 				immutableIDs bool,
-			) (itemIDPager, error) {
+			) (DeltaPager[getIDAndAddtler], error) {
 				return &testIDsPager{errorCode: "ErrorQuotaExceeded"}, nil
 			},
 			added: []string{"uno", "dos"},
@@ -41,7 +41,7 @@ func (suite *ListsAPIIntgSuite) TestLists_PostDrive() {
 	var (
 		acl       = suite.its.ac.Lists()
 		driveName = testdata.DefaultRestoreConfig("list_api_post_drive").Location
-		siteID    = suite.its.siteID
+		siteID    = suite.its.site.id
 	)

 	// first post, should have no errors
@@ -41,46 +41,6 @@ type Mail struct {
 // containers
 // ---------------------------------------------------------------------------

-// CreateMailFolder makes a mail folder iff a folder of the same name does not exist
-// Reference: https://docs.microsoft.com/en-us/graph/api/user-post-mailfolders?view=graph-rest-1.0&tabs=http
-func (c Mail) CreateMailFolder(
-	ctx context.Context,
-	userID, containerName string,
-) (models.MailFolderable, error) {
-	isHidden := false
-	body := models.NewMailFolder()
-	body.SetDisplayName(&containerName)
-	body.SetIsHidden(&isHidden)
-
-	mdl, err := c.Stable.Client().
-		Users().
-		ByUserId(userID).
-		MailFolders().
-		Post(ctx, body, nil)
-	if err != nil {
-		return nil, graph.Wrap(ctx, err, "creating mail folder")
-	}
-
-	return mdl, nil
-}
-
-func (c Mail) DeleteMailFolder(
-	ctx context.Context,
-	userID, id string,
-) error {
-	err := c.Stable.Client().
-		Users().
-		ByUserId(userID).
-		MailFolders().
-		ByMailFolderId(id).
-		Delete(ctx, nil)
-	if err != nil {
-		return graph.Wrap(ctx, err, "deleting mail folder")
-	}
-
-	return nil
-}
-
 func (c Mail) CreateContainer(
 	ctx context.Context,
 	userID, parentContainerID, containerName string,
@@ -131,13 +91,10 @@ func (c Mail) DeleteContainer(
 	return nil
 }

-// prefer GetContainerByID where possible.
-// use this only in cases where the models.MailFolderable
-// is required.
-func (c Mail) GetFolder(
+func (c Mail) GetContainerByID(
 	ctx context.Context,
 	userID, containerID string,
-) (models.MailFolderable, error) {
+) (graph.Container, error) {
 	config := &users.ItemMailFoldersMailFolderItemRequestBuilderGetRequestConfiguration{
 		QueryParameters: &users.ItemMailFoldersMailFolderItemRequestBuilderGetQueryParameters{
 			Select: idAnd(displayName, parentFolderID),
@@ -158,14 +115,6 @@ func (c Mail) GetFolder(
 	return resp, nil
 }

-// interface-compliant wrapper of GetFolder
-func (c Mail) GetContainerByID(
-	ctx context.Context,
-	userID, containerID string,
-) (graph.Container, error) {
-	return c.GetFolder(ctx, userID, containerID)
-}
-
 // GetContainerByName fetches a folder by name
 func (c Mail) GetContainerByName(
 	ctx context.Context,
@@ -174,7 +174,7 @@ func (p *mailPageCtrl) setNext(nextLink string) {
 // item ID pager
 // ---------------------------------------------------------------------------

-var _ itemIDPager = &mailIDPager{}
+var _ DeltaPager[getIDAndAddtler] = &mailIDPager{}

 type mailIDPager struct {
 	gs graph.Servicer
@@ -186,7 +186,7 @@ func (c Mail) NewMailIDsPager(
 	ctx context.Context,
 	userID, containerID string,
 	immutableIDs bool,
-) itemIDPager {
+) DeltaPager[getIDAndAddtler] {
 	config := &users.ItemMailFoldersItemMessagesRequestBuilderGetRequestConfiguration{
 		QueryParameters: &users.ItemMailFoldersItemMessagesRequestBuilderGetQueryParameters{
 			Select: idAnd("isRead"),
@@ -206,7 +206,7 @@ func (c Mail) NewMailIDsPager(
 	return &mailIDPager{c.Stable, builder, config}
 }

-func (p *mailIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
+func (p *mailIDPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
 	page, err := p.builder.Get(ctx, p.options)
 	if err != nil {
 		return nil, graph.Stack(ctx, err)
@@ -215,14 +215,14 @@ func (p *mailIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
 	return EmptyDeltaLinker[models.Messageable]{PageLinkValuer: page}, nil
 }

-func (p *mailIDPager) setNext(nextLink string) {
+func (p *mailIDPager) SetNext(nextLink string) {
 	p.builder = users.NewItemMailFoldersItemMessagesRequestBuilder(nextLink, p.gs.Adapter())
 }

 // non delta pagers don't have reset
-func (p *mailIDPager) reset(context.Context) {}
+func (p *mailIDPager) Reset(context.Context) {}

-func (p *mailIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
+func (p *mailIDPager) ValuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
 	return toValues[models.Messageable](pl)
 }

@@ -272,7 +272,7 @@ func (c Mail) GetItemIDsInContainer(
 // delta item ID pager
 // ---------------------------------------------------------------------------

-var _ itemIDPager = &mailDeltaIDPager{}
+var _ DeltaPager[getIDAndAddtler] = &mailDeltaIDPager{}

 type mailDeltaIDPager struct {
 	gs graph.Servicer
@@ -304,7 +304,7 @@ func (c Mail) NewMailDeltaIDsPager(
 	ctx context.Context,
 	userID, containerID, oldDelta string,
 	immutableIDs bool,
-) itemIDPager {
+) DeltaPager[getIDAndAddtler] {
 	config := &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration{
 		QueryParameters: &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetQueryParameters{
 			Select: idAnd("isRead"),
@@ -324,7 +324,7 @@ func (c Mail) NewMailDeltaIDsPager(
 	return &mailDeltaIDPager{c.Stable, userID, containerID, builder, config}
 }

-func (p *mailDeltaIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
+func (p *mailDeltaIDPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
 	page, err := p.builder.Get(ctx, p.options)
 	if err != nil {
 		return nil, graph.Stack(ctx, err)
@@ -333,11 +333,11 @@ func (p *mailDeltaIDPager) getPage(ctx context.Context) (DeltaPageLinker, error)
 	return page, nil
 }

-func (p *mailDeltaIDPager) setNext(nextLink string) {
+func (p *mailDeltaIDPager) SetNext(nextLink string) {
 	p.builder = users.NewItemMailFoldersItemMessagesDeltaRequestBuilder(nextLink, p.gs.Adapter())
 }

-func (p *mailDeltaIDPager) reset(ctx context.Context) {
+func (p *mailDeltaIDPager) Reset(ctx context.Context) {
 	p.builder = p.gs.
 		Client().
 		Users().
@@ -348,7 +348,7 @@ func (p *mailDeltaIDPager) reset(ctx context.Context) {
 		Delta()
 }

-func (p *mailDeltaIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
+func (p *mailDeltaIDPager) ValuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
 	return toValues[models.Messageable](pl)
 }

@@ -40,13 +40,13 @@ func (suite *MailPagerIntgSuite) TestMail_GetItemsInContainerByCollisionKey() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	container, err := ac.GetContainerByID(ctx, suite.its.userID, api.MailInbox)
+	container, err := ac.GetContainerByID(ctx, suite.its.user.id, api.MailInbox)
 	require.NoError(t, err, clues.ToCore(err))

 	msgs, err := ac.Stable.
 		Client().
 		Users().
-		ByUserId(suite.its.userID).
+		ByUserId(suite.its.user.id).
 		MailFolders().
 		ByMailFolderId(ptr.Val(container.GetId())).
 		Messages().
@@ -62,7 +62,7 @@ func (suite *MailPagerIntgSuite) TestMail_GetItemsInContainerByCollisionKey() {

 	expect := maps.Keys(expectM)

-	results, err := suite.its.ac.Mail().GetItemsInContainerByCollisionKey(ctx, suite.its.userID, api.MailInbox)
+	results, err := suite.its.ac.Mail().GetItemsInContainerByCollisionKey(ctx, suite.its.user.id, api.MailInbox)
 	require.NoError(t, err, clues.ToCore(err))
 	require.Less(t, 0, len(results), "requires at least one result")

@@ -101,7 +101,7 @@ func (suite *MailPagerIntgSuite) TestMail_GetItemsIDsInContainer() {
 	msgs, err := ac.Stable.
 		Client().
 		Users().
-		ByUserId(suite.its.userID).
+		ByUserId(suite.its.user.id).
 		MailFolders().
 		ByMailFolderId(api.MailInbox).
 		Messages().
@@ -116,7 +116,7 @@ func (suite *MailPagerIntgSuite) TestMail_GetItemsIDsInContainer() {
 	}

 	results, err := suite.its.ac.Mail().
-		GetItemIDsInContainer(ctx, suite.its.userID, api.MailInbox)
+		GetItemIDsInContainer(ctx, suite.its.user.id, api.MailInbox)
 	require.NoError(t, err, clues.ToCore(err))
 	require.Less(t, 0, len(results), "requires at least one result")
 	require.Equal(t, len(expect), len(results), "must have same count of items")
@@ -383,7 +383,7 @@ func (suite *MailAPIIntgSuite) TestMail_RestoreLargeAttachment() {

 	folderName := testdata.DefaultRestoreConfig("maillargeattachmenttest").Location
 	msgs := suite.its.ac.Mail()
-	mailfolder, err := msgs.CreateMailFolder(ctx, userID, folderName)
+	mailfolder, err := msgs.CreateContainer(ctx, userID, api.MsgFolderRoot, folderName)
 	require.NoError(t, err, clues.ToCore(err))

 	msg := models.NewMessage()
@@ -414,7 +414,7 @@ func (suite *MailAPIIntgSuite) TestMail_GetContainerByName() {
 	ctx, flush := tester.NewContext(t)
 	defer flush()

-	parent, err := acm.CreateContainer(ctx, suite.its.userID, "msgfolderroot", rc.Location)
+	parent, err := acm.CreateContainer(ctx, suite.its.user.id, "msgfolderroot", rc.Location)
 	require.NoError(t, err, clues.ToCore(err))

 	table := []struct {
@@ -448,7 +448,7 @@ func (suite *MailAPIIntgSuite) TestMail_GetContainerByName() {
 			ctx, flush := tester.NewContext(t)
 			defer flush()

-			_, err := acm.GetContainerByName(ctx, suite.its.userID, test.parentContainerID, test.name)
+			_, err := acm.GetContainerByName(ctx, suite.its.user.id, test.parentContainerID, test.name)
 			test.expectErr(t, err, clues.ToCore(err))
 		})
 	}
@@ -460,10 +460,10 @@ func (suite *MailAPIIntgSuite) TestMail_GetContainerByName() {
 		ctx, flush := tester.NewContext(t)
 		defer flush()

-		child, err := acm.CreateContainer(ctx, suite.its.userID, pid, rc.Location)
+		child, err := acm.CreateContainer(ctx, suite.its.user.id, pid, rc.Location)
 		require.NoError(t, err, clues.ToCore(err))

-		result, err := acm.GetContainerByName(ctx, suite.its.userID, pid, rc.Location)
+		result, err := acm.GetContainerByName(ctx, suite.its.user.id, pid, rc.Location)
 		assert.NoError(t, err, clues.ToCore(err))
 		assert.Equal(t, ptr.Val(child.GetId()), ptr.Val(result.GetId()))
 	})
@@ -1,56 +0,0 @@
-package mock
-
-import (
-	"context"
-
-	"github.com/alcionai/clues"
-	"github.com/microsoftgraph/msgraph-sdk-go/models"
-
-	"github.com/alcionai/corso/src/pkg/services/m365/api"
-)
-
-type PageLink struct {
-	Link *string
-}
-
-func (pl *PageLink) GetOdataNextLink() *string {
-	return pl.Link
-}
-
-type PagerResult struct {
-	Drives   []models.Driveable
-	NextLink *string
-	Err      error
-}
-
-type DrivePager struct {
-	ToReturn []PagerResult
-	GetIdx   int
-}
-
-func (p *DrivePager) GetPage(context.Context) (api.PageLinker, error) {
-	if len(p.ToReturn) <= p.GetIdx {
-		return nil, clues.New("ToReturn index out of bounds")
-	}
-
-	idx := p.GetIdx
-	p.GetIdx++
-
-	return &PageLink{p.ToReturn[idx].NextLink}, p.ToReturn[idx].Err
-}
-
-func (p *DrivePager) SetNext(string) {}
-
-func (p *DrivePager) ValuesIn(api.PageLinker) ([]models.Driveable, error) {
-	idx := p.GetIdx
-	if idx > 0 {
-		// Return values lag by one since we increment in GetPage().
-		idx--
-	}
-
-	if len(p.ToReturn) <= idx {
-		return nil, clues.New("ToReturn index out of bounds")
-	}
-
-	return p.ToReturn[idx].Drives, nil
-}
src/pkg/services/m365/api/mock/pager.go (new file, 113 lines)
@@ -0,0 +1,113 @@
+package mock
+
+import (
+	"context"
+
+	"github.com/alcionai/clues"
+
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
+)
+
+type DeltaNextLinks struct {
+	Next  *string
+	Delta *string
+}
+
+func (dnl *DeltaNextLinks) GetOdataNextLink() *string {
+	return dnl.Next
+}
+
+func (dnl *DeltaNextLinks) GetOdataDeltaLink() *string {
+	return dnl.Delta
+}
+
+type PagerResult[T any] struct {
+	Values    []T
+	NextLink  *string
+	DeltaLink *string
+	Err       error
+}
+
+// ---------------------------------------------------------------------------
+// non-delta pager
+// ---------------------------------------------------------------------------
+
+type Pager[T any] struct {
+	ToReturn []PagerResult[T]
+	getIdx   int
+}
+
+func (p *Pager[T]) GetPage(context.Context) (api.PageLinker, error) {
+	if len(p.ToReturn) <= p.getIdx {
+		return nil, clues.New("index out of bounds").
+			With("index", p.getIdx, "values", p.ToReturn)
+	}
+
+	idx := p.getIdx
+	p.getIdx++
+
+	link := DeltaNextLinks{Next: p.ToReturn[idx].NextLink}
+
+	return &link, p.ToReturn[idx].Err
+}
+
+func (p *Pager[T]) SetNext(string) {}
+
+func (p *Pager[T]) ValuesIn(api.PageLinker) ([]T, error) {
+	idx := p.getIdx
+	if idx > 0 {
+		// Return values lag by one since we increment in GetPage().
+		idx--
+	}
+
+	if len(p.ToReturn) <= idx {
+		return nil, clues.New("index out of bounds").
+			With("index", idx, "values", p.ToReturn)
+	}
+
+	return p.ToReturn[idx].Values, nil
+}
+
+// ---------------------------------------------------------------------------
+// delta pager
+// ---------------------------------------------------------------------------
+
+type DeltaPager[T any] struct {
+	ToReturn []PagerResult[T]
+	getIdx   int
+}
+
+func (p *DeltaPager[T]) GetPage(context.Context) (api.DeltaPageLinker, error) {
+	if len(p.ToReturn) <= p.getIdx {
+		return nil, clues.New("index out of bounds").
+			With("index", p.getIdx, "values", p.ToReturn)
+	}
+
+	idx := p.getIdx
+	p.getIdx++
+
+	link := DeltaNextLinks{
+		Next:  p.ToReturn[idx].NextLink,
+		Delta: p.ToReturn[idx].DeltaLink,
+	}
+
+	return &link, p.ToReturn[idx].Err
+}
+
+func (p *DeltaPager[T]) SetNext(string) {}
+func (p *DeltaPager[T]) Reset(context.Context) {}
+
+func (p *DeltaPager[T]) ValuesIn(api.PageLinker) ([]T, error) {
+	idx := p.getIdx
+	if idx > 0 {
+		// Return values lag by one since we increment in GetPage().
+		idx--
+	}
+
+	if len(p.ToReturn) <= idx {
+		return nil, clues.New("index out of bounds").
+			With("index", idx, "values", p.ToReturn)
+	}
+
+	return p.ToReturn[idx].Values, nil
+}
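A unit test might wire the new generic mock like this (a hypothetical sketch; the links and string IDs are placeholders, not taken from this commit):

	// two pages: the first carries a next link, the second ends the
	// walk with a delta link.
	next := "https://graph.microsoft.com/next-page"
	delta := "https://graph.microsoft.com/delta-token"

	pager := &mock.DeltaPager[string]{
		ToReturn: []mock.PagerResult[string]{
			{Values: []string{"id-1", "id-2"}, NextLink: &next},
			{Values: []string{"id-3"}, DeltaLink: &delta},
		},
	}

Because the mock is generic over T, the same two structs replace the per-model mocks (such as the deleted DrivePager) across the test suites.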
@@ -238,7 +238,7 @@ func (c Users) GetInfo(ctx context.Context, userID string) (*UserInfo, error) {
 			return nil, clues.Stack(err)
 		}

-		userInfo.Mailbox.QuotaExceeded = graph.IsErrQuotaExceeded(err)
+		mi.QuotaExceeded = graph.IsErrQuotaExceeded(err)
 	}

 	userInfo.Mailbox = mi
@@ -7,6 +7,7 @@ import (
 	"github.com/h2non/gock"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"

 	"github.com/alcionai/corso/src/internal/m365/graph"
@@ -250,3 +251,29 @@ func (suite *UsersIntgSuite) TestUsers_GetInfo_errors() {
 		})
 	}
 }
+
+func (suite *UsersIntgSuite) TestUsers_GetInfo_quotaExceeded() {
+	t := suite.T()
+	ctx, flush := tester.NewContext(t)
+
+	defer flush()
+	defer gock.Off()
+
+	gock.EnableNetworking()
+	gock.New(graphAPIHostURL).
+		// Wildcard match on the inbox folder ID.
+		Get(v1APIURLPath("users", suite.its.user.id, "mailFolders", "(.*)", "messages", "delta")).
+		Reply(403).
+		SetHeaders(
+			map[string]string{
+				"Content-Type": "application/json; odata.metadata=minimal; " +
+					"odata.streaming=true; IEEE754Compatible=false; charset=utf-8",
+			},
+		).
+		BodyString(`{"error":{"code":"ErrorQuotaExceeded","message":"The process failed to get the correct properties."}}`)
+
+	output, err := suite.its.gockAC.Users().GetInfo(ctx, suite.its.user.id)
+	require.NoError(t, err, clues.ToCore(err))
+
+	assert.True(t, output.Mailbox.QuotaExceeded)
+}
src/pkg/storage/testdata/storage.go (vendored, 3 changes)
@@ -2,7 +2,6 @@ package testdata

 import (
 	"os"
-	"testing"

 	"github.com/alcionai/clues"
 	"github.com/stretchr/testify/require"
@@ -28,7 +27,7 @@ var AWSStorageCredEnvs = []string{
 // Uses t.TempDir() to generate a unique config storage and caching directory for this
 // test. Suites that need to identify this value can retrieve it again from the common
 // configs.
-func NewPrefixedS3Storage(t *testing.T) storage.Storage {
+func NewPrefixedS3Storage(t tester.TestT) storage.Storage {
 	now := tester.LogTimeOfTest(t)

 	cfg, err := tconfig.ReadTestConfig()
@@ -1,6 +1,6 @@
 .PHONY: buildimage build dev shell check genclidocs _validatemdgen publish sync

-GO_VERSION := 1.19
+GO_VERSION := 1.20
 CORSO_BUILD_DIR := /tmp/.corsobuild
 CORSO_BUILD_CACHE := ${CORSO_BUILD_DIR}/cache
 CORSO_BUILD_MOD := ${CORSO_BUILD_DIR}/mod
@@ -54,10 +54,10 @@ then click **Add permissions**.
 | Calendars.ReadWrite | Application | Read and write calendars in all mailboxes |
 | Contacts.ReadWrite | Application | Read and write contacts in all mailboxes |
 | Files.ReadWrite.All | Application | Read and write files in all site collections |
-| Mail.ReadWrite | Application | Read and write mail in all mailboxes |
-| User.Read.All | Application | Read all users' full profiles |
-| Sites.FullControl.All | Application | Have full control of all site collections |
 | MailboxSettings.Read | Application | Read all user mailbox settings |
+| Mail.ReadWrite | Application | Read and write mail in all mailboxes |
+| Sites.FullControl.All | Application | Have full control of all site collections |
+| User.Read.All | Application | Read all users' full profiles |

 <!-- vale Microsoft.Spacing = YES -->

website/package-lock.json (generated, 14 changes)
@@ -20,7 +20,7 @@
 				"feather-icons": "^4.29.0",
 				"jarallax": "^2.1.3",
 				"mdx-mermaid": "^1.3.2",
-				"mermaid": "^10.3.1",
+				"mermaid": "^10.4.0",
 				"prism-react-renderer": "^1.3.5",
 				"react": "^17.0.2",
 				"react-dom": "^17.0.2",
@@ -9344,9 +9344,9 @@
 			}
 		},
 		"node_modules/mermaid": {
-			"version": "10.3.1",
-			"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.3.1.tgz",
-			"integrity": "sha512-hkenh7WkuRWPcob3oJtrN3W+yzrrIYuWF1OIfk/d0xGE8UWlvDhfexaHmDwwe8DKQgqMLI8DWEPwGprxkumjuw==",
+			"version": "10.4.0",
+			"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.4.0.tgz",
+			"integrity": "sha512-4QCQLp79lvz7UZxow5HUX7uWTPJOaQBVExduo91tliXC7v78i6kssZOPHxLL+Xs30KU72cpPn3g3imw/xm/gaw==",
 			"dependencies": {
 				"@braintree/sanitize-url": "^6.0.1",
 				"@types/d3-scale": "^4.0.3",
@@ -21856,9 +21856,9 @@
 			"integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="
 		},
 		"mermaid": {
-			"version": "10.3.1",
-			"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.3.1.tgz",
-			"integrity": "sha512-hkenh7WkuRWPcob3oJtrN3W+yzrrIYuWF1OIfk/d0xGE8UWlvDhfexaHmDwwe8DKQgqMLI8DWEPwGprxkumjuw==",
+			"version": "10.4.0",
+			"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.4.0.tgz",
+			"integrity": "sha512-4QCQLp79lvz7UZxow5HUX7uWTPJOaQBVExduo91tliXC7v78i6kssZOPHxLL+Xs30KU72cpPn3g3imw/xm/gaw==",
 			"requires": {
 				"@braintree/sanitize-url": "^6.0.1",
 				"@types/d3-scale": "^4.0.3",

@@ -26,7 +26,7 @@
 		"feather-icons": "^4.29.0",
 		"jarallax": "^2.1.3",
 		"mdx-mermaid": "^1.3.2",
-		"mermaid": "^10.3.1",
+		"mermaid": "^10.4.0",
 		"prism-react-renderer": "^1.3.5",
 		"react": "^17.0.2",
 		"react-dom": "^17.0.2",
@@ -79,7 +79,8 @@ const sidebars = {
 				'cli/corso-backup-list-onedrive',
 				'cli/corso-backup-details-onedrive',
 				'cli/corso-backup-delete-onedrive',
-				'cli/corso-restore-onedrive']
+				'cli/corso-restore-onedrive',
+				'cli/corso-export-onedrive']
 		},
 		{
 			type: 'category',
@@ -93,7 +94,8 @@ const sidebars = {
 				'cli/corso-backup-list-sharepoint',
 				'cli/corso-backup-details-sharepoint',
 				'cli/corso-backup-delete-sharepoint',
-				'cli/corso-restore-sharepoint']
+				'cli/corso-restore-sharepoint',
+				'cli/corso-export-sharepoint']
 		}
 	]
 },