merge main

neha-Gupta1 2023-08-31 13:57:02 +05:30
commit fbd62c8b0c
88 changed files with 2660 additions and 1418 deletions

View File

@ -155,7 +155,7 @@ jobs:
# AWS creds
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v2
uses: aws-actions/configure-aws-credentials@v3
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing
@ -172,6 +172,7 @@ jobs:
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
S3_BUCKET: ${{ secrets.CI_TESTS_S3_BUCKET }}
CORSO_ENABLE_GROUPS: true
run: |
set -euo pipefail
go test \
@ -225,7 +226,7 @@ jobs:
# AWS creds
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v2
uses: aws-actions/configure-aws-credentials@v3
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing
@ -242,6 +243,7 @@ jobs:
CORSO_SECONDARY_M365_TEST_USER_ID: ${{ vars.CORSO_SECONDARY_M365_TEST_USER_ID }}
CORSO_PASSPHRASE: ${{ secrets.INTEGRATION_TEST_CORSO_PASSPHRASE }}
S3_BUCKET: ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }}
CORSO_ENABLE_GROUPS: true
run: |
set -euo pipefail
go test \
@ -275,6 +277,7 @@ jobs:
env:
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-unit.log
LOG_GRAPH_REQUESTS: true
CORSO_ENABLE_GROUPS: true
steps:
- uses: actions/checkout@v3
@ -329,6 +332,7 @@ jobs:
env:
CORSO_LOG_FILE: ${{ github.workspace }}/testlog/run-fork.log
LOG_GRAPH_REQUESTS: true
CORSO_ENABLE_GROUPS: true
steps:
- name: Fail check if not repository_dispatch
if: github.event_name != 'repository_dispatch'
@ -373,7 +377,7 @@ jobs:
# AWS creds
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v2
uses: aws-actions/configure-aws-credentials@v3
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing

View File

@ -47,8 +47,8 @@ jobs:
run:
working-directory: src
##########################################################################################################################################
# setup
############################################################################
# setup
steps:
- uses: actions/checkout@v3
with:
@ -83,15 +83,14 @@ jobs:
# Use shorter-lived credentials obtained from assume-role since these
# runs haven't been taking long.
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v2
uses: aws-actions/configure-aws-credentials@v3
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing
aws-region: us-east-1
##########################################################################################################################################
# Repository commands
##########################################################################
# Repository commands
- name: Version Test
run: |
@ -105,6 +104,9 @@ jobs:
./corso repo init s3 \
--no-stats \
--hide-progress \
--retention-mode $(echo "${{ env.RETENTION_MODE }}" | tr '[:upper:]' '[:lower:]') \
--retention-duration "${{ env.RETENTION_DURATION }}h" \
--extend-retention \
--prefix ${{ env.PREFIX }} \
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
--succeed-if-exists \
@ -133,9 +135,8 @@ jobs:
exit 1
fi
##########################################################################################################################################
# Exchange
##########################################################################
# Exchange
- name: Backup exchange test
id: exchange-test
@ -158,8 +159,8 @@ jobs:
data=$( echo $resultjson | jq -r '.[0] | .id' )
echo result=$data >> $GITHUB_OUTPUT
##########################################################################################################################################
# Onedrive
##########################################################################
# Onedrive
- name: Backup onedrive test
id: onedrive-test
@ -183,9 +184,8 @@ jobs:
data=$( echo $resultjson | jq -r '.[0] | .id' )
echo result=$data >> $GITHUB_OUTPUT
##########################################################################################################################################
# Sharepoint test
##########################################################################
# Sharepoint test
- name: Backup sharepoint test
id: sharepoint-test
run: |
@ -209,9 +209,8 @@ jobs:
data=$( echo $resultjson | jq -r '.[0] | .id' )
echo result=$data >> $GITHUB_OUTPUT
##########################################################################################################################################
# Backup Exchange Deletion test
##########################################################################
# Backup Exchange Deletion test
- name: Backup Delete exchange test
id: delete-exchange-test
env:
@ -222,9 +221,8 @@ jobs:
echo -e "\nDelete Backup exchange \n" >> ${CORSO_LOG_FILE}
./longevity-test
##########################################################################################################################################
# Backup Onedrive Deletion test
##########################################################################
# Backup Onedrive Deletion test
- name: Backup Delete onedrive test
id: delete-onedrive-test
env:
@ -235,9 +233,8 @@ jobs:
echo -e "\nDelete Backup onedrive \n" >> ${CORSO_LOG_FILE}
./longevity-test
##########################################################################################################################################
# Backup Sharepoint Deletion test
##########################################################################
# Backup Sharepoint Deletion test
- name: Backup Delete Sharepoint test
id: delete-sharepoint-test
env:
@ -248,67 +245,62 @@ jobs:
echo -e "\nDelete Backup sharepoint \n" >> ${CORSO_LOG_FILE}
./longevity-test
##########################################################################################################################################
##########################################################################
# Export OneDrive Test
- name: OneDrive Export test
run: |
set -euo pipefail
echo -e "\Export OneDrive test\n" >> ${CORSO_LOG_FILE}
# skipped until supported
# Export OneDrive Test
# - name: OneDrive Export test
# run: |
# set -euo pipefail
# echo -e "\Export OneDrive test\n" >> ${CORSO_LOG_FILE}
echo -e "\Export OneDrive test - first entry\n" >> ${CORSO_LOG_FILE}
./corso backup list onedrive 2>/dev/null | tail -n+2 | head -n1 | awk '{print $1}' |
while read -r line; do
./corso export onedrive \
"/tmp/corso-export--$line" \
--no-stats \
--backup "$line" \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_onedrive_first.txt
done
# echo -e "\Export OneDrive test - first entry\n" >> ${CORSO_LOG_FILE}
# ./corso backup list onedrive 2>/dev/null | tail -n+2 | head -n1 | awk '{print $1}' |
# while read -r line; do
# ./corso export onedrive \
# "/tmp/corso-export--$line" \
# --no-stats \
# --backup "$line" \
# 2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_onedrive_first.txt
# done
echo -e "\Export OneDrive test - last entry\n" >> ${CORSO_LOG_FILE}
./corso backup list onedrive 2>/dev/null | tail -n1 | awk '{print $1}' |
while read -r line; do
./corso export onedrive \
"/tmp/corso-export--$line" \
--no-stats \
--backup "$line" \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_onedrive_last.txt
done
# echo -e "\Export OneDrive test - last entry\n" >> ${CORSO_LOG_FILE}
# ./corso backup list onedrive 2>/dev/null | tail -n1 | awk '{print $1}' |
# while read -r line; do
# ./corso export onedrive \
# "/tmp/corso-export--$line" \
# --no-stats \
# --backup "$line" \
# 2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_onedrive_last.txt
# done
##########################################################################
# Export SharePoint Test
- name: SharePoint Export test
run: |
set -euo pipefail
echo -e "\Export SharePoint test\n" >> ${CORSO_LOG_FILE}
##########################################################################################################################################
echo -e "\Export SharePoint test - first entry\n" >> ${CORSO_LOG_FILE}
./corso backup list sharepoint 2>/dev/null | tail -n+2 | head -n1 | awk '{print $1}' |
while read -r line; do
./corso export sharepoint \
"/tmp/corso-export--$line" \
--no-stats \
--backup "$line" \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_sharepoint_first.txt
done
# skipped until supported
# Export SharePoint Test
# - name: SharePoint Export test
# run: |
# set -euo pipefail
# echo -e "\Export SharePoint test\n" >> ${CORSO_LOG_FILE}
echo -e "\Export SharePoint test - last entry\n" >> ${CORSO_LOG_FILE}
./corso backup list sharepoint 2>/dev/null | tail -n1 | awk '{print $1}' |
while read -r line; do
./corso export sharepoint \
"/tmp/corso-export--$line" \
--no-stats \
--backup "$line" \
2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_sharepoint_last.txt
done
# echo -e "\Export SharePoint test - first entry\n" >> ${CORSO_LOG_FILE}
# ./corso backup list sharepoint 2>/dev/null | tail -n+2 | head -n1 | awk '{print $1}' |
# while read -r line; do
# ./corso export sharepoint \
# "/tmp/corso-export--$line" \
# --no-stats \
# --backup "$line" \
# 2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_sharepoint_first.txt
# done
# echo -e "\Export SharePoint test - last entry\n" >> ${CORSO_LOG_FILE}
# ./corso backup list sharepoint 2>/dev/null | tail -n1 | awk '{print $1}' |
# while read -r line; do
# ./corso export sharepoint \
# "/tmp/corso-export--$line" \
# --no-stats \
# --backup "$line" \
# 2>&1 | tee ${{ env.CORSO_LOG_DIR }}/export_sharepoint_last.txt
# done
##########################################################################################################################################
# Maintenance test
##########################################################################
# Maintenance test
- name: Maintenance test Daily
id: maintenance-test-daily
run: |
@ -362,7 +354,7 @@ jobs:
--bucket ${{ secrets.CI_RETENTION_TESTS_S3_BUCKET }} \
--bucket-prefix ${{ env.PREFIX }} \
--retention-mode ${{ env.RETENTION_MODE }} \
--live-retention-duration "$((${{ env.RETENTION_DURATION}}-1))h" \
--live-retention-duration "$((${{ env.RETENTION_DURATION }}-1))h" \
--prefix "kopia.blobcfg" \
--prefix "kopia.repository" \
--prefix "p" \
@ -370,10 +362,8 @@ jobs:
--prefix "x"
fi
##########################################################################################################################################
# Logging & Notifications
##########################################################################
# Logging & Notifications
# Upload the original go test output as an artifact for later review.
- name: Upload test log
if: always()

View File

@ -59,6 +59,7 @@ jobs:
AZURE_CLIENT_ID_NAME: ${{ needs.SetM365App.outputs.client_id_env }}
AZURE_CLIENT_SECRET_NAME: ${{ needs.SetM365App.outputs.client_secret_env }}
CLIENT_APP_SLOT: ${{ needs.SetM365App.outputs.client_app_slot }}
CORSO_ENABLE_GROUPS: true
steps:
- uses: actions/checkout@v3
@ -75,7 +76,7 @@ jobs:
# AWS creds
- name: Configure AWS credentials from Test account
uses: aws-actions/configure-aws-credentials@v2
uses: aws-actions/configure-aws-credentials@v3
with:
role-to-assume: ${{ secrets.AWS_IAM_ROLE }}
role-session-name: integration-testing

View File

@ -39,6 +39,7 @@ jobs:
CORSO_LOG_FILE: ${{ github.workspace }}/src/testlog/run-sanity.log
RESTORE_DEST_PFX: Corso_Test_Sanity_
TEST_USER: ${{ github.event.inputs.user != '' && github.event.inputs.user || secrets.CORSO_M365_TEST_USER_ID }}
CORSO_ENABLE_GROUPS: true
defaults:
run:

View File

@ -7,15 +7,29 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased] (beta)
### Changed
- SharePoint exported libraries are now exported with a `Libraries` prefix.
## [v0.12.0] (beta) - 2023-08-29
### Added
- Added `export` command to export data from OneDrive and SharePoint backups as individual files or as a single zip file.
- Restore commands now accept an optional resource override with the `--to-resource` flag. This allows restores to recreate backup data within different mailboxes, sites, and users.
- Improve `--mask-sensitive-data` logging mode.
- Reliability: Handle connection cancellation and resets observed when backing up or restoring large data sets.
- Reliability: Recover from Graph SDK panics when the Graph API returns incomplete responses.
- Performance: Improve backup delete performance by batching multiple storage operations into a single operation.
### Fixed
- SharePoint document libraries deleted after the last backup can now be restored.
- Restore requires the protected resource to have access to the service being restored.
- SharePoint data from multiple document libraries are not merged in exports
- `corso backup delete` was not removing the backup details data associated with that snapshot
- Fix OneDrive restores could fail with a concurrent map write error
- Fix backup list displaying backups that had errors
- Fix OneDrive backup could fail if item was deleted during backup
- Exchange backups would fail attempting to use delta tokens even if the user was over quota
### Added
- Added option to export data from OneDrive and SharePoint backups as individual files or as a single zip file.
## [v0.11.1] (beta) - 2023-07-20

View File

@ -1,4 +1,4 @@
FROM golang:1.19-alpine as builder
FROM golang:1.20-alpine as builder
WORKDIR /go/src/app
COPY src .

View File

@ -3,6 +3,7 @@ package backup
import (
"context"
"fmt"
"os"
"strings"
"github.com/alcionai/clues"
@ -39,8 +40,9 @@ var serviceCommands = []func(cmd *cobra.Command) *cobra.Command{
addExchangeCommands,
addOneDriveCommands,
addSharePointCommands,
addGroupsCommands,
addTeamsCommands,
// awaiting release
// addGroupsCommands,
// addTeamsCommands,
}
// AddCommands attaches all `corso backup * *` commands to the parent.
@ -55,6 +57,12 @@ func AddCommands(cmd *cobra.Command) {
for _, addBackupTo := range serviceCommands {
addBackupTo(subCommand)
}
// delete after release
if len(os.Getenv("CORSO_ENABLE_GROUPS")) > 0 {
addGroupsCommands(subCommand)
addTeamsCommands(subCommand)
}
}
}
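The block above gates the not-yet-released Groups and Teams commands behind a plain environment-variable check. Below is a minimal, self-contained sketch of the same pattern; newGroupsCmd and newTeamsCmd are illustrative placeholders, not corso's actual addGroupsCommands/addTeamsCommands constructors.

package main

import (
	"os"

	"github.com/spf13/cobra"
)

// Placeholder constructors; the real ones live in the corso CLI packages.
func newGroupsCmd() *cobra.Command { return &cobra.Command{Use: "groups"} }
func newTeamsCmd() *cobra.Command  { return &cobra.Command{Use: "teams"} }

func addCommands(parent *cobra.Command) {
	// Released services would be registered unconditionally here.

	// Gate unreleased services behind an env var; remove the check after release.
	if len(os.Getenv("CORSO_ENABLE_GROUPS")) > 0 {
		parent.AddCommand(newGroupsCmd())
		parent.AddCommand(newTeamsCmd())
	}
}

func main() {
	root := &cobra.Command{Use: "corso"}
	addCommands(root)

	if err := root.Execute(); err != nil {
		os.Exit(1)
	}
}

Setting CORSO_ENABLE_GROUPS=true in the environment (as the workflow files above now do) exposes the extra subcommands; leaving it unset hides them with no code change.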
@ -247,7 +255,10 @@ func runBackups(
return Only(ctx, clues.Wrap(berrs.Failure(), "Unable to retrieve backup results from storage"))
}
Info(ctx, "Completed Backups:")
if len(bups) > 0 {
Info(ctx, "Completed Backups:")
}
backup.PrintAll(ctx, bups)
if len(errs) > 0 {

View File

@ -3,6 +3,7 @@ package export
import (
"context"
"errors"
"os"
"github.com/alcionai/clues"
"github.com/spf13/cobra"
@ -21,8 +22,9 @@ import (
var exportCommands = []func(cmd *cobra.Command) *cobra.Command{
addOneDriveCommands,
addSharePointCommands,
addGroupsCommands,
addTeamsCommands,
// awaiting release
// addGroupsCommands,
// addTeamsCommands,
}
// AddCommands attaches all `corso export * *` commands to the parent.
@ -33,6 +35,12 @@ func AddCommands(cmd *cobra.Command) {
for _, addExportTo := range exportCommands {
addExportTo(exportC)
}
// delete after release
if len(os.Getenv("CORSO_ENABLE_GROUPS")) > 0 {
addGroupsCommands(exportC)
addTeamsCommands(exportC)
}
}
const exportCommand = "export"

View File

@ -2,6 +2,7 @@ package restore
import (
"context"
"os"
"github.com/alcionai/clues"
"github.com/pkg/errors"
@ -20,6 +21,9 @@ var restoreCommands = []func(cmd *cobra.Command) *cobra.Command{
addExchangeCommands,
addOneDriveCommands,
addSharePointCommands,
// awaiting release
// addGroupsCommands,
// addTeamsCommands,
}
// AddCommands attaches all `corso restore * *` commands to the parent.
@ -30,6 +34,12 @@ func AddCommands(cmd *cobra.Command) {
for _, addRestoreTo := range restoreCommands {
addRestoreTo(restoreC)
}
// delete after release
if len(os.Getenv("CORSO_ENABLE_GROUPS")) > 0 {
addGroupsCommands(restoreC)
addTeamsCommands(restoreC)
}
}
const restoreCommand = "restore"

View File

@ -8,7 +8,7 @@ require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.1
github.com/alcionai/clues v0.0.0-20230728164842-7dc4795a43e4
github.com/armon/go-metrics v0.4.1
github.com/aws/aws-sdk-go v1.44.331
github.com/aws/aws-sdk-go v1.44.334
github.com/aws/aws-xray-sdk-go v1.8.1
github.com/cenkalti/backoff/v4 v4.2.1
github.com/google/uuid v1.3.1

View File

@ -66,8 +66,8 @@ github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/
github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
github.com/aws/aws-sdk-go v1.44.331 h1:hEwdOTv6973uegCUY2EY8jyyq0OUg9INc0HOzcu2bjw=
github.com/aws/aws-sdk-go v1.44.331/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-sdk-go v1.44.334 h1:h2bdbGb//fez6Sv6PaYv868s9liDeoYM6hYsAqTB4MU=
github.com/aws/aws-sdk-go v1.44.334/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo=
github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A=
github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A=

View File

@ -3,12 +3,14 @@ package kopia
import (
"context"
"errors"
"strings"
"time"
"github.com/alcionai/clues"
"github.com/kopia/kopia/repo/manifest"
"github.com/kopia/kopia/snapshot"
"golang.org/x/exp/maps"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/model"
@ -17,6 +19,12 @@ import (
"github.com/alcionai/corso/src/pkg/store"
)
const (
serviceCatTagPrefix = "sc-"
kopiaPathLabel = "path"
tenantTag = "tenant"
)
// cleanupOrphanedData uses bs and mf to lookup all models/snapshots for backups
// and deletes items that are older than nowFunc() - gcBuffer (cutoff) that are
// not "complete" backups with:
@ -67,8 +75,11 @@ func cleanupOrphanedData(
// 1. check if there's a corresponding backup for them
// 2. delete the details if they're orphaned
deets = map[manifest.ID]struct{}{}
// dataSnaps is a hash set of the snapshot IDs for item data snapshots.
dataSnaps = map[manifest.ID]struct{}{}
// dataSnaps is a hash map of the snapshot IDs for item data snapshots.
dataSnaps = map[manifest.ID]*manifest.EntryMetadata{}
// toDelete is the set of objects to delete from kopia. It starts out with
// all items and has ineligible items removed from it.
toDelete = map[manifest.ID]struct{}{}
)
cutoff := nowFunc().Add(-gcBuffer)
@ -81,9 +92,11 @@ func cleanupOrphanedData(
continue
}
toDelete[snap.ID] = struct{}{}
k, _ := makeTagKV(TagBackupCategory)
if _, ok := snap.Labels[k]; ok {
dataSnaps[snap.ID] = struct{}{}
dataSnaps[snap.ID] = snap
continue
}
@ -106,6 +119,7 @@ func cleanupOrphanedData(
}
deets[d.ModelStoreID] = struct{}{}
toDelete[d.ModelStoreID] = struct{}{}
}
// Get all backup models.
@ -114,8 +128,16 @@ func cleanupOrphanedData(
return clues.Wrap(err, "getting all backup models")
}
toDelete := maps.Clone(deets)
maps.Copy(toDelete, dataSnaps)
var (
// assistBackups is the set of backups that have a
// * label denoting they're an assist backup
// * item data snapshot
// * details snapshot
assistBackups []*backup.Backup
// mostRecentMergeBase maps the reason to its most recent merge base's
// creation time. The map key is created using keysForBackup.
mostRecentMergeBase = map[string]time.Time{}
)
for _, bup := range bups {
// Don't even try to see if this needs garbage collected because it's not
@ -150,7 +172,7 @@ func cleanupOrphanedData(
// This isn't expected to really pop up, but it's possible if this
// function is run concurrently with either a backup delete or another
// instance of this function.
logger.Ctx(ctx).Debugw(
logger.Ctx(ctx).Infow(
"backup model not found",
"search_backup_id", bup.ModelStoreID)
@ -162,7 +184,7 @@ func cleanupOrphanedData(
ssid = bm.DetailsID
}
_, dataOK := dataSnaps[manifest.ID(bm.SnapshotID)]
d, dataOK := dataSnaps[manifest.ID(bm.SnapshotID)]
_, deetsOK := deets[manifest.ID(ssid)]
// All data is present, we shouldn't garbage collect this backup.
@ -170,6 +192,59 @@ func cleanupOrphanedData(
delete(toDelete, bup.ModelStoreID)
delete(toDelete, manifest.ID(bm.SnapshotID))
delete(toDelete, manifest.ID(ssid))
// This is a little messy to have, but can simplify the logic below.
// The state of tagging in corso isn't all that great right now and we'd
// really like to consolidate tags and clean them up. For now, we're
// going to copy tags that are related to Reasons for a backup from the
// item data snapshot to the backup model. This makes the function that
// checks whether assist backups should be garbage collected a bit simpler
// because it only has to source data from backup models.
if err := transferTags(d, &bm); err != nil {
logger.CtxErr(ctx, err).Infow(
"transferring legacy tags to backup model",
"snapshot_id", d.ID,
"backup_id", bup.ID)
// Continuing here means the base won't be eligible for old assist
// base garbage collection or as a newer merge base timestamp.
//
// We could add more logic to eventually delete the base if it's an
// assist base. If it's a merge base then it should be mostly harmless
// as a newer merge base should cause older assist bases to be garbage
// collected.
//
// Either way, I don't really expect to see failures when transferring
// tags so not worth adding extra code for unless we see it become a
// problem.
continue
}
// Add to the assist backup set so that we can attempt to garbage collect
// older assist backups below.
if bup.Tags[model.BackupTypeTag] == model.AssistBackup {
assistBackups = append(assistBackups, &bm)
continue
}
// If it's a merge base track the time it was created so we can check
// later if we should remove all assist bases or not.
tags, err := keysForBackup(&bm)
if err != nil {
logger.CtxErr(ctx, err).
Info("getting Reason keys for merge base. May keep an additional assist base")
}
for _, tag := range tags {
t := mostRecentMergeBase[tag]
if t.After(bm.CreationTime) {
// Don't update the merge base time if we've already seen a newer
// merge base.
continue
}
mostRecentMergeBase[tag] = bm.CreationTime
}
}
}
@ -178,14 +253,200 @@ func cleanupOrphanedData(
"num_items", len(toDelete),
"kopia_ids", maps.Keys(toDelete))
// This will technically save a superset of the assist bases we should keep.
// The reason for that is that we only add something to the set of assist
// bases after we've excluded backups in the buffer time zone. For example
// we could discover that of the set of assist bases we have, something is
// the youngest and exclude it from garbage collection. However, when looking
// at the set of all assist bases, including those in the buffer zone, it's
// possible the one we thought was the youngest actually isn't and could be
// garbage collected.
//
// This sort of edge case will ideally happen only for a few assist bases at
// a time. Assuming this function is run somewhat periodically, missing these
// edge cases is alright because they'll get picked up on a subsequent run.
assistItems := collectOldAssistBases(ctx, mostRecentMergeBase, assistBackups)
logger.Ctx(ctx).Debugw(
"garbage collecting old assist bases",
"assist_num_items", len(assistItems),
"assist_kopia_ids", assistItems)
assistItems = append(assistItems, maps.Keys(toDelete)...)
// Use single atomic batch delete operation to cleanup to keep from making a
// bunch of manifest content blobs.
if err := bs.DeleteWithModelStoreIDs(ctx, maps.Keys(toDelete)...); err != nil {
if err := bs.DeleteWithModelStoreIDs(ctx, assistItems...); err != nil {
return clues.Wrap(err, "deleting orphaned data")
}
// TODO(ashmrtn): Do some pruning of assist backup models so we don't keep
// them around forever.
return nil
}
var skipKeys = []string{
TagBackupID,
TagBackupCategory,
}
func transferTags(snap *manifest.EntryMetadata, bup *backup.Backup) error {
tenant, err := decodeElement(snap.Labels[kopiaPathLabel])
if err != nil {
return clues.Wrap(err, "decoding tenant from label")
}
if bup.Tags == nil {
bup.Tags = map[string]string{}
}
bup.Tags[tenantTag] = tenant
skipTags := map[string]struct{}{}
for _, k := range skipKeys {
key, _ := makeTagKV(k)
skipTags[key] = struct{}{}
}
// Safe to check only this because the old field was deprecated prior to the
// tagging of assist backups and this function only deals with assist
// backups.
roid := bup.ProtectedResourceID
roidK, _ := makeTagKV(roid)
skipTags[roidK] = struct{}{}
// This is hacky, but right now we don't have a good way to get only the
// Reason tags for something. We can however, find them by searching for all
// the "normalized" tags and then discarding the ones we know aren't
// reasons. Unfortunately this won't work if custom tags are added to the
// backup that we don't know about.
//
// Convert them to the newer format that we'd like to have where the
// service/category tags have the form "sc-<service><category>".
for tag := range snap.Labels {
if _, ok := skipTags[tag]; ok || !strings.HasPrefix(tag, userTagPrefix) {
continue
}
bup.Tags[strings.Replace(tag, userTagPrefix, serviceCatTagPrefix, 1)] = "0"
}
return nil
}
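The following is a standalone sketch of the label rewrite transferTags performs, assuming illustrative prefix and tag values; the real userTagPrefix constant is defined elsewhere in this package and may differ.

package main

import (
	"fmt"
	"strings"
)

// Assumed values for illustration only.
const (
	userTagPrefix       = "tag:"
	serviceCatTagPrefix = "sc-"
)

func main() {
	// A snapshot label carrying a Reason in the legacy user-tag form.
	legacy := userTagPrefix + "ExchangeServiceEmailCategory"

	// transferTags rewrites it into the newer "sc-<service><category>" form
	// before storing it on the backup model, with a placeholder value of "0".
	converted := strings.Replace(legacy, userTagPrefix, serviceCatTagPrefix, 1)

	fmt.Println(converted) // sc-ExchangeServiceEmailCategory
}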
// keysForBackup returns a slice of string keys representing the Reasons for this
// backup. If there's a problem creating the keys an error is returned.
func keysForBackup(bup *backup.Backup) ([]string, error) {
var (
res []string
// Safe to pull from this field since assist backups came after we switched
// to using ProtectedResourceID.
roid = bup.ProtectedResourceID
)
tenant := bup.Tags[tenantTag]
if len(tenant) == 0 {
// We can skip this backup. It won't get garbage collected, but it also
// won't result in incorrect behavior overall.
return nil, clues.New("missing tenant tag in backup").
With("backup_id", bup.ID)
}
for tag := range bup.Tags {
if strings.HasPrefix(tag, serviceCatTagPrefix) {
// Precise way we concatenate all this info doesn't really matter as
// long as it's consistent for all backups in the set and includes all
// the pieces we need to ensure uniqueness across.
fullTag := tenant + roid + tag
res = append(res, fullTag)
}
}
return res, nil
}
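To make the key construction concrete, here is a minimal sketch of the concatenation keysForBackup performs; the tag value is illustrative, and only the tenant + resource + "sc-" tag composition matters.

package main

import (
	"fmt"
	"strings"
)

const serviceCatTagPrefix = "sc-"

// keysFor mirrors keysForBackup above: every "sc-"-prefixed tag is combined
// with the tenant and protected resource ID so keys are unique per
// (tenant, resource, service, category).
func keysFor(tenant, resourceID string, tags map[string]string) []string {
	var res []string

	for tag := range tags {
		if strings.HasPrefix(tag, serviceCatTagPrefix) {
			res = append(res, tenant+resourceID+tag)
		}
	}

	return res
}

func main() {
	tags := map[string]string{
		"sc-ExchangeServiceEmailCategory": "0",  // illustrative Reason tag
		"backup-id":                       "b1", // ignored: no "sc-" prefix
	}

	fmt.Println(keysFor("tenant1", "ro", tags))
	// [tenant1rosc-ExchangeServiceEmailCategory]
}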
func collectOldAssistBases(
ctx context.Context,
mostRecentMergeBase map[string]time.Time,
bups []*backup.Backup,
) []manifest.ID {
// maybeDelete is the set of backups that could be deleted. It starts out as
// the set of all backups and has ineligible backups removed from it.
maybeDelete := map[manifest.ID]*backup.Backup{}
// Figure out which backups have overlapping reasons. A single backup can
// appear in multiple slices in the map, one for each Reason associated with
// it.
bupsByReason := map[string][]*backup.Backup{}
for _, bup := range bups {
tags, err := keysForBackup(bup)
if err != nil {
logger.CtxErr(ctx, err).Error("not checking backup for garbage collection")
continue
}
maybeDelete[manifest.ID(bup.ModelStoreID)] = bup
for _, tag := range tags {
bupsByReason[tag] = append(bupsByReason[tag], bup)
}
}
// For each set of backups we found, sort them by time. Mark all but the
// youngest backup in each group as eligible for garbage collection.
//
// We implement this process as removing backups from the set of potential
// backups to delete because it's possible for a backup to not be the
// youngest for one Reason but be the youngest for a different Reason (i.e.
// most recent exchange mail backup but not the most recent exchange
// contacts backup). A simple delete operation in the map is sufficient to
// remove a backup even if it's only the youngest for a single Reason.
// Otherwise we'd need to do another pass after this to determine the
// isYoungest status for all Reasons in the backup.
//
// TODO(ashmrtn): Handle concurrent backups somehow? Right now backups that
// have overlapping start and end times aren't explicitly handled.
for tag, bupSet := range bupsByReason {
if len(bupSet) == 0 {
continue
}
// Sort in reverse chronological order so that we can just remove the zeroth
// item from the delete set instead of getting the slice length.
// Unfortunately this could also put us in the pathological case where almost
// all items need to be swapped since in theory kopia returns results in
// chronological order and we're processing them in the order kopia returns
// them.
slices.SortStableFunc(bupSet, func(a, b *backup.Backup) int {
return -a.CreationTime.Compare(b.CreationTime)
})
// Only remove the youngest assist base from the deletion set if we don't
// have a merge base that's younger than it. We don't need to check if the
// value is in the map here because the zero time is always at least as old
// as the times we'll see in our backups (if we see the zero time in our
// backup it's a bug but will still pass the check to keep the backup).
if t := mostRecentMergeBase[tag]; !bupSet[0].CreationTime.Before(t) {
delete(maybeDelete, manifest.ID(bupSet[0].ModelStoreID))
}
}
res := make([]manifest.ID, 0, 3*len(maybeDelete))
// For all items remaining in the delete set, generate the final set of items
// to delete. This set includes the data snapshot ID, details snapshot ID, and
// backup model ID to delete for each backup.
for bupID, bup := range maybeDelete {
// Don't need to check if we use StreamStoreID or DetailsID because
// DetailsID was deprecated prior to tagging backups as assist backups.
// Since the input set is only assist backups there's no overlap between the
// two implementations.
res = append(
res,
bupID,
manifest.ID(bup.SnapshotID),
manifest.ID(bup.StreamStoreID))
}
return res
}
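Below is a small, self-contained sketch of the retention rule collectOldAssistBases implements: within one Reason group, only the youngest assist base survives, and only if no merge base is newer than it. All times here are illustrative.

package main

import (
	"fmt"
	"sort"
	"time"
)

func main() {
	base := time.Date(2023, 8, 1, 0, 0, 0, 0, time.UTC)

	// Three assist bases sharing the same Reason.
	assistBases := []time.Time{
		base,
		base.Add(time.Second),
		base.Add(time.Minute),
	}

	// Most recent merge base for that Reason; older than the newest assist base.
	mostRecentMergeBase := base.Add(30 * time.Second)

	// Sort newest-first, mirroring the reverse-chronological sort above.
	sort.Slice(assistBases, func(i, j int) bool {
		return assistBases[i].After(assistBases[j])
	})

	for i, t := range assistBases {
		// Keep only the youngest, and only if it isn't older than the merge base.
		keep := i == 0 && !t.Before(mostRecentMergeBase)
		fmt.Printf("assist base %s keep=%v\n", t.Format(time.RFC3339), keep)
	}
	// The base+1m entry is kept; base and base+1s remain garbage-collection candidates.
}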

View File

@ -15,6 +15,8 @@ import (
"github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/path"
)
type BackupCleanupUnitSuite struct {
@ -163,6 +165,58 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
}
}
bupCurrent2 := func() *backup.Backup {
return &backup.Backup{
BaseModel: model.BaseModel{
ID: model.StableID("current-bup-id-2"),
ModelStoreID: manifest.ID("current-bup-msid-2"),
},
SnapshotID: "current-snap-msid-2",
StreamStoreID: "current-deets-msid-2",
}
}
snapCurrent2 := func() *manifest.EntryMetadata {
return &manifest.EntryMetadata{
ID: "current-snap-msid-2",
Labels: map[string]string{
backupTag: "0",
},
}
}
deetsCurrent2 := func() *manifest.EntryMetadata {
return &manifest.EntryMetadata{
ID: "current-deets-msid-2",
}
}
bupCurrent3 := func() *backup.Backup {
return &backup.Backup{
BaseModel: model.BaseModel{
ID: model.StableID("current-bup-id-3"),
ModelStoreID: manifest.ID("current-bup-msid-3"),
},
SnapshotID: "current-snap-msid-3",
StreamStoreID: "current-deets-msid-3",
}
}
snapCurrent3 := func() *manifest.EntryMetadata {
return &manifest.EntryMetadata{
ID: "current-snap-msid-3",
Labels: map[string]string{
backupTag: "0",
},
}
}
deetsCurrent3 := func() *manifest.EntryMetadata {
return &manifest.EntryMetadata{
ID: "current-deets-msid-3",
}
}
// Legacy backup with details in separate model.
bupLegacy := func() *backup.Backup {
return &backup.Backup{
@ -261,9 +315,53 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
return &res
}
manifestWithReasons := func(
m *manifest.EntryMetadata,
tenantID string,
reasons ...identity.Reasoner,
) *manifest.EntryMetadata {
res := *m
if res.Labels == nil {
res.Labels = map[string]string{}
}
res.Labels[kopiaPathLabel] = encodeAsPath(tenantID)
// Add the given reasons.
for _, r := range reasons {
for _, k := range tagKeys(r) {
key, _ := makeTagKV(k)
res.Labels[key] = "0"
}
}
// Also add other common reasons on item data snapshots.
k, _ := makeTagKV(TagBackupCategory)
res.Labels[k] = "0"
return &res
}
backupWithTime := func(mt time.Time, b *backup.Backup) *backup.Backup {
res := *b
res.ModTime = mt
res.CreationTime = mt
return &res
}
backupWithResource := func(protectedResource string, isAssist bool, b *backup.Backup) *backup.Backup {
res := *b
res.ProtectedResourceID = protectedResource
if isAssist {
if res.Tags == nil {
res.Tags = map[string]string{}
}
res.Tags[model.BackupTypeTag] = model.AssistBackup
}
return &res
}
@ -529,6 +627,230 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
buffer: 24 * time.Hour,
expectErr: assert.NoError,
},
// Tests dealing with assist base cleanup.
{
// Test that even if we have multiple assist bases with the same
// Reason(s), none of them are garbage collected if they are within the
// buffer period used to exclude recently created backups from garbage
// collection.
name: "AssistBase NotYoungest InBufferTime Noops",
snapshots: []*manifest.EntryMetadata{
manifestWithReasons(
manifestWithTime(baseTime, snapCurrent()),
"tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime, deetsCurrent()),
manifestWithReasons(
manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
"tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
},
backups: []backupRes{
{bup: backupWithResource("ro", true, backupWithTime(baseTime, bupCurrent()))},
{bup: backupWithResource("ro", true, backupWithTime(baseTime.Add(time.Second), bupCurrent2()))},
},
time: baseTime,
buffer: 24 * time.Hour,
expectErr: assert.NoError,
},
{
// Test that an assist base that has the same Reasons as a newer assist
// base is garbage collected when it's outside the buffer period.
name: "AssistBases NotYoungest CausesCleanup",
snapshots: []*manifest.EntryMetadata{
manifestWithReasons(
manifestWithTime(baseTime, snapCurrent()),
"tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime, deetsCurrent()),
manifestWithReasons(
manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
"tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
manifestWithReasons(
manifestWithTime(baseTime.Add(time.Minute), snapCurrent3()),
"tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Minute), deetsCurrent3()),
},
backups: []backupRes{
{bup: backupWithResource("ro", true, backupWithTime(baseTime, bupCurrent()))},
{bup: backupWithResource("ro", true, backupWithTime(baseTime.Add(time.Second), bupCurrent2()))},
{bup: backupWithResource("ro", true, backupWithTime(baseTime.Add(time.Minute), bupCurrent3()))},
},
expectDeleteIDs: []manifest.ID{
snapCurrent().ID,
deetsCurrent().ID,
manifest.ID(bupCurrent().ModelStoreID),
snapCurrent2().ID,
deetsCurrent2().ID,
manifest.ID(bupCurrent2().ModelStoreID),
},
time: baseTime.Add(48 * time.Hour),
buffer: 24 * time.Hour,
expectErr: assert.NoError,
},
{
// Test that the most recent assist base is garbage collected if there's a
// newer merge base that has the same Reasons as the assist base.
name: "AssistBasesAndMergeBases NotYoungest CausesCleanupForAssistBase",
snapshots: []*manifest.EntryMetadata{
manifestWithReasons(
manifestWithTime(baseTime, snapCurrent()),
"tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime, deetsCurrent()),
manifestWithReasons(
manifestWithTime(baseTime.Add(time.Minute), snapCurrent2()),
"tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Minute), deetsCurrent2()),
manifestWithReasons(
manifestWithTime(baseTime.Add(time.Second), snapCurrent3()),
"tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Second), deetsCurrent3()),
},
backups: []backupRes{
{bup: backupWithResource("ro", true, backupWithTime(baseTime, bupCurrent()))},
{bup: backupWithResource("ro", false, backupWithTime(baseTime.Add(time.Minute), bupCurrent2()))},
{bup: backupWithResource("ro", false, backupWithTime(baseTime.Add(-time.Second), bupCurrent3()))},
},
expectDeleteIDs: []manifest.ID{
snapCurrent().ID,
deetsCurrent().ID,
manifest.ID(bupCurrent().ModelStoreID),
},
time: baseTime.Add(48 * time.Hour),
buffer: 24 * time.Hour,
expectErr: assert.NoError,
},
{
// Test that an assist base that is not the most recent for Reason A but
// is the most recent for Reason B is not garbage collected.
name: "AssistBases YoungestInOneReason Noops",
snapshots: []*manifest.EntryMetadata{
manifestWithReasons(
manifestWithTime(baseTime, snapCurrent()),
"tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory),
NewReason("", "ro", path.ExchangeService, path.ContactsCategory)),
manifestWithTime(baseTime, deetsCurrent()),
manifestWithReasons(
manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
"tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
},
backups: []backupRes{
{bup: backupWithResource("ro", true, backupWithTime(baseTime, bupCurrent()))},
{bup: backupWithResource("ro", true, backupWithTime(baseTime.Add(time.Second), bupCurrent2()))},
},
time: baseTime.Add(48 * time.Hour),
buffer: 24 * time.Hour,
expectErr: assert.NoError,
},
{
// Test that assist bases that have the same tenant, service, and category
// but different protected resources are not garbage collected. This is
// a test to ensure the Reason field is properly handled when finding the
// most recent assist base.
name: "AssistBases DifferentProtectedResources Noops",
snapshots: []*manifest.EntryMetadata{
manifestWithReasons(
manifestWithTime(baseTime, snapCurrent()),
"tenant1",
NewReason("", "ro1", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime, deetsCurrent()),
manifestWithReasons(
manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
"tenant1",
NewReason("", "ro2", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
},
backups: []backupRes{
{bup: backupWithResource("ro1", true, backupWithTime(baseTime, bupCurrent()))},
{bup: backupWithResource("ro2", true, backupWithTime(baseTime.Add(time.Second), bupCurrent2()))},
},
time: baseTime.Add(48 * time.Hour),
buffer: 24 * time.Hour,
expectErr: assert.NoError,
},
{
// Test that assist bases that have the same protected resource, service,
// and category but different tenants are not garbage collected. This is a
// test to ensure the Reason field is properly handled when finding the
// most recent assist base.
name: "AssistBases DifferentTenants Noops",
snapshots: []*manifest.EntryMetadata{
manifestWithReasons(
manifestWithTime(baseTime, snapCurrent()),
"tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime, deetsCurrent()),
manifestWithReasons(
manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
"tenant2",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
},
backups: []backupRes{
{bup: backupWithResource("ro", true, backupWithTime(baseTime, bupCurrent()))},
{bup: backupWithResource("ro", true, backupWithTime(baseTime.Add(time.Second), bupCurrent2()))},
},
time: baseTime.Add(48 * time.Hour),
buffer: 24 * time.Hour,
expectErr: assert.NoError,
},
{
// Test that if the tenant is not available for a given assist base that
// it's excluded from the garbage collection set. This behavior is
// conservative because it's quite likely that we could garbage collect
// the base without issue.
name: "AssistBases NoTenant SkipsBackup",
snapshots: []*manifest.EntryMetadata{
manifestWithReasons(
manifestWithTime(baseTime, snapCurrent()),
"",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime, deetsCurrent()),
manifestWithReasons(
manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
"tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
manifestWithReasons(
manifestWithTime(baseTime.Add(time.Minute), snapCurrent3()),
"tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Minute), deetsCurrent3()),
},
backups: []backupRes{
{bup: backupWithResource("ro", true, backupWithTime(baseTime, bupCurrent()))},
{bup: backupWithResource("ro", true, backupWithTime(baseTime.Add(time.Second), bupCurrent2()))},
{bup: backupWithResource("ro", true, backupWithTime(baseTime.Add(time.Minute), bupCurrent3()))},
},
time: baseTime.Add(48 * time.Hour),
buffer: 24 * time.Hour,
expectDeleteIDs: []manifest.ID{
snapCurrent2().ID,
deetsCurrent2().ID,
manifest.ID(bupCurrent2().ModelStoreID),
},
expectErr: assert.NoError,
},
}
for _, test := range table {

View File

@ -23,7 +23,7 @@ import (
)
func openKopiaRepo(
t *testing.T,
t tester.TestT,
ctx context.Context, //revive:disable-line:context-as-argument
) (*conn, error) {
st := storeTD.NewPrefixedS3Storage(t)

View File

@ -877,8 +877,8 @@ func traverseBaseDir(
stats *count.Bus,
) error {
ctx = clues.Add(ctx,
"old_dir_path", oldDirPath,
"expected_dir_path", expectedDirPath)
"old_parent_dir_path", oldDirPath,
"expected_parent_dir_path", expectedDirPath)
if depth >= maxInflateTraversalDepth {
return clues.New("base snapshot tree too tall").WithClues(ctx)

View File

@ -182,9 +182,9 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
folderPath, err := pb.ToDataLayerOneDrivePath("tenant", "owner", false)
require.NoError(t, err, clues.ToCore(err))
mbh := mock.DefaultOneDriveBH()
mbh := mock.DefaultOneDriveBH("a-user")
if test.service == path.SharePointService {
mbh = mock.DefaultSharePointBH()
mbh = mock.DefaultSharePointBH("a-site")
mbh.ItemInfo.SharePoint.Modified = now
mbh.ItemInfo.SharePoint.ItemName = stubItemName
} else {
@ -301,7 +301,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
require.NoError(t, err, clues.ToCore(err))
mbh := mock.DefaultOneDriveBH()
mbh := mock.DefaultOneDriveBH("a-user")
mbh.GI = mock.GetsItem{Err: assert.AnError}
mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
mbh.GetResps = []*http.Response{
@ -378,7 +378,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadUnauthorizedErrorRetry()
folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
require.NoError(t, err)
mbh := mock.DefaultOneDriveBH()
mbh := mock.DefaultOneDriveBH("a-user")
mbh.GI = mock.GetsItem{Item: stubItem}
mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
mbh.GetResps = []*http.Response{
@ -436,7 +436,7 @@ func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTim
folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
require.NoError(t, err, clues.ToCore(err))
mbh := mock.DefaultOneDriveBH()
mbh := mock.DefaultOneDriveBH("a-user")
mbh.ItemInfo = details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: "fakeName", Modified: time.Now()}}
mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
mbh.GetResps = []*http.Response{{
@ -587,7 +587,7 @@ func (suite *GetDriveItemUnitTestSuite) TestGetDriveItem_error() {
true,
false)
mbh := mock.DefaultOneDriveBH()
mbh := mock.DefaultOneDriveBH("a-user")
mbh.GI = mock.GetsItem{Item: stubItem}
mbh.GetResps = []*http.Response{{StatusCode: http.StatusOK}}
mbh.GetErrs = []error{test.err}
@ -766,7 +766,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
}
}
mbh := mock.DefaultOneDriveBH()
mbh := mock.DefaultOneDriveBH("a-user")
mbh.GI = test.mgi
mbh.ItemInfo = test.itemInfo
mbh.GetResps = resps
@ -932,7 +932,7 @@ func (suite *CollectionUnitTestSuite) TestItemExtensions() {
wg.Add(1)
mbh := mock.DefaultOneDriveBH()
mbh := mock.DefaultOneDriveBH("a-user")
mbh.GI = mock.GetsItem{Err: assert.AnError}
mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
mbh.GetResps = []*http.Response{

View File

@ -349,7 +349,7 @@ func (c *Collections) Get(
continue
}
p, err := c.handler.CanonicalPath(odConsts.DriveFolderPrefixBuilder(driveID), c.tenantID, c.resourceOwner)
p, err := c.handler.CanonicalPath(odConsts.DriveFolderPrefixBuilder(driveID), c.tenantID)
if err != nil {
return nil, false, clues.Wrap(err, "making exclude prefix").WithClues(ictx)
}
@ -413,7 +413,7 @@ func (c *Collections) Get(
// generate tombstones for drives that were removed.
for driveID := range driveTombstones {
prevDrivePath, err := c.handler.PathPrefix(c.tenantID, c.resourceOwner, driveID)
prevDrivePath, err := c.handler.PathPrefix(c.tenantID, driveID)
if err != nil {
return nil, false, clues.Wrap(err, "making drive tombstone for previous path").WithClues(ctx)
}
@ -642,7 +642,7 @@ func (c *Collections) getCollectionPath(
pb = path.Builder{}.Append(path.Split(ptr.Val(item.GetParentReference().GetPath()))...)
}
collectionPath, err := c.handler.CanonicalPath(pb, c.tenantID, c.resourceOwner)
collectionPath, err := c.handler.CanonicalPath(pb, c.tenantID)
if err != nil {
return nil, clues.Wrap(err, "making item path")
}

View File

@ -40,7 +40,7 @@ type statePath struct {
func getExpectedStatePathGenerator(
t *testing.T,
bh BackupHandler,
tenant, user, base string,
tenant, base string,
) func(data.CollectionState, ...string) statePath {
return func(state data.CollectionState, pths ...string) statePath {
var (
@ -56,12 +56,12 @@ func getExpectedStatePathGenerator(
} else {
require.Len(t, pths, 2, "invalid number of paths to getExpectedStatePathGenerator")
pb := path.Builder{}.Append(path.Split(base + pths[1])...)
p2, err = bh.CanonicalPath(pb, tenant, user)
p2, err = bh.CanonicalPath(pb, tenant)
require.NoError(t, err, clues.ToCore(err))
}
pb := path.Builder{}.Append(path.Split(base + pths[0])...)
p1, err = bh.CanonicalPath(pb, tenant, user)
p1, err = bh.CanonicalPath(pb, tenant)
require.NoError(t, err, clues.ToCore(err))
switch state {
@ -88,11 +88,11 @@ func getExpectedStatePathGenerator(
func getExpectedPathGenerator(
t *testing.T,
bh BackupHandler,
tenant, user, base string,
tenant, base string,
) func(string) string {
return func(p string) string {
pb := path.Builder{}.Append(path.Split(base + p)...)
cp, err := bh.CanonicalPath(pb, tenant, user)
cp, err := bh.CanonicalPath(pb, tenant)
require.NoError(t, err, clues.ToCore(err))
return cp.String()
@ -129,10 +129,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
pkg = "/package"
)
bh := itemBackupHandler{}
bh := itemBackupHandler{userID: user}
testBaseDrivePath := odConsts.DriveFolderPrefixBuilder("driveID1").String()
expectedPath := getExpectedPathGenerator(suite.T(), bh, tenant, user, testBaseDrivePath)
expectedStatePath := getExpectedStatePathGenerator(suite.T(), bh, tenant, user, testBaseDrivePath)
expectedPath := getExpectedPathGenerator(suite.T(), bh, tenant, testBaseDrivePath)
expectedStatePath := getExpectedStatePathGenerator(suite.T(), bh, tenant, testBaseDrivePath)
tests := []struct {
testCase string
@ -744,7 +744,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
maps.Copy(outputFolderMap, tt.inputFolderMap)
c := NewCollections(
&itemBackupHandler{api.Drives{}, tt.scope},
&itemBackupHandler{api.Drives{}, user, tt.scope},
tenant,
user,
nil,
@ -1179,63 +1179,6 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata_ReadFailure()
require.False(t, canUsePreviousBackup)
}
type mockDeltaPageLinker struct {
link *string
delta *string
}
func (pl *mockDeltaPageLinker) GetOdataNextLink() *string {
return pl.link
}
func (pl *mockDeltaPageLinker) GetOdataDeltaLink() *string {
return pl.delta
}
type deltaPagerResult struct {
items []models.DriveItemable
nextLink *string
deltaLink *string
err error
}
type mockItemPager struct {
// DriveID -> set of return values for queries for that drive.
toReturn []deltaPagerResult
getIdx int
}
func (p *mockItemPager) GetPage(context.Context) (api.DeltaPageLinker, error) {
if len(p.toReturn) <= p.getIdx {
return nil, assert.AnError
}
idx := p.getIdx
p.getIdx++
return &mockDeltaPageLinker{
p.toReturn[idx].nextLink,
p.toReturn[idx].deltaLink,
}, p.toReturn[idx].err
}
func (p *mockItemPager) SetNext(string) {}
func (p *mockItemPager) Reset() {}
func (p *mockItemPager) ValuesIn(api.DeltaPageLinker) ([]models.DriveItemable, error) {
idx := p.getIdx
if idx > 0 {
// Return values lag by one since we increment in GetPage().
idx--
}
if len(p.toReturn) <= idx {
return nil, assert.AnError
}
return p.toReturn[idx].items, nil
}
func (suite *OneDriveCollectionsUnitSuite) TestGet() {
var (
tenant = "a-tenant"
@ -1265,13 +1208,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
drive2.SetName(&driveID2)
var (
bh = itemBackupHandler{}
bh = itemBackupHandler{userID: user}
driveBasePath1 = odConsts.DriveFolderPrefixBuilder(driveID1).String()
driveBasePath2 = odConsts.DriveFolderPrefixBuilder(driveID2).String()
expectedPath1 = getExpectedPathGenerator(suite.T(), bh, tenant, user, driveBasePath1)
expectedPath2 = getExpectedPathGenerator(suite.T(), bh, tenant, user, driveBasePath2)
expectedPath1 = getExpectedPathGenerator(suite.T(), bh, tenant, driveBasePath1)
expectedPath2 = getExpectedPathGenerator(suite.T(), bh, tenant, driveBasePath2)
rootFolderPath1 = expectedPath1("")
folderPath1 = expectedPath1("/folder")
@ -1283,7 +1226,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
table := []struct {
name string
drives []models.Driveable
items map[string][]deltaPagerResult
items map[string][]apiMock.PagerResult[models.DriveItemable]
canUsePreviousBackup bool
errCheck assert.ErrorAssertionFunc
prevFolderPaths map[string]map[string]string
@ -1302,14 +1245,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "OneDrive_OneItemPage_DelFileOnly_NoFolders_NoErrors",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"), // will be present, not needed
delItem("file", driveBasePath1, "root", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -1334,14 +1277,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "OneDrive_OneItemPage_NoFolderDeltas_NoErrors",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("file", "file", driveBasePath1, "root", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -1366,15 +1309,15 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "OneDrive_OneItemPage_NoErrors",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -1403,16 +1346,16 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "OneDrive_OneItemPage_NoErrors_FileRenamedMultiple",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
driveItem("file", "file2", driveBasePath1+"/folder", "folder", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -1441,16 +1384,16 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "OneDrive_OneItemPage_NoErrors_FileMovedMultiple",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
driveItem("file", "file2", driveBasePath1, "root", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -1481,15 +1424,15 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "OneDrive_OneItemPage_EmptyDelta_NoErrors",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
deltaLink: &empty, // probably will never happen with graph
DeltaLink: &empty, // probably will never happen with graph
},
},
},
@ -1518,23 +1461,23 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "OneDrive_TwoItemPages_NoErrors",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
nextLink: &next,
NextLink: &next,
},
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -1568,25 +1511,25 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
drive1,
drive2,
},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
driveID2: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root2"),
driveItem("folder2", "folder", driveBasePath2, "root2", false, true, false),
driveItem("file2", "file", driveBasePath2+"/folder", "folder2", true, false, false),
},
deltaLink: &delta2,
DeltaLink: &delta2,
},
},
},
@ -1630,25 +1573,25 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
drive1,
drive2,
},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
driveID2: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath2, "root", false, true, false),
driveItem("file2", "file", driveBasePath2+"/folder", "folder", true, false, false),
},
deltaLink: &delta2,
DeltaLink: &delta2,
},
},
},
@ -1689,10 +1632,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "OneDrive_OneItemPage_Errors",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
err: assert.AnError,
Err: assert.AnError,
},
},
},
@ -1709,17 +1652,17 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "OneDrive_OneItemPage_DeltaError",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
err: getDeltaError(),
Err: getDeltaError(),
},
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("file", "file", driveBasePath1, "root", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -1744,25 +1687,25 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "OneDrive_TwoItemPage_DeltaError",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
err: getDeltaError(),
Err: getDeltaError(),
},
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("file", "file", driveBasePath1, "root", true, false, false),
},
nextLink: &next,
NextLink: &next,
},
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file2", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -1790,22 +1733,22 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "OneDrive_TwoItemPage_NoDeltaError",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("file", "file", driveBasePath1, "root", true, false, false),
},
nextLink: &next,
NextLink: &next,
},
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file2", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -1837,18 +1780,18 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "OneDrive_OneItemPage_InvalidPrevDelta_DeleteNonExistentFolder",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
err: getDeltaError(),
Err: getDeltaError(),
},
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder2", "folder2", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder2", "folder2", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -1884,18 +1827,18 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "OneDrive_OneItemPage_InvalidPrevDelta_AnotherFolderAtDeletedLocation",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
err: getDeltaError(),
Err: getDeltaError(),
},
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder2", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder2", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -1934,25 +1877,25 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "OneDrive Two Item Pages with Malware",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
malwareItem("malware", "malware", driveBasePath1+"/folder", "folder", true, false, false),
},
nextLink: &next,
NextLink: &next,
},
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file2", "file2", driveBasePath1+"/folder", "folder", true, false, false),
malwareItem("malware2", "malware2", driveBasePath1+"/folder", "folder", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -1984,28 +1927,28 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "One Drive Delta Error Deleted Folder In New Results",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
err: getDeltaError(),
Err: getDeltaError(),
},
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
driveItem("folder2", "folder2", driveBasePath1, "root", false, true, false),
driveItem("file2", "file2", driveBasePath1+"/folder2", "folder2", true, false, false),
},
nextLink: &next,
NextLink: &next,
},
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
delItem("folder2", driveBasePath1, "root", false, true, false),
delItem("file2", driveBasePath1, "root", true, false, false),
},
deltaLink: &delta2,
DeltaLink: &delta2,
},
},
},
@ -2042,17 +1985,17 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "One Drive Delta Error Random Folder Delete",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
err: getDeltaError(),
Err: getDeltaError(),
},
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
delItem("folder", driveBasePath1, "root", false, true, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -2085,17 +2028,17 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "One Drive Delta Error Random Item Delete",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
err: getDeltaError(),
Err: getDeltaError(),
},
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
delItem("file", driveBasePath1, "root", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -2125,23 +2068,23 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "One Drive Folder Made And Deleted",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
nextLink: &next,
NextLink: &next,
},
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
delItem("folder", driveBasePath1, "root", false, true, false),
delItem("file", driveBasePath1, "root", true, false, false),
},
deltaLink: &delta2,
DeltaLink: &delta2,
},
},
},
@ -2169,22 +2112,22 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "One Drive Item Made And Deleted",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
driveItem("folder", "folder", driveBasePath1, "root", false, true, false),
driveItem("file", "file", driveBasePath1+"/folder", "folder", true, false, false),
},
nextLink: &next,
NextLink: &next,
},
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
delItem("file", driveBasePath1, "root", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -2215,14 +2158,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "One Drive Random Folder Delete",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
delItem("folder", driveBasePath1, "root", false, true, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -2250,14 +2193,14 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "One Drive Random Item Delete",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"),
delItem("file", driveBasePath1, "root", true, false, false),
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -2285,13 +2228,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
{
name: "TwoPriorDrives_OneTombstoned",
drives: []models.Driveable{drive1},
items: map[string][]deltaPagerResult{
items: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID1: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
driveRootItem("root"), // will be present
},
deltaLink: &delta,
DeltaLink: &delta,
},
},
},
@ -2322,21 +2265,21 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
ctx, flush := tester.NewContext(t)
defer flush()
mockDrivePager := &apiMock.DrivePager{
ToReturn: []apiMock.PagerResult{
{Drives: test.drives},
mockDrivePager := &apiMock.Pager[models.Driveable]{
ToReturn: []apiMock.PagerResult[models.Driveable]{
{Values: test.drives},
},
}
itemPagers := map[string]api.DriveItemDeltaEnumerator{}
itemPagers := map[string]api.DeltaPager[models.DriveItemable]{}
for driveID := range test.items {
itemPagers[driveID] = &mockItemPager{
toReturn: test.items[driveID],
itemPagers[driveID] = &apiMock.DeltaPager[models.DriveItemable]{
ToReturn: test.items[driveID],
}
}
mbh := mock.DefaultOneDriveBH()
mbh := mock.DefaultOneDriveBH("a-user")
mbh.DrivePagerV = mockDrivePager
mbh.ItemPagerV = itemPagers
@ -2583,7 +2526,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
table := []struct {
name string
items []deltaPagerResult
items []apiMock.PagerResult[models.DriveItemable]
deltaURL string
prevDeltaSuccess bool
prevDelta string
@ -2592,8 +2535,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
{
name: "delta on first run",
deltaURL: delta,
items: []deltaPagerResult{
{deltaLink: &delta},
items: []apiMock.PagerResult[models.DriveItemable]{
{DeltaLink: &delta},
},
prevDeltaSuccess: true,
prevDelta: prevDelta,
@ -2601,8 +2544,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
{
name: "empty prev delta",
deltaURL: delta,
items: []deltaPagerResult{
{deltaLink: &delta},
items: []apiMock.PagerResult[models.DriveItemable]{
{DeltaLink: &delta},
},
prevDeltaSuccess: false,
prevDelta: "",
@ -2610,9 +2553,9 @@ func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
{
name: "next then delta",
deltaURL: delta,
items: []deltaPagerResult{
{nextLink: &next},
{deltaLink: &delta},
items: []apiMock.PagerResult[models.DriveItemable]{
{NextLink: &next},
{DeltaLink: &delta},
},
prevDeltaSuccess: true,
prevDelta: prevDelta,
@ -2620,18 +2563,18 @@ func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
{
name: "invalid prev delta",
deltaURL: delta,
items: []deltaPagerResult{
{err: getDeltaError()},
{deltaLink: &delta}, // works on retry
items: []apiMock.PagerResult[models.DriveItemable]{
{Err: getDeltaError()},
{DeltaLink: &delta}, // works on retry
},
prevDelta: prevDelta,
prevDeltaSuccess: false,
},
{
name: "fail a normal delta query",
items: []deltaPagerResult{
{nextLink: &next},
{err: assert.AnError},
items: []apiMock.PagerResult[models.DriveItemable]{
{NextLink: &next},
{Err: assert.AnError},
},
prevDelta: prevDelta,
prevDeltaSuccess: true,
@ -2645,8 +2588,8 @@ func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
ctx, flush := tester.NewContext(t)
defer flush()
itemPager := &mockItemPager{
toReturn: test.items,
itemPager := &apiMock.DeltaPager[models.DriveItemable]{
ToReturn: test.items,
}
collectorFunc := func(
@ -2687,7 +2630,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestAddURLCacheToDriveCollections() {
table := []struct {
name string
items []deltaPagerResult
items []apiMock.PagerResult[any]
deltaURL string
prevDeltaSuccess bool
prevDelta string
@ -2704,10 +2647,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestAddURLCacheToDriveCollections() {
ctx, flush := tester.NewContext(t)
defer flush()
itemPagers := map[string]api.DriveItemDeltaEnumerator{}
itemPagers[driveID] = &mockItemPager{}
itemPagers := map[string]api.DeltaPager[models.DriveItemable]{}
itemPagers[driveID] = &apiMock.DeltaPager[models.DriveItemable]{}
mbh := mock.DefaultOneDriveBH()
mbh := mock.DefaultOneDriveBH("test-user")
mbh.ItemPagerV = itemPagers
c := NewCollections(
@ -2724,7 +2667,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestAddURLCacheToDriveCollections() {
// Add a few collections
for i := 0; i < collCount; i++ {
coll, err := NewCollection(
&itemBackupHandler{api.Drives{}, anyFolder},
&itemBackupHandler{api.Drives{}, "test-user", anyFolder},
nil,
nil,
driveID,

View File

@ -0,0 +1,139 @@
package drive
import (
"context"
"strings"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/export"
"github.com/alcionai/corso/src/pkg/fault"
)
var _ export.Collection = &ExportCollection{}
// ExportCollection is the implementation of export.Collection for OneDrive.
type ExportCollection struct {
// baseDir contains the path of the collection
baseDir string
// backingCollection is the restore collection from which we will
// create the export collection.
backingCollection data.RestoreCollection
// backupVersion is the version of the backup this collection was part
// of. It is required to figure out how to derive the name of the
// item.
backupVersion int
}
func NewExportCollection(
baseDir string,
backingCollection data.RestoreCollection,
backupVersion int,
) ExportCollection {
return ExportCollection{
baseDir: baseDir,
backingCollection: backingCollection,
backupVersion: backupVersion,
}
}
func (ec ExportCollection) BasePath() string {
return ec.baseDir
}
func (ec ExportCollection) Items(ctx context.Context) <-chan export.Item {
ch := make(chan export.Item)
go items(ctx, ec, ch)
return ch
}
// items converts items in the backing collection to export items.
func items(ctx context.Context, ec ExportCollection, ch chan<- export.Item) {
defer close(ch)
errs := fault.New(false)
for item := range ec.backingCollection.Items(ctx, errs) {
itemUUID := item.ID()
if isMetadataFile(itemUUID, ec.backupVersion) {
continue
}
name, err := getItemName(ctx, itemUUID, ec.backupVersion, ec.backingCollection)
ch <- export.Item{
ID: itemUUID,
Data: export.ItemData{
Name: name,
Body: item.ToReader(),
},
Error: err,
}
}
eitems, erecovereable := errs.ItemsAndRecovered()
// Return all the items that we failed to source from the persistence layer
for _, err := range eitems {
// copy the loop variable so the pointer below doesn't alias it across iterations
err := err

ch <- export.Item{
ID: err.ID,
Error: &err,
}
}
for _, ec := range erecovereable {
ch <- export.Item{
Error: ec,
}
}
}
// isMetadataFile is used to determine if a path corresponds to a
// metadata file. This is OneDrive-specific logic and depends on the
// version of the backup, unlike metadata.IsMetadataFile, which only has
// to be concerned with the current version.
func isMetadataFile(id string, backupVersion int) bool {
if backupVersion < version.OneDrive1DataAndMetaFiles {
return false
}
return strings.HasSuffix(id, metadata.MetaFileSuffix) ||
strings.HasSuffix(id, metadata.DirMetaFileSuffix)
}
// getItemName is used to get the name of the item.
// How we get the name depends on the version of the backup.
func getItemName(
ctx context.Context,
id string,
backupVersion int,
fin data.FetchItemByNamer,
) (string, error) {
if backupVersion < version.OneDrive1DataAndMetaFiles {
return id, nil
}
if backupVersion < version.OneDrive5DirMetaNoName {
return strings.TrimSuffix(id, metadata.DataFileSuffix), nil
}
if strings.HasSuffix(id, metadata.DataFileSuffix) {
trimmedName := strings.TrimSuffix(id, metadata.DataFileSuffix)
metaName := trimmedName + metadata.MetaFileSuffix
meta, err := FetchAndReadMetadata(ctx, fin, metaName)
if err != nil {
return "", clues.Wrap(err, "getting metadata").WithClues(ctx)
}
return meta.FileName, nil
}
return "", clues.New("invalid item id").WithClues(ctx)
}
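For illustration, a caller might drain the collection returned by NewExportCollection like this; drainExportCollection, the base directory, and the restoreColl/backupVersion arguments are hypothetical stand-ins for whatever the surrounding export flow supplies.
// Sketch only: consumes an ExportCollection and surfaces per-item errors.
func drainExportCollection(
	ctx context.Context,
	restoreColl data.RestoreCollection,
	backupVersion int,
) error {
	ec := NewExportCollection("Documents/reports", restoreColl, backupVersion)

	for item := range ec.Items(ctx) {
		// Items carrying only an Error come from the failure drain at the
		// end of items(); everything else has a Name and a Body reader.
		if item.Error != nil {
			return clues.Wrap(item.Error, "exporting item")
		}

		_ = item.Data.Name // user-visible file name
		_ = item.Data.Body // reader over the file content
	}

	return nil
}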

View File

@ -0,0 +1,145 @@
package drive
import (
"bytes"
"context"
"io"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/data"
dataMock "github.com/alcionai/corso/src/internal/data/mock"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version"
)
type ExportUnitSuite struct {
tester.Suite
}
func TestExportUnitSuite(t *testing.T) {
suite.Run(t, &ExportUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *ExportUnitSuite) TestIsMetadataFile() {
table := []struct {
name string
id string
backupVersion int
isMeta bool
}{
{
name: "legacy",
backupVersion: version.OneDrive1DataAndMetaFiles,
isMeta: false,
},
{
name: "metadata file",
backupVersion: version.OneDrive3IsMetaMarker,
id: "name" + metadata.MetaFileSuffix,
isMeta: true,
},
{
name: "dir metadata file",
backupVersion: version.OneDrive3IsMetaMarker,
id: "name" + metadata.DirMetaFileSuffix,
isMeta: true,
},
{
name: "non metadata file",
backupVersion: version.OneDrive3IsMetaMarker,
id: "name" + metadata.DataFileSuffix,
isMeta: false,
},
}
for _, test := range table {
suite.Run(test.name, func() {
assert.Equal(suite.T(), test.isMeta, isMetadataFile(test.id, test.backupVersion), "is metadata")
})
}
}
type finD struct {
id string
name string
err error
}
func (fd finD) FetchItemByName(ctx context.Context, name string) (data.Item, error) {
if fd.err != nil {
return nil, fd.err
}
if name == fd.id {
return &dataMock.Item{
ItemID: fd.id,
Reader: io.NopCloser(bytes.NewBufferString(`{"filename": "` + fd.name + `"}`)),
}, nil
}
return nil, assert.AnError
}
func (suite *ExportUnitSuite) TestGetItemName() {
table := []struct {
tname string
id string
backupVersion int
name string
fin data.FetchItemByNamer
errFunc assert.ErrorAssertionFunc
}{
{
tname: "legacy",
id: "name",
backupVersion: version.OneDrive1DataAndMetaFiles,
name: "name",
errFunc: assert.NoError,
},
{
tname: "name in filename",
id: "name.data",
backupVersion: version.OneDrive4DirIncludesPermissions,
name: "name",
errFunc: assert.NoError,
},
{
tname: "name in metadata",
id: "id.data",
backupVersion: version.Backup,
name: "name",
fin: finD{id: "id.meta", name: "name"},
errFunc: assert.NoError,
},
{
tname: "name in metadata but error",
id: "id.data",
backupVersion: version.Backup,
name: "",
fin: finD{err: assert.AnError},
errFunc: assert.Error,
},
}
for _, test := range table {
suite.Run(test.tname, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
name, err := getItemName(
ctx,
test.id,
test.backupVersion,
test.fin)
test.errFunc(t, err)
assert.Equal(t, test.name, name, "name")
})
}
}

View File

@ -1,6 +1,7 @@
package drive
import (
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
@ -14,10 +15,15 @@ type groupBackupHandler struct {
scope selectors.GroupsScope
}
func NewGroupBackupHandler(groupID string, ac api.Drives, scope selectors.GroupsScope) groupBackupHandler {
func NewGroupBackupHandler(
groupID, siteID string,
ac api.Drives,
scope selectors.GroupsScope,
) groupBackupHandler {
return groupBackupHandler{
libraryBackupHandler{
ac: ac,
ac: ac,
siteID: siteID,
// Not adding scope here. Anything that needs scope has to
// come from the group handler.
service: path.GroupsService,
@ -27,16 +33,36 @@ func NewGroupBackupHandler(groupID string, ac api.Drives, scope selectors.Groups
}
}
func (h groupBackupHandler) CanonicalPath(
folders *path.Builder,
tenantID, resourceOwner string,
func (h groupBackupHandler) PathPrefix(
tenantID, driveID string,
) (path.Path, error) {
// TODO(meain): path fixes
return folders.ToDataLayerPath(tenantID, h.groupID, h.service, path.LibrariesCategory, false)
// TODO: move tenantID to struct
return path.Build(
tenantID,
h.groupID,
h.service,
path.LibrariesCategory,
false,
odConsts.SitesPathDir,
h.siteID,
odConsts.DrivesPathDir,
driveID,
odConsts.RootPathDir)
}
func (h groupBackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
return path.GroupsService, path.LibrariesCategory
func (h groupBackupHandler) CanonicalPath(
folders *path.Builder,
tenantID string,
) (path.Path, error) {
return folders.ToDataLayerPath(
tenantID,
h.groupID,
h.service,
path.LibrariesCategory,
false,
odConsts.SitesPathDir,
h.siteID,
)
}
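As a rough usage sketch (the drives client, scope, and all IDs below are placeholders), the handler is constructed with both the group and site IDs and then asked for a per-drive prefix:
// Sketch only: the group, site, tenant, and drive IDs are placeholder values.
func exampleGroupDrivePrefix(ac api.Drives, scope selectors.GroupsScope) (path.Path, error) {
	h := NewGroupBackupHandler("group-id", "site-id", ac, scope)

	// The returned prefix roots the group's library data under the
	// sites/<siteID>/drives/<driveID>/<root> chain built above.
	return h.PathPrefix("tenant-id", "drive-id")
}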
func (h groupBackupHandler) IsAllPass() bool {

View File

@ -39,18 +39,15 @@ type BackupHandler interface {
// PathPrefix constructs the service and category specific path prefix for
// the given values.
PathPrefix(tenantID, resourceOwner, driveID string) (path.Path, error)
PathPrefix(tenantID, driveID string) (path.Path, error)
// CanonicalPath constructs the service and category specific path for
// the given values.
CanonicalPath(
folders *path.Builder,
tenantID, resourceOwner string,
) (path.Path, error)
CanonicalPath(folders *path.Builder, tenantID string) (path.Path, error)
// ServiceCat returns the service and category used by this implementation.
ServiceCat() (path.ServiceType, path.CategoryType)
NewItemPager(driveID, link string, fields []string) api.DriveItemDeltaEnumerator
NewItemPager(driveID, link string, fields []string) api.DeltaPager[models.DriveItemable]
// FormatDisplayPath creates a human-readable string to represent the
// provided path.
FormatDisplayPath(driveName string, parentPath *path.Builder) string
@ -62,7 +59,7 @@ type BackupHandler interface {
}
type NewDrivePagerer interface {
NewDrivePager(resourceOwner string, fields []string) api.DrivePager
NewDrivePager(resourceOwner string, fields []string) api.Pager[models.Driveable]
}
type GetItemPermissioner interface {

View File

@ -42,7 +42,7 @@ type itemCollector func(
// provided `collector` method
func collectItems(
ctx context.Context,
pager api.DriveItemDeltaEnumerator,
pager api.DeltaPager[models.DriveItemable],
driveID, driveName string,
collector itemCollector,
oldPaths map[string]string,
@ -85,7 +85,7 @@ func collectItems(
invalidPrevDelta = true
newPaths = map[string]string{}
pager.Reset()
pager.Reset(ctx)
continue
}

View File

@ -72,26 +72,26 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
resultDrives = append(resultDrives, d)
}
tooManyRetries := make([]mock.PagerResult, 0, maxDrivesRetries+1)
tooManyRetries := make([]mock.PagerResult[models.Driveable], 0, maxDrivesRetries+1)
for i := 0; i < maxDrivesRetries+1; i++ {
tooManyRetries = append(tooManyRetries, mock.PagerResult{
tooManyRetries = append(tooManyRetries, mock.PagerResult[models.Driveable]{
Err: context.DeadlineExceeded,
})
}
table := []struct {
name string
pagerResults []mock.PagerResult
pagerResults []mock.PagerResult[models.Driveable]
retry bool
expectedErr assert.ErrorAssertionFunc
expectedResults []models.Driveable
}{
{
name: "AllOneResultNilNextLink",
pagerResults: []mock.PagerResult{
pagerResults: []mock.PagerResult[models.Driveable]{
{
Drives: resultDrives,
Values: resultDrives,
NextLink: nil,
Err: nil,
},
@ -102,9 +102,9 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
},
{
name: "AllOneResultEmptyNextLink",
pagerResults: []mock.PagerResult{
pagerResults: []mock.PagerResult[models.Driveable]{
{
Drives: resultDrives,
Values: resultDrives,
NextLink: &emptyLink,
Err: nil,
},
@ -115,14 +115,14 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
},
{
name: "SplitResultsNilNextLink",
pagerResults: []mock.PagerResult{
pagerResults: []mock.PagerResult[models.Driveable]{
{
Drives: resultDrives[:numDriveResults/2],
Values: resultDrives[:numDriveResults/2],
NextLink: &link,
Err: nil,
},
{
Drives: resultDrives[numDriveResults/2:],
Values: resultDrives[numDriveResults/2:],
NextLink: nil,
Err: nil,
},
@ -133,14 +133,14 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
},
{
name: "SplitResultsEmptyNextLink",
pagerResults: []mock.PagerResult{
pagerResults: []mock.PagerResult[models.Driveable]{
{
Drives: resultDrives[:numDriveResults/2],
Values: resultDrives[:numDriveResults/2],
NextLink: &link,
Err: nil,
},
{
Drives: resultDrives[numDriveResults/2:],
Values: resultDrives[numDriveResults/2:],
NextLink: &emptyLink,
Err: nil,
},
@ -151,14 +151,14 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
},
{
name: "NonRetryableError",
pagerResults: []mock.PagerResult{
pagerResults: []mock.PagerResult[models.Driveable]{
{
Drives: resultDrives,
Values: resultDrives,
NextLink: &link,
Err: nil,
},
{
Drives: nil,
Values: nil,
NextLink: nil,
Err: assert.AnError,
},
@ -169,9 +169,9 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
},
{
name: "MySiteURLNotFound",
pagerResults: []mock.PagerResult{
pagerResults: []mock.PagerResult[models.Driveable]{
{
Drives: nil,
Values: nil,
NextLink: nil,
Err: graph.Stack(ctx, mySiteURLNotFound),
},
@ -182,9 +182,9 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
},
{
name: "MySiteNotFound",
pagerResults: []mock.PagerResult{
pagerResults: []mock.PagerResult[models.Driveable]{
{
Drives: nil,
Values: nil,
NextLink: nil,
Err: graph.Stack(ctx, mySiteNotFound),
},
@ -195,19 +195,19 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
},
{
name: "SplitResultsContextTimeoutWithRetries",
pagerResults: []mock.PagerResult{
pagerResults: []mock.PagerResult[models.Driveable]{
{
Drives: resultDrives[:numDriveResults/2],
Values: resultDrives[:numDriveResults/2],
NextLink: &link,
Err: nil,
},
{
Drives: nil,
Values: nil,
NextLink: nil,
Err: context.DeadlineExceeded,
},
{
Drives: resultDrives[numDriveResults/2:],
Values: resultDrives[numDriveResults/2:],
NextLink: &emptyLink,
Err: nil,
},
@ -218,19 +218,19 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
},
{
name: "SplitResultsContextTimeoutNoRetries",
pagerResults: []mock.PagerResult{
pagerResults: []mock.PagerResult[models.Driveable]{
{
Drives: resultDrives[:numDriveResults/2],
Values: resultDrives[:numDriveResults/2],
NextLink: &link,
Err: nil,
},
{
Drives: nil,
Values: nil,
NextLink: nil,
Err: context.DeadlineExceeded,
},
{
Drives: resultDrives[numDriveResults/2:],
Values: resultDrives[numDriveResults/2:],
NextLink: &emptyLink,
Err: nil,
},
@ -242,9 +242,9 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
{
name: "TooManyRetries",
pagerResults: append(
[]mock.PagerResult{
[]mock.PagerResult[models.Driveable]{
{
Drives: resultDrives[:numDriveResults/2],
Values: resultDrives[:numDriveResults/2],
NextLink: &link,
Err: nil,
},
@ -263,7 +263,7 @@ func (suite *ItemCollectorUnitSuite) TestDrives() {
ctx, flush := tester.NewContext(t)
defer flush()
pager := &mock.DrivePager{
pager := &mock.Pager[models.Driveable]{
ToReturn: test.pagerResults,
}
@ -344,7 +344,7 @@ func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() {
)
colls := NewCollections(
&itemBackupHandler{suite.ac.Drives(), scope},
&itemBackupHandler{suite.ac.Drives(), test.user, scope},
creds.AzureTenantID,
test.user,
service.updateStatus,

View File

@ -23,12 +23,13 @@ import (
var _ BackupHandler = &itemBackupHandler{}
type itemBackupHandler struct {
ac api.Drives
scope selectors.OneDriveScope
ac api.Drives
userID string
scope selectors.OneDriveScope
}
func NewItemBackupHandler(ac api.Drives, scope selectors.OneDriveScope) *itemBackupHandler {
return &itemBackupHandler{ac, scope}
func NewItemBackupHandler(ac api.Drives, userID string, scope selectors.OneDriveScope) *itemBackupHandler {
return &itemBackupHandler{ac, userID, scope}
}
func (h itemBackupHandler) Get(
@ -40,11 +41,11 @@ func (h itemBackupHandler) Get(
}
func (h itemBackupHandler) PathPrefix(
tenantID, resourceOwner, driveID string,
tenantID, driveID string,
) (path.Path, error) {
return path.Build(
tenantID,
resourceOwner,
h.userID,
path.OneDriveService,
path.FilesCategory,
false,
@ -55,9 +56,9 @@ func (h itemBackupHandler) PathPrefix(
func (h itemBackupHandler) CanonicalPath(
folders *path.Builder,
tenantID, resourceOwner string,
tenantID string,
) (path.Path, error) {
return folders.ToDataLayerOneDrivePath(tenantID, resourceOwner, false)
return folders.ToDataLayerOneDrivePath(tenantID, h.userID, false)
}
func (h itemBackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
@ -66,14 +67,14 @@ func (h itemBackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
func (h itemBackupHandler) NewDrivePager(
resourceOwner string, fields []string,
) api.DrivePager {
) api.Pager[models.Driveable] {
return h.ac.NewUserDrivePager(resourceOwner, fields)
}
func (h itemBackupHandler) NewItemPager(
driveID, link string,
fields []string,
) api.DriveItemDeltaEnumerator {
) api.DeltaPager[models.DriveItemable] {
return h.ac.NewDriveItemDeltaPager(driveID, link, fields)
}
@ -145,7 +146,7 @@ func (h itemRestoreHandler) PostDrive(
func (h itemRestoreHandler) NewDrivePager(
resourceOwner string, fields []string,
) api.DrivePager {
) api.Pager[models.Driveable] {
return h.ac.NewUserDrivePager(resourceOwner, fields)
}

View File

@ -36,10 +36,10 @@ func (suite *ItemBackupHandlerUnitSuite) TestCanonicalPath() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
h := itemBackupHandler{}
h := itemBackupHandler{userID: resourceOwner}
p := path.Builder{}.Append("prefix")
result, err := h.CanonicalPath(p, tenantID, resourceOwner)
result, err := h.CanonicalPath(p, tenantID)
test.expectErr(t, err, clues.ToCore(err))
if result != nil {

View File

@ -124,6 +124,7 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
bh := itemBackupHandler{
suite.service.ac.Drives(),
suite.user,
(&selectors.OneDriveBackup{}).Folders(selectors.Any())[0],
}

View File

@ -21,16 +21,18 @@ var _ BackupHandler = &libraryBackupHandler{}
type libraryBackupHandler struct {
ac api.Drives
siteID string
scope selectors.SharePointScope
service path.ServiceType
}
func NewLibraryBackupHandler(
ac api.Drives,
siteID string,
scope selectors.SharePointScope,
service path.ServiceType,
) libraryBackupHandler {
return libraryBackupHandler{ac, scope, service}
return libraryBackupHandler{ac, siteID, scope, service}
}
func (h libraryBackupHandler) Get(
@ -42,11 +44,11 @@ func (h libraryBackupHandler) Get(
}
func (h libraryBackupHandler) PathPrefix(
tenantID, resourceOwner, driveID string,
tenantID, driveID string,
) (path.Path, error) {
return path.Build(
tenantID,
resourceOwner,
h.siteID,
h.service,
path.LibrariesCategory,
false,
@ -57,26 +59,26 @@ func (h libraryBackupHandler) PathPrefix(
func (h libraryBackupHandler) CanonicalPath(
folders *path.Builder,
tenantID, resourceOwner string,
tenantID string,
) (path.Path, error) {
return folders.ToDataLayerPath(tenantID, resourceOwner, h.service, path.LibrariesCategory, false)
return folders.ToDataLayerPath(tenantID, h.siteID, h.service, path.LibrariesCategory, false)
}
func (h libraryBackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
return path.SharePointService, path.LibrariesCategory
return h.service, path.LibrariesCategory
}
func (h libraryBackupHandler) NewDrivePager(
resourceOwner string,
fields []string,
) api.DrivePager {
) api.Pager[models.Driveable] {
return h.ac.NewSiteDrivePager(resourceOwner, fields)
}
func (h libraryBackupHandler) NewItemPager(
driveID, link string,
fields []string,
) api.DriveItemDeltaEnumerator {
) api.DeltaPager[models.DriveItemable] {
return h.ac.NewDriveItemDeltaPager(driveID, link, fields)
}
@ -184,7 +186,7 @@ func (h libraryRestoreHandler) PostDrive(
func (h libraryRestoreHandler) NewDrivePager(
resourceOwner string,
fields []string,
) api.DrivePager {
) api.Pager[models.Driveable] {
return h.ac.Drives().NewSiteDrivePager(resourceOwner, fields)
}

View File

@ -36,10 +36,10 @@ func (suite *LibraryBackupHandlerUnitSuite) TestCanonicalPath() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
h := libraryBackupHandler{service: path.SharePointService}
h := libraryBackupHandler{service: path.SharePointService, siteID: resourceOwner}
p := path.Builder{}.Append("prefix")
result, err := h.CanonicalPath(p, tenantID, resourceOwner)
result, err := h.CanonicalPath(p, tenantID)
test.expectErr(t, err, clues.ToCore(err))
if result != nil {
@ -52,7 +52,7 @@ func (suite *LibraryBackupHandlerUnitSuite) TestCanonicalPath() {
func (suite *LibraryBackupHandlerUnitSuite) TestServiceCat() {
t := suite.T()
s, c := libraryBackupHandler{}.ServiceCat()
s, c := libraryBackupHandler{service: path.SharePointService}.ServiceCat()
assert.Equal(t, path.SharePointService, s)
assert.Equal(t, path.LibrariesCategory, c)
}

View File

@ -408,7 +408,7 @@ func (suite *RestoreUnitSuite) TestRestoreCaches_AddDrive() {
type mockGDPARF struct {
err error
rootFolder models.DriveItemable
pager *apiMock.DrivePager
pager *apiMock.Pager[models.Driveable]
}
func (m *mockGDPARF) GetRootFolder(
@ -421,7 +421,7 @@ func (m *mockGDPARF) GetRootFolder(
func (m *mockGDPARF) NewDrivePager(
string,
[]string,
) api.DrivePager {
) api.Pager[models.Driveable] {
return m.pager
}
@ -439,16 +439,16 @@ func (suite *RestoreUnitSuite) TestRestoreCaches_Populate() {
table := []struct {
name string
mock *apiMock.DrivePager
mock *apiMock.Pager[models.Driveable]
expectErr require.ErrorAssertionFunc
expectLen int
checkValues bool
}{
{
name: "no results",
mock: &apiMock.DrivePager{
ToReturn: []apiMock.PagerResult{
{Drives: []models.Driveable{}},
mock: &apiMock.Pager[models.Driveable]{
ToReturn: []apiMock.PagerResult[models.Driveable]{
{Values: []models.Driveable{}},
},
},
expectErr: require.NoError,
@ -456,9 +456,9 @@ func (suite *RestoreUnitSuite) TestRestoreCaches_Populate() {
},
{
name: "one result",
mock: &apiMock.DrivePager{
ToReturn: []apiMock.PagerResult{
{Drives: []models.Driveable{md}},
mock: &apiMock.Pager[models.Driveable]{
ToReturn: []apiMock.PagerResult[models.Driveable]{
{Values: []models.Driveable{md}},
},
},
expectErr: require.NoError,
@ -467,8 +467,8 @@ func (suite *RestoreUnitSuite) TestRestoreCaches_Populate() {
},
{
name: "error",
mock: &apiMock.DrivePager{
ToReturn: []apiMock.PagerResult{
mock: &apiMock.Pager[models.Driveable]{
ToReturn: []apiMock.PagerResult[models.Driveable]{
{Err: assert.AnError},
},
},

View File

@ -47,7 +47,7 @@ type urlCache struct {
refreshMu sync.Mutex
deltaQueryCount int
itemPager api.DriveItemDeltaEnumerator
itemPager api.DeltaPager[models.DriveItemable]
errs *fault.Bus
}
@ -56,7 +56,7 @@ type urlCache struct {
func newURLCache(
driveID, prevDelta string,
refreshInterval time.Duration,
itemPager api.DriveItemDeltaEnumerator,
itemPager api.DeltaPager[models.DriveItemable],
errs *fault.Bus,
) (*urlCache, error) {
err := validateCacheParams(
@ -83,7 +83,7 @@ func newURLCache(
func validateCacheParams(
driveID string,
refreshInterval time.Duration,
itemPager api.DriveItemDeltaEnumerator,
itemPager api.DeltaPager[models.DriveItemable],
) error {
if len(driveID) == 0 {
return clues.New("drive id is empty")
@ -182,7 +182,7 @@ func (uc *urlCache) deltaQuery(
) error {
logger.Ctx(ctx).Debug("starting delta query")
// Reset item pager to remove any previous state
uc.itemPager.Reset()
uc.itemPager.Reset(ctx)
_, _, _, err := collectItems(
ctx,

View File

@ -24,6 +24,7 @@ import (
"github.com/alcionai/corso/src/pkg/control/testdata"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
)
type URLCacheIntegrationSuite struct {
@ -209,20 +210,20 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
table := []struct {
name string
pagerResult map[string][]deltaPagerResult
pagerResult map[string][]apiMock.PagerResult[models.DriveItemable]
expectedItemProps map[string]itemProps
expectedErr require.ErrorAssertionFunc
cacheAssert func(*urlCache, time.Time)
}{
{
name: "single item in cache",
pagerResult: map[string][]deltaPagerResult{
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
},
deltaLink: &deltaString,
DeltaLink: &deltaString,
},
},
},
@ -241,17 +242,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "multiple items in cache",
pagerResult: map[string][]deltaPagerResult{
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
},
deltaLink: &deltaString,
DeltaLink: &deltaString,
},
},
},
@ -286,17 +287,17 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "duplicate items with potentially new urls",
pagerResult: map[string][]deltaPagerResult{
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
fileItem("1", "file1", "root", "root", "https://test1.com", false),
fileItem("2", "file2", "root", "root", "https://test2.com", false),
},
deltaLink: &deltaString,
DeltaLink: &deltaString,
},
},
},
@ -323,15 +324,15 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "deleted items",
pagerResult: map[string][]deltaPagerResult{
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
fileItem("1", "file1", "root", "root", "https://dummy1.com", true),
},
deltaLink: &deltaString,
DeltaLink: &deltaString,
},
},
},
@ -354,13 +355,13 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "item not found in cache",
pagerResult: map[string][]deltaPagerResult{
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
},
deltaLink: &deltaString,
DeltaLink: &deltaString,
},
},
},
@ -376,20 +377,20 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
},
{
name: "multi-page delta query error",
pagerResult: map[string][]deltaPagerResult{
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
},
nextLink: &next,
NextLink: &next,
},
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
},
deltaLink: &deltaString,
err: errors.New("delta query error"),
DeltaLink: &deltaString,
Err: errors.New("delta query error"),
},
},
},
@ -407,14 +408,14 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
{
name: "folder item",
pagerResult: map[string][]deltaPagerResult{
pagerResult: map[string][]apiMock.PagerResult[models.DriveItemable]{
driveID: {
{
items: []models.DriveItemable{
Values: []models.DriveItemable{
fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
driveItem("2", "folder2", "root", "root", false, true, false),
},
deltaLink: &deltaString,
DeltaLink: &deltaString,
},
},
},
@ -436,8 +437,8 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
ctx, flush := tester.NewContext(t)
defer flush()
itemPager := &mockItemPager{
toReturn: test.pagerResult[driveID],
itemPager := &apiMock.DeltaPager[models.DriveItemable]{
ToReturn: test.pagerResult[driveID],
}
cache, err := newURLCache(
@ -487,7 +488,7 @@ func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
driveID,
"",
refreshInterval,
&mockItemPager{},
&apiMock.DeltaPager[models.DriveItemable]{},
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
@ -516,7 +517,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
name string
driveID string
refreshInt time.Duration
itemPager api.DriveItemDeltaEnumerator
itemPager api.DeltaPager[models.DriveItemable]
errors *fault.Bus
expectedErr require.ErrorAssertionFunc
}{
@ -524,7 +525,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
name: "invalid driveID",
driveID: "",
refreshInt: 1 * time.Hour,
itemPager: &mockItemPager{},
itemPager: &apiMock.DeltaPager[models.DriveItemable]{},
errors: fault.New(true),
expectedErr: require.Error,
},
@ -532,7 +533,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
name: "invalid refresh interval",
driveID: "drive1",
refreshInt: 100 * time.Millisecond,
itemPager: &mockItemPager{},
itemPager: &apiMock.DeltaPager[models.DriveItemable]{},
errors: fault.New(true),
expectedErr: require.Error,
},
@ -548,7 +549,7 @@ func (suite *URLCacheUnitSuite) TestNewURLCache() {
name: "valid",
driveID: "drive1",
refreshInt: 1 * time.Hour,
itemPager: &mockItemPager{},
itemPager: &apiMock.DeltaPager[models.DriveItemable]{},
errors: fault.New(true),
expectedErr: require.NoError,
},

View File

@ -243,7 +243,7 @@ func populateCollections(
func collectItems(
ctx context.Context,
pager api.ChannelMessageDeltaEnumerator,
pager api.DeltaPager[models.ChatMessageable],
) ([]models.ChatMessageable, error) {
items := []models.ChatMessageable{}

View File

@ -16,7 +16,7 @@ type BackupHandler interface {
) (models.Channelable, error)
NewChannelsPager(
teamID string,
) api.ChannelEnumerator
) api.Pager[models.Channelable]
GetMessageByID(
ctx context.Context,
@ -24,7 +24,7 @@ type BackupHandler interface {
) (models.ChatMessageable, error)
NewMessagePager(
teamID, channelID string,
) api.ChannelMessageDeltaEnumerator
) api.DeltaPager[models.ChatMessageable]
GetMessageReplies(
ctx context.Context,
@ -34,7 +34,7 @@ type BackupHandler interface {
type BackupMessagesHandler interface {
GetMessage(ctx context.Context, teamID, channelID, itemID string) (models.ChatMessageable, error)
NewMessagePager(teamID, channelID string) api.ChannelMessageDeltaEnumerator
NewMessagePager(teamID, channelID string) api.DeltaPager[models.ChatMessageable]
GetChannel(ctx context.Context, teamID, channelID string) (models.Channelable, error)
GetReply(ctx context.Context, teamID, channelID, messageID string) (serialization.Parsable, error)
}

View File

@ -9,6 +9,7 @@ import (
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/service/onedrive"
"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
@ -41,8 +42,7 @@ func (ctrl *Controller) ProduceExportCollections(
)
switch sels.Service {
case selectors.ServiceOneDrive, selectors.ServiceSharePoint:
// OneDrive and SharePoint can share the code to create collections
case selectors.ServiceOneDrive:
expCollections, err = onedrive.ProduceExportCollections(
ctx,
backupVersion,
@ -51,6 +51,17 @@ func (ctrl *Controller) ProduceExportCollections(
dcs,
deets,
errs)
case selectors.ServiceSharePoint:
expCollections, err = sharepoint.ProduceExportCollections(
ctx,
backupVersion,
exportCfg,
opts,
dcs,
ctrl.backupDriveIDNames,
deets,
errs)
default:
err = clues.Wrap(clues.New(sels.Service.String()), "service not supported")
}

View File

@ -80,7 +80,12 @@ func ProduceBackupCollections(
dbcs, canUsePreviousBackup, err = site.CollectLibraries(
ctx,
sbpc,
drive.NewGroupBackupHandler(bpc.ProtectedResource.ID(), ac.Drives(), scope),
drive.NewGroupBackupHandler(
bpc.ProtectedResource.ID(),
ptr.Val(resp.GetId()),
ac.Drives(),
scope,
),
creds.AzureTenantID,
ssmb,
su,

View File

@ -49,7 +49,7 @@ func ProduceBackupCollections(
logger.Ctx(ctx).Debug("creating OneDrive collections")
nc := drive.NewCollections(
drive.NewItemBackupHandler(ac.Drives(), scope),
drive.NewItemBackupHandler(ac.Drives(), bpc.ProtectedResource.ID(), scope),
tenant,
bpc.ProtectedResource.ID(),
su,

View File

@ -3,6 +3,7 @@ package onedrive
import "github.com/alcionai/corso/src/pkg/path"
const (
SitesPathDir = "sites"
// const used as the root dir for the drive portion of a path prefix.
// eg: tid/onedrive/ro/files/drives/driveid/...
DrivesPathDir = "drives"

View File

@ -2,14 +2,11 @@ package onedrive
import (
"context"
"strings"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/export"
@ -17,121 +14,6 @@ import (
"github.com/alcionai/corso/src/pkg/path"
)
var _ export.Collection = &exportCollection{}
// exportCollection is the implementation of export.ExportCollection for OneDrive
type exportCollection struct {
// baseDir contains the path of the collection
baseDir string
// backingCollection is the restore collection from which we will
// create the export collection.
backingCollection data.RestoreCollection
// backupVersion is the backupVersion of the backup this collection was part
// of. This is required to figure out how to get the name of the
// item.
backupVersion int
}
func (ec exportCollection) BasePath() string {
return ec.baseDir
}
func (ec exportCollection) Items(ctx context.Context) <-chan export.Item {
ch := make(chan export.Item)
go items(ctx, ec, ch)
return ch
}
// items converts items in backing collection to export items
func items(ctx context.Context, ec exportCollection, ch chan<- export.Item) {
defer close(ch)
errs := fault.New(false)
// There will only be a single item in the backingCollections
// for OneDrive
for item := range ec.backingCollection.Items(ctx, errs) {
itemUUID := item.ID()
if isMetadataFile(itemUUID, ec.backupVersion) {
continue
}
name, err := getItemName(ctx, itemUUID, ec.backupVersion, ec.backingCollection)
ch <- export.Item{
ID: itemUUID,
Data: export.ItemData{
Name: name,
Body: item.ToReader(),
},
Error: err,
}
}
eitems, erecovereable := errs.ItemsAndRecovered()
// Return all the items that we failed to get from kopia at the end
for _, err := range eitems {
ch <- export.Item{
ID: err.ID,
Error: &err,
}
}
for _, ec := range erecovereable {
ch <- export.Item{
Error: ec,
}
}
}
// isMetadataFile is used to determine if a path corresponds to a
// metadata file. This is OneDrive specific logic and depends on the
// version of the backup unlike metadata.IsMetadataFile which only has
// to be concerned about the current version.
func isMetadataFile(id string, backupVersion int) bool {
if backupVersion < version.OneDrive1DataAndMetaFiles {
return false
}
return strings.HasSuffix(id, metadata.MetaFileSuffix) ||
strings.HasSuffix(id, metadata.DirMetaFileSuffix)
}
// getItemName is used to get the name of the item.
// How we get the name depends on the version of the backup.
func getItemName(
ctx context.Context,
id string,
backupVersion int,
fin data.FetchItemByNamer,
) (string, error) {
if backupVersion < version.OneDrive1DataAndMetaFiles {
return id, nil
}
if backupVersion < version.OneDrive5DirMetaNoName {
return strings.TrimSuffix(id, metadata.DataFileSuffix), nil
}
if strings.HasSuffix(id, metadata.DataFileSuffix) {
trimmedName := strings.TrimSuffix(id, metadata.DataFileSuffix)
metaName := trimmedName + metadata.MetaFileSuffix
meta, err := drive.FetchAndReadMetadata(ctx, fin, metaName)
if err != nil {
return "", clues.Wrap(err, "getting metadata").WithClues(ctx)
}
return meta.FileName, nil
}
return "", clues.New("invalid item id").WithClues(ctx)
}
// ProduceExportCollections will create the export collections for the
// given restore collections.
func ProduceExportCollections(
@ -156,11 +38,7 @@ func ProduceExportCollections(
baseDir := path.Builder{}.Append(drivePath.Folders...)
ec = append(ec, exportCollection{
baseDir: baseDir.String(),
backingCollection: dc,
backupVersion: backupVersion,
})
ec = append(ec, drive.NewExportCollection(baseDir.String(), dc, backupVersion))
}
return ec, el.Failure()

View File

@ -11,7 +11,7 @@ import (
"github.com/alcionai/corso/src/internal/data"
dataMock "github.com/alcionai/corso/src/internal/data/mock"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
"github.com/alcionai/corso/src/internal/tester"
@ -30,45 +30,6 @@ func TestExportUnitSuite(t *testing.T) {
suite.Run(t, &ExportUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *ExportUnitSuite) TestIsMetadataFile() {
table := []struct {
name string
id string
backupVersion int
isMeta bool
}{
{
name: "legacy",
backupVersion: version.OneDrive1DataAndMetaFiles,
isMeta: false,
},
{
name: "metadata file",
backupVersion: version.OneDrive3IsMetaMarker,
id: "name" + metadata.MetaFileSuffix,
isMeta: true,
},
{
name: "dir metadata file",
backupVersion: version.OneDrive3IsMetaMarker,
id: "name" + metadata.DirMetaFileSuffix,
isMeta: true,
},
{
name: "non metadata file",
backupVersion: version.OneDrive3IsMetaMarker,
id: "name" + metadata.DataFileSuffix,
isMeta: false,
},
}
for _, test := range table {
suite.Run(test.name, func() {
assert.Equal(suite.T(), test.isMeta, isMetadataFile(test.id, test.backupVersion), "is metadata")
})
}
}
type finD struct {
id string
name string
@ -90,66 +51,6 @@ func (fd finD) FetchItemByName(ctx context.Context, name string) (data.Item, err
return nil, assert.AnError
}
func (suite *ExportUnitSuite) TestGetItemName() {
table := []struct {
tname string
id string
backupVersion int
name string
fin data.FetchItemByNamer
errFunc assert.ErrorAssertionFunc
}{
{
tname: "legacy",
id: "name",
backupVersion: version.OneDrive1DataAndMetaFiles,
name: "name",
errFunc: assert.NoError,
},
{
tname: "name in filename",
id: "name.data",
backupVersion: version.OneDrive4DirIncludesPermissions,
name: "name",
errFunc: assert.NoError,
},
{
tname: "name in metadata",
id: "id.data",
backupVersion: version.Backup,
name: "name",
fin: finD{id: "id.meta", name: "name"},
errFunc: assert.NoError,
},
{
tname: "name in metadata but error",
id: "id.data",
backupVersion: version.Backup,
name: "",
fin: finD{err: assert.AnError},
errFunc: assert.Error,
},
}
for _, test := range table {
suite.Run(test.tname, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
name, err := getItemName(
ctx,
test.id,
test.backupVersion,
test.fin)
test.errFunc(t, err)
assert.Equal(t, test.name, name, "name")
})
}
}
type mockRestoreCollection struct {
path path.Path
items []*dataMock.Item
@ -391,11 +292,7 @@ func (suite *ExportUnitSuite) TestGetItems() {
ctx, flush := tester.NewContext(t)
defer flush()
ec := exportCollection{
baseDir: "",
backingCollection: test.backingCollection,
backupVersion: test.version,
}
ec := drive.NewExportCollection("", test.backingCollection, test.version)
items := ec.Items(ctx)

View File

@ -31,12 +31,13 @@ type BackupHandler struct {
CanonPathFn canonPather
CanonPathErr error
Service path.ServiceType
Category path.CategoryType
ResourceOwner string
Service path.ServiceType
Category path.CategoryType
DrivePagerV api.DrivePager
DrivePagerV api.Pager[models.Driveable]
// driveID -> itemPager
ItemPagerV map[string]api.DriveItemDeltaEnumerator
ItemPagerV map[string]api.DeltaPager[models.DriveItemable]
LocationIDFn locationIDer
@ -45,44 +46,46 @@ type BackupHandler struct {
GetErrs []error
}
func DefaultOneDriveBH() *BackupHandler {
func DefaultOneDriveBH(resourceOwner string) *BackupHandler {
return &BackupHandler{
ItemInfo: details.ItemInfo{
OneDrive: &details.OneDriveInfo{},
Extension: &details.ExtensionData{},
},
GI: GetsItem{Err: clues.New("not defined")},
GIP: GetsItemPermission{Err: clues.New("not defined")},
PathPrefixFn: defaultOneDrivePathPrefixer,
CanonPathFn: defaultOneDriveCanonPather,
Service: path.OneDriveService,
Category: path.FilesCategory,
LocationIDFn: defaultOneDriveLocationIDer,
GetResps: []*http.Response{nil},
GetErrs: []error{clues.New("not defined")},
GI: GetsItem{Err: clues.New("not defined")},
GIP: GetsItemPermission{Err: clues.New("not defined")},
PathPrefixFn: defaultOneDrivePathPrefixer,
CanonPathFn: defaultOneDriveCanonPather,
ResourceOwner: resourceOwner,
Service: path.OneDriveService,
Category: path.FilesCategory,
LocationIDFn: defaultOneDriveLocationIDer,
GetResps: []*http.Response{nil},
GetErrs: []error{clues.New("not defined")},
}
}
func DefaultSharePointBH() *BackupHandler {
func DefaultSharePointBH(resourceOwner string) *BackupHandler {
return &BackupHandler{
ItemInfo: details.ItemInfo{
SharePoint: &details.SharePointInfo{},
Extension: &details.ExtensionData{},
},
GI: GetsItem{Err: clues.New("not defined")},
GIP: GetsItemPermission{Err: clues.New("not defined")},
PathPrefixFn: defaultSharePointPathPrefixer,
CanonPathFn: defaultSharePointCanonPather,
Service: path.SharePointService,
Category: path.LibrariesCategory,
LocationIDFn: defaultSharePointLocationIDer,
GetResps: []*http.Response{nil},
GetErrs: []error{clues.New("not defined")},
GI: GetsItem{Err: clues.New("not defined")},
GIP: GetsItemPermission{Err: clues.New("not defined")},
PathPrefixFn: defaultSharePointPathPrefixer,
CanonPathFn: defaultSharePointCanonPather,
ResourceOwner: resourceOwner,
Service: path.SharePointService,
Category: path.LibrariesCategory,
LocationIDFn: defaultSharePointLocationIDer,
GetResps: []*http.Response{nil},
GetErrs: []error{clues.New("not defined")},
}
}
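A short sketch of how the mock's new ResourceOwner field flows into path construction (the IDs below are placeholders):
// Sketch only: placeholder IDs; shows ResourceOwner feeding the prefix.
func exampleMockPathPrefix() (path.Path, error) {
	mbh := DefaultOneDriveBH("user-id")

	// The resource owner supplied to the constructor is injected into the
	// prefix, so callers no longer pass it on every PathPrefix call.
	return mbh.PathPrefix("tenant-id", "drive-id")
}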
func (h BackupHandler) PathPrefix(tID, ro, driveID string) (path.Path, error) {
pp, err := h.PathPrefixFn(tID, ro, driveID)
func (h BackupHandler) PathPrefix(tID, driveID string) (path.Path, error) {
pp, err := h.PathPrefixFn(tID, h.ResourceOwner, driveID)
if err != nil {
return nil, err
}
@ -90,8 +93,8 @@ func (h BackupHandler) PathPrefix(tID, ro, driveID string) (path.Path, error) {
return pp, h.PathPrefixErr
}
func (h BackupHandler) CanonicalPath(pb *path.Builder, tID, ro string) (path.Path, error) {
cp, err := h.CanonPathFn(pb, tID, ro)
func (h BackupHandler) CanonicalPath(pb *path.Builder, tID string) (path.Path, error) {
cp, err := h.CanonPathFn(pb, tID, h.ResourceOwner)
if err != nil {
return nil, err
}
@ -103,11 +106,11 @@ func (h BackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
return h.Service, h.Category
}
func (h BackupHandler) NewDrivePager(string, []string) api.DrivePager {
func (h BackupHandler) NewDrivePager(string, []string) api.Pager[models.Driveable] {
return h.DrivePagerV
}
func (h BackupHandler) NewItemPager(driveID string, _ string, _ []string) api.DriveItemDeltaEnumerator {
func (h BackupHandler) NewItemPager(driveID string, _ string, _ []string) api.DeltaPager[models.DriveItemable] {
return h.ItemPagerV[driveID]
}
@ -249,7 +252,7 @@ type RestoreHandler struct {
PostItemResp models.DriveItemable
PostItemErr error
DrivePagerV api.DrivePager
DrivePagerV api.Pager[models.Driveable]
PostDriveResp models.Driveable
PostDriveErr error
@ -264,7 +267,7 @@ func (h RestoreHandler) PostDrive(
return h.PostDriveResp, h.PostDriveErr
}
func (h RestoreHandler) NewDrivePager(string, []string) api.DrivePager {
func (h RestoreHandler) NewDrivePager(string, []string) api.Pager[models.Driveable] {
return h.DrivePagerV
}

View File

@ -80,7 +80,11 @@ func ProduceBackupCollections(
spcs, canUsePreviousBackup, err = site.CollectLibraries(
ctx,
bpc,
drive.NewLibraryBackupHandler(ac.Drives(), scope, bpc.Selector.PathService()),
drive.NewLibraryBackupHandler(
ac.Drives(),
bpc.ProtectedResource.ID(),
scope,
bpc.Selector.PathService()),
creds.AzureTenantID,
ssmb,
su,

View File

@ -50,8 +50,8 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
)
pb := path.Builder{}.Append(testBaseDrivePath.Elements()...)
ep, err := drive.NewLibraryBackupHandler(api.Drives{}, nil, path.SharePointService).
CanonicalPath(pb, tenantID, siteID)
ep, err := drive.NewLibraryBackupHandler(api.Drives{}, siteID, nil, path.SharePointService).
CanonicalPath(pb, tenantID)
require.NoError(suite.T(), err, clues.ToCore(err))
tests := []struct {
@ -101,7 +101,7 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
)
c := drive.NewCollections(
drive.NewLibraryBackupHandler(api.Drives{}, test.scope, path.SharePointService),
drive.NewLibraryBackupHandler(api.Drives{}, siteID, test.scope, path.SharePointService),
tenantID,
siteID,
nil,

View File

@ -0,0 +1,58 @@
package sharepoint
import (
"context"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/drive"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/export"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
)
// ProduceExportCollections will create the export collections for the
// given restore collections.
func ProduceExportCollections(
ctx context.Context,
backupVersion int,
exportCfg control.ExportConfig,
opts control.Options,
dcs []data.RestoreCollection,
backupDriveIDNames idname.CacheBuilder,
deets *details.Builder,
errs *fault.Bus,
) ([]export.Collection, error) {
var (
el = errs.Local()
ec = make([]export.Collection, 0, len(dcs))
)
for _, dc := range dcs {
drivePath, err := path.ToDrivePath(dc.FullPath())
if err != nil {
return nil, clues.Wrap(err, "transforming path to drive path").WithClues(ctx)
}
driveName, ok := backupDriveIDNames.NameOf(drivePath.DriveID)
if !ok {
// This should not happen, but just in case
logger.Ctx(ctx).With("drive_id", drivePath.DriveID).Info("drive name not found, using drive id")
driveName = drivePath.DriveID
}
baseDir := path.Builder{}.
Append("Libraries").
Append(driveName).
Append(drivePath.Folders...)
ec = append(ec, drive.NewExportCollection(baseDir.String(), dc, backupVersion))
}
return ec, el.Failure()
}

View File

@ -0,0 +1,154 @@
package sharepoint
import (
"bytes"
"context"
"io"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/data"
dataMock "github.com/alcionai/corso/src/internal/data/mock"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/export"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
)
type ExportUnitSuite struct {
tester.Suite
}
func TestExportUnitSuite(t *testing.T) {
suite.Run(t, &ExportUnitSuite{Suite: tester.NewUnitSuite(t)})
}
type finD struct {
id string
name string
err error
}
func (fd finD) FetchItemByName(ctx context.Context, name string) (data.Item, error) {
if fd.err != nil {
return nil, fd.err
}
if name == fd.id {
return &dataMock.Item{
ItemID: fd.id,
Reader: io.NopCloser(bytes.NewBufferString(`{"filename": "` + fd.name + `"}`)),
}, nil
}
return nil, assert.AnError
}
type mockRestoreCollection struct {
path path.Path
items []*dataMock.Item
}
func (rc mockRestoreCollection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
ch := make(chan data.Item)
go func() {
defer close(ch)
el := errs.Local()
for _, item := range rc.items {
if item.ReadErr != nil {
el.AddRecoverable(ctx, item.ReadErr)
continue
}
ch <- item
}
}()
return ch
}
func (rc mockRestoreCollection) FullPath() path.Path {
return rc.path
}
func (suite *ExportUnitSuite) TestExportRestoreCollections() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
driveID = "driveID1"
driveName = "driveName1"
exportCfg = control.ExportConfig{}
dpb = odConsts.DriveFolderPrefixBuilder(driveID)
cache = idname.NewCache(
// Cache check with lowercased ids
map[string]string{strings.ToLower(driveID): driveName},
)
dii = odStub.DriveItemInfo()
expectedPath = "Libraries/" + driveName
expectedItems = []export.Item{
{
ID: "id1.data",
Data: export.ItemData{
Name: "name1",
Body: io.NopCloser((bytes.NewBufferString("body1"))),
},
},
}
)
dii.OneDrive.ItemName = "name1"
p, err := dpb.ToDataLayerOneDrivePath("t", "u", false)
assert.NoError(t, err, "build path")
dcs := []data.RestoreCollection{
data.FetchRestoreCollection{
Collection: mockRestoreCollection{
path: p,
items: []*dataMock.Item{
{
ItemID: "id1.data",
Reader: io.NopCloser(bytes.NewBufferString("body1")),
ItemInfo: dii,
},
},
},
FetchItemByNamer: finD{id: "id1.meta", name: "name1"},
},
}
ecs, err := ProduceExportCollections(
ctx,
int(version.Backup),
exportCfg,
control.DefaultOptions(),
dcs,
cache,
nil,
fault.New(true))
assert.NoError(t, err, "export collections error")
assert.Len(t, ecs, 1, "num of collections")
assert.Equal(t, expectedPath, ecs[0].BasePath(), "base dir")
fitems := []export.Item{}
for item := range ecs[0].Items(ctx) {
fitems = append(fitems, item)
}
assert.Equal(t, expectedItems, fitems, "items")
}

View File

@ -492,7 +492,10 @@ func consumeBackupCollections(
isIncremental bool,
errs *fault.Bus,
) (*kopia.BackupStats, *details.Builder, kopia.DetailsMergeInfoer, error) {
ctx = clues.Add(ctx, "collection_source", "operations")
ctx = clues.Add(
ctx,
"collection_source", "operations",
"snapshot_type", "item data")
progressBar := observe.MessageWithCompletion(ctx, "Backing up data")
defer close(progressBar)

View File

@ -61,6 +61,8 @@ func (ss *storeStreamer) Collect(ctx context.Context, col Collectable) error {
// Write persists the collected objects in the stream store
func (ss *storeStreamer) Write(ctx context.Context, errs *fault.Bus) (string, error) {
ctx = clues.Add(ctx, "snapshot_type", "stream store")
id, err := write(ctx, ss.kw, ss.dbcs, errs)
if err != nil {
return "", clues.Wrap(err, "writing to stream store")

View File

@ -3,7 +3,6 @@ package tester
import (
"context"
"os"
"testing"
"github.com/alcionai/clues"
"github.com/google/uuid"
@ -11,7 +10,7 @@ import (
"github.com/alcionai/corso/src/pkg/logger"
)
func NewContext(t *testing.T) (context.Context, func()) {
func NewContext(t TestT) (context.Context, func()) {
level := logger.LLInfo
format := logger.LFText
@ -34,7 +33,7 @@ func NewContext(t *testing.T) (context.Context, func()) {
}
func WithContext(
t *testing.T,
t TestT,
ctx context.Context, //revive:disable-line:context-as-argument
) (context.Context, func()) {
ls := logger.Settings{
@ -48,7 +47,7 @@ func WithContext(
}
func enrichTestCtx(
t *testing.T,
t TestT,
ctx context.Context, //revive:disable-line:context-as-argument
) context.Context {
if t == nil {

View File

@ -7,6 +7,7 @@ import (
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// AreSameFunc asserts whether the two funcs are the same func.
@ -26,8 +27,15 @@ func AreSameFunc(t *testing.T, expect, have any) {
)
}
type TestT interface {
Logf(format string, args ...any)
Name() string
TempDir() string
require.TestingT
}
// LogTimeOfTest logs the test name and the time that it was run.
func LogTimeOfTest(t *testing.T) string {
func LogTimeOfTest(t TestT) string {
now := time.Now().UTC().Format(time.RFC3339Nano)
name := t.Name()

View File

@ -49,7 +49,7 @@ type GroupsInfo struct {
// Channels Specific
ChannelName string `json:"channelName,omitempty"`
ChannelID string `json:"channelID,omitempty"`
LastResponseAt time.Time `json:"lastResponseAt,omitempty"`
LastReplyAt time.Time `json:"lastResponseAt,omitempty"`
MessageCreator string `json:"messageCreator,omitempty"`
MessagePreview string `json:"messagePreview,omitempty"`
ReplyCount int `json:"replyCount,omitempty"`

View File

@ -306,6 +306,7 @@ func (pb Builder) ToDataLayerPath(
service ServiceType,
category CategoryType,
isItem bool,
elems ...string,
) (Path, error) {
if err := ValidateServiceAndCategory(service, category); err != nil {
return nil, err
@ -315,12 +316,15 @@ func (pb Builder) ToDataLayerPath(
return nil, err
}
prefixItems := append([]string{
tenant,
service.String(),
user,
category.String(),
}, elems...)
return &dataLayerResourcePath{
Builder: *pb.withPrefix(
tenant,
service.String(),
user,
category.String()),
Builder: *pb.withPrefix(prefixItems...),
service: service,
category: category,
hasItem: isItem,

View File

@ -367,3 +367,39 @@ func (suite *BuilderUnitSuite) TestPIIHandling() {
})
}
}
func (suite *BuilderUnitSuite) TestToDataLayerPath() {
location := Builder{}.Append("foo", "bar")
table := []struct {
name string
extra []string
expect string
}{
{
name: "no extra",
extra: []string{},
expect: "t/onedrive/u/files/foo/bar",
},
{
name: "single extra",
extra: []string{"oof"},
expect: "t/onedrive/u/files/oof/foo/bar",
},
{
name: "multi extra",
extra: []string{"oof", "rab"},
expect: "t/onedrive/u/files/oof/rab/foo/bar",
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
dlp, err := location.ToDataLayerPath("t", "u", OneDriveService, FilesCategory, false, test.extra...)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, test.expect, dlp.PlainString())
})
}
}

View File

@ -126,7 +126,7 @@ func (suite *RepositoryIntegrationSuite) TestInitialize() {
table := []struct {
name string
account account.Account
storage func(*testing.T) storage.Storage
storage func(tester.TestT) storage.Storage
errCheck assert.ErrorAssertionFunc
}{
{

View File

@ -335,7 +335,7 @@ func (s *exchange) AllData() []ExchangeScope {
}
// -------------------
// Info Factories
// ItemInfo Factories
// ContactName produces one or more exchange contact name info scopes.
// Matches any contact whose name contains the provided string.
@ -352,7 +352,7 @@ func (sr *ExchangeRestore) ContactName(senderID string) []ExchangeScope {
}
}
// EventSubject produces one or more exchange event subject info scopes.
// EventOrganizer produces one or more exchange event organizer info scopes.
// Matches any event where the event organizer contains one of the provided strings.
// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
// If any slice contains selectors.None, that slice is reduced to [selectors.None]

View File

@ -6,6 +6,7 @@ import (
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/fault"
@ -242,7 +243,7 @@ func (s *groups) Channel(channel string) []GroupsScope {
// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
// If any slice contains selectors.None, that slice is reduced to [selectors.None]
// If any slice is empty, it defaults to [selectors.None]
func (s *sharePoint) ChannelMessages(channels, messages []string, opts ...option) []GroupsScope {
func (s *groups) ChannelMessages(channels, messages []string, opts ...option) []GroupsScope {
var (
scopes = []GroupsScope{}
os = append([]option{pathComparator()}, opts...)
@ -309,7 +310,76 @@ func (s *groups) LibraryItems(libraries, items []string, opts ...option) []Group
// -------------------
// ItemInfo Factories
// TODO
// MessageCreator produces one or more groups channel message info scopes.
// Matches any channel message created by the specified user.
// If the input equals selectors.Any, the scope will match all creators.
// If the input is empty or selectors.None, the scope will always fail comparisons.
func (s *GroupsRestore) MessageCreator(creator string) []GroupsScope {
return []GroupsScope{
makeInfoScope[GroupsScope](
GroupsChannelMessage,
GroupsInfoChannelMessageCreator,
[]string{creator},
filters.In),
}
}
// MessageCreatedAfter produces a channel message created-after info scope.
// Matches any message where the creation time is after the timestring.
// If the input equals selectors.Any, the scope will match all times.
// If the input is empty or selectors.None, the scope will always fail comparisons.
func (s *GroupsRestore) MessageCreatedAfter(timeStrings string) []GroupsScope {
return []GroupsScope{
makeInfoScope[GroupsScope](
GroupsChannelMessage,
GroupsInfoChannelMessageCreatedAfter,
[]string{timeStrings},
filters.Less),
}
}
// MessageCreatedBefore produces a channel message created-before info scope.
// Matches any message where the creation time is before the timestring.
// If the input equals selectors.Any, the scope will match all times.
// If the input is empty or selectors.None, the scope will always fail comparisons.
func (s *GroupsRestore) MessageCreatedBefore(timeStrings string) []GroupsScope {
return []GroupsScope{
makeInfoScope[GroupsScope](
GroupsChannelMessage,
GroupsInfoChannelMessageCreatedBefore,
[]string{timeStrings},
filters.Greater),
}
}
// MessageLastReplyAfter produces a channel message last-reply-after info scope.
// Matches any message where the last reply time is after the timestring.
// If the input equals selectors.Any, the scope will match all times.
// If the input is empty or selectors.None, the scope will always fail comparisons.
func (s *GroupsRestore) MessageLastReplyAfter(timeStrings string) []GroupsScope {
return []GroupsScope{
makeInfoScope[GroupsScope](
GroupsChannelMessage,
GroupsInfoChannelMessageLastReplyAfter,
[]string{timeStrings},
filters.Less),
}
}
// MessageLastReplyBefore produces a channel message last-reply-before info scope.
// Matches any message where the last reply time is before the timestring.
// If the input equals selectors.Any, the scope will match all times.
// If the input is empty or selectors.None, the scope will always fail comparisons.
func (s *GroupsRestore) MessageLastReplyBefore(timeStrings string) []GroupsScope {
return []GroupsScope{
makeInfoScope[GroupsScope](
GroupsChannelMessage,
GroupsInfoChannelMessageLastReplyBefore,
[]string{timeStrings},
filters.Greater),
}
}
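For orientation, a minimal sketch of combining these new channel-message info scopes when building a restore selector (assuming GroupsRestore exposes the same Include/Filter behavior as the other restore selectors in this package; the creator address and time window are placeholders):

func exampleRecentMessageSelector() *GroupsRestore {
	weekAgo := time.Now().AddDate(0, 0, -7)

	// include every channel message, then filter down to recent
	// messages authored by one (placeholder) creator.
	sel := NewGroupsRestore(Any())
	sel.Include(sel.ChannelMessages(Any(), Any()))
	sel.Filter(sel.MessageCreator("someone@example.com"))
	sel.Filter(sel.MessageCreatedAfter(dttm.Format(weekAgo)))

	return sel
}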
// ---------------------------------------------------------------------------
// Categories
@ -334,9 +404,16 @@ const (
// details.itemInfo comparables
// channel drive selection
// channel and drive selection
GroupsInfoSiteLibraryDrive groupsCategory = "GroupsInfoSiteLibraryDrive"
GroupsInfoChannel groupsCategory = "GroupsInfoChannel"
// data contained within details.ItemInfo
GroupsInfoChannelMessageCreatedAfter groupsCategory = "GroupsInfoChannelMessageCreatedAfter"
GroupsInfoChannelMessageCreatedBefore groupsCategory = "GroupsInfoChannelMessageCreatedBefore"
GroupsInfoChannelMessageCreator groupsCategory = "GroupsInfoChannelMessageCreator"
GroupsInfoChannelMessageLastReplyAfter groupsCategory = "GroupsInfoChannelMessageLastReplyAfter"
GroupsInfoChannelMessageLastReplyBefore groupsCategory = "GroupsInfoChannelMessageLastReplyBefore"
)
// groupsLeafProperties describes common metadata of the leaf categories
@ -368,7 +445,9 @@ func (c groupsCategory) leafCat() categorizer {
switch c {
// TODO: if channels ever contain more than one type of item,
// we'll need to fix this up.
case GroupsChannel, GroupsChannelMessage:
case GroupsChannel, GroupsChannelMessage,
GroupsInfoChannelMessageCreatedAfter, GroupsInfoChannelMessageCreatedBefore, GroupsInfoChannelMessageCreator,
GroupsInfoChannelMessageLastReplyAfter, GroupsInfoChannelMessageLastReplyBefore:
return GroupsChannelMessage
case GroupsLibraryFolder, GroupsLibraryItem, GroupsInfoSiteLibraryDrive:
return GroupsLibraryItem
@ -414,15 +493,15 @@ func (c groupsCategory) pathValues(
rFld string
)
if ent.Groups == nil {
return nil, clues.New("no Groups ItemInfo in details")
}
switch c {
case GroupsChannel, GroupsChannelMessage:
folderCat, itemCat = GroupsChannel, GroupsChannelMessage
rFld = ent.Groups.ParentPath
case GroupsLibraryFolder, GroupsLibraryItem:
if ent.Groups == nil {
return nil, clues.New("no Groups ItemInfo in details")
}
folderCat, itemCat = GroupsLibraryFolder, GroupsLibraryItem
rFld = ent.Groups.ParentPath
default:
@ -591,8 +670,23 @@ func (s GroupsScope) matchesInfo(dii details.ItemInfo) bool {
return matchesAny(s, GroupsInfoSiteLibraryDrive, ds)
case GroupsInfoChannel:
ds := Any()
ds := []string{}
if len(info.ChannelID) > 0 {
ds = append(ds, info.ChannelID)
}
if len(info.ChannelName) > 0 {
ds = append(ds, info.ChannelName)
}
return matchesAny(s, GroupsInfoChannel, ds)
case GroupsInfoChannelMessageCreator:
i = info.MessageCreator
case GroupsInfoChannelMessageCreatedAfter, GroupsInfoChannelMessageCreatedBefore:
i = dttm.Format(info.Created)
case GroupsInfoChannelMessageLastReplyAfter, GroupsInfoChannelMessageLastReplyBefore:
i = dttm.Format(info.LastReplyAt)
}
return s.Matches(infoCat, i)

View File

@ -1,15 +1,21 @@
package selectors
import (
"strings"
"testing"
"time"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/internal/common/dttm"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
)
@ -55,211 +61,236 @@ func (suite *GroupsSelectorSuite) TestToGroupsRestore() {
assert.NotZero(t, or.Scopes())
}
// TODO(rkeepers): implement
// func (suite *GroupsSelectorSuite) TestGroupsRestore_Reduce() {
// toRR := func(cat path.CategoryType, siteID string, folders []string, item string) string {
// folderElems := make([]string, 0, len(folders))
func (suite *GroupsSelectorSuite) TestGroupsRestore_Reduce() {
toRR := func(cat path.CategoryType, midID string, folders []string, item string) string {
var (
folderElems = make([]string, 0, len(folders))
isDrive = cat == path.LibrariesCategory
)
// for _, f := range folders {
// folderElems = append(folderElems, f+".d")
// }
for _, f := range folders {
if isDrive {
f = f + ".d"
}
// return stubRepoRef(
// path.GroupsService,
// cat,
// siteID,
// strings.Join(folderElems, "/"),
// item)
// }
folderElems = append(folderElems, f)
}
// var (
// prefixElems = []string{
// odConsts.DrivesPathDir,
// "drive!id",
// odConsts.RootPathDir,
// }
// itemElems1 = []string{"folderA", "folderB"}
// itemElems2 = []string{"folderA", "folderC"}
// itemElems3 = []string{"folderD", "folderE"}
// pairAC = "folderA/folderC"
// pairGH = "folderG/folderH"
// item = toRR(
// path.LibrariesCategory,
// "sid",
// append(slices.Clone(prefixElems), itemElems1...),
// "item")
// item2 = toRR(
// path.LibrariesCategory,
// "sid",
// append(slices.Clone(prefixElems), itemElems2...),
// "item2")
// item3 = toRR(
// path.LibrariesCategory,
// "sid",
// append(slices.Clone(prefixElems), itemElems3...),
// "item3")
// item4 = stubRepoRef(path.GroupsService, path.PagesCategory, "sid", pairGH, "item4")
// item5 = stubRepoRef(path.GroupsService, path.PagesCategory, "sid", pairGH, "item5")
// )
return stubRepoRef(
path.GroupsService,
cat,
midID,
strings.Join(folderElems, "/"),
item)
}
// deets := &details.Details{
// DetailsModel: details.DetailsModel{
// Entries: []details.Entry{
// {
// RepoRef: item,
// ItemRef: "item",
// LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems1...), "/"),
// ItemInfo: details.ItemInfo{
// Groups: &details.GroupsInfo{
// ItemType: details.GroupsLibrary,
// ItemName: "itemName",
// ParentPath: strings.Join(itemElems1, "/"),
// },
// },
// },
// {
// RepoRef: item2,
// LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems2...), "/"),
// // ItemRef intentionally blank to test fallback case
// ItemInfo: details.ItemInfo{
// Groups: &details.GroupsInfo{
// ItemType: details.GroupsLibrary,
// ItemName: "itemName2",
// ParentPath: strings.Join(itemElems2, "/"),
// },
// },
// },
// {
// RepoRef: item3,
// ItemRef: "item3",
// LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems3...), "/"),
// ItemInfo: details.ItemInfo{
// Groups: &details.GroupsInfo{
// ItemType: details.GroupsLibrary,
// ItemName: "itemName3",
// ParentPath: strings.Join(itemElems3, "/"),
// },
// },
// },
// {
// RepoRef: item4,
// LocationRef: pairGH,
// ItemRef: "item4",
// ItemInfo: details.ItemInfo{
// Groups: &details.GroupsInfo{
// ItemType: details.GroupsPage,
// ItemName: "itemName4",
// ParentPath: pairGH,
// },
// },
// },
// {
// RepoRef: item5,
// LocationRef: pairGH,
// // ItemRef intentionally blank to test fallback case
// ItemInfo: details.ItemInfo{
// Groups: &details.GroupsInfo{
// ItemType: details.GroupsPage,
// ItemName: "itemName5",
// ParentPath: pairGH,
// },
// },
// },
// },
// },
// }
var (
drivePrefixElems = []string{
odConsts.DrivesPathDir,
"drive!id",
odConsts.RootPathDir,
}
itemElems1 = []string{"folderA", "folderB"}
itemElems2 = []string{"folderA", "folderC"}
itemElems3 = []string{"folderD", "folderE"}
pairAC = "folderA/folderC"
libItem = toRR(
path.LibrariesCategory,
"sid",
append(slices.Clone(drivePrefixElems), itemElems1...),
"item")
libItem2 = toRR(
path.LibrariesCategory,
"sid",
append(slices.Clone(drivePrefixElems), itemElems2...),
"item2")
libItem3 = toRR(
path.LibrariesCategory,
"sid",
append(slices.Clone(drivePrefixElems), itemElems3...),
"item3")
chanItem = toRR(path.ChannelMessagesCategory, "gid", slices.Clone(itemElems1), "chitem")
chanItem2 = toRR(path.ChannelMessagesCategory, "gid", slices.Clone(itemElems2), "chitem2")
chanItem3 = toRR(path.ChannelMessagesCategory, "gid", slices.Clone(itemElems3), "chitem3")
)
// arr := func(s ...string) []string {
// return s
// }
deets := &details.Details{
DetailsModel: details.DetailsModel{
Entries: []details.Entry{
{
RepoRef: libItem,
ItemRef: "item",
LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems1...), "/"),
ItemInfo: details.ItemInfo{
Groups: &details.GroupsInfo{
ItemType: details.SharePointLibrary,
ItemName: "itemName",
ParentPath: strings.Join(itemElems1, "/"),
},
},
},
{
RepoRef: libItem2,
LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems2...), "/"),
// ItemRef intentionally blank to test fallback case
ItemInfo: details.ItemInfo{
Groups: &details.GroupsInfo{
ItemType: details.SharePointLibrary,
ItemName: "itemName2",
ParentPath: strings.Join(itemElems2, "/"),
},
},
},
{
RepoRef: libItem3,
ItemRef: "item3",
LocationRef: strings.Join(append([]string{odConsts.RootPathDir}, itemElems3...), "/"),
ItemInfo: details.ItemInfo{
Groups: &details.GroupsInfo{
ItemType: details.SharePointLibrary,
ItemName: "itemName3",
ParentPath: strings.Join(itemElems3, "/"),
},
},
},
{
RepoRef: chanItem,
ItemRef: "citem",
LocationRef: strings.Join(itemElems1, "/"),
ItemInfo: details.ItemInfo{
Groups: &details.GroupsInfo{
ItemType: details.TeamsChannelMessage,
ParentPath: strings.Join(itemElems1, "/"),
},
},
},
{
RepoRef: chanItem2,
LocationRef: strings.Join(itemElems2, "/"),
// ItemRef intentionally blank to test fallback case
ItemInfo: details.ItemInfo{
Groups: &details.GroupsInfo{
ItemType: details.TeamsChannelMessage,
ParentPath: strings.Join(itemElems2, "/"),
},
},
},
{
RepoRef: chanItem3,
ItemRef: "citem3",
LocationRef: strings.Join(itemElems3, "/"),
ItemInfo: details.ItemInfo{
Groups: &details.GroupsInfo{
ItemType: details.TeamsChannelMessage,
ParentPath: strings.Join(itemElems3, "/"),
},
},
},
},
},
}
// table := []struct {
// name string
// makeSelector func() *GroupsRestore
// expect []string
// cfg Config
// }{
// {
// name: "all",
// makeSelector: func() *GroupsRestore {
// odr := NewGroupsRestore(Any())
// odr.Include(odr.AllData())
// return odr
// },
// expect: arr(item, item2, item3, item4, item5),
// },
// {
// name: "only match item",
// makeSelector: func() *GroupsRestore {
// odr := NewGroupsRestore(Any())
// odr.Include(odr.LibraryItems(Any(), []string{"item2"}))
// return odr
// },
// expect: arr(item2),
// },
// {
// name: "id doesn't match name",
// makeSelector: func() *GroupsRestore {
// odr := NewGroupsRestore(Any())
// odr.Include(odr.LibraryItems(Any(), []string{"item2"}))
// return odr
// },
// expect: []string{},
// cfg: Config{OnlyMatchItemNames: true},
// },
// {
// name: "only match item name",
// makeSelector: func() *GroupsRestore {
// odr := NewGroupsRestore(Any())
// odr.Include(odr.LibraryItems(Any(), []string{"itemName2"}))
// return odr
// },
// expect: arr(item2),
// cfg: Config{OnlyMatchItemNames: true},
// },
// {
// name: "name doesn't match",
// makeSelector: func() *GroupsRestore {
// odr := NewGroupsRestore(Any())
// odr.Include(odr.LibraryItems(Any(), []string{"itemName2"}))
// return odr
// },
// expect: []string{},
// },
// {
// name: "only match folder",
// makeSelector: func() *GroupsRestore {
// odr := NewGroupsRestore([]string{"sid"})
// odr.Include(odr.LibraryFolders([]string{"folderA/folderB", pairAC}))
// return odr
// },
// expect: arr(item, item2),
// },
// {
// name: "pages match folder",
// makeSelector: func() *GroupsRestore {
// odr := NewGroupsRestore([]string{"sid"})
// odr.Include(odr.Pages([]string{pairGH, pairAC}))
// return odr
// },
// expect: arr(item4, item5),
// },
// }
// for _, test := range table {
// suite.Run(test.name, func() {
// t := suite.T()
arr := func(s ...string) []string {
return s
}
// ctx, flush := tester.NewContext(t)
// defer flush()
table := []struct {
name string
makeSelector func() *GroupsRestore
expect []string
cfg Config
}{
{
name: "all",
makeSelector: func() *GroupsRestore {
sel := NewGroupsRestore(Any())
sel.Include(sel.AllData())
return sel
},
expect: arr(libItem, libItem2, libItem3, chanItem, chanItem2, chanItem3),
},
{
name: "only match library item",
makeSelector: func() *GroupsRestore {
sel := NewGroupsRestore(Any())
sel.Include(sel.LibraryItems(Any(), []string{"item2"}))
return sel
},
expect: arr(libItem2),
},
{
name: "only match channel item",
makeSelector: func() *GroupsRestore {
sel := NewGroupsRestore(Any())
sel.Include(sel.ChannelMessages(Any(), []string{"chitem2"}))
return sel
},
expect: arr(chanItem2),
},
{
name: "library id doesn't match name",
makeSelector: func() *GroupsRestore {
sel := NewGroupsRestore(Any())
sel.Include(sel.LibraryItems(Any(), []string{"item2"}))
return sel
},
expect: []string{},
cfg: Config{OnlyMatchItemNames: true},
},
{
name: "channel id doesn't match name",
makeSelector: func() *GroupsRestore {
sel := NewGroupsRestore(Any())
sel.Include(sel.ChannelMessages(Any(), []string{"item2"}))
return sel
},
expect: []string{},
cfg: Config{OnlyMatchItemNames: true},
},
{
name: "library only match item name",
makeSelector: func() *GroupsRestore {
sel := NewGroupsRestore(Any())
sel.Include(sel.LibraryItems(Any(), []string{"itemName2"}))
return sel
},
expect: arr(libItem2),
cfg: Config{OnlyMatchItemNames: true},
},
{
name: "name doesn't match",
makeSelector: func() *GroupsRestore {
sel := NewGroupsRestore(Any())
sel.Include(sel.LibraryItems(Any(), []string{"itemName2"}))
return sel
},
expect: []string{},
},
{
name: "only match folder",
makeSelector: func() *GroupsRestore {
sel := NewGroupsRestore([]string{"sid"})
sel.Include(sel.LibraryFolders([]string{"folderA/folderB", pairAC}))
return sel
},
expect: arr(libItem, libItem2),
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
// sel := test.makeSelector()
// sel.Configure(test.cfg)
// results := sel.Reduce(ctx, deets, fault.New(true))
// paths := results.Paths()
// assert.Equal(t, test.expect, paths)
// })
// }
// }
ctx, flush := tester.NewContext(t)
defer flush()
sel := test.makeSelector()
sel.Configure(test.cfg)
results := sel.Reduce(ctx, deets, fault.New(true))
paths := results.Paths()
assert.Equal(t, test.expect, paths)
})
}
}
func (suite *GroupsSelectorSuite) TestGroupsCategory_PathValues() {
var (
@ -324,91 +355,111 @@ func (suite *GroupsSelectorSuite) TestGroupsCategory_PathValues() {
}
}
// TODO(abin): implement
// func (suite *GroupsSelectorSuite) TestGroupsScope_MatchesInfo() {
// var (
// sel = NewGroupsRestore(Any())
// host = "www.website.com"
// pth = "/foo"
// url = host + pth
// epoch = time.Time{}
// now = time.Now()
// modification = now.Add(15 * time.Minute)
// future = now.Add(45 * time.Minute)
// )
func (suite *GroupsSelectorSuite) TestGroupsScope_MatchesInfo() {
var (
sel = NewGroupsRestore(Any())
user = "user@mail.com"
host = "www.website.com"
// pth = "/foo"
// url = host + pth
epoch = time.Time{}
now = time.Now()
modification = now.Add(15 * time.Minute)
future = now.Add(45 * time.Minute)
dtch = details.TeamsChannelMessage
)
// table := []struct {
// name string
// infoURL string
// scope []GroupsScope
// expect assert.BoolAssertionFunc
// }{
// {"host match", host, sel.WebURL([]string{host}), assert.True},
// {"url match", url, sel.WebURL([]string{url}), assert.True},
// {"host suffixes host", host, sel.WebURL([]string{host}, SuffixMatch()), assert.True},
// {"url does not suffix host", url, sel.WebURL([]string{host}, SuffixMatch()), assert.False},
// {"url has path suffix", url, sel.WebURL([]string{pth}, SuffixMatch()), assert.True},
// {"host does not contain substring", host, sel.WebURL([]string{"website"}), assert.False},
// {"url does not suffix substring", url, sel.WebURL([]string{"oo"}, SuffixMatch()), assert.False},
// {"host mismatch", host, sel.WebURL([]string{"www.google.com"}), assert.False},
// {"file create after the epoch", host, sel.CreatedAfter(dttm.Format(epoch)), assert.True},
// {"file create after now", host, sel.CreatedAfter(dttm.Format(now)), assert.False},
// {"file create after later", url, sel.CreatedAfter(dttm.Format(future)), assert.False},
// {"file create before future", host, sel.CreatedBefore(dttm.Format(future)), assert.True},
// {"file create before now", host, sel.CreatedBefore(dttm.Format(now)), assert.False},
// {"file create before modification", host, sel.CreatedBefore(dttm.Format(modification)), assert.True},
// {"file create before epoch", host, sel.CreatedBefore(dttm.Format(now)), assert.False},
// {"file modified after the epoch", host, sel.ModifiedAfter(dttm.Format(epoch)), assert.True},
// {"file modified after now", host, sel.ModifiedAfter(dttm.Format(now)), assert.True},
// {"file modified after later", host, sel.ModifiedAfter(dttm.Format(future)), assert.False},
// {"file modified before future", host, sel.ModifiedBefore(dttm.Format(future)), assert.True},
// {"file modified before now", host, sel.ModifiedBefore(dttm.Format(now)), assert.False},
// {"file modified before epoch", host, sel.ModifiedBefore(dttm.Format(now)), assert.False},
// {"in library", host, sel.Library("included-library"), assert.True},
// {"not in library", host, sel.Library("not-included-library"), assert.False},
// {"library id", host, sel.Library("1234"), assert.True},
// {"not library id", host, sel.Library("abcd"), assert.False},
// }
// for _, test := range table {
// suite.Run(test.name, func() {
// t := suite.T()
table := []struct {
name string
itemType details.ItemType
creator string
scope []GroupsScope
expect assert.BoolAssertionFunc
}{
// TODO(abin): implement
// {"host match", host, sel.WebURL([]string{host}), assert.True},
// {"url match", url, sel.WebURL([]string{url}), assert.True},
// {"host suffixes host", host, sel.WebURL([]string{host}, SuffixMatch()), assert.True},
// {"url does not suffix host", url, sel.WebURL([]string{host}, SuffixMatch()), assert.False},
// {"url has path suffix", url, sel.WebURL([]string{pth}, SuffixMatch()), assert.True},
// {"host does not contain substring", host, sel.WebURL([]string{"website"}), assert.False},
// {"url does not suffix substring", url, sel.WebURL([]string{"oo"}, SuffixMatch()), assert.False},
// {"host mismatch", host, sel.WebURL([]string{"www.google.com"}), assert.False},
// {"file create after the epoch", host, sel.CreatedAfter(dttm.Format(epoch)), assert.True},
// {"file create after now", host, sel.CreatedAfter(dttm.Format(now)), assert.False},
// {"file create after later", url, sel.CreatedAfter(dttm.Format(future)), assert.False},
// {"file create before future", host, sel.CreatedBefore(dttm.Format(future)), assert.True},
// {"file create before now", host, sel.CreatedBefore(dttm.Format(now)), assert.False},
// {"file create before modification", host, sel.CreatedBefore(dttm.Format(modification)), assert.True},
// {"file create before epoch", host, sel.CreatedBefore(dttm.Format(now)), assert.False},
// {"file modified after the epoch", host, sel.ModifiedAfter(dttm.Format(epoch)), assert.True},
// {"file modified after now", host, sel.ModifiedAfter(dttm.Format(now)), assert.True},
// {"file modified after later", host, sel.ModifiedAfter(dttm.Format(future)), assert.False},
// {"file modified before future", host, sel.ModifiedBefore(dttm.Format(future)), assert.True},
// {"file modified before now", host, sel.ModifiedBefore(dttm.Format(now)), assert.False},
// {"file modified before epoch", host, sel.ModifiedBefore(dttm.Format(now)), assert.False},
// {"in library", host, sel.Library("included-library"), assert.True},
// {"not in library", host, sel.Library("not-included-library"), assert.False},
// {"library id", host, sel.Library("1234"), assert.True},
// {"not library id", host, sel.Library("abcd"), assert.False},
// itemInfo := details.ItemInfo{
// Groups: &details.GroupsInfo{
// ItemType: details.GroupsPage,
// WebURL: test.infoURL,
// Created: now,
// Modified: modification,
// DriveName: "included-library",
// DriveID: "1234",
// },
// }
{"channel message created by", dtch, user, sel.MessageCreator(user), assert.True},
{"channel message not created by", dtch, user, sel.MessageCreator(host), assert.False},
{"chan msg create after the epoch", dtch, user, sel.MessageCreatedAfter(dttm.Format(epoch)), assert.True},
{"chan msg create after now", dtch, user, sel.MessageCreatedAfter(dttm.Format(now)), assert.False},
{"chan msg create after later", dtch, user, sel.MessageCreatedAfter(dttm.Format(future)), assert.False},
{"chan msg create before future", dtch, user, sel.MessageCreatedBefore(dttm.Format(future)), assert.True},
{"chan msg create before now", dtch, user, sel.MessageCreatedBefore(dttm.Format(now)), assert.False},
{"chan msg create before reply", dtch, user, sel.MessageCreatedBefore(dttm.Format(modification)), assert.True},
{"chan msg create before epoch", dtch, user, sel.MessageCreatedBefore(dttm.Format(now)), assert.False},
{"chan msg last reply after the epoch", dtch, user, sel.MessageLastReplyAfter(dttm.Format(epoch)), assert.True},
{"chan msg last reply after now", dtch, user, sel.MessageLastReplyAfter(dttm.Format(now)), assert.True},
{"chan msg last reply after later", dtch, user, sel.MessageLastReplyAfter(dttm.Format(future)), assert.False},
{"chan msg last reply before future", dtch, user, sel.MessageLastReplyBefore(dttm.Format(future)), assert.True},
{"chan msg last reply before now", dtch, user, sel.MessageLastReplyBefore(dttm.Format(now)), assert.False},
{"chan msg last reply before epoch", dtch, user, sel.MessageLastReplyBefore(dttm.Format(now)), assert.False},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
// scopes := setScopesToDefault(test.scope)
// for _, scope := range scopes {
// test.expect(t, scope.matchesInfo(itemInfo))
// }
// })
// }
// }
itemInfo := details.ItemInfo{
Groups: &details.GroupsInfo{
ItemType: test.itemType,
WebURL: test.creator,
MessageCreator: test.creator,
Created: now,
Modified: modification,
LastReplyAt: modification,
DriveName: "included-library",
DriveID: "1234",
},
}
scopes := setScopesToDefault(test.scope)
for _, scope := range scopes {
test.expect(t, scope.matchesInfo(itemInfo))
}
})
}
}
func (suite *GroupsSelectorSuite) TestCategory_PathType() {
table := []struct {
cat groupsCategory
pathType path.CategoryType
}{
{
cat: GroupsCategoryUnknown,
pathType: path.UnknownCategory,
},
{
cat: GroupsChannel,
pathType: path.ChannelMessagesCategory,
},
{
cat: GroupsChannelMessage,
pathType: path.ChannelMessagesCategory,
},
{GroupsCategoryUnknown, path.UnknownCategory},
{GroupsChannel, path.ChannelMessagesCategory},
{GroupsChannelMessage, path.ChannelMessagesCategory},
{GroupsInfoChannelMessageCreator, path.ChannelMessagesCategory},
{GroupsInfoChannelMessageCreatedAfter, path.ChannelMessagesCategory},
{GroupsInfoChannelMessageCreatedBefore, path.ChannelMessagesCategory},
{GroupsInfoChannelMessageLastReplyAfter, path.ChannelMessagesCategory},
{GroupsInfoChannelMessageLastReplyBefore, path.ChannelMessagesCategory},
{GroupsLibraryFolder, path.LibrariesCategory},
{GroupsLibraryItem, path.LibrariesCategory},
{GroupsInfoSiteLibraryDrive, path.LibrariesCategory},
}
for _, test := range table {
suite.Run(test.cat.String(), func() {

View File

@ -9,19 +9,79 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/logger"
)
// ---------------------------------------------------------------------------
// item pager
// channel message pager
// ---------------------------------------------------------------------------
type ChannelMessageDeltaEnumerator interface {
DeltaGetPager
ValuesInPageLinker[models.ChatMessageable]
SetNextLinker
var _ DeltaPager[models.ChatMessageable] = &channelMessageDeltaPageCtrl{}
type channelMessageDeltaPageCtrl struct {
resourceID, channelID string
gs graph.Servicer
builder *teams.ItemChannelsItemMessagesDeltaRequestBuilder
options *teams.ItemChannelsItemMessagesDeltaRequestBuilderGetRequestConfiguration
}
var _ ChannelMessageDeltaEnumerator = &ChannelMessageDeltaPageCtrl{}
func (p *channelMessageDeltaPageCtrl) SetNext(nextLink string) {
p.builder = teams.NewItemChannelsItemMessagesDeltaRequestBuilder(nextLink, p.gs.Adapter())
}
func (p *channelMessageDeltaPageCtrl) GetPage(
ctx context.Context,
) (DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options)
return resp, graph.Stack(ctx, err).OrNil()
}
func (p *channelMessageDeltaPageCtrl) Reset(context.Context) {
p.builder = p.gs.
Client().
Teams().
ByTeamId(p.resourceID).
Channels().
ByChannelId(p.channelID).
Messages().
Delta()
}
func (p *channelMessageDeltaPageCtrl) ValuesIn(l PageLinker) ([]models.ChatMessageable, error) {
return getValues[models.ChatMessageable](l)
}
func (c Channels) NewChannelMessageDeltaPager(
teamID, channelID, prevDelta string,
) *channelMessageDeltaPageCtrl {
builder := c.Stable.
Client().
Teams().
ByTeamId(teamID).
Channels().
ByChannelId(channelID).
Messages().
Delta()
if len(prevDelta) > 0 {
builder = teams.NewItemChannelsItemMessagesDeltaRequestBuilder(prevDelta, c.Stable.Adapter())
}
options := &teams.ItemChannelsItemMessagesDeltaRequestBuilderGetRequestConfiguration{
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize)),
}
return &channelMessageDeltaPageCtrl{
resourceID: teamID,
channelID: channelID,
builder: builder,
gs: c.Stable,
options: options,
}
}
// var _ ChannelMessageDeltaEnumerator = &ChannelMessageDeltaPageCtrl{}
var _ Pager[models.Channelable] = &channelPageCtrl{}
type ChannelMessageDeltaPageCtrl struct {
gs graph.Servicer
@ -88,7 +148,7 @@ type channelMessagePageCtrl struct {
func (c Channels) GetItemIDsInContainer(
ctx context.Context,
teamID, channelID string,
) (map[string]MessageItemIDType, error) {
) (map[string]struct{}, error) {
ctx = clues.Add(ctx, "channel_id", channelID)
pager := c.NewChannelItemPager(teamID, channelID)
@ -97,12 +157,10 @@ func (c Channels) GetItemIDsInContainer(
return nil, graph.Wrap(ctx, err, "enumerating channel messages")
}
m := map[string]MessageItemIDType{}
m := map[string]struct{}{}
for _, item := range items {
m[ptr.Val(item.GetId())] = MessageItemIDType{
ItemID: ptr.Val(item.GetId()),
}
m[ptr.Val(item.GetId())] = struct{}{}
}
return m, nil
@ -131,6 +189,7 @@ func (c Channels) NewChannelItemPager(
return &channelMessagePageCtrl{c.Stable, builder, options}
}
//lint:ignore U1000 False Positive
func (p *channelMessagePageCtrl) getPage(ctx context.Context) (PageLinkValuer[models.ChatMessageable], error) {
page, err := p.builder.Get(ctx, p.options)
if err != nil {
@ -145,17 +204,71 @@ func (p *channelMessagePageCtrl) setNext(nextLink string) {
p.builder = teams.NewItemChannelsItemMessagesRequestBuilder(nextLink, p.gs.Adapter())
}
// GetChannelMessagesDelta fetches a delta of all messages in the channel.
func (c Channels) GetChannelMessagesDelta(
ctx context.Context,
teamID, channelID, prevDelta string,
) ([]models.ChatMessageable, DeltaUpdate, error) {
var (
vs = []models.ChatMessageable{}
pager = c.NewChannelMessageDeltaPager(teamID, channelID, prevDelta)
invalidPrevDelta = len(prevDelta) == 0
newDeltaLink string
)
// Loop through all pages returned by Graph API.
for {
page, err := pager.GetPage(graph.ConsumeNTokens(ctx, graph.SingleGetOrDeltaLC))
if graph.IsErrInvalidDelta(err) {
logger.Ctx(ctx).Infow("Invalid previous delta", "delta_link", prevDelta)
invalidPrevDelta = true
vs = []models.ChatMessageable{}
pager.Reset(ctx)
continue
}
if err != nil {
return nil, DeltaUpdate{}, graph.Wrap(ctx, err, "retrieving page of channel messages")
}
vals, err := pager.ValuesIn(page)
if err != nil {
return nil, DeltaUpdate{}, graph.Wrap(ctx, err, "extracting channel messages from response")
}
vs = append(vs, vals...)
nextLink, deltaLink := NextAndDeltaLink(page)
if len(deltaLink) > 0 {
newDeltaLink = deltaLink
}
if len(nextLink) == 0 {
break
}
pager.SetNext(nextLink)
}
logger.Ctx(ctx).Debugf("retrieved %d channel messages", len(vs))
du := DeltaUpdate{
URL: newDeltaLink,
Reset: invalidPrevDelta,
}
return vs, du, nil
}
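As a usage sketch, the intended incremental pattern is to persist du.URL between runs and feed it back as prevDelta on the next call; loadDeltaToken, saveDeltaToken, and process below are hypothetical helpers, and only GetChannelMessagesDelta and DeltaUpdate come from this package:

func syncChannelMessages(ctx context.Context, c Channels, teamID, channelID string) error {
	prev := loadDeltaToken(teamID, channelID) // hypothetical persistence helper

	// An empty or Graph-rejected prev token triggers a full re-enumeration,
	// signalled by du.Reset == true.
	msgs, du, err := c.GetChannelMessagesDelta(ctx, teamID, channelID, prev)
	if err != nil {
		return err
	}

	process(msgs, du.Reset) // hypothetical consumer

	// Persist the new delta link so the next run only fetches changes.
	return saveDeltaToken(teamID, channelID, du.URL)
}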
// ---------------------------------------------------------------------------
// channel pager
// ---------------------------------------------------------------------------
type ChannelEnumerator interface {
PageLinker
ValuesInPageLinker[models.Channelable]
SetNextLinker
}
var _ ChannelEnumerator = &channelPageCtrl{}
var _ Pager[models.Channelable] = &channelPageCtrl{}
type channelPageCtrl struct {
gs graph.Servicer
@ -163,14 +276,31 @@ type channelPageCtrl struct {
options *teams.ItemChannelsRequestBuilderGetRequestConfiguration
}
func (p *channelPageCtrl) SetNext(nextLink string) {
p.builder = teams.NewItemChannelsRequestBuilder(nextLink, p.gs.Adapter())
}
func (p *channelPageCtrl) GetPage(
ctx context.Context,
) (PageLinker, error) {
resp, err := p.builder.Get(ctx, p.options)
return resp, graph.Stack(ctx, err).OrNil()
}
func (p *channelPageCtrl) ValuesIn(l PageLinker) ([]models.Channelable, error) {
return getValues[models.Channelable](l)
}
func (c Channels) NewChannelPager(
teamID,
channelID string,
fields []string,
teamID string,
) *channelPageCtrl {
requestConfig := &teams.ItemChannelsRequestBuilderGetRequestConfiguration{
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize)),
}
res := &channelPageCtrl{
gs: c.Stable,
options: nil,
options: requestConfig,
builder: c.Stable.
Client().
Teams().
@ -181,30 +311,39 @@ func (c Channels) NewChannelPager(
return res
}
func (p *channelPageCtrl) SetNext(nextLink string) {
p.builder = teams.NewItemChannelsRequestBuilder(nextLink, p.gs.Adapter())
}
func (p *channelPageCtrl) GetPage(ctx context.Context) (PageLinker, error) {
// GetChannels fetches all channels in the team.
func (c Channels) GetChannels(
ctx context.Context,
teamID string,
) ([]models.Channelable, error) {
var (
resp PageLinker
err error
vs = []models.Channelable{}
pager = c.NewChannelPager(teamID)
)
resp, err = p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
// Loop through all pages returned by Graph API.
for {
page, err := pager.GetPage(ctx)
if err != nil {
return nil, graph.Wrap(ctx, err, "retrieving page of channels")
}
vals, err := pager.ValuesIn(page)
if err != nil {
return nil, graph.Wrap(ctx, err, "extracting channels from response")
}
vs = append(vs, vals...)
nextLink := ptr.Val(page.GetOdataNextLink())
if len(nextLink) == 0 {
break
}
pager.SetNext(nextLink)
}
return resp, nil
}
logger.Ctx(ctx).Debugf("retrieved %d channels", len(vs))
func (p *channelPageCtrl) ValuesIn(l PageLinker) ([]models.Channelable, error) {
return getValues[models.Channelable](l)
}
func (p *channelPageCtrl) GetOdataNextLink() *string {
// No next link preent in the API result
emptyString := ""
return &emptyString
return vs, nil
}

View File

@ -4,64 +4,70 @@ import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/tester/tconfig"
)
type ChannelPagerIntgSuite struct {
type ChannelsPagerIntgSuite struct {
tester.Suite
its intgTesterSetup
}
func TestChannelPagerIntgSuite(t *testing.T) {
suite.Run(t, &ChannelPagerIntgSuite{
suite.Run(t, &ChannelsPagerIntgSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tconfig.M365AcctCredEnvs}),
})
}
func (suite *ChannelPagerIntgSuite) SetupSuite() {
func (suite *ChannelsPagerIntgSuite) SetupSuite() {
suite.its = newIntegrationTesterSetup(suite.T())
}
// This will be added once 'pager' is implemented
// func (suite *ChannelPagerIntgSuite) TestChannels_GetPage() {
// t := suite.T()
// ctx, flush := tester.NewContext(t)
// defer flush()
// teamID := tconfig.M365TeamID(t)
// channelID := tconfig.M365ChannelID(t)
// pager := suite.its.ac.Channels().NewMessagePager(teamID, channelID, []string{})
// a, err := pager.GetPage(ctx)
// assert.NoError(t, err, clues.ToCore(err))
// assert.NotNil(t, a)
// }
func (suite *ChannelPagerIntgSuite) TestChannels_Get() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
func (suite *ChannelsPagerIntgSuite) TestEnumerateChannels() {
var (
containerName = "General"
teamID = tconfig.M365TeamID(t)
chanClient = suite.its.ac.Channels()
t = suite.T()
ac = suite.its.ac.Channels()
)
// GET channel - should be found
channel, err := chanClient.GetChannelByName(ctx, teamID, containerName)
assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, ptr.Val(channel.GetDisplayName()), containerName)
ctx, flush := tester.NewContext(t)
defer flush()
// GET channel - should be found
_, err = chanClient.GetChannel(ctx, teamID, ptr.Val(channel.GetId()))
assert.NoError(t, err, clues.ToCore(err))
chans, err := ac.GetChannels(ctx, suite.its.group.id)
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, chans)
}
func (suite *ChannelsPagerIntgSuite) TestEnumerateChannelMessages() {
var (
t = suite.T()
ac = suite.its.ac.Channels()
)
ctx, flush := tester.NewContext(t)
defer flush()
msgs, du, err := ac.GetChannelMessagesDelta(
ctx,
suite.its.group.id,
suite.its.group.testContainerID,
"")
require.NoError(t, err, clues.ToCore(err))
require.NotEmpty(t, msgs)
require.NotZero(t, du.URL, "delta link")
require.True(t, du.Reset, "reset due to empty prev delta link")
msgs, du, err = ac.GetChannelMessagesDelta(
ctx,
suite.its.group.id,
suite.its.group.testContainerID,
du.URL)
require.NoError(t, err, clues.ToCore(err))
require.Empty(t, msgs, "should have no new messages from delta")
require.NotZero(t, du.URL, "delta link")
require.False(t, du.Reset, "prev delta link should be valid")
}

View File

@ -82,13 +82,10 @@ func (c Contacts) DeleteContainer(
return nil
}
// prefer GetContainerByID where possible.
// use this only in cases where the models.ContactFolderable
// is required.
func (c Contacts) GetFolder(
func (c Contacts) GetContainerByID(
ctx context.Context,
userID, containerID string,
) (models.ContactFolderable, error) {
) (graph.Container, error) {
config := &users.ItemContactFoldersContactFolderItemRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemContactFoldersContactFolderItemRequestBuilderGetQueryParameters{
Select: idAnd(displayName, parentFolderID),
@ -109,14 +106,6 @@ func (c Contacts) GetFolder(
return resp, nil
}
// interface-compliant wrapper of GetFolder
func (c Contacts) GetContainerByID(
ctx context.Context,
userID, containerID string,
) (graph.Container, error) {
return c.GetFolder(ctx, userID, containerID)
}
// GetContainerByName fetches a folder by name
func (c Contacts) GetContainerByName(
ctx context.Context,

View File

@ -191,7 +191,7 @@ func (c Contacts) GetItemIDsInContainer(
// item ID pager
// ---------------------------------------------------------------------------
var _ itemIDPager = &contactIDPager{}
var _ DeltaPager[getIDAndAddtler] = &contactIDPager{}
type contactIDPager struct {
gs graph.Servicer
@ -203,7 +203,7 @@ func (c Contacts) NewContactIDsPager(
ctx context.Context,
userID, containerID string,
immutableIDs bool,
) itemIDPager {
) DeltaPager[getIDAndAddtler] {
config := &users.ItemContactFoldersItemContactsRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemContactFoldersItemContactsRequestBuilderGetQueryParameters{
Select: idAnd(parentFolderID),
@ -223,7 +223,7 @@ func (c Contacts) NewContactIDsPager(
return &contactIDPager{c.Stable, builder, config}
}
func (p *contactIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
func (p *contactIDPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
@ -232,14 +232,14 @@ func (p *contactIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
return EmptyDeltaLinker[models.Contactable]{PageLinkValuer: resp}, nil
}
func (p *contactIDPager) setNext(nextLink string) {
func (p *contactIDPager) SetNext(nextLink string) {
p.builder = users.NewItemContactFoldersItemContactsRequestBuilder(nextLink, p.gs.Adapter())
}
// non delta pagers don't need reset
func (p *contactIDPager) reset(context.Context) {}
func (p *contactIDPager) Reset(context.Context) {}
func (p *contactIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
func (p *contactIDPager) ValuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
return toValues[models.Contactable](pl)
}
@ -247,7 +247,7 @@ func (p *contactIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
// delta item ID pager
// ---------------------------------------------------------------------------
var _ itemIDPager = &contactDeltaIDPager{}
var _ DeltaPager[getIDAndAddtler] = &contactDeltaIDPager{}
type contactDeltaIDPager struct {
gs graph.Servicer
@ -271,7 +271,7 @@ func (c Contacts) NewContactDeltaIDsPager(
ctx context.Context,
userID, containerID, oldDelta string,
immutableIDs bool,
) itemIDPager {
) DeltaPager[getIDAndAddtler] {
options := &users.ItemContactFoldersItemContactsDeltaRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemContactFoldersItemContactsDeltaRequestBuilderGetQueryParameters{
Select: idAnd(parentFolderID),
@ -290,7 +290,7 @@ func (c Contacts) NewContactDeltaIDsPager(
return &contactDeltaIDPager{c.Stable, userID, containerID, builder, options}
}
func (p *contactDeltaIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
func (p *contactDeltaIDPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
@ -299,15 +299,15 @@ func (p *contactDeltaIDPager) getPage(ctx context.Context) (DeltaPageLinker, err
return resp, nil
}
func (p *contactDeltaIDPager) setNext(nextLink string) {
func (p *contactDeltaIDPager) SetNext(nextLink string) {
p.builder = users.NewItemContactFoldersItemContactsDeltaRequestBuilder(nextLink, p.gs.Adapter())
}
func (p *contactDeltaIDPager) reset(ctx context.Context) {
func (p *contactDeltaIDPager) Reset(ctx context.Context) {
p.builder = getContactDeltaBuilder(ctx, p.gs, p.userID, p.containerID, p.options)
}
func (p *contactDeltaIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
func (p *contactDeltaIDPager) ValuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
return toValues[models.Contactable](pl)
}

View File

@ -39,13 +39,13 @@ func (suite *ContactsPagerIntgSuite) TestContacts_GetItemsInContainerByCollision
ctx, flush := tester.NewContext(t)
defer flush()
container, err := ac.GetContainerByID(ctx, suite.its.userID, "contacts")
container, err := ac.GetContainerByID(ctx, suite.its.user.id, "contacts")
require.NoError(t, err, clues.ToCore(err))
conts, err := ac.Stable.
Client().
Users().
ByUserId(suite.its.userID).
ByUserId(suite.its.user.id).
ContactFolders().
ByContactFolderId(ptr.Val(container.GetId())).
Contacts().
@ -61,7 +61,7 @@ func (suite *ContactsPagerIntgSuite) TestContacts_GetItemsInContainerByCollision
expect := maps.Keys(expectM)
results, err := suite.its.ac.Contacts().GetItemsInContainerByCollisionKey(ctx, suite.its.userID, "contacts")
results, err := suite.its.ac.Contacts().GetItemsInContainerByCollisionKey(ctx, suite.its.user.id, "contacts")
require.NoError(t, err, clues.ToCore(err))
require.Less(t, 0, len(results), "requires at least one result")
@ -91,13 +91,13 @@ func (suite *ContactsPagerIntgSuite) TestContacts_GetItemsIDsInContainer() {
ctx, flush := tester.NewContext(t)
defer flush()
container, err := ac.GetContainerByID(ctx, suite.its.userID, api.DefaultContacts)
container, err := ac.GetContainerByID(ctx, suite.its.user.id, api.DefaultContacts)
require.NoError(t, err, clues.ToCore(err))
msgs, err := ac.Stable.
Client().
Users().
ByUserId(suite.its.userID).
ByUserId(suite.its.user.id).
ContactFolders().
ByContactFolderId(ptr.Val(container.GetId())).
Contacts().
@ -112,7 +112,7 @@ func (suite *ContactsPagerIntgSuite) TestContacts_GetItemsIDsInContainer() {
}
results, err := suite.its.ac.Contacts().
GetItemIDsInContainer(ctx, suite.its.userID, api.DefaultContacts)
GetItemIDsInContainer(ctx, suite.its.user.id, api.DefaultContacts)
require.NoError(t, err, clues.ToCore(err))
require.Less(t, 0, len(results), "requires at least one result")
require.Equal(t, len(expect), len(results), "must have same count of items")

View File

@ -141,7 +141,7 @@ func (suite *ContactsAPIIntgSuite) TestContacts_GetContainerByName() {
cc, err := suite.its.ac.Contacts().CreateContainer(
ctx,
suite.its.userID,
suite.its.user.id,
"",
rc.Location)
require.NoError(t, err, clues.ToCore(err))
@ -168,7 +168,7 @@ func (suite *ContactsAPIIntgSuite) TestContacts_GetContainerByName() {
_, err := suite.its.ac.
Contacts().
GetContainerByName(ctx, suite.its.userID, "", test.name)
GetContainerByName(ctx, suite.its.user.id, "", test.name)
test.expectErr(t, err, clues.ToCore(err))
})
}

View File

@ -120,20 +120,11 @@ func (c Drives) GetItemIDsInContainer(
return m, nil
}
// ---------------------------------------------------------------------------
// ---------------------------------------------------------------------------
// delta item pager
// ---------------------------------------------------------------------------
type DriveItemDeltaEnumerator interface {
GetPage(context.Context) (DeltaPageLinker, error)
SetNext(nextLink string)
Reset()
ValuesIn(DeltaPageLinker) ([]models.DriveItemable, error)
}
var _ DriveItemDeltaEnumerator = &DriveItemDeltaPageCtrl{}
var _ DeltaPager[models.DriveItemable] = &DriveItemDeltaPageCtrl{}
type DriveItemDeltaPageCtrl struct {
gs graph.Servicer
@ -198,7 +189,7 @@ func (p *DriveItemDeltaPageCtrl) SetNext(link string) {
p.builder = drives.NewItemItemsItemDeltaRequestBuilder(link, p.gs.Adapter())
}
func (p *DriveItemDeltaPageCtrl) Reset() {
func (p *DriveItemDeltaPageCtrl) Reset(context.Context) {
p.builder = p.gs.Client().
Drives().
ByDriveId(p.driveID).
@ -207,7 +198,7 @@ func (p *DriveItemDeltaPageCtrl) Reset() {
Delta()
}
func (p *DriveItemDeltaPageCtrl) ValuesIn(l DeltaPageLinker) ([]models.DriveItemable, error) {
func (p *DriveItemDeltaPageCtrl) ValuesIn(l PageLinker) ([]models.DriveItemable, error) {
return getValues[models.DriveItemable](l)
}
@ -215,7 +206,7 @@ func (p *DriveItemDeltaPageCtrl) ValuesIn(l DeltaPageLinker) ([]models.DriveItem
// user's drives pager
// ---------------------------------------------------------------------------
var _ DrivePager = &userDrivePager{}
var _ Pager[models.Driveable] = &userDrivePager{}
type userDrivePager struct {
userID string
@ -305,7 +296,7 @@ func (p *userDrivePager) ValuesIn(l PageLinker) ([]models.Driveable, error) {
// site's libraries pager
// ---------------------------------------------------------------------------
var _ DrivePager = &siteDrivePager{}
var _ Pager[models.Driveable] = &siteDrivePager{}
type siteDrivePager struct {
gs graph.Servicer
@ -367,17 +358,10 @@ func (p *siteDrivePager) ValuesIn(l PageLinker) ([]models.Driveable, error) {
// drive pager
// ---------------------------------------------------------------------------
// DrivePager pages through different types of drive owners
type DrivePager interface {
GetPage(context.Context) (PageLinker, error)
SetNext(nextLink string)
ValuesIn(PageLinker) ([]models.Driveable, error)
}
// GetAllDrives fetches all drives for the given pager
func GetAllDrives(
ctx context.Context,
pager DrivePager,
pager Pager[models.Driveable],
retry bool,
maxRetryCount int,
) ([]models.Driveable, error) {

View File

@ -39,13 +39,13 @@ func (suite *DrivePagerIntgSuite) TestDrives_GetItemsInContainerByCollisionKey()
}{
{
name: "user drive",
driveID: suite.its.userDriveID,
rootFolderID: suite.its.userDriveRootFolderID,
driveID: suite.its.user.driveID,
rootFolderID: suite.its.user.driveRootFolderID,
},
{
name: "site drive",
driveID: suite.its.siteDriveID,
rootFolderID: suite.its.siteDriveRootFolderID,
driveID: suite.its.site.driveID,
rootFolderID: suite.its.site.driveRootFolderID,
},
}
for _, test := range table {
@ -75,7 +75,7 @@ func (suite *DrivePagerIntgSuite) TestDrives_GetItemsInContainerByCollisionKey()
t,
ims,
"need at least one item to compare in user %s drive %s folder %s",
suite.its.userID, test.driveID, test.rootFolderID)
suite.its.user.id, test.driveID, test.rootFolderID)
results, err := suite.its.ac.
Drives().
@ -113,13 +113,13 @@ func (suite *DrivePagerIntgSuite) TestDrives_GetItemIDsInContainer() {
}{
{
name: "user drive",
driveID: suite.its.userDriveID,
rootFolderID: suite.its.userDriveRootFolderID,
driveID: suite.its.user.driveID,
rootFolderID: suite.its.user.driveRootFolderID,
},
{
name: "site drive",
driveID: suite.its.siteDriveID,
rootFolderID: suite.its.siteDriveRootFolderID,
driveID: suite.its.site.driveID,
rootFolderID: suite.its.site.driveRootFolderID,
},
}
for _, test := range table {
@ -149,7 +149,7 @@ func (suite *DrivePagerIntgSuite) TestDrives_GetItemIDsInContainer() {
t,
igv,
"need at least one item to compare in user %s drive %s folder %s",
suite.its.userID, test.driveID, test.rootFolderID)
suite.its.user.id, test.driveID, test.rootFolderID)
for _, itm := range igv {
expect[ptr.Val(itm.GetId())] = api.DriveItemIDType{

View File

@ -76,8 +76,8 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
// generate a parent for the test data
parent, err := acd.PostItemInContainer(
ctx,
suite.its.userDriveID,
suite.its.userDriveRootFolderID,
suite.its.user.driveID,
suite.its.user.driveRootFolderID,
newItem(rc.Location, true),
control.Replace)
require.NoError(t, err, clues.ToCore(err))
@ -86,7 +86,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
folder := newItem("collision", true)
origFolder, err := acd.PostItemInContainer(
ctx,
suite.its.userDriveID,
suite.its.user.driveID,
ptr.Val(parent.GetId()),
folder,
control.Copy)
@ -96,7 +96,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
file := newItem("collision.txt", false)
origFile, err := acd.PostItemInContainer(
ctx,
suite.its.userDriveID,
suite.its.user.driveID,
ptr.Val(parent.GetId()),
file,
control.Copy)
@ -211,7 +211,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer() {
t := suite.T()
i, err := acd.PostItemInContainer(
ctx,
suite.its.userDriveID,
suite.its.user.driveID,
ptr.Val(parent.GetId()),
test.postItem,
test.onCollision)
@ -239,8 +239,8 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr
// generate a folder for the test data
folder, err := acd.PostItemInContainer(
ctx,
suite.its.userDriveID,
suite.its.userDriveRootFolderID,
suite.its.user.driveID,
suite.its.user.driveRootFolderID,
newItem(rc.Location, true),
// skip instead of replace here to get
// an ErrItemAlreadyExistsConflict, just in case.
@ -252,7 +252,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr
file := newItem(fmt.Sprintf("collision_%d.txt", i), false)
f, err := acd.PostItemInContainer(
ctx,
suite.its.userDriveID,
suite.its.user.driveID,
ptr.Val(folder.GetId()),
file,
control.Copy)
@ -263,7 +263,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr
resultFolder, err := acd.PostItemInContainer(
ctx,
suite.its.userDriveID,
suite.its.user.driveID,
ptr.Val(folder.GetParentReference().GetId()),
newItem(rc.Location, true),
control.Replace)
@ -274,7 +274,7 @@ func (suite *DriveAPIIntgSuite) TestDrives_PostItemInContainer_replaceFolderRegr
resultFileColl, err := acd.Stable.
Client().
Drives().
ByDriveId(suite.its.userDriveID).
ByDriveId(suite.its.user.driveID).
Items().
ByDriveItemId(ptr.Val(resultFolder.GetId())).
Children().

View File

@ -91,13 +91,10 @@ func (c Events) DeleteContainer(
return nil
}
// prefer GetContainerByID where possible.
// use this only in cases where the models.Calendarable
// is required.
func (c Events) GetCalendar(
func (c Events) GetContainerByID(
ctx context.Context,
userID, containerID string,
) (models.Calendarable, error) {
) (graph.Container, error) {
config := &users.ItemCalendarsCalendarItemRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemCalendarsCalendarItemRequestBuilderGetQueryParameters{
Select: idAnd("name", "owner"),
@ -115,20 +112,7 @@ func (c Events) GetCalendar(
return nil, graph.Stack(ctx, err)
}
return resp, nil
}
// interface-compliant wrapper of GetCalendar
func (c Events) GetContainerByID(
ctx context.Context,
userID, containerID string,
) (graph.Container, error) {
cal, err := c.GetCalendar(ctx, userID, containerID)
if err != nil {
return nil, err
}
return graph.CalendarDisplayable{Calendarable: cal}, nil
return graph.CalendarDisplayable{Calendarable: resp}, nil
}
// GetContainerByName fetches a calendar by name

View File

@ -173,7 +173,7 @@ func (c Events) GetItemsInContainerByCollisionKey(
// item ID pager
// ---------------------------------------------------------------------------
var _ itemIDPager = &eventIDPager{}
var _ DeltaPager[getIDAndAddtler] = &eventIDPager{}
type eventIDPager struct {
gs graph.Servicer
@ -185,7 +185,7 @@ func (c Events) NewEventIDsPager(
ctx context.Context,
userID, containerID string,
immutableIDs bool,
) (itemIDPager, error) {
) (DeltaPager[getIDAndAddtler], error) {
options := &users.ItemCalendarsItemEventsRequestBuilderGetRequestConfiguration{
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)),
QueryParameters: &users.ItemCalendarsItemEventsRequestBuilderGetQueryParameters{
@ -204,7 +204,7 @@ func (c Events) NewEventIDsPager(
return &eventIDPager{c.Stable, builder, options}, nil
}
func (p *eventIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
func (p *eventIDPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
@ -213,14 +213,14 @@ func (p *eventIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
return EmptyDeltaLinker[models.Eventable]{PageLinkValuer: resp}, nil
}
func (p *eventIDPager) setNext(nextLink string) {
func (p *eventIDPager) SetNext(nextLink string) {
p.builder = users.NewItemCalendarsItemEventsRequestBuilder(nextLink, p.gs.Adapter())
}
// non delta pagers don't need reset
func (p *eventIDPager) reset(context.Context) {}
func (p *eventIDPager) Reset(context.Context) {}
func (p *eventIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
func (p *eventIDPager) ValuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
return toValues[models.Eventable](pl)
}
@ -228,7 +228,7 @@ func (p *eventIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
// delta item ID pager
// ---------------------------------------------------------------------------
var _ itemIDPager = &eventDeltaIDPager{}
var _ DeltaPager[getIDAndAddtler] = &eventDeltaIDPager{}
type eventDeltaIDPager struct {
gs graph.Servicer
@ -242,7 +242,7 @@ func (c Events) NewEventDeltaIDsPager(
ctx context.Context,
userID, containerID, oldDelta string,
immutableIDs bool,
) (itemIDPager, error) {
) (DeltaPager[getIDAndAddtler], error) {
options := &users.ItemCalendarsItemEventsDeltaRequestBuilderGetRequestConfiguration{
Headers: newPreferHeaders(preferPageSize(c.options.DeltaPageSize), preferImmutableIDs(immutableIDs)),
QueryParameters: &users.ItemCalendarsItemEventsDeltaRequestBuilderGetQueryParameters{
@ -281,7 +281,7 @@ func getEventDeltaBuilder(
return builder
}
func (p *eventDeltaIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
func (p *eventDeltaIDPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
@ -290,15 +290,15 @@ func (p *eventDeltaIDPager) getPage(ctx context.Context) (DeltaPageLinker, error
return resp, nil
}
func (p *eventDeltaIDPager) setNext(nextLink string) {
func (p *eventDeltaIDPager) SetNext(nextLink string) {
p.builder = users.NewItemCalendarsItemEventsDeltaRequestBuilder(nextLink, p.gs.Adapter())
}
func (p *eventDeltaIDPager) reset(ctx context.Context) {
func (p *eventDeltaIDPager) Reset(ctx context.Context) {
p.builder = getEventDeltaBuilder(ctx, p.gs, p.userID, p.containerID, p.options)
}
func (p *eventDeltaIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
func (p *eventDeltaIDPager) ValuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
return toValues[models.Eventable](pl)
}

View File

@ -39,13 +39,13 @@ func (suite *EventsPagerIntgSuite) TestEvents_GetItemsInContainerByCollisionKey(
ctx, flush := tester.NewContext(t)
defer flush()
container, err := ac.GetContainerByID(ctx, suite.its.userID, "calendar")
container, err := ac.GetContainerByID(ctx, suite.its.user.id, "calendar")
require.NoError(t, err, clues.ToCore(err))
evts, err := ac.Stable.
Client().
Users().
ByUserId(suite.its.userID).
ByUserId(suite.its.user.id).
Calendars().
ByCalendarId(ptr.Val(container.GetId())).
Events().
@ -63,7 +63,7 @@ func (suite *EventsPagerIntgSuite) TestEvents_GetItemsInContainerByCollisionKey(
results, err := suite.its.ac.
Events().
GetItemsInContainerByCollisionKey(ctx, suite.its.userID, "calendar")
GetItemsInContainerByCollisionKey(ctx, suite.its.user.id, "calendar")
require.NoError(t, err, clues.ToCore(err))
require.Less(t, 0, len(results), "requires at least one result")

View File

@ -289,7 +289,7 @@ func (suite *EventsAPIIntgSuite) TestEvents_canFindNonStandardFolder() {
ac := suite.its.ac.Events()
rc := testdata.DefaultRestoreConfig("api_calendar_discovery")
cal, err := ac.CreateContainer(ctx, suite.its.userID, "", rc.Location)
cal, err := ac.CreateContainer(ctx, suite.its.user.id, "", rc.Location)
require.NoError(t, err, clues.ToCore(err))
var (
@ -306,7 +306,7 @@ func (suite *EventsAPIIntgSuite) TestEvents_canFindNonStandardFolder() {
err = ac.EnumerateContainers(
ctx,
suite.its.userID,
suite.its.user.id,
"Calendar",
findContainer,
fault.New(true))
@ -342,7 +342,7 @@ func (suite *EventsAPIIntgSuite) TestEvents_GetContainerByName() {
_, err := suite.its.ac.
Events().
GetContainerByName(ctx, suite.its.userID, "", test.name)
GetContainerByName(ctx, suite.its.user.id, "", test.name)
test.expectErr(t, err, clues.ToCore(err))
})
}

View File

@ -112,7 +112,7 @@ func (suite *GroupsIntgSuite) TestGetAll() {
func (suite *GroupsIntgSuite) TestGroups_GetByID() {
var (
groupID = suite.its.groupID
groupID = suite.its.group.id
groupsAPI = suite.its.ac.Groups()
)

View File

@ -74,16 +74,19 @@ func parseableToMap(t *testing.T, thing serialization.Parsable) map[string]any {
// Suite Setup
// ---------------------------------------------------------------------------
type ids struct {
id string
driveID string
driveRootFolderID string
testContainerID string
}
type intgTesterSetup struct {
ac api.Client
gockAC api.Client
userID string
userDriveID string
userDriveRootFolderID string
siteID string
siteDriveID string
siteDriveRootFolderID string
groupID string
ac api.Client
gockAC api.Client
user ids
site ids
group ids
}
func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
@ -106,42 +109,47 @@ func newIntegrationTesterSetup(t *testing.T) intgTesterSetup {
// user drive
its.userID = tconfig.M365UserID(t)
its.user.id = tconfig.M365UserID(t)
userDrive, err := its.ac.Users().GetDefaultDrive(ctx, its.userID)
userDrive, err := its.ac.Users().GetDefaultDrive(ctx, its.user.id)
require.NoError(t, err, clues.ToCore(err))
its.userDriveID = ptr.Val(userDrive.GetId())
its.user.driveID = ptr.Val(userDrive.GetId())
userDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, its.userDriveID)
userDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, its.user.driveID)
require.NoError(t, err, clues.ToCore(err))
its.userDriveRootFolderID = ptr.Val(userDriveRootFolder.GetId())
its.siteID = tconfig.M365SiteID(t)
its.user.driveRootFolderID = ptr.Val(userDriveRootFolder.GetId())
// site
siteDrive, err := its.ac.Sites().GetDefaultDrive(ctx, its.siteID)
its.site.id = tconfig.M365SiteID(t)
siteDrive, err := its.ac.Sites().GetDefaultDrive(ctx, its.site.id)
require.NoError(t, err, clues.ToCore(err))
its.siteDriveID = ptr.Val(siteDrive.GetId())
its.site.driveID = ptr.Val(siteDrive.GetId())
siteDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, its.siteDriveID)
siteDriveRootFolder, err := its.ac.Drives().GetRootFolder(ctx, its.site.driveID)
require.NoError(t, err, clues.ToCore(err))
its.siteDriveRootFolderID = ptr.Val(siteDriveRootFolder.GetId())
its.site.driveRootFolderID = ptr.Val(siteDriveRootFolder.GetId())
// group
// groups/teams
// use of the TeamID is intentional here, so that we are assured
// the group has full usage of the teams api.
its.groupID = tconfig.M365TeamID(t)
its.group.id = tconfig.M365TeamID(t)
team, err := its.ac.Groups().GetByID(ctx, its.groupID)
channel, err := its.ac.Channels().
GetChannelByName(
ctx,
its.group.id,
"Test")
require.NoError(t, err, clues.ToCore(err))
require.Equal(t, "Test", ptr.Val(channel.GetDisplayName()))
its.groupID = ptr.Val(team.GetId())
its.group.testContainerID = ptr.Val(channel.GetId())
return its
}

View File

@ -16,11 +16,31 @@ import (
// common interfaces
// ---------------------------------------------------------------------------
// TODO(keepers): replace all matching uses of GetPage with this.
type DeltaPager[T any] interface {
DeltaGetPager
Resetter
SetNextLinker
ValuesInPageLinker[T]
}
type Pager[T any] interface {
GetPager
SetNextLinker
ValuesInPageLinker[T]
}
type DeltaGetPager interface {
GetPage(context.Context) (DeltaPageLinker, error)
}
type GetPager interface {
GetPage(context.Context) (PageLinker, error)
}
type Valuer[T any] interface {
GetValue() []T
}
type ValuesInPageLinker[T any] interface {
ValuesIn(PageLinker) ([]T, error)
}
@ -34,10 +54,19 @@ type DeltaPageLinker interface {
GetOdataDeltaLink() *string
}
type PageLinkValuer[T any] interface {
PageLinker
Valuer[T]
}
type SetNextLinker interface {
SetNext(nextLink string)
}
type Resetter interface {
Reset(context.Context)
}
// ---------------------------------------------------------------------------
// common funcs
// ---------------------------------------------------------------------------
@ -55,15 +84,6 @@ func NextAndDeltaLink(pl DeltaPageLinker) (string, string) {
return NextLink(pl), ptr.Val(pl.GetOdataDeltaLink())
}
type Valuer[T any] interface {
GetValue() []T
}
type PageLinkValuer[T any] interface {
PageLinker
Valuer[T]
}
// EmptyDeltaLinker is used to convert PageLinker to DeltaPageLinker
type EmptyDeltaLinker[T any] struct {
PageLinkValuer[T]
@ -148,19 +168,6 @@ func toValues[T any](a any) ([]getIDAndAddtler, error) {
return r, nil
}
type itemIDPager interface {
// getPage get a page with the specified options from graph
getPage(context.Context) (DeltaPageLinker, error)
// setNext is used to pass in the next url got from graph
setNext(string)
// reset is used to clear delta url in delta pagers. When
// reset is called, we reset the state(delta url) that we
// currently have and start a new delta query without the token.
reset(context.Context)
// valuesIn gets us the values in a page
valuesIn(PageLinker) ([]getIDAndAddtler, error)
}
type getIDAndAddtler interface {
GetId() *string
GetAdditionalData() map[string]any
@ -169,13 +176,13 @@ type getIDAndAddtler interface {
func getAddedAndRemovedItemIDs(
ctx context.Context,
service graph.Servicer,
pager itemIDPager,
deltaPager itemIDPager,
pager DeltaPager[getIDAndAddtler],
deltaPager DeltaPager[getIDAndAddtler],
oldDelta string,
canMakeDeltaQueries bool,
) ([]string, []string, DeltaUpdate, error) {
var (
pgr itemIDPager
pgr DeltaPager[getIDAndAddtler]
resetDelta bool
)
@ -204,7 +211,7 @@ func getAddedAndRemovedItemIDs(
}
// reset deltaPager
pgr.reset(ctx)
pgr.Reset(ctx)
added, removed, deltaURL, err = getItemsAddedAndRemovedFromContainer(ctx, pgr)
if err != nil {
@ -217,7 +224,7 @@ func getAddedAndRemovedItemIDs(
// generic controller for retrieving all item ids in a container.
func getItemsAddedAndRemovedFromContainer(
ctx context.Context,
pager itemIDPager,
pager DeltaPager[getIDAndAddtler],
) ([]string, []string, string, error) {
var (
addedIDs = []string{}
@ -229,14 +236,14 @@ func getItemsAddedAndRemovedFromContainer(
for {
// get the next page of data, check for standard errors
resp, err := pager.getPage(ctx)
resp, err := pager.GetPage(ctx)
if err != nil {
return nil, nil, deltaURL, graph.Stack(ctx, err)
}
// each category type responds with a different interface, but all
// of them comply with GetValue, which is where we'll get our item data.
items, err := pager.valuesIn(resp)
items, err := pager.ValuesIn(resp)
if err != nil {
return nil, nil, "", graph.Stack(ctx, err)
}
@ -278,7 +285,7 @@ func getItemsAddedAndRemovedFromContainer(
break
}
pager.setNext(nextLink)
pager.SetNext(nextLink)
}
logger.Ctx(ctx).Infow("completed enumeration", "count", itemCount)
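
Taken together, the renames in this file promote the old unexported itemIDPager methods (getPage/setNext/reset/valuesIn) to the exported DeltaPager[getIDAndAddtler] contract consumed by getAddedAndRemovedItemIDs. The sketch below illustrates the incremental pattern that code builds on: page with the stored delta token and, if the service rejects it, Reset the pager and enumerate from scratch. The interfaces are simplified local mirrors and the error value is a stand-in, not the actual graph error handling.

package sketch

import (
	"context"
	"errors"
)

// Simplified local mirrors of the api package's delta-pager pieces.
type DeltaPageLinker interface {
	GetOdataNextLink() *string
	GetOdataDeltaLink() *string
}

type DeltaPager[T any] interface {
	GetPage(context.Context) (DeltaPageLinker, error)
	SetNext(nextLink string)
	Reset(context.Context)
	ValuesIn(DeltaPageLinker) ([]T, error)
}

// errInvalidDelta stands in for graph's "delta token expired" error.
var errInvalidDelta = errors.New("invalid delta token")

// enumerate returns all items plus the delta link to persist for the next
// incremental run; on an expired token it resets the pager once and retries.
func enumerate[T any](ctx context.Context, p DeltaPager[T]) ([]T, string, error) {
	items, delta, err := drain(ctx, p)
	if errors.Is(err, errInvalidDelta) {
		p.Reset(ctx)
		items, delta, err = drain(ctx, p)
	}

	return items, delta, err
}

func drain[T any](ctx context.Context, p DeltaPager[T]) ([]T, string, error) {
	var (
		items []T
		delta string
	)

	for {
		page, err := p.GetPage(ctx)
		if err != nil {
			return nil, "", err
		}

		vals, err := p.ValuesIn(page)
		if err != nil {
			return nil, "", err
		}

		items = append(items, vals...)

		if dl := page.GetOdataDeltaLink(); dl != nil && len(*dl) > 0 {
			delta = *dl
		}

		next := page.GetOdataNextLink()
		if next == nil || len(*next) == 0 {
			return items, delta, nil
		}

		p.SetNext(*next)
	}
}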

View File

@ -95,7 +95,7 @@ func (p *testPager) setNext(nextLink string) {}
// mock id pager
var _ itemIDPager = &testIDsPager{}
var _ DeltaPager[getIDAndAddtler] = &testIDsPager{}
type testIDsPager struct {
t *testing.T
@ -105,7 +105,7 @@ type testIDsPager struct {
needsReset bool
}
func (p *testIDsPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
func (p *testIDsPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
if p.errorCode != "" {
ierr := odataerrors.NewMainError()
ierr.SetCode(&p.errorCode)
@ -118,8 +118,8 @@ func (p *testIDsPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
return testPage{}, nil
}
func (p *testIDsPager) setNext(string) {}
func (p *testIDsPager) reset(context.Context) {
func (p *testIDsPager) SetNext(string) {}
func (p *testIDsPager) Reset(context.Context) {
if !p.needsReset {
require.Fail(p.t, "reset should not be called")
}
@ -128,7 +128,7 @@ func (p *testIDsPager) reset(context.Context) {
p.errorCode = ""
}
func (p *testIDsPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
func (p *testIDsPager) ValuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
items := []getIDAndAddtler{}
for _, id := range p.added {
@ -208,15 +208,21 @@ func (suite *ItemPagerUnitSuite) TestEnumerateItems() {
func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
tests := []struct {
name string
pagerGetter func(*testing.T, context.Context, graph.Servicer, string, string, bool) (itemIDPager, error)
name string
pagerGetter func(
*testing.T,
context.Context,
graph.Servicer,
string, string,
bool,
) (DeltaPager[getIDAndAddtler], error)
deltaPagerGetter func(
*testing.T,
context.Context,
graph.Servicer,
string, string, string,
bool,
) (itemIDPager, error)
) (DeltaPager[getIDAndAddtler], error)
added []string
removed []string
deltaUpdate DeltaUpdate
@ -232,7 +238,7 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
user string,
directory string,
immutableIDs bool,
) (itemIDPager, error) {
) (DeltaPager[getIDAndAddtler], error) {
// this should not be called
return nil, assert.AnError
},
@ -244,7 +250,7 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
directory string,
delta string,
immutableIDs bool,
) (itemIDPager, error) {
) (DeltaPager[getIDAndAddtler], error) {
return &testIDsPager{
t: t,
added: []string{"uno", "dos"},
@ -265,7 +271,7 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
user string,
directory string,
immutableIDs bool,
) (itemIDPager, error) {
) (DeltaPager[getIDAndAddtler], error) {
// this should not be called
return nil, assert.AnError
},
@ -277,7 +283,7 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
directory string,
delta string,
immutableIDs bool,
) (itemIDPager, error) {
) (DeltaPager[getIDAndAddtler], error) {
return &testIDsPager{
t: t,
added: []string{"uno", "dos"},
@ -299,7 +305,7 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
user string,
directory string,
immutableIDs bool,
) (itemIDPager, error) {
) (DeltaPager[getIDAndAddtler], error) {
// this should not be called
return nil, assert.AnError
},
@ -311,7 +317,7 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
directory string,
delta string,
immutableIDs bool,
) (itemIDPager, error) {
) (DeltaPager[getIDAndAddtler], error) {
return &testIDsPager{
t: t,
added: []string{"uno", "dos"},
@ -335,7 +341,7 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
user string,
directory string,
immutableIDs bool,
) (itemIDPager, error) {
) (DeltaPager[getIDAndAddtler], error) {
return &testIDsPager{
t: t,
added: []string{"uno", "dos"},
@ -350,7 +356,7 @@ func (suite *ItemPagerUnitSuite) TestGetAddedAndRemovedItemIDs() {
directory string,
delta string,
immutableIDs bool,
) (itemIDPager, error) {
) (DeltaPager[getIDAndAddtler], error) {
return &testIDsPager{errorCode: "ErrorQuotaExceeded"}, nil
},
added: []string{"uno", "dos"},

View File

@ -41,7 +41,7 @@ func (suite *ListsAPIIntgSuite) TestLists_PostDrive() {
var (
acl = suite.its.ac.Lists()
driveName = testdata.DefaultRestoreConfig("list_api_post_drive").Location
siteID = suite.its.siteID
siteID = suite.its.site.id
)
// first post, should have no errors

View File

@ -41,46 +41,6 @@ type Mail struct {
// containers
// ---------------------------------------------------------------------------
// CreateMailFolder makes a mail folder iff a folder of the same name does not exist
// Reference: https://docs.microsoft.com/en-us/graph/api/user-post-mailfolders?view=graph-rest-1.0&tabs=http
func (c Mail) CreateMailFolder(
ctx context.Context,
userID, containerName string,
) (models.MailFolderable, error) {
isHidden := false
body := models.NewMailFolder()
body.SetDisplayName(&containerName)
body.SetIsHidden(&isHidden)
mdl, err := c.Stable.Client().
Users().
ByUserId(userID).
MailFolders().
Post(ctx, body, nil)
if err != nil {
return nil, graph.Wrap(ctx, err, "creating mail folder")
}
return mdl, nil
}
func (c Mail) DeleteMailFolder(
ctx context.Context,
userID, id string,
) error {
err := c.Stable.Client().
Users().
ByUserId(userID).
MailFolders().
ByMailFolderId(id).
Delete(ctx, nil)
if err != nil {
return graph.Wrap(ctx, err, "deleting mail folder")
}
return nil
}
func (c Mail) CreateContainer(
ctx context.Context,
userID, parentContainerID, containerName string,
@ -131,13 +91,10 @@ func (c Mail) DeleteContainer(
return nil
}
// prefer GetContainerByID where possible.
// use this only in cases where the models.MailFolderable
// is required.
func (c Mail) GetFolder(
func (c Mail) GetContainerByID(
ctx context.Context,
userID, containerID string,
) (models.MailFolderable, error) {
) (graph.Container, error) {
config := &users.ItemMailFoldersMailFolderItemRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemMailFoldersMailFolderItemRequestBuilderGetQueryParameters{
Select: idAnd(displayName, parentFolderID),
@ -158,14 +115,6 @@ func (c Mail) GetFolder(
return resp, nil
}
// interface-compliant wrapper of GetFolder
func (c Mail) GetContainerByID(
ctx context.Context,
userID, containerID string,
) (graph.Container, error) {
return c.GetFolder(ctx, userID, containerID)
}
// GetContainerByName fetches a folder by name
func (c Mail) GetContainerByName(
ctx context.Context,

View File

@ -174,7 +174,7 @@ func (p *mailPageCtrl) setNext(nextLink string) {
// item ID pager
// ---------------------------------------------------------------------------
var _ itemIDPager = &mailIDPager{}
var _ DeltaPager[getIDAndAddtler] = &mailIDPager{}
type mailIDPager struct {
gs graph.Servicer
@ -186,7 +186,7 @@ func (c Mail) NewMailIDsPager(
ctx context.Context,
userID, containerID string,
immutableIDs bool,
) itemIDPager {
) DeltaPager[getIDAndAddtler] {
config := &users.ItemMailFoldersItemMessagesRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemMailFoldersItemMessagesRequestBuilderGetQueryParameters{
Select: idAnd("isRead"),
@ -206,7 +206,7 @@ func (c Mail) NewMailIDsPager(
return &mailIDPager{c.Stable, builder, config}
}
func (p *mailIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
func (p *mailIDPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
page, err := p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
@ -215,14 +215,14 @@ func (p *mailIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
return EmptyDeltaLinker[models.Messageable]{PageLinkValuer: page}, nil
}
func (p *mailIDPager) setNext(nextLink string) {
func (p *mailIDPager) SetNext(nextLink string) {
p.builder = users.NewItemMailFoldersItemMessagesRequestBuilder(nextLink, p.gs.Adapter())
}
// non delta pagers don't have reset
func (p *mailIDPager) reset(context.Context) {}
func (p *mailIDPager) Reset(context.Context) {}
func (p *mailIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
func (p *mailIDPager) ValuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
return toValues[models.Messageable](pl)
}
@ -272,7 +272,7 @@ func (c Mail) GetItemIDsInContainer(
// delta item ID pager
// ---------------------------------------------------------------------------
var _ itemIDPager = &mailDeltaIDPager{}
var _ DeltaPager[getIDAndAddtler] = &mailDeltaIDPager{}
type mailDeltaIDPager struct {
gs graph.Servicer
@ -304,7 +304,7 @@ func (c Mail) NewMailDeltaIDsPager(
ctx context.Context,
userID, containerID, oldDelta string,
immutableIDs bool,
) itemIDPager {
) DeltaPager[getIDAndAddtler] {
config := &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration{
QueryParameters: &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetQueryParameters{
Select: idAnd("isRead"),
@ -324,7 +324,7 @@ func (c Mail) NewMailDeltaIDsPager(
return &mailDeltaIDPager{c.Stable, userID, containerID, builder, config}
}
func (p *mailDeltaIDPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
func (p *mailDeltaIDPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
page, err := p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
@ -333,11 +333,11 @@ func (p *mailDeltaIDPager) getPage(ctx context.Context) (DeltaPageLinker, error)
return page, nil
}
func (p *mailDeltaIDPager) setNext(nextLink string) {
func (p *mailDeltaIDPager) SetNext(nextLink string) {
p.builder = users.NewItemMailFoldersItemMessagesDeltaRequestBuilder(nextLink, p.gs.Adapter())
}
func (p *mailDeltaIDPager) reset(ctx context.Context) {
func (p *mailDeltaIDPager) Reset(ctx context.Context) {
p.builder = p.gs.
Client().
Users().
@ -348,7 +348,7 @@ func (p *mailDeltaIDPager) reset(ctx context.Context) {
Delta()
}
func (p *mailDeltaIDPager) valuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
func (p *mailDeltaIDPager) ValuesIn(pl PageLinker) ([]getIDAndAddtler, error) {
return toValues[models.Messageable](pl)
}

View File

@ -40,13 +40,13 @@ func (suite *MailPagerIntgSuite) TestMail_GetItemsInContainerByCollisionKey() {
ctx, flush := tester.NewContext(t)
defer flush()
container, err := ac.GetContainerByID(ctx, suite.its.userID, api.MailInbox)
container, err := ac.GetContainerByID(ctx, suite.its.user.id, api.MailInbox)
require.NoError(t, err, clues.ToCore(err))
msgs, err := ac.Stable.
Client().
Users().
ByUserId(suite.its.userID).
ByUserId(suite.its.user.id).
MailFolders().
ByMailFolderId(ptr.Val(container.GetId())).
Messages().
@ -62,7 +62,7 @@ func (suite *MailPagerIntgSuite) TestMail_GetItemsInContainerByCollisionKey() {
expect := maps.Keys(expectM)
results, err := suite.its.ac.Mail().GetItemsInContainerByCollisionKey(ctx, suite.its.userID, api.MailInbox)
results, err := suite.its.ac.Mail().GetItemsInContainerByCollisionKey(ctx, suite.its.user.id, api.MailInbox)
require.NoError(t, err, clues.ToCore(err))
require.Less(t, 0, len(results), "requires at least one result")
@ -101,7 +101,7 @@ func (suite *MailPagerIntgSuite) TestMail_GetItemsIDsInContainer() {
msgs, err := ac.Stable.
Client().
Users().
ByUserId(suite.its.userID).
ByUserId(suite.its.user.id).
MailFolders().
ByMailFolderId(api.MailInbox).
Messages().
@ -116,7 +116,7 @@ func (suite *MailPagerIntgSuite) TestMail_GetItemsIDsInContainer() {
}
results, err := suite.its.ac.Mail().
GetItemIDsInContainer(ctx, suite.its.userID, api.MailInbox)
GetItemIDsInContainer(ctx, suite.its.user.id, api.MailInbox)
require.NoError(t, err, clues.ToCore(err))
require.Less(t, 0, len(results), "requires at least one result")
require.Equal(t, len(expect), len(results), "must have same count of items")

View File

@ -383,7 +383,7 @@ func (suite *MailAPIIntgSuite) TestMail_RestoreLargeAttachment() {
folderName := testdata.DefaultRestoreConfig("maillargeattachmenttest").Location
msgs := suite.its.ac.Mail()
mailfolder, err := msgs.CreateMailFolder(ctx, userID, folderName)
mailfolder, err := msgs.CreateContainer(ctx, userID, api.MsgFolderRoot, folderName)
require.NoError(t, err, clues.ToCore(err))
msg := models.NewMessage()
@ -414,7 +414,7 @@ func (suite *MailAPIIntgSuite) TestMail_GetContainerByName() {
ctx, flush := tester.NewContext(t)
defer flush()
parent, err := acm.CreateContainer(ctx, suite.its.userID, "msgfolderroot", rc.Location)
parent, err := acm.CreateContainer(ctx, suite.its.user.id, "msgfolderroot", rc.Location)
require.NoError(t, err, clues.ToCore(err))
table := []struct {
@ -448,7 +448,7 @@ func (suite *MailAPIIntgSuite) TestMail_GetContainerByName() {
ctx, flush := tester.NewContext(t)
defer flush()
_, err := acm.GetContainerByName(ctx, suite.its.userID, test.parentContainerID, test.name)
_, err := acm.GetContainerByName(ctx, suite.its.user.id, test.parentContainerID, test.name)
test.expectErr(t, err, clues.ToCore(err))
})
}
@ -460,10 +460,10 @@ func (suite *MailAPIIntgSuite) TestMail_GetContainerByName() {
ctx, flush := tester.NewContext(t)
defer flush()
child, err := acm.CreateContainer(ctx, suite.its.userID, pid, rc.Location)
child, err := acm.CreateContainer(ctx, suite.its.user.id, pid, rc.Location)
require.NoError(t, err, clues.ToCore(err))
result, err := acm.GetContainerByName(ctx, suite.its.userID, pid, rc.Location)
result, err := acm.GetContainerByName(ctx, suite.its.user.id, pid, rc.Location)
assert.NoError(t, err, clues.ToCore(err))
assert.Equal(t, ptr.Val(child.GetId()), ptr.Val(result.GetId()))
})

View File

@ -1,56 +0,0 @@
package mock
import (
"context"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type PageLink struct {
Link *string
}
func (pl *PageLink) GetOdataNextLink() *string {
return pl.Link
}
type PagerResult struct {
Drives []models.Driveable
NextLink *string
Err error
}
type DrivePager struct {
ToReturn []PagerResult
GetIdx int
}
func (p *DrivePager) GetPage(context.Context) (api.PageLinker, error) {
if len(p.ToReturn) <= p.GetIdx {
return nil, clues.New("ToReturn index out of bounds")
}
idx := p.GetIdx
p.GetIdx++
return &PageLink{p.ToReturn[idx].NextLink}, p.ToReturn[idx].Err
}
func (p *DrivePager) SetNext(string) {}
func (p *DrivePager) ValuesIn(api.PageLinker) ([]models.Driveable, error) {
idx := p.GetIdx
if idx > 0 {
// Return values lag by one since we increment in GetPage().
idx--
}
if len(p.ToReturn) <= idx {
return nil, clues.New("ToReturn index out of bounds")
}
return p.ToReturn[idx].Drives, nil
}

View File

@ -0,0 +1,113 @@
package mock
import (
"context"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type DeltaNextLinks struct {
Next *string
Delta *string
}
func (dnl *DeltaNextLinks) GetOdataNextLink() *string {
return dnl.Next
}
func (dnl *DeltaNextLinks) GetOdataDeltaLink() *string {
return dnl.Delta
}
type PagerResult[T any] struct {
Values []T
NextLink *string
DeltaLink *string
Err error
}
// ---------------------------------------------------------------------------
// non-delta pager
// ---------------------------------------------------------------------------
type Pager[T any] struct {
ToReturn []PagerResult[T]
getIdx int
}
func (p *Pager[T]) GetPage(context.Context) (api.PageLinker, error) {
if len(p.ToReturn) <= p.getIdx {
return nil, clues.New("index out of bounds").
With("index", p.getIdx, "values", p.ToReturn)
}
idx := p.getIdx
p.getIdx++
link := DeltaNextLinks{Next: p.ToReturn[idx].NextLink}
return &link, p.ToReturn[idx].Err
}
func (p *Pager[T]) SetNext(string) {}
func (p *Pager[T]) ValuesIn(api.PageLinker) ([]T, error) {
idx := p.getIdx
if idx > 0 {
// Return values lag by one since we increment in GetPage().
idx--
}
if len(p.ToReturn) <= idx {
return nil, clues.New("index out of bounds").
With("index", idx, "values", p.ToReturn)
}
return p.ToReturn[idx].Values, nil
}
// ---------------------------------------------------------------------------
// delta pager
// ---------------------------------------------------------------------------
type DeltaPager[T any] struct {
ToReturn []PagerResult[T]
getIdx int
}
func (p *DeltaPager[T]) GetPage(context.Context) (api.DeltaPageLinker, error) {
if len(p.ToReturn) <= p.getIdx {
return nil, clues.New("index out of bounds").
With("index", p.getIdx, "values", p.ToReturn)
}
idx := p.getIdx
p.getIdx++
link := DeltaNextLinks{
Next: p.ToReturn[idx].NextLink,
Delta: p.ToReturn[idx].DeltaLink,
}
return &link, p.ToReturn[idx].Err
}
func (p *DeltaPager[T]) SetNext(string) {}
func (p *DeltaPager[T]) Reset(context.Context) {}
func (p *DeltaPager[T]) ValuesIn(api.PageLinker) ([]T, error) {
idx := p.getIdx
if idx > 0 {
// Return values lag by one since we increment in GetPage().
idx--
}
if len(p.ToReturn) <= idx {
return nil, clues.New("index out of bounds").
With("index", idx, "values", p.ToReturn)
}
return p.ToReturn[idx].Values, nil
}
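
A hypothetical test alongside the new mock (the file and test names here are illustrative, not part of the change) showing how successive ToReturn entries map onto GetPage/ValuesIn calls, including the one-page lag the comments describe:

// pager_test.go (hypothetical), in the same mock package as above.
package mock

import (
	"context"
	"testing"

	"github.com/stretchr/testify/require"
)

func TestDeltaPagerMock_pagesInOrder(t *testing.T) {
	next := "next-link"
	delta := "delta-link"

	p := &DeltaPager[string]{
		ToReturn: []PagerResult[string]{
			{Values: []string{"a", "b"}, NextLink: &next},
			{Values: []string{"c"}, DeltaLink: &delta},
		},
	}

	// First page: exposes a next link, no delta link yet.
	pl, err := p.GetPage(context.Background())
	require.NoError(t, err)
	require.NotNil(t, pl.GetOdataNextLink())

	vals, err := p.ValuesIn(pl)
	require.NoError(t, err)
	require.Equal(t, []string{"a", "b"}, vals)

	// Second page: enumeration is exhausted and the delta link is returned
	// for the caller to store.
	pl, err = p.GetPage(context.Background())
	require.NoError(t, err)
	require.NotNil(t, pl.GetOdataDeltaLink())

	vals, err = p.ValuesIn(pl)
	require.NoError(t, err)
	require.Equal(t, []string{"c"}, vals)
}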

View File

@ -238,7 +238,7 @@ func (c Users) GetInfo(ctx context.Context, userID string) (*UserInfo, error) {
return nil, clues.Stack(err)
}
userInfo.Mailbox.QuotaExceeded = graph.IsErrQuotaExceeded(err)
mi.QuotaExceeded = graph.IsErrQuotaExceeded(err)
}
userInfo.Mailbox = mi

View File

@ -7,6 +7,7 @@ import (
"github.com/h2non/gock"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/m365/graph"
@ -250,3 +251,29 @@ func (suite *UsersIntgSuite) TestUsers_GetInfo_errors() {
})
}
}
func (suite *UsersIntgSuite) TestUsers_GetInfo_quotaExceeded() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
defer gock.Off()
gock.EnableNetworking()
gock.New(graphAPIHostURL).
// Wildcard match on the inbox folder ID.
Get(v1APIURLPath("users", suite.its.user.id, "mailFolders", "(.*)", "messages", "delta")).
Reply(403).
SetHeaders(
map[string]string{
"Content-Type": "application/json; odata.metadata=minimal; " +
"odata.streaming=true; IEEE754Compatible=false; charset=utf-8",
},
).
BodyString(`{"error":{"code":"ErrorQuotaExceeded","message":"The process failed to get the correct properties."}}`)
output, err := suite.its.gockAC.Users().GetInfo(ctx, suite.its.user.id)
require.NoError(t, err, clues.ToCore(err))
assert.True(t, output.Mailbox.QuotaExceeded)
}

View File

@ -2,7 +2,6 @@ package testdata
import (
"os"
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/require"
@ -28,7 +27,7 @@ var AWSStorageCredEnvs = []string{
// Uses t.TempDir() to generate a unique config storage and caching directory for this
// test. Suites that need to identify this value can retrieve it again from the common
// configs.
func NewPrefixedS3Storage(t *testing.T) storage.Storage {
func NewPrefixedS3Storage(t tester.TestT) storage.Storage {
now := tester.LogTimeOfTest(t)
cfg, err := tconfig.ReadTestConfig()

View File

@ -1,6 +1,6 @@
.PHONY: buildimage build dev shell check genclidocs _validatemdgen publish sync
GO_VERSION := 1.19
GO_VERSION := 1.20
CORSO_BUILD_DIR := /tmp/.corsobuild
CORSO_BUILD_CACHE := ${CORSO_BUILD_DIR}/cache
CORSO_BUILD_MOD := ${CORSO_BUILD_DIR}/mod

View File

@ -54,10 +54,10 @@ then click **Add permissions**.
| Calendars.ReadWrite | Application | Read and write calendars in all mailboxes |
| Contacts.ReadWrite | Application | Read and write contacts in all mailboxes |
| Files.ReadWrite.All | Application | Read and write files in all site collections |
| Mail.ReadWrite | Application | Read and write mail in all mailboxes |
| User.Read.All | Application | Read all users' full profiles |
| Sites.FullControl.All | Application | Have full control of all site collections |
| MailboxSettings.Read | Application | Read all user mailbox settings |
| Mail.ReadWrite | Application | Read and write mail in all mailboxes |
| Sites.FullControl.All | Application | Have full control of all site collections |
| User.Read.All | Application | Read all users' full profiles |
<!-- vale Microsoft.Spacing = YES -->

View File

@ -20,7 +20,7 @@
"feather-icons": "^4.29.0",
"jarallax": "^2.1.3",
"mdx-mermaid": "^1.3.2",
"mermaid": "^10.3.1",
"mermaid": "^10.4.0",
"prism-react-renderer": "^1.3.5",
"react": "^17.0.2",
"react-dom": "^17.0.2",
@ -9344,9 +9344,9 @@
}
},
"node_modules/mermaid": {
"version": "10.3.1",
"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.3.1.tgz",
"integrity": "sha512-hkenh7WkuRWPcob3oJtrN3W+yzrrIYuWF1OIfk/d0xGE8UWlvDhfexaHmDwwe8DKQgqMLI8DWEPwGprxkumjuw==",
"version": "10.4.0",
"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.4.0.tgz",
"integrity": "sha512-4QCQLp79lvz7UZxow5HUX7uWTPJOaQBVExduo91tliXC7v78i6kssZOPHxLL+Xs30KU72cpPn3g3imw/xm/gaw==",
"dependencies": {
"@braintree/sanitize-url": "^6.0.1",
"@types/d3-scale": "^4.0.3",
@ -21856,9 +21856,9 @@
"integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="
},
"mermaid": {
"version": "10.3.1",
"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.3.1.tgz",
"integrity": "sha512-hkenh7WkuRWPcob3oJtrN3W+yzrrIYuWF1OIfk/d0xGE8UWlvDhfexaHmDwwe8DKQgqMLI8DWEPwGprxkumjuw==",
"version": "10.4.0",
"resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.4.0.tgz",
"integrity": "sha512-4QCQLp79lvz7UZxow5HUX7uWTPJOaQBVExduo91tliXC7v78i6kssZOPHxLL+Xs30KU72cpPn3g3imw/xm/gaw==",
"requires": {
"@braintree/sanitize-url": "^6.0.1",
"@types/d3-scale": "^4.0.3",

View File

@ -26,7 +26,7 @@
"feather-icons": "^4.29.0",
"jarallax": "^2.1.3",
"mdx-mermaid": "^1.3.2",
"mermaid": "^10.3.1",
"mermaid": "^10.4.0",
"prism-react-renderer": "^1.3.5",
"react": "^17.0.2",
"react-dom": "^17.0.2",

View File

@ -79,7 +79,8 @@ const sidebars = {
'cli/corso-backup-list-onedrive',
'cli/corso-backup-details-onedrive',
'cli/corso-backup-delete-onedrive',
'cli/corso-restore-onedrive']
'cli/corso-restore-onedrive',
'cli/corso-export-onedrive']
},
{
type: 'category',
@ -93,7 +94,8 @@ const sidebars = {
'cli/corso-backup-list-sharepoint',
'cli/corso-backup-details-sharepoint',
'cli/corso-backup-delete-sharepoint',
'cli/corso-restore-sharepoint']
'cli/corso-restore-sharepoint',
'cli/corso-export-sharepoint']
}
]
},