Merge branch 'main' of https://github.com/alcionai/corso into issue#3392

commit e359ce2b74
@@ -6,7 +6,7 @@ COPY src .
 ARG CORSO_BUILD_LDFLAGS=""
 RUN go build -o corso -ldflags "$CORSO_BUILD_LDFLAGS"

-FROM alpine:3.17
+FROM alpine:3

 LABEL org.opencontainers.image.title="Corso"
 LABEL org.opencontainers.image.description="Free, Secure, and Open-Source Backup for Microsoft 365"
@@ -1,12 +1,21 @@
 package repo

 import (
+	"strings"
+
+	"github.com/alcionai/clues"
 	"github.com/spf13/cobra"
+	"golang.org/x/exp/maps"

+	"github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
+	"github.com/alcionai/corso/src/pkg/control/repository"
 )

 const (
-	initCommand    = "init"
-	connectCommand = "connect"
+	initCommand        = "init"
+	connectCommand     = "connect"
+	maintenanceCommand = "maintenance"
 )

 var repoCommands = []func(cmd *cobra.Command) *cobra.Command{
@@ -18,15 +27,24 @@ func AddCommands(cmd *cobra.Command) {
 	var (
 		// Get new instances so that setting the context during tests works
 		// properly.
-		repoCmd    = repoCmd()
-		initCmd    = initCmd()
-		connectCmd = connectCmd()
+		repoCmd        = repoCmd()
+		initCmd        = initCmd()
+		connectCmd     = connectCmd()
+		maintenanceCmd = maintenanceCmd()
 	)

 	cmd.AddCommand(repoCmd)
 	repoCmd.AddCommand(initCmd)
 	repoCmd.AddCommand(connectCmd)

+	utils.AddCommand(
+		repoCmd,
+		maintenanceCmd,
+		utils.HideCommand(),
+		utils.MarkPreReleaseCommand())
+	utils.AddMaintenanceModeFlag(maintenanceCmd)
+	utils.AddForceMaintenanceFlag(maintenanceCmd)
+
 	for _, addRepoTo := range repoCommands {
 		addRepoTo(initCmd)
 		addRepoTo(connectCmd)
@@ -84,3 +102,65 @@ func connectCmd() *cobra.Command {
 func handleConnectCmd(cmd *cobra.Command, args []string) error {
 	return cmd.Help()
 }
+
+func maintenanceCmd() *cobra.Command {
+	return &cobra.Command{
+		Use:   maintenanceCommand,
+		Short: "Run maintenance on an existing repository",
+		Long:  `Run maintenance on an existing repository to optimize performance and storage use`,
+		RunE:  handleMaintenanceCmd,
+		Args:  cobra.NoArgs,
+	}
+}
+
+func handleMaintenanceCmd(cmd *cobra.Command, args []string) error {
+	ctx := cmd.Context()
+
+	t, err := getMaintenanceType(utils.MaintenanceModeFV)
+	if err != nil {
+		return err
+	}
+
+	r, _, err := utils.GetAccountAndConnect(ctx)
+	if err != nil {
+		return print.Only(ctx, err)
+	}
+
+	defer utils.CloseRepo(ctx, r)
+
+	m, err := r.NewMaintenance(
+		ctx,
+		repository.Maintenance{
+			Type:   t,
+			Safety: repository.FullMaintenanceSafety,
+			Force:  utils.ForceMaintenanceFV,
+		})
+	if err != nil {
+		return print.Only(ctx, err)
+	}
+
+	err = m.Run(ctx)
+	if err != nil {
+		return print.Only(ctx, err)
+	}
+
+	return nil
+}
+
+func getMaintenanceType(t string) (repository.MaintenanceType, error) {
+	res, ok := repository.StringToMaintenanceType[t]
+	if !ok {
+		modes := maps.Keys(repository.StringToMaintenanceType)
+		allButLast := []string{}
+
+		for i := 0; i < len(modes)-1; i++ {
+			allButLast = append(allButLast, string(modes[i]))
+		}
+
+		valuesStr := strings.Join(allButLast, ", ") + " or " + string(modes[len(modes)-1])
+
+		return res, clues.New(t + " is an unrecognized maintenance mode; must be one of " + valuesStr)
+	}
+
+	return res, nil
+}
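The maintenance subcommand above is registered hidden and pre-release via utils.HideCommand() and utils.MarkPreReleaseCommand(). As a sketch only, assuming the built binary is invoked as `corso` (per the Dockerfile's `go build -o corso` above) and using the flag names registered in the cli/utils changes further down, an invocation would look like:

    corso repo maintenance --mode metadata --force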
src/cli/repo/repo_test.go (new file, 41 lines)
@@ -0,0 +1,41 @@
+package repo
+
+import (
+	"testing"
+
+	"github.com/spf13/cobra"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+	"github.com/stretchr/testify/suite"
+
+	"github.com/alcionai/corso/src/internal/tester"
+)
+
+type RepoUnitSuite struct {
+	tester.Suite
+}
+
+func TestRepoUnitSuite(t *testing.T) {
+	suite.Run(t, &RepoUnitSuite{Suite: tester.NewUnitSuite(t)})
+}
+
+func (suite *RepoUnitSuite) TestAddRepoCommands() {
+	t := suite.T()
+	cmd := &cobra.Command{}
+
+	AddCommands(cmd)
+
+	var found bool
+
+	// This is the repo command.
+	repoCmds := cmd.Commands()
+	require.Len(t, repoCmds, 1)
+
+	for _, c := range repoCmds[0].Commands() {
+		if c.Use == maintenanceCommand {
+			found = true
+		}
+	}
+
+	assert.True(t, found, "looking for maintenance command")
+}
@@ -9,6 +9,7 @@ import (
 	"github.com/spf13/pflag"

 	"github.com/alcionai/corso/src/internal/common/dttm"
+	"github.com/alcionai/corso/src/pkg/control/repository"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
 )
@@ -37,6 +38,9 @@ var (

 	// for selection of data by category. eg: `--data email,contacts`
 	CategoryDataFV []string
+
+	MaintenanceModeFV  string
+	ForceMaintenanceFV bool
 )

 // common flag names (eg: FN)
@@ -59,6 +63,10 @@ const (
 	FileCreatedBeforeFN  = "file-created-before"
 	FileModifiedAfterFN  = "file-modified-after"
 	FileModifiedBeforeFN = "file-modified-before"
+
+	// Maintenance stuff.
+	MaintenanceModeFN  = "mode"
+	ForceMaintenanceFN = "force"
 )

 // well-known flag values
@@ -168,6 +176,30 @@ func AddSiteFlag(cmd *cobra.Command) {
 		"Backup data by site URL; accepts '"+Wildcard+"' to select all sites.")
 }

+func AddMaintenanceModeFlag(cmd *cobra.Command) {
+	fs := cmd.Flags()
+	fs.StringVar(
+		&MaintenanceModeFV,
+		MaintenanceModeFN,
+		repository.CompleteMaintenance.String(),
+		"Type of maintenance operation to run. Pass '"+
+			repository.MetadataMaintenance.String()+"' to run a faster maintenance "+
+			"that does minimal clean-up and optimization. Pass '"+
+			repository.CompleteMaintenance.String()+"' to fully compact existing "+
+			"data and delete unused data.")
+	cobra.CheckErr(fs.MarkHidden(MaintenanceModeFN))
+}
+
+func AddForceMaintenanceFlag(cmd *cobra.Command) {
+	fs := cmd.Flags()
+	fs.BoolVar(
+		&ForceMaintenanceFV,
+		ForceMaintenanceFN,
+		false,
+		"Force maintenance. Caution: user must ensure this is not run concurrently on a single repo")
+	cobra.CheckErr(fs.MarkHidden(ForceMaintenanceFN))
+}
+
 type PopulatedFlags map[string]struct{}

 func (fs PopulatedFlags) populate(pf *pflag.Flag) {
@@ -1,11 +1,8 @@
 package impl

 import (
-	"bytes"
-	"context"
 	"encoding/json"
 	"fmt"
-	"io"
 	"os"
 	"strings"
 	"time"
@@ -20,7 +17,6 @@ import (
 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/connector"
-	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
 	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/internal/version"
@@ -83,7 +79,7 @@ func generateAndRestoreItems(
 	}

 	collections := []collection{{
-		pathElements: []string{destFldr},
+		PathElements: []string{destFldr},
 		category:     cat,
 		items:        items,
 	}}
@@ -160,7 +156,7 @@ type collection struct {
 	// only contain elements after the prefix that corso uses for the path. For
 	// example, a collection for the Inbox folder in exchange mail would just be
 	// "Inbox".
-	pathElements []string
+	PathElements []string
 	category     path.CategoryType
 	items        []item
 }
@@ -180,7 +176,7 @@ func buildCollections(
 			service,
 			c.category,
 			false,
-			c.pathElements...)
+			c.PathElements...)
 		if err != nil {
 			return nil, err
 		}
@@ -198,45 +194,6 @@ func buildCollections(
 	return collections, nil
 }

-type permData struct {
-	user        string // user is only for older versions
-	entityID    string
-	roles       []string
-	sharingMode metadata.SharingMode
-}
-
-type itemData struct {
-	name  string
-	data  []byte
-	perms permData
-}
-
-type itemInfo struct {
-	// lookupKey is a string that can be used to find this data from a set of
-	// other data in the same collection. This key should be something that will
-	// be the same before and after restoring the item in M365 and may not be
-	// the M365 ID. When restoring items out of place, the item is assigned a
-	// new ID making it unsuitable for a lookup key.
-	lookupKey string
-	name      string
-	data      []byte
-}
-
-type onedriveCollection struct {
-	service       path.ServiceType
-	pathElements  []string
-	items         []itemInfo
-	aux           []itemInfo
-	backupVersion int
-}
-
-type onedriveColInfo struct {
-	pathElements []string
-	perms        permData
-	files        []itemData
-	folders      []itemData
-}
-
 var (
 	folderAName = "folder-a"
 	folderBName = "b"
@@ -276,14 +233,14 @@ func generateAndRestoreDriveItems(

 	switch service {
 	case path.SharePointService:
-		d, err := gc.Service.Client().SitesById(resourceOwner).Drive().Get(ctx, nil)
+		d, err := gc.Service.Client().Sites().BySiteId(resourceOwner).Drive().Get(ctx, nil)
 		if err != nil {
 			return nil, clues.Wrap(err, "getting site's default drive")
 		}

 		driveID = ptr.Val(d.GetId())
 	default:
-		d, err := gc.Service.Client().UsersById(resourceOwner).Drive().Get(ctx, nil)
+		d, err := gc.Service.Client().Users().ByUserId(resourceOwner).Drive().Get(ctx, nil)
 		if err != nil {
 			return nil, clues.Wrap(err, "getting user's default drive")
 		}
@@ -292,7 +249,7 @@ func generateAndRestoreDriveItems(
 	}

 	var (
-		cols []onedriveColInfo
+		cols []connector.OnedriveColInfo

 		rootPath    = []string{"drives", driveID, "root:"}
 		folderAPath = []string{"drives", driveID, "root:", folderAName}
@@ -306,43 +263,43 @@ func generateAndRestoreDriveItems(
 	)

 	for i := 0; i < count; i++ {
-		col := []onedriveColInfo{
+		col := []connector.OnedriveColInfo{
 			// basic folder and file creation
 			{
-				pathElements: rootPath,
-				files: []itemData{
+				PathElements: rootPath,
+				Files: []connector.ItemData{
 					{
-						name: fmt.Sprintf("file-1st-count-%d-at-%s", i, currentTime),
-						data: fileAData,
-						perms: permData{
-							user:     secondaryUserName,
-							entityID: secondaryUserID,
-							roles:    writePerm,
+						Name: fmt.Sprintf("file-1st-count-%d-at-%s", i, currentTime),
+						Data: fileAData,
+						Perms: connector.PermData{
+							User:     secondaryUserName,
+							EntityID: secondaryUserID,
+							Roles:    writePerm,
 						},
 					},
 					{
-						name: fmt.Sprintf("file-2nd-count-%d-at-%s", i, currentTime),
-						data: fileBData,
+						Name: fmt.Sprintf("file-2nd-count-%d-at-%s", i, currentTime),
+						Data: fileBData,
 					},
 				},
-				folders: []itemData{
+				Folders: []connector.ItemData{
 					{
-						name: folderBName,
+						Name: folderBName,
 					},
 					{
-						name: folderAName,
-						perms: permData{
-							user:     secondaryUserName,
-							entityID: secondaryUserID,
-							roles:    readPerm,
+						Name: folderAName,
+						Perms: connector.PermData{
+							User:     secondaryUserName,
+							EntityID: secondaryUserID,
+							Roles:    readPerm,
 						},
 					},
 					{
-						name: folderCName,
-						perms: permData{
-							user:     secondaryUserName,
-							entityID: secondaryUserID,
-							roles:    readPerm,
+						Name: folderCName,
+						Perms: connector.PermData{
+							User:     secondaryUserName,
+							EntityID: secondaryUserID,
+							Roles:    readPerm,
 						},
 					},
 				},
@@ -350,62 +307,62 @@ func generateAndRestoreDriveItems(
 			{
 				// a folder that has permissions with an item in the folder with
 				// the different permissions.
-				pathElements: folderAPath,
-				files: []itemData{
+				PathElements: folderAPath,
+				Files: []connector.ItemData{
 					{
-						name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
-						data: fileEData,
-						perms: permData{
-							user:     secondaryUserName,
-							entityID: secondaryUserID,
-							roles:    writePerm,
+						Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
+						Data: fileEData,
+						Perms: connector.PermData{
+							User:     secondaryUserName,
+							EntityID: secondaryUserID,
+							Roles:    writePerm,
 						},
 					},
 				},
-				perms: permData{
-					user:     secondaryUserName,
-					entityID: secondaryUserID,
-					roles:    readPerm,
+				Perms: connector.PermData{
+					User:     secondaryUserName,
+					EntityID: secondaryUserID,
+					Roles:    readPerm,
 				},
 			},
 			{
 				// a folder that has permissions with an item in the folder with
 				// no permissions.
-				pathElements: folderCPath,
-				files: []itemData{
+				PathElements: folderCPath,
+				Files: []connector.ItemData{
 					{
-						name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
-						data: fileAData,
+						Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
+						Data: fileAData,
 					},
 				},
-				perms: permData{
-					user:     secondaryUserName,
-					entityID: secondaryUserID,
-					roles:    readPerm,
+				Perms: connector.PermData{
+					User:     secondaryUserName,
+					EntityID: secondaryUserID,
+					Roles:    readPerm,
 				},
 			},
 			{
-				pathElements: folderBPath,
-				files: []itemData{
+				PathElements: folderBPath,
+				Files: []connector.ItemData{
 					{
 						// restoring a file in a non-root folder that doesn't inherit
 						// permissions.
-						name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
-						data: fileBData,
-						perms: permData{
-							user:     secondaryUserName,
-							entityID: secondaryUserID,
-							roles:    writePerm,
+						Name: fmt.Sprintf("file-count-%d-at-%s", i, currentTime),
+						Data: fileBData,
+						Perms: connector.PermData{
+							User:     secondaryUserName,
+							EntityID: secondaryUserID,
+							Roles:    writePerm,
 						},
 					},
 				},
-				folders: []itemData{
+				Folders: []connector.ItemData{
 					{
-						name: folderAName,
-						perms: permData{
-							user:     secondaryUserName,
-							entityID: secondaryUserID,
-							roles:    readPerm,
+						Name: folderAName,
+						Perms: connector.PermData{
+							User:     secondaryUserName,
+							EntityID: secondaryUserID,
+							Roles:    readPerm,
 						},
 					},
 				},
@@ -415,302 +372,40 @@ func generateAndRestoreDriveItems(
 		cols = append(cols, col...)
 	}

-	input := dataForInfo(service, cols, version.Backup)
+	input, err := connector.DataForInfo(service, cols, version.Backup)
+	if err != nil {
+		return nil, err
+	}

-	collections := getCollections(
-		service,
-		tenantID,
-		[]string{resourceOwner},
-		input,
-		version.Backup)
+	// collections := getCollections(
+	// 	service,
+	// 	tenantID,
+	// 	[]string{resourceOwner},
+	// 	input,
+	// 	version.Backup)

 	opts := control.Options{
 		RestorePermissions: true,
 		ToggleFeatures:     control.Toggles{},
 	}

+	config := connector.ConfigInfo{
+		Acct:           acct,
+		Opts:           opts,
+		Resource:       connector.Users,
+		Service:        service,
+		Tenant:         tenantID,
+		ResourceOwners: []string{resourceOwner},
+		Dest:           tester.DefaultTestRestoreDestination(""),
+	}
+
+	_, _, collections, _, err := connector.GetCollectionsAndExpected(
+		config,
+		input,
+		version.Backup)
+	if err != nil {
+		return nil, err
+	}
+
 	return gc.ConsumeRestoreCollections(ctx, version.Backup, acct, sel, dest, opts, collections, errs)
 }
-
-func getCollections(
-	service path.ServiceType,
-	tenant string,
-	resourceOwners []string,
-	testCollections []colInfo,
-	backupVersion int,
-) []data.RestoreCollection {
-	var collections []data.RestoreCollection
-
-	for _, owner := range resourceOwners {
-		ownerCollections := collectionsForInfo(
-			service,
-			tenant,
-			owner,
-			testCollections,
-			backupVersion,
-		)
-
-		collections = append(collections, ownerCollections...)
-	}
-
-	return collections
-}
-
-type mockRestoreCollection struct {
-	data.Collection
-	auxItems map[string]data.Stream
-}
-
-func (rc mockRestoreCollection) Fetch(
-	ctx context.Context,
-	name string,
-) (data.Stream, error) {
-	res := rc.auxItems[name]
-	if res == nil {
-		return nil, data.ErrNotFound
-	}
-
-	return res, nil
-}
-
-func collectionsForInfo(
-	service path.ServiceType,
-	tenant, user string,
-	allInfo []colInfo,
-	backupVersion int,
-) []data.RestoreCollection {
-	collections := make([]data.RestoreCollection, 0, len(allInfo))
-
-	for _, info := range allInfo {
-		pth := mustToDataLayerPath(
-			service,
-			tenant,
-			user,
-			info.category,
-			info.pathElements,
-			false)
-
-		mc := exchMock.NewCollection(pth, pth, len(info.items))
-
-		for i := 0; i < len(info.items); i++ {
-			mc.Names[i] = info.items[i].name
-			mc.Data[i] = info.items[i].data
-
-			// We do not count metadata files against item count
-			if backupVersion > 0 && metadata.HasMetaSuffix(info.items[i].name) &&
-				(service == path.OneDriveService || service == path.SharePointService) {
-				continue
-			}
-		}
-
-		c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}}
-
-		for _, aux := range info.auxItems {
-			c.auxItems[aux.name] = &exchMock.Data{
-				ID:     aux.name,
-				Reader: io.NopCloser(bytes.NewReader(aux.data)),
-			}
-		}
-
-		collections = append(collections, c)
-	}
-
-	return collections
-}
-
-func mustToDataLayerPath(
-	service path.ServiceType,
-	tenant, resourceOwner string,
-	category path.CategoryType,
-	elements []string,
-	isItem bool,
-) path.Path {
-	res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
-	if err != nil {
-		fmt.Println("building path", clues.ToCore(err))
-	}
-
-	return res
-}
-
-type colInfo struct {
-	// Elements (in order) for the path representing this collection. Should
-	// only contain elements after the prefix that corso uses for the path. For
-	// example, a collection for the Inbox folder in exchange mail would just be
-	// "Inbox".
-	pathElements []string
-	category     path.CategoryType
-	items        []itemInfo
-	// auxItems are items that can be retrieved with Fetch but won't be returned
-	// by Items().
-	auxItems []itemInfo
-}
-
-func newOneDriveCollection(
-	service path.ServiceType,
-	pathElements []string,
-	backupVersion int,
-) *onedriveCollection {
-	return &onedriveCollection{
-		service:       service,
-		pathElements:  pathElements,
-		backupVersion: backupVersion,
-	}
-}
-
-func dataForInfo(
-	service path.ServiceType,
-	cols []onedriveColInfo,
-	backupVersion int,
-) []colInfo {
-	var res []colInfo
-
-	for _, c := range cols {
-		onedriveCol := newOneDriveCollection(service, c.pathElements, backupVersion)
-
-		for _, f := range c.files {
-			onedriveCol.withFile(f.name, f.data, f.perms)
-		}
-
-		onedriveCol.withPermissions(c.perms)
-
-		res = append(res, onedriveCol.collection())
-	}
-
-	return res
-}
-
-func (c onedriveCollection) collection() colInfo {
-	cat := path.FilesCategory
-	if c.service == path.SharePointService {
-		cat = path.LibrariesCategory
-	}
-
-	return colInfo{
-		pathElements: c.pathElements,
-		category:     cat,
-		items:        c.items,
-		auxItems:     c.aux,
-	}
-}
-
-func (c *onedriveCollection) withFile(name string, fileData []byte, perm permData) *onedriveCollection {
-	c.items = append(c.items, onedriveItemWithData(
-		name+metadata.DataFileSuffix,
-		name+metadata.DataFileSuffix,
-		fileData))
-
-	md := onedriveMetadata(
-		name,
-		name+metadata.MetaFileSuffix,
-		name,
-		perm,
-		true)
-	c.items = append(c.items, md)
-	c.aux = append(c.aux, md)
-
-	return c
-}
-
-// withPermissions adds permissions to the folder represented by this
-// onedriveCollection.
-func (c *onedriveCollection) withPermissions(perm permData) *onedriveCollection {
-	if c.backupVersion < version.OneDrive4DirIncludesPermissions {
-		return c
-	}
-
-	name := c.pathElements[len(c.pathElements)-1]
-	metaName := name
-
-	if c.backupVersion >= version.OneDrive5DirMetaNoName {
-		// We switched to just .dirmeta for metadata file names.
-		metaName = ""
-	}
-
-	if name == "root:" {
-		return c
-	}
-
-	md := onedriveMetadata(
-		name,
-		metaName+metadata.DirMetaFileSuffix,
-		metaName+metadata.DirMetaFileSuffix,
-		perm,
-		true)
-
-	c.items = append(c.items, md)
-	c.aux = append(c.aux, md)
-
-	return c
-}
-
-type oneDriveData struct {
-	FileName string `json:"fileName,omitempty"`
-	Data     []byte `json:"data,omitempty"`
-}
-
-func onedriveItemWithData(
-	name, lookupKey string,
-	fileData []byte,
-) itemInfo {
-	content := oneDriveData{
-		FileName: lookupKey,
-		Data:     fileData,
-	}
-
-	serialized, _ := json.Marshal(content)
-
-	return itemInfo{
-		name:      name,
-		data:      serialized,
-		lookupKey: lookupKey,
-	}
-}
-
-func onedriveMetadata(
-	fileName, itemID, lookupKey string,
-	perm permData,
-	permUseID bool,
-) itemInfo {
-	meta := getMetadata(fileName, perm, permUseID)
-
-	metaJSON, err := json.Marshal(meta)
-	if err != nil {
-		fmt.Println("marshalling metadata", clues.ToCore(err))
-	}
-
-	return itemInfo{
-		name:      itemID,
-		data:      metaJSON,
-		lookupKey: lookupKey,
-	}
-}
-
-func getMetadata(fileName string, perm permData, permUseID bool) metadata.Metadata {
-	if len(perm.user) == 0 || len(perm.roles) == 0 ||
-		perm.sharingMode != metadata.SharingModeCustom {
-		return metadata.Metadata{
-			FileName:    fileName,
-			SharingMode: perm.sharingMode,
-		}
-	}
-
-	// In case of permissions, the id will usually be same for same
-	// user/role combo unless deleted and readded, but we have to do
-	// this as we only have two users of which one is already taken.
-	id := uuid.NewString()
-	uperm := metadata.Permission{ID: id, Roles: perm.roles}
-
-	if permUseID {
-		uperm.EntityID = perm.entityID
-	} else {
-		uperm.Email = perm.user
-	}
-
-	meta := metadata.Metadata{
-		FileName:    fileName,
-		Permissions: []metadata.Permission{uperm},
-	}
-
-	return meta
-}
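The helper types and functions removed above (permData, itemData, colInfo, onedriveColInfo, dataForInfo, getCollections, and related code) are superseded by exported equivalents consumed from the connector package. A condensed sketch of the new call path, using only identifiers that appear in this diff:

    // Build the restore input via the shared, exported connector helpers,
    // then derive the restore collections from it.
    input, err := connector.DataForInfo(service, cols, version.Backup)
    _, _, collections, _, err = connector.GetCollectionsAndExpected(config, input, version.Backup)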
@@ -16,12 +16,12 @@ import (

 	"github.com/alcionai/corso/src/cli/utils"
 	"github.com/alcionai/corso/src/internal/common"
-	"github.com/alcionai/corso/src/internal/connector/exchange/api"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/credentials"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

 // Required inputs from user for command execution
@@ -22,9 +22,9 @@ import (
 	"github.com/alcionai/corso/src/internal/common"
 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/connector/graph"
-	"github.com/alcionai/corso/src/internal/connector/onedrive/api"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/credentials"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

 const downloadURLKey = "@microsoft.graph.downloadUrl"
@@ -109,7 +109,7 @@ func checkEmailRestoration(
 		restoreFolder    models.MailFolderable
 		itemCount        = make(map[string]int32)
 		restoreItemCount = make(map[string]int32)
-		builder          = client.UsersById(testUser).MailFolders()
+		builder          = client.Users().ByUserId(testUser).MailFolders()
 	)

 	for {
@@ -152,8 +152,10 @@ func checkEmailRestoration(
 		"restore_folder_name", folderName)

 	childFolder, err := client.
-		UsersById(testUser).
-		MailFoldersById(folderID).
+		Users().
+		ByUserId(testUser).
+		MailFolders().
+		ByMailFolderId(folderID).
 		ChildFolders().
 		Get(ctx, nil)
 	if err != nil {
@@ -213,8 +215,10 @@ func getAllMailSubFolders(
 	ctx = clues.Add(ctx, "parent_folder_id", folderID)

 	childFolder, err := client.
-		UsersById(testUser).
-		MailFoldersById(folderID).
+		Users().
+		ByUserId(testUser).
+		MailFolders().
+		ByMailFolderId(folderID).
 		ChildFolders().
 		Get(ctx, options)
 	if err != nil {
@@ -263,8 +267,10 @@ func checkAllSubFolder(
 	)

 	childFolder, err := client.
-		UsersById(testUser).
-		MailFoldersById(folderID).
+		Users().
+		ByUserId(testUser).
+		MailFolders().
+		ByMailFolderId(folderID).
 		ChildFolders().
 		Get(ctx, options)
 	if err != nil {
@@ -303,7 +309,8 @@ func checkOneDriveRestoration(
 	startTime time.Time,
 ) {
 	drive, err := client.
-		UsersById(userID).
+		Users().
+		ByUserId(userID).
 		Drive().
 		Get(ctx, nil)
 	if err != nil {
@@ -333,7 +340,8 @@ func checkSharePointRestoration(
 	startTime time.Time,
 ) {
 	drive, err := client.
-		SitesById(siteID).
+		Sites().
+		BySiteId(siteID).
 		Drive().
 		Get(ctx, nil)
 	if err != nil {
@@ -381,8 +389,10 @@ func checkDriveRestoration(
 	ctx = clues.Add(ctx, "drive_id", driveID, "drive_name", driveName)

 	response, err := client.
-		DrivesById(driveID).
-		Root().
+		Drives().
+		ByDriveId(driveID).
+		Items().
+		ByDriveItemId("root").
 		Children().
 		Get(ctx, nil)
 	if err != nil {
@@ -521,7 +531,7 @@ func getOneDriveChildFolder(
 	folderPermission map[string][]permissionInfo,
 	startTime time.Time,
 ) {
-	response, err := client.DrivesById(driveID).ItemsById(itemID).Children().Get(ctx, nil)
+	response, err := client.Drives().ByDriveId(driveID).Items().ByDriveItemId(itemID).Children().Get(ctx, nil)
 	if err != nil {
 		fatal(ctx, "getting child folder", err)
 	}
@@ -569,8 +579,10 @@ func getRestoredDrive(
 	startTime time.Time,
 ) {
 	restored, err := client.
-		DrivesById(driveID).
-		ItemsById(restoreFolderID).
+		Drives().
+		ByDriveId(driveID).
+		Items().
+		ByDriveItemId(restoreFolderID).
 		Children().
 		Get(ctx, nil)
 	if err != nil {
@@ -610,8 +622,10 @@ func permissionIn(
 	pi := []permissionInfo{}

 	pcr, err := client.
-		DrivesById(driveID).
-		ItemsById(itemID).
+		Drives().
+		ByDriveId(driveID).
+		Items().
+		ByDriveItemId(itemID).
 		Permissions().
 		Get(ctx, nil)
 	if err != nil {
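All of the request-builder rewrites above follow the msgraph-sdk-go v0.x to v1.x migration picked up in go.mod below: each XsById(id) indexer splits into a collection call plus a ByXId(id) call. A representative before/after, taken from the hunks above:

    // msgraph-sdk-go v0.x (removed):
    client.UsersById(testUser).MailFoldersById(folderID).ChildFolders().Get(ctx, nil)
    // msgraph-sdk-go v1.x (added):
    client.Users().ByUserId(testUser).MailFolders().ByMailFolderId(folderID).ChildFolders().Get(ctx, nil)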
src/go.mod (31 lines changed)
@@ -8,19 +8,19 @@ require (
 	github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0
 	github.com/alcionai/clues v0.0.0-20230406223931-f48777f4773c
 	github.com/armon/go-metrics v0.4.1
-	github.com/aws/aws-sdk-go v1.44.262
+	github.com/aws/aws-sdk-go v1.44.263
 	github.com/aws/aws-xray-sdk-go v1.8.1
 	github.com/cenkalti/backoff/v4 v4.2.1
 	github.com/google/uuid v1.3.0
 	github.com/h2non/gock v1.2.0
 	github.com/kopia/kopia v0.12.2-0.20230327171220-747baeebdab1
-	github.com/microsoft/kiota-abstractions-go v0.18.0
-	github.com/microsoft/kiota-authentication-azure-go v0.6.0
-	github.com/microsoft/kiota-http-go v0.16.1
-	github.com/microsoft/kiota-serialization-form-go v0.8.2
-	github.com/microsoft/kiota-serialization-json-go v0.8.2
-	github.com/microsoftgraph/msgraph-sdk-go v0.53.0
-	github.com/microsoftgraph/msgraph-sdk-go-core v0.33.0
+	github.com/microsoft/kiota-abstractions-go v1.0.0
+	github.com/microsoft/kiota-authentication-azure-go v1.0.0
+	github.com/microsoft/kiota-http-go v1.0.0
+	github.com/microsoft/kiota-serialization-form-go v1.0.0
+	github.com/microsoft/kiota-serialization-json-go v1.0.0
+	github.com/microsoftgraph/msgraph-sdk-go v1.1.0
+	github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0
 	github.com/pkg/errors v0.9.1
 	github.com/rudderlabs/analytics-go v3.3.3+incompatible
 	github.com/spatialcurrent/go-lazy v0.0.0-20211115014721-47315cc003d1
@@ -33,9 +33,8 @@ require (
 	github.com/vbauerster/mpb/v8 v8.1.6
 	go.uber.org/zap v1.24.0
 	golang.org/x/exp v0.0.0-20230213192124-5e25df0256eb
-	golang.org/x/time v0.1.0
+	golang.org/x/time v0.3.0
 	golang.org/x/tools v0.9.1
-	gopkg.in/resty.v1 v1.12.0
 )

 require (
@@ -60,8 +59,8 @@ require (
 )

 require (
-	github.com/Azure/azure-sdk-for-go/sdk/azcore v1.3.0 // indirect
-	github.com/Azure/azure-sdk-for-go/sdk/internal v1.1.2 // indirect
+	github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0 // indirect
+	github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 // indirect
 	github.com/AzureAD/microsoft-authentication-library-for-go v0.7.0 // indirect
 	github.com/beorn7/perks v1.0.1 // indirect
 	github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 // indirect
@@ -71,7 +70,7 @@ require (
 	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/dustin/go-humanize v1.0.1
 	github.com/edsrzf/mmap-go v1.1.0 // indirect
-	github.com/go-logr/logr v1.2.3 // indirect
+	github.com/go-logr/logr v1.2.4 // indirect
 	github.com/go-logr/stdr v1.2.2 // indirect
 	github.com/golang-jwt/jwt/v4 v4.5.0 // indirect
 	github.com/golang/protobuf v1.5.3 // indirect
@@ -89,7 +88,7 @@ require (
 	github.com/mattn/go-runewidth v0.0.14 // indirect
 	github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
 	github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
-	github.com/microsoft/kiota-serialization-text-go v0.7.0
+	github.com/microsoft/kiota-serialization-text-go v1.0.0
 	github.com/minio/md5-simd v1.1.2 // indirect
 	github.com/minio/minio-go/v7 v7.0.52 // indirect
 	github.com/minio/sha256-simd v1.0.0 // indirect
@@ -112,8 +111,8 @@ require (
 	github.com/xtgo/uuid v0.0.0-20140804021211-a0b114877d4c // indirect
 	github.com/yosida95/uritemplate/v3 v3.0.2 // indirect
 	github.com/zeebo/blake3 v0.2.3 // indirect
-	go.opentelemetry.io/otel v1.14.0 // indirect
-	go.opentelemetry.io/otel/trace v1.14.0 // indirect
+	go.opentelemetry.io/otel v1.15.1 // indirect
+	go.opentelemetry.io/otel/trace v1.15.1 // indirect
 	go.uber.org/atomic v1.10.0 // indirect
 	go.uber.org/multierr v1.11.0 // indirect
 	golang.org/x/crypto v0.8.0 // indirect
src/go.sum (63 lines changed)
@@ -36,12 +36,12 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX
 cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
 cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo=
 dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
-github.com/Azure/azure-sdk-for-go/sdk/azcore v1.3.0 h1:VuHAcMq8pU1IWNT/m5yRaGqbK0BiQKHT8X4DTp9CHdI=
-github.com/Azure/azure-sdk-for-go/sdk/azcore v1.3.0/go.mod h1:tZoQYdDZNOiIjdSn0dVWVfl0NEPGOJqVLzSrcFk4Is0=
+github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0 h1:8kDqDngH+DmVBiCtIjCFTGa7MBnsIOkF9IccInFEbjk=
+github.com/Azure/azure-sdk-for-go/sdk/azcore v1.6.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q=
 github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0 h1:t/W5MYAuQy81cvM8VUNfRLzhtKpXhVUAN7Cd7KVbTyc=
 github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0/go.mod h1:NBanQUfSWiWn3QEpWDTCU0IjBECKOYvl2R8xdRtMtiM=
-github.com/Azure/azure-sdk-for-go/sdk/internal v1.1.2 h1:+5VZ72z0Qan5Bog5C+ZkgSqUbeVUd9wgtHOrIKuc5b8=
-github.com/Azure/azure-sdk-for-go/sdk/internal v1.1.2/go.mod h1:eWRD7oawr1Mu1sLCawqVc0CUiF43ia3qQMxLscsKQ9w=
+github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 h1:sXr+ck84g/ZlZUOZiNELInmMgOsuGwdjjVkEIde0OtY=
+github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0/go.mod h1:okt5dMMTOFjX/aovMlrjvvXoPMBVSPzk9185BT0+eZM=
 github.com/AzureAD/microsoft-authentication-library-for-go v0.7.0 h1:VgSJlZH5u0k2qxSpqyghcFQKmvYckj46uymKK5XzkBM=
 github.com/AzureAD/microsoft-authentication-library-for-go v0.7.0/go.mod h1:BDJ5qMFKx9DugEg3+uQSDCdbYPr5s9vBTrL9P8TpqOU=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
@@ -66,8 +66,8 @@ github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY
 github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
 github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
 github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
-github.com/aws/aws-sdk-go v1.44.262 h1:gyXpcJptWoNkK+DiAiaBltlreoWKQXjAIh6FRh60F+I=
-github.com/aws/aws-sdk-go v1.44.262/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
+github.com/aws/aws-sdk-go v1.44.263 h1:Dkt5fcdtL8QtK3cz0bOTQ84m9dGx+YDeTsDl+wY2yW4=
+github.com/aws/aws-sdk-go v1.44.263/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
 github.com/aws/aws-xray-sdk-go v1.8.1 h1:O4pXV+hnCskaamGsZnFpzHyAmgPGusBMN6i7nnsy0Fo=
 github.com/aws/aws-xray-sdk-go v1.8.1/go.mod h1:wMmVYzej3sykAttNBkXQHK/+clAPWTOrPiajEk7Cp3A=
 github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=
@@ -124,8 +124,8 @@ github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2
 github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
 github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
 github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
-github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0=
-github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
+github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ=
+github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
 github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
 github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
 github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
@@ -273,22 +273,22 @@ github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zk
 github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
 github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
 github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
-github.com/microsoft/kiota-abstractions-go v0.18.0 h1:H1kQE5hAq/7Q8gENPJ1Y7DuvG9QqKCpglN8D7TJi9qY=
-github.com/microsoft/kiota-abstractions-go v0.18.0/go.mod h1:0lbPErVO6Rj3HHpntNYW/OFmHhJJ1ewPdsi1xPxYIMc=
-github.com/microsoft/kiota-authentication-azure-go v0.6.0 h1:Il9bLO34J6D8DY89xYAXoGh9muvlphayqG4eihyT6B8=
-github.com/microsoft/kiota-authentication-azure-go v0.6.0/go.mod h1:EJCHiLWLXW1/mSgX7lYReAhVO37MzRT5Xi2mcPTwCRQ=
-github.com/microsoft/kiota-http-go v0.16.1 h1:5SZbSwHs14Xve5VMQHHz00lwL/kEg3H9rgESAUrXnvw=
-github.com/microsoft/kiota-http-go v0.16.1/go.mod h1:pKSaeSaBwh3Zadbnzw3kALEZbCZA1gq7A5PuxwVd/aU=
-github.com/microsoft/kiota-serialization-form-go v0.8.2 h1:qrkJGBObZo0NjJwwbT3lUySjaLKqjz+r4TQGQTX/C/c=
-github.com/microsoft/kiota-serialization-form-go v0.8.2/go.mod h1:FQqYzIrGX6KUoDOlg+DhDWoGaZoB8AicBYGOsBq0Dw4=
-github.com/microsoft/kiota-serialization-json-go v0.8.2 h1:vLKZAOiMsaUxq36RDo3S/FfQbW2VQCdAIu4DS7+Qhrk=
-github.com/microsoft/kiota-serialization-json-go v0.8.2/go.mod h1:gGcLNSdIdOZ4la2qztA0vaCq/LtlA53gpP+ur8n/+oA=
-github.com/microsoft/kiota-serialization-text-go v0.7.0 h1:uayeq8fpDcZgL0zDyLkYZsH6zNnEXKgp+bRWfR5LcxA=
-github.com/microsoft/kiota-serialization-text-go v0.7.0/go.mod h1:2su1PTllHCMNkHugmvpYad+AKBXUUGoiNP3xOAJUL7w=
-github.com/microsoftgraph/msgraph-sdk-go v0.53.0 h1:HpQd1Nvr8yQNeqhDuiVSbqn1fkHsFbRFDmnuhhXJXOQ=
-github.com/microsoftgraph/msgraph-sdk-go v0.53.0/go.mod h1:BZLyon4n4T4EuLIAlX+kJ5JgneFTXVQDah1AJuq3FRY=
-github.com/microsoftgraph/msgraph-sdk-go-core v0.33.0 h1:cDL3ov/IZ2ZarUJdGGPsdR+46ALdd3CRAiDBIylLCoA=
-github.com/microsoftgraph/msgraph-sdk-go-core v0.33.0/go.mod h1:d0mU3PQAWnN/C4CwPJEZz2QhesrnR5UDnqRu2ODWPkI=
+github.com/microsoft/kiota-abstractions-go v1.0.0 h1:teQS3yOmcTyps+O48AD17LI8TR1B3wCEwGFcwC6K75c=
+github.com/microsoft/kiota-abstractions-go v1.0.0/go.mod h1:2yaRQnx2KU7UaenYSApiTT4pf7fFkPV0B71Rm2uYynQ=
+github.com/microsoft/kiota-authentication-azure-go v1.0.0 h1:29FNZZ/4nnCOwFcGWlB/sxPvWz487HA2bXH8jR5k2Rk=
+github.com/microsoft/kiota-authentication-azure-go v1.0.0/go.mod h1:rnx3PRlkGdXDcA/0lZQTbBwyYGmc+3POt7HpE/e4jGw=
+github.com/microsoft/kiota-http-go v1.0.0 h1:F1hd6gMlLeEgH2CkRB7z13ow7LxMKMWEmms/t0VfS+k=
+github.com/microsoft/kiota-http-go v1.0.0/go.mod h1:eujxJliqodotsYepIc6ihhK+vXMMt5Q8YiSNL7+7M7U=
+github.com/microsoft/kiota-serialization-form-go v1.0.0 h1:UNdrkMnLFqUCccQZerKjblsyVgifS11b3WCx+eFEsAI=
+github.com/microsoft/kiota-serialization-form-go v1.0.0/go.mod h1:h4mQOO6KVTNciMF6azi1J9QB19ujSw3ULKcSNyXXOMA=
+github.com/microsoft/kiota-serialization-json-go v1.0.0 h1:snT+SwS/R4CMjkmj7mjCHrmib2nKWqGvUWaedgliMbI=
+github.com/microsoft/kiota-serialization-json-go v1.0.0/go.mod h1:psfgIfqWm/9P1JAdl2cxHHIg9SdEtYHOetfDLIQ5/dw=
+github.com/microsoft/kiota-serialization-text-go v1.0.0 h1:XOaRhAXy+g8ZVpcq7x7a0jlETWnWrEum0RhmbYrTFnA=
+github.com/microsoft/kiota-serialization-text-go v1.0.0/go.mod h1:sM1/C6ecnQ7IquQOGUrUldaO5wj+9+v7G2W3sQ3fy6M=
+github.com/microsoftgraph/msgraph-sdk-go v1.1.0 h1:NtFsFVIt8lpXcTlRbLG1WuCOTzltzS5j+U8Fecqdnr4=
+github.com/microsoftgraph/msgraph-sdk-go v1.1.0/go.mod h1:NIk9kSn7lQ5Hnhhn3FM4NrJWz54JfDHD0JvhJZky27g=
+github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0 h1:7NWTfyXvOjoizW7PmxNp3+8wCKPgpODs/D1cUZ3fkAY=
+github.com/microsoftgraph/msgraph-sdk-go-core v1.0.0/go.mod h1:tQb4q3YMIj2dWhhXhQSJ4ELpol931ANKzHSYK5kX1qE=
 github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
 github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
 github.com/minio/minio-go/v7 v7.0.52 h1:8XhG36F6oKQUDDSuz6dY3rioMzovKjW40W6ANuN0Dps=
@@ -431,10 +431,10 @@ go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
 go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
 go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
 go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
-go.opentelemetry.io/otel v1.14.0 h1:/79Huy8wbf5DnIPhemGB+zEPVwnN6fuQybr/SRXa6hM=
-go.opentelemetry.io/otel v1.14.0/go.mod h1:o4buv+dJzx8rohcUeRmWUZhqupFvzWis188WlggnNeU=
-go.opentelemetry.io/otel/trace v1.14.0 h1:wp2Mmvj41tDsyAJXiWDWpfNsOiIyd38fy85pyKcFq/M=
-go.opentelemetry.io/otel/trace v1.14.0/go.mod h1:8avnQLK+CG77yNLUae4ea2JDQ6iT+gozhnZjy/rw9G8=
+go.opentelemetry.io/otel v1.15.1 h1:3Iwq3lfRByPaws0f6bU3naAqOR1n5IeDWd9390kWHa8=
+go.opentelemetry.io/otel v1.15.1/go.mod h1:mHHGEHVDLal6YrKMmk9LqC4a3sF5g+fHfrttQIB1NTc=
+go.opentelemetry.io/otel/trace v1.15.1 h1:uXLo6iHJEzDfrNC0L0mNjItIp06SyaBQxu5t3xMlngY=
+go.opentelemetry.io/otel/trace v1.15.1/go.mod h1:IWdQG/5N1x7f6YUlmdLeJvH9yxtuJAfc4VW5Agv9r/8=
 go.uber.org/atomic v1.10.0 h1:9qC72Qh0+3MqyJbAn8YU5xVq1frD8bn3JtD2oXtafVQ=
 go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
 go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI=
@@ -495,7 +495,6 @@ golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
 golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
@@ -623,8 +622,8 @@ golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
 golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
-golang.org/x/time v0.1.0 h1:xYY+Bajn2a7VBmTM5GikTmnK8ZuX8YgnQCqZpbBNtmA=
-golang.org/x/time v0.1.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4=
+golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
@@ -783,8 +782,6 @@ gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntN
 gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
 gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
 gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
-gopkg.in/resty.v1 v1.12.0 h1:CuXP0Pjfw9rOuY6EP+UvtNvt5DSqHpIxILZKT/quCZI=
-gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
 gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
@@ -1,147 +0,0 @@
-package api
-
-import (
-	"context"
-	"strings"
-
-	"github.com/alcionai/clues"
-	"github.com/microsoft/kiota-abstractions-go/serialization"
-	"github.com/microsoftgraph/msgraph-sdk-go/models"
-
-	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/graph"
-	"github.com/alcionai/corso/src/pkg/account"
-)
-
-// ---------------------------------------------------------------------------
-// common types and consts
-// ---------------------------------------------------------------------------
-
-// DeltaUpdate holds the results of a current delta token. It normally
-// gets produced when aggregating the addition and removal of items in
-// a delta-queryable folder.
-type DeltaUpdate struct {
-	// the deltaLink itself
-	URL string
-	// true if the old delta was marked as invalid
-	Reset bool
-}
-
-// GraphQuery represents functions which perform exchange-specific queries
-// into M365 backstore. Responses -> returned items will only contain the information
-// that is included in the options
-// TODO: use selector or path for granularity into specific folders or specific date ranges
-type GraphQuery func(ctx context.Context, userID string) (serialization.Parsable, error)
-
-// GraphRetrievalFunctions are functions from the Microsoft Graph API that retrieve
-// the default associated data of a M365 object. This varies by object. Additional
-// Queries must be run to obtain the omitted fields.
-type GraphRetrievalFunc func(
-	ctx context.Context,
-	user, m365ID string,
-) (serialization.Parsable, error)
-
-// ---------------------------------------------------------------------------
-// interfaces
-// ---------------------------------------------------------------------------
-
-// Client is used to fulfill the interface for exchange
-// queries that are traditionally backed by GraphAPI. A
-// struct is used in this case, instead of deferring to
-// pure function wrappers, so that the boundary separates the
-// granular implementation of the graphAPI and kiota away
-// from the exchange package's broader intents.
-type Client struct {
-	Credentials account.M365Config
-
-	// The Stable service is re-usable for any non-paged request.
-	// This allows us to maintain performance across async requests.
-	Stable graph.Servicer
-
-	// The LargeItem graph servicer is configured specifically for
-	// downloading large items. Specifically for use when handling
-	// attachments, and for no other use.
-	LargeItem graph.Servicer
-}
-
-// NewClient produces a new exchange api client. Must be used in
-// place of creating an ad-hoc client struct.
-func NewClient(creds account.M365Config) (Client, error) {
-	s, err := NewService(creds)
-	if err != nil {
-		return Client{}, err
-	}
-
-	li, err := newLargeItemService(creds)
-	if err != nil {
-		return Client{}, err
-	}
-
-	return Client{creds, s, li}, nil
-}
-
-// service generates a new service. Used for paged and other long-running
-// requests instead of the client's stable service, so that in-flight state
-// within the adapter doesn't get clobbered
-func (c Client) service() (*graph.Service, error) {
-	s, err := NewService(c.Credentials)
-	return s, err
-}
-
-func NewService(creds account.M365Config, opts ...graph.Option) (*graph.Service, error) {
-	a, err := graph.CreateAdapter(
-		creds.AzureTenantID,
-		creds.AzureClientID,
-		creds.AzureClientSecret,
-		opts...)
-	if err != nil {
-		return nil, clues.Wrap(err, "generating graph adapter")
-	}
-
-	return graph.NewService(a), nil
-}
-
-func newLargeItemService(creds account.M365Config) (*graph.Service, error) {
-	a, err := NewService(creds, graph.NoTimeout())
-	if err != nil {
-		return nil, clues.Wrap(err, "generating no-timeout graph adapter")
-	}
-
-	return a, nil
-}
-
-// ---------------------------------------------------------------------------
-// helper funcs
-// ---------------------------------------------------------------------------
-
-// checkIDAndName is a helper function to ensure that
-// the ID and name pointers are set prior to being called.
-func checkIDAndName(c graph.Container) error {
-	id := ptr.Val(c.GetId())
-	if len(id) == 0 {
-		return clues.New("container missing ID")
-	}
-
-	dn := ptr.Val(c.GetDisplayName())
-	if len(dn) == 0 {
-		return clues.New("container missing display name").With("container_id", id)
-	}
-
-	return nil
-}
-
-func HasAttachments(body models.ItemBodyable) bool {
-	if body == nil {
-		return false
-	}
-
-	if ct, ok := ptr.ValOK(body.GetContentType()); !ok || ct == models.TEXT_BODYTYPE {
-		return false
-	}
-
-	if body, ok := ptr.ValOK(body.GetContent()); !ok || len(body) == 0 {
-		return false
-	}
-
-	return strings.Contains(ptr.Val(body.GetContent()), "src=\"cid:")
-}
@@ -11,7 +11,6 @@ import (
 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/support"
-	"github.com/alcionai/corso/src/internal/connector/uploadsession"
 	"github.com/alcionai/corso/src/pkg/logger"
 )

@@ -93,10 +92,12 @@ func uploadLargeAttachment(
 	uploader attachmentUploadable,
 	attachment models.Attachmentable,
 ) error {
-	var (
-		bs   = attachmentBytes(attachment)
-		size = int64(len(bs))
-	)
+	bs, err := GetAttachmentBytes(attachment)
+	if err != nil {
+		return clues.Stack(err).WithClues(ctx)
+	}
+
+	size := int64(len(bs))

 	session, err := uploader.uploadSession(ctx, ptr.Val(attachment.GetName()), size)
 	if err != nil {
@@ -104,7 +105,7 @@ func uploadLargeAttachment(
 	}

 	url := ptr.Val(session.GetUploadUrl())
-	aw := uploadsession.NewWriter(uploader.getItemID(), url, size)
+	aw := graph.NewLargeItemWriter(uploader.getItemID(), url, size)
 	logger.Ctx(ctx).Debugw("uploading large attachment", "attachment_url", graph.LoggableURL(url))

 	// Upload the stream data
@@ -37,9 +37,12 @@ func (mau *mailAttachmentUploader) getItemID() string {

 func (mau *mailAttachmentUploader) uploadSmallAttachment(ctx context.Context, attach models.Attachmentable) error {
 	_, err := mau.service.Client().
-		UsersById(mau.userID).
-		MailFoldersById(mau.folderID).
-		MessagesById(mau.itemID).
+		Users().
+		ByUserId(mau.userID).
+		MailFolders().
+		ByMailFolderId(mau.folderID).
+		Messages().
+		ByMessageId(mau.itemID).
 		Attachments().
 		Post(ctx, attach, nil)
 	if err != nil {
@@ -60,9 +63,12 @@ func (mau *mailAttachmentUploader) uploadSession(
 	r, err := mau.
 		service.
 		Client().
-		UsersById(mau.userID).
-		MailFoldersById(mau.folderID).
-		MessagesById(mau.itemID).
+		Users().
+		ByUserId(mau.userID).
+		MailFolders().
+		ByMailFolderId(mau.folderID).
+		Messages().
+		ByMessageId(mau.itemID).
 		Attachments().
 		CreateUploadSession().
 		Post(ctx, session, nil)
@@ -87,9 +93,12 @@ func (eau *eventAttachmentUploader) getItemID() string {

 func (eau *eventAttachmentUploader) uploadSmallAttachment(ctx context.Context, attach models.Attachmentable) error {
 	_, err := eau.service.Client().
-		UsersById(eau.userID).
-		CalendarsById(eau.calendarID).
-		EventsById(eau.itemID).
+		Users().
+		ByUserId(eau.userID).
+		Calendars().
+		ByCalendarId(eau.calendarID).
+		Events().
+		ByEventId(eau.itemID).
 		Attachments().
 		Post(ctx, attach, nil)
 	if err != nil {
@@ -108,9 +117,12 @@ func (eau *eventAttachmentUploader) uploadSession(
 	session.SetAttachmentItem(makeSessionAttachment(attachmentName, attachmentSize))

 	r, err := eau.service.Client().
-		UsersById(eau.userID).
-		CalendarsById(eau.calendarID).
-		EventsById(eau.itemID).
+		Users().
+		ByUserId(eau.userID).
+		Calendars().
+		ByCalendarId(eau.calendarID).
+		Events().
+		ByEventId(eau.itemID).
 		Attachments().
 		CreateUploadSession().
 		Post(ctx, session, nil)
@@ -8,7 +8,6 @@ import (

 	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
-	"github.com/alcionai/corso/src/internal/connector/exchange/api"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/data"
@@ -18,6 +17,7 @@ import (
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

 // MetadataFileNames produces the category-specific set of filenames used to
@@ -12,7 +12,6 @@ import (

 	inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
 	"github.com/alcionai/corso/src/internal/common/ptr"
-	"github.com/alcionai/corso/src/internal/connector/exchange/api"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/data"
@@ -21,6 +20,7 @@ import (
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

 // ---------------------------------------------------------------------------
@@ -8,11 +8,11 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"

-	"github.com/alcionai/corso/src/internal/connector/exchange/api"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

 type CacheResolverSuite struct {
@@ -9,10 +9,10 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"

-	"github.com/alcionai/corso/src/internal/connector/exchange/api"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )

 const (
@@ -67,9 +67,9 @@ func (suite *MockSuite) TestMockExchangeCollection_NewExchangeCollectionMail_Hyd

	t := suite.T()
	mdc := NewCollection(nil, nil, 3)
	buf := &bytes.Buffer{}

	for stream := range mdc.Items(ctx, fault.New(true)) {
		buf := &bytes.Buffer{}
		_, err := buf.ReadFrom(stream.ToReader())
		assert.NoError(t, err, clues.ToCore(err))

@@ -10,7 +10,6 @@ import (
	"github.com/stretchr/testify/suite"

	"github.com/alcionai/corso/src/internal/common/ptr"
	"github.com/alcionai/corso/src/internal/connector/exchange/api"
	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
	"github.com/alcionai/corso/src/internal/connector/graph"
	"github.com/alcionai/corso/src/internal/tester"
@@ -18,6 +17,7 @@ import (
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/path"
	"github.com/alcionai/corso/src/pkg/services/m365/api"
)

type ExchangeRestoreSuite struct {

@@ -6,13 +6,13 @@ import (
	"github.com/alcionai/clues"

	"github.com/alcionai/corso/src/internal/common/ptr"
	"github.com/alcionai/corso/src/internal/connector/exchange/api"
	"github.com/alcionai/corso/src/internal/connector/graph"
	"github.com/alcionai/corso/src/pkg/account"
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/logger"
	"github.com/alcionai/corso/src/pkg/path"
	"github.com/alcionai/corso/src/pkg/selectors"
	"github.com/alcionai/corso/src/pkg/services/m365/api"
)

var ErrFolderNotFound = clues.New("folder not found")

@@ -7,7 +7,6 @@ import (

	"github.com/alcionai/corso/src/internal/common/pii"
	"github.com/alcionai/corso/src/internal/common/ptr"
	"github.com/alcionai/corso/src/internal/connector/exchange/api"
	"github.com/alcionai/corso/src/internal/connector/graph"
	"github.com/alcionai/corso/src/internal/connector/support"
	"github.com/alcionai/corso/src/internal/data"
@@ -16,6 +15,7 @@ import (
	"github.com/alcionai/corso/src/pkg/logger"
	"github.com/alcionai/corso/src/pkg/path"
	"github.com/alcionai/corso/src/pkg/selectors"
	"github.com/alcionai/corso/src/pkg/services/m365/api"
)

type addedAndRemovedItemIDsGetter interface {

@@ -11,7 +11,6 @@ import (

	inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
	"github.com/alcionai/corso/src/internal/common/ptr"
	"github.com/alcionai/corso/src/internal/connector/exchange/api"
	"github.com/alcionai/corso/src/internal/connector/graph"
	"github.com/alcionai/corso/src/internal/connector/support"
	"github.com/alcionai/corso/src/internal/data"
@@ -21,6 +20,7 @@ import (
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/path"
	"github.com/alcionai/corso/src/pkg/selectors"
	"github.com/alcionai/corso/src/pkg/services/m365/api"
)

// ---------------------------------------------------------------------------

@@ -4,7 +4,6 @@ import (
	"bytes"
	"context"
	"fmt"
	"reflect"
	"runtime/trace"

	"github.com/alcionai/clues"
@@ -12,7 +11,6 @@ import (

	"github.com/alcionai/corso/src/internal/common/dttm"
	"github.com/alcionai/corso/src/internal/common/ptr"
	"github.com/alcionai/corso/src/internal/connector/exchange/api"
	"github.com/alcionai/corso/src/internal/connector/graph"
	"github.com/alcionai/corso/src/internal/connector/support"
	"github.com/alcionai/corso/src/internal/data"
@@ -24,6 +22,7 @@ import (
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/logger"
	"github.com/alcionai/corso/src/pkg/path"
	"github.com/alcionai/corso/src/pkg/services/m365/api"
)

// RestoreExchangeObject directs restore pipeline towards restore function
@@ -74,7 +73,13 @@ func RestoreExchangeContact(

	ctx = clues.Add(ctx, "item_id", ptr.Val(contact.GetId()))

	response, err := service.Client().UsersById(user).ContactFoldersById(destination).Contacts().Post(ctx, contact, nil)
	response, err := service.Client().
		Users().
		ByUserId(user).
		ContactFolders().
		ByContactFolderId(destination).
		Contacts().
		Post(ctx, contact, nil)
	if err != nil {
		return nil, graph.Wrap(ctx, err, "uploading Contact")
	}
@@ -122,7 +127,13 @@ func RestoreExchangeEvent(
		transformedEvent.SetAttachments([]models.Attachmentable{})
	}

	response, err := service.Client().UsersById(user).CalendarsById(destination).Events().Post(ctx, transformedEvent, nil)
	response, err := service.Client().
		Users().
		ByUserId(user).
		Calendars().
		ByCalendarId(destination).
		Events().
		Post(ctx, transformedEvent, nil)
	if err != nil {
		return nil, graph.Wrap(ctx, err, "uploading event")
	}
@@ -223,10 +234,19 @@ func RestoreMailMessage(
	return info, nil
}

// attachmentBytes is a helper to retrieve the attachment content from a models.Attachmentable
// TODO: Revisit how we retrieve/persist attachment content during backup so this is not needed
func attachmentBytes(attachment models.Attachmentable) []byte {
	return reflect.Indirect(reflect.ValueOf(attachment)).FieldByName("contentBytes").Bytes()
// GetAttachmentBytes is a helper to retrieve the attachment content from a models.Attachmentable
func GetAttachmentBytes(attachment models.Attachmentable) ([]byte, error) {
	bi, err := attachment.GetBackingStore().Get("contentBytes")
	if err != nil {
		return nil, err
	}

	bts, ok := bi.([]byte)
	if !ok {
		return nil, clues.New(fmt.Sprintf("unexpected type for attachment content: %T", bi))
	}

	return bts, nil
}

// SendMailToBackStore function for transporting in-memory messageable item to M365 backstore
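A short usage sketch for the new helper; the attachment value and the surrounding error handling here are illustrative only:

	// Hypothetical caller: read the raw content bytes out of an attachment.
	content, err := GetAttachmentBytes(attachment)
	if err != nil {
		return graph.Wrap(ctx, err, "reading attachment content")
	}
	// content now holds the decoded contentBytes payload.
	_ = content

Reading through the backing store avoids the reflection on an unexported field that the old attachmentBytes helper relied on, and lets callers handle a missing or mistyped field as an ordinary error.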
@@ -245,7 +265,13 @@ func SendMailToBackStore(
	// Item.Attachments --> HasAttachments doesn't always have a value populated when deserialized
	message.SetAttachments([]models.Attachmentable{})

	response, err := service.Client().UsersById(user).MailFoldersById(destination).Messages().Post(ctx, message, nil)
	response, err := service.Client().
		Users().
		ByUserId(user).
		MailFolders().
		ByMailFolderId(destination).
		Messages().
		Post(ctx, message, nil)
	if err != nil {
		return graph.Wrap(ctx, err, "restoring mail")
	}

@@ -27,3 +27,25 @@ func NextLink(pl PageLinker) string {
func NextAndDeltaLink(pl DeltaPageLinker) (string, string) {
	return NextLink(pl), ptr.Val(pl.GetOdataDeltaLink())
}

type Valuer[T any] interface {
	GetValue() []T
}

type PageLinkValuer[T any] interface {
	PageLinker
	Valuer[T]
}

// EmptyDeltaLinker is used to convert PageLinker to DeltaPageLinker
type EmptyDeltaLinker[T any] struct {
	PageLinkValuer[T]
}

func (EmptyDeltaLinker[T]) GetOdataDeltaLink() *string {
	return ptr.To("")
}

func (e EmptyDeltaLinker[T]) GetValue() []T {
	return e.PageLinkValuer.GetValue()
}

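A sketch of how the wrapper might be used so that delta-aware pagination code can also consume plain (non-delta) pages; resp here is a hypothetical page response satisfying PageLinkValuer:

	// Wrap a plain page so callers expecting a DeltaPageLinker get an
	// empty, non-nil delta link instead of a missing one.
	var resp PageLinkValuer[models.Messageable] // hypothetical page response
	wrapped := EmptyDeltaLinker[models.Messageable]{PageLinkValuer: resp}
	next, delta := NextAndDeltaLink(wrapped) // delta is always ""
	_, _ = next, delta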
@@ -25,10 +25,10 @@ func TestGraphErrorsUnitSuite(t *testing.T) {
}

func odErr(code string) *odataerrors.ODataError {
	odErr := &odataerrors.ODataError{}
	merr := odataerrors.MainError{}
	odErr := odataerrors.NewODataError()
	merr := odataerrors.NewMainError()
	merr.SetCode(&code)
	odErr.SetError(&merr)
	odErr.SetError(merr)

	return odErr
}
@@ -300,33 +300,33 @@ func (suite *GraphErrorsUnitSuite) TestIsErrUnauthorized() {

func (suite *GraphErrorsUnitSuite) TestMalwareInfo() {
	var (
		i = models.DriveItem{}
		cb = models.User{}
		i        = models.NewDriveItem()
		cb       = models.NewUser()
		cbID     = "created-by"
		lm = models.User{}
		lm       = models.NewUser()
		lmID     = "last-mod-by"
		ref = models.ItemReference{}
		ref      = models.NewItemReference()
		refCID   = "container-id"
		refCN    = "container-name"
		refCP    = "/drives/b!vF-sdsdsds-sdsdsa-sdsd/root:/Folder/container-name"
		refCPexp = "/Folder/container-name"
		mal = models.Malware{}
		mal      = models.NewMalware()
		malDesc  = "malware-description"
	)

	cb.SetId(&cbID)
	i.SetCreatedByUser(&cb)
	i.SetCreatedByUser(cb)

	lm.SetId(&lmID)
	i.SetLastModifiedByUser(&lm)
	i.SetLastModifiedByUser(lm)

	ref.SetId(&refCID)
	ref.SetName(&refCN)
	ref.SetPath(&refCP)
	i.SetParentReference(&ref)
	i.SetParentReference(ref)

	mal.SetDescription(&malDesc)
	i.SetMalware(&mal)
	i.SetMalware(mal)

	expect := map[string]any{
		fault.AddtlCreatedBy: cbID,
@@ -337,7 +337,7 @@ func (suite *GraphErrorsUnitSuite) TestMalwareInfo() {
		fault.AddtlMalwareDesc: malDesc,
	}

	assert.Equal(suite.T(), expect, ItemInfo(&i))
	assert.Equal(suite.T(), expect, ItemInfo(i))
}

func (suite *GraphErrorsUnitSuite) TestIsErrFolderExists() {

@@ -227,7 +227,8 @@ func (suite *RetryMWIntgSuite) TestRetryMiddleware_RetryRequest_resetBodyAfter50

	_, err = NewService(adpt).
		Client().
		UsersById("user").
		Users().
		ByUserId("user").
		MailFolders().
		Post(ctx, body, nil)
	require.NoError(t, err, clues.ToCore(err))

@@ -247,11 +247,6 @@ func kiotaMiddlewares(
) []khttp.Middleware {
	mw := []khttp.Middleware{}

	// Optionally add concurrency limiter middleware if it has been initialized
	if concurrencyLim != nil {
		mw = append(mw, concurrencyLim)
	}

	mw = append(mw, []khttp.Middleware{
		msgraphgocore.NewGraphTelemetryHandler(options),
		&RetryMiddleware{
@@ -264,10 +259,18 @@ func kiotaMiddlewares(
		khttp.NewParametersNameDecodingHandler(),
		khttp.NewUserAgentHandler(),
		&LoggingMiddleware{},
		&RateLimiterMiddleware{},
		&MetricsMiddleware{},
	}...)

	// Optionally add concurrency limiter middleware if it has been initialized.
	if concurrencyLim != nil {
		mw = append(mw, concurrencyLim)
	}

	mw = append(
		mw,
		&RateLimiterMiddleware{},
		&MetricsMiddleware{})

	if len(cc.appendMiddleware) > 0 {
		mw = append(mw, cc.appendMiddleware...)
	}

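The net effect of this change is ordering: the concurrency limiter now runs after the logging handler rather than first, and the rate limiter and metrics handlers always come last. A condensed sketch of the resulting slice construction, using the handlers shown above:

	mw := []khttp.Middleware{
		msgraphgocore.NewGraphTelemetryHandler(options),
		&RetryMiddleware{ /* ... */ },
		khttp.NewParametersNameDecodingHandler(),
		khttp.NewUserAgentHandler(),
		&LoggingMiddleware{},
	}

	// The limiter is optional; when present it slots in before rate limiting.
	if concurrencyLim != nil {
		mw = append(mw, concurrencyLim)
	}

	mw = append(mw, &RateLimiterMiddleware{}, &MetricsMiddleware{})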
@@ -1,12 +1,12 @@
package uploadsession
package graph

import (
	"bytes"
	"context"
	"fmt"
	"net/http"

	"github.com/alcionai/clues"
	"gopkg.in/resty.v1"

	"github.com/alcionai/corso/src/pkg/logger"
)
@@ -20,7 +20,7 @@ const (

// Writer implements an io.Writer for an M365
// UploadSession URL
type writer struct {
type largeItemWriter struct {
	// Identifier
	id string
	// Upload URL for this item
@@ -29,18 +29,20 @@ type writer struct {
	contentLength int64
	// Last item offset that was written to
	lastWrittenOffset int64
	client *resty.Client
	client httpWrapper
}

func NewWriter(id, url string, size int64) *writer {
	return &writer{id: id, url: url, contentLength: size, client: resty.New()}
func NewLargeItemWriter(id, url string, size int64) *largeItemWriter {
	return &largeItemWriter{id: id, url: url, contentLength: size, client: *NewNoTimeoutHTTPWrapper()}
}

// Write will upload the provided data to M365. It sets the `Content-Length` and `Content-Range` headers based on
// https://docs.microsoft.com/en-us/graph/api/driveitem-createuploadsession
func (iw *writer) Write(p []byte) (int, error) {
func (iw *largeItemWriter) Write(p []byte) (int, error) {
	rangeLength := len(p)
	logger.Ctx(context.Background()).
	ctx := context.Background()

	logger.Ctx(ctx).
		Debugf("WRITE for %s. Size:%d, Offset: %d, TotalSize: %d",
			iw.id, rangeLength, iw.lastWrittenOffset, iw.contentLength)

@@ -48,17 +50,20 @@ func (iw *writer) Write(p []byte) (int, error) {

	// PUT the request - set headers `Content-Range` to describe total size and `Content-Length` to describe size of
	// data in the current request
	_, err := iw.client.R().
		SetHeaders(map[string]string{
			contentRangeHeaderKey: fmt.Sprintf(
				contentRangeHeaderValueFmt,
				iw.lastWrittenOffset,
				endOffset-1,
				iw.contentLength),
			contentLengthHeaderKey: fmt.Sprintf("%d", rangeLength),
		}).
		SetBody(bytes.NewReader(p)).
		Put(iw.url)
	headers := make(map[string]string)
	headers[contentRangeHeaderKey] = fmt.Sprintf(
		contentRangeHeaderValueFmt,
		iw.lastWrittenOffset,
		endOffset-1,
		iw.contentLength)
	headers[contentLengthHeaderKey] = fmt.Sprintf("%d", rangeLength)

	_, err := iw.client.Request(
		ctx,
		http.MethodPut,
		iw.url,
		bytes.NewReader(p),
		headers)
	if err != nil {
		return 0, clues.Wrap(err, "uploading item").With(
			"upload_id", iw.id,
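For clarity, the headers built here follow the bytes start-end/total form the upload-session API expects; assuming contentRangeHeaderValueFmt is "bytes %d-%d/%d" (per the Graph docs linked above), a 4 MiB chunk at offset 0 of a 10 MiB file would produce:

	// Content-Range: bytes 0-4194303/10485760
	// Content-Length: 4194304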
@@ -1,4 +1,4 @@
package uploadsession
package graph

import (
	"bytes"
@@ -69,7 +69,7 @@ func (suite *UploadSessionSuite) TestWriter() {

	defer ts.Close()

	writer := NewWriter("item", ts.URL, writeSize)
	writer := NewLargeItemWriter("item", ts.URL, writeSize)

	// Using a 32 KB buffer for the copy allows us to validate the
	// multi-part upload. `io.CopyBuffer` will only write 32 KB at
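A minimal sketch of driving the writer through io.CopyBuffer, assuming src is an io.Reader over the item content and uploadURL/size come from the created upload session:

	writer := NewLargeItemWriter("item-id", uploadURL, size)

	// A 32 KB buffer forces multi-part PUTs, one Write call per chunk.
	buf := make([]byte, 32*1024)
	if _, err := io.CopyBuffer(writer, src, buf); err != nil {
		return clues.Wrap(err, "uploading item content")
	}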
@@ -1,7 +1,6 @@
package connector

import (
	"bytes"
	"context"
	"encoding/json"
	"io"
@@ -17,33 +16,17 @@ import (
	"golang.org/x/exp/slices"

	"github.com/alcionai/corso/src/internal/common/ptr"
	exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
	"github.com/alcionai/corso/src/internal/connector/exchange"
	"github.com/alcionai/corso/src/internal/connector/onedrive"
	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
	"github.com/alcionai/corso/src/internal/connector/support"
	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/internal/tester"
	"github.com/alcionai/corso/src/pkg/account"
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/path"
	"github.com/alcionai/corso/src/pkg/selectors"
)

func mustToDataLayerPath(
	t *testing.T,
	service path.ServiceType,
	tenant, resourceOwner string,
	category path.CategoryType,
	elements []string,
	isItem bool,
) path.Path {
	res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
	require.NoError(t, err, clues.ToCore(err))

	return res
}

func testElementsMatch[T any](
	t *testing.T,
	expected []T,
@@ -116,52 +99,17 @@ func testElementsMatch[T any](
		unexpected)
}

type configInfo struct {
	acct           account.Account
	opts           control.Options
	resource       Resource
	service        path.ServiceType
	tenant         string
	resourceOwners []string
	dest           control.RestoreDestination
}

type itemInfo struct {
	// lookupKey is a string that can be used to find this data from a set of
	// other data in the same collection. This key should be something that will
	// be the same before and after restoring the item in M365 and may not be
	// the M365 ID. When restoring items out of place, the item is assigned a
	// new ID making it unsuitable for a lookup key.
	lookupKey string
	name      string
	data      []byte
}

type colInfo struct {
	// Elements (in order) for the path representing this collection. Should
	// only contain elements after the prefix that corso uses for the path. For
	// example, a collection for the Inbox folder in exchange mail would just be
	// "Inbox".
	pathElements []string
	category     path.CategoryType
	items        []itemInfo
	// auxItems are items that can be retrieved with Fetch but won't be returned
	// by Items(). These files do not directly participate in comparisons at the
	// end of a test.
	auxItems []itemInfo
}

type restoreBackupInfo struct {
	name        string
	service     path.ServiceType
	collections []colInfo
	collections []ColInfo
	resource    Resource
}

type restoreBackupInfoMultiVersion struct {
	service             path.ServiceType
	collectionsLatest   []colInfo
	collectionsPrevious []colInfo
	collectionsLatest   []ColInfo
	collectionsPrevious []ColInfo
	resource            Resource
	backupVersion       int
}
|
||||
expected models.Attachmentable,
|
||||
got models.Attachmentable,
|
||||
) bool {
|
||||
// This is super hacky, but seems like it would be good to have a comparison
|
||||
// of the actual content. I think the only other way to really get it is to
|
||||
// serialize both structs to JSON and pull it from there or something though.
|
||||
expectedData := reflect.Indirect(reflect.ValueOf(expected)).FieldByName("contentBytes").Bytes()
|
||||
gotData := reflect.Indirect(reflect.ValueOf(got)).FieldByName("contentBytes").Bytes()
|
||||
expectedData, err := exchange.GetAttachmentBytes(expected)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
gotData, err := exchange.GetAttachmentBytes(got)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(expectedData, gotData) {
|
||||
return false
|
||||
@ -734,7 +686,7 @@ func compareDriveItem(
|
||||
t *testing.T,
|
||||
expected map[string][]byte,
|
||||
item data.Stream,
|
||||
config configInfo,
|
||||
config ConfigInfo,
|
||||
rootDir bool,
|
||||
) bool {
|
||||
// Skip Drive permissions in the folder that used to be the root. We don't
|
||||
@ -814,7 +766,7 @@ func compareDriveItem(
|
||||
assert.Equal(t, expectedMeta.FileName, itemMeta.FileName)
|
||||
}
|
||||
|
||||
if !config.opts.RestorePermissions {
|
||||
if !config.Opts.RestorePermissions {
|
||||
assert.Equal(t, 0, len(itemMeta.Permissions))
|
||||
return true
|
||||
}
|
||||
@ -835,7 +787,7 @@ func compareDriveItem(
|
||||
// sharepoint retrieves a superset of permissions
|
||||
// (all site admins, site groups, built in by default)
|
||||
// relative to the permissions changed by the test.
|
||||
config.service == path.SharePointService,
|
||||
config.Service == path.SharePointService,
|
||||
permissionEqual)
|
||||
|
||||
return true
|
||||
@ -877,7 +829,7 @@ func compareItem(
|
||||
service path.ServiceType,
|
||||
category path.CategoryType,
|
||||
item data.Stream,
|
||||
config configInfo,
|
||||
config ConfigInfo,
|
||||
rootDir bool,
|
||||
) bool {
|
||||
if mt, ok := item.(data.StreamModTime); ok {
|
||||
@ -971,7 +923,7 @@ func checkCollections(
|
||||
expectedItems int,
|
||||
expected map[string]map[string][]byte,
|
||||
got []data.BackupCollection,
|
||||
config configInfo,
|
||||
config ConfigInfo,
|
||||
) int {
|
||||
collectionsWithItems := []data.BackupCollection{}
|
||||
|
||||
@ -985,7 +937,7 @@ func checkCollections(
|
||||
category = returned.FullPath().Category()
|
||||
expectedColData = expected[returned.FullPath().String()]
|
||||
folders = returned.FullPath().Elements()
|
||||
rootDir = folders[len(folders)-1] == config.dest.ContainerName
|
||||
rootDir = folders[len(folders)-1] == config.Dest.ContainerName
|
||||
)
|
||||
|
||||
// Need to iterate through all items even if we don't expect to find a match
|
||||
@@ -1166,127 +1118,6 @@ func backupSelectorForExpected(
	return selectors.Selector{}
}

// backupOutputPathFromRestore returns a path.Path denoting the location in
// kopia the data will be placed at. The location is a data-type specific
// combination of the location the data was recently restored to and where the
// data was originally in the hierarchy.
func backupOutputPathFromRestore(
	t *testing.T,
	restoreDest control.RestoreDestination,
	inputPath path.Path,
) path.Path {
	base := []string{restoreDest.ContainerName}

	// OneDrive has leading information like the drive ID.
	if inputPath.Service() == path.OneDriveService || inputPath.Service() == path.SharePointService {
		folders := inputPath.Folders()
		base = append(append([]string{}, folders[:3]...), restoreDest.ContainerName)

		if len(folders) > 3 {
			base = append(base, folders[3:]...)
		}
	}

	if inputPath.Service() == path.ExchangeService && inputPath.Category() == path.EmailCategory {
		base = append(base, inputPath.Folders()...)
	}

	return mustToDataLayerPath(
		t,
		inputPath.Service(),
		inputPath.Tenant(),
		inputPath.ResourceOwner(),
		inputPath.Category(),
		base,
		false,
	)
}

// TODO(ashmrtn): Make this an actual mock class that can be used in other
// packages.
type mockRestoreCollection struct {
	data.Collection
	auxItems map[string]data.Stream
}

func (rc mockRestoreCollection) Fetch(
	ctx context.Context,
	name string,
) (data.Stream, error) {
	res := rc.auxItems[name]
	if res == nil {
		return nil, data.ErrNotFound
	}

	return res, nil
}

func collectionsForInfo(
	t *testing.T,
	service path.ServiceType,
	tenant, user string,
	dest control.RestoreDestination,
	allInfo []colInfo,
	backupVersion int,
) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
	var (
		collections  = make([]data.RestoreCollection, 0, len(allInfo))
		expectedData = make(map[string]map[string][]byte, len(allInfo))
		totalItems   = 0
		kopiaEntries = 0
	)

	for _, info := range allInfo {
		pth := mustToDataLayerPath(
			t,
			service,
			tenant,
			user,
			info.category,
			info.pathElements,
			false)

		mc := exchMock.NewCollection(pth, pth, len(info.items))
		baseDestPath := backupOutputPathFromRestore(t, dest, pth)

		baseExpected := expectedData[baseDestPath.String()]
		if len(baseExpected) == 0 {
			expectedData[baseDestPath.String()] = make(map[string][]byte, len(info.items))
			baseExpected = expectedData[baseDestPath.String()]
		}

		for i := 0; i < len(info.items); i++ {
			mc.Names[i] = info.items[i].name
			mc.Data[i] = info.items[i].data

			baseExpected[info.items[i].lookupKey] = info.items[i].data

			// We do not count metadata files against item count
			if backupVersion > 0 &&
				(service == path.OneDriveService || service == path.SharePointService) &&
				metadata.HasMetaSuffix(info.items[i].name) {
				continue
			}

			totalItems++
		}

		c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}}

		for _, aux := range info.auxItems {
			c.auxItems[aux.name] = &exchMock.Data{
				ID:     aux.name,
				Reader: io.NopCloser(bytes.NewReader(aux.data)),
			}
		}

		collections = append(collections, c)
		kopiaEntries += len(info.items)
	}

	return totalItems, kopiaEntries, collections, expectedData
}

func getSelectorWith(
	t *testing.T,
	service path.ServiceType,

@@ -2,13 +2,11 @@ package connector

import (
	"context"
	"encoding/json"
	"fmt"
	"strings"
	"testing"

	"github.com/alcionai/clues"
	"github.com/google/uuid"
	"github.com/microsoftgraph/msgraph-sdk-go/models"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
@@ -25,86 +23,6 @@ import (
	"github.com/alcionai/corso/src/pkg/path"
)

// For any version post this (inclusive), we expect to be using IDs for
// permission instead of email
const versionPermissionSwitchedToID = version.OneDrive4DirIncludesPermissions

func getMetadata(fileName string, perm permData, permUseID bool) metadata.Metadata {
	if len(perm.user) == 0 || len(perm.roles) == 0 ||
		perm.sharingMode != metadata.SharingModeCustom {
		return metadata.Metadata{
			FileName:    fileName,
			SharingMode: perm.sharingMode,
		}
	}

	// In case of permissions, the id will usually be same for same
	// user/role combo unless deleted and re-added, but we have to do
	// this as we only have two users of which one is already taken.
	id := uuid.NewString()
	uperm := metadata.Permission{ID: id, Roles: perm.roles}

	if permUseID {
		uperm.EntityID = perm.entityID
	} else {
		uperm.Email = perm.user
	}

	testMeta := metadata.Metadata{
		FileName:    fileName,
		Permissions: []metadata.Permission{uperm},
	}

	return testMeta
}

type testOneDriveData struct {
	FileName string `json:"fileName,omitempty"`
	Data     []byte `json:"data,omitempty"`
}

func onedriveItemWithData(
	t *testing.T,
	name, lookupKey string,
	fileData []byte,
) itemInfo {
	t.Helper()

	content := testOneDriveData{
		FileName: lookupKey,
		Data:     fileData,
	}

	serialized, err := json.Marshal(content)
	require.NoError(t, err, clues.ToCore(err))

	return itemInfo{
		name:      name,
		data:      serialized,
		lookupKey: lookupKey,
	}
}

func onedriveMetadata(
	t *testing.T,
	fileName, itemID, lookupKey string,
	perm permData,
	permUseID bool,
) itemInfo {
	t.Helper()

	testMeta := getMetadata(fileName, perm, permUseID)

	testMetaJSON, err := json.Marshal(testMeta)
	require.NoError(t, err, "marshalling metadata", clues.ToCore(err))

	return itemInfo{
		name:      itemID,
		data:      testMetaJSON,
		lookupKey: lookupKey,
	}
}

var (
	fileName    = "test-file.txt"
	folderAName = "folder-a"
@@ -122,204 +40,6 @@ var (
	readPerm = []string{"read"}
)

func newOneDriveCollection(
	t *testing.T,
	service path.ServiceType,
	pathElements []string,
	backupVersion int,
) *onedriveCollection {
	return &onedriveCollection{
		service:       service,
		pathElements:  pathElements,
		backupVersion: backupVersion,
		t:             t,
	}
}

type onedriveCollection struct {
	service       path.ServiceType
	pathElements  []string
	items         []itemInfo
	aux           []itemInfo
	backupVersion int
	t             *testing.T
}

func (c onedriveCollection) collection() colInfo {
	cat := path.FilesCategory
	if c.service == path.SharePointService {
		cat = path.LibrariesCategory
	}

	return colInfo{
		pathElements: c.pathElements,
		category:     cat,
		items:        c.items,
		auxItems:     c.aux,
	}
}

func (c *onedriveCollection) withFile(name string, fileData []byte, perm permData) *onedriveCollection {
	switch c.backupVersion {
	case 0:
		// Lookups will occur using the most recent version of things so we need
		// the embedded file name to match that.
		c.items = append(c.items, onedriveItemWithData(
			c.t,
			name,
			name+metadata.DataFileSuffix,
			fileData))

	// v1-5, early metadata design
	case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker,
		version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName:
		c.items = append(c.items, onedriveItemWithData(
			c.t,
			name+metadata.DataFileSuffix,
			name+metadata.DataFileSuffix,
			fileData))

		md := onedriveMetadata(
			c.t,
			"",
			name+metadata.MetaFileSuffix,
			name+metadata.MetaFileSuffix,
			perm,
			c.backupVersion >= versionPermissionSwitchedToID)
		c.items = append(c.items, md)
		c.aux = append(c.aux, md)

	// v6+ current metadata design
	case version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID:
		c.items = append(c.items, onedriveItemWithData(
			c.t,
			name+metadata.DataFileSuffix,
			name+metadata.DataFileSuffix,
			fileData))

		md := onedriveMetadata(
			c.t,
			name,
			name+metadata.MetaFileSuffix,
			name,
			perm,
			c.backupVersion >= versionPermissionSwitchedToID)
		c.items = append(c.items, md)
		c.aux = append(c.aux, md)

	default:
		assert.FailNowf(c.t, "bad backup version", "version %d", c.backupVersion)
	}

	return c
}

func (c *onedriveCollection) withFolder(name string, perm permData) *onedriveCollection {
	switch c.backupVersion {
	case 0, version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName,
		version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID:
		return c

	case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker:
		c.items = append(
			c.items,
			onedriveMetadata(
				c.t,
				"",
				name+metadata.DirMetaFileSuffix,
				name+metadata.DirMetaFileSuffix,
				perm,
				c.backupVersion >= versionPermissionSwitchedToID))

	default:
		assert.FailNowf(c.t, "bad backup version", "version %d", c.backupVersion)
	}

	return c
}

// withPermissions adds permissions to the folder represented by this
// onedriveCollection.
func (c *onedriveCollection) withPermissions(perm permData) *onedriveCollection {
	// These versions didn't store permissions for the folder or didn't store them
	// in the folder's collection.
	if c.backupVersion < version.OneDrive4DirIncludesPermissions {
		return c
	}

	name := c.pathElements[len(c.pathElements)-1]
	metaName := name

	if c.backupVersion >= version.OneDrive5DirMetaNoName {
		// We switched to just .dirmeta for metadata file names.
		metaName = ""
	}

	if name == odConsts.RootPathDir {
		return c
	}

	md := onedriveMetadata(
		c.t,
		name,
		metaName+metadata.DirMetaFileSuffix,
		metaName+metadata.DirMetaFileSuffix,
		perm,
		c.backupVersion >= versionPermissionSwitchedToID)

	c.items = append(c.items, md)
	c.aux = append(c.aux, md)

	return c
}

type permData struct {
	user        string // user is only for older versions
	entityID    string
	roles       []string
	sharingMode metadata.SharingMode
}

type itemData struct {
	name  string
	data  []byte
	perms permData
}

type driveColInfo struct {
	pathElements []string
	perms        permData
	files        []itemData
	folders      []itemData
}

func testDataForInfo(
	t *testing.T,
	service path.ServiceType,
	cols []driveColInfo,
	backupVersion int,
) []colInfo {
	var res []colInfo

	for _, c := range cols {
		onedriveCol := newOneDriveCollection(t, service, c.pathElements, backupVersion)

		for _, f := range c.files {
			onedriveCol.withFile(f.name, f.data, f.perms)
		}

		for _, d := range c.folders {
			onedriveCol.withFolder(d.name, d.perms)
		}

		onedriveCol.withPermissions(c.perms)

		res = append(res, onedriveCol.collection())
	}

	return res
}

func mustGetDefaultDriveID(
	t *testing.T,
	ctx context.Context, //revive:disable-line:context-as-argument
@@ -334,9 +54,9 @@ func mustGetDefaultDriveID(

	switch backupService {
	case path.OneDriveService:
		d, err = service.Client().UsersById(resourceOwner).Drive().Get(ctx, nil)
		d, err = service.Client().Users().ByUserId(resourceOwner).Drive().Get(ctx, nil)
	case path.SharePointService:
		d, err = service.Client().SitesById(resourceOwner).Drive().Get(ctx, nil)
		d, err = service.Client().Sites().BySiteId(resourceOwner).Drive().Get(ctx, nil)
	default:
		assert.FailNowf(t, "unknown service type %s", backupService.String())
	}
@@ -675,78 +395,80 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(
		folderBName,
	}

	cols := []driveColInfo{
	cols := []OnedriveColInfo{
		{
			pathElements: rootPath,
			files: []itemData{
			PathElements: rootPath,
			Files: []ItemData{
				{
					name: fileName,
					data: fileAData,
					Name: fileName,
					Data: fileAData,
				},
			},
			folders: []itemData{
			Folders: []ItemData{
				{
					name: folderAName,
					Name: folderAName,
				},
				{
					name: folderBName,
					Name: folderBName,
				},
			},
		},
		{
			pathElements: folderAPath,
			files: []itemData{
			PathElements: folderAPath,
			Files: []ItemData{
				{
					name: fileName,
					data: fileBData,
					Name: fileName,
					Data: fileBData,
				},
			},
			folders: []itemData{
			Folders: []ItemData{
				{
					name: folderBName,
					Name: folderBName,
				},
			},
		},
		{
			pathElements: subfolderBPath,
			files: []itemData{
			PathElements: subfolderBPath,
			Files: []ItemData{
				{
					name: fileName,
					data: fileCData,
					Name: fileName,
					Data: fileCData,
				},
			},
			folders: []itemData{
			Folders: []ItemData{
				{
					name: folderAName,
					Name: folderAName,
				},
			},
		},
		{
			pathElements: subfolderAPath,
			files: []itemData{
			PathElements: subfolderAPath,
			Files: []ItemData{
				{
					name: fileName,
					data: fileDData,
					Name: fileName,
					Data: fileDData,
				},
			},
		},
		{
			pathElements: folderBPath,
			files: []itemData{
			PathElements: folderBPath,
			Files: []ItemData{
				{
					name: fileName,
					data: fileEData,
					Name: fileName,
					Data: fileEData,
				},
			},
		},
	}

	expected := testDataForInfo(suite.T(), suite.BackupService(), cols, version.Backup)
	expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
	require.NoError(suite.T(), err)

	for vn := startVersion; vn <= version.Backup; vn++ {
		suite.Run(fmt.Sprintf("Version%d", vn), func() {
			t := suite.T()
			input := testDataForInfo(t, suite.BackupService(), cols, vn)
			input, err := DataForInfo(suite.BackupService(), cols, vn)
			require.NoError(suite.T(), err)

			testData := restoreBackupInfoMultiVersion{
				service: suite.BackupService(),
@@ -819,71 +541,71 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
		folderCName,
	}

	cols := []driveColInfo{
	cols := []OnedriveColInfo{
		{
			pathElements: rootPath,
			files: []itemData{
			PathElements: rootPath,
			Files: []ItemData{
				{
					// Test restoring a file that doesn't inherit permissions.
					name: fileName,
					data: fileAData,
					perms: permData{
						user:     secondaryUserName,
						entityID: secondaryUserID,
						roles:    writePerm,
					Name: fileName,
					Data: fileAData,
					Perms: PermData{
						User:     secondaryUserName,
						EntityID: secondaryUserID,
						Roles:    writePerm,
					},
				},
				{
					// Test restoring a file that doesn't inherit permissions and has
					// no permissions.
					name: fileName2,
					data: fileBData,
					Name: fileName2,
					Data: fileBData,
				},
			},
			folders: []itemData{
			Folders: []ItemData{
				{
					name: folderBName,
					Name: folderBName,
				},
				{
					name: folderAName,
					perms: permData{
						user:     secondaryUserName,
						entityID: secondaryUserID,
						roles:    readPerm,
					Name: folderAName,
					Perms: PermData{
						User:     secondaryUserName,
						EntityID: secondaryUserID,
						Roles:    readPerm,
					},
				},
				{
					name: folderCName,
					perms: permData{
						user:     secondaryUserName,
						entityID: secondaryUserID,
						roles:    readPerm,
					Name: folderCName,
					Perms: PermData{
						User:     secondaryUserName,
						EntityID: secondaryUserID,
						Roles:    readPerm,
					},
				},
			},
		},
		{
			pathElements: folderBPath,
			files: []itemData{
			PathElements: folderBPath,
			Files: []ItemData{
				{
					// Test restoring a file in a non-root folder that doesn't inherit
					// permissions.
					name: fileName,
					data: fileBData,
					perms: permData{
						user:     secondaryUserName,
						entityID: secondaryUserID,
						roles:    writePerm,
					Name: fileName,
					Data: fileBData,
					Perms: PermData{
						User:     secondaryUserName,
						EntityID: secondaryUserID,
						Roles:    writePerm,
					},
				},
			},
			folders: []itemData{
			Folders: []ItemData{
				{
					name: folderAName,
					perms: permData{
						user:     secondaryUserName,
						entityID: secondaryUserID,
						roles:    readPerm,
					Name: folderAName,
					Perms: PermData{
						User:     secondaryUserName,
						EntityID: secondaryUserID,
						Roles:    readPerm,
					},
				},
			},
@@ -905,52 +627,53 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
		// 		},
		// 	},
		// },
		// perms: permData{
		// 	user:     secondaryUserName,
		// 	entityID: secondaryUserID,
		// 	roles:    readPerm,
		// Perms: PermData{
		// 	User:     secondaryUserName,
		// 	EntityID: secondaryUserID,
		// 	Roles:    readPerm,
		// },
		// },
		{
			// Tests a folder that has permissions with an item in the folder with
			// the different permissions.
			pathElements: folderAPath,
			files: []itemData{
			PathElements: folderAPath,
			Files: []ItemData{
				{
					name: fileName,
					data: fileEData,
					perms: permData{
						user:     secondaryUserName,
						entityID: secondaryUserID,
						roles:    writePerm,
					Name: fileName,
					Data: fileEData,
					Perms: PermData{
						User:     secondaryUserName,
						EntityID: secondaryUserID,
						Roles:    writePerm,
					},
				},
			},
			perms: permData{
				user:     secondaryUserName,
				entityID: secondaryUserID,
				roles:    readPerm,
			Perms: PermData{
				User:     secondaryUserName,
				EntityID: secondaryUserID,
				Roles:    readPerm,
			},
		},
		{
			// Tests a folder that has permissions with an item in the folder with
			// no permissions.
			pathElements: folderCPath,
			files: []itemData{
			PathElements: folderCPath,
			Files: []ItemData{
				{
					name: fileName,
					data: fileAData,
					Name: fileName,
					Data: fileAData,
				},
			},
			perms: permData{
				user:     secondaryUserName,
				entityID: secondaryUserID,
				roles:    readPerm,
			Perms: PermData{
				User:     secondaryUserName,
				EntityID: secondaryUserID,
				Roles:    readPerm,
			},
		},
	}

	expected := testDataForInfo(suite.T(), suite.BackupService(), cols, version.Backup)
	expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
	require.NoError(suite.T(), err)
	bss := suite.BackupService().String()

	for vn := startVersion; vn <= version.Backup; vn++ {
@@ -959,7 +682,8 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
			// Ideally this can always be true or false and still
			// work, but limiting older versions to use emails so as
			// to validate that flow as well.
			input := testDataForInfo(t, suite.BackupService(), cols, vn)
			input, err := DataForInfo(suite.BackupService(), cols, vn)
			require.NoError(suite.T(), err)

			testData := restoreBackupInfoMultiVersion{
				service: suite.BackupService(),
@@ -997,51 +721,53 @@ func testPermissionsBackupAndNoRestore(suite oneDriveSuite, startVersion int) {
		suite.Service(),
		suite.BackupResourceOwner())

	inputCols := []driveColInfo{
	inputCols := []OnedriveColInfo{
		{
			pathElements: []string{
			PathElements: []string{
				odConsts.DrivesPathDir,
				driveID,
				odConsts.RootPathDir,
			},
			files: []itemData{
			Files: []ItemData{
				{
					name: fileName,
					data: fileAData,
					perms: permData{
						user:     secondaryUserName,
						entityID: secondaryUserID,
						roles:    writePerm,
					Name: fileName,
					Data: fileAData,
					Perms: PermData{
						User:     secondaryUserName,
						EntityID: secondaryUserID,
						Roles:    writePerm,
					},
				},
			},
		},
	}

	expectedCols := []driveColInfo{
	expectedCols := []OnedriveColInfo{
		{
			pathElements: []string{
			PathElements: []string{
				odConsts.DrivesPathDir,
				driveID,
				odConsts.RootPathDir,
			},
			files: []itemData{
			Files: []ItemData{
				{
					// No permissions on the output since they weren't restored.
					name: fileName,
					data: fileAData,
					Name: fileName,
					Data: fileAData,
				},
			},
		},
	}

	expected := testDataForInfo(suite.T(), suite.BackupService(), expectedCols, version.Backup)
	expected, err := DataForInfo(suite.BackupService(), expectedCols, version.Backup)
	require.NoError(suite.T(), err)
	bss := suite.BackupService().String()

	for vn := startVersion; vn <= version.Backup; vn++ {
		suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
			t := suite.T()
			input := testDataForInfo(t, suite.BackupService(), inputCols, vn)
			input, err := DataForInfo(suite.BackupService(), inputCols, vn)
			require.NoError(suite.T(), err)

			testData := restoreBackupInfoMultiVersion{
				service: suite.BackupService(),
@@ -1119,29 +845,29 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
		folderCName,
	}

	fileSet := []itemData{
	fileSet := []ItemData{
		{
			name: "file-custom",
			data: fileAData,
			perms: permData{
				user:        secondaryUserName,
				entityID:    secondaryUserID,
				roles:       writePerm,
				sharingMode: metadata.SharingModeCustom,
			Name: "file-custom",
			Data: fileAData,
			Perms: PermData{
				User:        secondaryUserName,
				EntityID:    secondaryUserID,
				Roles:       writePerm,
				SharingMode: metadata.SharingModeCustom,
			},
		},
		{
			name: "file-inherited",
			data: fileAData,
			perms: permData{
				sharingMode: metadata.SharingModeInherited,
			Name: "file-inherited",
			Data: fileAData,
			Perms: PermData{
				SharingMode: metadata.SharingModeInherited,
			},
		},
		{
			name: "file-empty",
			data: fileAData,
			perms: permData{
				sharingMode: metadata.SharingModeCustom,
			Name: "file-empty",
			Data: fileAData,
			Perms: PermData{
				SharingMode: metadata.SharingModeCustom,
			},
		},
	}
@@ -1164,55 +890,56 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
	// - inherited-permission-file
	// - empty-permission-file (empty/empty might have interesting behavior)

	cols := []driveColInfo{
	cols := []OnedriveColInfo{
		{
			pathElements: rootPath,
			files: []itemData{},
			folders: []itemData{
				{name: folderAName},
			PathElements: rootPath,
			Files: []ItemData{},
			Folders: []ItemData{
				{Name: folderAName},
			},
		},
		{
			pathElements: folderAPath,
			files: fileSet,
			folders: []itemData{
				{name: folderAName},
				{name: folderBName},
				{name: folderCName},
			PathElements: folderAPath,
			Files: fileSet,
			Folders: []ItemData{
				{Name: folderAName},
				{Name: folderBName},
				{Name: folderCName},
			},
			perms: permData{
				user:     tertiaryUserName,
				entityID: tertiaryUserID,
				roles:    readPerm,
			Perms: PermData{
				User:     tertiaryUserName,
				EntityID: tertiaryUserID,
				Roles:    readPerm,
			},
		},
		{
			pathElements: subfolderAAPath,
			files: fileSet,
			perms: permData{
				user:        tertiaryUserName,
				entityID:    tertiaryUserID,
				roles:       writePerm,
				sharingMode: metadata.SharingModeCustom,
			PathElements: subfolderAAPath,
			Files: fileSet,
			Perms: PermData{
				User:        tertiaryUserName,
				EntityID:    tertiaryUserID,
				Roles:       writePerm,
				SharingMode: metadata.SharingModeCustom,
			},
		},
		{
			pathElements: subfolderABPath,
			files: fileSet,
			perms: permData{
				sharingMode: metadata.SharingModeInherited,
			PathElements: subfolderABPath,
			Files: fileSet,
			Perms: PermData{
				SharingMode: metadata.SharingModeInherited,
			},
		},
		{
			pathElements: subfolderACPath,
			files: fileSet,
			perms: permData{
				sharingMode: metadata.SharingModeCustom,
			PathElements: subfolderACPath,
			Files: fileSet,
			Perms: PermData{
				SharingMode: metadata.SharingModeCustom,
			},
		},
	}

	expected := testDataForInfo(suite.T(), suite.BackupService(), cols, version.Backup)
	expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
	require.NoError(suite.T(), err)
	bss := suite.BackupService().String()

	for vn := startVersion; vn <= version.Backup; vn++ {
@@ -1221,7 +948,8 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
			// Ideally this can always be true or false and still
			// work, but limiting older versions to use emails so as
			// to validate that flow as well.
			input := testDataForInfo(t, suite.BackupService(), cols, vn)
			input, err := DataForInfo(suite.BackupService(), cols, vn)
			require.NoError(suite.T(), err)

			testData := restoreBackupInfoMultiVersion{
				service: suite.BackupService(),
@@ -1279,61 +1007,63 @@ func testRestoreFolderNamedFolderRegression(
		folderBName,
	}

	cols := []driveColInfo{
	cols := []OnedriveColInfo{
		{
			pathElements: rootPath,
			files: []itemData{
			PathElements: rootPath,
			Files: []ItemData{
				{
					name: fileName,
					data: fileAData,
					Name: fileName,
					Data: fileAData,
				},
			},
			folders: []itemData{
			Folders: []ItemData{
				{
					name: folderNamedFolder,
					Name: folderNamedFolder,
				},
				{
					name: folderBName,
					Name: folderBName,
				},
			},
		},
		{
			pathElements: folderFolderPath,
			files: []itemData{
			PathElements: folderFolderPath,
			Files: []ItemData{
				{
					name: fileName,
					data: fileBData,
					Name: fileName,
					Data: fileBData,
				},
			},
			folders: []itemData{
			Folders: []ItemData{
				{
					name: folderBName,
					Name: folderBName,
				},
			},
		},
		{
			pathElements: subfolderPath,
			files: []itemData{
			PathElements: subfolderPath,
			Files: []ItemData{
				{
					name: fileName,
					data: fileCData,
					Name: fileName,
					Data: fileCData,
				},
			},
			folders: []itemData{
			Folders: []ItemData{
				{
					name: folderNamedFolder,
					Name: folderNamedFolder,
				},
			},
		},
	}

	expected := testDataForInfo(suite.T(), suite.BackupService(), cols, version.Backup)
	expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
	require.NoError(suite.T(), err)
	bss := suite.BackupService().String()

	for vn := startVersion; vn <= version.Backup; vn++ {
		suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
			t := suite.T()
			input := testDataForInfo(t, suite.BackupService(), cols, vn)
			input, err := DataForInfo(suite.BackupService(), cols, vn)
			require.NoError(suite.T(), err)

			testData := restoreBackupInfoMultiVersion{
				service: suite.BackupService(),

358 src/internal/connector/graph_connector_onedrive_test_helper.go Normal file
@@ -0,0 +1,358 @@
package connector

import (
	"encoding/json"
	"fmt"

	"github.com/alcionai/clues"
	"github.com/google/uuid"
	"golang.org/x/exp/maps"

	odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/internal/version"
	"github.com/alcionai/corso/src/pkg/path"
)

// For any version post this (inclusive), we expect to be using IDs for
// permission instead of email
const versionPermissionSwitchedToID = version.OneDrive4DirIncludesPermissions

func getMetadata(fileName string, perm PermData, permUseID bool) metadata.Metadata {
	if len(perm.User) == 0 || len(perm.Roles) == 0 ||
		perm.SharingMode != metadata.SharingModeCustom {
		return metadata.Metadata{
			FileName:    fileName,
			SharingMode: perm.SharingMode,
		}
	}

	// In case of permissions, the id will usually be same for same
	// user/role combo unless deleted and re-added, but we have to do
	// this as we only have two users of which one is already taken.
	id := uuid.NewString()
	uperm := metadata.Permission{ID: id, Roles: perm.Roles}

	if permUseID {
		uperm.EntityID = perm.EntityID
	} else {
		uperm.Email = perm.User
	}

	testMeta := metadata.Metadata{
		FileName:    fileName,
		Permissions: []metadata.Permission{uperm},
	}

	return testMeta
}

type PermData struct {
	User        string // user is only for older versions
	EntityID    string
	Roles       []string
	SharingMode metadata.SharingMode
}

type ItemData struct {
	Name  string
	Data  []byte
	Perms PermData
}

type OnedriveColInfo struct {
	PathElements []string
	Perms        PermData
	Files        []ItemData
	Folders      []ItemData
}

type onedriveCollection struct {
	service       path.ServiceType
	PathElements  []string
	items         []ItemInfo
	aux           []ItemInfo
	backupVersion int
}

func (c onedriveCollection) collection() ColInfo {
	cat := path.FilesCategory
	if c.service == path.SharePointService {
		cat = path.LibrariesCategory
	}

	return ColInfo{
		PathElements: c.PathElements,
		Category:     cat,
		Items:        c.items,
		AuxItems:     c.aux,
	}
}

func NewOneDriveCollection(
	service path.ServiceType,
	PathElements []string,
	backupVersion int,
) *onedriveCollection {
	return &onedriveCollection{
		service:       service,
		PathElements:  PathElements,
		backupVersion: backupVersion,
	}
}

func DataForInfo(
	service path.ServiceType,
	cols []OnedriveColInfo,
	backupVersion int,
) ([]ColInfo, error) {
	var (
		res []ColInfo
		err error
	)

	for _, c := range cols {
		onedriveCol := NewOneDriveCollection(service, c.PathElements, backupVersion)

		for _, f := range c.Files {
			_, err = onedriveCol.withFile(f.Name, f.Data, f.Perms)
			if err != nil {
				return res, err
			}
		}

		for _, d := range c.Folders {
			_, err = onedriveCol.withFolder(d.Name, d.Perms)
			if err != nil {
				return res, err
			}
		}

		_, err = onedriveCol.withPermissions(c.Perms)
		if err != nil {
			return res, err
		}

		res = append(res, onedriveCol.collection())
	}

	return res, nil
}

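A sketch of consuming the exported helper from another package's test; the drive ID and file payload here are hypothetical:

	cols := []OnedriveColInfo{{
		PathElements: []string{odConsts.DrivesPathDir, driveID, odConsts.RootPathDir}, // driveID assumed
		Files:        []ItemData{{Name: "file.txt", Data: []byte("hello")}},
	}}

	colInfos, err := DataForInfo(path.OneDriveService, cols, version.Backup)
	if err != nil {
		// handle or fail the test
	}
	_ = colInfos

Returning an error (instead of the old testDataForInfo's reliance on *testing.T and require) is what lets this helper live outside a _test.go file and be shared across suites.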
func (c *onedriveCollection) withFile(name string, fileData []byte, perm PermData) (*onedriveCollection, error) {
|
||||
switch c.backupVersion {
|
||||
case 0:
|
||||
// Lookups will occur using the most recent version of things so we need
|
||||
// the embedded file name to match that.
|
||||
item, err := onedriveItemWithData(
|
||||
name,
|
||||
name+metadata.DataFileSuffix,
|
||||
fileData)
|
||||
if err != nil {
|
||||
return c, err
|
||||
}
|
||||
|
||||
c.items = append(c.items, item)
|
||||
|
||||
// v1-5, early metadata design
|
||||
case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker,
|
||||
version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName:
|
||||
items, err := onedriveItemWithData(
|
||||
name+metadata.DataFileSuffix,
|
||||
name+metadata.DataFileSuffix,
|
||||
fileData)
|
||||
if err != nil {
|
||||
return c, err
|
||||
}
|
||||
|
||||
c.items = append(c.items, items)
|
||||
|
||||
md, err := onedriveMetadata(
|
||||
"",
|
||||
name+metadata.MetaFileSuffix,
|
||||
name+metadata.MetaFileSuffix,
|
||||
perm,
|
||||
c.backupVersion >= versionPermissionSwitchedToID)
|
||||
if err != nil {
|
||||
return c, err
|
||||
}
|
||||
|
||||
c.items = append(c.items, md)
|
||||
c.aux = append(c.aux, md)
|
||||
|
||||
// v6+ current metadata design
|
||||
case version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID:
|
||||
item, err := onedriveItemWithData(
|
||||
name+metadata.DataFileSuffix,
|
||||
name+metadata.DataFileSuffix,
|
||||
fileData)
|
||||
if err != nil {
|
||||
return c, err
|
||||
}
|
||||
|
||||
c.items = append(c.items, item)
|
||||
|
||||
md, err := onedriveMetadata(
|
||||
name,
|
||||
name+metadata.MetaFileSuffix,
|
||||
name,
|
||||
perm,
|
||||
c.backupVersion >= versionPermissionSwitchedToID)
|
||||
if err != nil {
|
||||
return c, err
|
||||
}
|
||||
|
||||
c.items = append(c.items, md)
|
||||
c.aux = append(c.aux, md)
|
||||
|
||||
default:
|
||||
return c, clues.New(fmt.Sprintf("bad backup version. version %d", c.backupVersion))
|
||||
}
|
||||
|
||||
return c, nil
|
||||
}
|
||||
|
||||
func (c *onedriveCollection) withFolder(name string, perm PermData) (*onedriveCollection, error) {
|
||||
switch c.backupVersion {
|
||||
case 0, version.OneDrive4DirIncludesPermissions, version.OneDrive5DirMetaNoName,
|
||||
version.OneDrive6NameInMeta, version.OneDrive7LocationRef, version.All8MigrateUserPNToID:
|
||||
return c, nil
|
||||
|
||||
case version.OneDrive1DataAndMetaFiles, 2, version.OneDrive3IsMetaMarker:
|
||||
item, err := onedriveMetadata(
|
||||
"",
|
||||
name+metadata.DirMetaFileSuffix,
|
||||
name+metadata.DirMetaFileSuffix,
|
||||
perm,
|
||||
c.backupVersion >= versionPermissionSwitchedToID)
|
||||
|
||||
c.items = append(c.items, item)
|
||||
|
||||
if err != nil {
|
||||
return c, err
|
||||
}
|
||||
|
||||
default:
|
||||
return c, clues.New(fmt.Sprintf("bad backup version.version %d", c.backupVersion))
|
||||
}
|
||||
|
||||
return c, nil
|
||||
}
|
||||
|
||||
// withPermissions adds permissions to the folder represented by this
|
||||
// onedriveCollection.
|
||||
func (c *onedriveCollection) withPermissions(perm PermData) (*onedriveCollection, error) {
|
||||
// These versions didn't store permissions for the folder or didn't store them
|
||||
// in the folder's collection.
|
||||
if c.backupVersion < version.OneDrive4DirIncludesPermissions {
|
||||
return c, nil
|
||||
}
|
||||
|
||||
name := c.PathElements[len(c.PathElements)-1]
|
||||
metaName := name
|
||||
|
||||
if c.backupVersion >= version.OneDrive5DirMetaNoName {
|
||||
// We switched to just .dirmeta for metadata file names.
|
||||
metaName = ""
|
||||
}
|
||||
|
||||
if name == odConsts.RootPathDir {
|
||||
return c, nil
|
||||
}
|
||||
|
||||
md, err := onedriveMetadata(
|
||||
name,
|
||||
metaName+metadata.DirMetaFileSuffix,
|
||||
metaName+metadata.DirMetaFileSuffix,
|
||||
perm,
|
||||
c.backupVersion >= versionPermissionSwitchedToID)
|
||||
if err != nil {
|
||||
return c, err
|
||||
}
|
||||
|
||||
c.items = append(c.items, md)
|
||||
c.aux = append(c.aux, md)
|
||||
|
||||
return c, err
|
||||
}
|
||||
|
||||
type testOneDriveData struct {
|
||||
FileName string `json:"fileName,omitempty"`
|
||||
Data []byte `json:"data,omitempty"`
|
||||
}
|
||||
|
||||
func onedriveItemWithData(
|
||||
name, lookupKey string,
|
||||
fileData []byte,
|
||||
) (ItemInfo, error) {
|
||||
content := testOneDriveData{
|
||||
FileName: lookupKey,
|
||||
Data: fileData,
|
||||
}
|
||||
|
||||
serialized, err := json.Marshal(content)
|
||||
if err != nil {
|
||||
return ItemInfo{}, clues.Stack(err)
|
||||
}
|
||||
|
||||
return ItemInfo{
|
||||
name: name,
|
||||
data: serialized,
|
||||
lookupKey: lookupKey,
|
||||
}, nil
|
||||
}
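
// Quick sanity sketch (assumed values): the wire shape onedriveItemWithData
// produces. encoding/json base64-encodes the Data bytes.
func exampleItemPayload() {
    b, _ := json.Marshal(testOneDriveData{
        FileName: "report.data",
        Data:     []byte("hi"),
    })

    fmt.Println(string(b)) // {"fileName":"report.data","data":"aGk="}
}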

func onedriveMetadata(
    fileName, itemID, lookupKey string,
    perm PermData,
    permUseID bool,
) (ItemInfo, error) {
    testMeta := getMetadata(fileName, perm, permUseID)

    testMetaJSON, err := json.Marshal(testMeta)
    if err != nil {
        return ItemInfo{}, clues.Wrap(err, "marshalling metadata")
    }

    return ItemInfo{
        name:      itemID,
        data:      testMetaJSON,
        lookupKey: lookupKey,
    }, nil
}

func GetCollectionsAndExpected(
    config ConfigInfo,
    testCollections []ColInfo,
    backupVersion int,
) (int, int, []data.RestoreCollection, map[string]map[string][]byte, error) {
    var (
        collections     []data.RestoreCollection
        expectedData    = map[string]map[string][]byte{}
        totalItems      = 0
        totalKopiaItems = 0
    )

    for _, owner := range config.ResourceOwners {
        numItems, kopiaItems, ownerCollections, userExpectedData, err := collectionsForInfo(
            config.Service,
            config.Tenant,
            owner,
            config.Dest,
            testCollections,
            backupVersion,
        )
        if err != nil {
            return totalItems, totalKopiaItems, collections, expectedData, err
        }

        collections = append(collections, ownerCollections...)
        totalItems += numItems
        totalKopiaItems += kopiaItems

        maps.Copy(expectedData, userExpectedData)
    }

    return totalItems, totalKopiaItems, collections, expectedData, nil
}
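
// A hedged sketch (values assumed) of the call pattern now that the exported
// GetCollectionsAndExpected returns an error rather than taking a *testing.T:
//
//	totalItems, totalKopiaItems, collections, expectedData, err := GetCollectionsAndExpected(
//		config,
//		testCollections,
//		version.Backup)
//	require.NoError(t, err)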
@ -11,7 +11,6 @@ import (
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
    "github.com/stretchr/testify/suite"
    "golang.org/x/exp/maps"

    inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
    exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
@ -407,65 +406,30 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
// Exchange Functions
//-------------------------------------------------------------

func getCollectionsAndExpected(
    t *testing.T,
    config configInfo,
    testCollections []colInfo,
    backupVersion int,
) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
    t.Helper()

    var (
        collections     []data.RestoreCollection
        expectedData    = map[string]map[string][]byte{}
        totalItems      = 0
        totalKopiaItems = 0
    )

    for _, owner := range config.resourceOwners {
        numItems, kopiaItems, ownerCollections, userExpectedData := collectionsForInfo(
            t,
            config.service,
            config.tenant,
            owner,
            config.dest,
            testCollections,
            backupVersion)

        collections = append(collections, ownerCollections...)
        totalItems += numItems
        totalKopiaItems += kopiaItems

        maps.Copy(expectedData, userExpectedData)
    }

    return totalItems, totalKopiaItems, collections, expectedData
}

func runRestore(
    t *testing.T,
    ctx context.Context, //revive:disable-line:context-as-argument
    config configInfo,
    config ConfigInfo,
    backupVersion int,
    collections []data.RestoreCollection,
    numRestoreItems int,
) {
    t.Logf(
        "Restoring collections to %s for resourceOwners(s) %v\n",
        config.dest.ContainerName,
        config.resourceOwners)
        config.Dest.ContainerName,
        config.ResourceOwners)

    start := time.Now()

    restoreGC := loadConnector(ctx, t, config.resource)
    restoreSel := getSelectorWith(t, config.service, config.resourceOwners, true)
    restoreGC := loadConnector(ctx, t, config.Resource)
    restoreSel := getSelectorWith(t, config.Service, config.ResourceOwners, true)
    deets, err := restoreGC.ConsumeRestoreCollections(
        ctx,
        backupVersion,
        config.acct,
        config.Acct,
        restoreSel,
        config.dest,
        config.opts,
        config.Dest,
        config.Opts,
        collections,
        fault.New(true))
    require.NoError(t, err, clues.ToCore(err))
@ -489,30 +453,30 @@ func runRestore(
func runBackupAndCompare(
    t *testing.T,
    ctx context.Context, //revive:disable-line:context-as-argument
    config configInfo,
    config ConfigInfo,
    expectedData map[string]map[string][]byte,
    totalItems int,
    totalKopiaItems int,
    inputCollections []colInfo,
    inputCollections []ColInfo,
) {
    t.Helper()

    // Run a backup and compare its output with what we put in.
    cats := make(map[path.CategoryType]struct{}, len(inputCollections))
    for _, c := range inputCollections {
        cats[c.category] = struct{}{}
        cats[c.Category] = struct{}{}
    }

    var (
        expectedDests = make([]destAndCats, 0, len(config.resourceOwners))
        expectedDests = make([]destAndCats, 0, len(config.ResourceOwners))
        idToName      = map[string]string{}
        nameToID      = map[string]string{}
    )

    for _, ro := range config.resourceOwners {
    for _, ro := range config.ResourceOwners {
        expectedDests = append(expectedDests, destAndCats{
            resourceOwner: ro,
            dest:          config.dest.ContainerName,
            dest:          config.Dest.ContainerName,
            cats:          cats,
        })

@ -520,10 +484,10 @@ func runBackupAndCompare(
        nameToID[ro] = ro
    }

    backupGC := loadConnector(ctx, t, config.resource)
    backupGC := loadConnector(ctx, t, config.Resource)
    backupGC.IDNameLookup = inMock.NewCache(idToName, nameToID)

    backupSel := backupSelectorForExpected(t, config.service, expectedDests)
    backupSel := backupSelectorForExpected(t, config.Service, expectedDests)
    t.Logf("Selective backup of %s\n", backupSel)

    start := time.Now()
@ -533,7 +497,7 @@ func runBackupAndCompare(
        backupSel,
        nil,
        version.NoBackup,
        config.opts,
        config.Opts,
        fault.New(true))
    require.NoError(t, err, clues.ToCore(err))
    // No excludes yet because this isn't an incremental backup.
@ -570,22 +534,23 @@ func runRestoreBackupTest(
    ctx, flush := tester.NewContext()
    defer flush()

    config := configInfo{
        acct:           acct,
        opts:           opts,
        resource:       test.resource,
        service:        test.service,
        tenant:         tenant,
        resourceOwners: resourceOwners,
        dest:           tester.DefaultTestRestoreDestination(""),
    config := ConfigInfo{
        Acct:           acct,
        Opts:           opts,
        Resource:       test.resource,
        Service:        test.service,
        Tenant:         tenant,
        ResourceOwners: resourceOwners,
        Dest:           tester.DefaultTestRestoreDestination(""),
    }

    totalItems, totalKopiaItems, collections, expectedData := getCollectionsAndExpected(
        t,
    totalItems, totalKopiaItems, collections, expectedData, err := GetCollectionsAndExpected(
        config,
        test.collections,
        version.Backup)

    require.NoError(t, err)

    runRestore(
        t,
        ctx,
@ -616,21 +581,21 @@ func runRestoreTestWithVerion(
    ctx, flush := tester.NewContext()
    defer flush()

    config := configInfo{
        acct:           acct,
        opts:           opts,
        resource:       test.resource,
        service:        test.service,
        tenant:         tenant,
        resourceOwners: resourceOwners,
        dest:           tester.DefaultTestRestoreDestination(""),
    config := ConfigInfo{
        Acct:           acct,
        Opts:           opts,
        Resource:       test.resource,
        Service:        test.service,
        Tenant:         tenant,
        ResourceOwners: resourceOwners,
        Dest:           tester.DefaultTestRestoreDestination(""),
    }

    totalItems, _, collections, _ := getCollectionsAndExpected(
        t,
    totalItems, _, collections, _, err := GetCollectionsAndExpected(
        config,
        test.collectionsPrevious,
        test.backupVersion)
    require.NoError(t, err)

    runRestore(
        t,
@ -655,21 +620,21 @@ func runRestoreBackupTestVersions(
    ctx, flush := tester.NewContext()
    defer flush()

    config := configInfo{
        acct:           acct,
        opts:           opts,
        resource:       test.resource,
        service:        test.service,
        tenant:         tenant,
        resourceOwners: resourceOwners,
        dest:           tester.DefaultTestRestoreDestination(""),
    config := ConfigInfo{
        Acct:           acct,
        Opts:           opts,
        Resource:       test.resource,
        Service:        test.service,
        Tenant:         tenant,
        ResourceOwners: resourceOwners,
        Dest:           tester.DefaultTestRestoreDestination(""),
    }

    totalItems, _, collections, _ := getCollectionsAndExpected(
        t,
    totalItems, _, collections, _, err := GetCollectionsAndExpected(
        config,
        test.collectionsPrevious,
        test.backupVersion)
    require.NoError(t, err)

    runRestore(
        t,
@ -680,11 +645,11 @@ func runRestoreBackupTestVersions(
        totalItems)

    // Get expected output for new version.
    totalItems, totalKopiaItems, _, expectedData := getCollectionsAndExpected(
        t,
    totalItems, totalKopiaItems, _, expectedData, err := GetCollectionsAndExpected(
        config,
        test.collectionsLatest,
        version.Backup)
    require.NoError(t, err)

    runBackupAndCompare(
        t,
@ -705,11 +670,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
    name:     "EmailsWithAttachments",
    service:  path.ExchangeService,
    resource: Users,
    collections: []colInfo{
    collections: []ColInfo{
        {
            pathElements: []string{"Inbox"},
            category:     path.EmailCategory,
            items: []itemInfo{
            PathElements: []string{"Inbox"},
            Category:     path.EmailCategory,
            Items: []ItemInfo{
                {
                    name: "someencodeditemID",
                    data: exchMock.MessageWithDirectAttachment(
@ -732,11 +697,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
    name:     "MultipleEmailsMultipleFolders",
    service:  path.ExchangeService,
    resource: Users,
    collections: []colInfo{
    collections: []ColInfo{
        {
            pathElements: []string{"Inbox"},
            category:     path.EmailCategory,
            items: []itemInfo{
            PathElements: []string{"Inbox"},
            Category:     path.EmailCategory,
            Items: []ItemInfo{
                {
                    name: "someencodeditemID",
                    data: exchMock.MessageWithBodyBytes(
@ -749,9 +714,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
            },
        },
        {
            pathElements: []string{"Work"},
            category:     path.EmailCategory,
            items: []itemInfo{
            PathElements: []string{"Work"},
            Category:     path.EmailCategory,
            Items: []ItemInfo{
                {
                    name: "someencodeditemID2",
                    data: exchMock.MessageWithBodyBytes(
@ -773,9 +738,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
            },
        },
        {
            pathElements: []string{"Work", "Inbox"},
            category:     path.EmailCategory,
            items: []itemInfo{
            PathElements: []string{"Work", "Inbox"},
            Category:     path.EmailCategory,
            Items: []ItemInfo{
                {
                    name: "someencodeditemID4",
                    data: exchMock.MessageWithBodyBytes(
@ -788,9 +753,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
            },
        },
        {
            pathElements: []string{"Work", "Inbox", "Work"},
            category:     path.EmailCategory,
            items: []itemInfo{
            PathElements: []string{"Work", "Inbox", "Work"},
            Category:     path.EmailCategory,
            Items: []ItemInfo{
                {
                    name: "someencodeditemID5",
                    data: exchMock.MessageWithBodyBytes(
@ -808,11 +773,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
    name:     "MultipleContactsSingleFolder",
    service:  path.ExchangeService,
    resource: Users,
    collections: []colInfo{
    collections: []ColInfo{
        {
            pathElements: []string{"Contacts"},
            category:     path.ContactsCategory,
            items: []itemInfo{
            PathElements: []string{"Contacts"},
            Category:     path.ContactsCategory,
            Items: []ItemInfo{
                {
                    name: "someencodeditemID",
                    data: exchMock.ContactBytes("Ghimley"),
@ -836,11 +801,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
    name:     "MultipleContactsMultipleFolders",
    service:  path.ExchangeService,
    resource: Users,
    collections: []colInfo{
    collections: []ColInfo{
        {
            pathElements: []string{"Work"},
            category:     path.ContactsCategory,
            items: []itemInfo{
            PathElements: []string{"Work"},
            Category:     path.ContactsCategory,
            Items: []ItemInfo{
                {
                    name: "someencodeditemID",
                    data: exchMock.ContactBytes("Ghimley"),
@ -859,9 +824,9 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
            },
        },
        {
            pathElements: []string{"Personal"},
            category:     path.ContactsCategory,
            items: []itemInfo{
            PathElements: []string{"Personal"},
            Category:     path.ContactsCategory,
            Items: []ItemInfo{
                {
                    name: "someencodeditemID4",
                    data: exchMock.ContactBytes("Argon"),
@ -971,11 +936,11 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
    name:     "Contacts",
    service:  path.ExchangeService,
    resource: Users,
    collections: []colInfo{
    collections: []ColInfo{
        {
            pathElements: []string{"Work"},
            category:     path.ContactsCategory,
            items: []itemInfo{
            PathElements: []string{"Work"},
            Category:     path.ContactsCategory,
            Items: []ItemInfo{
                {
                    name: "someencodeditemID",
                    data: exchMock.ContactBytes("Ghimley"),
@ -984,9 +949,9 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
            },
        },
        {
            pathElements: []string{"Personal"},
            category:     path.ContactsCategory,
            items: []itemInfo{
            PathElements: []string{"Personal"},
            Category:     path.ContactsCategory,
            Items: []ItemInfo{
                {
                    name: "someencodeditemID2",
                    data: exchMock.ContactBytes("Irgot"),
@ -1012,9 +977,9 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
    // },
    // },
    // {
    // pathElements: []string{"Personal"},
    // category: path.EventsCategory,
    // items: []itemInfo{
    // PathElements: []string{"Personal"},
    // Category: path.EventsCategory,
    // Items: []ItemInfo{
    // {
    // name: "someencodeditemID2",
    // data: exchMock.EventWithSubjectBytes("Irgot"),
@ -1045,19 +1010,20 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
    resourceOwner: suite.user,
    dest:          dest.ContainerName,
    cats: map[path.CategoryType]struct{}{
        collection.category: {},
        collection.Category: {},
    },
})

totalItems, _, collections, expectedData := collectionsForInfo(
    t,
totalItems, _, collections, expectedData, err := collectionsForInfo(
    test.service,
    suite.connector.tenant,
    suite.user,
    dest,
    []colInfo{collection},
    []ColInfo{collection},
    version.Backup,
)
require.NoError(t, err)

allItems += totalItems

for k, v := range expectedData {
@ -1123,10 +1089,10 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames

t.Log("Backup enumeration complete")

ci := configInfo{
    opts: control.Options{RestorePermissions: true},
ci := ConfigInfo{
    Opts: control.Options{RestorePermissions: true},
    // Alright to be empty, needed for OneDrive.
    dest: control.RestoreDestination{},
    Dest: control.RestoreDestination{},
}

// Pull the data prior to waiting for the status as otherwise it will
@ -1149,11 +1115,11 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup_largeMailAttac
    name:     "EmailsWithLargeAttachments",
    service:  path.ExchangeService,
    resource: Users,
    collections: []colInfo{
    collections: []ColInfo{
        {
            pathElements: []string{"Inbox"},
            category:     path.EmailCategory,
            items: []itemInfo{
            PathElements: []string{"Inbox"},
            Category:     path.EmailCategory,
            Items: []ItemInfo{
                {
                    name: "35mbAttachment",
                    data: exchMock.MessageWithSizedAttachment(subjectText, 35),

188
src/internal/connector/graph_connector_test_helper.go
Normal file
@ -0,0 +1,188 @@
package connector

import (
    "bytes"
    "context"
    "io"

    exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
    "github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/pkg/account"
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/path"
)

type ColInfo struct {
    // Elements (in order) for the path representing this collection. Should
    // only contain elements after the prefix that corso uses for the path. For
    // example, a collection for the Inbox folder in exchange mail would just be
    // "Inbox".
    PathElements []string
    Category     path.CategoryType
    Items        []ItemInfo
    // AuxItems are items that can be retrieved with Fetch but won't be returned
    // by Items(). These files do not directly participate in comparisons at the
    // end of a test.
    AuxItems []ItemInfo
}

type ItemInfo struct {
    // lookupKey is a string that can be used to find this data from a set of
    // other data in the same collection. This key should be something that will
    // be the same before and after restoring the item in M365 and may not be
    // the M365 ID. When restoring items out of place, the item is assigned a
    // new ID making it unsuitable for a lookup key.
    lookupKey string
    name      string
    data      []byte
}

type ConfigInfo struct {
    Acct           account.Account
    Opts           control.Options
    Resource       Resource
    Service        path.ServiceType
    Tenant         string
    ResourceOwners []string
    Dest           control.RestoreDestination
}

func mustToDataLayerPath(
    service path.ServiceType,
    tenant, resourceOwner string,
    category path.CategoryType,
    elements []string,
    isItem bool,
) (path.Path, error) {
    res, err := path.Build(tenant, resourceOwner, service, category, isItem, elements...)
    if err != nil {
        return nil, err
    }

    return res, err
}

// backupOutputPathFromRestore returns a path.Path denoting the location in
// kopia the data will be placed at. The location is a data-type specific
// combination of the location the data was recently restored to and where the
// data was originally in the hierarchy.
func backupOutputPathFromRestore(
    restoreDest control.RestoreDestination,
    inputPath path.Path,
) (path.Path, error) {
    base := []string{restoreDest.ContainerName}

    // OneDrive has leading information like the drive ID.
    if inputPath.Service() == path.OneDriveService || inputPath.Service() == path.SharePointService {
        folders := inputPath.Folders()
        base = append(append([]string{}, folders[:3]...), restoreDest.ContainerName)

        if len(folders) > 3 {
            base = append(base, folders[3:]...)
        }
    }

    if inputPath.Service() == path.ExchangeService && inputPath.Category() == path.EmailCategory {
        base = append(base, inputPath.Folders()...)
    }

    return mustToDataLayerPath(
        inputPath.Service(),
        inputPath.Tenant(),
        inputPath.ResourceOwner(),
        inputPath.Category(),
        base,
        false,
    )
}
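
// Worked example (assumed values) for backupOutputPathFromRestore: OneDrive
// paths keep their three leading folder elements (drives/<id>/root:) and
// splice the restore container in front of the remaining hierarchy.
func exampleBackupOutputPath() {
    inputPath, err := path.Build(
        "tenant",
        "user@example.com",
        path.OneDriveService,
        path.FilesCategory,
        false,
        "drives", "driveID", "root:", "folderA")
    if err != nil {
        return
    }

    dest := control.RestoreDestination{ContainerName: "Corso_Restore_x"}

    outPath, err := backupOutputPathFromRestore(dest, inputPath)
    if err != nil {
        return
    }

    // outPath folders: drives/driveID/root:/Corso_Restore_x/folderA
    _ = outPath
}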

// TODO(ashmrtn): Make this an actual mock class that can be used in other
// packages.
type mockRestoreCollection struct {
    data.Collection
    auxItems map[string]data.Stream
}

func (rc mockRestoreCollection) Fetch(
    ctx context.Context,
    name string,
) (data.Stream, error) {
    res := rc.auxItems[name]
    if res == nil {
        return nil, data.ErrNotFound
    }

    return res, nil
}

func collectionsForInfo(
    service path.ServiceType,
    tenant, user string,
    dest control.RestoreDestination,
    allInfo []ColInfo,
    backupVersion int,
) (int, int, []data.RestoreCollection, map[string]map[string][]byte, error) {
    var (
        collections  = make([]data.RestoreCollection, 0, len(allInfo))
        expectedData = make(map[string]map[string][]byte, len(allInfo))
        totalItems   = 0
        kopiaEntries = 0
    )

    for _, info := range allInfo {
        pth, err := mustToDataLayerPath(
            service,
            tenant,
            user,
            info.Category,
            info.PathElements,
            false)
        if err != nil {
            return totalItems, kopiaEntries, collections, expectedData, err
        }

        mc := exchMock.NewCollection(pth, pth, len(info.Items))

        baseDestPath, err := backupOutputPathFromRestore(dest, pth)
        if err != nil {
            return totalItems, kopiaEntries, collections, expectedData, err
        }

        baseExpected := expectedData[baseDestPath.String()]
        if baseExpected == nil {
            expectedData[baseDestPath.String()] = make(map[string][]byte, len(info.Items))
            baseExpected = expectedData[baseDestPath.String()]
        }

        for i := 0; i < len(info.Items); i++ {
            mc.Names[i] = info.Items[i].name
            mc.Data[i] = info.Items[i].data

            baseExpected[info.Items[i].lookupKey] = info.Items[i].data

            // We do not count metadata files against item count
            if backupVersion > 0 &&
                (service == path.OneDriveService || service == path.SharePointService) &&
                metadata.HasMetaSuffix(info.Items[i].name) {
                continue
            }

            totalItems++
        }

        c := mockRestoreCollection{Collection: mc, auxItems: map[string]data.Stream{}}

        for _, aux := range info.AuxItems {
            c.auxItems[aux.name] = &exchMock.Data{
                ID:     aux.name,
                Reader: io.NopCloser(bytes.NewReader(aux.data)),
            }
        }

        collections = append(collections, c)
        kopiaEntries += len(info.Items)
    }

    return totalItems, kopiaEntries, collections, expectedData, nil
}
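
// Sketch of the counting rule above (file names assumed): metadata files
// still produce kopia entries but are excluded from the item totals used for
// post-restore comparisons.
func exampleItemCounting() (int, int) {
    names := []string{"file.data", "file.meta", "dir.dirmeta"}
    totalItems, kopiaEntries := 0, 0

    for _, n := range names {
        kopiaEntries++

        if metadata.HasMetaSuffix(n) {
            continue
        }

        totalItems++
    }

    return totalItems, kopiaEntries // 1, 3
}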
@ -15,7 +15,6 @@ import (

    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/connector/graph"
    "github.com/alcionai/corso/src/internal/connector/onedrive/api"
    "github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
    "github.com/alcionai/corso/src/internal/connector/support"
    "github.com/alcionai/corso/src/internal/data"
@ -25,6 +24,7 @@ import (
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/logger"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)

const (
@ -318,6 +318,7 @@ func (i *Item) ModTime() time.Time { return i.info.Modified() }
// getDriveItemContent fetch drive item's contents with retries
func (oc *Collection) getDriveItemContent(
    ctx context.Context,
    driveID string,
    item models.DriveItemable,
    errs *fault.Bus,
) (io.ReadCloser, error) {
@ -338,14 +339,14 @@ func (oc *Collection) getDriveItemContent(
    if err != nil {
        if clues.HasLabel(err, graph.LabelsMalware) || (item != nil && item.GetMalware() != nil) {
            logger.CtxErr(ctx, err).With("skipped_reason", fault.SkipMalware).Info("item flagged as malware")
            el.AddSkip(fault.FileSkip(fault.SkipMalware, itemID, itemName, graph.ItemInfo(item)))
            el.AddSkip(fault.FileSkip(fault.SkipMalware, driveID, itemID, itemName, graph.ItemInfo(item)))

            return nil, clues.Wrap(err, "malware item").Label(graph.LabelsSkippable)
        }

        if clues.HasLabel(err, graph.LabelStatus(http.StatusNotFound)) || graph.IsErrDeletedInFlight(err) {
            logger.CtxErr(ctx, err).With("skipped_reason", fault.SkipNotFound).Info("item not found")
            el.AddSkip(fault.FileSkip(fault.SkipNotFound, itemID, itemName, graph.ItemInfo(item)))
            el.AddSkip(fault.FileSkip(fault.SkipNotFound, driveID, itemID, itemName, graph.ItemInfo(item)))

            return nil, clues.Wrap(err, "deleted item").Label(graph.LabelsSkippable)
        }
@ -360,7 +361,7 @@ func (oc *Collection) getDriveItemContent(
        // restore, or we have to handle it separately by somehow
        // deleting the entire collection.
        logger.CtxErr(ctx, err).With("skipped_reason", fault.SkipBigOneNote).Info("max OneNote file size exceeded")
        el.AddSkip(fault.FileSkip(fault.SkipBigOneNote, itemID, itemName, graph.ItemInfo(item)))
        el.AddSkip(fault.FileSkip(fault.SkipBigOneNote, driveID, itemID, itemName, graph.ItemInfo(item)))

        return nil, clues.Wrap(err, "max oneNote item").Label(graph.LabelsSkippable)
    }
@ -528,7 +529,7 @@ func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) {
    // attempts to read bytes. Assumption is that kopia will check things
    // like file modtimes before attempting to read.
    itemReader := lazy.NewLazyReadCloser(func() (io.ReadCloser, error) {
        itemData, err := oc.getDriveItemContent(ctx, item, errs)
        itemData, err := oc.getDriveItemContent(ctx, oc.driveID, item, errs)
        if err != nil {
            return nil, err
        }

@ -710,7 +710,7 @@ func (suite *GetDriveItemUnitTestSuite) TestGetDriveItem_error() {
        return item, nil
    }

    _, err := col.getDriveItemContent(ctx, item, errs)
    _, err := col.getDriveItemContent(ctx, "driveID", item, errs)
    if test.err == nil {
        assert.NoError(t, err, clues.ToCore(err))
        return
@ -734,8 +734,8 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
        gr      graph.Requester
        driveID string
        iorc    = io.NopCloser(bytes.NewReader([]byte("fnords")))
        item    = &models.DriveItem{}
        itemWID = &models.DriveItem{}
        item    = models.NewDriveItem()
        itemWID = models.NewDriveItem()
    )

    itemWID.SetId(ptr.To("brainhooldy"))

@ -9,13 +9,11 @@ import (

    "github.com/alcionai/clues"
    "github.com/microsoftgraph/msgraph-sdk-go/models"
    "github.com/pkg/errors"
    "golang.org/x/exp/maps"

    "github.com/alcionai/corso/src/internal/common/prefixmatcher"
    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/connector/graph"
    "github.com/alcionai/corso/src/internal/connector/onedrive/api"
    "github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
    "github.com/alcionai/corso/src/internal/connector/support"
    "github.com/alcionai/corso/src/internal/data"
@ -24,6 +22,7 @@ import (
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/logger"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)

type driveSource int
@ -193,42 +192,41 @@ func deserializeMetadata(
                continue
            }

            // This is conservative, but report an error if any of the items for
            // any of the deserialized maps have duplicate drive IDs. This will
            // cause the entire backup to fail, but it's not clear if higher
            // layers would have caught this. Worst case if we don't handle this
            // we end up in a situation where we're sourcing items from the wrong
            // base in kopia wrapper.
            if errors.Is(err, errExistingMapping) {
                return nil, nil, clues.Wrap(err, "deserializing metadata file").WithClues(ictx)
            // This is conservative, but report an error if either any of the items
            // for any of the deserialized maps have duplicate drive IDs or there's
            // some other problem deserializing things. This will cause the entire
            // backup to fail, but it's not clear if higher layers would have caught
            // these cases. We can make the logic for deciding when to continue vs.
            // when to fail less strict in the future if needed.
            if err != nil {
                return nil, nil, clues.Stack(err).WithClues(ictx)
            }

            err = clues.Stack(err).WithClues(ictx)

            el.AddRecoverable(err)
            logger.CtxErr(ictx, err).Error("deserializing base backup metadata")
        }
    }

    // Go through and remove partial results (i.e. path mapping but no delta URL
    // or vice-versa).
    for k, v := range prevDeltas {
        // Remove entries with an empty delta token as it's not useful.
        if len(v) == 0 {
            delete(prevDeltas, k)
            delete(prevFolders, k)
    // Go through and remove delta tokens if we didn't have any paths for them
    // or one or more paths are empty (incorrect somehow). This will ensure we
    // don't accidentally try to pull in delta results when we should have
    // enumerated everything instead.
    //
    // Loop over the set of previous deltas because it's alright to have paths
    // without a delta but not to have a delta without paths. This way ensures
    // we check at least all the path sets for the deltas we have.
    for drive := range prevDeltas {
        paths := prevFolders[drive]
        if len(paths) == 0 {
            delete(prevDeltas, drive)
        }

        // Remove entries without a folders map as we can't tell kopia the
        // hierarchy changes.
        if _, ok := prevFolders[k]; !ok {
            delete(prevDeltas, k)
        }
    }

    for k := range prevFolders {
        if _, ok := prevDeltas[k]; !ok {
            delete(prevFolders, k)
        // Drives have only a single delta token. If we find any folder that
        // seems like the path is bad we need to drop the entire token and start
        // fresh. Since we know the token will be gone we can also stop checking
        // for other possibly incorrect folder paths.
        for _, prevPath := range paths {
            if len(prevPath) == 0 {
                delete(prevDeltas, drive)
                break
            }
        }
    }
}
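
// Standalone sketch of the pruning invariant introduced above: a delta token
// is only kept when every previous folder path for its drive is present and
// non-empty; otherwise the drive falls back to full enumeration. The map
// shapes mirror deserializeMetadata's return values; the literals are assumed.
func examplePruneDeltas() map[string]string {
    prevDeltas := map[string]string{"drive1": "token1", "drive2": "token2"}
    prevFolders := map[string]map[string]string{
        "drive1": {"folder": "tenant/onedrive/user/files/folder"},
        "drive2": {"folder": ""}, // empty path: treat the token as invalid
    }

    for drive := range prevDeltas {
        paths := prevFolders[drive]
        if len(paths) == 0 {
            delete(prevDeltas, drive)
        }

        for _, p := range paths {
            if len(p) == 0 {
                delete(prevDeltas, drive)
                break
            }
        }
    }

    return prevDeltas // {"drive1": "token1"}
}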
@ -717,10 +715,10 @@ func (c *Collections) UpdateCollections(

    if item.GetMalware() != nil {
        addtl := graph.ItemInfo(item)
        skip := fault.FileSkip(fault.SkipMalware, itemID, itemName, addtl)
        skip := fault.FileSkip(fault.SkipMalware, driveID, itemID, itemName, addtl)

        if isFolder {
            skip = fault.ContainerSkip(fault.SkipMalware, itemID, itemName, addtl)
            skip = fault.ContainerSkip(fault.SkipMalware, driveID, itemID, itemName, addtl)
        }

        errs.AddSkip(skip)

@ -19,8 +19,6 @@ import (
    pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock"
    "github.com/alcionai/corso/src/internal/connector/graph"
    gapi "github.com/alcionai/corso/src/internal/connector/graph/api"
    "github.com/alcionai/corso/src/internal/connector/onedrive/api"
    "github.com/alcionai/corso/src/internal/connector/onedrive/api/mock"
    "github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
    "github.com/alcionai/corso/src/internal/connector/support"
    "github.com/alcionai/corso/src/internal/data"
@ -29,6 +27,8 @@ import (
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/selectors"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
    "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
)

type statePath struct {
@ -912,8 +912,12 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
        },
    },
    expectedDeltas: map[string]string{},
    expectedPaths:  map[string]map[string]string{},
    errCheck:       assert.NoError,
    expectedPaths: map[string]map[string]string{
        driveID1: {
            folderID1: path1,
        },
    },
    errCheck: assert.NoError,
},
{
    // An empty path map but valid delta results in metadata being returned
@ -936,7 +940,7 @@
        }
    },
},
expectedDeltas: map[string]string{driveID1: deltaURL1},
expectedDeltas: map[string]string{},
expectedPaths:  map[string]map[string]string{driveID1: {}},
errCheck:       assert.NoError,
},
@ -965,9 +969,13 @@
        }
    },
},
expectedDeltas: map[string]string{},
expectedPaths:  map[string]map[string]string{},
errCheck:       assert.NoError,
expectedDeltas: map[string]string{driveID1: ""},
expectedPaths: map[string]map[string]string{
    driveID1: {
        folderID1: path1,
    },
},
errCheck: assert.NoError,
},
{
    name: "SuccessTwoDrivesTwoCollections",
@ -1033,9 +1041,7 @@
        }
    },
},
expectedDeltas: map[string]string{},
expectedPaths:  map[string]map[string]string{},
errCheck:       assert.Error,
errCheck: assert.Error,
},
{
    // Unexpected files are logged and skipped. They don't cause an error to
@ -1167,8 +1173,8 @@
    deltas, paths, err := deserializeMetadata(ctx, cols, fault.New(true))
    test.errCheck(t, err)

    assert.Equal(t, test.expectedDeltas, deltas)
    assert.Equal(t, test.expectedPaths, paths)
    assert.Equal(t, test.expectedDeltas, deltas, "deltas")
    assert.Equal(t, test.expectedPaths, paths, "paths")
    })
}
}
@ -1281,9 +1287,11 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
    prevFolderPaths map[string]map[string]string
    // Collection name -> set of item IDs. We can't check item data because
    // that's not mocked out. Metadata is checked separately.
    expectedCollections map[string]map[data.CollectionState][]string
    expectedDeltaURLs map[string]string
    expectedFolderPaths map[string]map[string]string
    expectedCollections map[string]map[data.CollectionState][]string
    expectedDeltaURLs   map[string]string
    expectedFolderPaths map[string]map[string]string
    // Items that should be excluded from the base. Only populated if the delta
    // was valid and there was at least 1 previous folder path.
    expectedDelList      *pmMock.PrefixMap
    expectedSkippedCount int
    // map full or previous path (prefers full) -> bool
@ -1366,10 +1374,8 @@
        },
    },
},
errCheck: assert.NoError,
prevFolderPaths: map[string]map[string]string{
    driveID1: {},
},
errCheck:        assert.NoError,
prevFolderPaths: map[string]map[string]string{},
expectedCollections: map[string]map[data.CollectionState][]string{
    rootFolderPath1: {data.NewState: {}},
    folderPath1:     {data.NewState: {"folder", "file"}},
@ -1383,9 +1389,11 @@
        "folder": folderPath1,
    },
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
    rootFolderPath1: getDelList("file"),
}),
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
    rootFolderPath1: true,
    folderPath1:     true,
},
},
{
    name: "OneDrive_OneItemPage_NoErrors_FileRenamedMultiple",
@ -1403,10 +1411,8 @@
        },
    },
},
errCheck: assert.NoError,
prevFolderPaths: map[string]map[string]string{
    driveID1: {},
},
errCheck:        assert.NoError,
prevFolderPaths: map[string]map[string]string{},
expectedCollections: map[string]map[data.CollectionState][]string{
    rootFolderPath1: {data.NewState: {}},
    folderPath1:     {data.NewState: {"folder", "file"}},
@ -1420,9 +1426,11 @@
        "folder": folderPath1,
    },
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
    rootFolderPath1: getDelList("file"),
}),
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
    rootFolderPath1: true,
    folderPath1:     true,
},
},
{
    name: "OneDrive_OneItemPage_NoErrors_FileMovedMultiple",
@ -1442,7 +1450,9 @@
},
errCheck: assert.NoError,
prevFolderPaths: map[string]map[string]string{
    driveID1: {},
    driveID1: {
        "root": rootFolderPath1,
    },
},
expectedCollections: map[string]map[data.CollectionState][]string{
    rootFolderPath1: {data.NotMovedState: {"file"}},
@ -1484,11 +1494,18 @@
    rootFolderPath1: {data.NewState: {}},
    folderPath1:     {data.NewState: {"folder", "file"}},
},
expectedDeltaURLs:   map[string]string{},
expectedFolderPaths: map[string]map[string]string{},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
    rootFolderPath1: getDelList("file"),
}),
expectedDeltaURLs: map[string]string{},
expectedFolderPaths: map[string]map[string]string{
    driveID1: {
        "root":   rootFolderPath1,
        "folder": folderPath1,
    },
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
    rootFolderPath1: true,
    folderPath1:     true,
},
},
{
    name: "OneDrive_TwoItemPages_NoErrors",
@ -1530,9 +1547,11 @@
        "folder": folderPath1,
    },
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
    rootFolderPath1: getDelList("file", "file2"),
}),
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
    rootFolderPath1: true,
    folderPath1:     true,
},
},
{
    name: "TwoDrives_OneItemPageEach_NoErrors",
@ -1587,10 +1606,13 @@
        "folder2": folderPath2,
    },
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
    rootFolderPath1: getDelList("file"),
    rootFolderPath2: getDelList("file2"),
}),
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
    rootFolderPath1: true,
    folderPath1:     true,
    rootFolderPath2: true,
    folderPath2:     true,
},
},
{
    name: "TwoDrives_DuplicateIDs_OneItemPageEach_NoErrors",
@ -1645,10 +1667,13 @@
        "folder": folderPath2,
    },
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
    rootFolderPath1: getDelList("file"),
    rootFolderPath2: getDelList("file2"),
}),
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
    rootFolderPath1: true,
    folderPath1:     true,
    rootFolderPath2: true,
    folderPath2:     true,
},
},
{
    name: "OneDrive_OneItemPage_Errors",
@ -1772,7 +1797,9 @@
},
errCheck: assert.NoError,
prevFolderPaths: map[string]map[string]string{
    driveID1: {},
    driveID1: {
        "root": rootFolderPath1,
    },
},
expectedCollections: map[string]map[data.CollectionState][]string{
    rootFolderPath1: {data.NotMovedState: {"file"}},
@ -1929,9 +1956,11 @@
        "folder": folderPath1,
    },
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
    rootFolderPath1: getDelList("file", "file2"),
}),
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
    rootFolderPath1: true,
    folderPath1:     true,
},
expectedSkippedCount: 2,
},
{
@ -2110,9 +2139,10 @@
        "root": rootFolderPath1,
    },
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
    rootFolderPath1: getDelList("file"),
}),
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
    rootFolderPath1: true,
},
},
{
    name: "One Drive Item Made And Deleted",
@ -2153,9 +2183,11 @@
        "folder": folderPath1,
    },
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
    rootFolderPath1: getDelList("file"),
}),
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
    rootFolderPath1: true,
    folderPath1:     true,
},
},
{
    name: "One Drive Random Folder Delete",
@ -2187,6 +2219,9 @@
    },
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
    rootFolderPath1: true,
},
},
{
    name: "One Drive Random Item Delete",
@ -2217,9 +2252,10 @@
        "root": rootFolderPath1,
    },
},
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{
    rootFolderPath1: getDelList("file"),
}),
expectedDelList: pmMock.NewPrefixMap(map[string]map[string]struct{}{}),
doNotMergeItems: map[string]bool{
    rootFolderPath1: true,
},
},
{
    name: "TwoPriorDrives_OneTombstoned",
@ -2352,7 +2388,7 @@
    }

    assert.Equal(t, test.expectedDeltaURLs, deltas, "delta urls")
    assert.Equal(t, test.expectedFolderPaths, paths, "folder paths")
    assert.Equal(t, test.expectedFolderPaths, paths, "folder paths")

    continue
}
@ -2426,7 +2462,7 @@ func coreItem(
    case isFolder:
        item.SetFolder(models.NewFolder())
    case isPackage:
        item.SetPackage(models.NewPackage_escaped())
        item.SetPackage(models.NewPackageEscaped())
    }

    return item
@ -2493,7 +2529,7 @@ func delItem(
    case isFolder:
        item.SetFolder(models.NewFolder())
    case isPackage:
        item.SetPackage(models.NewPackage_escaped())
        item.SetPackage(models.NewPackageEscaped())
    }

    return item

@ -7,4 +7,6 @@ const (
    // const used as the root-of-drive dir for the drive portion of a path prefix.
    // eg: tid/onedrive/ro/files/drives/driveid/root:/...
    RootPathDir = "root:"
    // root id for drive items
    RootID = "root"
)

@ -6,18 +6,18 @@ import (
    "strings"

    "github.com/alcionai/clues"
    "github.com/microsoftgraph/msgraph-sdk-go/drive"
    "github.com/microsoftgraph/msgraph-sdk-go/drives"
    "github.com/microsoftgraph/msgraph-sdk-go/models"
    "golang.org/x/exp/maps"

    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/connector/graph"
    gapi "github.com/alcionai/corso/src/internal/connector/graph/api"
    "github.com/alcionai/corso/src/internal/connector/onedrive/api"
    odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/logger"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)

const (
@ -224,7 +224,7 @@ func CreateItem(
    // Graph SDK doesn't yet provide a POST method for `/children` so we set the `rawUrl` ourselves as recommended
    // here: https://github.com/microsoftgraph/msgraph-sdk-go/issues/155#issuecomment-1136254310
    rawURL := fmt.Sprintf(itemChildrenRawURLFmt, driveID, parentFolderID)
    builder := drive.NewItemsRequestBuilder(rawURL, service.Adapter())
    builder := drives.NewItemItemsRequestBuilder(rawURL, service.Adapter())

    newItem, err := builder.Post(ctx, newItem, nil)
    if err != nil {
@ -266,7 +266,7 @@ func GetAllFolders(
    prefix string,
    errs *fault.Bus,
) ([]*Displayable, error) {
    drives, err := api.GetAllDrives(ctx, pager, true, maxDrivesRetries)
    drvs, err := api.GetAllDrives(ctx, pager, true, maxDrivesRetries)
    if err != nil {
        return nil, clues.Wrap(err, "getting OneDrive folders")
    }
@ -276,7 +276,7 @@ func GetAllFolders(
        el = errs.Local()
    )

    for _, d := range drives {
    for _, d := range drvs {
        if el.Failure() != nil {
            break
        }
@ -358,7 +358,12 @@ func DeleteItem(
    driveID string,
    itemID string,
) error {
    err := gs.Client().DrivesById(driveID).ItemsById(itemID).Delete(ctx, nil)
    err := gs.Client().
        Drives().
        ByDriveId(driveID).
        Items().
        ByDriveItemId(itemID).
        Delete(ctx, nil)
    if err != nil {
        return graph.Wrap(ctx, err, "deleting item").With("item_id", itemID)
    }
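
// Companion sketch (assumed, mirroring the migration above): with the
// v1-style msgraph-sdk-go builders, each URL segment becomes an explicit
// builder call, so the old DrivesById/ItemsById pairs expand into
// Drives().ByDriveId() and Items().ByDriveItemId() chains.
func exampleGetItem(
    ctx context.Context,
    gs graph.Servicer,
    driveID, itemID string,
) (models.DriveItemable, error) {
    return gs.Client().
        Drives().
        ByDriveId(driveID).
        Items().
        ByDriveItemId(itemID).
        Get(ctx, nil)
}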

@ -17,8 +17,6 @@ import (
    "github.com/alcionai/corso/src/internal/common/prefixmatcher"
    "github.com/alcionai/corso/src/internal/common/ptr"
    "github.com/alcionai/corso/src/internal/connector/graph"
    "github.com/alcionai/corso/src/internal/connector/onedrive/api"
    "github.com/alcionai/corso/src/internal/connector/onedrive/api/mock"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/pkg/account"
    "github.com/alcionai/corso/src/pkg/control"
@ -26,6 +24,8 @@ import (
    "github.com/alcionai/corso/src/pkg/logger"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/selectors"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
    "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
)

// Unit tests
@ -43,10 +43,10 @@ const (
)

func odErr(code string) *odataerrors.ODataError {
    odErr := &odataerrors.ODataError{}
    merr := odataerrors.MainError{}
    odErr := odataerrors.NewODataError()
    merr := odataerrors.NewMainError()
    merr.SetCode(&code)
    odErr.SetError(&merr)
    odErr.SetError(merr)

    return odErr
}
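
// Usage note (illustrative): the SDK's generated constructors initialize the
// model's internal backing store, which a bare struct literal would leave
// nil, so tests should build OData errors through odErr.
func exampleOdErr() error {
    return odErr("itemNotFound")
}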
|
||||
|
||||
@ -14,11 +14,10 @@ import (
|
||||
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
"github.com/alcionai/corso/src/internal/connector/graph"
|
||||
"github.com/alcionai/corso/src/internal/connector/onedrive/api"
|
||||
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
|
||||
"github.com/alcionai/corso/src/internal/connector/uploadsession"
|
||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
)
|
||||
|
||||
// downloadUrlKeys is used to find the download URL in a DriveItem response.
|
||||
@ -346,8 +345,10 @@ func driveItemWriter(
|
||||
ctx = clues.Add(ctx, "upload_item_id", itemID)
|
||||
|
||||
r, err := service.Client().
|
||||
DrivesById(driveID).
|
||||
ItemsById(itemID).
|
||||
Drives().
|
||||
ByDriveId(driveID).
|
||||
Items().
|
||||
ByDriveItemId(itemID).
|
||||
CreateUploadSession().
|
||||
Post(ctx, session, nil)
|
||||
if err != nil {
|
||||
@ -358,7 +359,7 @@ func driveItemWriter(
|
||||
|
||||
url := ptr.Val(r.GetUploadUrl())
|
||||
|
||||
return uploadsession.NewWriter(itemID, url, itemSize), nil
|
||||
return graph.NewLargeItemWriter(itemID, url, itemSize), nil
|
||||
}
|
||||
|
||||
// constructWebURL helper function for recreating the webURL
|
||||
|
||||
@ -15,10 +15,10 @@ import (
|
||||
"github.com/alcionai/corso/src/internal/common/dttm"
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
"github.com/alcionai/corso/src/internal/connector/graph"
|
||||
"github.com/alcionai/corso/src/internal/connector/onedrive/api"
|
||||
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/fault"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
)
|
||||
|
||||
type ItemIntegrationSuite struct {
|
||||
@ -151,7 +151,7 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
|
||||
t := suite.T()
|
||||
srv := suite.service
|
||||
|
||||
root, err := srv.Client().DrivesById(test.driveID).Root().Get(ctx, nil)
|
||||
root, err := srv.Client().Drives().ByDriveId(test.driveID).Root().Get(ctx, nil)
|
||||
require.NoError(t, err, clues.ToCore(err))
|
||||
|
||||
newFolderName := tester.DefaultTestRestoreDestination("folder").ContainerName
|
||||
@ -229,7 +229,7 @@ func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
t := suite.T()
srv := suite.service

root, err := srv.Client().DrivesById(test.driveID).Root().Get(ctx, nil)
root, err := srv.Client().Drives().ByDriveId(test.driveID).Root().Get(ctx, nil)
require.NoError(t, err, clues.ToCore(err))

// Lookup a folder that doesn't exist

@ -5,17 +5,17 @@ import (
"fmt"

"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/drive"
"github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models"

"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive/api"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)

func getParentMetadata(
@ -170,9 +170,12 @@ func UpdatePermissions(

err = graph.NewService(a).
Client().
DrivesById(driveID).
ItemsById(itemID).
PermissionsById(pid).
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(itemID).
Permissions().
ByPermissionId(pid).
Delete(graph.ConsumeNTokens(ictx, graph.PermissionsLC), nil)
if err != nil {
return graph.Wrap(ictx, err, "removing permissions")
@ -201,7 +204,7 @@ func UpdatePermissions(
continue
}

pbody := drive.NewItemsItemInvitePostRequestBody()
pbody := drives.NewItemItemsItemInvitePostRequestBody()
pbody.SetRoles(roles)

if p.Expiration != nil {

@ -13,7 +13,6 @@ import (

"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive/api"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
@ -26,6 +25,7 @@ import (
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)

// copyBufferSize is used for chunked upload

@ -91,7 +91,7 @@ func GetSite(ctx context.Context, gs graph.Servicer, siteID string) (models.Site
},
}

resp, err := gs.Client().SitesById(siteID).Get(ctx, options)
resp, err := gs.Client().Sites().BySiteId(siteID).Get(ctx, options)
if err != nil {
return nil, err
}

@ -174,11 +174,13 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {

// TestListCollection_Restore verifies the Graph Restore API for the List Collection
func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
t := suite.T()
// https://github.com/microsoftgraph/msgraph-sdk-go/issues/490
t.Skip("disabled until upstream issue with list restore is fixed.")

ctx, flush := tester.NewContext()
defer flush()

t := suite.T()

service := createTestService(t, suite.creds)
listing := spMock.ListDefault("Mock List")
testName := "MockListing"
@ -200,7 +202,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {

// Clean-Up
var (
builder = service.Client().SitesById(suite.siteID).Lists()
builder = service.Client().Sites().BySiteId(suite.siteID).Lists()
isFound bool
deleteID string
)

@ -36,7 +36,7 @@ func preFetchLists(
siteID string,
) ([]listTuple, error) {
var (
builder = gs.Client().SitesById(siteID).Lists()
builder = gs.Client().Sites().BySiteId(siteID).Lists()
options = preFetchListOptions()
listTuples = make([]listTuple, 0)
)
@ -128,7 +128,7 @@ func loadSiteLists(
err error
)

entry, err = gs.Client().SitesById(siteID).ListsById(id).Get(ctx, nil)
entry, err = gs.Client().Sites().BySiteId(siteID).Lists().ByListId(id).Get(ctx, nil)
if err != nil {
el.AddRecoverable(graph.Wrap(ctx, err, "getting site list"))
return
@ -195,7 +195,7 @@ func fetchListItems(
errs *fault.Bus,
) ([]models.ListItemable, error) {
var (
prefix = gs.Client().SitesById(siteID).ListsById(listID)
prefix = gs.Client().Sites().BySiteId(siteID).Lists().ByListId(listID)
builder = prefix.Items()
itms = make([]models.ListItemable, 0)
el = errs.Local()
@ -216,7 +216,7 @@ func fetchListItems(
break
}

newPrefix := prefix.ItemsById(ptr.Val(itm.GetId()))
newPrefix := prefix.Items().ByListItemId(ptr.Val(itm.GetId()))

fields, err := newPrefix.Fields().Get(ctx, nil)
if err != nil {
@ -252,7 +252,7 @@ func fetchColumns(
cs := make([]models.ColumnDefinitionable, 0)

if len(cTypeID) == 0 {
builder := gs.Client().SitesById(siteID).ListsById(listID).Columns()
builder := gs.Client().Sites().BySiteId(siteID).Lists().ByListId(listID).Columns()

for {
resp, err := builder.Get(ctx, nil)
@ -270,7 +270,14 @@ func fetchColumns(
builder = sites.NewItemListsItemColumnsRequestBuilder(link, gs.Adapter())
}
} else {
builder := gs.Client().SitesById(siteID).ListsById(listID).ContentTypesById(cTypeID).Columns()
builder := gs.Client().
Sites().
BySiteId(siteID).
Lists().
ByListId(listID).
ContentTypes().
ByContentTypeId(cTypeID).
Columns()

for {
resp, err := builder.Get(ctx, nil)
@ -307,7 +314,7 @@ func fetchContentTypes(
var (
el = errs.Local()
cTypes = make([]models.ContentTypeable, 0)
builder = gs.Client().SitesById(siteID).ListsById(listID).ContentTypes()
builder = gs.Client().Sites().BySiteId(siteID).Lists().ByListId(listID).ContentTypes()
)

for {
@ -363,8 +370,15 @@ func fetchColumnLinks(
siteID, listID, cTypeID string,
) ([]models.ColumnLinkable, error) {
var (
builder = gs.Client().SitesById(siteID).ListsById(listID).ContentTypesById(cTypeID).ColumnLinks()
links = make([]models.ColumnLinkable, 0)
builder = gs.Client().
Sites().
BySiteId(siteID).
Lists().
ByListId(listID).
ContentTypes().
ByContentTypeId(cTypeID).
ColumnLinks()
links = make([]models.ColumnLinkable, 0)
)

for {
@ -396,7 +410,7 @@ func DeleteList(
gs graph.Servicer,
siteID, listID string,
) error {
err := gs.Client().SitesById(siteID).ListsById(listID).Delete(ctx, nil)
err := gs.Client().Sites().BySiteId(siteID).Lists().ByListId(listID).Delete(ctx, nil)
if err != nil {
return graph.Wrap(ctx, err, "deleting list")
}

@ -184,11 +184,7 @@ func restoreListItem(
newList.SetItems(contents)

// Restore the list to the M365 back store
restoredList, err := service.
Client().
SitesById(siteID).
Lists().
Post(ctx, newList, nil)
restoredList, err := service.Client().Sites().BySiteId(siteID).Lists().Post(ctx, newList, nil)
if err != nil {
return dii, graph.Wrap(ctx, err, "restoring list")
}
@ -198,8 +194,10 @@ func restoreListItem(
if len(contents) > 0 {
for _, lItem := range contents {
_, err := service.Client().
SitesById(siteID).
ListsById(ptr.Val(restoredList.GetId())).
Sites().
BySiteId(siteID).
Lists().
ByListId(ptr.Val(restoredList.GetId())).
Items().
Post(ctx, lItem, nil)
if err != nil {

@ -213,7 +213,7 @@ func (suite *DataSupportSuite) TestCreatePageFromBytes() {
pg.SetWebUrl(&title)

writer := kioser.NewJsonSerializationWriter()
err := pg.Serialize(writer)
err := writer.WriteObjectValue("", pg)
require.NoError(t, err, clues.ToCore(err))

byteArray, err := writer.GetSerializedContent()
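The serialization change above moves from calling Serialize on the model to driving the kiota JSON writer directly with WriteObjectValue. A minimal sketch of that pattern, assuming kioser aliases github.com/microsoft/kiota-serialization-json-go as it does in this test file:

// sketch of serializing any kiota model to JSON bytes; not part of the diff.
package sketch

import (
	absser "github.com/microsoft/kiota-abstractions-go/serialization"
	kioser "github.com/microsoft/kiota-serialization-json-go"
)

func serializeParsable(item absser.Parsable) ([]byte, error) {
	writer := kioser.NewJsonSerializationWriter()
	defer writer.Close()

	// an empty key writes the object as the top-level JSON value
	if err := writer.WriteObjectValue("", item); err != nil {
		return nil, err
	}

	return writer.GetSerializedContent()
}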
@ -231,6 +231,11 @@ func (suite *DataSupportSuite) TestCreatePageFromBytes() {
result, err := CreatePageFromBytes(test.getBytes(t))
test.checkError(t, err)
test.isNil(t, result)
if result != nil {
assert.Equal(t, "Tested", *result.GetName(), "name")
assert.Equal(t, "Tested", *result.GetTitle(), "title")
assert.Equal(t, "Tested", *result.GetWebUrl(), "webURL")
}
})
}
}

@ -28,13 +28,15 @@ const (
tenantIDDeprecated = "m365_tenant_hash_deprecated"

// Event Keys
CorsoStart = "Corso Start"
RepoInit = "Repo Init"
RepoConnect = "Repo Connect"
BackupStart = "Backup Start"
BackupEnd = "Backup End"
RestoreStart = "Restore Start"
RestoreEnd = "Restore End"
CorsoStart = "Corso Start"
RepoInit = "Repo Init"
RepoConnect = "Repo Connect"
BackupStart = "Backup Start"
BackupEnd = "Backup End"
RestoreStart = "Restore Start"
RestoreEnd = "Restore End"
MaintenanceStart = "Maintenance Start"
MaintenanceEnd = "Maintenance End"

// Event Data Keys
BackupCreateTime = "backup_creation_time"

@ -9,7 +9,7 @@ import (

"github.com/alcionai/clues"
"github.com/google/uuid"
"github.com/microsoftgraph/msgraph-sdk-go/drive"
"github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/users"
"github.com/stretchr/testify/assert"
@ -23,12 +23,10 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector"
"github.com/alcionai/corso/src/internal/connector/exchange"
exapi "github.com/alcionai/corso/src/internal/connector/exchange/api"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/mock"
"github.com/alcionai/corso/src/internal/connector/onedrive"
odapi "github.com/alcionai/corso/src/internal/connector/onedrive/api"
odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/connector/support"
@ -51,6 +49,7 @@ import (
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
selTD "github.com/alcionai/corso/src/pkg/selectors/testdata"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/store"
)

@ -763,7 +762,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
m365, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))

ac, err := exapi.NewClient(m365)
ac, err := api.NewClient(m365)
require.NoError(t, err, clues.ToCore(err))

// generate 3 new folders with two items each.
@ -982,8 +981,10 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont

_, err := gc.Service.
Client().
UsersById(uidn.ID()).
MailFoldersById(from.containerID).
Users().
ByUserId(uidn.ID()).
MailFolders().
ByMailFolderId(from.containerID).
Move().
Post(ctx, body, nil)
require.NoError(t, err, clues.ToCore(err))
@ -1078,7 +1079,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
name: "rename a folder",
updateUserData: func(t *testing.T) {
for category, d := range dataset {
cli := gc.Service.Client().UsersById(uidn.ID())
cli := gc.Service.Client().Users().ByUserId(uidn.ID())
containerID := d.dests[container3].containerID
newLoc := containerRename

@ -1098,7 +1099,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont

switch category {
case path.EmailCategory:
cmf := cli.MailFoldersById(containerID)
cmf := cli.MailFolders().ByMailFolderId(containerID)

body, err := cmf.Get(ctx, nil)
require.NoError(t, err, "getting mail folder", clues.ToCore(err))
@ -1108,7 +1109,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
require.NoError(t, err, "updating mail folder name", clues.ToCore(err))

case path.ContactsCategory:
ccf := cli.ContactFoldersById(containerID)
ccf := cli.ContactFolders().ByContactFolderId(containerID)

body, err := ccf.Get(ctx, nil)
require.NoError(t, err, "getting contact folder", clues.ToCore(err))
@ -1118,7 +1119,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
require.NoError(t, err, "updating contact folder name", clues.ToCore(err))

case path.EventsCategory:
cbi := cli.CalendarsById(containerID)
cbi := cli.Calendars().ByCalendarId(containerID)

body, err := cbi.Get(ctx, nil)
require.NoError(t, err, "getting calendar", clues.ToCore(err))
@ -1141,7 +1142,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
updateUserData: func(t *testing.T) {
for category, d := range dataset {
containerID := d.dests[container1].containerID
cli := gc.Service.Client().UsersById(uidn.ID())
cli := gc.Service.Client().Users().ByUserId(uidn.ID())

switch category {
case path.EmailCategory:
@ -1149,7 +1150,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
body, err := support.CreateMessageFromBytes(itemData)
require.NoError(t, err, "transforming mail bytes to messageable", clues.ToCore(err))

itm, err := cli.MailFoldersById(containerID).Messages().Post(ctx, body, nil)
itm, err := cli.MailFolders().ByMailFolderId(containerID).Messages().Post(ctx, body, nil)
require.NoError(t, err, "posting email item", clues.ToCore(err))

expectDeets.AddItem(
@ -1162,7 +1163,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
body, err := support.CreateContactFromBytes(itemData)
require.NoError(t, err, "transforming contact bytes to contactable", clues.ToCore(err))

itm, err := cli.ContactFoldersById(containerID).Contacts().Post(ctx, body, nil)
itm, err := cli.ContactFolders().ByContactFolderId(containerID).Contacts().Post(ctx, body, nil)
require.NoError(t, err, "posting contact item", clues.ToCore(err))

expectDeets.AddItem(
@ -1175,7 +1176,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
body, err := support.CreateEventFromBytes(itemData)
require.NoError(t, err, "transforming event bytes to eventable", clues.ToCore(err))

itm, err := cli.CalendarsById(containerID).Events().Post(ctx, body, nil)
itm, err := cli.Calendars().ByCalendarId(containerID).Events().Post(ctx, body, nil)
require.NoError(t, err, "posting events item", clues.ToCore(err))

expectDeets.AddItem(
@ -1195,7 +1196,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
updateUserData: func(t *testing.T) {
for category, d := range dataset {
containerID := d.dests[container1].containerID
cli := gc.Service.Client().UsersById(uidn.ID())
cli := gc.Service.Client().Users().ByUserId(uidn.ID())

switch category {
case path.EmailCategory:
@ -1203,7 +1204,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
require.NoError(t, err, "getting message ids", clues.ToCore(err))
require.NotEmpty(t, ids, "message ids in folder")

err = cli.MessagesById(ids[0]).Delete(ctx, nil)
err = cli.Messages().ByMessageId(ids[0]).Delete(ctx, nil)
require.NoError(t, err, "deleting email item", clues.ToCore(err))

expectDeets.RemoveItem(
@ -1216,7 +1217,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
require.NoError(t, err, "getting contact ids", clues.ToCore(err))
require.NotEmpty(t, ids, "contact ids in folder")

err = cli.ContactsById(ids[0]).Delete(ctx, nil)
err = cli.Contacts().ByContactId(ids[0]).Delete(ctx, nil)
require.NoError(t, err, "deleting contact item", clues.ToCore(err))

expectDeets.RemoveItem(
@ -1229,7 +1230,7 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
require.NoError(t, err, "getting event ids", clues.ToCore(err))
require.NotEmpty(t, ids, "event ids in folder")

err = cli.CalendarsById(ids[0]).Delete(ctx, nil)
err = cli.Calendars().ByCalendarId(ids[0]).Delete(ctx, nil)
require.NoError(t, err, "deleting calendar", clues.ToCore(err))

expectDeets.RemoveItem(
@ -1331,7 +1332,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalOneDrive() {
ctx context.Context,
gs graph.Servicer,
) string {
d, err := odapi.GetUsersDrive(ctx, gs, suite.user)
d, err := api.GetUsersDrive(ctx, gs, suite.user)
if err != nil {
err = graph.Wrap(ctx, err, "retrieving default user drive").
With("user", suite.user)
@ -1370,7 +1371,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalSharePoint() {
ctx context.Context,
gs graph.Servicer,
) string {
d, err := odapi.GetSitesDefaultDrive(ctx, gs, suite.site)
d, err := api.GetSitesDefaultDrive(ctx, gs, suite.site)
if err != nil {
err = graph.Wrap(ctx, err, "retrieving default site drive").
With("site", suite.site)
@ -1499,8 +1500,8 @@ func runDriveIncrementalTest(
// Use path-based indexing to get the folder's ID. This is sourced from the
// onedrive package `getFolder` function.
itemURL := fmt.Sprintf("https://graph.microsoft.com/v1.0/drives/%s/root:/%s", driveID, destName)
resp, err := drive.
NewItemsDriveItemItemRequestBuilder(itemURL, gc.Service.Adapter()).
resp, err := drives.
NewItemItemsDriveItemItemRequestBuilder(itemURL, gc.Service.Adapter()).
Get(ctx, nil)
require.NoError(t, err, "getting drive folder ID", "folder name", destName, clues.ToCore(err))

@ -1659,10 +1660,12 @@ func runDriveIncrementalTest(
{
name: "update contents of a file",
updateFiles: func(t *testing.T) {
err := gc.Service.
_, err := gc.Service.
Client().
DrivesById(driveID).
ItemsById(ptr.Val(newFile.GetId())).
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(ptr.Val(newFile.GetId())).
Content().
Put(ctx, []byte("new content"), nil)
require.NoErrorf(t, err, "updating file contents: %v", clues.ToCore(err))
@ -1685,8 +1688,10 @@ func runDriveIncrementalTest(

_, err := gc.Service.
Client().
DrivesById(driveID).
ItemsById(ptr.Val(newFile.GetId())).
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(ptr.Val(newFile.GetId())).
Patch(ctx, driveItem, nil)
require.NoError(t, err, "renaming file %v", clues.ToCore(err))
},
@ -1707,8 +1712,10 @@ func runDriveIncrementalTest(

_, err := gc.Service.
Client().
DrivesById(driveID).
ItemsById(ptr.Val(newFile.GetId())).
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(ptr.Val(newFile.GetId())).
Patch(ctx, driveItem, nil)
require.NoErrorf(t, err, "moving file between folders %v", clues.ToCore(err))

@ -1728,8 +1735,10 @@ func runDriveIncrementalTest(
// https://github.com/alcionai/corso/issues/2707
err = newDeleteServicer(t).
Client().
DrivesById(driveID).
ItemsById(ptr.Val(newFile.GetId())).
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(ptr.Val(newFile.GetId())).
Delete(ctx, nil)
require.NoErrorf(t, err, "deleting file %v", clues.ToCore(err))

@ -1752,8 +1761,10 @@ func runDriveIncrementalTest(

_, err := gc.Service.
Client().
DrivesById(driveID).
ItemsById(child).
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(child).
Patch(ctx, driveItem, nil)
require.NoError(t, err, "moving folder", clues.ToCore(err))

@ -1779,8 +1790,10 @@ func runDriveIncrementalTest(

_, err := gc.Service.
Client().
DrivesById(driveID).
ItemsById(child).
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(child).
Patch(ctx, driveItem, nil)
require.NoError(t, err, "renaming folder", clues.ToCore(err))

@ -1802,8 +1815,10 @@ func runDriveIncrementalTest(
// https://github.com/alcionai/corso/issues/2707
err = newDeleteServicer(t).
Client().
DrivesById(driveID).
ItemsById(container).
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(container).
Delete(ctx, nil)
require.NoError(t, err, "deleting folder", clues.ToCore(err))

@ -1833,7 +1848,7 @@ func runDriveIncrementalTest(
"https://graph.microsoft.com/v1.0/drives/%s/root:/%s",
driveID,
container3)
resp, err := drive.NewItemsDriveItemItemRequestBuilder(itemURL, gc.Service.Adapter()).
resp, err := drives.NewItemItemsDriveItemItemRequestBuilder(itemURL, gc.Service.Adapter()).
Get(ctx, nil)
require.NoError(t, err, "getting drive folder ID", "folder name", container3, clues.ToCore(err))

@ -7,6 +7,7 @@ import (
"github.com/alcionai/clues"

"github.com/alcionai/corso/src/internal/common/crash"
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/events"
"github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/stats"
@ -49,20 +50,45 @@ func (op *MaintenanceOperation) Run(ctx context.Context) (err error) {
if crErr := crash.Recovery(ctx, recover(), "maintenance"); crErr != nil {
err = crErr
}

// TODO(ashmrtn): Send success/failure usage stat?

op.Results.CompletedAt = time.Now()
}()

op.Results.StartedAt = time.Now()

// TODO(ashmrtn): Send usage statistics?
op.bus.Event(
ctx,
events.MaintenanceStart,
map[string]any{
events.StartTime: op.Results.StartedAt,
})

err = op.operation.kopia.RepoMaintenance(ctx, op.mOpts)
defer func() {
op.bus.Event(
ctx,
events.MaintenanceEnd,
map[string]any{
events.StartTime: op.Results.StartedAt,
events.Duration: op.Results.CompletedAt.Sub(op.Results.StartedAt),
events.EndTime: dttm.Format(op.Results.CompletedAt),
events.Status: op.Status.String(),
events.Resources: op.mOpts.Type.String(),
})
}()

return op.do(ctx)
}

func (op *MaintenanceOperation) do(ctx context.Context) error {
defer func() {
op.Results.CompletedAt = time.Now()
}()

err := op.operation.kopia.RepoMaintenance(ctx, op.mOpts)
if err != nil {
op.Status = Failed
return clues.Wrap(err, "running maintenance operation")
}

op.Status = Completed

return nil
}
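The refactor above splits event emission from the kopia call: Run stamps StartedAt, emits MaintenanceStart, registers the MaintenanceEnd emission in a defer, and delegates to do(), whose own defer stamps CompletedAt. Deferred calls unwind innermost-first, so CompletedAt is already set when Run's event defer reads it. A minimal standalone sketch of that ordering, with placeholder names:

// sketch of the defer ordering the refactor relies on; not part of the diff.
package main

import (
	"fmt"
	"time"
)

type results struct{ completedAt time.Time }

// do mirrors MaintenanceOperation.do: its defer stamps the completion
// time before control returns to the caller.
func do(r *results) {
	defer func() { r.completedAt = time.Now() }()
	// ... repository maintenance would run here ...
}

// run mirrors MaintenanceOperation.Run: defers unwind LIFO, so the
// end-event defer (registered last) fires first and already sees the
// completedAt value that do() stamped.
func run(r *results) {
	defer fmt.Println("crash recovery runs last")
	defer func() { fmt.Printf("emit end event, completedAt=%v\n", r.completedAt) }()

	do(r)
}

func main() {
	run(&results{})
}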
@ -17,7 +17,6 @@ import (
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/mock"
"github.com/alcionai/corso/src/internal/connector/onedrive/api"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/events"
evmock "github.com/alcionai/corso/src/internal/events/mock"
@ -30,6 +29,7 @@ import (
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/repository"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/store"
)

@ -108,8 +108,8 @@ func (suite *StreamStoreIntgSuite) TestStreamer() {
bus := fault.New(false)
bus.Fail(clues.New("foo"))
bus.AddRecoverable(clues.New("bar"))
bus.AddRecoverable(fault.FileErr(clues.New("file"), "file-id", "file-name", map[string]any{"foo": "bar"}))
bus.AddSkip(fault.FileSkip(fault.SkipMalware, "file-id", "file-name", map[string]any{"foo": "bar"}))
bus.AddRecoverable(fault.FileErr(clues.New("file"), "ns", "file-id", "file-name", map[string]any{"foo": "bar"}))
bus.AddSkip(fault.FileSkip(fault.SkipMalware, "ns", "file-id", "file-name", map[string]any{"foo": "bar"}))

fe := bus.Errors()
return fe
@ -137,8 +137,8 @@ func (suite *StreamStoreIntgSuite) TestStreamer() {
bus := fault.New(false)
bus.Fail(clues.New("foo"))
bus.AddRecoverable(clues.New("bar"))
bus.AddRecoverable(fault.FileErr(clues.New("file"), "file-id", "file-name", map[string]any{"foo": "bar"}))
bus.AddSkip(fault.FileSkip(fault.SkipMalware, "file-id", "file-name", map[string]any{"foo": "bar"}))
bus.AddRecoverable(fault.FileErr(clues.New("file"), "ns", "file-id", "file-name", map[string]any{"foo": "bar"}))
bus.AddSkip(fault.FileSkip(fault.SkipMalware, "ns", "file-id", "file-name", map[string]any{"foo": "bar"}))

fe := bus.Errors()
return fe

@ -26,6 +26,11 @@ const (
MetadataMaintenance // metadata
)

var StringToMaintenanceType = map[string]MaintenanceType{
CompleteMaintenance.String(): CompleteMaintenance,
MetadataMaintenance.String(): MetadataMaintenance,
}

type MaintenanceSafety int

// Can't be reordered as we rely on iota for numbering.
@ -33,5 +38,9 @@ type MaintenanceSafety int
//go:generate stringer -type=MaintenanceSafety -linecomment
const (
FullMaintenanceSafety MaintenanceSafety = iota
//nolint:lll
// Use only if there's no other kopia instances accessing the repo and the
// storage backend is strongly consistent.
// https://github.com/kopia/kopia/blob/f9de453efc198b6e993af8922f953a7e5322dc5f/repo/maintenance/maintenance_safety.go#L42
NoMaintenanceSafety
)

@ -428,6 +428,7 @@ func ExampleBus_AddSkip() {
// error. Our only option is to skip it.
errs.AddSkip(fault.FileSkip(
fault.SkipMalware,
"deduplication-namespace",
"file-id",
"file-name",
map[string]any{"foo": "bar"},

@ -204,7 +204,7 @@ type Errors struct {

// Items are the reduction of all errors (both the failure and the
// recovered values) in the Errors struct into a slice of items,
// deduplicated by their ID.
// deduplicated by their Namespace + ID.
Items []Item `json:"items"`

// Skipped is the accumulation of skipped items. Skipped items
@ -218,7 +218,8 @@ type Errors struct {
}

// itemsIn reduces all errors (both the failure and recovered values)
// in the Errors struct into a slice of items, deduplicated by their ID.
// in the Errors struct into a slice of items, deduplicated by their
// Namespace + ID.
// Any non-item error is serialized to a clues.ErrCore and returned in
// the second list.
func itemsIn(failure error, recovered []error) ([]Item, []*clues.ErrCore) {
@ -234,12 +235,12 @@ func itemsIn(failure error, recovered []error) ([]Item, []*clues.ErrCore) {
continue
}

is[ie.ID] = *ie
is[ie.dedupeID()] = *ie
}

var ie *Item
if errors.As(failure, &ie) {
is[ie.ID] = *ie
is[ie.dedupeID()] = *ie
}

return maps.Values(is), non

@ -193,7 +193,7 @@ func (suite *FaultErrorsUnitSuite) TestAddSkip() {
n.AddRecoverable(assert.AnError)
assert.Len(t, n.Skipped(), 0)

n.AddSkip(fault.OwnerSkip(fault.SkipMalware, "id", "name", nil))
n.AddSkip(fault.OwnerSkip(fault.SkipMalware, "ns", "id", "name", nil))
assert.Len(t, n.Skipped(), 1)
}

@ -262,12 +262,12 @@ func (suite *FaultErrorsUnitSuite) TestErrors_Items() {
name: "failure item",
errs: func() *fault.Errors {
b := fault.New(false)
b.Fail(fault.OwnerErr(ae, "id", "name", addtl))
b.Fail(fault.OwnerErr(ae, "ns", "id", "name", addtl))
b.AddRecoverable(ae)

return b.Errors()
},
expectItems: []fault.Item{*fault.OwnerErr(ae, "id", "name", addtl)},
expectItems: []fault.Item{*fault.OwnerErr(ae, "ns", "id", "name", addtl)},
expectRecoverable: noncore,
},
{
@ -275,25 +275,40 @@ func (suite *FaultErrorsUnitSuite) TestErrors_Items() {
errs: func() *fault.Errors {
b := fault.New(false)
b.Fail(ae)
b.AddRecoverable(fault.OwnerErr(ae, "id", "name", addtl))
b.AddRecoverable(fault.OwnerErr(ae, "ns", "id", "name", addtl))

return b.Errors()
},
expectItems: []fault.Item{*fault.OwnerErr(ae, "id", "name", addtl)},
expectItems: []fault.Item{*fault.OwnerErr(ae, "ns", "id", "name", addtl)},
expectRecoverable: []*clues.ErrCore{},
},
{
name: "two items",
errs: func() *fault.Errors {
b := fault.New(false)
b.Fail(fault.OwnerErr(ae, "oid", "name", addtl))
b.AddRecoverable(fault.FileErr(ae, "fid", "name", addtl))
b.Fail(fault.OwnerErr(ae, "ns", "oid", "name", addtl))
b.AddRecoverable(fault.FileErr(ae, "ns", "fid", "name", addtl))

return b.Errors()
},
expectItems: []fault.Item{
*fault.OwnerErr(ae, "oid", "name", addtl),
*fault.FileErr(ae, "fid", "name", addtl),
*fault.OwnerErr(ae, "ns", "oid", "name", addtl),
*fault.FileErr(ae, "ns", "fid", "name", addtl),
},
expectRecoverable: []*clues.ErrCore{},
},
{
name: "two items - diff namespace same id",
errs: func() *fault.Errors {
b := fault.New(false)
b.Fail(fault.OwnerErr(ae, "ns", "id", "name", addtl))
b.AddRecoverable(fault.FileErr(ae, "ns2", "id", "name", addtl))

return b.Errors()
},
expectItems: []fault.Item{
*fault.OwnerErr(ae, "ns", "id", "name", addtl),
*fault.FileErr(ae, "ns2", "id", "name", addtl),
},
expectRecoverable: []*clues.ErrCore{},
},
@ -301,13 +316,13 @@ func (suite *FaultErrorsUnitSuite) TestErrors_Items() {
name: "duplicate items - failure priority",
errs: func() *fault.Errors {
b := fault.New(false)
b.Fail(fault.OwnerErr(ae, "id", "name", addtl))
b.AddRecoverable(fault.FileErr(ae, "id", "name", addtl))
b.Fail(fault.OwnerErr(ae, "ns", "id", "name", addtl))
b.AddRecoverable(fault.FileErr(ae, "ns", "id", "name", addtl))

return b.Errors()
},
expectItems: []fault.Item{
*fault.OwnerErr(ae, "id", "name", addtl),
*fault.OwnerErr(ae, "ns", "id", "name", addtl),
},
expectRecoverable: []*clues.ErrCore{},
},
@ -316,13 +331,13 @@ func (suite *FaultErrorsUnitSuite) TestErrors_Items() {
errs: func() *fault.Errors {
b := fault.New(false)
b.Fail(ae)
b.AddRecoverable(fault.FileErr(ae, "fid", "name", addtl))
b.AddRecoverable(fault.FileErr(ae, "fid", "name2", addtl))
b.AddRecoverable(fault.FileErr(ae, "ns", "fid", "name", addtl))
b.AddRecoverable(fault.FileErr(ae, "ns", "fid", "name2", addtl))

return b.Errors()
},
expectItems: []fault.Item{
*fault.FileErr(ae, "fid", "name2", addtl),
*fault.FileErr(ae, "ns", "fid", "name2", addtl),
},
expectRecoverable: []*clues.ErrCore{},
},
@ -331,13 +346,13 @@ func (suite *FaultErrorsUnitSuite) TestErrors_Items() {
errs: func() *fault.Errors {
b := fault.New(false)
b.Fail(ae)
b.AddRecoverable(fault.FileErr(ae, "fid", "name", addtl))
b.AddRecoverable(fault.FileErr(ae, "ns", "fid", "name", addtl))
b.AddRecoverable(ae)

return b.Errors()
},
expectItems: []fault.Item{
*fault.FileErr(ae, "fid", "name", addtl),
*fault.FileErr(ae, "ns", "fid", "name", addtl),
},
expectRecoverable: noncore,
},

@ -49,6 +49,12 @@ var (
// by the end user (cli or sdk) for surfacing human-readable and
// identifiable points of failure.
type Item struct {
// deduplication namespace; the maximally-unique boundary of the
// item ID. The scope of this boundary depends on the service.
// ex: exchange items are unique within their category, drive items
// are only unique within a given drive.
Namespace string `json:"namespace"`

// deduplication identifier; the ID of the observed item.
ID string `json:"id"`

@ -72,6 +78,12 @@ type Item struct {
Additional map[string]any `json:"additional"`
}

// dedupeID is the id used to deduplicate items when aggregating
// errors in fault.Errors().
func (i *Item) dedupeID() string {
return i.Namespace + i.ID
}
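dedupeID concatenates Namespace and ID, so two items that share an ID but live in different namespaces (for example, the same item ID in two drives) both survive the map-keyed reduction in itemsIn. A small sketch of the effect, with placeholder values:

// sketch of namespace-scoped deduplication; not part of the diff.
package main

import "fmt"

type item struct{ namespace, id string }

func (i item) dedupeKey() string { return i.namespace + i.id }

func main() {
	// the same ID in two namespaces yields two distinct keys
	is := map[string]item{}
	for _, it := range []item{
		{"drive-1", "item-a"},
		{"drive-2", "item-a"},
		{"drive-1", "item-a"}, // true duplicate: collapses
	} {
		is[it.dedupeKey()] = it
	}

	fmt.Println(len(is)) // 2
}

One caveat of bare concatenation: key pairs like ("ab", "c") and ("a", "bc") produce the same string; a separator between namespace and ID would remove that ambiguity.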
// Error complies with the error interface.
func (i *Item) Error() string {
if i == nil {
@ -111,23 +123,24 @@ func (i Item) Values() []string {
}

// ContainerErr produces a Container-type Item for tracking erroneous items
func ContainerErr(cause error, id, name string, addtl map[string]any) *Item {
return itemErr(ContainerType, cause, id, name, addtl)
func ContainerErr(cause error, namespace, id, name string, addtl map[string]any) *Item {
return itemErr(ContainerType, cause, namespace, id, name, addtl)
}

// FileErr produces a File-type Item for tracking erroneous items.
func FileErr(cause error, id, name string, addtl map[string]any) *Item {
return itemErr(FileType, cause, id, name, addtl)
func FileErr(cause error, namespace, id, name string, addtl map[string]any) *Item {
return itemErr(FileType, cause, namespace, id, name, addtl)
}

// OwnerErr produces a ResourceOwner-type Item for tracking erroneous items.
func OwnerErr(cause error, id, name string, addtl map[string]any) *Item {
return itemErr(ResourceOwnerType, cause, id, name, addtl)
func OwnerErr(cause error, namespace, id, name string, addtl map[string]any) *Item {
return itemErr(ResourceOwnerType, cause, namespace, id, name, addtl)
}

// itemErr produces a Item of the provided type for tracking erroneous items.
func itemErr(t itemType, cause error, id, name string, addtl map[string]any) *Item {
func itemErr(t itemType, cause error, namespace, id, name string, addtl map[string]any) *Item {
return &Item{
Namespace: namespace,
ID: id,
Name: name,
Type: t,
@ -228,24 +241,25 @@ func (s Skipped) Values() []string {
}

// ContainerSkip produces a Container-kind Item for tracking skipped items.
func ContainerSkip(cause skipCause, id, name string, addtl map[string]any) *Skipped {
return itemSkip(ContainerType, cause, id, name, addtl)
func ContainerSkip(cause skipCause, namespace, id, name string, addtl map[string]any) *Skipped {
return itemSkip(ContainerType, cause, namespace, id, name, addtl)
}

// FileSkip produces a File-kind Item for tracking skipped items.
func FileSkip(cause skipCause, id, name string, addtl map[string]any) *Skipped {
return itemSkip(FileType, cause, id, name, addtl)
func FileSkip(cause skipCause, namespace, id, name string, addtl map[string]any) *Skipped {
return itemSkip(FileType, cause, namespace, id, name, addtl)
}

// OwnerSkip produces a ResourceOwner-kind Item for tracking skipped items.
func OwnerSkip(cause skipCause, id, name string, addtl map[string]any) *Skipped {
return itemSkip(ResourceOwnerType, cause, id, name, addtl)
func OwnerSkip(cause skipCause, namespace, id, name string, addtl map[string]any) *Skipped {
return itemSkip(ResourceOwnerType, cause, namespace, id, name, addtl)
}

// itemSkip produces a Item of the provided type for tracking skipped items.
func itemSkip(t itemType, cause skipCause, id, name string, addtl map[string]any) *Skipped {
func itemSkip(t itemType, cause skipCause, namespace, id, name string, addtl map[string]any) *Skipped {
return &Skipped{
Item: Item{
Namespace: namespace,
ID: id,
Name: name,
Type: t,

@ -36,9 +36,10 @@ func (suite *ItemUnitSuite) TestItem_Error() {
func (suite *ItemUnitSuite) TestContainerErr() {
t := suite.T()
addtl := map[string]any{"foo": "bar"}
i := ContainerErr(clues.New("foo"), "id", "name", addtl)
i := ContainerErr(clues.New("foo"), "ns", "id", "name", addtl)

expect := Item{
Namespace: "ns",
ID: "id",
Name: "name",
Type: ContainerType,
@ -52,9 +53,10 @@ func (suite *ItemUnitSuite) TestContainerErr() {
func (suite *ItemUnitSuite) TestFileErr() {
t := suite.T()
addtl := map[string]any{"foo": "bar"}
i := FileErr(clues.New("foo"), "id", "name", addtl)
i := FileErr(clues.New("foo"), "ns", "id", "name", addtl)

expect := Item{
Namespace: "ns",
ID: "id",
Name: "name",
Type: FileType,
@ -68,9 +70,10 @@ func (suite *ItemUnitSuite) TestFileErr() {
func (suite *ItemUnitSuite) TestOwnerErr() {
t := suite.T()
addtl := map[string]any{"foo": "bar"}
i := OwnerErr(clues.New("foo"), "id", "name", addtl)
i := OwnerErr(clues.New("foo"), "ns", "id", "name", addtl)

expect := Item{
Namespace: "ns",
ID: "id",
Name: "name",
Type: ResourceOwnerType,
@ -127,17 +130,17 @@ func (suite *ItemUnitSuite) TestItem_HeadersValues() {
}{
{
name: "file",
item: FileErr(assert.AnError, "id", "name", addtl),
item: FileErr(assert.AnError, "ns", "id", "name", addtl),
expect: []string{"Error", FileType.Printable(), "name", "cname", cause},
},
{
name: "container",
item: ContainerErr(assert.AnError, "id", "name", addtl),
item: ContainerErr(assert.AnError, "ns", "id", "name", addtl),
expect: []string{"Error", ContainerType.Printable(), "name", "cname", cause},
},
{
name: "owner",
item: OwnerErr(assert.AnError, "id", "name", nil),
item: OwnerErr(assert.AnError, "ns", "id", "name", nil),
expect: []string{"Error", ResourceOwnerType.Printable(), "name", "", cause},
},
}
@ -169,9 +172,10 @@ func (suite *ItemUnitSuite) TestSkipped_String() {
func (suite *ItemUnitSuite) TestContainerSkip() {
t := suite.T()
addtl := map[string]any{"foo": "bar"}
i := ContainerSkip(SkipMalware, "id", "name", addtl)
i := ContainerSkip(SkipMalware, "ns", "id", "name", addtl)

expect := Item{
Namespace: "ns",
ID: "id",
Name: "name",
Type: ContainerType,
@ -185,9 +189,10 @@ func (suite *ItemUnitSuite) TestContainerSkip() {
func (suite *ItemUnitSuite) TestFileSkip() {
t := suite.T()
addtl := map[string]any{"foo": "bar"}
i := FileSkip(SkipMalware, "id", "name", addtl)
i := FileSkip(SkipMalware, "ns", "id", "name", addtl)

expect := Item{
Namespace: "ns",
ID: "id",
Name: "name",
Type: FileType,
@ -201,9 +206,10 @@ func (suite *ItemUnitSuite) TestFileSkip() {
func (suite *ItemUnitSuite) TestOwnerSkip() {
t := suite.T()
addtl := map[string]any{"foo": "bar"}
i := OwnerSkip(SkipMalware, "id", "name", addtl)
i := OwnerSkip(SkipMalware, "ns", "id", "name", addtl)

expect := Item{
Namespace: "ns",
ID: "id",
Name: "name",
Type: ResourceOwnerType,
@ -227,17 +233,17 @@ func (suite *ItemUnitSuite) TestSkipped_HeadersValues() {
}{
{
name: "file",
skip: FileSkip(SkipMalware, "id", "name", addtl),
skip: FileSkip(SkipMalware, "ns", "id", "name", addtl),
expect: []string{"Skip", FileType.Printable(), "name", "cname", string(SkipMalware)},
},
{
name: "container",
skip: ContainerSkip(SkipMalware, "id", "name", addtl),
skip: ContainerSkip(SkipMalware, "ns", "id", "name", addtl),
expect: []string{"Skip", ContainerType.Printable(), "name", "cname", string(SkipMalware)},
},
{
name: "owner",
skip: OwnerSkip(SkipMalware, "id", "name", nil),
skip: OwnerSkip(SkipMalware, "ns", "id", "name", nil),
expect: []string{"Skip", ResourceOwnerType.Printable(), "name", "", string(SkipMalware)},
},
}

src/pkg/fault/testdata/testdata.go (vendored)
@ -19,7 +19,7 @@ func MakeErrors(failure, recovered, skipped bool) fault.Errors {
}

if skipped {
fe.Skipped = []fault.Skipped{*fault.FileSkip(fault.SkipMalware, "id", "name", nil)}
fe.Skipped = []fault.Skipped{*fault.FileSkip(fault.SkipMalware, "ns", "id", "name", nil)}
}

return fe

@ -1,37 +0,0 @@
// Code generated by "stringer -type=comparator -linecomment"; DO NOT EDIT.

package filters

import "strconv"

func _() {
// An "invalid array index" compiler error signifies that the constant values have changed.
// Re-run the stringer command to generate them again.
var x [1]struct{}
_ = x[UnknownComparator-0]
_ = x[EqualTo-1]
_ = x[GreaterThan-2]
_ = x[LessThan-3]
_ = x[TargetContains-4]
_ = x[TargetIn-5]
_ = x[Passes-6]
_ = x[Fails-7]
_ = x[IdentityValue-8]
_ = x[TargetPrefixes-9]
_ = x[TargetSuffixes-10]
_ = x[TargetPathPrefix-11]
_ = x[TargetPathContains-12]
_ = x[TargetPathSuffix-13]
_ = x[TargetPathEquals-14]
}

const _comparator_name = "UnknownComparisonEQGTLTContINPassFailIdentityPfxSfxPathPfxPathContPathSfxPathEQ"

var _comparator_index = [...]uint8{0, 17, 19, 21, 23, 27, 29, 33, 37, 45, 48, 51, 58, 66, 73, 79}

func (i comparator) String() string {
if i < 0 || i >= comparator(len(_comparator_index)-1) {
return "comparator(" + strconv.FormatInt(int64(i), 10) + ")"
}
return _comparator_name[_comparator_index[i]:_comparator_index[i+1]]
}
@ -11,41 +11,47 @@ import (
"github.com/alcionai/corso/src/pkg/path"
)

type comparator int
type comparator string

//go:generate stringer -type=comparator -linecomment
const (
UnknownComparator comparator = iota // UnknownComparison
// a == b
EqualTo // EQ
UnknownComparator comparator = ""
// norm(a) == norm(b)
EqualTo = "EQ"
// a === b
StrictEqualTo = "StrictEQ"
// a > b
GreaterThan // GT
GreaterThan = "GT"
// a < b
LessThan // LT
LessThan = "LT"
// "foo,bar,baz" contains "foo"
TargetContains // Cont
TargetContains = "Cont"
// "foo" is found in "foo,bar,baz"
TargetIn // IN
TargetIn = "IN"
// always passes
Passes // Pass
Passes = "Pass"
// always fails
Fails // Fail
Fails = "Fail"
// passthrough for the target
IdentityValue // Identity
IdentityValue = "Identity"
// "foo" is a prefix of "foobarbaz"
TargetPrefixes // Pfx
TargetPrefixes = "Pfx"
// "baz" is a suffix of "foobarbaz"
TargetSuffixes // Sfx
TargetSuffixes = "Sfx"
// "foo" equals any complete element prefix of "foo/bar/baz"
TargetPathPrefix // PathPfx
TargetPathPrefix = "PathPfx"
// "foo" equals any complete element in "foo/bar/baz"
TargetPathContains // PathCont
TargetPathContains = "PathCont"
// "baz" equals any complete element suffix of "foo/bar/baz"
TargetPathSuffix // PathSfx
TargetPathSuffix = "PathSfx"
// "foo/bar/baz" equals the complete path "foo/bar/baz"
TargetPathEquals // PathEQ
TargetPathEquals = "PathEQ"
)

func (c comparator) String() string {
return string(c)
}
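Backing comparator with its string value makes persisted filters self-describing: a stored "PathPfx" keeps its meaning even if constants are added or reordered, while the old iota-based int (retained as the deprecated ComparatorInt field for deserialization) only means the same thing while the declaration order never changes. A minimal sketch of the difference; the struct here is a stand-in, not the package's Filter:

// sketch of string- vs int-backed enum serialization; not part of the diff.
package main

import (
	"encoding/json"
	"fmt"
)

type comparator string

const TargetPathPrefix comparator = "PathPfx"

type filter struct {
	Comparator    comparator `json:"comparator_type"`
	ComparatorInt int        `json:"comparator"` // deprecated int form
}

func main() {
	b, _ := json.Marshal(filter{Comparator: TargetPathPrefix, ComparatorInt: 11})
	fmt.Println(string(b))
	// {"comparator_type":"PathPfx","comparator":11}
	// the string survives constant reordering; the int 11 only means
	// "PathPfx" for as long as the iota order never changes.
}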
func normAll(ss []string) []string {
r := slices.Clone(ss)
for i := range r {
@ -56,7 +62,7 @@ func normAll(ss []string) []string {
}

func norm(s string) string {
return strings.ToLower(s)
return strings.ToLower(strings.TrimSpace(s))
}

// normPathElem ensures the string is:
@ -66,6 +72,8 @@ func norm(s string) string {
// without re-running the prefix-suffix addition multiple
// times per target.
func normPathElem(s string) string {
s = strings.TrimSpace(s)

if len(s) == 0 {
return s
}
@ -74,7 +82,9 @@ func normPathElem(s string) string {
s = string(path.PathSeparator) + s
}

s = path.TrimTrailingSlash(s) + string(path.PathSeparator)
s = path.TrimTrailingSlash(s)
s = strings.ToLower(s)
s += string(path.PathSeparator)

return s
}
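normPathElem now lowercases in addition to trimming and slash-wrapping, so every path target and input reduces to one canonical shape ("/fa/fb/") before the plain string comparators run; the wrapping separators are what make matches element-complete. A standalone sketch of the same normalization, assuming "/" as the path separator and a simplified trailing-slash trim:

// sketch of the path-element normalization; not part of the diff.
package main

import (
	"fmt"
	"strings"
)

// trim space, ensure a leading slash, drop trailing slashes,
// lowercase, then re-append one trailing slash.
func normPathElem(s string) string {
	s = strings.TrimSpace(s)
	if len(s) == 0 {
		return s
	}

	if !strings.HasPrefix(s, "/") {
		s = "/" + s
	}

	s = strings.TrimRight(s, "/")
	s = strings.ToLower(s)

	return s + "/"
}

func main() {
	fmt.Println(normPathElem("fA/fB/")) // "/fa/fb/"
	// element-complete matching falls out of the wrapping slashes:
	fmt.Println(strings.HasPrefix(normPathElem("/foobar"), normPathElem("foo")))  // false
	fmt.Println(strings.HasPrefix(normPathElem("/foo/bar"), normPathElem("foo"))) // true
}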
@ -83,7 +93,7 @@ func normPathElem(s string) string {
// compare values against. Filter.Matches(v) returns
// true if Filter.Comparer(filter.target, v) is true.
type Filter struct {
Comparator comparator `json:"comparator"`
Comparator comparator `json:"comparator_type"` // the type of comparison
Targets []string `json:"targets"` // the set of values to compare
NormalizedTargets []string `json:"normalizedTargets"` // the set of comparable values post normalization
Negate bool `json:"negate"` // when true, negate the comparator result
@ -92,7 +102,8 @@ type Filter struct {
Identity string `json:"identity"`

// deprecated, kept around for deserialization
Target string `json:"target"` // the value to compare against
Target string `json:"target"` // the value to compare against
ComparatorInt int `json:"comparator"`
}

// ----------------------------------------------------------------------------------------------------
@ -111,7 +122,7 @@ func Identity(id string) Filter {
}

// Equal creates a filter where Compare(v) is true if, for any target string,
// target == v
// norm(target) == norm(v)
func Equal(target []string) Filter {
return newFilter(EqualTo, target, normAll(target), false)
}
@ -122,6 +133,19 @@ func NotEqual(target []string) Filter {
return newFilter(EqualTo, target, normAll(target), true)
}

// StrictEqual creates a filter where Compare(v) is true if, for any target string,
// target === v. Target and v are not normalized for this comparison. The comparison
// is case sensitive and ignores character folding.
func StrictEqual(target []string) Filter {
return newFilter(StrictEqualTo, target, normAll(target), false)
}

// NotStrictEqual creates a filter where Compare(v) is true if, for any target string,
// target != v
func NotStrictEqual(target []string) Filter {
return newFilter(StrictEqualTo, target, normAll(target), true)
}

// Greater creates a filter where Compare(v) is true if, for any target string,
// target > v
func Greater(target []string) Filter {
@ -356,29 +380,24 @@ func (f Filter) CompareAny(inputs ...string) bool {
func (f Filter) Compare(input string) bool {
var cmp func(string, string) bool

// select comparison func
switch f.Comparator {
case EqualTo, IdentityValue:
case EqualTo, IdentityValue, TargetPathEquals:
cmp = equals
case StrictEqualTo:
cmp = strictEquals
case GreaterThan:
cmp = greater
case LessThan:
cmp = less
case TargetContains:
case TargetContains, TargetPathContains:
cmp = contains
case TargetIn:
cmp = in
case TargetPrefixes:
case TargetPrefixes, TargetPathPrefix:
cmp = prefixed
case TargetSuffixes:
case TargetSuffixes, TargetPathSuffix:
cmp = suffixed
case TargetPathPrefix:
cmp = pathPrefix
case TargetPathContains:
cmp = pathContains
case TargetPathSuffix:
cmp = pathSuffix
case TargetPathEquals:
cmp = pathEquals
case Passes:
return true
case Fails:
@ -388,14 +407,39 @@ func (f Filter) Compare(input string) bool {
var (
res bool
targets = f.NormalizedTargets
_input = norm(input)
// most comparators expect cmp(target, input)
// path comparators expect cmp(input, target)
swapParams bool
)

// set conditional behavior
switch f.Comparator {
case TargetContains:
// legacy case handling for contains, which checks for
// strings.Contains(target, input) instead of (input, target)
swapParams = true
case StrictEqualTo:
targets = f.Targets
_input = input
case TargetPathPrefix, TargetPathContains, TargetPathSuffix, TargetPathEquals:
// As a precondition, assumes each entry in the NormalizedTargets
// list has been passed through normPathElem().
_input = normPathElem(input)
}

if len(targets) == 0 {
targets = f.Targets
}

for _, tgt := range targets {
res = cmp(norm(tgt), norm(input))
t, i := tgt, _input

if swapParams {
t, i = _input, tgt
}

res = cmp(t, i)

// any-match
if res {
@ -410,11 +454,16 @@ func (f Filter) Compare(input string) bool {
return res
}

// true if t == i
// true if t == i, case insensitive and folded
func equals(target, input string) bool {
return strings.EqualFold(target, input)
}

// true if t == i, case sensitive and not folded
func strictEquals(target, input string) bool {
return target == input
}

// true if t > i
func greater(target, input string) bool {
return target > input
@ -425,9 +474,9 @@ func less(target, input string) bool {
return target < input
}

// true if target contains input as a substring.
// true if input contains target as a substring.
func contains(target, input string) bool {
return strings.Contains(target, input)
return strings.Contains(input, target)
}

// true if input contains target as a substring.
@ -445,63 +494,6 @@ func suffixed(target, input string) bool {
return strings.HasSuffix(input, target)
}

// true if target is an _element complete_ prefix match
// on the input. Element complete means we do not
// succeed on partial element matches (ex: "/foo" does
// not match "/foobar").
//
// As a precondition, assumes the target value has been
// passed through normPathElem().
//
// The input is assumed to be the complete path that may
// have the target as a prefix.
func pathPrefix(target, input string) bool {
return strings.HasPrefix(normPathElem(input), target)
}

// true if target has an _element complete_ equality
// with any element, or any sequence of elements, from
// the input. Element complete means we do not succeed
// on partial element matches (ex: foo does not match
// /foobar, and foo/bar does not match foo/barbaz).
//
// As a precondition, assumes the target value has been
// passed through normPathElem().
//
// Input is assumed to be the complete path that may
// contain the target as an element or sequence of elems.
func pathContains(target, input string) bool {
return strings.Contains(normPathElem(input), target)
}

// true if target is an _element complete_ suffix match
// on the input. Element complete means we do not
// succeed on partial element matches (ex: "/bar" does
// not match "/foobar").
//
// As a precondition, assumes the target value has been
// passed through normPathElem().
//
// The input is assumed to be the complete path that may
// have the target as a suffix.
func pathSuffix(target, input string) bool {
return strings.HasSuffix(normPathElem(input), target)
}

// true if target is an _exact_ match on the input, excluding
// path delimiters. Element complete means we do not succeed
|
||||
// on partial element matches (ex: "/bar" does not match
|
||||
// "/foobar").
|
||||
//
|
||||
// As a precondition, assumes the target value has been
|
||||
// passed through normPathElem().
|
||||
//
|
||||
// The input is assumed to be the complete path that may
|
||||
// match the target.
|
||||
func pathEquals(target, input string) bool {
|
||||
return strings.EqualFold(normPathElem(input), target)
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------------------------------
|
||||
// Printers and PII control
|
||||
// ----------------------------------------------------------------------------------------------------
|
||||
@ -511,9 +503,11 @@ var _ clues.PlainConcealer = &Filter{}
|
||||
var safeFilterValues = map[string]struct{}{"*": {}}
|
||||
|
||||
func (f Filter) Conceal() string {
|
||||
fcs := f.Comparator.String()
|
||||
fcs := string(f.Comparator)
|
||||
|
||||
switch f.Comparator {
|
||||
case UnknownComparator:
|
||||
fcs = "UnknownComparison"
|
||||
case Passes, Fails:
|
||||
return fcs
|
||||
}
|
||||
@ -532,9 +526,11 @@ func (f Filter) String() string {
|
||||
}
|
||||
|
||||
func (f Filter) PlainString() string {
|
||||
fcs := f.Comparator.String()
|
||||
fcs := string(f.Comparator)
|
||||
|
||||
switch f.Comparator {
|
||||
case UnknownComparator:
|
||||
fcs = "UnknownComparison"
|
||||
case Passes, Fails:
|
||||
return fcs
|
||||
}
|
||||
|
||||
@ -51,6 +51,8 @@ func (suite *FiltersSuite) TestEquals() {
|
||||
expectNF assert.BoolAssertionFunc
|
||||
}{
|
||||
{"foo", assert.True, assert.False},
|
||||
{"FOO", assert.True, assert.False},
|
||||
{" foo ", assert.True, assert.False},
|
||||
{"bar", assert.False, assert.True},
|
||||
}
|
||||
for _, test := range table {
|
||||
@ -86,6 +88,30 @@ func (suite *FiltersSuite) TestEquals_any() {
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *FiltersSuite) TestStrictEquals() {
|
||||
f := filters.StrictEqual(foo)
|
||||
nf := filters.NotStrictEqual(foo)
|
||||
|
||||
table := []struct {
|
||||
input string
|
||||
expectF assert.BoolAssertionFunc
|
||||
expectNF assert.BoolAssertionFunc
|
||||
}{
|
||||
{"foo", assert.True, assert.False},
|
||||
{"FOO", assert.False, assert.True},
|
||||
{" foo ", assert.False, assert.True},
|
||||
{"bar", assert.False, assert.True},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.input, func() {
|
||||
t := suite.T()
|
||||
|
||||
test.expectF(t, f.Compare(test.input), "filter")
|
||||
test.expectNF(t, nf.Compare(test.input), "negated filter")
|
||||
})
|
||||
}
|
||||
}

func (suite *FiltersSuite) TestGreater() {
	f := filters.Greater(five)
	nf := filters.NotGreater(five)
@ -143,6 +169,7 @@ func (suite *FiltersSuite) TestContains() {
	}{
		{"murf", assert.True, assert.False},
		{"frum", assert.False, assert.True},
		{"ssmurfss", assert.False, assert.True},
	}
	for _, test := range table {
		suite.Run(test.input, func() {
@ -300,77 +327,134 @@ func (suite *FiltersSuite) TestSuffixes() {
	}
}

func (suite *FiltersSuite) TestPathPrefix() {
	table := []struct {
		name     string
		targets  []string
		input    string
		expectF  assert.BoolAssertionFunc
		expectNF assert.BoolAssertionFunc
	}{
		{"Exact - same case", []string{"fA"}, "/fA", assert.True, assert.False},
		{"Exact - different case", []string{"fa"}, "/fA", assert.True, assert.False},
		{"Prefix - same case", []string{"fA"}, "/fA/fB", assert.True, assert.False},
		{"Prefix - different case", []string{"fa"}, "/fA/fB", assert.True, assert.False},
		{"Exact - multiple folders", []string{"fA/fB"}, "/fA/fB", assert.True, assert.False},
		{"Prefix - single folder partial", []string{"f"}, "/fA/fB", assert.False, assert.True},
		{"Prefix - multi folder partial", []string{"fA/f"}, "/fA/fB", assert.False, assert.True},
		{"Target Longer - single folder", []string{"fA"}, "/f", assert.False, assert.True},
		{"Target Longer - multi folder", []string{"fA/fB"}, "/fA/f", assert.False, assert.True},
		{"Not prefix - single folder", []string{"fA"}, "/af", assert.False, assert.True},
		{"Not prefix - multi folder", []string{"fA/fB"}, "/fA/bf", assert.False, assert.True},
		{"Exact - target variations - none", []string{"fA"}, "/fA", assert.True, assert.False},
		{"Exact - target variations - prefix", []string{"/fA"}, "/fA", assert.True, assert.False},
		{"Exact - target variations - suffix", []string{"fA/"}, "/fA", assert.True, assert.False},
		{"Exact - target variations - both", []string{"/fA/"}, "/fA", assert.True, assert.False},
		{"Exact - input variations - none", []string{"fA"}, "fA", assert.True, assert.False},
		{"Exact - input variations - prefix", []string{"fA"}, "/fA", assert.True, assert.False},
		{"Exact - input variations - suffix", []string{"fA"}, "fA/", assert.True, assert.False},
		{"Exact - input variations - both", []string{"fA"}, "/fA/", assert.True, assert.False},
		{"Prefix - target variations - none", []string{"fA"}, "/fA/fb", assert.True, assert.False},
		{"Prefix - target variations - prefix", []string{"/fA"}, "/fA/fb", assert.True, assert.False},
		{"Prefix - target variations - suffix", []string{"fA/"}, "/fA/fb", assert.True, assert.False},
		{"Prefix - target variations - both", []string{"/fA/"}, "/fA/fb", assert.True, assert.False},
		{"Prefix - input variations - none", []string{"fA"}, "fA/fb", assert.True, assert.False},
		{"Prefix - input variations - prefix", []string{"fA"}, "/fA/fb", assert.True, assert.False},
		{"Prefix - input variations - suffix", []string{"fA"}, "fA/fb/", assert.True, assert.False},
		{"Prefix - input variations - both", []string{"fA"}, "/fA/fb/", assert.True, assert.False},
		{"Slice - one matches", []string{"foo", "fa/f", "fA"}, "/fA/fb", assert.True, assert.False},
		{"Slice - none match", []string{"foo", "fa/f", "f"}, "/fA/fb", assert.False, assert.True},
// ---------------------------------------------------------------------------
// path comparators
// ---------------------------------------------------------------------------

var pathElemNormalizationTable = []struct {
	name    string
	targets []string
	expect  []string
}{
	{"Single - no slash", []string{"fA"}, []string{"/fa/"}},
	{"Single - pre slash", []string{"/fA"}, []string{"/fa/"}},
	{"Single - suff slash", []string{"fA/"}, []string{"/fa/"}},
	{"Single - both slashes", []string{"/fA/"}, []string{"/fa/"}},
	{"Multipath - no slash", []string{"fA/fB"}, []string{"/fa/fb/"}},
	{"Multipath - pre slash", []string{"/fA/fB"}, []string{"/fa/fb/"}},
	{"Multipath - suff slash", []string{"fA/fB/"}, []string{"/fa/fb/"}},
	{"Multipath - both slashes", []string{"/fA/fB/"}, []string{"/fa/fb/"}},
	{"Multi input - no slash", []string{"fA", "fB"}, []string{"/fa/", "/fb/"}},
	{"Multi input - pre slash", []string{"/fA", "/fB"}, []string{"/fa/", "/fb/"}},
	{"Multi input - suff slash", []string{"fA/", "fB/"}, []string{"/fa/", "/fb/"}},
	{"Multi input - both slashes", []string{"/fA/", "/fB/"}, []string{"/fa/", "/fb/"}},
}
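
The normalization this table pins down wraps each target element in single leading and trailing slashes and lower-cases it, so slash and case variants of the same path compare equal. A minimal stand-in for such a normalizer (the real normPathElem lives in the filters package and is not shown in this diff; this sketch is an assumption consistent with the table above):

package main

import (
	"fmt"
	"strings"
)

// normPathElem-style normalizer (hypothetical stand-in): strips any
// existing leading/trailing slashes, lower-cases the element, then
// re-wraps it in single slashes.
func normPathElem(elem string) string {
	return "/" + strings.ToLower(strings.Trim(elem, "/")) + "/"
}

func main() {
	for _, t := range []string{"fA", "/fA", "fA/", "/fA/fB/"} {
		fmt.Println(normPathElem(t)) // /fa/, /fa/, /fa/, /fa/fb/
	}
}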

type baf struct {
	fn  assert.BoolAssertionFunc
	yes bool
}

var (
	yes = baf{
		fn:  assert.True,
		yes: true,
	}
	for _, test := range table {
	no = baf{
		fn:  assert.False,
		yes: false,
	}
)

var pathComparisonsTable = []struct {
	name           string
	targets        []string
	input          string
	expectContains baf
	expectEquals   baf
	expectPrefix   baf
	expectSuffix   baf
}{
	{"single folder partial", []string{"f"}, "/fA", no, no, no, no},
	{"single folder target partial", []string{"f"}, "/fA/fB", no, no, no, no},
	{"multi folder input partial", []string{"A/f"}, "/fA/fB", no, no, no, no},
	{"longer target - single folder", []string{"fA"}, "/f", no, no, no, no},
	{"longer target - multi folder", []string{"fA/fB"}, "/fA/f", no, no, no, no},
	{"non-matching - single folder", []string{"fA"}, "/af", no, no, no, no},
	{"non-matching - multi folder", []string{"fA/fB"}, "/fA/bf", no, no, no, no},

	{"Exact - same case", []string{"fA"}, "/fA", yes, yes, yes, yes},
	{"Exact - different case", []string{"fa"}, "/fA", yes, yes, yes, yes},
	{"Exact - multiple folders", []string{"fA/fB"}, "/fA/fB", yes, yes, yes, yes},
	{"Exact - target slash variations - prefix", []string{"/fA"}, "/fA", yes, yes, yes, yes},
	{"Exact - target slash variations - suffix", []string{"fA/"}, "/fA", yes, yes, yes, yes},
	{"Exact - target slash variations - both", []string{"/fA/"}, "/fA", yes, yes, yes, yes},
	{"Exact - input slash variations - none", []string{"fA"}, "fA", yes, yes, yes, yes},
	{"Exact - input slash variations - prefix", []string{"fA"}, "/fA", yes, yes, yes, yes},
	{"Exact - input slash variations - suffix", []string{"fA"}, "fA/", yes, yes, yes, yes},
	{"Exact - input slash variations - both", []string{"fA"}, "/fA/", yes, yes, yes, yes},

	{"Prefix - same case", []string{"fA"}, "/fA/fB", yes, no, yes, no},
	{"Prefix - different case", []string{"fa"}, "/fA/fB", yes, no, yes, no},
	{"Prefix - multiple folders", []string{"fa/fb"}, "/fA/fB/fC", yes, no, yes, no},
	{"Prefix - target slash variations - none", []string{"fA"}, "/fA/fb", yes, no, yes, no},
	{"Prefix - target slash variations - prefix", []string{"/fA"}, "/fA/fb", yes, no, yes, no},
	{"Prefix - target slash variations - suffix", []string{"fA/"}, "/fA/fb", yes, no, yes, no},
	{"Prefix - target slash variations - both", []string{"/fA/"}, "/fA/fb", yes, no, yes, no},
	{"Prefix - input slash variations - none", []string{"fA"}, "fA/fb", yes, no, yes, no},
	{"Prefix - input slash variations - prefix", []string{"fA"}, "/fA/fb", yes, no, yes, no},
	{"Prefix - input slash variations - suffix", []string{"fA"}, "fA/fb/", yes, no, yes, no},
	{"Prefix - input slash variations - both", []string{"fA"}, "/fA/fb/", yes, no, yes, no},

	{"Suffix - same case", []string{"fB"}, "/fA/fB", yes, no, no, yes},
	{"Suffix - different case", []string{"fb"}, "/fA/fB", yes, no, no, yes},
	{"Suffix - multiple folders", []string{"fb/fc"}, "/fA/fB/fC", yes, no, no, yes},
	{"Suffix - target slash variations - none", []string{"fB"}, "/fA/fb", yes, no, no, yes},
	{"Suffix - target slash variations - prefix", []string{"/fB"}, "/fA/fb", yes, no, no, yes},
	{"Suffix - target slash variations - suffix", []string{"fB/"}, "/fA/fb", yes, no, no, yes},
	{"Suffix - target slash variations - both", []string{"/fB/"}, "/fA/fb", yes, no, no, yes},
	{"Suffix - input slash variations - none", []string{"fB"}, "fA/fb", yes, no, no, yes},
	{"Suffix - input slash variations - prefix", []string{"fB"}, "/fA/fb", yes, no, no, yes},
	{"Suffix - input slash variations - suffix", []string{"fB"}, "fA/fb/", yes, no, no, yes},
	{"Suffix - input slash variations - both", []string{"fB"}, "/fA/fb/", yes, no, no, yes},

	{"Contains - same case", []string{"fB"}, "/fA/fB/fC", yes, no, no, no},
	{"Contains - different case", []string{"fb"}, "/fA/fB/fC", yes, no, no, no},
	{"Contains - multiple folders", []string{"fb/fc"}, "/fA/fB/fC/fD", yes, no, no, no},
	{"Contains - target slash variations - none", []string{"fB"}, "/fA/fb/fc", yes, no, no, no},
	{"Contains - target slash variations - prefix", []string{"/fB"}, "/fA/fb/fc", yes, no, no, no},
	{"Contains - target slash variations - suffix", []string{"fB/"}, "/fA/fb/fc", yes, no, no, no},
	{"Contains - target slash variations - both", []string{"/fB/"}, "/fA/fb/fc", yes, no, no, no},
	{"Contains - input slash variations - none", []string{"fB"}, "fA/fb/fc", yes, no, no, no},
	{"Contains - input slash variations - prefix", []string{"fB"}, "/fA/fb/fc/", yes, no, no, no},
	{"Contains - input slash variations - suffix", []string{"fB"}, "fA/fb/fc/", yes, no, no, no},
	{"Contains - input slash variations - both", []string{"fB"}, "/fA/fb/fc/", yes, no, no, no},

	{"Slice - one exact matches", []string{"foo", "fa/f", "fA"}, "/fA", yes, yes, yes, yes},
	{"Slice - none match", []string{"foo", "fa/f", "f"}, "/fA", no, no, no, no},
}
|
||||
|
||||
func (suite *FiltersSuite) TestPathPrefix() {
|
||||
for _, test := range pathComparisonsTable {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
var (
|
||||
t = suite.T()
|
||||
f = filters.PathPrefix(test.targets)
|
||||
nf = filters.NotPathPrefix(test.targets)
|
||||
)
|
||||
|
||||
f := filters.PathPrefix(test.targets)
|
||||
nf := filters.NotPathPrefix(test.targets)
|
||||
|
||||
test.expectF(t, f.Compare(test.input), "filter")
|
||||
test.expectNF(t, nf.Compare(test.input), "negated filter")
|
||||
test.expectPrefix.fn(t, f.Compare(test.input), "filter")
|
||||
if test.expectPrefix.yes {
|
||||
no.fn(t, nf.Compare(test.input), "negated filter")
|
||||
} else {
|
||||
yes.fn(t, nf.Compare(test.input), "negated filter")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *FiltersSuite) TestPathPrefix_NormalizedTargets() {
|
||||
table := []struct {
|
||||
name string
|
||||
targets []string
|
||||
expect []string
|
||||
}{
|
||||
{"Single - no slash", []string{"fA"}, []string{"/fA/"}},
|
||||
{"Single - pre slash", []string{"/fA"}, []string{"/fA/"}},
|
||||
{"Single - suff slash", []string{"fA/"}, []string{"/fA/"}},
|
||||
{"Single - both slashes", []string{"/fA/"}, []string{"/fA/"}},
|
||||
{"Multipath - no slash", []string{"fA/fB"}, []string{"/fA/fB/"}},
|
||||
{"Multipath - pre slash", []string{"/fA/fB"}, []string{"/fA/fB/"}},
|
||||
{"Multipath - suff slash", []string{"fA/fB/"}, []string{"/fA/fB/"}},
|
||||
{"Multipath - both slashes", []string{"/fA/fB/"}, []string{"/fA/fB/"}},
|
||||
{"Multi input - no slash", []string{"fA", "fB"}, []string{"/fA/", "/fB/"}},
|
||||
{"Multi input - pre slash", []string{"/fA", "/fB"}, []string{"/fA/", "/fB/"}},
|
||||
{"Multi input - suff slash", []string{"fA/", "fB/"}, []string{"/fA/", "/fB/"}},
|
||||
{"Multi input - both slashes", []string{"/fA/", "/fB/"}, []string{"/fA/", "/fB/"}},
|
||||
}
|
||||
for _, test := range table {
|
||||
for _, test := range pathElemNormalizationTable {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
@ -381,79 +465,26 @@ func (suite *FiltersSuite) TestPathPrefix_NormalizedTargets() {
|
||||
}
|
||||
|
||||
func (suite *FiltersSuite) TestPathContains() {
|
||||
table := []struct {
|
||||
name string
|
||||
targets []string
|
||||
input string
|
||||
expectF assert.BoolAssertionFunc
|
||||
expectNF assert.BoolAssertionFunc
|
||||
}{
|
||||
{"Exact - same case", []string{"fA"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - different case", []string{"fa"}, "/fA", assert.True, assert.False},
|
||||
{"Cont - same case single target", []string{"fA"}, "/Z/fA/B", assert.True, assert.False},
|
||||
{"Cont - different case single target", []string{"fA"}, "/z/fa/b", assert.True, assert.False},
|
||||
{"Cont - same case multi target", []string{"Z/fA"}, "/Z/fA/B", assert.True, assert.False},
|
||||
{"Cont - different case multi target", []string{"fA/B"}, "/z/fa/b", assert.True, assert.False},
|
||||
{"Exact - multiple folders", []string{"Z/fA/B"}, "/Z/fA/B", assert.True, assert.False},
|
||||
{"Cont - single folder partial", []string{"folder"}, "/Z/fA/fB", assert.False, assert.True},
|
||||
{"Cont - multi folder partial", []string{"fA/fold"}, "/Z/fA/fB", assert.False, assert.True},
|
||||
{"Target Longer - single folder", []string{"fA"}, "/folder", assert.False, assert.True},
|
||||
{"Target Longer - multi folder", []string{"fA/fB"}, "/fA/fold", assert.False, assert.True},
|
||||
{"Not cont - single folder", []string{"fA"}, "/afolder", assert.False, assert.True},
|
||||
{"Not cont - single target", []string{"fA"}, "/z/afolder/bfolder", assert.False, assert.True},
|
||||
{"Not cont - multi folder", []string{"fA/fB"}, "/z/fA/bfolder", assert.False, assert.True},
|
||||
{"Exact - target variations - none", []string{"fA"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - target variations - prefix", []string{"/fA"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - target variations - suffix", []string{"fA/"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - target variations - both", []string{"/fA/"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - input variations - none", []string{"fA"}, "fA", assert.True, assert.False},
|
||||
{"Exact - input variations - prefix", []string{"fA"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - input variations - suffix", []string{"fA"}, "fA/", assert.True, assert.False},
|
||||
{"Exact - input variations - both", []string{"fA"}, "/fA/", assert.True, assert.False},
|
||||
{"Cont - target variations - none", []string{"fA"}, "/fA/fb", assert.True, assert.False},
|
||||
{"Cont - target variations - prefix", []string{"/fA"}, "/fA/fb", assert.True, assert.False},
|
||||
{"Cont - target variations - suffix", []string{"fA/"}, "/fA/fb", assert.True, assert.False},
|
||||
{"Cont - target variations - both", []string{"/fA/"}, "/fA/fb", assert.True, assert.False},
|
||||
{"Cont - input variations - none", []string{"fA"}, "fA/fb", assert.True, assert.False},
|
||||
{"Cont - input variations - prefix", []string{"fA"}, "/fA/fb", assert.True, assert.False},
|
||||
{"Cont - input variations - suffix", []string{"fA"}, "fA/fb/", assert.True, assert.False},
|
||||
{"Cont - input variations - both", []string{"fA"}, "/fA/fb/", assert.True, assert.False},
|
||||
{"Slice - one matches", []string{"foo", "fa/f", "fA"}, "/fA/fb", assert.True, assert.False},
|
||||
{"Slice - none match", []string{"foo", "fa/f", "f"}, "/fA/fb", assert.False, assert.True},
|
||||
}
|
||||
for _, test := range table {
|
||||
for _, test := range pathComparisonsTable {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
var (
|
||||
t = suite.T()
|
||||
f = filters.PathContains(test.targets)
|
||||
nf = filters.NotPathContains(test.targets)
|
||||
)
|
||||
|
||||
f := filters.PathContains(test.targets)
|
||||
nf := filters.NotPathContains(test.targets)
|
||||
|
||||
test.expectF(t, f.Compare(test.input), "filter")
|
||||
test.expectNF(t, nf.Compare(test.input), "negated filter")
|
||||
test.expectContains.fn(t, f.Compare(test.input), "filter")
|
||||
if test.expectContains.yes {
|
||||
no.fn(t, nf.Compare(test.input), "negated filter")
|
||||
} else {
|
||||
yes.fn(t, nf.Compare(test.input), "negated filter")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *FiltersSuite) TestPathContains_NormalizedTargets() {
|
||||
table := []struct {
|
||||
name string
|
||||
targets []string
|
||||
expect []string
|
||||
}{
|
||||
{"Single - no slash", []string{"fA"}, []string{"/fA/"}},
|
||||
{"Single - pre slash", []string{"/fA"}, []string{"/fA/"}},
|
||||
{"Single - suff slash", []string{"fA/"}, []string{"/fA/"}},
|
||||
{"Single - both slashes", []string{"/fA/"}, []string{"/fA/"}},
|
||||
{"Multipath - no slash", []string{"fA/fB"}, []string{"/fA/fB/"}},
|
||||
{"Multipath - pre slash", []string{"/fA/fB"}, []string{"/fA/fB/"}},
|
||||
{"Multipath - suff slash", []string{"fA/fB/"}, []string{"/fA/fB/"}},
|
||||
{"Multipath - both slashes", []string{"/fA/fB/"}, []string{"/fA/fB/"}},
|
||||
{"Multi input - no slash", []string{"fA", "fB"}, []string{"/fA/", "/fB/"}},
|
||||
{"Multi input - pre slash", []string{"/fA", "/fB"}, []string{"/fA/", "/fB/"}},
|
||||
{"Multi input - suff slash", []string{"fA/", "fB/"}, []string{"/fA/", "/fB/"}},
|
||||
{"Multi input - both slashes", []string{"/fA/", "/fB/"}, []string{"/fA/", "/fB/"}},
|
||||
}
|
||||
for _, test := range table {
|
||||
for _, test := range pathElemNormalizationTable {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
@ -464,76 +495,26 @@ func (suite *FiltersSuite) TestPathContains_NormalizedTargets() {
|
||||
}
|
||||
|
||||
func (suite *FiltersSuite) TestPathSuffix() {
|
||||
table := []struct {
|
||||
name string
|
||||
targets []string
|
||||
input string
|
||||
expectF assert.BoolAssertionFunc
|
||||
expectNF assert.BoolAssertionFunc
|
||||
}{
|
||||
{"Exact - same case", []string{"fA"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - different case", []string{"fa"}, "/fA", assert.True, assert.False},
|
||||
{"Suffix - same case", []string{"fB"}, "/fA/fB", assert.True, assert.False},
|
||||
{"Suffix - different case", []string{"fb"}, "/fA/fB", assert.True, assert.False},
|
||||
{"Exact - multiple folders", []string{"fA/fB"}, "/fA/fB", assert.True, assert.False},
|
||||
{"Suffix - single folder partial", []string{"f"}, "/fA/fB", assert.False, assert.True},
|
||||
{"Suffix - multi folder partial", []string{"A/fB"}, "/fA/fB", assert.False, assert.True},
|
||||
{"Target Longer - single folder", []string{"fA"}, "/f", assert.False, assert.True},
|
||||
{"Target Longer - multi folder", []string{"fA/fB"}, "/fA/f", assert.False, assert.True},
|
||||
{"Not suffix - single folder", []string{"fA"}, "/af", assert.False, assert.True},
|
||||
{"Not suffix - multi folder", []string{"fA/fB"}, "/Af/fB", assert.False, assert.True},
|
||||
{"Exact - target variations - none", []string{"fA"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - target variations - prefix", []string{"/fA"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - target variations - suffix", []string{"fA/"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - target variations - both", []string{"/fA/"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - input variations - none", []string{"fA"}, "fA", assert.True, assert.False},
|
||||
{"Exact - input variations - prefix", []string{"fA"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - input variations - suffix", []string{"fA"}, "fA/", assert.True, assert.False},
|
||||
{"Exact - input variations - both", []string{"fA"}, "/fA/", assert.True, assert.False},
|
||||
{"Suffix - target variations - none", []string{"fb"}, "/fA/fb", assert.True, assert.False},
|
||||
{"Suffix - target variations - prefix", []string{"/fb"}, "/fA/fb", assert.True, assert.False},
|
||||
{"Suffix - target variations - suffix", []string{"fb/"}, "/fA/fb", assert.True, assert.False},
|
||||
{"Suffix - target variations - both", []string{"/fb/"}, "/fA/fb", assert.True, assert.False},
|
||||
{"Suffix - input variations - none", []string{"fb"}, "fA/fb", assert.True, assert.False},
|
||||
{"Suffix - input variations - prefix", []string{"fb"}, "/fA/fb", assert.True, assert.False},
|
||||
{"Suffix - input variations - suffix", []string{"fb"}, "fA/fb/", assert.True, assert.False},
|
||||
{"Suffix - input variations - both", []string{"fb"}, "/fA/fb/", assert.True, assert.False},
|
||||
{"Slice - one matches", []string{"foo", "fa/f", "fb"}, "/fA/fb", assert.True, assert.False},
|
||||
{"Slice - none match", []string{"foo", "fa/f", "f"}, "/fA/fb", assert.False, assert.True},
|
||||
}
|
||||
for _, test := range table {
|
||||
for _, test := range pathComparisonsTable {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
var (
|
||||
t = suite.T()
|
||||
f = filters.PathSuffix(test.targets)
|
||||
nf = filters.NotPathSuffix(test.targets)
|
||||
)
|
||||
|
||||
f := filters.PathSuffix(test.targets)
|
||||
nf := filters.NotPathSuffix(test.targets)
|
||||
|
||||
test.expectF(t, f.Compare(test.input), "filter")
|
||||
test.expectNF(t, nf.Compare(test.input), "negated filter")
|
||||
test.expectSuffix.fn(t, f.Compare(test.input), "filter")
|
||||
if test.expectSuffix.yes {
|
||||
no.fn(t, nf.Compare(test.input), "negated filter")
|
||||
} else {
|
||||
yes.fn(t, nf.Compare(test.input), "negated filter")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *FiltersSuite) TestPathSuffix_NormalizedTargets() {
|
||||
table := []struct {
|
||||
name string
|
||||
targets []string
|
||||
expect []string
|
||||
}{
|
||||
{"Single - no slash", []string{"fA"}, []string{"/fA/"}},
|
||||
{"Single - pre slash", []string{"/fA"}, []string{"/fA/"}},
|
||||
{"Single - suff slash", []string{"fA/"}, []string{"/fA/"}},
|
||||
{"Single - both slashes", []string{"/fA/"}, []string{"/fA/"}},
|
||||
{"Multipath - no slash", []string{"fA/fB"}, []string{"/fA/fB/"}},
|
||||
{"Multipath - pre slash", []string{"/fA/fB"}, []string{"/fA/fB/"}},
|
||||
{"Multipath - suff slash", []string{"fA/fB/"}, []string{"/fA/fB/"}},
|
||||
{"Multipath - both slashes", []string{"/fA/fB/"}, []string{"/fA/fB/"}},
|
||||
{"Multi input - no slash", []string{"fA", "fB"}, []string{"/fA/", "/fB/"}},
|
||||
{"Multi input - pre slash", []string{"/fA", "/fB"}, []string{"/fA/", "/fB/"}},
|
||||
{"Multi input - suff slash", []string{"fA/", "fB/"}, []string{"/fA/", "/fB/"}},
|
||||
{"Multi input - both slashes", []string{"/fA/", "/fB/"}, []string{"/fA/", "/fB/"}},
|
||||
}
|
||||
for _, test := range table {
|
||||
for _, test := range pathElemNormalizationTable {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
@ -544,67 +525,26 @@ func (suite *FiltersSuite) TestPathSuffix_NormalizedTargets() {
|
||||
}
|
||||
|
||||
func (suite *FiltersSuite) TestPathEquals() {
|
||||
table := []struct {
|
||||
name string
|
||||
targets []string
|
||||
input string
|
||||
expectF assert.BoolAssertionFunc
|
||||
expectNF assert.BoolAssertionFunc
|
||||
}{
|
||||
{"Exact - same case", []string{"fA"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - different case", []string{"fa"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - multiple folders", []string{"fA/fB"}, "/fA/fB", assert.True, assert.False},
|
||||
{"Exact - target variations - none", []string{"fA"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - target variations - prefix", []string{"/fA"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - target variations - suffix", []string{"fA/"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - target variations - both", []string{"/fA/"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - input variations - none", []string{"fA"}, "fA", assert.True, assert.False},
|
||||
{"Exact - input variations - prefix", []string{"fA"}, "/fA", assert.True, assert.False},
|
||||
{"Exact - input variations - suffix", []string{"fA"}, "fA/", assert.True, assert.False},
|
||||
{"Exact - input variations - both", []string{"fA"}, "/fA/", assert.True, assert.False},
|
||||
{"Partial match", []string{"f"}, "/fA/", assert.False, assert.True},
|
||||
{"Suffix - same case", []string{"fB"}, "/fA/fB", assert.False, assert.True},
|
||||
{"Suffix - different case", []string{"fb"}, "/fA/fB", assert.False, assert.True},
|
||||
{"Prefix - same case", []string{"fA"}, "/fA/fB", assert.False, assert.True},
|
||||
{"Prefix - different case", []string{"fa"}, "/fA/fB", assert.False, assert.True},
|
||||
{"Contains - same case", []string{"fB"}, "/fA/fB/fC", assert.False, assert.True},
|
||||
{"Contains - different case", []string{"fb"}, "/fA/fB/fC", assert.False, assert.True},
|
||||
{"Slice - one matches", []string{"foo", "/fA/fb", "fb"}, "/fA/fb", assert.True, assert.False},
|
||||
{"Slice - none match", []string{"foo", "fa/f", "f"}, "/fA/fb", assert.False, assert.True},
|
||||
}
|
||||
for _, test := range table {
|
||||
for _, test := range pathComparisonsTable {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
var (
|
||||
t = suite.T()
|
||||
f = filters.PathEquals(test.targets)
|
||||
nf = filters.NotPathEquals(test.targets)
|
||||
)
|
||||
|
||||
f := filters.PathEquals(test.targets)
|
||||
nf := filters.NotPathEquals(test.targets)
|
||||
|
||||
test.expectF(t, f.Compare(test.input), "filter")
|
||||
test.expectNF(t, nf.Compare(test.input), "negated filter")
|
||||
test.expectEquals.fn(t, f.Compare(test.input), "filter")
|
||||
if test.expectEquals.yes {
|
||||
no.fn(t, nf.Compare(test.input), "negated filter")
|
||||
} else {
|
||||
yes.fn(t, nf.Compare(test.input), "negated filter")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *FiltersSuite) TestPathEquals_NormalizedTargets() {
|
||||
table := []struct {
|
||||
name string
|
||||
targets []string
|
||||
expect []string
|
||||
}{
|
||||
{"Single - no slash", []string{"fA"}, []string{"/fA/"}},
|
||||
{"Single - pre slash", []string{"/fA"}, []string{"/fA/"}},
|
||||
{"Single - suff slash", []string{"fA/"}, []string{"/fA/"}},
|
||||
{"Single - both slashes", []string{"/fA/"}, []string{"/fA/"}},
|
||||
{"Multipath - no slash", []string{"fA/fB"}, []string{"/fA/fB/"}},
|
||||
{"Multipath - pre slash", []string{"/fA/fB"}, []string{"/fA/fB/"}},
|
||||
{"Multipath - suff slash", []string{"fA/fB/"}, []string{"/fA/fB/"}},
|
||||
{"Multipath - both slashes", []string{"/fA/fB/"}, []string{"/fA/fB/"}},
|
||||
{"Multi input - no slash", []string{"fA", "fB"}, []string{"/fA/", "/fB/"}},
|
||||
{"Multi input - pre slash", []string{"/fA", "/fB"}, []string{"/fA/", "/fB/"}},
|
||||
{"Multi input - suff slash", []string{"fA/", "fB/"}, []string{"/fA/", "/fB/"}},
|
||||
{"Multi input - both slashes", []string{"/fA/", "/fB/"}, []string{"/fA/", "/fB/"}},
|
||||
}
|
||||
for _, test := range table {
|
||||
for _, test := range pathElemNormalizationTable {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
|
||||
@ -614,6 +554,10 @@ func (suite *FiltersSuite) TestPathEquals_NormalizedTargets() {
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// pii handling
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func (suite *FiltersSuite) TestFilter_pii() {
|
||||
targets := []string{"fnords", "smarf", "*"}
|
||||
|
||||
@ -636,8 +580,8 @@ func (suite *FiltersSuite) TestFilter_pii() {
|
||||
suite.Run(test.name, func() {
|
||||
var (
|
||||
t = suite.T()
|
||||
expect = test.f.Comparator.String() + ":***,***,*"
|
||||
expectPlain = test.f.Comparator.String() + ":" + strings.Join(targets, ",")
|
||||
expect = string(test.f.Comparator) + ":***,***,*"
|
||||
expectPlain = string(test.f.Comparator) + ":" + strings.Join(targets, ",")
|
||||
)
|
||||
|
||||
result := test.f.Conceal()
|
||||
@ -671,14 +615,14 @@ func (suite *FiltersSuite) TestFilter_pii() {
|
||||
{
|
||||
"identity",
|
||||
filters.Identity("id"),
|
||||
filters.IdentityValue.String() + ":***",
|
||||
filters.IdentityValue.String() + ":id",
|
||||
filters.IdentityValue + ":***",
|
||||
filters.IdentityValue + ":id",
|
||||
},
|
||||
{
|
||||
"identity",
|
||||
filters.Identity("*"),
|
||||
filters.IdentityValue.String() + ":*",
|
||||
filters.IdentityValue.String() + ":*",
|
||||
filters.IdentityValue + ":*",
|
||||
filters.IdentityValue + ":*",
|
||||
},
|
||||
}
|
||||
for _, test := range table2 {
|
||||
|
||||
@ -434,8 +434,8 @@ func (suite *RepositoryModelIntgSuite) TestGetBackupErrors() {
|
||||
var (
|
||||
err = clues.Wrap(assert.AnError, "wrap")
|
||||
cec = err.Core()
|
||||
item = fault.FileErr(err, "file-id", "file-name", map[string]any{"foo": "bar"})
|
||||
skip = fault.FileSkip(fault.SkipMalware, "s-file-id", "s-file-name", map[string]any{"foo": "bar"})
|
||||
item = fault.FileErr(err, "ns", "file-id", "file-name", map[string]any{"foo": "bar"})
|
||||
skip = fault.FileSkip(fault.SkipMalware, "ns", "s-file-id", "s-file-name", map[string]any{"foo": "bar"})
|
||||
info = details.ItemInfo{
|
||||
Exchange: &details.ExchangeInfo{
|
||||
ItemType: details.ExchangeMail,
|
||||
|
||||
@ -216,9 +216,8 @@ func (s *exchange) Contacts(folders, contacts []string, opts ...option) []Exchan
|
||||
|
||||
scopes = append(
|
||||
scopes,
|
||||
makeScope[ExchangeScope](ExchangeContact, contacts).
|
||||
set(ExchangeContactFolder, folders, opts...),
|
||||
)
|
||||
makeScope[ExchangeScope](ExchangeContact, contacts, defaultItemOptions(s.Cfg)...).
|
||||
set(ExchangeContactFolder, folders, opts...))
|
||||
|
||||
return scopes
|
||||
}
|
||||
@ -236,8 +235,7 @@ func (s *exchange) ContactFolders(folders []string, opts ...option) []ExchangeSc
|
||||
|
||||
scopes = append(
|
||||
scopes,
|
||||
makeScope[ExchangeScope](ExchangeContactFolder, folders, os...),
|
||||
)
|
||||
makeScope[ExchangeScope](ExchangeContactFolder, folders, os...))
|
||||
|
||||
return scopes
|
||||
}
|
||||
@ -252,9 +250,8 @@ func (s *exchange) Events(calendars, events []string, opts ...option) []Exchange
|
||||
|
||||
scopes = append(
|
||||
scopes,
|
||||
makeScope[ExchangeScope](ExchangeEvent, events).
|
||||
set(ExchangeEventCalendar, calendars, opts...),
|
||||
)
|
||||
makeScope[ExchangeScope](ExchangeEvent, events, defaultItemOptions(s.Cfg)...).
|
||||
set(ExchangeEventCalendar, calendars, opts...))
|
||||
|
||||
return scopes
|
||||
}
|
||||
@ -273,8 +270,7 @@ func (s *exchange) EventCalendars(events []string, opts ...option) []ExchangeSco
|
||||
|
||||
scopes = append(
|
||||
scopes,
|
||||
makeScope[ExchangeScope](ExchangeEventCalendar, events, os...),
|
||||
)
|
||||
makeScope[ExchangeScope](ExchangeEventCalendar, events, os...))
|
||||
|
||||
return scopes
|
||||
}
|
||||
@ -289,9 +285,8 @@ func (s *exchange) Mails(folders, mails []string, opts ...option) []ExchangeScop
|
||||
|
||||
scopes = append(
|
||||
scopes,
|
||||
makeScope[ExchangeScope](ExchangeMail, mails).
|
||||
set(ExchangeMailFolder, folders, opts...),
|
||||
)
|
||||
makeScope[ExchangeScope](ExchangeMail, mails, defaultItemOptions(s.Cfg)...).
|
||||
set(ExchangeMailFolder, folders, opts...))
|
||||
|
||||
return scopes
|
||||
}
|
||||
@ -309,8 +304,7 @@ func (s *exchange) MailFolders(folders []string, opts ...option) []ExchangeScope
|
||||
|
||||
scopes = append(
|
||||
scopes,
|
||||
makeScope[ExchangeScope](ExchangeMailFolder, folders, os...),
|
||||
)
|
||||
makeScope[ExchangeScope](ExchangeMailFolder, folders, os...))
|
||||
|
||||
return scopes
|
||||
}
|
||||
@ -326,8 +320,7 @@ func (s *exchange) AllData() []ExchangeScope {
|
||||
scopes = append(scopes,
|
||||
makeScope[ExchangeScope](ExchangeContactFolder, Any()),
|
||||
makeScope[ExchangeScope](ExchangeEventCalendar, Any()),
|
||||
makeScope[ExchangeScope](ExchangeMailFolder, Any()),
|
||||
)
|
||||
makeScope[ExchangeScope](ExchangeMailFolder, Any()))
|
||||
|
||||
return scopes
|
||||
}
|
||||
|
||||
@ -146,6 +146,14 @@ func stubInfoScope(match string) mockScope {
|
||||
return sc
|
||||
}
|
||||
|
||||
func makeStubScope(cfg Config, match []string) mockScope {
|
||||
return makeScope[mockScope](leafCatStub, match, defaultItemOptions(cfg)...)
|
||||
}
|
||||
|
||||
func (s mockScope) Matches(cat mockCategorizer, target string) bool {
|
||||
return matches(s, cat, target)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Stringers and Concealers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
@ -223,8 +223,7 @@ func (s *oneDrive) Folders(folders []string, opts ...option) []OneDriveScope {
|
||||
|
||||
scopes = append(
|
||||
scopes,
|
||||
makeScope[OneDriveScope](OneDriveFolder, folders, os...),
|
||||
)
|
||||
makeScope[OneDriveScope](OneDriveFolder, folders, os...))
|
||||
|
||||
return scopes
|
||||
}
|
||||
@ -239,9 +238,8 @@ func (s *oneDrive) Items(folders, items []string, opts ...option) []OneDriveScop
|
||||
|
||||
scopes = append(
|
||||
scopes,
|
||||
makeScope[OneDriveScope](OneDriveItem, items).
|
||||
set(OneDriveFolder, folders, opts...),
|
||||
)
|
||||
makeScope[OneDriveScope](OneDriveItem, items, defaultItemOptions(s.Cfg)...).
|
||||
set(OneDriveFolder, folders, opts...))
|
||||
|
||||
return scopes
|
||||
}
|
||||
|
||||
@ -460,6 +460,67 @@ func (suite *SelectorScopesSuite) TestMatchesPathValues() {
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *SelectorScopesSuite) TestDefaultItemOptions() {
|
||||
table := []struct {
|
||||
name string
|
||||
cfg Config
|
||||
match []string
|
||||
target string
|
||||
expect assert.BoolAssertionFunc
|
||||
}{
|
||||
{
|
||||
name: "no config, matches same value",
|
||||
cfg: Config{},
|
||||
match: []string{"foo"},
|
||||
target: "foo",
|
||||
expect: assert.True,
|
||||
},
|
||||
{
|
||||
name: "no config, does not match different case",
|
||||
cfg: Config{},
|
||||
match: []string{"bar"},
|
||||
target: "BAR",
|
||||
expect: assert.False,
|
||||
},
|
||||
{
|
||||
name: "no config, does not match substring",
|
||||
cfg: Config{},
|
||||
match: []string{"bar"},
|
||||
target: "ba",
|
||||
expect: assert.False,
|
||||
},
|
||||
{
|
||||
name: "only names, matches same same value",
|
||||
cfg: Config{OnlyMatchItemNames: true},
|
||||
match: []string{"fnords"},
|
||||
target: "fnords",
|
||||
expect: assert.True,
|
||||
},
|
||||
{
|
||||
name: "only names, matches different case",
|
||||
cfg: Config{OnlyMatchItemNames: true},
|
||||
match: []string{"smarf"},
|
||||
target: "SMARF",
|
||||
expect: assert.True,
|
||||
},
|
||||
{
|
||||
name: "only names, does not match substring",
|
||||
cfg: Config{OnlyMatchItemNames: true},
|
||||
match: []string{"brunhilda"},
|
||||
target: "unhild",
|
||||
expect: assert.False,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
suite.Run(test.name, func() {
|
||||
t := suite.T()
|
||||
sc := makeStubScope(test.cfg, test.match)
|
||||
|
||||
test.expect(t, sc.Matches(leafCatStub, test.target))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (suite *SelectorScopesSuite) TestClean() {
|
||||
table := []struct {
|
||||
name string
|
||||
@ -513,17 +574,17 @@ func (suite *SelectorScopesSuite) TestScopeConfig() {
|
||||
table := []struct {
|
||||
name string
|
||||
config scopeConfig
|
||||
expect int
|
||||
expect string
|
||||
}{
|
||||
{
|
||||
name: "no configs set",
|
||||
config: scopeConfig{},
|
||||
expect: int(filters.EqualTo),
|
||||
expect: filters.EqualTo,
|
||||
},
|
||||
{
|
||||
name: "force prefix",
|
||||
config: scopeConfig{usePrefixFilter: true},
|
||||
expect: int(filters.TargetPrefixes),
|
||||
expect: filters.TargetPrefixes,
|
||||
},
|
||||
}
|
||||
for _, test := range table {
|
||||
@ -531,7 +592,7 @@ func (suite *SelectorScopesSuite) TestScopeConfig() {
|
||||
t := suite.T()
|
||||
|
||||
result := filterFor(test.config, input)
|
||||
assert.Equal(t, test.expect, int(result.Comparator))
|
||||
assert.Equal(t, test.expect, string(result.Comparator))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -463,11 +463,26 @@ func pathCategoriesIn[T scopeT, C categoryT](ss []scope) []path.CategoryType {
|
||||
// scope constructors
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// constructs the default item-scope comparator options according
|
||||
// to the selector configuration.
|
||||
// - if cfg.OnlyMatchItemNames == false, then comparison assumes item IDs,
|
||||
// which are case sensitive, resulting in StrictEqualsMatch
|
||||
func defaultItemOptions(cfg Config) []option {
|
||||
opts := []option{}
|
||||
|
||||
if !cfg.OnlyMatchItemNames {
|
||||
opts = append(opts, StrictEqualMatch())
|
||||
}
|
||||
|
||||
return opts
|
||||
}
|
||||
|
||||
type scopeConfig struct {
|
||||
usePathFilter bool
|
||||
usePrefixFilter bool
|
||||
useSuffixFilter bool
|
||||
useEqualsFilter bool
|
||||
usePathFilter bool
|
||||
usePrefixFilter bool
|
||||
useSuffixFilter bool
|
||||
useEqualsFilter bool
|
||||
useStrictEqualsFilter bool
|
||||
}
|
||||
|
||||
type option func(*scopeConfig)
|
||||
@ -496,9 +511,16 @@ func SuffixMatch() option {
|
||||
}
|
||||
}
|
||||
|
||||
// StrictEqualsMatch ensures the selector uses a StrictEquals comparator, instead
|
||||
// of contains. Will not override a default Any() or None() comparator.
|
||||
func StrictEqualMatch() option {
|
||||
return func(sc *scopeConfig) {
|
||||
sc.useStrictEqualsFilter = true
|
||||
}
|
||||
}
|
||||
|
||||
// ExactMatch ensures the selector uses an Equals comparator, instead
|
||||
// of contains. Will not override a default Any() or None()
|
||||
// comparator.
|
||||
// of contains. Will not override a default Any() or None() comparator.
|
||||
func ExactMatch() option {
|
||||
return func(sc *scopeConfig) {
|
||||
sc.useEqualsFilter = true
|
||||
@ -599,6 +621,10 @@ func filterize(
|
||||
return filters.Suffix(targets)
|
||||
}
|
||||
|
||||
if sc.useStrictEqualsFilter {
|
||||
return filters.StrictEqual(targets)
|
||||
}
|
||||
|
||||
if defaultFilter != nil {
|
||||
return defaultFilter(targets)
|
||||
}
|
||||
|
||||
@ -245,8 +245,7 @@ func (s *sharePoint) AllData() []SharePointScope {
|
||||
scopes,
|
||||
makeScope[SharePointScope](SharePointLibraryFolder, Any()),
|
||||
makeScope[SharePointScope](SharePointList, Any()),
|
||||
makeScope[SharePointScope](SharePointPageFolder, Any()),
|
||||
)
|
||||
makeScope[SharePointScope](SharePointPageFolder, Any()))
|
||||
|
||||
return scopes
|
||||
}
|
||||
@ -276,9 +275,8 @@ func (s *sharePoint) ListItems(lists, items []string, opts ...option) []SharePoi
|
||||
|
||||
scopes = append(
|
||||
scopes,
|
||||
makeScope[SharePointScope](SharePointListItem, items).
|
||||
set(SharePointList, lists, opts...),
|
||||
)
|
||||
makeScope[SharePointScope](SharePointListItem, items, defaultItemOptions(s.Cfg)...).
|
||||
set(SharePointList, lists, opts...))
|
||||
|
||||
return scopes
|
||||
}
|
||||
@ -312,8 +310,7 @@ func (s *sharePoint) LibraryFolders(libraryFolders []string, opts ...option) []S
|
||||
|
||||
scopes = append(
|
||||
scopes,
|
||||
makeScope[SharePointScope](SharePointLibraryFolder, libraryFolders, os...),
|
||||
)
|
||||
makeScope[SharePointScope](SharePointLibraryFolder, libraryFolders, os...))
|
||||
|
||||
return scopes
|
||||
}
|
||||
@ -328,9 +325,8 @@ func (s *sharePoint) LibraryItems(libraries, items []string, opts ...option) []S
|
||||
|
||||
scopes = append(
|
||||
scopes,
|
||||
makeScope[SharePointScope](SharePointLibraryItem, items).
|
||||
set(SharePointLibraryFolder, libraries, opts...),
|
||||
)
|
||||
makeScope[SharePointScope](SharePointLibraryItem, items, defaultItemOptions(s.Cfg)...).
|
||||
set(SharePointLibraryFolder, libraries, opts...))
|
||||
|
||||
return scopes
|
||||
}
|
||||
@ -361,8 +357,7 @@ func (s *sharePoint) PageItems(pages, items []string, opts ...option) []SharePoi
|
||||
scopes = append(
|
||||
scopes,
|
||||
makeScope[SharePointScope](SharePointPage, items).
|
||||
set(SharePointPageFolder, pages, opts...),
|
||||
)
|
||||
set(SharePointPageFolder, pages, opts...))
|
||||
|
||||
return scopes
|
||||
}
|
||||
|
||||
@ -1,7 +1,10 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
"github.com/microsoft/kiota-abstractions-go/serialization"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/connector/graph"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
@ -19,37 +22,87 @@ import (
|
||||
type Client struct {
|
||||
Credentials account.M365Config
|
||||
|
||||
// The stable service is re-usable for any non-paged request.
|
||||
// The Stable service is re-usable for any non-paged request.
|
||||
// This allows us to maintain performance across async requests.
|
||||
stable graph.Servicer
|
||||
Stable graph.Servicer
|
||||
|
||||
// The LargeItem graph servicer is configured specifically for
|
||||
// downloading large items such as drive item content or outlook
|
||||
// mail and event attachments.
|
||||
LargeItem graph.Servicer
|
||||
}
|
||||
|
||||
// NewClient produces a new api client. Must be used in
|
||||
// NewClient produces a new exchange api client. Must be used in
|
||||
// place of creating an ad-hoc client struct.
|
||||
func NewClient(creds account.M365Config) (Client, error) {
|
||||
s, err := newService(creds)
|
||||
s, err := NewService(creds)
|
||||
if err != nil {
|
||||
return Client{}, err
|
||||
}
|
||||
|
||||
return Client{creds, s}, nil
|
||||
}
|
||||
|
||||
// service generates a new service. Used for paged and other long-running
|
||||
// requests instead of the client's stable service, so that in-flight state
|
||||
// within the adapter doesn't get clobbered
|
||||
func (c Client) Service() (*graph.Service, error) {
|
||||
return newService(c.Credentials)
|
||||
}
|
||||
|
||||
func newService(creds account.M365Config) (*graph.Service, error) {
|
||||
adapter, err := graph.CreateAdapter(
|
||||
creds.AzureTenantID,
|
||||
creds.AzureClientID,
|
||||
creds.AzureClientSecret)
|
||||
li, err := newLargeItemService(creds)
|
||||
if err != nil {
|
||||
return nil, clues.Wrap(err, "generating graph api service client")
|
||||
return Client{}, err
|
||||
}
|
||||
|
||||
return graph.NewService(adapter), nil
|
||||
return Client{creds, s, li}, nil
|
||||
}
|
||||
|
||||
// Service generates a new graph servicer. New servicers are used for paged
|
||||
// and other long-running requests instead of the client's stable service,
|
||||
// so that in-flight state within the adapter doesn't get clobbered.
|
||||
// Most calls should use the Client.Stable property instead of calling this
|
||||
// func, unless it is explicitly necessary.
|
||||
func (c Client) Service() (graph.Servicer, error) {
|
||||
return NewService(c.Credentials)
|
||||
}
|
||||
|
||||
func NewService(creds account.M365Config, opts ...graph.Option) (*graph.Service, error) {
|
||||
a, err := graph.CreateAdapter(
|
||||
creds.AzureTenantID,
|
||||
creds.AzureClientID,
|
||||
creds.AzureClientSecret,
|
||||
opts...)
|
||||
if err != nil {
|
||||
return nil, clues.Wrap(err, "generating graph api adapter")
|
||||
}
|
||||
|
||||
return graph.NewService(a), nil
|
||||
}
|
||||
|
||||
func newLargeItemService(creds account.M365Config) (*graph.Service, error) {
|
||||
a, err := NewService(creds, graph.NoTimeout())
|
||||
if err != nil {
|
||||
return nil, clues.Wrap(err, "generating no-timeout graph adapter")
|
||||
}
|
||||
|
||||
return a, nil
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// common types and consts
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// DeltaUpdate holds the results of a current delta token. It normally
|
||||
// gets produced when aggregating the addition and removal of items in
|
||||
// a delta-queryable folder.
|
||||
type DeltaUpdate struct {
|
||||
// the deltaLink itself
|
||||
URL string
|
||||
// true if the old delta was marked as invalid
|
||||
Reset bool
|
||||
}
|
||||
|
||||
// GraphQuery represents functions which perform exchange-specific queries
|
||||
// into M365 backstore. Responses -> returned items will only contain the information
|
||||
// that is included in the options
|
||||
// TODO: use selector or path for granularity into specific folders or specific date ranges
|
||||
type GraphQuery func(ctx context.Context, userID string) (serialization.Parsable, error)
|
||||
|
||||
// GraphRetrievalFunctions are functions from the Microsoft Graph API that retrieve
|
||||
// the default associated data of a M365 object. This varies by object. Additional
|
||||
// Queries must be run to obtain the omitted fields.
|
||||
type GraphRetrievalFunc func(
|
||||
ctx context.Context,
|
||||
user, m365ID string,
|
||||
) (serialization.Parsable, error)
|
||||
|
||||
@ -45,7 +45,7 @@ func (c Contacts) CreateContactFolder(
|
||||
temp := folderName
|
||||
requestBody.SetDisplayName(&temp)
|
||||
|
||||
mdl, err := c.Stable.Client().UsersById(user).ContactFolders().Post(ctx, requestBody, nil)
|
||||
mdl, err := c.Stable.Client().Users().ByUserId(user).ContactFolders().Post(ctx, requestBody, nil)
|
||||
if err != nil {
|
||||
return nil, graph.Wrap(ctx, err, "creating contact folder")
|
||||
}
|
||||
@ -65,7 +65,7 @@ func (c Contacts) DeleteContainer(
|
||||
return graph.Stack(ctx, err)
|
||||
}
|
||||
|
||||
err = srv.Client().UsersById(user).ContactFoldersById(folderID).Delete(ctx, nil)
|
||||
err = srv.Client().Users().ByUserId(user).ContactFolders().ByContactFolderId(folderID).Delete(ctx, nil)
|
||||
if err != nil {
|
||||
return graph.Stack(ctx, err)
|
||||
}
|
||||
@ -84,7 +84,7 @@ func (c Contacts) GetItem(
|
||||
Headers: buildPreferHeaders(false, immutableIDs),
|
||||
}
|
||||
|
||||
cont, err := c.Stable.Client().UsersById(user).ContactsById(itemID).Get(ctx, options)
|
||||
cont, err := c.Stable.Client().Users().ByUserId(user).Contacts().ByContactId(itemID).Get(ctx, options)
|
||||
if err != nil {
|
||||
return nil, nil, graph.Stack(ctx, err)
|
||||
}
|
||||
@ -101,7 +101,7 @@ func (c Contacts) GetContainerByID(
|
||||
return nil, graph.Wrap(ctx, err, "setting contact folder options")
|
||||
}
|
||||
|
||||
resp, err := c.Stable.Client().UsersById(userID).ContactFoldersById(dirID).Get(ctx, ofcf)
|
||||
resp, err := c.Stable.Client().Users().ByUserId(userID).ContactFolders().ByContactFolderId(dirID).Get(ctx, ofcf)
|
||||
if err != nil {
|
||||
return nil, graph.Stack(ctx, err)
|
||||
}
|
||||
@ -120,7 +120,7 @@ func (c Contacts) EnumerateContainers(
|
||||
fn func(graph.CacheFolder) error,
|
||||
errs *fault.Bus,
|
||||
) error {
|
||||
service, err := c.service()
|
||||
service, err := c.Service()
|
||||
if err != nil {
|
||||
return graph.Stack(ctx, err)
|
||||
}
|
||||
@ -134,8 +134,10 @@ func (c Contacts) EnumerateContainers(
|
||||
|
||||
el := errs.Local()
|
||||
builder := service.Client().
|
||||
UsersById(userID).
|
||||
ContactFoldersById(baseDirID).
|
||||
Users().
|
||||
ByUserId(userID).
|
||||
ContactFolders().
|
||||
ByContactFolderId(baseDirID).
|
||||
ChildFolders()
|
||||
|
||||
for {
|
||||
@ -217,18 +219,18 @@ func NewContactPager(
|
||||
return &contactPager{}, err
|
||||
}
|
||||
|
||||
builder := gs.Client().UsersById(user).ContactFoldersById(directoryID).Contacts()
|
||||
builder := gs.Client().Users().ByUserId(user).ContactFolders().ByContactFolderId(directoryID).Contacts()
|
||||
|
||||
return &contactPager{gs, builder, options}, nil
|
||||
}
|
||||
|
||||
func (p *contactPager) getPage(ctx context.Context) (api.PageLinker, error) {
|
||||
func (p *contactPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) {
|
||||
resp, err := p.builder.Get(ctx, p.options)
|
||||
if err != nil {
|
||||
return nil, graph.Stack(ctx, err)
|
||||
}
|
||||
|
||||
return resp, nil
|
||||
return api.EmptyDeltaLinker[models.Contactable]{PageLinkValuer: resp}, nil
|
||||
}
|
||||
|
||||
func (p *contactPager) setNext(nextLink string) {
|
||||
@ -263,7 +265,7 @@ func getContactDeltaBuilder(
|
||||
directoryID string,
|
||||
options *users.ItemContactFoldersItemContactsDeltaRequestBuilderGetRequestConfiguration,
|
||||
) *users.ItemContactFoldersItemContactsDeltaRequestBuilder {
|
||||
builder := gs.Client().UsersById(user).ContactFoldersById(directoryID).Contacts().Delta()
|
||||
builder := gs.Client().Users().ByUserId(user).ContactFolders().ByContactFolderId(directoryID).Contacts().Delta()
|
||||
return builder
|
||||
}
|
||||
|
||||
@ -301,7 +303,7 @@ func NewContactDeltaPager(
|
||||
return &contactDeltaPager{gs, user, directoryID, builder, options}, nil
|
||||
}
|
||||
|
||||
func (p *contactDeltaPager) getPage(ctx context.Context) (api.PageLinker, error) {
|
||||
func (p *contactDeltaPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) {
|
||||
resp, err := p.builder.Get(ctx, p.options)
|
||||
if err != nil {
|
||||
return nil, graph.Stack(ctx, err)
|
||||
@ -328,7 +330,7 @@ func (c Contacts) GetAddedAndRemovedItemIDs(
|
||||
immutableIDs bool,
|
||||
canMakeDeltaQueries bool,
|
||||
) ([]string, []string, DeltaUpdate, error) {
|
||||
service, err := c.service()
|
||||
service, err := c.Service()
|
||||
if err != nil {
|
||||
return nil, nil, DeltaUpdate{}, graph.Stack(ctx, err)
|
||||
}
|
||||
@ -8,7 +8,6 @@ import (
|
||||
|
||||
"github.com/alcionai/clues"
|
||||
abstractions "github.com/microsoft/kiota-abstractions-go"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/drive"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/drives"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/models"
|
||||
"github.com/microsoftgraph/msgraph-sdk-go/sites"
|
||||
@ -17,6 +16,7 @@ import (
|
||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||
"github.com/alcionai/corso/src/internal/connector/graph"
|
||||
"github.com/alcionai/corso/src/internal/connector/graph/api"
|
||||
onedrive "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
|
||||
"github.com/alcionai/corso/src/pkg/logger"
|
||||
)
|
||||
|
||||
@ -35,8 +35,8 @@ const pageSize = int32(999)
|
||||
type driveItemPager struct {
|
||||
gs graph.Servicer
|
||||
driveID string
|
||||
builder *drives.ItemRootDeltaRequestBuilder
|
||||
options *drives.ItemRootDeltaRequestBuilderGetRequestConfiguration
|
||||
builder *drives.ItemItemsItemDeltaRequestBuilder
|
||||
options *drives.ItemItemsItemDeltaRequestBuilderGetRequestConfiguration
|
||||
}
|
||||
|
||||
func NewItemPager(
|
||||
@ -55,9 +55,9 @@ func NewItemPager(
|
||||
}
|
||||
headers.Add("Prefer", strings.Join(preferHeaderItems, ","))
|
||||
|
||||
requestConfig := &drives.ItemRootDeltaRequestBuilderGetRequestConfiguration{
|
||||
requestConfig := &drives.ItemItemsItemDeltaRequestBuilderGetRequestConfiguration{
|
||||
Headers: headers,
|
||||
QueryParameters: &drives.ItemRootDeltaRequestBuilderGetQueryParameters{
|
||||
QueryParameters: &drives.ItemItemsItemDeltaRequestBuilderGetQueryParameters{
|
||||
Top: &pageCount,
|
||||
Select: fields,
|
||||
},
|
||||
@ -67,11 +67,14 @@ func NewItemPager(
|
||||
gs: gs,
|
||||
driveID: driveID,
|
||||
options: requestConfig,
|
||||
builder: gs.Client().DrivesById(driveID).Root().Delta(),
|
||||
builder: gs.Client().
|
||||
Drives().
|
||||
ByDriveId(driveID).
|
||||
Items().ByDriveItemId(onedrive.RootID).Delta(),
|
||||
}
|
||||
|
||||
if len(link) > 0 {
|
||||
res.builder = drives.NewItemRootDeltaRequestBuilder(link, gs.Adapter())
|
||||
res.builder = drives.NewItemItemsItemDeltaRequestBuilder(link, gs.Adapter())
|
||||
}
|
||||
|
||||
return res
|
||||
@ -92,11 +95,16 @@ func (p *driveItemPager) GetPage(ctx context.Context) (api.DeltaPageLinker, erro
|
||||
}
|
||||
|
||||
func (p *driveItemPager) SetNext(link string) {
|
||||
p.builder = drives.NewItemRootDeltaRequestBuilder(link, p.gs.Adapter())
|
||||
p.builder = drives.NewItemItemsItemDeltaRequestBuilder(link, p.gs.Adapter())
|
||||
}
|
||||
|
||||
func (p *driveItemPager) Reset() {
|
||||
p.builder = p.gs.Client().DrivesById(p.driveID).Root().Delta()
|
||||
p.builder = p.gs.Client().
|
||||
Drives().
|
||||
ByDriveId(p.driveID).
|
||||
Items().
|
||||
ByDriveItemId(onedrive.RootID).
|
||||
Delta()
|
||||
}
|
||||
|
||||
func (p *driveItemPager) ValuesIn(l api.DeltaPageLinker) ([]models.DriveItemable, error) {
|
||||
@ -125,7 +133,7 @@ func NewUserDrivePager(
|
||||
userID: userID,
|
||||
gs: gs,
|
||||
options: requestConfig,
|
||||
builder: gs.Client().UsersById(userID).Drives(),
|
||||
builder: gs.Client().Users().ByUserId(userID).Drives(),
|
||||
}
|
||||
|
||||
return res
|
||||
@ -143,7 +151,7 @@ func (p *userDrivePager) GetPage(ctx context.Context) (api.PageLinker, error) {
|
||||
err error
|
||||
)
|
||||
|
||||
d, err := p.gs.Client().UsersById(p.userID).Drive().Get(ctx, nil)
|
||||
d, err := p.gs.Client().Users().ByUserId(p.userID).Drive().Get(ctx, nil)
|
||||
if err != nil {
|
||||
return nil, graph.Stack(ctx, err)
|
||||
}
|
||||
@ -204,7 +212,7 @@ func NewSiteDrivePager(
|
||||
res := &siteDrivePager{
|
||||
gs: gs,
|
||||
options: requestConfig,
|
||||
builder: gs.Client().SitesById(siteID).Drives(),
|
||||
builder: gs.Client().Sites().BySiteId(siteID).Drives(),
|
||||
}
|
||||
|
||||
return res
|
||||
@ -308,8 +316,10 @@ func GetDriveItem(
|
||||
driveID, itemID string,
|
||||
) (models.DriveItemable, error) {
|
||||
di, err := srv.Client().
|
||||
DrivesById(driveID).
|
||||
ItemsById(itemID).
|
||||
Drives().
|
||||
ByDriveId(driveID).
|
||||
Items().
|
||||
ByDriveItemId(itemID).
|
||||
Get(ctx, nil)
|
||||
if err != nil {
|
||||
return nil, graph.Wrap(ctx, err, "getting item")
|
||||
@ -325,8 +335,10 @@ func GetItemPermission(
|
||||
) (models.PermissionCollectionResponseable, error) {
|
||||
perm, err := service.
|
||||
Client().
|
||||
DrivesById(driveID).
|
||||
ItemsById(itemID).
|
||||
Drives().
|
||||
ByDriveId(driveID).
|
||||
Items().
|
||||
ByDriveItemId(itemID).
|
||||
Permissions().
|
||||
Get(ctx, nil)
|
||||
if err != nil {
|
||||
@ -342,7 +354,8 @@ func GetUsersDrive(
|
||||
user string,
|
||||
) (models.Driveable, error) {
|
||||
d, err := srv.Client().
|
||||
UsersById(user).
|
||||
Users().
|
||||
ByUserId(user).
|
||||
Drive().
|
||||
Get(ctx, nil)
|
||||
if err != nil {
|
||||
@ -358,7 +371,8 @@ func GetSitesDefaultDrive(
|
||||
site string,
|
||||
) (models.Driveable, error) {
|
||||
d, err := srv.Client().
|
||||
SitesById(site).
|
||||
Sites().
|
||||
BySiteId(site).
|
||||
Drive().
|
||||
Get(ctx, nil)
|
||||
if err != nil {
|
||||
@ -373,10 +387,7 @@ func GetDriveRoot(
|
||||
srv graph.Servicer,
|
||||
driveID string,
|
||||
) (models.DriveItemable, error) {
|
||||
root, err := srv.Client().
|
||||
DrivesById(driveID).
|
||||
Root().
|
||||
Get(ctx, nil)
|
||||
root, err := srv.Client().Drives().ByDriveId(driveID).Root().Get(ctx, nil)
|
||||
if err != nil {
|
||||
return nil, graph.Wrap(ctx, err, "getting drive root")
|
||||
}
|
||||
@ -400,7 +411,7 @@ func GetFolderByName(
|
||||
// https://learn.microsoft.com/en-us/graph/onedrive-addressing-driveitems#path-based-addressing
|
||||
// - which allows us to lookup an item by its path relative to the parent ID
|
||||
rawURL := fmt.Sprintf(itemByPathRawURLFmt, driveID, parentFolderID, folder)
|
||||
builder := drive.NewItemsDriveItemItemRequestBuilder(rawURL, service.Adapter())
|
||||
builder := drives.NewItemItemsDriveItemItemRequestBuilder(rawURL, service.Adapter())
|
||||
|
||||
foundItem, err := builder.Get(ctx, nil)
|
||||
if err != nil {
|
||||
@ -423,13 +434,15 @@ func PostItemPermissionUpdate(
|
||||
ctx context.Context,
|
||||
service graph.Servicer,
|
||||
driveID, itemID string,
|
||||
body *drive.ItemsItemInvitePostRequestBody,
|
||||
body *drives.ItemItemsItemInvitePostRequestBody,
|
||||
) (drives.ItemItemsItemInviteResponseable, error) {
|
||||
ctx = graph.ConsumeNTokens(ctx, graph.PermissionsLC)
|
||||
|
||||
itm, err := service.Client().
|
||||
DrivesById(driveID).
|
||||
ItemsById(itemID).
|
||||
Drives().
|
||||
ByDriveId(driveID).
|
||||
Items().
|
||||
ByDriveItemId(itemID).
|
||||
Invite().
|
||||
Post(ctx, body, nil)
|
||||
if err != nil {
|
||||
@ -9,9 +9,9 @@ import (
|
||||
"github.com/stretchr/testify/suite"
|
||||
|
||||
"github.com/alcionai/corso/src/internal/connector/graph"
|
||||
"github.com/alcionai/corso/src/internal/connector/onedrive/api"
|
||||
"github.com/alcionai/corso/src/internal/tester"
|
||||
"github.com/alcionai/corso/src/pkg/account"
|
||||
"github.com/alcionai/corso/src/pkg/services/m365/api"
|
||||
)
|
||||
|
||||
type OneDriveAPISuite struct {
|
||||
@ -46,7 +46,7 @@ func (c Events) CreateCalendar(
|
||||
requestbody := models.NewCalendar()
|
||||
requestbody.SetName(&calendarName)
|
||||
|
||||
mdl, err := c.Stable.Client().UsersById(user).Calendars().Post(ctx, requestbody, nil)
|
||||
mdl, err := c.Stable.Client().Users().ByUserId(user).Calendars().Post(ctx, requestbody, nil)
|
||||
if err != nil {
|
||||
return nil, graph.Wrap(ctx, err, "creating calendar")
|
||||
}
|
||||
@ -67,7 +67,7 @@ func (c Events) DeleteContainer(
return graph.Stack(ctx, err)
}

err = srv.Client().UsersById(user).CalendarsById(calendarID).Delete(ctx, nil)
err = srv.Client().Users().ByUserId(user).Calendars().ByCalendarId(calendarID).Delete(ctx, nil)
if err != nil {
return graph.Stack(ctx, err)
}
@ -79,7 +79,7 @@ func (c Events) GetContainerByID(
ctx context.Context,
userID, containerID string,
) (graph.Container, error) {
service, err := c.service()
service, err := c.Service()
if err != nil {
return nil, graph.Stack(ctx, err)
}
@ -89,7 +89,7 @@ func (c Events) GetContainerByID(
return nil, graph.Wrap(ctx, err, "setting event calendar options")
}

cal, err := service.Client().UsersById(userID).CalendarsById(containerID).Get(ctx, ofc)
cal, err := service.Client().Users().ByUserId(userID).Calendars().ByCalendarId(containerID).Get(ctx, ofc)
if err != nil {
return nil, graph.Stack(ctx, err).WithClues(ctx)
}
@ -111,7 +111,7 @@ func (c Events) GetContainerByName(

ctx = clues.Add(ctx, "calendar_name", name)

resp, err := c.Stable.Client().UsersById(userID).Calendars().Get(ctx, options)
resp, err := c.Stable.Client().Users().ByUserId(userID).Calendars().Get(ctx, options)
if err != nil {
return nil, graph.Stack(ctx, err).WithClues(ctx)
}
@ -152,7 +152,7 @@ func (c Events) GetItem(
}
)

event, err = c.Stable.Client().UsersById(user).EventsById(itemID).Get(ctx, itemOpts)
event, err = c.Stable.Client().Users().ByUserId(user).Events().ByEventId(itemID).Get(ctx, itemOpts)
if err != nil {
return nil, nil, graph.Stack(ctx, err)
}
@ -167,8 +167,10 @@ func (c Events) GetItem(

attached, err := c.LargeItem.
Client().
UsersById(user).
EventsById(itemID).
Users().
ByUserId(user).
Events().
ByEventId(itemID).
Attachments().
Get(ctx, options)
if err != nil {
@ -192,7 +194,7 @@ func (c Events) EnumerateContainers(
fn func(graph.CacheFolder) error,
errs *fault.Bus,
) error {
service, err := c.service()
service, err := c.Service()
if err != nil {
return graph.Stack(ctx, err)
}
@ -203,7 +205,7 @@ func (c Events) EnumerateContainers(
}

el := errs.Local()
builder := service.Client().UsersById(userID).Calendars()
builder := service.Client().Users().ByUserId(userID).Calendars()

for {
if el.Failure() != nil {
@ -278,18 +280,18 @@ func NewEventPager(
Headers: buildPreferHeaders(true, immutableIDs),
}

builder := gs.Client().UsersById(user).CalendarsById(calendarID).Events()
builder := gs.Client().Users().ByUserId(user).Calendars().ByCalendarId(calendarID).Events()

return &eventPager{gs, builder, options}, nil
}

func (p *eventPager) getPage(ctx context.Context) (api.PageLinker, error) {
func (p *eventPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
}

return resp, nil
return api.EmptyDeltaLinker[models.Eventable]{PageLinkValuer: resp}, nil
}

func (p *eventPager) setNext(nextLink string) {
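Both pager flavors now return api.DeltaPageLinker, which lets the shared drain loop drop its type assertion (see the simplification later in this diff). EmptyDeltaLinker adapts a plain page by reporting an empty delta link; its definition is not shown here, so the sketch below is an assumed shape consistent with how it is constructed above:

// Assumed sketch; the real EmptyDeltaLinker lives elsewhere in the repo.
type EmptyDeltaLinker[T any] struct {
	PageLinkValuer[T] // supplies GetOdataNextLink() and GetValue()
}

// GetOdataDeltaLink always reports an empty link, so shared pager code
// sees "no delta token" without needing a type switch.
func (e EmptyDeltaLinker[T]) GetOdataDeltaLink() *string {
	return ptr.To("")
}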
@ -359,7 +361,7 @@ func getEventDeltaBuilder(
return builder
}

func (p *eventDeltaPager) getPage(ctx context.Context) (api.PageLinker, error) {
func (p *eventDeltaPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) {
resp, err := p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
@ -386,7 +388,7 @@ func (c Events) GetAddedAndRemovedItemIDs(
immutableIDs bool,
canMakeDeltaQueries bool,
) ([]string, []string, DeltaUpdate, error) {
service, err := c.service()
service, err := c.Service()
if err != nil {
return nil, nil, DeltaUpdate{}, err
}
src/pkg/services/m365/api/exchange_common.go (new file, 43 lines)
@ -0,0 +1,43 @@
package api

import (
"strings"

"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"

"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
)

// checkIDAndName is a helper function ensuring that the container's
// ID and display name pointers are set before the container is used.
func checkIDAndName(c graph.Container) error {
id := ptr.Val(c.GetId())
if len(id) == 0 {
return clues.New("container missing ID")
}

dn := ptr.Val(c.GetDisplayName())
if len(dn) == 0 {
return clues.New("container missing display name").With("container_id", id)
}

return nil
}

func HasAttachments(body models.ItemBodyable) bool {
if body == nil {
return false
}

if ct, ok := ptr.ValOK(body.GetContentType()); !ok || ct == models.TEXT_BODYTYPE {
return false
}

if body, ok := ptr.ValOK(body.GetContent()); !ok || len(body) == 0 {
return false
}

return strings.Contains(ptr.Val(body.GetContent()), "src=\"cid:")
}
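HasAttachments infers inline attachments by checking an HTML body for a cid: image reference, since text bodies cannot embed them. A brief usage sketch (the message body below is fabricated for illustration):

// Sketch: a minimal HTML body that references an inline (cid:) resource.
body := models.NewItemBody()
ct := models.HTML_BODYTYPE
content := `<p>logo: <img src="cid:logo@corso"></p>`
body.SetContentType(&ct)
body.SetContent(&content)

fmt.Println(HasAttachments(body)) // true: the body embeds a cid: image source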
@ -51,7 +51,7 @@ func (c Mail) CreateMailFolder(
requestBody.SetDisplayName(&folder)
requestBody.SetIsHidden(&isHidden)

mdl, err := c.Stable.Client().UsersById(user).MailFolders().Post(ctx, requestBody, nil)
mdl, err := c.Stable.Client().Users().ByUserId(user).MailFolders().Post(ctx, requestBody, nil)
if err != nil {
return nil, graph.Wrap(ctx, err, "creating mail folder")
}
@ -63,7 +63,7 @@ func (c Mail) CreateMailFolderWithParent(
ctx context.Context,
user, folder, parentID string,
) (models.MailFolderable, error) {
service, err := c.service()
service, err := c.Service()
if err != nil {
return nil, graph.Stack(ctx, err)
}
@ -75,8 +75,10 @@ func (c Mail) CreateMailFolderWithParent(

mdl, err := service.
Client().
UsersById(user).
MailFoldersById(parentID).
Users().
ByUserId(user).
MailFolders().
ByMailFolderId(parentID).
ChildFolders().
Post(ctx, requestBody, nil)
if err != nil {
@ -99,7 +101,12 @@ func (c Mail) DeleteContainer(
return graph.Stack(ctx, err)
}

err = srv.Client().UsersById(user).MailFoldersById(folderID).Delete(ctx, nil)
err = srv.Client().
Users().
ByUserId(user).
MailFolders().
ByMailFolderId(folderID).
Delete(ctx, nil)
if err != nil {
return graph.Stack(ctx, err)
}
@ -111,7 +118,7 @@ func (c Mail) GetContainerByID(
ctx context.Context,
userID, dirID string,
) (graph.Container, error) {
service, err := c.service()
service, err := c.Service()
if err != nil {
return nil, graph.Stack(ctx, err)
}
@ -121,7 +128,12 @@ func (c Mail) GetContainerByID(
return nil, graph.Wrap(ctx, err, "setting mail folder options")
}

resp, err := service.Client().UsersById(userID).MailFoldersById(dirID).Get(ctx, ofmf)
resp, err := service.Client().
Users().
ByUserId(userID).
MailFolders().
ByMailFolderId(dirID).
Get(ctx, ofmf)
if err != nil {
return nil, graph.Stack(ctx, err)
}
@ -147,7 +159,7 @@ func (c Mail) GetItem(
Headers: headers,
}

mail, err := c.Stable.Client().UsersById(user).MessagesById(itemID).Get(ctx, itemOpts)
mail, err := c.Stable.Client().Users().ByUserId(user).Messages().ByMessageId(itemID).Get(ctx, itemOpts)
if err != nil {
return nil, nil, graph.Stack(ctx, err)
}
@ -173,8 +185,10 @@ func (c Mail) GetItem(

attached, err := c.LargeItem.
Client().
UsersById(user).
MessagesById(itemID).
Users().
ByUserId(user).
Messages().
ByMessageId(itemID).
Attachments().
Get(ctx, options)
if err == nil {
@ -204,8 +218,10 @@ func (c Mail) GetItem(

attachments, err := c.LargeItem.
Client().
UsersById(user).
MessagesById(itemID).
Users().
ByUserId(user).
Messages().
ByMessageId(itemID).
Attachments().
Get(ctx, options)
if err != nil {
@ -224,9 +240,12 @@ func (c Mail) GetItem(

att, err := c.Stable.
Client().
UsersById(user).
MessagesById(itemID).
AttachmentsById(ptr.Val(a.GetId())).
Users().
ByUserId(user).
Messages().
ByMessageId(itemID).
Attachments().
ByAttachmentId(ptr.Val(a.GetId())).
Get(ctx, options)
if err != nil {
return nil, nil,
@ -292,7 +311,7 @@ func (c Mail) EnumerateContainers(
fn func(graph.CacheFolder) error,
errs *fault.Bus,
) error {
service, err := c.service()
service, err := c.Service()
if err != nil {
return graph.Stack(ctx, err)
}
@ -380,18 +399,23 @@ func NewMailPager(
return &mailPager{}, err
}

builder := gs.Client().UsersById(user).MailFoldersById(directoryID).Messages()
builder := gs.Client().
Users().
ByUserId(user).
MailFolders().
ByMailFolderId(directoryID).
Messages()

return &mailPager{gs, builder, options}, nil
}

func (p *mailPager) getPage(ctx context.Context) (api.PageLinker, error) {
func (p *mailPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) {
page, err := p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
}

return page, nil
return api.EmptyDeltaLinker[models.Messageable]{PageLinkValuer: page}, nil
}

func (p *mailPager) setNext(nextLink string) {
@ -426,7 +450,14 @@ func getMailDeltaBuilder(
directoryID string,
options *users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration,
) *users.ItemMailFoldersItemMessagesDeltaRequestBuilder {
builder := gs.Client().UsersById(user).MailFoldersById(directoryID).Messages().Delta()
builder := gs.Client().
Users().
ByUserId(user).
MailFolders().
ByMailFolderId(directoryID).
Messages().
Delta()

return builder
}

@ -465,7 +496,7 @@ func NewMailDeltaPager(
return &mailDeltaPager{gs, user, directoryID, builder, options}, nil
}

func (p *mailDeltaPager) getPage(ctx context.Context) (api.PageLinker, error) {
func (p *mailDeltaPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) {
page, err := p.builder.Get(ctx, p.options)
if err != nil {
return nil, graph.Stack(ctx, err)
@ -479,7 +510,13 @@ func (p *mailDeltaPager) setNext(nextLink string) {
}

func (p *mailDeltaPager) reset(ctx context.Context) {
p.builder = p.gs.Client().UsersById(p.user).MailFoldersById(p.directoryID).Messages().Delta()
p.builder = p.gs.Client().
Users().
ByUserId(p.user).
MailFolders().
ByMailFolderId(p.directoryID).
Messages().
Delta()
}

func (p *mailDeltaPager) valuesIn(pl api.PageLinker) ([]getIDAndAddtler, error) {
@ -492,7 +529,7 @@ func (c Mail) GetAddedAndRemovedItemIDs(
immutableIDs bool,
canMakeDeltaQueries bool,
) ([]string, []string, DeltaUpdate, error) {
service, err := c.service()
service, err := c.Service()
if err != nil {
return nil, nil, DeltaUpdate{}, err
}
@ -15,12 +15,12 @@ import (
"github.com/stretchr/testify/suite"

"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/exchange/api/mock"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/mock"
)

type MailAPIUnitSuite struct {
@ -1,10 +1,10 @@
package mock

import (
"github.com/alcionai/corso/src/internal/connector/exchange/api"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/graph/mock"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)

// NewClient produces a new exchange api client that can be
@ -18,7 +18,7 @@ import (

type itemPager interface {
// getPage gets a page with the specified options from graph
getPage(context.Context) (api.PageLinker, error)
getPage(context.Context) (api.DeltaPageLinker, error)
// setNext is used to pass in the next URL received from graph
setNext(string)
// reset is used to clear delta url in delta pagers. When
@ -119,8 +119,6 @@ func getItemsAddedAndRemovedFromContainer(
addedIDs = []string{}
removedIDs = []string{}
deltaURL string
nextLink string
deltaLink string
)

itemCount := 0
@ -160,13 +158,7 @@ func getItemsAddedAndRemovedFromContainer(
}
}

dresp, ok := resp.(api.DeltaPageLinker)
if ok {
nextLink, deltaLink = api.NextAndDeltaLink(dresp)
} else {
nextLink = api.NextLink(resp)
deltaLink = "" // to make sure we don't use an old value
}
nextLink, deltaLink := api.NextAndDeltaLink(resp)

// the deltaLink is kind of like a cursor for overall data state.
// once we run through pages of nextLinks, the last query will
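With every pager returning DeltaPageLinker, the drain loop needs just one unconditional call; EmptyDeltaLinker guarantees an empty deltaLink for plain pagers. A hedged sketch of the resulting loop shape (item collection and surrounding details elided, not taken verbatim from the repo):

// Sketch of the simplified drain loop; accumulation and error wrapping elided.
for {
	resp, err := pager.getPage(ctx)
	if err != nil {
		return graph.Stack(ctx, err)
	}

	// ... accumulate added/removed item IDs from resp ...

	nextLink, deltaLink := api.NextAndDeltaLink(resp)
	if len(nextLink) == 0 {
		deltaURL = deltaLink // cursor for the next incremental pass
		break
	}

	pager.setNext(nextLink)
}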
@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"

"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/graph/api"
"github.com/alcionai/corso/src/internal/tester"
@ -31,8 +32,13 @@ func (v testPagerValue) GetAdditionalData() map[string]any {
type testPage struct{}

func (p testPage) GetOdataNextLink() *string {
next := "" // no next, just one page
return &next
// no next, just one page
return ptr.To("")
}

func (p testPage) GetOdataDeltaLink() *string {
// delta is not tested here
return ptr.To("")
}

var _ itemPager = &testPager{}
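ptr.To collapses the take-address-of-a-literal dance into one call. Its definition is not shown in this diff; the assumed shape is the usual generic helper:

// Assumed sketch of internal/common/ptr.To.
// To returns a pointer to any value; useful for SDK setters and
// test doubles that traffic in *string, *int32, and friends.
func To[T any](v T) *T {
	return &v
}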
@ -45,7 +51,7 @@ type testPager struct {
needsReset bool
}

func (p *testPager) getPage(ctx context.Context) (api.PageLinker, error) {
func (p *testPager) getPage(ctx context.Context) (api.DeltaPageLinker, error) {
if p.errorCode != "" {
ierr := odataerrors.NewMainError()
ierr.SetCode(&p.errorCode)
@ -47,7 +47,7 @@ func (c Sites) GetAll(ctx context.Context, errs *fault.Bus) ([]models.Siteable,
return nil, graph.Wrap(ctx, err, "getting all sites")
}

iter, err := msgraphgocore.NewPageIterator(
iter, err := msgraphgocore.NewPageIterator[models.Siteable](
resp,
service.Adapter(),
models.CreateSiteCollectionResponseFromDiscriminatorValue)
@ -60,12 +60,12 @@ func (c Sites) GetAll(ctx context.Context, errs *fault.Bus) ([]models.Siteable,
el = errs.Local()
)

iterator := func(item any) bool {
iterator := func(item models.Siteable) bool {
if el.Failure() != nil {
return false
}

s, err := validateSite(item)
err := validateSite(item)
if errors.Is(err, errKnownSkippableCase) {
// safe to no-op
return true
@ -76,7 +76,7 @@ func (c Sites) GetAll(ctx context.Context, errs *fault.Bus) ([]models.Siteable,
return true
}

us = append(us, s)
us = append(us, item)

return true
}
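msgraph-sdk-go-core's page iterator is generic in v1, so the callback receives a typed item and the manual item.(models.Siteable) assertion disappears. A hedged sketch of the typed iteration (resp and adapter assumed from an earlier Sites().Get call):

// Sketch only; resp and adapter come from a prior Sites().Get(...) call.
iter, err := msgraphgocore.NewPageIterator[models.Siteable](
	resp,
	adapter,
	models.CreateSiteCollectionResponseFromDiscriminatorValue)
if err != nil {
	return nil, err
}

var sites []models.Siteable

// the callback is typed: no more any-to-Siteable assertion
err = iter.Iterate(ctx, func(site models.Siteable) bool {
	sites = append(sites, site)
	return true // keep paging
})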
@ -109,7 +109,7 @@ func (c Sites) GetByID(ctx context.Context, identifier string) (models.Siteable,
ctx = clues.Add(ctx, "given_site_id", identifier)

if siteIDRE.MatchString(identifier) {
resp, err = c.stable.Client().SitesById(identifier).Get(ctx, nil)
resp, err = c.Stable.Client().Sites().BySiteId(identifier).Get(ctx, nil)
if err != nil {
return nil, graph.Wrap(ctx, err, "getting site by id")
}
@ -136,7 +136,7 @@ func (c Sites) GetByID(ctx context.Context, identifier string) (models.Siteable,
rawURL := fmt.Sprintf(webURLGetTemplate, u.Host, path)

resp, err = sites.
NewItemSitesSiteItemRequestBuilder(rawURL, c.stable.Adapter()).
NewItemSitesSiteItemRequestBuilder(rawURL, c.Stable.Adapter()).
Get(ctx, nil)
if err != nil {
return nil, graph.Wrap(ctx, err, "getting site by weburl")
@ -168,38 +168,33 @@ const personalSitePath = "sharepoint.com/personal/"
// validateSite ensures the item is a Siteable, and contains the necessary
// identifiers that we handle with all sites.
// returns the item as a Siteable model.
func validateSite(item any) (models.Siteable, error) {
m, ok := item.(models.Siteable)
if !ok {
return nil, clues.New(fmt.Sprintf("unexpected model: %T", item))
}

id := ptr.Val(m.GetId())
func validateSite(item models.Siteable) error {
id := ptr.Val(item.GetId())
if len(id) == 0 {
return nil, clues.New("missing ID")
return clues.New("missing ID")
}

wURL := ptr.Val(m.GetWebUrl())
wURL := ptr.Val(item.GetWebUrl())
if len(wURL) == 0 {
return nil, clues.New("missing webURL").With("site_id", id) // TODO: pii
return clues.New("missing webURL").With("site_id", id) // TODO: pii
}

// personal (ie: oneDrive) sites have to be filtered out server-side.
if strings.Contains(wURL, personalSitePath) {
return nil, clues.Stack(errKnownSkippableCase).
return clues.Stack(errKnownSkippableCase).
With("site_id", id, "site_web_url", wURL) // TODO: pii
}

name := ptr.Val(m.GetDisplayName())
name := ptr.Val(item.GetDisplayName())
if len(name) == 0 {
// the built-in site at "https://{tenant-domain}/search" never has a name.
if strings.HasSuffix(wURL, "/search") {
return nil, clues.Stack(errKnownSkippableCase).
return clues.Stack(errKnownSkippableCase).
With("site_id", id, "site_web_url", wURL) // TODO: pii
}

return nil, clues.New("missing site display name").With("site_id", id)
return clues.New("missing site display name").With("site_id", id)
}

return m, nil
return nil
}
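Because the iterator item is already typed, validateSite now only reports problems; the caller appends the item itself. A short sketch of the new call site, consistent with the GetAll hunk above (error-bus names assumed from that hunk):

// Sketch: the typed iterator body after the refactor.
iterator := func(item models.Siteable) bool {
	err := validateSite(item)
	if errors.Is(err, errKnownSkippableCase) {
		return true // personal or unnamed built-in sites: skip quietly
	} else if err != nil {
		el.AddRecoverable(graph.Wrap(ctx, err, "validating site"))
		return true
	}

	us = append(us, item)

	return true
}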
@ -33,16 +33,10 @@ func (suite *SitesUnitSuite) TestValidateSite() {

tests := []struct {
name string
args any
want models.Siteable
args models.Siteable
errCheck assert.ErrorAssertionFunc
errIsSkippable bool
}{
{
name: "Invalid type",
args: string("invalid type"),
errCheck: assert.Error,
},
{
name: "No ID",
args: models.NewSite(),
|
||||
{
|
||||
name: "Valid Site",
|
||||
args: site,
|
||||
want: site,
|
||||
errCheck: assert.NoError,
|
||||
},
|
||||
}
|
||||
@ -100,14 +93,12 @@ func (suite *SitesUnitSuite) TestValidateSite() {
suite.Run(test.name, func() {
t := suite.T()

got, err := validateSite(test.args)
err := validateSite(test.args)
test.errCheck(t, err, clues.ToCore(err))

if test.errIsSkippable {
assert.ErrorIs(t, err, errKnownSkippableCase)
}

assert.Equal(t, test.want, got)
})
}
}
@ -171,7 +171,7 @@ func (c Users) GetAll(ctx context.Context, errs *fault.Bus) ([]models.Userable,
return nil, graph.Wrap(ctx, err, "getting all users")
}

iter, err := msgraphgocore.NewPageIterator(
iter, err := msgraphgocore.NewPageIterator[models.Userable](
resp,
service.Adapter(),
models.CreateUserCollectionResponseFromDiscriminatorValue)
@ -184,16 +184,16 @@ func (c Users) GetAll(ctx context.Context, errs *fault.Bus) ([]models.Userable,
el = errs.Local()
)

iterator := func(item any) bool {
iterator := func(item models.Userable) bool {
if el.Failure() != nil {
return false
}

u, err := validateUser(item)
err := validateUser(item)
if err != nil {
el.AddRecoverable(graph.Wrap(ctx, err, "validating user"))
} else {
us = append(us, u)
us = append(us, item)
}

return true
@ -214,7 +214,7 @@ func (c Users) GetByID(ctx context.Context, identifier string) (models.Userable,
err error
)

resp, err = c.stable.Client().UsersById(identifier).Get(ctx, nil)
resp, err = c.Stable.Client().Users().ByUserId(identifier).Get(ctx, nil)

if err != nil {
return nil, graph.Wrap(ctx, err, "getting user")
@ -315,9 +315,11 @@ func (c Users) GetInfo(ctx context.Context, userID string) (*UserInfo, error) {
Top: ptr.To[int32](1), // just one item is enough
},
}
_, err = c.stable.Client().
UsersById(userID).
MailFoldersById(ptr.Val(mf.GetId())).
_, err = c.Stable.Client().
Users().
ByUserId(userID).
MailFolders().
ByMailFolderId(ptr.Val(mf.GetId())).
Messages().
Delta().
Get(ctx, options)
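The GetInfo hunk probes whether a mailbox permits delta queries by requesting a single message from the delta endpoint; Top: 1 keeps the probe cheap. A hedged sketch of the probe; the QueryParameters type name is an assumption following the SDK's usual naming, not shown in this diff:

// Sketch only; the exact QueryParameters type name is assumed from SDK conventions.
options := &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration{
	QueryParameters: &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetQueryParameters{
		Top: ptr.To[int32](1), // one item is enough to test delta support
	},
}

_, err := client.Users().
	ByUserId(userID).
	MailFolders().
	ByMailFolderId(folderID).
	Messages().
	Delta().
	Get(ctx, options)

// simplified: repo code likely inspects the error code before concluding
canMakeDeltaQueries := err == nil
_ = canMakeDeltaQueries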
@ -338,7 +340,7 @@ func (c Users) GetMailFolders(
userID string,
options users.ItemMailFoldersRequestBuilderGetRequestConfiguration,
) (models.MailFolderCollectionResponseable, error) {
mailFolders, err := c.stable.Client().UsersById(userID).MailFolders().Get(ctx, &options)
mailFolders, err := c.Stable.Client().Users().ByUserId(userID).MailFolders().Get(ctx, &options)
if err != nil {
return nil, graph.Wrap(ctx, err, "getting MailFolders")
}
@ -348,7 +350,7 @@ func (c Users) GetMailFolders(

// TODO: remove when drive api goes into this package
func (c Users) GetDrives(ctx context.Context, userID string) (models.DriveCollectionResponseable, error) {
drives, err := c.stable.Client().UsersById(userID).Drives().Get(ctx, nil)
drives, err := c.Stable.Client().Users().ByUserId(userID).Drives().Get(ctx, nil)
if err != nil {
return nil, graph.Wrap(ctx, err, "getting drives")
}
@ -362,7 +364,7 @@ func (c Users) getMailboxSettings(
) (MailboxInfo, error) {
var (
rawURL = fmt.Sprintf("https://graph.microsoft.com/v1.0/users/%s/mailboxSettings", userID)
adapter = c.stable.Adapter()
adapter = c.Stable.Adapter()
mi = MailboxInfo{
ErrGetMailBoxSetting: []error{},
}
@ -497,22 +499,16 @@ func appendIfErr(errs []error, err error) []error {

// validateUser ensures the item is a Userable, and contains the necessary
// identifiers that we handle with all users.
// returns the item as a Userable model.
func validateUser(item any) (models.Userable, error) {
m, ok := item.(models.Userable)
if !ok {
return nil, clues.New(fmt.Sprintf("unexpected model: %T", item))
func validateUser(item models.Userable) error {
if item.GetId() == nil {
return clues.New("missing ID")
}

if m.GetId() == nil {
return nil, clues.New("missing ID")
if item.GetUserPrincipalName() == nil {
return clues.New("missing principalName")
}

if m.GetUserPrincipalName() == nil {
return nil, clues.New("missing principalName")
}

return m, nil
return nil
}

func toString(ctx context.Context, key string, data map[string]any) (string, error) {
@ -30,15 +30,9 @@ func (suite *UsersUnitSuite) TestValidateUser() {

tests := []struct {
name string
args interface{}
want models.Userable
args models.Userable
errCheck assert.ErrorAssertionFunc
}{
{
name: "Invalid type",
args: string("invalid type"),
errCheck: assert.Error,
},
{
name: "No ID",
args: models.NewUser(),
@ -56,7 +50,6 @@ func (suite *UsersUnitSuite) TestValidateUser() {
{
name: "Valid User",
args: user,
want: user,
errCheck: assert.NoError,
},
}
@ -64,10 +57,8 @@ func (suite *UsersUnitSuite) TestValidateUser() {
suite.Run(tt.name, func() {
t := suite.T()

got, err := validateUser(tt.args)
err := validateUser(tt.args)
tt.errCheck(t, err, clues.ToCore(err))

assert.Equal(t, tt.want, got)
})
}
}
website/package-lock.json (generated, 1162 lines; diff suppressed because it is too large)
@ -14,9 +14,9 @@
"write-heading-ids": "docusaurus write-heading-ids"
},
"dependencies": {
"@docusaurus/core": "2.4.0",
"@docusaurus/core": "2.4.1",
"@docusaurus/plugin-google-gtag": "^2.4.0",
"@docusaurus/preset-classic": "2.4.0",
"@docusaurus/preset-classic": "2.4.1",
"@loadable/component": "^5.15.3",
"@mdx-js/react": "^1.6.22",
"animate.css": "^4.1.1",
@ -36,7 +36,7 @@
"wow.js": "^1.2.2"
},
"devDependencies": {
"@docusaurus/module-type-aliases": "2.4.0",
"@docusaurus/module-type-aliases": "2.4.1",
"@iconify/react": "^4.1.0",
"autoprefixer": "^10.4.14",
"postcss": "^8.4.23",