use query params in all handlers

Backup handlers all reuse the same inputs for the tenant ID and
protected resource ID. In some cases we store those values on the
handler, in other cases we don't. This PR normalizes backup handler
design by expecting a common structure (graph.QueryParams) that holds
the resource and tenant IDs.
ryanfkeepers 2024-01-26 12:48:16 -07:00
parent befec6d341
commit e1ed5275dc
34 changed files with 391 additions and 223 deletions
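As a quick reference for the diffs below: each backup handler now carries a graph.QueryParams, which pairs the tenant ID with the protected resource (an idname provider exposing ID() and Name()). A minimal sketch of wiring one up, with placeholder IDs, assuming an api client (ac) and a OneDrive scope are already in scope:

qp := graph.QueryParams{
	TenantID:          "tenant-id",                                       // placeholder
	ProtectedResource: idname.NewProvider("resource-id", "resource-name"), // placeholder id/name
}

// Handlers read both values from qp rather than taking them as call arguments.
h := NewUserDriveBackupHandler(qp, ac.Drives(), scope)

// Path builders no longer need the tenant passed in.
prefix, err := h.PathPrefix("drive-id")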

View File

@ -311,7 +311,7 @@ func (c *Collections) Get(
} }
// Enumerate drives for the specified resourceOwner // Enumerate drives for the specified resourceOwner
pager := c.handler.NewDrivePager(c.protectedResource.ID(), nil) pager := c.handler.NewDrivePager(nil)
drives, err := api.GetAllDrives(ctx, pager) drives, err := api.GetAllDrives(ctx, pager)
if err != nil { if err != nil {
@ -439,7 +439,7 @@ func (c *Collections) Get(
continue continue
} }
p, err := c.handler.CanonicalPath(odConsts.DriveFolderPrefixBuilder(driveID), c.tenantID) p, err := c.handler.CanonicalPath(odConsts.DriveFolderPrefixBuilder(driveID))
if err != nil { if err != nil {
return nil, false, clues.WrapWC(ictx, err, "making exclude prefix") return nil, false, clues.WrapWC(ictx, err, "making exclude prefix")
} }
@ -504,7 +504,7 @@ func (c *Collections) Get(
// generate tombstones for drives that were removed. // generate tombstones for drives that were removed.
for driveID := range driveTombstones { for driveID := range driveTombstones {
prevDrivePath, err := c.handler.PathPrefix(c.tenantID, driveID) prevDrivePath, err := c.handler.PathPrefix(driveID)
if err != nil { if err != nil {
return nil, false, clues.WrapWC(ctx, err, "making drive tombstone for previous path").Label(count.BadPathPrefix) return nil, false, clues.WrapWC(ctx, err, "making drive tombstone for previous path").Label(count.BadPathPrefix)
} }
@ -532,7 +532,7 @@ func (c *Collections) Get(
alertIfPrevPathsHaveCollisions(ctx, driveIDToPrevPaths, c.counter, errs) alertIfPrevPathsHaveCollisions(ctx, driveIDToPrevPaths, c.counter, errs)
// add metadata collections // add metadata collections
pathPrefix, err := c.handler.MetadataPathPrefix(c.tenantID) pathPrefix, err := c.handler.MetadataPathPrefix()
if err != nil { if err != nil {
// It's safe to return here because the logic for starting an // It's safe to return here because the logic for starting an
// incremental backup should eventually find that the metadata files are // incremental backup should eventually find that the metadata files are
@ -729,7 +729,7 @@ func (c *Collections) getCollectionPath(
pb = path.Builder{}.Append(path.Split(ptr.Val(item.GetParentReference().GetPath()))...) pb = path.Builder{}.Append(path.Split(ptr.Val(item.GetParentReference().GetPath()))...)
} }
collectionPath, err := c.handler.CanonicalPath(pb, c.tenantID) collectionPath, err := c.handler.CanonicalPath(pb)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "making item path") return nil, clues.Wrap(err, "making item path")
} }

View File

@ -2639,7 +2639,7 @@ func (suite *CollectionsUnitSuite) TestGet() {
prevDelta := "prev-delta" prevDelta := "prev-delta"
pathPrefix, err := mbh.MetadataPathPrefix(tenant) pathPrefix, err := mbh.MetadataPathPrefix()
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
mc, err := graph.MakeMetadataCollection( mc, err := graph.MakeMetadataCollection(

View File

@ -68,7 +68,7 @@ func (c *Collections) getTree(
driveTombstones[driveID] = struct{}{} driveTombstones[driveID] = struct{}{}
} }
pager := c.handler.NewDrivePager(c.protectedResource.ID(), nil) pager := c.handler.NewDrivePager(nil)
drives, err := api.GetAllDrives(ctx, pager) drives, err := api.GetAllDrives(ctx, pager)
if err != nil { if err != nil {
@ -176,7 +176,7 @@ func (c *Collections) makeDriveCollections(
) ([]data.BackupCollection, map[string]string, pagers.DeltaUpdate, error) { ) ([]data.BackupCollection, map[string]string, pagers.DeltaUpdate, error) {
driveID := ptr.Val(drv.GetId()) driveID := ptr.Val(drv.GetId())
ppfx, err := c.handler.PathPrefix(c.tenantID, driveID) ppfx, err := c.handler.PathPrefix(driveID)
if err != nil { if err != nil {
return nil, nil, pagers.DeltaUpdate{}, clues.Wrap(err, "generating backup tree prefix") return nil, nil, pagers.DeltaUpdate{}, clues.Wrap(err, "generating backup tree prefix")
} }
@ -228,7 +228,7 @@ func (c *Collections) makeDriveCollections(
// if a reset did occur, the collections should already be marked as // if a reset did occur, the collections should already be marked as
// "do not merge", therefore everything will get processed as a new addition. // "do not merge", therefore everything will get processed as a new addition.
if !tree.hadReset && len(prevDeltaLink) > 0 { if !tree.hadReset && len(prevDeltaLink) > 0 {
p, err := c.handler.CanonicalPath(odConsts.DriveFolderPrefixBuilder(driveID), c.tenantID) p, err := c.handler.CanonicalPath(odConsts.DriveFolderPrefixBuilder(driveID))
if err != nil { if err != nil {
err = clues.WrapWC(ctx, err, "making canonical path for item exclusions") err = clues.WrapWC(ctx, err, "making canonical path for item exclusions")
return nil, nil, pagers.DeltaUpdate{}, err return nil, nil, pagers.DeltaUpdate{}, err
@ -552,7 +552,7 @@ func (c *Collections) makeFolderCollectionPath(
) (path.Path, error) { ) (path.Path, error) {
if folder.GetRoot() != nil { if folder.GetRoot() != nil {
pb := odConsts.DriveFolderPrefixBuilder(driveID) pb := odConsts.DriveFolderPrefixBuilder(driveID)
collectionPath, err := c.handler.CanonicalPath(pb, c.tenantID) collectionPath, err := c.handler.CanonicalPath(pb)
return collectionPath, clues.WrapWC(ctx, err, "making canonical root path").OrNil() return collectionPath, clues.WrapWC(ctx, err, "making canonical root path").OrNil()
} }
@ -571,7 +571,7 @@ func (c *Collections) makeFolderCollectionPath(
folderPath := path.Split(ptr.Val(folder.GetParentReference().GetPath())) folderPath := path.Split(ptr.Val(folder.GetParentReference().GetPath()))
folderPath = append(folderPath, name) folderPath = append(folderPath, name)
pb := path.Builder{}.Append(folderPath...) pb := path.Builder{}.Append(folderPath...)
collectionPath, err := c.handler.CanonicalPath(pb, c.tenantID) collectionPath, err := c.handler.CanonicalPath(pb)
return collectionPath, clues.WrapWC(ctx, err, "making folder collection path").OrNil() return collectionPath, clues.WrapWC(ctx, err, "making folder collection path").OrNil()
} }
@ -684,7 +684,7 @@ func (c *Collections) makeDriveTombstones(
break break
} }
prevDrivePath, err := c.handler.PathPrefix(c.tenantID, driveID) prevDrivePath, err := c.handler.PathPrefix(driveID)
if err != nil { if err != nil {
err = clues.WrapWC(ctx, err, "making drive tombstone for previous path").Label(count.BadPathPrefix) err = clues.WrapWC(ctx, err, "making drive tombstone for previous path").Label(count.BadPathPrefix)
el.AddRecoverable(ctx, err) el.AddRecoverable(ctx, err)
@ -712,7 +712,7 @@ func (c *Collections) makeMetadataCollections(
) []data.BackupCollection { ) []data.BackupCollection {
colls := []data.BackupCollection{} colls := []data.BackupCollection{}
pathPrefix, err := c.handler.MetadataPathPrefix(c.tenantID) pathPrefix, err := c.handler.MetadataPathPrefix()
if err != nil { if err != nil {
logger.CtxErr(ctx, err).Info("making metadata collection path prefixes") logger.CtxErr(ctx, err).Info("making metadata collection path prefixes")

View File

@ -10,6 +10,7 @@ import (
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/custom" "github.com/alcionai/corso/src/pkg/services/m365/custom"
) )
@ -17,51 +18,51 @@ var _ BackupHandler = &groupBackupHandler{}
type groupBackupHandler struct { type groupBackupHandler struct {
siteBackupHandler siteBackupHandler
groupID string groupQP graph.QueryParams
scope selectors.GroupsScope scope selectors.GroupsScope
} }
func NewGroupBackupHandler( func NewGroupBackupHandler(
groupID, siteID string, groupQP, siteQP graph.QueryParams,
ac api.Drives, ac api.Drives,
scope selectors.GroupsScope, scope selectors.GroupsScope,
) groupBackupHandler { ) groupBackupHandler {
return groupBackupHandler{ return groupBackupHandler{
siteBackupHandler{ siteBackupHandler: siteBackupHandler{
baseSiteHandler: baseSiteHandler{ baseSiteHandler: baseSiteHandler{
qp: siteQP,
ac: ac, ac: ac,
}, },
siteID: siteID,
// Not adding scope here. Anything that needs scope has to // Not adding scope here. Anything that needs scope has to
// be from group handler // be from group handler
service: path.GroupsService, service: path.GroupsService,
}, },
groupID, groupQP: groupQP,
scope, scope: scope,
} }
} }
func (h groupBackupHandler) PathPrefix( func (h groupBackupHandler) PathPrefix(
tenantID, driveID string, driveID string,
) (path.Path, error) { ) (path.Path, error) {
// TODO: move tenantID to struct // TODO: move tenantID to struct
return path.Build( return path.Build(
tenantID, h.groupQP.TenantID,
h.groupID, h.groupQP.ProtectedResource.ID(),
h.service, h.service,
path.LibrariesCategory, path.LibrariesCategory,
false, false,
odConsts.SitesPathDir, odConsts.SitesPathDir,
h.siteID, h.siteBackupHandler.qp.ProtectedResource.ID(),
odConsts.DrivesPathDir, odConsts.DrivesPathDir,
driveID, driveID,
odConsts.RootPathDir) odConsts.RootPathDir)
} }
func (h groupBackupHandler) MetadataPathPrefix(tenantID string) (path.Path, error) { func (h groupBackupHandler) MetadataPathPrefix() (path.Path, error) {
p, err := path.BuildMetadata( p, err := path.BuildMetadata(
tenantID, h.groupQP.TenantID,
h.groupID, h.groupQP.ProtectedResource.ID(),
h.service, h.service,
path.LibrariesCategory, path.LibrariesCategory,
false) false)
@ -69,7 +70,7 @@ func (h groupBackupHandler) MetadataPathPrefix(tenantID string) (path.Path, erro
return nil, clues.Wrap(err, "making metadata path") return nil, clues.Wrap(err, "making metadata path")
} }
p, err = p.Append(false, odConsts.SitesPathDir, h.siteID) p, err = p.Append(false, odConsts.SitesPathDir, h.siteBackupHandler.qp.ProtectedResource.ID())
if err != nil { if err != nil {
return nil, clues.Wrap(err, "appending site id to metadata path") return nil, clues.Wrap(err, "appending site id to metadata path")
} }
@ -79,27 +80,26 @@ func (h groupBackupHandler) MetadataPathPrefix(tenantID string) (path.Path, erro
func (h groupBackupHandler) CanonicalPath( func (h groupBackupHandler) CanonicalPath(
folders *path.Builder, folders *path.Builder,
tenantID string,
) (path.Path, error) { ) (path.Path, error) {
return folders.ToDataLayerPath( return folders.ToDataLayerPath(
tenantID, h.groupQP.TenantID,
h.groupID, h.groupQP.ProtectedResource.ID(),
h.service, h.service,
path.LibrariesCategory, path.LibrariesCategory,
false, false,
odConsts.SitesPathDir, odConsts.SitesPathDir,
h.siteID) h.siteBackupHandler.qp.ProtectedResource.ID())
} }
func (h groupBackupHandler) SitePathPrefix(tenantID string) (path.Path, error) { func (h groupBackupHandler) SitePathPrefix() (path.Path, error) {
return path.Build( return path.Build(
tenantID, h.groupQP.TenantID,
h.groupID, h.groupQP.ProtectedResource.ID(),
h.service, h.service,
path.LibrariesCategory, path.LibrariesCategory,
false, false,
odConsts.SitesPathDir, odConsts.SitesPathDir,
h.siteID) h.siteBackupHandler.qp.ProtectedResource.ID())
} }
func (h groupBackupHandler) AugmentItemInfo( func (h groupBackupHandler) AugmentItemInfo(

View File

@ -7,9 +7,11 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
) )
type GroupBackupHandlerUnitSuite struct { type GroupBackupHandlerUnitSuite struct {
@ -37,9 +39,17 @@ func (suite *GroupBackupHandlerUnitSuite) TestPathPrefix() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
h := NewGroupBackupHandler(resourceOwner, "site-id", api.Drives{}, nil) groupQP := graph.QueryParams{
TenantID: tenantID,
ProtectedResource: idname.NewProvider(resourceOwner, resourceOwner),
}
siteQP := graph.QueryParams{
TenantID: tenantID,
ProtectedResource: idname.NewProvider("site-id", "site-id"),
}
h := NewGroupBackupHandler(groupQP, siteQP, api.Drives{}, nil)
result, err := h.PathPrefix(tenantID, "drive-id") result, err := h.PathPrefix("drive-id")
test.expectErr(t, err, clues.ToCore(err)) test.expectErr(t, err, clues.ToCore(err))
if result != nil { if result != nil {
@ -66,9 +76,17 @@ func (suite *GroupBackupHandlerUnitSuite) TestSitePathPrefix() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
h := NewGroupBackupHandler(resourceOwner, "site-id", api.Drives{}, nil) groupQP := graph.QueryParams{
TenantID: tenantID,
ProtectedResource: idname.NewProvider(resourceOwner, resourceOwner),
}
siteQP := graph.QueryParams{
TenantID: tenantID,
ProtectedResource: idname.NewProvider("site-id", "site-id"),
}
h := NewGroupBackupHandler(groupQP, siteQP, api.Drives{}, nil)
result, err := h.SitePathPrefix(tenantID) result, err := h.SitePathPrefix()
test.expectErr(t, err, clues.ToCore(err)) test.expectErr(t, err, clues.ToCore(err))
if result != nil { if result != nil {
@ -95,9 +113,17 @@ func (suite *GroupBackupHandlerUnitSuite) TestMetadataPathPrefix() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
h := NewGroupBackupHandler(resourceOwner, "site-id", api.Drives{}, nil) groupQP := graph.QueryParams{
TenantID: tenantID,
ProtectedResource: idname.NewProvider(resourceOwner, resourceOwner),
}
siteQP := graph.QueryParams{
TenantID: tenantID,
ProtectedResource: idname.NewProvider("site-id", "site-id"),
}
h := NewGroupBackupHandler(groupQP, siteQP, api.Drives{}, nil)
result, err := h.MetadataPathPrefix(tenantID) result, err := h.MetadataPathPrefix()
test.expectErr(t, err, clues.ToCore(err)) test.expectErr(t, err, clues.ToCore(err))
if result != nil { if result != nil {
@ -124,10 +150,18 @@ func (suite *GroupBackupHandlerUnitSuite) TestCanonicalPath() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
h := NewGroupBackupHandler(resourceOwner, "site-id", api.Drives{}, nil) groupQP := graph.QueryParams{
TenantID: tenantID,
ProtectedResource: idname.NewProvider(resourceOwner, resourceOwner),
}
siteQP := graph.QueryParams{
TenantID: tenantID,
ProtectedResource: idname.NewProvider("site-id", "site-id"),
}
h := NewGroupBackupHandler(groupQP, siteQP, api.Drives{}, nil)
p := path.Builder{}.Append("prefix") p := path.Builder{}.Append("prefix")
result, err := h.CanonicalPath(p, tenantID) result, err := h.CanonicalPath(p)
test.expectErr(t, err, clues.ToCore(err)) test.expectErr(t, err, clues.ToCore(err))
if result != nil { if result != nil {

View File

@ -40,19 +40,23 @@ type BackupHandler interface {
GetItemPermissioner GetItemPermissioner
GetItemer GetItemer
GetRootFolderer GetRootFolderer
NewDrivePagerer
EnumerateDriveItemsDeltaer EnumerateDriveItemsDeltaer
// NewDrivePager produces a pager that fetches all drives for the
// protected resource in the handler. Differs from the restore
// drive pager in that it does not accept a resource parameter.
NewDrivePager(fields []string) pagers.NonDeltaHandler[models.Driveable]
// PathPrefix constructs the service and category specific path prefix for // PathPrefix constructs the service and category specific path prefix for
// the given values. // the given values.
PathPrefix(tenantID, driveID string) (path.Path, error) PathPrefix(driveID string) (path.Path, error)
// MetadataPathPrefix returns the prefix path for metadata // MetadataPathPrefix returns the prefix path for metadata
MetadataPathPrefix(tenantID string) (path.Path, error) MetadataPathPrefix() (path.Path, error)
// CanonicalPath constructs the service and category specific path for // CanonicalPath constructs the service and category specific path for
// the given values. // the given values.
CanonicalPath(folders *path.Builder, tenantID string) (path.Path, error) CanonicalPath(folders *path.Builder) (path.Path, error)
// ServiceCat returns the service and category used by this implementation. // ServiceCat returns the service and category used by this implementation.
ServiceCat() (path.ServiceType, path.CategoryType) ServiceCat() (path.ServiceType, path.CategoryType)
@ -67,10 +71,6 @@ type BackupHandler interface {
IncludesDir(dir string) bool IncludesDir(dir string) bool
} }
type NewDrivePagerer interface {
NewDrivePager(resourceOwner string, fields []string) pagers.NonDeltaHandler[models.Driveable]
}
type GetItemPermissioner interface { type GetItemPermissioner interface {
GetItemPermission( GetItemPermission(
ctx context.Context, ctx context.Context,
@ -104,13 +104,14 @@ type RestoreHandler interface {
GetItemsByCollisionKeyser GetItemsByCollisionKeyser
GetRootFolderer GetRootFolderer
ItemInfoAugmenter ItemInfoAugmenter
NewDrivePagerer
NewItemContentUploader NewItemContentUploader
PostDriver PostDriver
PostItemInContainerer PostItemInContainerer
DeleteItemPermissioner DeleteItemPermissioner
UpdateItemPermissioner UpdateItemPermissioner
UpdateItemLinkSharer UpdateItemLinkSharer
NewDrivePagerer
} }
type DeleteItemer interface { type DeleteItemer interface {
@ -195,3 +196,13 @@ type GetRootFolderer interface {
driveID string, driveID string,
) (models.DriveItemable, error) ) (models.DriveItemable, error)
} }
// NewDrivePagerer produces a pager that fetches all drives for the
// given protected resource. Differs from the backup
// drive pager in that it accepts a resource parameter.
type NewDrivePagerer interface {
NewDrivePager(
protectedResourceID string,
fields []string,
) pagers.NonDeltaHandler[models.Driveable]
}
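
To illustrate the pager split above (a sketch under the same assumptions as before; qp, ac, and scope are presumed in scope): the backup-side pager is bound to the resource stored in the handler, while the restore-side pager still takes the resource ID explicitly.

// Backup: the protected resource comes from qp inside the handler.
bh := NewUserDriveBackupHandler(qp, ac.Drives(), scope)
backupPager := bh.NewDrivePager(nil) // nil field selection, as the callers above use

// Restore: the resource ID is still an explicit argument.
rh := NewUserDriveRestoreHandler(ac)
restorePager := rh.NewDrivePager("resource-id", nil) // placeholder resource ID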

View File

@ -717,6 +717,8 @@ type mockBackupHandler[T any] struct {
GetErrs []error GetErrs []error
RootFolder models.DriveItemable RootFolder models.DriveItemable
TenantID string
} }
func stubRootFolder() models.DriveItemable { func stubRootFolder() models.DriveItemable {
@ -752,6 +754,7 @@ func defaultOneDriveBH(resourceOwner string) *mockBackupHandler[models.DriveItem
GetResps: []*http.Response{nil}, GetResps: []*http.Response{nil},
GetErrs: []error{clues.New("not defined")}, GetErrs: []error{clues.New("not defined")},
RootFolder: stubRootFolder(), RootFolder: stubRootFolder(),
TenantID: "tenantID",
} }
} }
@ -777,6 +780,7 @@ func defaultSharePointBH(resourceOwner string) *mockBackupHandler[models.DriveIt
GetResps: []*http.Response{nil}, GetResps: []*http.Response{nil},
GetErrs: []error{clues.New("not defined")}, GetErrs: []error{clues.New("not defined")},
RootFolder: stubRootFolder(), RootFolder: stubRootFolder(),
TenantID: "tenantID",
} }
} }
@ -790,8 +794,8 @@ func defaultDriveBHWith(
return mbh return mbh
} }
func (h mockBackupHandler[T]) PathPrefix(tID, driveID string) (path.Path, error) { func (h mockBackupHandler[T]) PathPrefix(driveID string) (path.Path, error) {
pp, err := h.PathPrefixFn(tID, h.ProtectedResource.ID(), driveID) pp, err := h.PathPrefixFn(h.TenantID, h.ProtectedResource.ID(), driveID)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -799,8 +803,8 @@ func (h mockBackupHandler[T]) PathPrefix(tID, driveID string) (path.Path, error)
return pp, h.PathPrefixErr return pp, h.PathPrefixErr
} }
func (h mockBackupHandler[T]) MetadataPathPrefix(tID string) (path.Path, error) { func (h mockBackupHandler[T]) MetadataPathPrefix() (path.Path, error) {
pp, err := h.MetadataPathPrefixFn(tID, h.ProtectedResource.ID()) pp, err := h.MetadataPathPrefixFn(h.TenantID, h.ProtectedResource.ID())
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -808,8 +812,8 @@ func (h mockBackupHandler[T]) MetadataPathPrefix(tID string) (path.Path, error)
return pp, h.MetadataPathPrefixErr return pp, h.MetadataPathPrefixErr
} }
func (h mockBackupHandler[T]) CanonicalPath(pb *path.Builder, tID string) (path.Path, error) { func (h mockBackupHandler[T]) CanonicalPath(pb *path.Builder) (path.Path, error) {
cp, err := h.CanonPathFn(pb, tID, h.ProtectedResource.ID()) cp, err := h.CanonPathFn(pb, h.TenantID, h.ProtectedResource.ID())
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -821,7 +825,7 @@ func (h mockBackupHandler[T]) ServiceCat() (path.ServiceType, path.CategoryType)
return h.Service, h.Category return h.Service, h.Category
} }
func (h mockBackupHandler[T]) NewDrivePager(string, []string) pagers.NonDeltaHandler[models.Driveable] { func (h mockBackupHandler[T]) NewDrivePager([]string) pagers.NonDeltaHandler[models.Driveable] {
return h.DriveItemEnumeration.drivePager() return h.DriveItemEnumeration.drivePager()
} }

View File

@ -265,10 +265,13 @@ func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() {
colls := NewCollections( colls := NewCollections(
&userDriveBackupHandler{ &userDriveBackupHandler{
baseUserDriveHandler: baseUserDriveHandler{ baseUserDriveHandler: baseUserDriveHandler{
qp: graph.QueryParams{
ProtectedResource: idname.NewProvider(suite.userID, suite.userID),
TenantID: suite.creds.AzureTenantID,
},
ac: suite.ac.Drives(), ac: suite.ac.Drives(),
}, },
userID: test.user, scope: scope,
scope: scope,
}, },
creds.AzureTenantID, creds.AzureTenantID,
idname.NewProvider(test.user, test.user), idname.NewProvider(test.user, test.user),

View File

@ -14,6 +14,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str" "github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -117,10 +118,13 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
bh := &userDriveBackupHandler{ bh := &userDriveBackupHandler{
baseUserDriveHandler: baseUserDriveHandler{ baseUserDriveHandler: baseUserDriveHandler{
qp: graph.QueryParams{
ProtectedResource: idname.NewProvider(suite.user, suite.user),
TenantID: suite.service.credentials.AzureTenantID,
},
ac: suite.service.ac.Drives(), ac: suite.service.ac.Drives(),
}, },
userID: suite.user, scope: sc,
scope: sc,
} }
// Read data for the file // Read data for the file

View File

@ -16,19 +16,20 @@ import (
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers" "github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
"github.com/alcionai/corso/src/pkg/services/m365/custom" "github.com/alcionai/corso/src/pkg/services/m365/custom"
) )
type baseSiteHandler struct { type baseSiteHandler struct {
ac api.Drives ac api.Drives
qp graph.QueryParams
} }
func (h baseSiteHandler) NewDrivePager( func (h baseSiteHandler) NewDrivePager(
resourceOwner string,
fields []string, fields []string,
) pagers.NonDeltaHandler[models.Driveable] { ) pagers.NonDeltaHandler[models.Driveable] {
return h.ac.NewSiteDrivePager(resourceOwner, fields) return h.ac.NewSiteDrivePager(h.qp.ProtectedResource.ID(), fields)
} }
func (h baseSiteHandler) AugmentItemInfo( func (h baseSiteHandler) AugmentItemInfo(
@ -68,22 +69,21 @@ var _ BackupHandler = &siteBackupHandler{}
type siteBackupHandler struct { type siteBackupHandler struct {
baseSiteHandler baseSiteHandler
siteID string
scope selectors.SharePointScope scope selectors.SharePointScope
service path.ServiceType service path.ServiceType
} }
func NewSiteBackupHandler( func NewSiteBackupHandler(
qp graph.QueryParams,
ac api.Drives, ac api.Drives,
siteID string,
scope selectors.SharePointScope, scope selectors.SharePointScope,
service path.ServiceType, service path.ServiceType,
) siteBackupHandler { ) siteBackupHandler {
return siteBackupHandler{ return siteBackupHandler{
baseSiteHandler: baseSiteHandler{ baseSiteHandler: baseSiteHandler{
ac: ac, ac: ac,
qp: qp,
}, },
siteID: siteID,
scope: scope, scope: scope,
service: service, service: service,
} }
@ -98,11 +98,11 @@ func (h siteBackupHandler) Get(
} }
func (h siteBackupHandler) PathPrefix( func (h siteBackupHandler) PathPrefix(
tenantID, driveID string, driveID string,
) (path.Path, error) { ) (path.Path, error) {
return path.Build( return path.Build(
tenantID, h.qp.TenantID,
h.siteID, h.qp.ProtectedResource.ID(),
h.service, h.service,
path.LibrariesCategory, path.LibrariesCategory,
false, false,
@ -111,12 +111,10 @@ func (h siteBackupHandler) PathPrefix(
odConsts.RootPathDir) odConsts.RootPathDir)
} }
func (h siteBackupHandler) MetadataPathPrefix( func (h siteBackupHandler) MetadataPathPrefix() (path.Path, error) {
tenantID string,
) (path.Path, error) {
p, err := path.BuildMetadata( p, err := path.BuildMetadata(
tenantID, h.qp.TenantID,
h.siteID, h.qp.ProtectedResource.ID(),
h.service, h.service,
path.LibrariesCategory, path.LibrariesCategory,
false) false)
@ -129,9 +127,13 @@ func (h siteBackupHandler) MetadataPathPrefix(
func (h siteBackupHandler) CanonicalPath( func (h siteBackupHandler) CanonicalPath(
folders *path.Builder, folders *path.Builder,
tenantID string,
) (path.Path, error) { ) (path.Path, error) {
return folders.ToDataLayerPath(tenantID, h.siteID, h.service, path.LibrariesCategory, false) return folders.ToDataLayerPath(
h.qp.TenantID,
h.qp.ProtectedResource.ID(),
h.service,
path.LibrariesCategory,
false)
} }
func (h siteBackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) { func (h siteBackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
@ -212,6 +214,13 @@ func NewSiteRestoreHandler(ac api.Client, service path.ServiceType) siteRestoreH
} }
} }
func (h siteRestoreHandler) NewDrivePager(
protectedResourceID string,
fields []string,
) pagers.NonDeltaHandler[models.Driveable] {
return h.ac.Drives().NewSiteDrivePager(protectedResourceID, fields)
}
func (h siteRestoreHandler) PostDrive( func (h siteRestoreHandler) PostDrive(
ctx context.Context, ctx context.Context,
siteID, driveName string, siteID, driveName string,

View File

@ -7,8 +7,10 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
) )
type LibraryBackupHandlerUnitSuite struct { type LibraryBackupHandlerUnitSuite struct {
@ -36,9 +38,17 @@ func (suite *LibraryBackupHandlerUnitSuite) TestPathPrefix() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
h := siteBackupHandler{service: path.SharePointService, siteID: resourceOwner} h := siteBackupHandler{
baseSiteHandler: baseSiteHandler{
qp: graph.QueryParams{
ProtectedResource: idname.NewProvider(resourceOwner, resourceOwner),
TenantID: tenantID,
},
},
service: path.SharePointService,
}
result, err := h.PathPrefix(tenantID, "driveID") result, err := h.PathPrefix("driveID")
test.expectErr(t, err, clues.ToCore(err)) test.expectErr(t, err, clues.ToCore(err))
if result != nil { if result != nil {
@ -65,9 +75,17 @@ func (suite *LibraryBackupHandlerUnitSuite) TestMetadataPathPrefix() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
h := siteBackupHandler{service: path.SharePointService, siteID: resourceOwner} h := siteBackupHandler{
baseSiteHandler: baseSiteHandler{
qp: graph.QueryParams{
ProtectedResource: idname.NewProvider(resourceOwner, resourceOwner),
TenantID: tenantID,
},
},
service: path.SharePointService,
}
result, err := h.MetadataPathPrefix(tenantID) result, err := h.MetadataPathPrefix()
test.expectErr(t, err, clues.ToCore(err)) test.expectErr(t, err, clues.ToCore(err))
if result != nil { if result != nil {
@ -94,10 +112,18 @@ func (suite *LibraryBackupHandlerUnitSuite) TestCanonicalPath() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
h := siteBackupHandler{service: path.SharePointService, siteID: resourceOwner} h := siteBackupHandler{
baseSiteHandler: baseSiteHandler{
qp: graph.QueryParams{
ProtectedResource: idname.NewProvider(resourceOwner, resourceOwner),
TenantID: tenantID,
},
},
service: path.SharePointService,
}
p := path.Builder{}.Append("prefix") p := path.Builder{}.Append("prefix")
result, err := h.CanonicalPath(p, tenantID) result, err := h.CanonicalPath(p)
test.expectErr(t, err, clues.ToCore(err)) test.expectErr(t, err, clues.ToCore(err))
if result != nil { if result != nil {

View File

@ -16,6 +16,7 @@ import (
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers" "github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
"github.com/alcionai/corso/src/pkg/services/m365/custom" "github.com/alcionai/corso/src/pkg/services/m365/custom"
) )
@ -26,13 +27,13 @@ import (
type baseUserDriveHandler struct { type baseUserDriveHandler struct {
ac api.Drives ac api.Drives
qp graph.QueryParams
} }
func (h baseUserDriveHandler) NewDrivePager( func (h baseUserDriveHandler) NewDrivePager(
resourceOwner string,
fields []string, fields []string,
) pagers.NonDeltaHandler[models.Driveable] { ) pagers.NonDeltaHandler[models.Driveable] {
return h.ac.NewUserDrivePager(resourceOwner, fields) return h.ac.NewUserDrivePager(h.qp.ProtectedResource.ID(), fields)
} }
// AugmentItemInfo will populate a details.OneDriveInfo struct // AugmentItemInfo will populate a details.OneDriveInfo struct
@ -75,17 +76,20 @@ var _ BackupHandler = &userDriveBackupHandler{}
type userDriveBackupHandler struct { type userDriveBackupHandler struct {
baseUserDriveHandler baseUserDriveHandler
userID string scope selectors.OneDriveScope
scope selectors.OneDriveScope
} }
func NewUserDriveBackupHandler(ac api.Drives, userID string, scope selectors.OneDriveScope) *userDriveBackupHandler { func NewUserDriveBackupHandler(
qp graph.QueryParams,
ac api.Drives,
scope selectors.OneDriveScope,
) *userDriveBackupHandler {
return &userDriveBackupHandler{ return &userDriveBackupHandler{
baseUserDriveHandler: baseUserDriveHandler{ baseUserDriveHandler: baseUserDriveHandler{
ac: ac, ac: ac,
qp: qp,
}, },
userID: userID, scope: scope,
scope: scope,
} }
} }
@ -98,11 +102,11 @@ func (h userDriveBackupHandler) Get(
} }
func (h userDriveBackupHandler) PathPrefix( func (h userDriveBackupHandler) PathPrefix(
tenantID, driveID string, driveID string,
) (path.Path, error) { ) (path.Path, error) {
return path.Build( return path.Build(
tenantID, h.qp.TenantID,
h.userID, h.qp.ProtectedResource.ID(),
path.OneDriveService, path.OneDriveService,
path.FilesCategory, path.FilesCategory,
false, false,
@ -111,12 +115,10 @@ func (h userDriveBackupHandler) PathPrefix(
odConsts.RootPathDir) odConsts.RootPathDir)
} }
func (h userDriveBackupHandler) MetadataPathPrefix( func (h userDriveBackupHandler) MetadataPathPrefix() (path.Path, error) {
tenantID string,
) (path.Path, error) {
p, err := path.BuildMetadata( p, err := path.BuildMetadata(
tenantID, h.qp.TenantID,
h.userID, h.qp.ProtectedResource.ID(),
path.OneDriveService, path.OneDriveService,
path.FilesCategory, path.FilesCategory,
false) false)
@ -129,11 +131,10 @@ func (h userDriveBackupHandler) MetadataPathPrefix(
func (h userDriveBackupHandler) CanonicalPath( func (h userDriveBackupHandler) CanonicalPath(
folders *path.Builder, folders *path.Builder,
tenantID string,
) (path.Path, error) { ) (path.Path, error) {
return path.Build( return path.Build(
tenantID, h.qp.TenantID,
h.userID, h.qp.ProtectedResource.ID(),
path.OneDriveService, path.OneDriveService,
path.FilesCategory, path.FilesCategory,
false, false,
@ -205,7 +206,9 @@ type userDriveRestoreHandler struct {
baseUserDriveHandler baseUserDriveHandler
} }
func NewUserDriveRestoreHandler(ac api.Client) *userDriveRestoreHandler { func NewUserDriveRestoreHandler(
ac api.Client,
) *userDriveRestoreHandler {
return &userDriveRestoreHandler{ return &userDriveRestoreHandler{
baseUserDriveHandler: baseUserDriveHandler{ baseUserDriveHandler: baseUserDriveHandler{
ac: ac.Drives(), ac: ac.Drives(),
@ -213,6 +216,13 @@ func NewUserDriveRestoreHandler(ac api.Client) *userDriveRestoreHandler {
} }
} }
func (h userDriveRestoreHandler) NewDrivePager(
protectedResourceID string,
fields []string,
) pagers.NonDeltaHandler[models.Driveable] {
return h.ac.NewUserDrivePager(protectedResourceID, fields)
}
func (h userDriveRestoreHandler) PostDrive( func (h userDriveRestoreHandler) PostDrive(
context.Context, context.Context,
string, string, string, string,

View File

@ -7,8 +7,10 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
) )
type ItemBackupHandlerUnitSuite struct { type ItemBackupHandlerUnitSuite struct {
@ -36,9 +38,16 @@ func (suite *ItemBackupHandlerUnitSuite) TestPathPrefix() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
h := userDriveBackupHandler{userID: resourceOwner} h := userDriveBackupHandler{
baseUserDriveHandler: baseUserDriveHandler{
qp: graph.QueryParams{
ProtectedResource: idname.NewProvider(resourceOwner, resourceOwner),
TenantID: tenantID,
},
},
}
result, err := h.PathPrefix(tenantID, "driveID") result, err := h.PathPrefix("driveID")
test.expectErr(t, err, clues.ToCore(err)) test.expectErr(t, err, clues.ToCore(err))
if result != nil { if result != nil {
@ -65,9 +74,16 @@ func (suite *ItemBackupHandlerUnitSuite) TestMetadataPathPrefix() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
h := userDriveBackupHandler{userID: resourceOwner} h := userDriveBackupHandler{
baseUserDriveHandler: baseUserDriveHandler{
qp: graph.QueryParams{
ProtectedResource: idname.NewProvider(resourceOwner, resourceOwner),
TenantID: tenantID,
},
},
}
result, err := h.MetadataPathPrefix(tenantID) result, err := h.MetadataPathPrefix()
test.expectErr(t, err, clues.ToCore(err)) test.expectErr(t, err, clues.ToCore(err))
if result != nil { if result != nil {
@ -94,10 +110,17 @@ func (suite *ItemBackupHandlerUnitSuite) TestCanonicalPath() {
for _, test := range table { for _, test := range table {
suite.Run(test.name, func() { suite.Run(test.name, func() {
t := suite.T() t := suite.T()
h := userDriveBackupHandler{userID: resourceOwner} h := userDriveBackupHandler{
baseUserDriveHandler: baseUserDriveHandler{
qp: graph.QueryParams{
ProtectedResource: idname.NewProvider(resourceOwner, resourceOwner),
TenantID: tenantID,
},
},
}
p := path.Builder{}.Append("prefix") p := path.Builder{}.Append("prefix")
result, err := h.CanonicalPath(p, tenantID) result, err := h.CanonicalPath(p)
test.expectErr(t, err, clues.ToCore(err)) test.expectErr(t, err, clues.ToCore(err))
if result != nil { if result != nil {

View File

@ -181,7 +181,7 @@ func populateCollections[C graph.GetIDer, I groupsItemer](
logger.Ctx(ictx).Info("missing delta url") logger.Ctx(ictx).Info("missing delta url")
} }
currPath, err := bh.canonicalPath(c.storageDirFolders, qp.TenantID) currPath, err := bh.canonicalPath(c.storageDirFolders)
if err != nil { if err != nil {
err = clues.StackWC(ctx, err).Label(count.BadCollPath) err = clues.StackWC(ctx, err).Label(count.BadCollPath)
el.AddRecoverable(ctx, err) el.AddRecoverable(ctx, err)

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/idname"
inMock "github.com/alcionai/corso/src/internal/common/idname/mock" inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/collection/groups/testdata" "github.com/alcionai/corso/src/internal/m365/collection/groups/testdata"
@ -50,6 +51,7 @@ type mockBackupHandler struct {
info map[string]*details.GroupsInfo info map[string]*details.GroupsInfo
getMessageErr map[string]error getMessageErr map[string]error
doNotInclude bool doNotInclude bool
tenantID string
} }
//lint:ignore U1000 false linter issue due to generics //lint:ignore U1000 false linter issue due to generics
@ -114,12 +116,11 @@ func (bh mockBackupHandler) includeContainer(
func (bh mockBackupHandler) canonicalPath( func (bh mockBackupHandler) canonicalPath(
storageDirFolders path.Elements, storageDirFolders path.Elements,
tenantID string,
) (path.Path, error) { ) (path.Path, error) {
return storageDirFolders. return storageDirFolders.
Builder(). Builder().
ToDataLayerPath( ToDataLayerPath(
tenantID, bh.tenantID,
"protectedResource", "protectedResource",
path.GroupsService, path.GroupsService,
path.ChannelMessagesCategory, path.ChannelMessagesCategory,
@ -519,7 +520,11 @@ func (suite *BackupIntgSuite) TestCreateCollections() {
var ( var (
protectedResource = tconfig.M365TeamID(suite.T()) protectedResource = tconfig.M365TeamID(suite.T())
resources = []string{protectedResource} resources = []string{protectedResource}
handler = NewChannelBackupHandler(protectedResource, suite.ac.Channels()) qp = graph.QueryParams{
ProtectedResource: idname.NewProvider(protectedResource, protectedResource),
TenantID: "tenant",
}
handler = NewChannelBackupHandler(qp, suite.ac.Channels())
) )
tests := []struct { tests := []struct {

View File

@ -11,23 +11,24 @@ import (
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers" "github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
) )
var _ backupHandler[models.Channelable, models.ChatMessageable] = &channelsBackupHandler{} var _ backupHandler[models.Channelable, models.ChatMessageable] = &channelsBackupHandler{}
type channelsBackupHandler struct { type channelsBackupHandler struct {
ac api.Channels ac api.Channels
protectedResource string qp graph.QueryParams
} }
func NewChannelBackupHandler( func NewChannelBackupHandler(
protectedResource string, qp graph.QueryParams,
ac api.Channels, ac api.Channels,
) channelsBackupHandler { ) channelsBackupHandler {
return channelsBackupHandler{ return channelsBackupHandler{
ac: ac, ac: ac,
protectedResource: protectedResource, qp: qp,
} }
} }
@ -40,7 +41,7 @@ func (bh channelsBackupHandler) getContainers(
ctx context.Context, ctx context.Context,
_ api.CallConfig, _ api.CallConfig,
) ([]container[models.Channelable], error) { ) ([]container[models.Channelable], error) {
chans, err := bh.ac.GetChannels(ctx, bh.protectedResource) chans, err := bh.ac.GetChannels(ctx, bh.qp.ProtectedResource.ID())
results := make([]container[models.Channelable], 0, len(chans)) results := make([]container[models.Channelable], 0, len(chans))
for _, ch := range chans { for _, ch := range chans {
@ -58,7 +59,7 @@ func (bh channelsBackupHandler) getContainerItemIDs(
) (pagers.AddedAndRemoved, error) { ) (pagers.AddedAndRemoved, error) {
return bh.ac.GetChannelMessageIDs( return bh.ac.GetChannelMessageIDs(
ctx, ctx,
bh.protectedResource, bh.qp.ProtectedResource.ID(),
containerPath[0], containerPath[0],
prevDelta, prevDelta,
cc) cc)
@ -74,22 +75,21 @@ func (bh channelsBackupHandler) includeContainer(
func (bh channelsBackupHandler) canonicalPath( func (bh channelsBackupHandler) canonicalPath(
storageDirFolders path.Elements, storageDirFolders path.Elements,
tenantID string,
) (path.Path, error) { ) (path.Path, error) {
return storageDirFolders. return storageDirFolders.
Builder(). Builder().
ToDataLayerPath( ToDataLayerPath(
tenantID, bh.qp.TenantID,
bh.protectedResource, bh.qp.ProtectedResource.ID(),
path.GroupsService, path.GroupsService,
path.ChannelMessagesCategory, path.ChannelMessagesCategory,
false) false)
} }
func (bh channelsBackupHandler) PathPrefix(tenantID string) (path.Path, error) { func (bh channelsBackupHandler) PathPrefix() (path.Path, error) {
return path.Build( return path.Build(
tenantID, bh.qp.TenantID,
bh.protectedResource, bh.qp.ProtectedResource.ID(),
path.GroupsService, path.GroupsService,
path.ChannelMessagesCategory, path.ChannelMessagesCategory,
false) false)

View File

@ -11,23 +11,24 @@ import (
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
"github.com/alcionai/corso/src/pkg/services/m365/api/pagers" "github.com/alcionai/corso/src/pkg/services/m365/api/pagers"
) )
var _ backupHandler[models.Conversationable, models.Postable] = &conversationsBackupHandler{} var _ backupHandler[models.Conversationable, models.Postable] = &conversationsBackupHandler{}
type conversationsBackupHandler struct { type conversationsBackupHandler struct {
ac api.Conversations ac api.Conversations
protectedResource string qp graph.QueryParams
} }
func NewConversationBackupHandler( func NewConversationBackupHandler(
protectedResource string, qp graph.QueryParams,
ac api.Conversations, ac api.Conversations,
) conversationsBackupHandler { ) conversationsBackupHandler {
return conversationsBackupHandler{ return conversationsBackupHandler{
ac: ac, ac: ac,
protectedResource: protectedResource, qp: qp,
} }
} }
@ -41,7 +42,7 @@ func (bh conversationsBackupHandler) getContainers(
ctx context.Context, ctx context.Context,
cc api.CallConfig, cc api.CallConfig,
) ([]container[models.Conversationable], error) { ) ([]container[models.Conversationable], error) {
convs, err := bh.ac.GetConversations(ctx, bh.protectedResource, cc) convs, err := bh.ac.GetConversations(ctx, bh.qp.ProtectedResource.ID(), cc)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "getting conversations") return nil, clues.Wrap(err, "getting conversations")
} }
@ -53,7 +54,7 @@ func (bh conversationsBackupHandler) getContainers(
threads, err := bh.ac.GetConversationThreads( threads, err := bh.ac.GetConversationThreads(
ictx, ictx,
bh.protectedResource, bh.qp.ProtectedResource.ID(),
ptr.Val(conv.GetId()), ptr.Val(conv.GetId()),
cc) cc)
if err != nil { if err != nil {
@ -76,7 +77,7 @@ func (bh conversationsBackupHandler) getContainerItemIDs(
) (pagers.AddedAndRemoved, error) { ) (pagers.AddedAndRemoved, error) {
return bh.ac.GetConversationThreadPostIDs( return bh.ac.GetConversationThreadPostIDs(
ctx, ctx,
bh.protectedResource, bh.qp.ProtectedResource.ID(),
containerPath[0], containerPath[0],
containerPath[1], containerPath[1],
cc) cc)
@ -92,22 +93,21 @@ func (bh conversationsBackupHandler) includeContainer(
func (bh conversationsBackupHandler) canonicalPath( func (bh conversationsBackupHandler) canonicalPath(
storageDirFolders path.Elements, storageDirFolders path.Elements,
tenantID string,
) (path.Path, error) { ) (path.Path, error) {
return storageDirFolders. return storageDirFolders.
Builder(). Builder().
ToDataLayerPath( ToDataLayerPath(
tenantID, bh.qp.TenantID,
bh.protectedResource, bh.qp.ProtectedResource.ID(),
path.GroupsService, path.GroupsService,
path.ConversationPostsCategory, path.ConversationPostsCategory,
false) false)
} }
func (bh conversationsBackupHandler) PathPrefix(tenantID string) (path.Path, error) { func (bh conversationsBackupHandler) PathPrefix() (path.Path, error) {
return path.Build( return path.Build(
tenantID, bh.qp.TenantID,
bh.protectedResource, bh.qp.ProtectedResource.ID(),
path.GroupsService, path.GroupsService,
path.ConversationPostsCategory, path.ConversationPostsCategory,
false) false)

View File

@ -81,10 +81,7 @@ type includeContainerer[C graph.GetIDer] interface {
// canonicalPath constructs the service and category specific path for // canonicalPath constructs the service and category specific path for
// the given builder. // the given builder.
type canonicalPather interface { type canonicalPather interface {
canonicalPath( canonicalPath(storageDir path.Elements) (path.Path, error)
storageDir path.Elements,
tenantID string,
) (path.Path, error)
} }
// canMakeDeltaQueries evaluates whether the handler can support a // canMakeDeltaQueries evaluates whether the handler can support a

View File

@ -253,7 +253,7 @@ func populateListsCollections(
} }
} }
currPath, err := bh.CanonicalPath(storageDir, tenantID) currPath, err := bh.CanonicalPath(storageDir)
if err != nil { if err != nil {
el.AddRecoverable(ctx, clues.WrapWC(ctx, err, "creating list collection path")) el.AddRecoverable(ctx, clues.WrapWC(ctx, err, "creating list collection path"))
return nil, err return nil, err

View File

@ -67,7 +67,7 @@ func (suite *SharePointBackupUnitSuite) TestCollectLists() {
}{ }{
{ {
name: "one list", name: "one list",
mock: siteMock.NewListHandler(siteMock.StubLists("one"), siteID, nil), mock: siteMock.NewListHandler(siteMock.StubLists("one"), suite.creds.AzureTenantID, siteID, nil),
expectErr: require.NoError, expectErr: require.NoError,
expectColls: 2, expectColls: 2,
expectNewColls: 1, expectNewColls: 1,
@ -76,7 +76,7 @@ func (suite *SharePointBackupUnitSuite) TestCollectLists() {
}, },
{ {
name: "many lists", name: "many lists",
mock: siteMock.NewListHandler(siteMock.StubLists("one", "two"), siteID, nil), mock: siteMock.NewListHandler(siteMock.StubLists("one", "two"), suite.creds.AzureTenantID, siteID, nil),
expectErr: require.NoError, expectErr: require.NoError,
expectColls: 3, expectColls: 3,
expectNewColls: 2, expectNewColls: 2,
@ -85,7 +85,7 @@ func (suite *SharePointBackupUnitSuite) TestCollectLists() {
}, },
{ {
name: "with error", name: "with error",
mock: siteMock.NewListHandler(siteMock.StubLists("one"), siteID, errors.New("some error")), mock: siteMock.NewListHandler(siteMock.StubLists("one"), suite.creds.AzureTenantID, siteID, errors.New("some error")),
expectErr: require.Error, expectErr: require.Error,
expectColls: 0, expectColls: 0,
expectNewColls: 0, expectNewColls: 0,
@ -286,7 +286,7 @@ func (suite *SharePointBackupUnitSuite) TestPopulateListsCollections_incremental
cs, err := populateListsCollections( cs, err := populateListsCollections(
ctx, ctx,
siteMock.NewListHandler(test.lists, siteID, nil), siteMock.NewListHandler(test.lists, suite.creds.AzureTenantID, siteID, nil),
bpc, bpc,
ac, ac,
suite.creds.AzureTenantID, suite.creds.AzureTenantID,
@ -417,8 +417,12 @@ func (suite *SharePointSuite) TestCollectLists() {
} }
sel := selectors.NewSharePointBackup([]string{siteID}) sel := selectors.NewSharePointBackup([]string{siteID})
qp := graph.QueryParams{
ProtectedResource: bpc.ProtectedResource,
TenantID: creds.AzureTenantID,
}
bh := NewListsBackupHandler(bpc.ProtectedResource.ID(), ac.Lists()) bh := NewListsBackupHandler(qp, ac.Lists())
col, _, err := CollectLists( col, _, err := CollectLists(
ctx, ctx,

View File

@ -312,10 +312,11 @@ func (suite *SharePointCollectionSuite) TestLazyCollection_Items() {
errs = fault.New(true) errs = fault.New(true)
start = time.Now().Add(-time.Second) start = time.Now().Add(-time.Second)
statusUpdater = func(*support.ControllerOperationStatus) {} statusUpdater = func(*support.ControllerOperationStatus) {}
tenant = "t"
) )
fullPath, err := path.Build( fullPath, err := path.Build(
"t", tenant,
"pr", "pr",
path.SharePointService, path.SharePointService,
path.ListsCategory, path.ListsCategory,
@ -326,7 +327,7 @@ func (suite *SharePointCollectionSuite) TestLazyCollection_Items() {
locPath := path.Elements{"full"}.Builder() locPath := path.Elements{"full"}.Builder()
prevPath, err := path.Build( prevPath, err := path.Build(
"t", tenant,
"pr", "pr",
path.SharePointService, path.SharePointService,
path.ListsCategory, path.ListsCategory,
@ -367,7 +368,7 @@ func (suite *SharePointCollectionSuite) TestLazyCollection_Items() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
getter := mock.NewListHandler(nil, "", nil) getter := mock.NewListHandler(nil, tenant, "", nil)
defer getter.Check(t, test.expectReads) defer getter.Check(t, test.expectReads)
col := NewLazyFetchCollection( col := NewLazyFetchCollection(
@ -425,7 +426,7 @@ func (suite *SharePointCollectionSuite) TestLazyItem() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
lh := mock.NewListHandler(nil, "", nil) lh := mock.NewListHandler(nil, "tenant", "", nil)
li := data.NewLazyItemWithInfo( li := data.NewLazyItemWithInfo(
ctx, ctx,
@ -469,7 +470,7 @@ func (suite *SharePointCollectionSuite) TestLazyItem_ReturnsEmptyReaderOnDeleted
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
lh := mock.NewListHandler(nil, "", core.ErrNotFound) lh := mock.NewListHandler(nil, "tenant", "", core.ErrNotFound)
li := data.NewLazyItemWithInfo( li := data.NewLazyItemWithInfo(
ctx, ctx,

View File

@ -20,10 +20,7 @@ type backupHandler interface {
// canonicalPath constructs the service and category specific path for // canonicalPath constructs the service and category specific path for
// the given builder. // the given builder.
type canonicalPather interface { type canonicalPather interface {
CanonicalPath( CanonicalPath(storageDir path.Elements) (path.Path, error)
storageDir path.Elements,
tenantID string,
) (path.Path, error)
} }
type getItemByIDer interface { type getItemByIDer interface {

View File

@ -9,31 +9,34 @@ import (
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
) )
var _ backupHandler = &listsBackupHandler{} var _ backupHandler = &listsBackupHandler{}
type listsBackupHandler struct { type listsBackupHandler struct {
ac api.Lists ac api.Lists
protectedResource string qp graph.QueryParams
} }
func NewListsBackupHandler(protectedResource string, ac api.Lists) listsBackupHandler { func NewListsBackupHandler(
qp graph.QueryParams,
ac api.Lists,
) listsBackupHandler {
return listsBackupHandler{ return listsBackupHandler{
ac: ac, ac: ac,
protectedResource: protectedResource, qp: qp,
} }
} }
func (bh listsBackupHandler) CanonicalPath( func (bh listsBackupHandler) CanonicalPath(
storageDirFolders path.Elements, storageDirFolders path.Elements,
tenantID string,
) (path.Path, error) { ) (path.Path, error) {
return storageDirFolders. return storageDirFolders.
Builder(). Builder().
ToDataLayerPath( ToDataLayerPath(
tenantID, bh.qp.TenantID,
bh.protectedResource, bh.qp.ProtectedResource.ID(),
path.SharePointService, path.SharePointService,
path.ListsCategory, path.ListsCategory,
false) false)
@ -43,11 +46,11 @@ func (bh listsBackupHandler) GetItemByID(
ctx context.Context, ctx context.Context,
itemID string, itemID string,
) (models.Listable, *details.SharePointInfo, error) { ) (models.Listable, *details.SharePointInfo, error) {
return bh.ac.GetListByID(ctx, bh.protectedResource, itemID) return bh.ac.GetListByID(ctx, bh.qp.ProtectedResource.ID(), itemID)
} }
func (bh listsBackupHandler) GetItems(ctx context.Context, cc api.CallConfig) ([]models.Listable, error) { func (bh listsBackupHandler) GetItems(ctx context.Context, cc api.CallConfig) ([]models.Listable, error) {
return bh.ac.GetLists(ctx, bh.protectedResource, cc) return bh.ac.GetLists(ctx, bh.qp.ProtectedResource.ID(), cc)
} }
var _ restoreHandler = &listsRestoreHandler{} var _ restoreHandler = &listsRestoreHandler{}

View File

@ -9,30 +9,39 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
) )
type ListHandler struct { type ListHandler struct {
protectedResource string qp graph.QueryParams
lists []models.Listable lists []models.Listable
listsMap map[string]models.Listable listsMap map[string]models.Listable
err error err error
} }
func NewListHandler(lists []models.Listable, protectedResource string, err error) ListHandler { func NewListHandler(
lists []models.Listable,
tenantID, protectedResource string,
err error,
) ListHandler {
lstMap := make(map[string]models.Listable) lstMap := make(map[string]models.Listable)
for _, lst := range lists { for _, lst := range lists {
lstMap[ptr.Val(lst.GetId())] = lst lstMap[ptr.Val(lst.GetId())] = lst
} }
return ListHandler{ return ListHandler{
protectedResource: protectedResource, qp: graph.QueryParams{
lists: lists, ProtectedResource: idname.NewProvider(protectedResource, protectedResource),
listsMap: lstMap, TenantID: tenantID,
err: err, },
lists: lists,
listsMap: lstMap,
err: err,
} }
} }
@ -72,13 +81,12 @@ func (lh ListHandler) GetItems(
func (lh ListHandler) CanonicalPath( func (lh ListHandler) CanonicalPath(
storageDirFolders path.Elements, storageDirFolders path.Elements,
tenantID string,
) (path.Path, error) { ) (path.Path, error) {
return storageDirFolders. return storageDirFolders.
Builder(). Builder().
ToDataLayerPath( ToDataLayerPath(
tenantID, lh.qp.TenantID,
lh.protectedResource, lh.qp.ProtectedResource.ID(),
path.SharePointService, path.SharePointService,
path.ListsCategory, path.ListsCategory,
false) false)

View File

@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/idname"
inMock "github.com/alcionai/corso/src/internal/common/idname/mock" inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/common/ptr" "github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
@ -86,7 +87,6 @@ func (bh mockBackupHandler) CanonicalPath() (path.Path, error) {
//lint:ignore U1000 false linter issue due to generics //lint:ignore U1000 false linter issue due to generics
func (bh mockBackupHandler) getItem( func (bh mockBackupHandler) getItem(
_ context.Context, _ context.Context,
_ string,
chat models.Chatable, chat models.Chatable,
) (models.Chatable, *details.TeamsChatsInfo, error) { ) (models.Chatable, *details.TeamsChatsInfo, error) {
chatID := ptr.Val(chat.GetId()) chatID := ptr.Val(chat.GetId())
@ -285,7 +285,11 @@ func (suite *BackupIntgSuite) TestCreateCollections() {
tenant = tconfig.M365TenantID(suite.T()) tenant = tconfig.M365TenantID(suite.T())
protectedResource = tconfig.M365TeamID(suite.T()) protectedResource = tconfig.M365TeamID(suite.T())
resources = []string{protectedResource} resources = []string{protectedResource}
handler = NewUsersChatsBackupHandler(tenant, protectedResource, suite.ac.Chats()) qp = graph.QueryParams{
ProtectedResource: idname.NewProvider(protectedResource, protectedResource),
TenantID: tenant,
}
handler = NewUsersChatsBackupHandler(qp, suite.ac.Chats())
) )
tests := []struct { tests := []struct {

View File

@ -12,24 +12,23 @@ import (
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
) )
var _ backupHandler[models.Chatable] = &usersChatsBackupHandler{} var _ backupHandler[models.Chatable] = &usersChatsBackupHandler{}
type usersChatsBackupHandler struct { type usersChatsBackupHandler struct {
ac api.Chats ac api.Chats
protectedResourceID string qp graph.QueryParams
tenantID string
} }
func NewUsersChatsBackupHandler( func NewUsersChatsBackupHandler(
tenantID, protectedResourceID string, qp graph.QueryParams,
ac api.Chats, ac api.Chats,
) usersChatsBackupHandler { ) usersChatsBackupHandler {
return usersChatsBackupHandler{ return usersChatsBackupHandler{
ac: ac, ac: ac,
protectedResourceID: protectedResourceID, qp: qp,
tenantID: tenantID,
} }
} }
@ -53,7 +52,7 @@ func (bh usersChatsBackupHandler) getItemIDs(
return bh.ac.GetChats( return bh.ac.GetChats(
ctx, ctx,
bh.protectedResourceID, bh.qp.ProtectedResource.ID(),
cc) cc)
} }
@ -74,8 +73,8 @@ func (bh usersChatsBackupHandler) includeItem(
func (bh usersChatsBackupHandler) CanonicalPath() (path.Path, error) { func (bh usersChatsBackupHandler) CanonicalPath() (path.Path, error) {
return path.BuildPrefix( return path.BuildPrefix(
bh.tenantID, bh.qp.TenantID,
bh.protectedResourceID, bh.qp.ProtectedResource.ID(),
path.TeamsChatsService, path.TeamsChatsService,
path.ChatsCategory) path.ChatsCategory)
} }
@ -83,7 +82,6 @@ func (bh usersChatsBackupHandler) CanonicalPath() (path.Path, error) {
//lint:ignore U1000 false linter issue due to generics //lint:ignore U1000 false linter issue due to generics
func (bh usersChatsBackupHandler) getItem( func (bh usersChatsBackupHandler) getItem(
ctx context.Context, ctx context.Context,
userID string,
chat models.Chatable, chat models.Chatable,
) (models.Chatable, *details.TeamsChatsInfo, error) { ) (models.Chatable, *details.TeamsChatsInfo, error) {
if chat == nil { if chat == nil {
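Wiring it together, construction of the chats handler now follows the pattern below (a sketch only; chatsClient stands in for an api.Chats value obtained elsewhere, and the ids are placeholders):

qp := graph.QueryParams{
    ProtectedResource: idname.NewProvider("user-id", "user display name"),
    TenantID:          "tenant-id",
}

bh := NewUsersChatsBackupHandler(qp, chatsClient)

// Both ids are read from qp, so the prefix builder takes no arguments.
prefix, err := bh.CanonicalPath()
if err != nil {
    // handle error
}
_ = prefix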

View File

@ -208,10 +208,7 @@ func (lig *lazyItemGetter[I]) GetData(
writer := kjson.NewJsonSerializationWriter() writer := kjson.NewJsonSerializationWriter()
defer writer.Close() defer writer.Close()
item, info, err := lig.getter.getItem( item, info, err := lig.getter.getItem(ctx, lig.item)
ctx,
lig.resourceID,
lig.item)
if err != nil { if err != nil {
// For items that were deleted in flight, add the skip label so that // For items that were deleted in flight, add the skip label so that
// they don't lead to recoverable failures during backup. // they don't lead to recoverable failures during backup.

View File

@ -155,7 +155,6 @@ type getAndAugmentChat struct {
//lint:ignore U1000 false linter issue due to generics //lint:ignore U1000 false linter issue due to generics
func (m getAndAugmentChat) getItem( func (m getAndAugmentChat) getItem(
_ context.Context, _ context.Context,
_ string,
chat models.Chatable, chat models.Chatable,
) (models.Chatable, *details.TeamsChatsInfo, error) { ) (models.Chatable, *details.TeamsChatsInfo, error) {
chat.SetTopic(chat.GetId()) chat.SetTopic(chat.GetId())

View File

@ -49,7 +49,6 @@ type getItemIDser[I chatsItemer] interface {
type getItemer[I chatsItemer] interface { type getItemer[I chatsItemer] interface {
getItem( getItem(
ctx context.Context, ctx context.Context,
protectedResource string,
i I, i I,
) (I, *details.TeamsChatsInfo, error) ) (I, *details.TeamsChatsInfo, error)
} }
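For completeness, a stub satisfying the narrowed interface would now look like this (illustrative only; the production implementation is usersChatsBackupHandler, and stubChatGetter is a name invented for this example):

type stubChatGetter struct{}

// getItem matches the new shape: the protected-resource id parameter is
// gone, since callers resolve it from QueryParams instead.
func (stubChatGetter) getItem(
    _ context.Context,
    chat models.Chatable,
) (models.Chatable, *details.TeamsChatsInfo, error) {
    return chat, &details.TeamsChatsInfo{}, nil
}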

View File

@ -217,9 +217,17 @@ func backupLibraries(
Selector: bc.producerConfig.Selector, Selector: bc.producerConfig.Selector,
MetadataCollections: siteMetadataCollection[ptr.Val(s.GetId())], MetadataCollections: siteMetadataCollection[ptr.Val(s.GetId())],
} }
groupQP = graph.QueryParams{
ProtectedResource: bc.producerConfig.ProtectedResource,
TenantID: bc.creds.AzureTenantID,
}
siteQP = graph.QueryParams{
ProtectedResource: idname.NewProvider(ptr.Val(s.GetId()), ptr.Val(s.GetWebUrl())),
TenantID: bc.creds.AzureTenantID,
}
bh = drive.NewGroupBackupHandler( bh = drive.NewGroupBackupHandler(
bc.producerConfig.ProtectedResource.ID(), groupQP,
ptr.Val(s.GetId()), siteQP,
bc.apiCli.Drives(), bc.apiCli.Drives(),
scope) scope)
) )
@ -229,7 +237,7 @@ func backupLibraries(
"site_id", ptr.Val(s.GetId()), "site_id", ptr.Val(s.GetId()),
"site_weburl", graph.LoggableURL(ptr.Val(s.GetWebUrl()))) "site_weburl", graph.LoggableURL(ptr.Val(s.GetWebUrl())))
sp, err := bh.SitePathPrefix(bc.creds.AzureTenantID) sp, err := bh.SitePathPrefix()
if err != nil { if err != nil {
return nil, clues.WrapWC(ictx, err, "getting site path").Label(count.BadPathPrefix) return nil, clues.WrapWC(ictx, err, "getting site path").Label(count.BadPathPrefix)
} }
@ -270,6 +278,10 @@ func backupChannels(
var ( var (
colls []data.BackupCollection colls []data.BackupCollection
canUsePreviousBackup bool canUsePreviousBackup bool
qp = graph.QueryParams{
ProtectedResource: bc.producerConfig.ProtectedResource,
TenantID: bc.creds.AzureTenantID,
}
) )
progressMessage := observe.MessageWithCompletion( progressMessage := observe.MessageWithCompletion(
@ -286,9 +298,7 @@ func backupChannels(
return colls, nil return colls, nil
} }
bh := groups.NewChannelBackupHandler( bh := groups.NewChannelBackupHandler(qp, bc.apiCli.Channels())
bc.producerConfig.ProtectedResource.ID(),
bc.apiCli.Channels())
// Always disable lazy reader for channels until #4321 support is added // Always disable lazy reader for channels until #4321 support is added
useLazyReader := false useLazyReader := false
@ -308,7 +318,7 @@ func backupChannels(
} }
if !canUsePreviousBackup { if !canUsePreviousBackup {
tp, err := bh.PathPrefix(bc.creds.AzureTenantID) tp, err := bh.PathPrefix()
if err != nil { if err != nil {
err = clues.WrapWC(ctx, err, "getting message path").Label(count.BadPathPrefix) err = clues.WrapWC(ctx, err, "getting message path").Label(count.BadPathPrefix)
return nil, err return nil, err
@ -328,9 +338,11 @@ func backupConversations(
errs *fault.Bus, errs *fault.Bus,
) ([]data.BackupCollection, error) { ) ([]data.BackupCollection, error) {
var ( var (
bh = groups.NewConversationBackupHandler( qp = graph.QueryParams{
bc.producerConfig.ProtectedResource.ID(), ProtectedResource: bc.producerConfig.ProtectedResource,
bc.apiCli.Conversations()) TenantID: bc.creds.AzureTenantID,
}
bh = groups.NewConversationBackupHandler(qp, bc.apiCli.Conversations())
colls []data.BackupCollection colls []data.BackupCollection
) )
@ -360,7 +372,7 @@ func backupConversations(
} }
if !canUsePreviousBackup { if !canUsePreviousBackup {
tp, err := bh.PathPrefix(bc.creds.AzureTenantID) tp, err := bh.PathPrefix()
if err != nil { if err != nil {
err = clues.WrapWC(ctx, err, "getting conversation path").Label(count.BadPathPrefix) err = clues.WrapWC(ctx, err, "getting conversation path").Label(count.BadPathPrefix)
return nil, err return nil, err
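backupLibraries, backupChannels, and backupConversations now each assemble the same QueryParams from the producer config and credentials. A small helper along these lines could factor that out; this is purely a suggestion, not part of the change, and backupCommon is a stand-in name for bc's actual type:

// groupQueryParams builds the QueryParams shared by the group backup
// producers. backupCommon is a placeholder for bc's real type.
func groupQueryParams(bc backupCommon) graph.QueryParams {
    return graph.QueryParams{
        ProtectedResource: bc.producerConfig.ProtectedResource,
        TenantID:          bc.creds.AzureTenantID,
    }
}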

View File

@ -62,8 +62,13 @@ func (oneDriveBackup) ProduceBackupCollections(
logger.Ctx(ctx).Debug("creating OneDrive collections") logger.Ctx(ctx).Debug("creating OneDrive collections")
qp := graph.QueryParams{
ProtectedResource: bpc.ProtectedResource,
TenantID: tenantID,
}
nc := drive.NewCollections( nc := drive.NewCollections(
drive.NewUserDriveBackupHandler(ac.Drives(), bpc.ProtectedResource.ID(), scope), drive.NewUserDriveBackupHandler(qp, ac.Drives(), scope),
tenantID, tenantID,
bpc.ProtectedResource, bpc.ProtectedResource,
su, su,

View File

@ -71,6 +71,8 @@ type BackupHandler[T any] struct {
GetErrs []error GetErrs []error
RootFolder models.DriveItemable RootFolder models.DriveItemable
TenantID string
} }
func stubRootFolder() models.DriveItemable { func stubRootFolder() models.DriveItemable {
@ -106,6 +108,7 @@ func DefaultOneDriveBH(resourceOwner string) *BackupHandler[models.DriveItemable
GetResps: []*http.Response{nil}, GetResps: []*http.Response{nil},
GetErrs: []error{clues.New("not defined")}, GetErrs: []error{clues.New("not defined")},
RootFolder: stubRootFolder(), RootFolder: stubRootFolder(),
TenantID: "tenantID",
} }
} }
@ -131,6 +134,7 @@ func DefaultSharePointBH(resourceOwner string) *BackupHandler[models.DriveItemab
GetResps: []*http.Response{nil}, GetResps: []*http.Response{nil},
GetErrs: []error{clues.New("not defined")}, GetErrs: []error{clues.New("not defined")},
RootFolder: stubRootFolder(), RootFolder: stubRootFolder(),
TenantID: "tenantID",
} }
} }
@ -144,8 +148,8 @@ func DefaultDriveBHWith(
return mbh return mbh
} }
func (h BackupHandler[T]) PathPrefix(tID, driveID string) (path.Path, error) { func (h BackupHandler[T]) PathPrefix(driveID string) (path.Path, error) {
pp, err := h.PathPrefixFn(tID, h.ProtectedResource.ID(), driveID) pp, err := h.PathPrefixFn(h.TenantID, h.ProtectedResource.ID(), driveID)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -153,8 +157,8 @@ func (h BackupHandler[T]) PathPrefix(tID, driveID string) (path.Path, error) {
return pp, h.PathPrefixErr return pp, h.PathPrefixErr
} }
func (h BackupHandler[T]) MetadataPathPrefix(tID string) (path.Path, error) { func (h BackupHandler[T]) MetadataPathPrefix() (path.Path, error) {
pp, err := h.MetadataPathPrefixFn(tID, h.ProtectedResource.ID()) pp, err := h.MetadataPathPrefixFn(h.TenantID, h.ProtectedResource.ID())
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -162,8 +166,8 @@ func (h BackupHandler[T]) MetadataPathPrefix(tID string) (path.Path, error) {
return pp, h.MetadataPathPrefixErr return pp, h.MetadataPathPrefixErr
} }
func (h BackupHandler[T]) CanonicalPath(pb *path.Builder, tID string) (path.Path, error) { func (h BackupHandler[T]) CanonicalPath(pb *path.Builder) (path.Path, error) {
cp, err := h.CanonPathFn(pb, tID, h.ProtectedResource.ID()) cp, err := h.CanonPathFn(pb, h.TenantID, h.ProtectedResource.ID())
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -175,7 +179,7 @@ func (h BackupHandler[T]) ServiceCat() (path.ServiceType, path.CategoryType) {
return h.Service, h.Category return h.Service, h.Category
} }
func (h BackupHandler[T]) NewDrivePager(string, []string) pagers.NonDeltaHandler[models.Driveable] { func (h BackupHandler[T]) NewDrivePager([]string) pagers.NonDeltaHandler[models.Driveable] {
return h.DriveItemEnumeration.DrivePager() return h.DriveItemEnumeration.DrivePager()
} }
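With TenantID baked into the stub, tests exercise the trimmed signatures roughly as follows (a sketch; t is the test's *testing.T and "drive-id" is a placeholder):

mbh := DefaultOneDriveBH("resource-owner") // TenantID defaults to "tenantID"

// No tenant argument: both prefix helpers read mbh.TenantID internally.
pp, err := mbh.PathPrefix("drive-id")
require.NoError(t, err, clues.ToCore(err))

mdp, err := mbh.MetadataPathPrefix()
require.NoError(t, err, clues.ToCore(err))

_, _ = pp, mdp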

View File

@ -62,9 +62,14 @@ func (sharePointBackup) ProduceBackupCollections(
var spcs []data.BackupCollection var spcs []data.BackupCollection
qp := graph.QueryParams{
ProtectedResource: bpc.ProtectedResource,
TenantID: creds.AzureTenantID,
}
switch scope.Category().PathType() { switch scope.Category().PathType() {
case path.ListsCategory: case path.ListsCategory:
bh := site.NewListsBackupHandler(bpc.ProtectedResource.ID(), ac.Lists()) bh := site.NewListsBackupHandler(qp, ac.Lists())
spcs, canUsePreviousBackup, err = site.CollectLists( spcs, canUsePreviousBackup, err = site.CollectLists(
ctx, ctx,
@ -86,8 +91,8 @@ func (sharePointBackup) ProduceBackupCollections(
ctx, ctx,
bpc, bpc,
drive.NewSiteBackupHandler( drive.NewSiteBackupHandler(
qp,
ac.Drives(), ac.Drives(),
bpc.ProtectedResource.ID(),
scope, scope,
bpc.Selector.PathService()), bpc.Selector.PathService()),
creds.AzureTenantID, creds.AzureTenantID,

View File

@ -20,6 +20,7 @@ import (
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api" "github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
) )
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -54,9 +55,14 @@ func (suite *LibrariesBackupUnitSuite) TestUpdateCollections() {
siteID = "site" siteID = "site"
) )
qp := graph.QueryParams{
ProtectedResource: idname.NewProvider(siteID, siteID),
TenantID: tenantID,
}
pb := path.Builder{}.Append(testBaseDrivePath.Elements()...) pb := path.Builder{}.Append(testBaseDrivePath.Elements()...)
ep, err := drive.NewSiteBackupHandler(api.Drives{}, siteID, nil, path.SharePointService). ep, err := drive.NewSiteBackupHandler(qp, api.Drives{}, nil, path.SharePointService).
CanonicalPath(pb, tenantID) CanonicalPath(pb)
require.NoError(suite.T(), err, clues.ToCore(err)) require.NoError(suite.T(), err, clues.ToCore(err))
tests := []struct { tests := []struct {