drive-focused api refactoring prep (#3471)

Sets up the drive API calls into files/spaces in a way that will cascade naturally to the addition of an API client for users and sites. Contains partial implementations of these clients, which will be completed in the next PR.

---

#### Does this PR need a docs update or release note?

- [x]  No

#### Type of change

- [ ] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #1996

#### Test Plan

- [x]  Unit test
- [x] 💚 E2E
This commit is contained in:
Keepers 2023-06-02 14:16:47 -06:00 committed by GitHub
parent d51b20c99a
commit cbbc8d2f6c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
77 changed files with 3137 additions and 2803 deletions

View File

@ -51,7 +51,6 @@ type dataBuilderFunc func(id, now, subject, body string) []byte
func generateAndRestoreItems(
ctx context.Context,
gc *connector.GraphConnector,
acct account.Account,
service path.ServiceType,
cat path.CategoryType,
sel selectors.Selector,
@ -99,7 +98,7 @@ func generateAndRestoreItems(
print.Infof(ctx, "Generating %d %s items in %s\n", howMany, cat, Destination)
return gc.ConsumeRestoreCollections(ctx, version.Backup, acct, sel, dest, opts, dataColls, errs)
return gc.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, dataColls, errs)
}
// ------------------------------------------------------------------------------------------
@ -188,7 +187,7 @@ func buildCollections(
mc.Data[i] = c.items[i].data
}
collections = append(collections, data.NotFoundRestoreCollection{Collection: mc})
collections = append(collections, data.NoFetchRestoreCollection{Collection: mc})
}
return collections, nil
@ -233,14 +232,14 @@ func generateAndRestoreDriveItems(
switch service {
case path.SharePointService:
d, err := gc.Service.Client().Sites().BySiteId(resourceOwner).Drive().Get(ctx, nil)
d, err := gc.AC.Stable.Client().Sites().BySiteId(resourceOwner).Drive().Get(ctx, nil)
if err != nil {
return nil, clues.Wrap(err, "getting site's default drive")
}
driveID = ptr.Val(d.GetId())
default:
d, err := gc.Service.Client().Users().ByUserId(resourceOwner).Drive().Get(ctx, nil)
d, err := gc.AC.Stable.Client().Users().ByUserId(resourceOwner).Drive().Get(ctx, nil)
if err != nil {
return nil, clues.Wrap(err, "getting user's default drive")
}
@ -390,7 +389,6 @@ func generateAndRestoreDriveItems(
}
config := connector.ConfigInfo{
Acct: acct,
Opts: opts,
Resource: connector.Users,
Service: service,
@ -407,5 +405,5 @@ func generateAndRestoreDriveItems(
return nil, err
}
return gc.ConsumeRestoreCollections(ctx, version.Backup, acct, sel, dest, opts, collections, errs)
return gc.ConsumeRestoreCollections(ctx, version.Backup, sel, dest, opts, collections, errs)
}

View File

@ -52,7 +52,7 @@ func handleExchangeEmailFactory(cmd *cobra.Command, args []string) error {
return nil
}
gc, acct, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
gc, _, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
if err != nil {
return Only(ctx, err)
}
@ -60,7 +60,6 @@ func handleExchangeEmailFactory(cmd *cobra.Command, args []string) error {
deets, err := generateAndRestoreItems(
ctx,
gc,
acct,
service,
category,
selectors.NewExchangeRestore([]string{User}).Selector,
@ -99,7 +98,7 @@ func handleExchangeCalendarEventFactory(cmd *cobra.Command, args []string) error
return nil
}
gc, acct, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
gc, _, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
if err != nil {
return Only(ctx, err)
}
@ -107,7 +106,6 @@ func handleExchangeCalendarEventFactory(cmd *cobra.Command, args []string) error
deets, err := generateAndRestoreItems(
ctx,
gc,
acct,
service,
category,
selectors.NewExchangeRestore([]string{User}).Selector,
@ -145,7 +143,7 @@ func handleExchangeContactFactory(cmd *cobra.Command, args []string) error {
return nil
}
gc, acct, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
gc, _, _, err := getGCAndVerifyResourceOwner(ctx, connector.Users, User)
if err != nil {
return Only(ctx, err)
}
@ -153,7 +151,6 @@ func handleExchangeContactFactory(cmd *cobra.Command, args []string) error {
deets, err := generateAndRestoreItems(
ctx,
gc,
acct,
service,
category,
selectors.NewExchangeRestore([]string{User}).Selector,

View File

@ -71,16 +71,14 @@ func handleOneDriveCmd(cmd *cobra.Command, args []string) error {
AzureTenantID: tid,
}
// todo: swap to drive api client, when finished.
adpt, err := graph.CreateAdapter(tid, creds.AzureClientID, creds.AzureClientSecret)
if err != nil {
return Only(ctx, clues.Wrap(err, "creating graph adapter"))
}
svc := graph.NewService(adpt)
gr := graph.NewNoTimeoutHTTPWrapper()
err = runDisplayM365JSON(ctx, svc, gr, creds, user, m365ID)
ac, err := api.NewClient(creds)
if err != nil {
return Only(ctx, clues.Wrap(err, "getting api client"))
}
err = runDisplayM365JSON(ctx, ac, gr, creds, user, m365ID)
if err != nil {
cmd.SilenceUsage = true
cmd.SilenceErrors = true
@ -107,12 +105,12 @@ func (i itemPrintable) MinimumPrintable() any {
func runDisplayM365JSON(
ctx context.Context,
srv graph.Servicer,
ac api.Client,
gr graph.Requester,
creds account.M365Config,
user, itemID string,
userID, itemID string,
) error {
drive, err := api.GetUsersDrive(ctx, srv, user)
drive, err := ac.Users().GetDefaultDrive(ctx, userID)
if err != nil {
return err
}
@ -121,7 +119,7 @@ func runDisplayM365JSON(
it := itemPrintable{}
item, err := api.GetDriveItem(ctx, srv, driveID, itemID)
item, err := ac.Drives().GetItem(ctx, driveID, itemID)
if err != nil {
return err
}
@ -148,7 +146,7 @@ func runDisplayM365JSON(
return err
}
perms, err := api.GetItemPermission(ctx, srv, driveID, itemID)
perms, err := ac.Drives().GetItemPermission(ctx, driveID, itemID)
if err != nil {
return err
}

View File

@ -3,7 +3,6 @@ package connector
import (
"context"
"strings"
"sync"
"github.com/alcionai/clues"
@ -17,7 +16,6 @@ import (
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
@ -27,13 +25,6 @@ import (
"github.com/alcionai/corso/src/pkg/selectors"
)
const (
// copyBufferSize is used for chunked upload
// Microsoft recommends 5-10MB buffers
// https://docs.microsoft.com/en-us/graph/api/driveitem-createuploadsession?view=graph-rest-1.0#best-practices
copyBufferSize = 5 * 1024 * 1024
)
// ---------------------------------------------------------------------------
// Data Collections
// ---------------------------------------------------------------------------
@ -71,7 +62,7 @@ func (gc *GraphConnector) ProduceBackupCollections(
serviceEnabled, canMakeDeltaQueries, err := checkServiceEnabled(
ctx,
gc.Discovery.Users(),
gc.AC.Users(),
path.ServiceType(sels.Service),
sels.DiscreteOwner)
if err != nil {
@ -97,7 +88,7 @@ func (gc *GraphConnector) ProduceBackupCollections(
case selectors.ServiceExchange:
colls, ssmb, err = exchange.DataCollections(
ctx,
gc.Discovery,
gc.AC,
sels,
gc.credentials.AzureTenantID,
owner,
@ -112,13 +103,12 @@ func (gc *GraphConnector) ProduceBackupCollections(
case selectors.ServiceOneDrive:
colls, ssmb, err = onedrive.DataCollections(
ctx,
gc.AC,
sels,
owner,
metadata,
lastBackupVersion,
gc.credentials.AzureTenantID,
gc.itemClient,
gc.Service,
gc.UpdateStatus,
ctrlOpts,
errs)
@ -129,12 +119,11 @@ func (gc *GraphConnector) ProduceBackupCollections(
case selectors.ServiceSharePoint:
colls, ssmb, err = sharepoint.DataCollections(
ctx,
gc.itemClient,
gc.AC,
sels,
owner,
metadata,
gc.credentials,
gc.Service,
gc,
ctrlOpts,
errs)
@ -174,7 +163,7 @@ func (gc *GraphConnector) IsBackupRunnable(
return true, nil
}
info, err := gc.Discovery.Users().GetInfo(ctx, resourceOwner)
info, err := gc.AC.Users().GetInfo(ctx, resourceOwner)
if err != nil {
return false, err
}
@ -242,7 +231,6 @@ func checkServiceEnabled(
func (gc *GraphConnector) ConsumeRestoreCollections(
ctx context.Context,
backupVersion int,
acct account.Account,
sels selectors.Selector,
dest control.RestoreDestination,
opts control.Options,
@ -257,52 +245,31 @@ func (gc *GraphConnector) ConsumeRestoreCollections(
var (
status *support.ConnectorOperationStatus
deets = &details.Builder{}
err error
)
creds, err := acct.M365Config()
if err != nil {
return nil, clues.Wrap(err, "malformed azure credentials")
}
// Buffer pool for uploads
pool := sync.Pool{
New: func() interface{} {
b := make([]byte, copyBufferSize)
return &b
},
}
switch sels.Service {
case selectors.ServiceExchange:
status, err = exchange.RestoreCollections(ctx,
creds,
gc.Discovery,
gc.Service,
dest,
dcs,
deets,
errs)
status, err = exchange.RestoreCollections(ctx, gc.AC, dest, dcs, deets, errs)
case selectors.ServiceOneDrive:
status, err = onedrive.RestoreCollections(ctx,
creds,
status, err = onedrive.RestoreCollections(
ctx,
onedrive.NewRestoreHandler(gc.AC),
backupVersion,
gc.Service,
dest,
opts,
dcs,
deets,
&pool,
errs)
case selectors.ServiceSharePoint:
status, err = sharepoint.RestoreCollections(ctx,
status, err = sharepoint.RestoreCollections(
ctx,
backupVersion,
creds,
gc.Service,
gc.AC,
dest,
opts,
dcs,
deets,
&pool,
errs)
default:
err = clues.Wrap(clues.New(sels.Service.String()), "service not supported")

View File

@ -12,7 +12,6 @@ import (
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/sharepoint"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version"
@ -298,12 +297,11 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
collections, excludes, err := sharepoint.DataCollections(
ctx,
graph.NewNoTimeoutHTTPWrapper(),
suite.ac,
sel,
sel,
nil,
connector.credentials,
connector.Service,
connector,
control.Defaults(),
fault.New(true))

View File

@ -192,7 +192,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
require.NoError(t, err, clues.ToCore(err))
cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{
data.NotFoundRestoreCollection{Collection: coll},
data.NoFetchRestoreCollection{Collection: coll},
}, fault.New(true))
test.expectError(t, err, clues.ToCore(err))
@ -402,7 +402,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
require.NotNil(t, metadata, "collections contains a metadata collection")
cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{
data.NotFoundRestoreCollection{Collection: metadata},
data.NoFetchRestoreCollection{Collection: metadata},
}, fault.New(true))
require.NoError(t, err, clues.ToCore(err))

View File

@ -11,7 +11,6 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"
@ -21,7 +20,6 @@ import (
type RestoreIntgSuite struct {
tester.Suite
gs graph.Servicer
credentials account.M365Config
ac api.Client
}
@ -44,14 +42,6 @@ func (suite *RestoreIntgSuite) SetupSuite() {
suite.credentials = m365
suite.ac, err = api.NewClient(m365)
require.NoError(t, err, clues.ToCore(err))
adpt, err := graph.CreateAdapter(
m365.AzureTenantID,
m365.AzureClientID,
m365.AzureClientSecret)
require.NoError(t, err, clues.ToCore(err))
suite.gs = graph.NewService(adpt)
}
// TestRestoreContact ensures contact object can be created, placed into

View File

@ -427,7 +427,7 @@ func checkMetadata(
) {
catPaths, err := parseMetadataCollections(
ctx,
[]data.RestoreCollection{data.NotFoundRestoreCollection{Collection: c}},
[]data.RestoreCollection{data.NoFetchRestoreCollection{Collection: c}},
fault.New(true))
if !assert.NoError(t, err, "getting metadata", clues.ToCore(err)) {
return

View File

@ -14,7 +14,6 @@ import (
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
@ -27,9 +26,7 @@ import (
// store through GraphAPI.
func RestoreCollections(
ctx context.Context,
creds account.M365Config,
ac api.Client,
gs graph.Servicer,
dest control.RestoreDestination,
dcs []data.RestoreCollection,
deets *details.Builder,

View File

@ -15,6 +15,11 @@ const (
// number of uploads, but the max that can be specified. This is
// added as a safeguard in case we misconfigure the values.
maxConccurrentUploads = 20
// CopyBufferSize is used for chunked upload
// Microsoft recommends 5-10MB buffers
// https://docs.microsoft.com/en-us/graph/api/driveitem-createuploadsession?view=graph-rest-1.0#best-practices
CopyBufferSize = 5 * 1024 * 1024
)
// ---------------------------------------------------------------------------

View File

@ -169,9 +169,13 @@ func IsMalware(err error) bool {
}
func IsMalwareResp(ctx context.Context, resp *http.Response) bool {
if resp == nil {
return false
}
// https://learn.microsoft.com/en-us/openspecs/sharepoint_protocols/ms-wsshp/ba4ee7a8-704c-4e9c-ab14-fa44c574bdf4
// https://learn.microsoft.com/en-us/openspecs/sharepoint_protocols/ms-wdvmoduu/6fa6d4a9-ac18-4cd7-b696-8a3b14a98291
if resp.Header.Get("X-Virus-Infected") == "true" {
if len(resp.Header) > 0 && resp.Header.Get("X-Virus-Infected") == "true" {
return true
}

View File

@ -15,7 +15,7 @@ import (
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/account"
m365api "github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// ---------------------------------------------------------------------------
@ -32,9 +32,7 @@ var (
// GraphRequestAdapter from the msgraph-sdk-go. Additional fields are for
// bookkeeping and interfacing with other component.
type GraphConnector struct {
Service graph.Servicer
Discovery m365api.Client
itemClient graph.Requester // configured to handle large item downloads
AC api.Client
tenant string
credentials account.M365Config
@ -64,12 +62,7 @@ func NewGraphConnector(
return nil, clues.Wrap(err, "retrieving m365 account configuration").WithClues(ctx)
}
service, err := createService(creds)
if err != nil {
return nil, clues.Wrap(err, "creating service connection").WithClues(ctx)
}
ac, err := m365api.NewClient(creds)
ac, err := api.NewClient(creds)
if err != nil {
return nil, clues.Wrap(err, "creating api client").WithClues(ctx)
}
@ -80,12 +73,10 @@ func NewGraphConnector(
}
gc := GraphConnector{
Discovery: ac,
AC: ac,
IDNameLookup: idname.NewCache(nil),
Service: service,
credentials: creds,
itemClient: graph.NewNoTimeoutHTTPWrapper(),
ownerLookup: rc,
tenant: acct.ID(),
wg: &sync.WaitGroup{},
@ -94,23 +85,6 @@ func NewGraphConnector(
return &gc, nil
}
// ---------------------------------------------------------------------------
// Service Client
// ---------------------------------------------------------------------------
// createService constructor for graphService component
func createService(creds account.M365Config) (*graph.Service, error) {
adapter, err := graph.CreateAdapter(
creds.AzureTenantID,
creds.AzureClientID,
creds.AzureClientSecret)
if err != nil {
return &graph.Service{}, err
}
return graph.NewService(adapter), nil
}
// ---------------------------------------------------------------------------
// Processing Status
// ---------------------------------------------------------------------------
@ -180,7 +154,7 @@ const (
Sites
)
func (r Resource) resourceClient(ac m365api.Client) (*resourceClient, error) {
func (r Resource) resourceClient(ac api.Client) (*resourceClient, error) {
switch r {
case Users:
return &resourceClient{enum: r, getter: ac.Users()}, nil
@ -209,7 +183,7 @@ var _ getOwnerIDAndNamer = &resourceClient{}
type getOwnerIDAndNamer interface {
getOwnerIDAndNameFrom(
ctx context.Context,
discovery m365api.Client,
discovery api.Client,
owner string,
ins idname.Cacher,
) (
@ -227,7 +201,7 @@ type getOwnerIDAndNamer interface {
// (PrincipalName for users, WebURL for sites).
func (r resourceClient) getOwnerIDAndNameFrom(
ctx context.Context,
discovery m365api.Client,
discovery api.Client,
owner string,
ins idname.Cacher,
) (string, string, error) {
@ -275,7 +249,7 @@ func (gc *GraphConnector) PopulateOwnerIDAndNamesFrom(
owner string, // input value, can be either id or name
ins idname.Cacher,
) (string, string, error) {
id, name, err := gc.ownerLookup.getOwnerIDAndNameFrom(ctx, gc.Discovery, owner, ins)
id, name, err := gc.ownerLookup.getOwnerIDAndNameFrom(ctx, gc.AC, owner, ins)
if err != nil {
return "", "", clues.Wrap(err, "identifying resource owner")
}

View File

@ -18,7 +18,6 @@ import (
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
@ -44,8 +43,8 @@ var (
func mustGetDefaultDriveID(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
backupService path.ServiceType,
service graph.Servicer,
ac api.Client,
service path.ServiceType,
resourceOwner string,
) string {
var (
@ -53,13 +52,13 @@ func mustGetDefaultDriveID(
d models.Driveable
)
switch backupService {
switch service {
case path.OneDriveService:
d, err = api.GetUsersDrive(ctx, service, resourceOwner)
d, err = ac.Users().GetDefaultDrive(ctx, resourceOwner)
case path.SharePointService:
d, err = api.GetSitesDefaultDrive(ctx, service, resourceOwner)
d, err = ac.Sites().GetDefaultDrive(ctx, resourceOwner)
default:
assert.FailNowf(t, "unknown service type %s", backupService.String())
assert.FailNowf(t, "unknown service type %s", service.String())
}
if err != nil {
@ -75,19 +74,18 @@ func mustGetDefaultDriveID(
}
type suiteInfo interface {
Service() graph.Servicer
Account() account.Account
APIClient() api.Client
Tenant() string
// Returns (username, user ID) for the user. These values are used for
// permissions.
PrimaryUser() (string, string)
SecondaryUser() (string, string)
TertiaryUser() (string, string)
// BackupResourceOwner returns the resource owner to run the backup/restore
// ResourceOwner returns the resource owner to run the backup/restore
// with. This can be different from the values used for permissions and it can
// also be a site.
BackupResourceOwner() string
BackupService() path.ServiceType
ResourceOwner() string
Service() path.ServiceType
Resource() Resource
}
@ -97,25 +95,46 @@ type oneDriveSuite interface {
}
type suiteInfoImpl struct {
ac api.Client
connector *GraphConnector
resourceOwner string
user string
userID string
resourceType Resource
secondaryUser string
secondaryUserID string
service path.ServiceType
tertiaryUser string
tertiaryUserID string
acct account.Account
service path.ServiceType
resourceType Resource
user string
userID string
}
func (si suiteInfoImpl) Service() graph.Servicer {
return si.connector.Service
func NewSuiteInfoImpl(
t *testing.T,
ctx context.Context, //revive:disable-line:context-as-argument
resourceOwner string,
service path.ServiceType,
) suiteInfoImpl {
resource := Users
if service == path.SharePointService {
resource = Sites
}
gc := loadConnector(ctx, t, resource)
return suiteInfoImpl{
ac: gc.AC,
connector: gc,
resourceOwner: resourceOwner,
resourceType: resource,
secondaryUser: tester.SecondaryM365UserID(t),
service: service,
tertiaryUser: tester.TertiaryM365UserID(t),
user: tester.M365UserID(t),
}
}
func (si suiteInfoImpl) Account() account.Account {
return si.acct
func (si suiteInfoImpl) APIClient() api.Client {
return si.ac
}
func (si suiteInfoImpl) Tenant() string {
@ -134,11 +153,11 @@ func (si suiteInfoImpl) TertiaryUser() (string, string) {
return si.tertiaryUser, si.tertiaryUserID
}
func (si suiteInfoImpl) BackupResourceOwner() string {
func (si suiteInfoImpl) ResourceOwner() string {
return si.resourceOwner
}
func (si suiteInfoImpl) BackupService() path.ServiceType {
func (si suiteInfoImpl) Service() path.ServiceType {
return si.service
}
@ -162,8 +181,7 @@ func TestGraphConnectorSharePointIntegrationSuite(t *testing.T) {
suite.Run(t, &GraphConnectorSharePointIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.M365AcctCredEnvs},
),
[][]string{tester.M365AcctCredEnvs}),
})
}
@ -173,27 +191,18 @@ func (suite *GraphConnectorSharePointIntegrationSuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
si := suiteInfoImpl{
connector: loadConnector(ctx, suite.T(), Sites),
user: tester.M365UserID(suite.T()),
secondaryUser: tester.SecondaryM365UserID(suite.T()),
tertiaryUser: tester.TertiaryM365UserID(suite.T()),
acct: tester.NewM365Account(suite.T()),
service: path.SharePointService,
resourceType: Sites,
}
si := NewSuiteInfoImpl(suite.T(), ctx, tester.M365SiteID(suite.T()), path.SharePointService)
si.resourceOwner = tester.M365SiteID(suite.T())
user, err := si.connector.Discovery.Users().GetByID(ctx, si.user)
// users needed for permissions
user, err := si.connector.AC.Users().GetByID(ctx, si.user)
require.NoError(t, err, "fetching user", si.user, clues.ToCore(err))
si.userID = ptr.Val(user.GetId())
secondaryUser, err := si.connector.Discovery.Users().GetByID(ctx, si.secondaryUser)
secondaryUser, err := si.connector.AC.Users().GetByID(ctx, si.secondaryUser)
require.NoError(t, err, "fetching user", si.secondaryUser, clues.ToCore(err))
si.secondaryUserID = ptr.Val(secondaryUser.GetId())
tertiaryUser, err := si.connector.Discovery.Users().GetByID(ctx, si.tertiaryUser)
tertiaryUser, err := si.connector.AC.Users().GetByID(ctx, si.tertiaryUser)
require.NoError(t, err, "fetching user", si.tertiaryUser, clues.ToCore(err))
si.tertiaryUserID = ptr.Val(tertiaryUser.GetId())
@ -233,8 +242,7 @@ func TestGraphConnectorOneDriveIntegrationSuite(t *testing.T) {
suite.Run(t, &GraphConnectorOneDriveIntegrationSuite{
Suite: tester.NewIntegrationSuite(
t,
[][]string{tester.M365AcctCredEnvs},
),
[][]string{tester.M365AcctCredEnvs}),
})
}
@ -244,25 +252,20 @@ func (suite *GraphConnectorOneDriveIntegrationSuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
si := suiteInfoImpl{
connector: loadConnector(ctx, t, Users),
user: tester.M365UserID(t),
secondaryUser: tester.SecondaryM365UserID(t),
acct: tester.NewM365Account(t),
service: path.OneDriveService,
resourceType: Users,
}
si := NewSuiteInfoImpl(t, ctx, tester.M365UserID(t), path.OneDriveService)
si.resourceOwner = si.user
user, err := si.connector.Discovery.Users().GetByID(ctx, si.user)
user, err := si.connector.AC.Users().GetByID(ctx, si.user)
require.NoError(t, err, "fetching user", si.user, clues.ToCore(err))
si.userID = ptr.Val(user.GetId())
secondaryUser, err := si.connector.Discovery.Users().GetByID(ctx, si.secondaryUser)
secondaryUser, err := si.connector.AC.Users().GetByID(ctx, si.secondaryUser)
require.NoError(t, err, "fetching user", si.secondaryUser, clues.ToCore(err))
si.secondaryUserID = ptr.Val(secondaryUser.GetId())
tertiaryUser, err := si.connector.AC.Users().GetByID(ctx, si.tertiaryUser)
require.NoError(t, err, "fetching user", si.tertiaryUser, clues.ToCore(err))
si.tertiaryUserID = ptr.Val(tertiaryUser.GetId())
suite.suiteInfo = si
}
@ -299,8 +302,7 @@ func TestGraphConnectorOneDriveNightlySuite(t *testing.T) {
suite.Run(t, &GraphConnectorOneDriveNightlySuite{
Suite: tester.NewNightlySuite(
t,
[][]string{tester.M365AcctCredEnvs},
),
[][]string{tester.M365AcctCredEnvs}),
})
}
@ -310,25 +312,20 @@ func (suite *GraphConnectorOneDriveNightlySuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
si := suiteInfoImpl{
connector: loadConnector(ctx, t, Users),
user: tester.M365UserID(t),
secondaryUser: tester.SecondaryM365UserID(t),
acct: tester.NewM365Account(t),
service: path.OneDriveService,
resourceType: Users,
}
si := NewSuiteInfoImpl(t, ctx, tester.M365UserID(t), path.OneDriveService)
si.resourceOwner = si.user
user, err := si.connector.Discovery.Users().GetByID(ctx, si.user)
user, err := si.connector.AC.Users().GetByID(ctx, si.user)
require.NoError(t, err, "fetching user", si.user, clues.ToCore(err))
si.userID = ptr.Val(user.GetId())
secondaryUser, err := si.connector.Discovery.Users().GetByID(ctx, si.secondaryUser)
secondaryUser, err := si.connector.AC.Users().GetByID(ctx, si.secondaryUser)
require.NoError(t, err, "fetching user", si.secondaryUser, clues.ToCore(err))
si.secondaryUserID = ptr.Val(secondaryUser.GetId())
tertiaryUser, err := si.connector.AC.Users().GetByID(ctx, si.tertiaryUser)
require.NoError(t, err, "fetching user", si.tertiaryUser, clues.ToCore(err))
si.tertiaryUserID = ptr.Val(tertiaryUser.GetId())
suite.suiteInfo = si
}
@ -367,9 +364,9 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(
driveID := mustGetDefaultDriveID(
t,
ctx,
suite.BackupService(),
suite.APIClient(),
suite.Service(),
suite.BackupResourceOwner())
suite.ResourceOwner())
rootPath := []string{
odConsts.DrivesPathDir,
@ -470,17 +467,17 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(
},
}
expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
expected, err := DataForInfo(suite.Service(), cols, version.Backup)
require.NoError(suite.T(), err)
for vn := startVersion; vn <= version.Backup; vn++ {
suite.Run(fmt.Sprintf("Version%d", vn), func() {
t := suite.T()
input, err := DataForInfo(suite.BackupService(), cols, vn)
input, err := DataForInfo(suite.Service(), cols, vn)
require.NoError(suite.T(), err)
testData := restoreBackupInfoMultiVersion{
service: suite.BackupService(),
service: suite.Service(),
resource: suite.Resource(),
backupVersion: vn,
collectionsPrevious: input,
@ -489,10 +486,9 @@ func testRestoreAndBackupMultipleFilesAndFoldersNoPermissions(
runRestoreBackupTestVersions(
t,
suite.Account(),
testData,
suite.Tenant(),
[]string{suite.BackupResourceOwner()},
[]string{suite.ResourceOwner()},
control.Options{
RestorePermissions: true,
ToggleFeatures: control.Toggles{},
@ -513,9 +509,9 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
driveID := mustGetDefaultDriveID(
t,
ctx,
suite.BackupService(),
suite.APIClient(),
suite.Service(),
suite.BackupResourceOwner())
suite.ResourceOwner())
fileName2 := "test-file2.txt"
folderCName := "folder-c"
@ -683,9 +679,9 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
},
}
expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
expected, err := DataForInfo(suite.Service(), cols, version.Backup)
require.NoError(suite.T(), err)
bss := suite.BackupService().String()
bss := suite.Service().String()
for vn := startVersion; vn <= version.Backup; vn++ {
suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
@ -693,11 +689,11 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
// Ideally this can always be true or false and still
// work, but limiting older versions to use emails so as
// to validate that flow as well.
input, err := DataForInfo(suite.BackupService(), cols, vn)
input, err := DataForInfo(suite.Service(), cols, vn)
require.NoError(suite.T(), err)
testData := restoreBackupInfoMultiVersion{
service: suite.BackupService(),
service: suite.Service(),
resource: suite.Resource(),
backupVersion: vn,
collectionsPrevious: input,
@ -706,10 +702,9 @@ func testPermissionsRestoreAndBackup(suite oneDriveSuite, startVersion int) {
runRestoreBackupTestVersions(
t,
suite.Account(),
testData,
suite.Tenant(),
[]string{suite.BackupResourceOwner()},
[]string{suite.ResourceOwner()},
control.Options{
RestorePermissions: true,
ToggleFeatures: control.Toggles{},
@ -730,9 +725,9 @@ func testPermissionsBackupAndNoRestore(suite oneDriveSuite, startVersion int) {
driveID := mustGetDefaultDriveID(
t,
ctx,
suite.BackupService(),
suite.APIClient(),
suite.Service(),
suite.BackupResourceOwner())
suite.ResourceOwner())
inputCols := []OnedriveColInfo{
{
@ -772,18 +767,18 @@ func testPermissionsBackupAndNoRestore(suite oneDriveSuite, startVersion int) {
},
}
expected, err := DataForInfo(suite.BackupService(), expectedCols, version.Backup)
expected, err := DataForInfo(suite.Service(), expectedCols, version.Backup)
require.NoError(suite.T(), err)
bss := suite.BackupService().String()
bss := suite.Service().String()
for vn := startVersion; vn <= version.Backup; vn++ {
suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
t := suite.T()
input, err := DataForInfo(suite.BackupService(), inputCols, vn)
input, err := DataForInfo(suite.Service(), inputCols, vn)
require.NoError(suite.T(), err)
testData := restoreBackupInfoMultiVersion{
service: suite.BackupService(),
service: suite.Service(),
resource: suite.Resource(),
backupVersion: vn,
collectionsPrevious: input,
@ -792,10 +787,9 @@ func testPermissionsBackupAndNoRestore(suite oneDriveSuite, startVersion int) {
runRestoreBackupTestVersions(
t,
suite.Account(),
testData,
suite.Tenant(),
[]string{suite.BackupResourceOwner()},
[]string{suite.ResourceOwner()},
control.Options{
RestorePermissions: false,
ToggleFeatures: control.Toggles{},
@ -819,9 +813,9 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
driveID := mustGetDefaultDriveID(
t,
ctx,
suite.BackupService(),
suite.APIClient(),
suite.Service(),
suite.BackupResourceOwner())
suite.ResourceOwner())
folderAName := "custom"
folderBName := "inherited"
@ -953,9 +947,9 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
},
}
expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
expected, err := DataForInfo(suite.Service(), cols, version.Backup)
require.NoError(suite.T(), err)
bss := suite.BackupService().String()
bss := suite.Service().String()
for vn := startVersion; vn <= version.Backup; vn++ {
suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
@ -963,11 +957,11 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
// Ideally this can always be true or false and still
// work, but limiting older versions to use emails so as
// to validate that flow as well.
input, err := DataForInfo(suite.BackupService(), cols, vn)
input, err := DataForInfo(suite.Service(), cols, vn)
require.NoError(suite.T(), err)
testData := restoreBackupInfoMultiVersion{
service: suite.BackupService(),
service: suite.Service(),
resource: suite.Resource(),
backupVersion: vn,
collectionsPrevious: input,
@ -976,10 +970,9 @@ func testPermissionsInheritanceRestoreAndBackup(suite oneDriveSuite, startVersio
runRestoreBackupTestVersions(
t,
suite.Account(),
testData,
suite.Tenant(),
[]string{suite.BackupResourceOwner()},
[]string{suite.ResourceOwner()},
control.Options{
RestorePermissions: true,
ToggleFeatures: control.Toggles{},
@ -1001,9 +994,9 @@ func testRestoreFolderNamedFolderRegression(
driveID := mustGetDefaultDriveID(
suite.T(),
ctx,
suite.BackupService(),
suite.APIClient(),
suite.Service(),
suite.BackupResourceOwner())
suite.ResourceOwner())
rootPath := []string{
odConsts.DrivesPathDir,
@ -1072,18 +1065,18 @@ func testRestoreFolderNamedFolderRegression(
},
}
expected, err := DataForInfo(suite.BackupService(), cols, version.Backup)
expected, err := DataForInfo(suite.Service(), cols, version.Backup)
require.NoError(suite.T(), err)
bss := suite.BackupService().String()
bss := suite.Service().String()
for vn := startVersion; vn <= version.Backup; vn++ {
suite.Run(fmt.Sprintf("%s-Version%d", bss, vn), func() {
t := suite.T()
input, err := DataForInfo(suite.BackupService(), cols, vn)
input, err := DataForInfo(suite.Service(), cols, vn)
require.NoError(suite.T(), err)
testData := restoreBackupInfoMultiVersion{
service: suite.BackupService(),
service: suite.Service(),
resource: suite.Resource(),
backupVersion: vn,
collectionsPrevious: input,
@ -1092,10 +1085,9 @@ func testRestoreFolderNamedFolderRegression(
runRestoreTestWithVerion(
t,
suite.Account(),
testData,
suite.Tenant(),
[]string{suite.BackupResourceOwner()},
[]string{suite.ResourceOwner()},
control.Options{
RestorePermissions: true,
ToggleFeatures: control.Toggles{},

View File

@ -19,7 +19,6 @@ import (
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
@ -263,7 +262,6 @@ type GraphConnectorIntegrationSuite struct {
connector *GraphConnector
user string
secondaryUser string
acct account.Account
}
func TestGraphConnectorIntegrationSuite(t *testing.T) {
@ -284,7 +282,6 @@ func (suite *GraphConnectorIntegrationSuite) SetupSuite() {
suite.connector = loadConnector(ctx, t, Users)
suite.user = tester.M365UserID(t)
suite.secondaryUser = tester.SecondaryM365UserID(t)
suite.acct = tester.NewM365Account(t)
tester.LogTimeOfTest(t)
}
@ -296,7 +293,6 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() {
defer flush()
var (
acct = tester.NewM365Account(t)
dest = tester.DefaultTestRestoreDestination("")
sel = selectors.Selector{
Service: selectors.ServiceUnknown,
@ -306,7 +302,6 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreFailsBadService() {
deets, err := suite.connector.ConsumeRestoreCollections(
ctx,
version.Backup,
acct,
sel,
dest,
control.Options{
@ -385,7 +380,6 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
deets, err := suite.connector.ConsumeRestoreCollections(
ctx,
version.Backup,
suite.acct,
test.sel,
dest,
control.Options{
@ -429,7 +423,6 @@ func runRestore(
deets, err := restoreGC.ConsumeRestoreCollections(
ctx,
backupVersion,
config.Acct,
restoreSel,
config.Dest,
config.Opts,
@ -528,7 +521,6 @@ func runBackupAndCompare(
func runRestoreBackupTest(
t *testing.T,
acct account.Account,
test restoreBackupInfo,
tenant string,
resourceOwners []string,
@ -538,7 +530,6 @@ func runRestoreBackupTest(
defer flush()
config := ConfigInfo{
Acct: acct,
Opts: opts,
Resource: test.resource,
Service: test.service,
@ -575,7 +566,6 @@ func runRestoreBackupTest(
// runRestoreTest restores with data using the test's backup version
func runRestoreTestWithVerion(
t *testing.T,
acct account.Account,
test restoreBackupInfoMultiVersion,
tenant string,
resourceOwners []string,
@ -585,7 +575,6 @@ func runRestoreTestWithVerion(
defer flush()
config := ConfigInfo{
Acct: acct,
Opts: opts,
Resource: test.resource,
Service: test.service,
@ -614,7 +603,6 @@ func runRestoreTestWithVerion(
// something that would be in the form of a newer backup.
func runRestoreBackupTestVersions(
t *testing.T,
acct account.Account,
test restoreBackupInfoMultiVersion,
tenant string,
resourceOwners []string,
@ -624,7 +612,6 @@ func runRestoreBackupTestVersions(
defer flush()
config := ConfigInfo{
Acct: acct,
Opts: opts,
Resource: test.resource,
Service: test.service,
@ -920,15 +907,13 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup() {
suite.Run(test.name, func() {
runRestoreBackupTest(
suite.T(),
suite.acct,
test,
suite.connector.tenant,
[]string{suite.user},
control.Options{
RestorePermissions: true,
ToggleFeatures: control.Toggles{},
},
)
})
})
}
}
@ -1044,7 +1029,6 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
deets, err := restoreGC.ConsumeRestoreCollections(
ctx,
version.Backup,
suite.acct,
restoreSel,
dest,
control.Options{
@ -1135,7 +1119,6 @@ func (suite *GraphConnectorIntegrationSuite) TestRestoreAndBackup_largeMailAttac
runRestoreBackupTest(
suite.T(),
suite.acct,
test,
suite.connector.tenant,
[]string{suite.user},

View File

@ -8,7 +8,6 @@ import (
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/path"
)
@ -39,7 +38,6 @@ type ItemInfo struct {
}
type ConfigInfo struct {
Acct account.Account
Opts control.Options
Resource Resource
Service path.ServiceType
@ -104,7 +102,7 @@ type mockRestoreCollection struct {
auxItems map[string]data.Stream
}
func (rc mockRestoreCollection) Fetch(
func (rc mockRestoreCollection) FetchItemByName(
ctx context.Context,
name string,
) (data.Stream, error) {

View File

@ -7,7 +7,6 @@ import (
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
@ -59,7 +58,6 @@ func (gc GraphConnector) Wait() *data.CollectionStats {
func (gc GraphConnector) ConsumeRestoreCollections(
_ context.Context,
_ int,
_ account.Account,
_ selectors.Selector,
_ control.RestoreDestination,
_ control.Options,

View File

@ -43,8 +43,7 @@ var (
// Collection represents a set of OneDrive objects retrieved from M365
type Collection struct {
// configured to handle large item downloads
itemClient graph.Requester
handler BackupHandler
// data is used to share data streams with the collection consumer
data chan data.Stream
@ -56,14 +55,9 @@ type Collection struct {
// Primary M365 ID of the drive this collection was created from
driveID string
// Display Name of the associated drive
driveName string
source driveSource
service graph.Servicer
statusUpdater support.StatusUpdater
itemGetter itemGetterFunc
itemReader itemReaderFunc
itemMetaReader itemMetaReaderFunc
ctrl control.Options
// PrevPath is the previous hierarchical path used by this collection.
@ -92,29 +86,6 @@ type Collection struct {
doNotMergeItems bool
}
// itemGetterFunc gets a specified item
type itemGetterFunc func(
ctx context.Context,
srv graph.Servicer,
driveID, itemID string,
) (models.DriveItemable, error)
// itemReadFunc returns a reader for the specified item
type itemReaderFunc func(
ctx context.Context,
client graph.Requester,
item models.DriveItemable,
) (details.ItemInfo, io.ReadCloser, error)
// itemMetaReaderFunc returns a reader for the metadata of the
// specified item
type itemMetaReaderFunc func(
ctx context.Context,
service graph.Servicer,
driveID string,
item models.DriveItemable,
) (io.ReadCloser, int, error)
func pathToLocation(p path.Path) (*path.Builder, error) {
if p == nil {
return nil, nil
@ -130,13 +101,11 @@ func pathToLocation(p path.Path) (*path.Builder, error) {
// NewCollection creates a Collection
func NewCollection(
itemClient graph.Requester,
handler BackupHandler,
currPath path.Path,
prevPath path.Path,
driveID string,
service graph.Servicer,
statusUpdater support.StatusUpdater,
source driveSource,
ctrlOpts control.Options,
colScope collectionScope,
doNotMergeItems bool,
@ -156,13 +125,11 @@ func NewCollection(
}
c := newColl(
itemClient,
handler,
currPath,
prevPath,
driveID,
service,
statusUpdater,
source,
ctrlOpts,
colScope,
doNotMergeItems)
@ -174,26 +141,21 @@ func NewCollection(
}
func newColl(
gr graph.Requester,
handler BackupHandler,
currPath path.Path,
prevPath path.Path,
driveID string,
service graph.Servicer,
statusUpdater support.StatusUpdater,
source driveSource,
ctrlOpts control.Options,
colScope collectionScope,
doNotMergeItems bool,
) *Collection {
c := &Collection{
itemClient: gr,
itemGetter: api.GetDriveItem,
handler: handler,
folderPath: currPath,
prevPath: prevPath,
driveItems: map[string]models.DriveItemable{},
driveID: driveID,
source: source,
service: service,
data: make(chan data.Stream, graph.Parallelism(path.OneDriveMetadataService).CollectionBufferSize()),
statusUpdater: statusUpdater,
ctrl: ctrlOpts,
@ -202,16 +164,6 @@ func newColl(
doNotMergeItems: doNotMergeItems,
}
// Allows tests to set a mock populator
switch source {
case SharePointSource:
c.itemReader = sharePointItemReader
c.itemMetaReader = sharePointItemMetaReader
default:
c.itemReader = oneDriveItemReader
c.itemMetaReader = oneDriveItemMetaReader
}
return c
}
@ -222,7 +174,8 @@ func (oc *Collection) Add(item models.DriveItemable) bool {
_, found := oc.driveItems[ptr.Val(item.GetId())]
oc.driveItems[ptr.Val(item.GetId())] = item
return !found // !found = new
// if !found, it's a new addition
return !found
}
// Remove removes a item from the collection
@ -246,7 +199,7 @@ func (oc *Collection) IsEmpty() bool {
// Items() returns the channel containing M365 Exchange objects
func (oc *Collection) Items(
ctx context.Context,
errs *fault.Bus, // TODO: currently unused while onedrive isn't up to date with clues/fault
errs *fault.Bus,
) <-chan data.Stream {
go oc.populateItems(ctx, errs)
return oc.data
@ -274,21 +227,7 @@ func (oc Collection) PreviousLocationPath() details.LocationIDer {
return nil
}
var ider details.LocationIDer
switch oc.source {
case OneDriveSource:
ider = details.NewOneDriveLocationIDer(
oc.driveID,
oc.prevLocPath.Elements()...)
default:
ider = details.NewSharePointLocationIDer(
oc.driveID,
oc.prevLocPath.Elements()...)
}
return ider
return oc.handler.NewLocationIDer(oc.driveID, oc.prevLocPath.Elements()...)
}
func (oc Collection) State() data.CollectionState {
@ -328,14 +267,7 @@ func (oc *Collection) getDriveItemContent(
el = errs.Local()
)
itemData, err := downloadContent(
ctx,
oc.service,
oc.itemGetter,
oc.itemReader,
oc.itemClient,
item,
oc.driveID)
itemData, err := downloadContent(ctx, oc.handler, item, oc.driveID)
if err != nil {
if clues.HasLabel(err, graph.LabelsMalware) || (item != nil && item.GetMalware() != nil) {
logger.CtxErr(ctx, err).With("skipped_reason", fault.SkipMalware).Info("item flagged as malware")
@ -377,19 +309,21 @@ func (oc *Collection) getDriveItemContent(
return itemData, nil
}
type itemAndAPIGetter interface {
GetItemer
api.Getter
}
// downloadContent attempts to fetch the item content. If the content url
// is expired (ie, returns a 401), it re-fetches the item to get a new download
// url and tries again.
func downloadContent(
ctx context.Context,
svc graph.Servicer,
igf itemGetterFunc,
irf itemReaderFunc,
gr graph.Requester,
iaag itemAndAPIGetter,
item models.DriveItemable,
driveID string,
) (io.ReadCloser, error) {
_, content, err := irf(ctx, gr, item)
content, err := downloadItem(ctx, iaag, item)
if err == nil {
return content, nil
} else if !graph.IsErrUnauthorized(err) {
@ -400,12 +334,12 @@ func downloadContent(
// token, and that we've overrun the available window to
// download the actual file. Re-downloading the item will
// refresh that download url.
di, err := igf(ctx, svc, driveID, ptr.Val(item.GetId()))
di, err := iaag.GetItem(ctx, driveID, ptr.Val(item.GetId()))
if err != nil {
return nil, clues.Wrap(err, "retrieving expired item")
}
_, content, err = irf(ctx, gr, di)
content, err = downloadItem(ctx, iaag, di)
if err != nil {
return nil, clues.Wrap(err, "content download retry")
}
@ -428,16 +362,13 @@ func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) {
// Retrieve the OneDrive folder path to set later in
// `details.OneDriveInfo`
parentPathString, err := path.GetDriveFolderPath(oc.folderPath)
parentPath, err := path.GetDriveFolderPath(oc.folderPath)
if err != nil {
oc.reportAsCompleted(ctx, 0, 0, 0)
return
}
queuedPath := "/" + parentPathString
if oc.source == SharePointSource && len(oc.driveName) > 0 {
queuedPath = "/" + oc.driveName + queuedPath
}
queuedPath := oc.handler.FormatDisplayPath(oc.driveName, parentPath)
folderProgress := observe.ProgressWithCount(
ctx,
@ -498,25 +429,13 @@ func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) {
}
// Fetch metadata for the file
itemMeta, itemMetaSize, err = oc.itemMetaReader(
ctx,
oc.service,
oc.driveID,
item)
itemMeta, itemMetaSize, err = downloadItemMeta(ctx, oc.handler, oc.driveID, item)
if err != nil {
el.AddRecoverable(clues.Wrap(err, "getting item metadata").Label(fault.LabelForceNoBackupCreation))
return
}
switch oc.source {
case SharePointSource:
itemInfo.SharePoint = sharePointItemInfo(item, itemSize)
itemInfo.SharePoint.ParentPath = parentPathString
default:
itemInfo.OneDrive = oneDriveItemInfo(item, itemSize)
itemInfo.OneDrive.ParentPath = parentPathString
}
itemInfo = oc.handler.AugmentItemInfo(itemInfo, item, itemSize, parentPath)
ctx = clues.Add(ctx, "item_info", itemInfo)

View File

@ -2,7 +2,6 @@ package onedrive
import (
"bytes"
"context"
"encoding/json"
"io"
"net/http"
@ -12,7 +11,6 @@ import (
"time"
"github.com/alcionai/clues"
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@ -21,6 +19,9 @@ import (
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
metaTD "github.com/alcionai/corso/src/internal/connector/onedrive/metadata/testdata"
"github.com/alcionai/corso/src/internal/connector/onedrive/mock"
odTD "github.com/alcionai/corso/src/internal/connector/onedrive/testdata"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester"
@ -30,21 +31,14 @@ import (
"github.com/alcionai/corso/src/pkg/path"
)
// ---------------------------------------------------------------------------
// tests
// ---------------------------------------------------------------------------
type CollectionUnitTestSuite struct {
tester.Suite
}
// Allows `*CollectionUnitTestSuite` to be used as a graph.Servicer
// TODO: Implement these methods
func (suite *CollectionUnitTestSuite) Client() *msgraphsdk.GraphServiceClient {
return nil
}
func (suite *CollectionUnitTestSuite) Adapter() *msgraphsdk.GraphRequestAdapter {
return nil
}
func TestCollectionUnitTestSuite(t *testing.T) {
suite.Run(t, &CollectionUnitTestSuite{Suite: tester.NewUnitSuite(t)})
}
@ -64,16 +58,23 @@ func (suite *CollectionUnitTestSuite) testStatusUpdater(
func (suite *CollectionUnitTestSuite) TestCollection() {
var (
testItemID = "fakeItemID"
testItemName = "itemName"
testItemData = []byte("testdata")
now = time.Now()
testItemMeta = metadata.Metadata{
stubItemID = "fakeItemID"
stubItemName = "itemName"
stubItemContent = []byte("stub_content")
stubMetaID = "testMetaID"
stubMetaEntityID = "email@provider.com"
stubMetaRoles = []string{"read", "write"}
stubMeta = metadata.Metadata{
FileName: stubItemName,
Permissions: []metadata.Permission{
{
ID: "testMetaID",
Roles: []string{"read", "write"},
Email: "email@provider.com",
ID: stubMetaID,
EntityID: stubMetaEntityID,
EntityType: metadata.GV2User,
Roles: stubMetaRoles,
Expiration: &now,
},
},
@ -89,106 +90,74 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
table := []struct {
name string
numInstances int
source driveSource
itemReader itemReaderFunc
service path.ServiceType
itemInfo details.ItemInfo
getBody io.ReadCloser
getErr error
itemDeets nst
infoFrom func(*testing.T, details.ItemInfo) (string, string)
expectErr require.ErrorAssertionFunc
expectLabels []string
}{
{
name: "oneDrive, no duplicates",
numInstances: 1,
source: OneDriveSource,
itemDeets: nst{testItemName, 42, now},
itemReader: func(context.Context, graph.Requester, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: testItemName, Modified: now}},
io.NopCloser(bytes.NewReader(testItemData)),
nil
},
infoFrom: func(t *testing.T, dii details.ItemInfo) (string, string) {
require.NotNil(t, dii.OneDrive)
return dii.OneDrive.ItemName, dii.OneDrive.ParentPath
},
service: path.OneDriveService,
itemDeets: nst{stubItemName, 42, now},
itemInfo: details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: stubItemName, Modified: now}},
getBody: io.NopCloser(bytes.NewReader(stubItemContent)),
getErr: nil,
expectErr: require.NoError,
},
{
name: "oneDrive, duplicates",
numInstances: 3,
source: OneDriveSource,
itemDeets: nst{testItemName, 42, now},
itemReader: func(context.Context, graph.Requester, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: testItemName, Modified: now}},
io.NopCloser(bytes.NewReader(testItemData)),
nil
},
infoFrom: func(t *testing.T, dii details.ItemInfo) (string, string) {
require.NotNil(t, dii.OneDrive)
return dii.OneDrive.ItemName, dii.OneDrive.ParentPath
},
service: path.OneDriveService,
itemDeets: nst{stubItemName, 42, now},
getBody: io.NopCloser(bytes.NewReader(stubItemContent)),
getErr: nil,
itemInfo: details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: stubItemName, Modified: now}},
expectErr: require.NoError,
},
{
name: "oneDrive, malware",
numInstances: 3,
source: OneDriveSource,
itemDeets: nst{testItemName, 42, now},
itemReader: func(context.Context, graph.Requester, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{}, nil, clues.New("test malware").Label(graph.LabelsMalware)
},
infoFrom: func(t *testing.T, dii details.ItemInfo) (string, string) {
require.NotNil(t, dii.OneDrive)
return dii.OneDrive.ItemName, dii.OneDrive.ParentPath
},
service: path.OneDriveService,
itemDeets: nst{stubItemName, 42, now},
itemInfo: details.ItemInfo{},
getBody: nil,
getErr: clues.New("test malware").Label(graph.LabelsMalware),
expectErr: require.Error,
expectLabels: []string{graph.LabelsMalware, graph.LabelsSkippable},
},
{
name: "oneDrive, not found",
numInstances: 3,
source: OneDriveSource,
itemDeets: nst{testItemName, 42, now},
// Usually `Not Found` is returned from itemGetter and not itemReader
itemReader: func(context.Context, graph.Requester, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{}, nil, clues.New("test not found").Label(graph.LabelStatus(http.StatusNotFound))
},
infoFrom: func(t *testing.T, dii details.ItemInfo) (string, string) {
require.NotNil(t, dii.OneDrive)
return dii.OneDrive.ItemName, dii.OneDrive.ParentPath
},
service: path.OneDriveService,
itemDeets: nst{stubItemName, 42, now},
itemInfo: details.ItemInfo{},
getBody: nil,
getErr: clues.New("test not found").Label(graph.LabelStatus(http.StatusNotFound)),
expectErr: require.Error,
expectLabels: []string{graph.LabelStatus(http.StatusNotFound), graph.LabelsSkippable},
},
{
name: "sharePoint, no duplicates",
numInstances: 1,
source: SharePointSource,
itemDeets: nst{testItemName, 42, now},
itemReader: func(context.Context, graph.Requester, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: testItemName, Modified: now}},
io.NopCloser(bytes.NewReader(testItemData)),
nil
},
infoFrom: func(t *testing.T, dii details.ItemInfo) (string, string) {
require.NotNil(t, dii.SharePoint)
return dii.SharePoint.ItemName, dii.SharePoint.ParentPath
},
service: path.SharePointService,
itemDeets: nst{stubItemName, 42, now},
itemInfo: details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: stubItemName, Modified: now}},
getBody: io.NopCloser(bytes.NewReader(stubItemContent)),
getErr: nil,
expectErr: require.NoError,
},
{
name: "sharePoint, duplicates",
numInstances: 3,
source: SharePointSource,
itemDeets: nst{testItemName, 42, now},
itemReader: func(context.Context, graph.Requester, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: testItemName, Modified: now}},
io.NopCloser(bytes.NewReader(testItemData)),
nil
},
infoFrom: func(t *testing.T, dii details.ItemInfo) (string, string) {
require.NotNil(t, dii.SharePoint)
return dii.SharePoint.ItemName, dii.SharePoint.ParentPath
},
service: path.SharePointService,
itemDeets: nst{stubItemName, 42, now},
itemInfo: details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: stubItemName, Modified: now}},
getBody: io.NopCloser(bytes.NewReader(stubItemContent)),
getErr: nil,
expectErr: require.NoError,
},
}
@ -205,19 +174,34 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
readItems = []data.Stream{}
)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/dir1/dir2/dir3", "tenant", "owner", test.source)
require.NoError(t, err, clues.ToCore(err))
driveFolderPath, err := path.GetDriveFolderPath(folderPath)
pb := path.Builder{}.Append(path.Split("drive/driveID1/root:/dir1/dir2/dir3")...)
folderPath, err := pb.ToDataLayerOneDrivePath("tenant", "owner", false)
require.NoError(t, err, clues.ToCore(err))
mbh := mock.DefaultOneDriveBH()
if test.service == path.SharePointService {
mbh = mock.DefaultSharePointBH()
mbh.ItemInfo.SharePoint.Modified = now
mbh.ItemInfo.SharePoint.ItemName = stubItemName
} else {
mbh.ItemInfo.OneDrive.Modified = now
mbh.ItemInfo.OneDrive.ItemName = stubItemName
}
mbh.GetResps = []*http.Response{{StatusCode: http.StatusOK, Body: test.getBody}}
mbh.GetErrs = []error{test.getErr}
mbh.GI = mock.GetsItem{Err: assert.AnError}
pcr := metaTD.NewStubPermissionResponse(metadata.GV2User, stubMetaID, stubMetaEntityID, stubMetaRoles)
mbh.GIP = mock.GetsItemPermission{Perm: pcr}
coll, err := NewCollection(
graph.NewNoTimeoutHTTPWrapper(),
mbh,
folderPath,
nil,
"drive-id",
suite,
suite.testStatusUpdater(&wg, &collStatus),
test.source,
control.Options{ToggleFeatures: control.Toggles{}},
CollectionScopeFolder,
true)
@ -225,34 +209,21 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
require.NotNil(t, coll)
assert.Equal(t, folderPath, coll.FullPath())
// Set a item reader, add an item and validate we get the item back
mockItem := models.NewDriveItem()
mockItem.SetId(&testItemID)
mockItem.SetFile(models.NewFile())
mockItem.SetName(&test.itemDeets.name)
mockItem.SetSize(&test.itemDeets.size)
mockItem.SetCreatedDateTime(&test.itemDeets.time)
mockItem.SetLastModifiedDateTime(&test.itemDeets.time)
stubItem := odTD.NewStubDriveItem(
stubItemID,
test.itemDeets.name,
test.itemDeets.size,
test.itemDeets.time,
test.itemDeets.time,
true,
true)
for i := 0; i < test.numInstances; i++ {
coll.Add(mockItem)
}
coll.itemReader = test.itemReader
coll.itemMetaReader = func(_ context.Context,
_ graph.Servicer,
_ string,
_ models.DriveItemable,
) (io.ReadCloser, int, error) {
metaJSON, err := json.Marshal(testItemMeta)
if err != nil {
return nil, 0, err
}
return io.NopCloser(bytes.NewReader(metaJSON)), len(metaJSON), nil
coll.Add(stubItem)
}
// Read items from the collection
// only needs 1 because multiple items should get deduped.
wg.Add(1)
for item := range coll.Items(ctx, fault.New(true)) {
@ -269,11 +240,10 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
// Validate item info and data
readItem := readItems[0]
readItemInfo := readItem.(data.StreamInfo)
assert.Equal(t, testItemID+metadata.DataFileSuffix, readItem.UUID())
assert.Equal(t, stubItemID+metadata.DataFileSuffix, readItem.UUID())
require.Implements(t, (*data.StreamModTime)(nil), readItem)
mt := readItem.(data.StreamModTime)
assert.Equal(t, now, mt.ModTime())
@ -288,104 +258,73 @@ func (suite *CollectionUnitTestSuite) TestCollection() {
return
}
name, parentPath := test.infoFrom(t, readItemInfo.Info())
assert.Equal(t, testItemData, readData)
assert.Equal(t, testItemName, name)
assert.Equal(t, driveFolderPath, parentPath)
assert.Equal(t, stubItemContent, readData)
readItemMeta := readItems[1]
assert.Equal(t, stubItemID+metadata.MetaFileSuffix, readItemMeta.UUID())
assert.Equal(t, testItemID+metadata.MetaFileSuffix, readItemMeta.UUID())
readMetaData, err := io.ReadAll(readItemMeta.ToReader())
readMeta := metadata.Metadata{}
err = json.NewDecoder(readItemMeta.ToReader()).Decode(&readMeta)
require.NoError(t, err, clues.ToCore(err))
tm, err := json.Marshal(testItemMeta)
if err != nil {
t.Fatal("unable to marshall test permissions", err)
}
assert.Equal(t, tm, readMetaData)
metaTD.AssertMetadataEqual(t, stubMeta, readMeta)
})
}
}
func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
var (
t = suite.T()
stubItemID = "fakeItemID"
collStatus = support.ConnectorOperationStatus{}
wg = sync.WaitGroup{}
name = "name"
size int64 = 42
now = time.Now()
)
table := []struct {
name string
source driveSource
}{
{
name: "oneDrive",
source: OneDriveSource,
},
{
name: "sharePoint",
source: SharePointSource,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
testItemID = "fakeItemID"
collStatus = support.ConnectorOperationStatus{}
wg = sync.WaitGroup{}
)
wg.Add(1)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
pb := path.Builder{}.Append(path.Split("drive/driveID1/root:/folderPath")...)
folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
require.NoError(t, err, clues.ToCore(err))
mbh := mock.DefaultOneDriveBH()
mbh.GI = mock.GetsItem{Err: assert.AnError}
mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
mbh.GetResps = []*http.Response{
nil,
{StatusCode: http.StatusOK, Body: io.NopCloser(strings.NewReader("test"))},
}
mbh.GetErrs = []error{
clues.Stack(assert.AnError).Label(graph.LabelStatus(http.StatusUnauthorized)),
nil,
}
coll, err := NewCollection(
graph.NewNoTimeoutHTTPWrapper(),
mbh,
folderPath,
nil,
"fakeDriveID",
suite,
suite.testStatusUpdater(&wg, &collStatus),
test.source,
control.Options{ToggleFeatures: control.Toggles{}},
CollectionScopeFolder,
true)
require.NoError(t, err, clues.ToCore(err))
mockItem := models.NewDriveItem()
mockItem.SetId(&testItemID)
mockItem.SetFile(models.NewFile())
mockItem.SetName(&name)
mockItem.SetSize(&size)
mockItem.SetCreatedDateTime(&now)
mockItem.SetLastModifiedDateTime(&now)
coll.Add(mockItem)
stubItem := odTD.NewStubDriveItem(
stubItemID,
name,
size,
now,
now,
true,
false)
coll.itemReader = func(
context.Context,
graph.Requester,
models.DriveItemable,
) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{}, nil, assert.AnError
}
coll.itemMetaReader = func(_ context.Context,
_ graph.Servicer,
_ string,
_ models.DriveItemable,
) (io.ReadCloser, int, error) {
return io.NopCloser(strings.NewReader(`{}`)), 2, nil
}
coll.Add(stubItem)
collItem, ok := <-coll.Items(ctx, fault.New(true))
assert.True(t, ok)
@ -398,101 +337,61 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
// Expect no items
require.Equal(t, 1, collStatus.Metrics.Objects, "only one object should be counted")
require.Equal(t, 1, collStatus.Metrics.Successes, "TODO: should be 0, but allowing 1 to reduce async management")
})
}
}
func (suite *CollectionUnitTestSuite) TestCollectionReadUnauthorizedErrorRetry() {
var (
t = suite.T()
stubItemID = "fakeItemID"
collStatus = support.ConnectorOperationStatus{}
wg = sync.WaitGroup{}
name = "name"
size int64 = 42
now = time.Now()
)
table := []struct {
name string
source driveSource
}{
{
name: "oneDrive",
source: OneDriveSource,
},
{
name: "sharePoint",
source: SharePointSource,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
testItemID = "fakeItemID"
collStatus = support.ConnectorOperationStatus{}
wg = sync.WaitGroup{}
)
wg.Add(1)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
stubItem := odTD.NewStubDriveItem(
stubItemID,
name,
size,
now,
now,
true,
false)
pb := path.Builder{}.Append(path.Split("drive/driveID1/root:/folderPath")...)
folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
require.NoError(t, err)
mbh := mock.DefaultOneDriveBH()
mbh.GI = mock.GetsItem{Item: stubItem}
mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
mbh.GetResps = []*http.Response{
nil,
{StatusCode: http.StatusOK, Body: io.NopCloser(strings.NewReader("test"))},
}
mbh.GetErrs = []error{
clues.Stack(assert.AnError).Label(graph.LabelStatus(http.StatusUnauthorized)),
nil,
}
coll, err := NewCollection(
graph.NewNoTimeoutHTTPWrapper(),
mbh,
folderPath,
nil,
"fakeDriveID",
suite,
suite.testStatusUpdater(&wg, &collStatus),
test.source,
control.Options{ToggleFeatures: control.Toggles{}},
CollectionScopeFolder,
true)
require.NoError(t, err, clues.ToCore(err))
mockItem := models.NewDriveItem()
mockItem.SetId(&testItemID)
mockItem.SetFile(models.NewFile())
mockItem.SetName(&name)
mockItem.SetSize(&size)
mockItem.SetCreatedDateTime(&now)
mockItem.SetLastModifiedDateTime(&now)
coll.Add(mockItem)
count := 0
coll.itemGetter = func(
ctx context.Context,
srv graph.Servicer,
driveID, itemID string,
) (models.DriveItemable, error) {
return mockItem, nil
}
coll.itemReader = func(
context.Context,
graph.Requester,
models.DriveItemable,
) (details.ItemInfo, io.ReadCloser, error) {
if count < 1 {
count++
return details.ItemInfo{}, nil, clues.Stack(assert.AnError).
Label(graph.LabelStatus(http.StatusUnauthorized))
}
return details.ItemInfo{}, io.NopCloser(strings.NewReader("test")), nil
}
coll.itemMetaReader = func(_ context.Context,
_ graph.Servicer,
_ string,
_ models.DriveItemable,
) (io.ReadCloser, int, error) {
return io.NopCloser(strings.NewReader(`{}`)), 2, nil
}
coll.Add(stubItem)
collItem, ok := <-coll.Items(ctx, fault.New(true))
assert.True(t, ok)
@ -502,89 +401,64 @@ func (suite *CollectionUnitTestSuite) TestCollectionReadUnauthorizedErrorRetry()
wg.Wait()
require.Equal(t, 1, collStatus.Metrics.Objects, "only one object should be counted")
require.Equal(t, 1, collStatus.Metrics.Successes, "read object successfully")
require.Equal(t, 1, count, "retry count")
})
}
require.Equal(t, collStatus.Metrics.Objects, 1, "only one object should be counted")
require.Equal(t, collStatus.Metrics.Successes, 1, "read object successfully")
}
// Ensure metadata file always uses latest time for mod time
func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTime() {
table := []struct {
name string
source driveSource
}{
{
name: "oneDrive",
source: OneDriveSource,
},
{
name: "sharePoint",
source: SharePointSource,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
var (
testItemID = "fakeItemID"
testItemName = "Fake Item"
testItemSize = int64(10)
t = suite.T()
stubItemID = "fakeItemID"
stubItemName = "Fake Item"
stubItemSize = int64(10)
collStatus = support.ConnectorOperationStatus{}
wg = sync.WaitGroup{}
)
ctx, flush := tester.NewContext(t)
defer flush()
wg.Add(1)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
pb := path.Builder{}.Append(path.Split("drive/driveID1/root:/folderPath")...)
folderPath, err := pb.ToDataLayerOneDrivePath("a-tenant", "a-user", false)
require.NoError(t, err, clues.ToCore(err))
mbh := mock.DefaultOneDriveBH()
mbh.ItemInfo = details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: "fakeName", Modified: time.Now()}}
mbh.GIP = mock.GetsItemPermission{Perm: models.NewPermissionCollectionResponse()}
mbh.GetResps = []*http.Response{{
StatusCode: http.StatusOK,
Body: io.NopCloser(strings.NewReader("Fake Data!")),
}}
mbh.GetErrs = []error{nil}
coll, err := NewCollection(
graph.NewNoTimeoutHTTPWrapper(),
mbh,
folderPath,
nil,
"drive-id",
suite,
suite.testStatusUpdater(&wg, &collStatus),
test.source,
control.Options{ToggleFeatures: control.Toggles{}},
CollectionScopeFolder,
true)
require.NoError(t, err, clues.ToCore(err))
mtime := time.Now().AddDate(0, -1, 0)
mockItem := models.NewDriveItem()
mockItem.SetFile(models.NewFile())
mockItem.SetId(&testItemID)
mockItem.SetName(&testItemName)
mockItem.SetSize(&testItemSize)
mockItem.SetCreatedDateTime(&mtime)
mockItem.SetLastModifiedDateTime(&mtime)
coll.Add(mockItem)
coll.itemReader = func(
context.Context,
graph.Requester,
models.DriveItemable,
) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: "fakeName", Modified: time.Now()}},
io.NopCloser(strings.NewReader("Fake Data!")),
nil
}
stubItem := odTD.NewStubDriveItem(
stubItemID,
stubItemName,
stubItemSize,
mtime,
mtime,
true,
false)
coll.itemMetaReader = func(_ context.Context,
_ graph.Servicer,
_ string,
_ models.DriveItemable,
) (io.ReadCloser, int, error) {
return io.NopCloser(strings.NewReader(`{}`)), 16, nil
}
coll.Add(stubItem)
coll.handler = mbh
readItems := []data.Stream{}
for item := range coll.Items(ctx, fault.New(true)) {
@ -601,15 +475,13 @@ func (suite *CollectionUnitTestSuite) TestCollectionPermissionBackupLatestModTim
if strings.HasSuffix(i.UUID(), metadata.MetaFileSuffix) {
content, err := io.ReadAll(i.ToReader())
require.NoError(t, err, clues.ToCore(err))
require.Equal(t, content, []byte("{}"))
require.Equal(t, `{"filename":"Fake Item","permissionMode":1}`, string(content))
im, ok := i.(data.StreamModTime)
require.Equal(t, ok, true, "modtime interface")
require.Greater(t, im.ModTime(), mtime, "permissions time greater than mod time")
}
}
})
}
}
type GetDriveItemUnitTestSuite struct {
@ -690,32 +562,27 @@ func (suite *GetDriveItemUnitTestSuite) TestGetDriveItem_error() {
var (
errs = fault.New(false)
item = models.NewDriveItem()
col = &Collection{scope: test.colScope}
now = time.Now()
)
item.SetId(&strval)
item.SetName(&strval)
item.SetSize(&test.itemSize)
stubItem := odTD.NewStubDriveItem(
strval,
strval,
test.itemSize,
now,
now,
true,
false)
col.itemReader = func(
_ context.Context,
_ graph.Requester,
_ models.DriveItemable,
) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{}, nil, test.err
}
mbh := mock.DefaultOneDriveBH()
mbh.GI = mock.GetsItem{Item: stubItem}
mbh.GetResps = []*http.Response{{StatusCode: http.StatusOK}}
mbh.GetErrs = []error{test.err}
col.itemGetter = func(
ctx context.Context,
srv graph.Servicer,
driveID, itemID string,
) (models.DriveItemable, error) {
// We are not testing this err here
return item, nil
}
col.handler = mbh
_, err := col.getDriveItemContent(ctx, "driveID", item, errs)
_, err := col.getDriveItemContent(ctx, "driveID", stubItem, errs)
if test.err == nil {
assert.NoError(t, err, clues.ToCore(err))
return
@ -735,85 +602,62 @@ func (suite *GetDriveItemUnitTestSuite) TestGetDriveItem_error() {
func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
var (
svc graph.Servicer
gr graph.Requester
driveID string
iorc = io.NopCloser(bytes.NewReader([]byte("fnords")))
item = models.NewDriveItem()
itemWID = models.NewDriveItem()
item = odTD.NewStubDriveItem("id", "n", 1, time.Now(), time.Now(), true, false)
itemWID = odTD.NewStubDriveItem("id", "n", 1, time.Now(), time.Now(), true, false)
errUnauth = clues.Stack(assert.AnError).Label(graph.LabelStatus(http.StatusUnauthorized))
)
itemWID.SetId(ptr.To("brainhooldy"))
table := []struct {
name string
igf itemGetterFunc
irf itemReaderFunc
mgi mock.GetsItem
itemInfo details.ItemInfo
respBody []io.ReadCloser
getErr []error
expectErr require.ErrorAssertionFunc
expect require.ValueAssertionFunc
}{
{
name: "good",
irf: func(context.Context, graph.Requester, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{}, iorc, nil
},
itemInfo: details.ItemInfo{},
respBody: []io.ReadCloser{iorc},
getErr: []error{nil},
expectErr: require.NoError,
expect: require.NotNil,
},
{
name: "expired url redownloads",
igf: func(context.Context, graph.Servicer, string, string) (models.DriveItemable, error) {
return itemWID, nil
},
irf: func(c context.Context, g graph.Requester, m models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) {
// a bit hacky: assume only igf returns an item with a non-zero id.
if len(ptr.Val(m.GetId())) == 0 {
return details.ItemInfo{},
nil,
clues.Stack(assert.AnError).Label(graph.LabelStatus(http.StatusUnauthorized))
}
return details.ItemInfo{}, iorc, nil
},
mgi: mock.GetsItem{Item: itemWID, Err: nil},
itemInfo: details.ItemInfo{},
respBody: []io.ReadCloser{nil, iorc},
getErr: []error{errUnauth, nil},
expectErr: require.NoError,
expect: require.NotNil,
},
{
name: "immediate error",
irf: func(context.Context, graph.Requester, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{}, nil, assert.AnError
},
itemInfo: details.ItemInfo{},
getErr: []error{assert.AnError},
expectErr: require.Error,
expect: require.Nil,
},
{
name: "re-fetching the item fails",
igf: func(context.Context, graph.Servicer, string, string) (models.DriveItemable, error) {
return nil, assert.AnError
},
irf: func(context.Context, graph.Requester, models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{},
nil,
clues.Stack(assert.AnError).Label(graph.LabelStatus(http.StatusUnauthorized))
},
itemInfo: details.ItemInfo{},
getErr: []error{errUnauth},
mgi: mock.GetsItem{Item: nil, Err: assert.AnError},
expectErr: require.Error,
expect: require.Nil,
},
{
name: "expired url fails redownload",
igf: func(context.Context, graph.Servicer, string, string) (models.DriveItemable, error) {
return itemWID, nil
},
irf: func(c context.Context, g graph.Requester, m models.DriveItemable) (details.ItemInfo, io.ReadCloser, error) {
// a bit hacky: assume only igf returns an item with a non-zero id.
if len(ptr.Val(m.GetId())) == 0 {
return details.ItemInfo{},
nil,
clues.Stack(assert.AnError).Label(graph.LabelStatus(http.StatusUnauthorized))
}
return details.ItemInfo{}, iorc, assert.AnError
},
mgi: mock.GetsItem{Item: itemWID, Err: nil},
itemInfo: details.ItemInfo{},
respBody: []io.ReadCloser{nil, nil},
getErr: []error{errUnauth, assert.AnError},
expectErr: require.Error,
expect: require.Nil,
},
@ -825,8 +669,23 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
ctx, flush := tester.NewContext(t)
defer flush()
r, err := downloadContent(ctx, svc, test.igf, test.irf, gr, item, driveID)
resps := make([]*http.Response, 0, len(test.respBody))
for _, v := range test.respBody {
if v == nil {
resps = append(resps, nil)
} else {
resps = append(resps, &http.Response{StatusCode: http.StatusOK, Body: v})
}
}
mbh := mock.DefaultOneDriveBH()
mbh.GI = test.mgi
mbh.ItemInfo = test.itemInfo
mbh.GetResps = resps
mbh.GetErrs = test.getErr
r, err := downloadContent(ctx, mbh, item, driveID)
test.expect(t, r)
test.expectErr(t, err, clues.ToCore(err))
})

View File

@ -14,6 +14,7 @@ import (
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
@ -25,14 +26,6 @@ import (
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type driveSource int
const (
unknownDriveSource driveSource = iota
OneDriveSource
SharePointSource
)
type collectionScope int
const (
@ -47,21 +40,7 @@ const (
CollectionScopePackage
)
const (
restrictedDirectory = "Site Pages"
rootDrivePattern = "/drives/%s/root:"
)
func (ds driveSource) toPathServiceCat() (path.ServiceType, path.CategoryType) {
switch ds {
case OneDriveSource:
return path.OneDriveService, path.FilesCategory
case SharePointSource:
return path.SharePointService, path.LibrariesCategory
default:
return path.UnknownService, path.UnknownCategory
}
}
const restrictedDirectory = "Site Pages"
type folderMatcher interface {
IsAny() bool
@ -71,14 +50,11 @@ type folderMatcher interface {
// Collections is used to retrieve drive data for a
// resource owner, which can be either a user or a sharepoint site.
type Collections struct {
// configured to handle large item downloads
itemClient graph.Requester
handler BackupHandler
tenant string
tenantID string
resourceOwner string
source driveSource
matcher folderMatcher
service graph.Servicer
statusUpdater support.StatusUpdater
ctrl control.Options
@ -88,17 +64,6 @@ type Collections struct {
// driveID -> itemID -> collection
CollectionMap map[string]map[string]*Collection
// Not the most ideal, but allows us to change the pager function for testing
// as needed. This will allow us to mock out some scenarios during testing.
drivePagerFunc func(
source driveSource,
servicer graph.Servicer,
resourceOwner string,
fields []string,
) (api.DrivePager, error)
itemPagerFunc driveItemPagerFunc
servicePathPfxFunc pathPrefixerFunc
// Track stats from drive enumeration. Represents the items backed up.
NumItems int
NumFiles int
@ -106,26 +71,19 @@ type Collections struct {
}
func NewCollections(
itemClient graph.Requester,
tenant string,
bh BackupHandler,
tenantID string,
resourceOwner string,
source driveSource,
matcher folderMatcher,
service graph.Servicer,
statusUpdater support.StatusUpdater,
ctrlOpts control.Options,
) *Collections {
return &Collections{
itemClient: itemClient,
tenant: tenant,
handler: bh,
tenantID: tenantID,
resourceOwner: resourceOwner,
source: source,
matcher: matcher,
CollectionMap: map[string]map[string]*Collection{},
drivePagerFunc: PagerForSource,
itemPagerFunc: defaultItemPager,
servicePathPfxFunc: pathPrefixerForSource(tenant, resourceOwner, source),
service: service,
statusUpdater: statusUpdater,
ctrl: ctrlOpts,
}
@ -287,10 +245,7 @@ func (c *Collections) Get(
defer close(driveComplete)
// Enumerate drives for the specified resourceOwner
pager, err := c.drivePagerFunc(c.source, c.service, c.resourceOwner, nil)
if err != nil {
return nil, graph.Stack(ctx, err)
}
pager := c.handler.NewDrivePager(c.resourceOwner, nil)
drives, err := api.GetAllDrives(ctx, pager, true, maxDrivesRetries)
if err != nil {
@ -331,7 +286,7 @@ func (c *Collections) Get(
delta, paths, excluded, err := collectItems(
ictx,
c.itemPagerFunc(c.service, driveID, ""),
c.handler.NewItemPager(driveID, "", api.DriveItemSelectDefault()),
driveID,
driveName,
c.UpdateCollections,
@ -370,14 +325,12 @@ func (c *Collections) Get(
// For both cases we don't need to do set difference on folder map if the
// delta token was valid because we should see all the changes.
if !delta.Reset && len(excluded) == 0 {
if !delta.Reset {
if len(excluded) == 0 {
continue
} else if !delta.Reset {
p, err := GetCanonicalPath(
fmt.Sprintf(rootDrivePattern, driveID),
c.tenant,
c.resourceOwner,
c.source)
}
p, err := c.handler.CanonicalPath(odConsts.DriveFolderPrefixBuilder(driveID), c.tenantID, c.resourceOwner)
if err != nil {
return nil, clues.Wrap(err, "making exclude prefix").WithClues(ictx)
}
@ -411,13 +364,11 @@ func (c *Collections) Get(
}
col, err := NewCollection(
c.itemClient,
c.handler,
nil, // delete the folder
prevPath,
driveID,
c.service,
c.statusUpdater,
c.source,
c.ctrl,
CollectionScopeUnknown,
true)
@ -442,19 +393,17 @@ func (c *Collections) Get(
// generate tombstones for drives that were removed.
for driveID := range driveTombstones {
prevDrivePath, err := c.servicePathPfxFunc(driveID)
prevDrivePath, err := c.handler.PathPrefix(c.tenantID, c.resourceOwner, driveID)
if err != nil {
return nil, clues.Wrap(err, "making drive tombstone previous path").WithClues(ctx)
return nil, clues.Wrap(err, "making drive tombstone for previous path").WithClues(ctx)
}
coll, err := NewCollection(
c.itemClient,
c.handler,
nil, // delete the drive
prevDrivePath,
driveID,
c.service,
c.statusUpdater,
c.source,
c.ctrl,
CollectionScopeUnknown,
true)
@ -466,9 +415,9 @@ func (c *Collections) Get(
}
// add metadata collections
service, category := c.source.toPathServiceCat()
service, category := c.handler.ServiceCat()
md, err := graph.MakeMetadataCollection(
c.tenant,
c.tenantID,
c.resourceOwner,
service,
category,
@ -601,13 +550,11 @@ func (c *Collections) handleDelete(
}
col, err := NewCollection(
c.itemClient,
nil,
c.handler,
nil, // deletes the collection
prevPath,
driveID,
c.service,
c.statusUpdater,
c.source,
c.ctrl,
CollectionScopeUnknown,
// DoNotMerge is not checked for deleted items.
@ -629,14 +576,12 @@ func (c *Collections) getCollectionPath(
item models.DriveItemable,
) (path.Path, error) {
var (
collectionPathStr string
pb = odConsts.DriveFolderPrefixBuilder(driveID)
isRoot = item.GetRoot() != nil
isFile = item.GetFile() != nil
)
if isRoot {
collectionPathStr = fmt.Sprintf(rootDrivePattern, driveID)
} else {
if !isRoot {
if item.GetParentReference() == nil ||
item.GetParentReference().GetPath() == nil {
err := clues.New("no parent reference").
@ -645,15 +590,10 @@ func (c *Collections) getCollectionPath(
return nil, err
}
collectionPathStr = ptr.Val(item.GetParentReference().GetPath())
pb = path.Builder{}.Append(path.Split(ptr.Val(item.GetParentReference().GetPath()))...)
}
collectionPath, err := GetCanonicalPath(
collectionPathStr,
c.tenant,
c.resourceOwner,
c.source,
)
collectionPath, err := c.handler.CanonicalPath(pb, c.tenantID, c.resourceOwner)
if err != nil {
return nil, clues.Wrap(err, "making item path")
}
@ -794,17 +734,14 @@ func (c *Collections) UpdateCollections(
}
col, err := NewCollection(
c.itemClient,
c.handler,
collectionPath,
prevPath,
driveID,
c.service,
c.statusUpdater,
c.source,
c.ctrl,
colScope,
invalidPrevDelta,
)
invalidPrevDelta)
if err != nil {
return clues.Stack(err).WithClues(ictx)
}
@ -889,33 +826,9 @@ func shouldSkipDrive(ctx context.Context, drivePath path.Path, m folderMatcher,
(drivePath.Category() == path.LibrariesCategory && restrictedDirectory == driveName)
}
// GetCanonicalPath constructs the standard path for the given source.
func GetCanonicalPath(p, tenant, resourceOwner string, source driveSource) (path.Path, error) {
var (
pathBuilder = path.Builder{}.Append(strings.Split(p, "/")...)
result path.Path
err error
)
switch source {
case OneDriveSource:
result, err = pathBuilder.ToDataLayerOneDrivePath(tenant, resourceOwner, false)
case SharePointSource:
result, err = pathBuilder.ToDataLayerSharePointPath(tenant, resourceOwner, path.LibrariesCategory, false)
default:
return nil, clues.New("unrecognized data source")
}
if err != nil {
return nil, clues.Wrap(err, "converting to canonical path")
}
return result, nil
}
func includePath(ctx context.Context, m folderMatcher, folderPath path.Path) bool {
// Check if the folder is allowed by the scope.
folderPathString, err := path.GetDriveFolderPath(folderPath)
pb, err := path.GetDriveFolderPath(folderPath)
if err != nil {
logger.Ctx(ctx).With("err", err).Error("getting drive folder path")
return true
@ -923,11 +836,11 @@ func includePath(ctx context.Context, m folderMatcher, folderPath path.Path) boo
// Hack for the edge case where we're looking at the root folder and can
// select any folder. Right now the root folder has an empty folder path.
if len(folderPathString) == 0 && m.IsAny() {
if len(pb.Elements()) == 0 && m.IsAny() {
return true
}
return m.Matches(folderPathString)
return m.Matches(pb.String())
}
func updatePath(paths map[string]string, id, newPath string) {

View File

@ -2,8 +2,6 @@ package onedrive
import (
"context"
"fmt"
"strings"
"testing"
"github.com/alcionai/clues"
@ -18,7 +16,9 @@ import (
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock"
"github.com/alcionai/corso/src/internal/connector/graph"
odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/connector/onedrive/mock"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester"
@ -27,7 +27,7 @@ import (
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
"github.com/alcionai/corso/src/pkg/services/m365/api/mock"
apiMock "github.com/alcionai/corso/src/pkg/services/m365/api/mock"
)
type statePath struct {
@ -38,6 +38,7 @@ type statePath struct {
func getExpectedStatePathGenerator(
t *testing.T,
bh BackupHandler,
tenant, user, base string,
) func(data.CollectionState, ...string) statePath {
return func(state data.CollectionState, pths ...string) statePath {
@ -53,11 +54,13 @@ func getExpectedStatePathGenerator(
require.Len(t, pths, 1, "invalid number of paths to getExpectedStatePathGenerator")
} else {
require.Len(t, pths, 2, "invalid number of paths to getExpectedStatePathGenerator")
p2, err = GetCanonicalPath(base+pths[1], tenant, user, OneDriveSource)
pb := path.Builder{}.Append(path.Split(base + pths[1])...)
p2, err = bh.CanonicalPath(pb, tenant, user)
require.NoError(t, err, clues.ToCore(err))
}
p1, err = GetCanonicalPath(base+pths[0], tenant, user, OneDriveSource)
pb := path.Builder{}.Append(path.Split(base + pths[0])...)
p1, err = bh.CanonicalPath(pb, tenant, user)
require.NoError(t, err, clues.ToCore(err))
switch state {
@ -81,14 +84,17 @@ func getExpectedStatePathGenerator(
}
}
func getExpectedPathGenerator(t *testing.T,
func getExpectedPathGenerator(
t *testing.T,
bh BackupHandler,
tenant, user, base string,
) func(string) string {
return func(path string) string {
p, err := GetCanonicalPath(base+path, tenant, user, OneDriveSource)
return func(p string) string {
pb := path.Builder{}.Append(path.Split(base + p)...)
cp, err := bh.CanonicalPath(pb, tenant, user)
require.NoError(t, err, clues.ToCore(err))
return p.String()
return cp.String()
}
}
@ -100,52 +106,6 @@ func TestOneDriveCollectionsUnitSuite(t *testing.T) {
suite.Run(t, &OneDriveCollectionsUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *OneDriveCollectionsUnitSuite) TestGetCanonicalPath() {
tenant, resourceOwner := "tenant", "resourceOwner"
table := []struct {
name string
source driveSource
dir []string
expect string
expectErr assert.ErrorAssertionFunc
}{
{
name: "onedrive",
source: OneDriveSource,
dir: []string{"onedrive"},
expect: "tenant/onedrive/resourceOwner/files/onedrive",
expectErr: assert.NoError,
},
{
name: "sharepoint",
source: SharePointSource,
dir: []string{"sharepoint"},
expect: "tenant/sharepoint/resourceOwner/libraries/sharepoint",
expectErr: assert.NoError,
},
{
name: "unknown",
source: unknownDriveSource,
dir: []string{"unknown"},
expectErr: assert.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
p := strings.Join(test.dir, "/")
result, err := GetCanonicalPath(p, tenant, resourceOwner, test.source)
test.expectErr(t, err, clues.ToCore(err))
if result != nil {
assert.Equal(t, test.expect, result.String())
}
})
}
}
func getDelList(files ...string) map[string]struct{} {
delList := map[string]struct{}{}
for _, file := range files {
@ -168,9 +128,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
pkg = "/package"
)
testBaseDrivePath := fmt.Sprintf(rootDrivePattern, "driveID1")
expectedPath := getExpectedPathGenerator(suite.T(), tenant, user, testBaseDrivePath)
expectedStatePath := getExpectedStatePathGenerator(suite.T(), tenant, user, testBaseDrivePath)
bh := itemBackupHandler{}
testBaseDrivePath := odConsts.DriveFolderPrefixBuilder("driveID1").String()
expectedPath := getExpectedPathGenerator(suite.T(), bh, tenant, user, testBaseDrivePath)
expectedStatePath := getExpectedStatePathGenerator(suite.T(), bh, tenant, user, testBaseDrivePath)
tests := []struct {
testCase string
@ -782,12 +743,10 @@ func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
maps.Copy(outputFolderMap, tt.inputFolderMap)
c := NewCollections(
graph.NewNoTimeoutHTTPWrapper(),
&itemBackupHandler{api.Drives{}},
tenant,
user,
OneDriveSource,
testFolderMatcher{tt.scope},
&MockGraphService{},
nil,
control.Options{ToggleFeatures: control.Toggles{}})
@ -1168,7 +1127,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
func(*support.ConnectorOperationStatus) {})
require.NoError(t, err, clues.ToCore(err))
cols = append(cols, data.NotFoundRestoreCollection{Collection: mc})
cols = append(cols, data.NoFetchRestoreCollection{Collection: mc})
}
deltas, paths, err := deserializeMetadata(ctx, cols, fault.New(true))
@ -1267,11 +1226,13 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
drive2.SetName(&driveID2)
var (
driveBasePath1 = fmt.Sprintf(rootDrivePattern, driveID1)
driveBasePath2 = fmt.Sprintf(rootDrivePattern, driveID2)
bh = itemBackupHandler{}
expectedPath1 = getExpectedPathGenerator(suite.T(), tenant, user, driveBasePath1)
expectedPath2 = getExpectedPathGenerator(suite.T(), tenant, user, driveBasePath2)
driveBasePath1 = odConsts.DriveFolderPrefixBuilder(driveID1).String()
driveBasePath2 = odConsts.DriveFolderPrefixBuilder(driveID2).String()
expectedPath1 = getExpectedPathGenerator(suite.T(), bh, tenant, user, driveBasePath1)
expectedPath2 = getExpectedPathGenerator(suite.T(), bh, tenant, user, driveBasePath2)
rootFolderPath1 = expectedPath1("")
folderPath1 = expectedPath1("/folder")
@ -2297,42 +2258,31 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
ctx, flush := tester.NewContext(t)
defer flush()
drivePagerFunc := func(
source driveSource,
servicer graph.Servicer,
resourceOwner string,
fields []string,
) (api.DrivePager, error) {
return &mock.DrivePager{
ToReturn: []mock.PagerResult{
{
Drives: test.drives,
mockDrivePager := &apiMock.DrivePager{
ToReturn: []apiMock.PagerResult{
{Drives: test.drives},
},
},
}, nil
}
itemPagerFunc := func(
servicer graph.Servicer,
driveID, link string,
) itemPager {
return &mockItemPager{
itemPagers := map[string]api.DriveItemEnumerator{}
for driveID := range test.items {
itemPagers[driveID] = &mockItemPager{
toReturn: test.items[driveID],
}
}
mbh := mock.DefaultOneDriveBH()
mbh.DrivePagerV = mockDrivePager
mbh.ItemPagerV = itemPagers
c := NewCollections(
graph.NewNoTimeoutHTTPWrapper(),
mbh,
tenant,
user,
OneDriveSource,
testFolderMatcher{anyFolder},
&MockGraphService{},
func(*support.ConnectorOperationStatus) {},
control.Options{ToggleFeatures: control.Toggles{}},
)
c.drivePagerFunc = drivePagerFunc
c.itemPagerFunc = itemPagerFunc
control.Options{ToggleFeatures: control.Toggles{}})
prevDelta := "prev-delta"
mc, err := graph.MakeMetadataCollection(
@ -2355,7 +2305,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
)
assert.NoError(t, err, "creating metadata collection", clues.ToCore(err))
prevMetadata := []data.RestoreCollection{data.NotFoundRestoreCollection{Collection: mc}}
prevMetadata := []data.RestoreCollection{data.NoFetchRestoreCollection{Collection: mc}}
errs := fault.New(true)
delList := prefixmatcher.NewStringSetBuilder()
@ -2381,7 +2331,7 @@ func (suite *OneDriveCollectionsUnitSuite) TestGet() {
deltas, paths, err := deserializeMetadata(
ctx,
[]data.RestoreCollection{
data.NotFoundRestoreCollection{Collection: baseCol},
data.NoFetchRestoreCollection{Collection: baseCol},
},
fault.New(true))
if !assert.NoError(t, err, "deserializing metadata", clues.ToCore(err)) {

View File

@ -1,5 +1,7 @@
package onedrive
import "github.com/alcionai/corso/src/pkg/path"
const (
// const used as the root dir for the drive portion of a path prefix.
// eg: tid/onedrive/ro/files/drives/driveid/...
@ -10,3 +12,7 @@ const (
// root id for drive items
RootID = "root"
)
func DriveFolderPrefixBuilder(driveID string) *path.Builder {
return path.Builder{}.Append(DrivesPathDir, driveID, RootPathDir)
}

View File

@ -16,6 +16,7 @@ import (
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type odFolderMatcher struct {
@ -34,13 +35,12 @@ func (fm odFolderMatcher) Matches(dir string) bool {
// for the specified user
func DataCollections(
ctx context.Context,
ac api.Client,
selector selectors.Selector,
user idname.Provider,
metadata []data.RestoreCollection,
lastBackupVersion int,
tenant string,
itemClient graph.Requester,
service graph.Servicer,
su support.StatusUpdater,
ctrlOpts control.Options,
errs *fault.Bus,
@ -66,12 +66,10 @@ func DataCollections(
logger.Ctx(ctx).Debug("creating OneDrive collections")
nc := NewCollections(
itemClient,
&itemBackupHandler{ac.Drives()},
tenant,
user.ID(),
OneDriveSource,
odFolderMatcher{scope},
service,
su,
ctrlOpts)
@ -86,7 +84,6 @@ func DataCollections(
}
mcs, err := migrationCollections(
service,
lastBackupVersion,
tenant,
user,
@ -120,7 +117,6 @@ func DataCollections(
// adds data migrations to the collection set.
func migrationCollections(
svc graph.Servicer,
lastBackupVersion int,
tenant string,
user idname.Provider,

View File

@ -85,7 +85,7 @@ func (suite *DataCollectionsUnitSuite) TestMigrationCollections() {
ToggleFeatures: control.Toggles{},
}
mc, err := migrationCollections(nil, test.version, "t", u, nil, opts)
mc, err := migrationCollections(test.version, "t", u, nil, opts)
require.NoError(t, err, clues.ToCore(err))
if test.expectLen == 0 {

View File

@ -2,32 +2,20 @@ package onedrive
import (
"context"
"fmt"
"strings"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
const (
maxDrivesRetries = 3
// nextLinkKey is used to find the next link in a paged
// graph response
nextLinkKey = "@odata.nextLink"
itemChildrenRawURLFmt = "https://graph.microsoft.com/v1.0/drives/%s/items/%s/children"
itemNotFoundErrorCode = "itemNotFound"
)
const maxDrivesRetries = 3
// DeltaUpdate holds the results of a current delta token. It normally
// gets produced when aggregating the addition and removal of items in
@ -40,41 +28,6 @@ type DeltaUpdate struct {
Reset bool
}
func PagerForSource(
source driveSource,
servicer graph.Servicer,
resourceOwner string,
fields []string,
) (api.DrivePager, error) {
switch source {
case OneDriveSource:
return api.NewUserDrivePager(servicer, resourceOwner, fields), nil
case SharePointSource:
return api.NewSiteDrivePager(servicer, resourceOwner, fields), nil
default:
return nil, clues.New("unrecognized drive data source")
}
}
type pathPrefixerFunc func(driveID string) (path.Path, error)
func pathPrefixerForSource(
tenantID, resourceOwner string,
source driveSource,
) pathPrefixerFunc {
cat := path.FilesCategory
serv := path.OneDriveService
if source == SharePointSource {
cat = path.LibrariesCategory
serv = path.SharePointService
}
return func(driveID string) (path.Path, error) {
return path.Build(tenantID, resourceOwner, serv, cat, false, odConsts.DrivesPathDir, driveID, odConsts.RootPathDir)
}
}
// itemCollector functions collect the items found in a drive
type itemCollector func(
ctx context.Context,
@ -88,36 +41,22 @@ type itemCollector func(
errs *fault.Bus,
) error
type driveItemPagerFunc func(
servicer graph.Servicer,
driveID, link string,
) itemPager
type itemPager interface {
GetPage(context.Context) (api.DeltaPageLinker, error)
SetNext(nextLink string)
Reset()
ValuesIn(api.DeltaPageLinker) ([]models.DriveItemable, error)
}
func defaultItemPager(
servicer graph.Servicer,
driveID, link string,
) itemPager {
return api.NewItemPager(servicer, driveID, link, api.DriveItemSelectDefault())
}
// collectItems will enumerate all items in the specified drive and hand them to the
// provided `collector` method
func collectItems(
ctx context.Context,
pager itemPager,
pager api.DriveItemEnumerator,
driveID, driveName string,
collector itemCollector,
oldPaths map[string]string,
prevDelta string,
errs *fault.Bus,
) (DeltaUpdate, map[string]string, map[string]struct{}, error) {
) (
DeltaUpdate,
map[string]string, // newPaths
map[string]struct{}, // excluded
error,
) {
var (
newDeltaURL = ""
newPaths = map[string]string{}
@ -196,28 +135,8 @@ func collectItems(
return DeltaUpdate{URL: newDeltaURL, Reset: invalidPrevDelta}, newPaths, excluded, nil
}
// Create a new item in the specified folder
func CreateItem(
ctx context.Context,
service graph.Servicer,
driveID, parentFolderID string,
newItem models.DriveItemable,
) (models.DriveItemable, error) {
// Graph SDK doesn't yet provide a POST method for `/children` so we set the `rawUrl` ourselves as recommended
// here: https://github.com/microsoftgraph/msgraph-sdk-go/issues/155#issuecomment-1136254310
rawURL := fmt.Sprintf(itemChildrenRawURLFmt, driveID, parentFolderID)
builder := drives.NewItemItemsRequestBuilder(rawURL, service.Adapter())
newItem, err := builder.Post(ctx, newItem, nil)
if err != nil {
return nil, graph.Wrap(ctx, err, "creating item")
}
return newItem, nil
}
// newItem initializes a `models.DriveItemable` that can be used as input to `createItem`
func newItem(name string, folder bool) models.DriveItemable {
func newItem(name string, folder bool) *models.DriveItem {
itemToCreate := models.NewDriveItem()
itemToCreate.SetName(&name)
@ -243,12 +162,12 @@ func (op *Displayable) GetDisplayName() *string {
// are a subfolder or top-level folder in the hierarchy.
func GetAllFolders(
ctx context.Context,
gs graph.Servicer,
bh BackupHandler,
pager api.DrivePager,
prefix string,
errs *fault.Bus,
) ([]*Displayable, error) {
drvs, err := api.GetAllDrives(ctx, pager, true, maxDrivesRetries)
ds, err := api.GetAllDrives(ctx, pager, true, maxDrivesRetries)
if err != nil {
return nil, clues.Wrap(err, "getting OneDrive folders")
}
@ -258,14 +177,14 @@ func GetAllFolders(
el = errs.Local()
)
for _, d := range drvs {
for _, drive := range ds {
if el.Failure() != nil {
break
}
var (
id = ptr.Val(d.GetId())
name = ptr.Val(d.GetName())
id = ptr.Val(drive.GetId())
name = ptr.Val(drive.GetName())
)
ictx := clues.Add(ctx, "drive_id", id, "drive_name", clues.Hide(name))
@ -311,7 +230,7 @@ func GetAllFolders(
_, _, _, err = collectItems(
ictx,
defaultItemPager(gs, id, ""),
bh.NewItemPager(id, "", nil),
id,
name,
collector,

View File

@ -286,6 +286,7 @@ type OneDriveIntgSuite struct {
tester.Suite
userID string
creds account.M365Config
ac api.Client
}
func TestOneDriveSuite(t *testing.T) {
@ -303,9 +304,12 @@ func (suite *OneDriveIntgSuite) SetupSuite() {
acct := tester.NewM365Account(t)
creds, err := acct.M365Config()
require.NoError(t, err)
require.NoError(t, err, clues.ToCore(err))
suite.creds = creds
suite.ac, err = api.NewClient(creds)
require.NoError(t, err, clues.ToCore(err))
}
func (suite *OneDriveIntgSuite) TestCreateGetDeleteFolder() {
@ -318,11 +322,9 @@ func (suite *OneDriveIntgSuite) TestCreateGetDeleteFolder() {
folderIDs = []string{}
folderName1 = "Corso_Folder_Test_" + dttm.FormatNow(dttm.SafeForTesting)
folderElements = []string{folderName1}
gs = loadTestService(t)
)
pager, err := PagerForSource(OneDriveSource, gs, suite.userID, nil)
require.NoError(t, err, clues.ToCore(err))
pager := suite.ac.Drives().NewUserDrivePager(suite.userID, nil)
drives, err := api.GetAllDrives(ctx, pager, true, maxDrivesRetries)
require.NoError(t, err, clues.ToCore(err))
@ -337,14 +339,14 @@ func (suite *OneDriveIntgSuite) TestCreateGetDeleteFolder() {
// deletes require unique http clients
// https://github.com/alcionai/corso/issues/2707
err := api.DeleteDriveItem(ictx, loadTestService(t), driveID, id)
err := suite.ac.Drives().DeleteItem(ictx, driveID, id)
if err != nil {
logger.CtxErr(ictx, err).Errorw("deleting folder")
}
}
}()
rootFolder, err := api.GetDriveRoot(ctx, gs, driveID)
rootFolder, err := suite.ac.Drives().GetRootFolder(ctx, driveID)
require.NoError(t, err, clues.ToCore(err))
restoreDir := path.Builder{}.Append(folderElements...)
@ -357,7 +359,9 @@ func (suite *OneDriveIntgSuite) TestCreateGetDeleteFolder() {
caches := NewRestoreCaches()
caches.DriveIDToRootFolderID[driveID] = ptr.Val(rootFolder.GetId())
folderID, err := createRestoreFolders(ctx, gs, &drivePath, restoreDir, caches)
rh := NewRestoreHandler(suite.ac)
folderID, err := createRestoreFolders(ctx, rh, &drivePath, restoreDir, caches)
require.NoError(t, err, clues.ToCore(err))
folderIDs = append(folderIDs, folderID)
@ -365,7 +369,7 @@ func (suite *OneDriveIntgSuite) TestCreateGetDeleteFolder() {
folderName2 := "Corso_Folder_Test_" + dttm.FormatNow(dttm.SafeForTesting)
restoreDir = restoreDir.Append(folderName2)
folderID, err = createRestoreFolders(ctx, gs, &drivePath, restoreDir, caches)
folderID, err = createRestoreFolders(ctx, rh, &drivePath, restoreDir, caches)
require.NoError(t, err, clues.ToCore(err))
folderIDs = append(folderIDs, folderID)
@ -387,11 +391,13 @@ func (suite *OneDriveIntgSuite) TestCreateGetDeleteFolder() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
bh := itemBackupHandler{suite.ac.Drives()}
pager := suite.ac.Drives().NewUserDrivePager(suite.userID, nil)
pager, err := PagerForSource(OneDriveSource, gs, suite.userID, nil)
require.NoError(t, err, clues.ToCore(err))
ctx, flush := tester.NewContext(t)
defer flush()
allFolders, err := GetAllFolders(ctx, gs, pager, test.prefix, fault.New(true))
allFolders, err := GetAllFolders(ctx, bh, pager, test.prefix, fault.New(true))
require.NoError(t, err, clues.ToCore(err))
foundFolderIDs := []string{}
@ -454,12 +460,10 @@ func (suite *OneDriveIntgSuite) TestOneDriveNewCollections() {
)
colls := NewCollections(
graph.NewNoTimeoutHTTPWrapper(),
&itemBackupHandler{suite.ac.Drives()},
creds.AzureTenantID,
test.user,
OneDriveSource,
testFolderMatcher{scope},
service,
service.updateStatus,
control.Options{
ToggleFeatures: control.Toggles{},

View File

@ -1,25 +0,0 @@
// Code generated by "stringer -type=driveSource"; DO NOT EDIT.
package onedrive
import "strconv"
func _() {
// An "invalid array index" compiler error signifies that the constant values have changed.
// Re-run the stringer command to generate them again.
var x [1]struct{}
_ = x[unknownDriveSource-0]
_ = x[OneDriveSource-1]
_ = x[SharePointSource-2]
}
const _driveSource_name = "unknownDriveSourceOneDriveSourceSharePointSource"
var _driveSource_index = [...]uint8{0, 18, 32, 48}
func (i driveSource) String() string {
if i < 0 || i >= driveSource(len(_driveSource_index)-1) {
return "driveSource(" + strconv.FormatInt(int64(i), 10) + ")"
}
return _driveSource_name[_driveSource_index[i]:_driveSource_index[i+1]]
}

View File

@ -0,0 +1,132 @@
package onedrive
import (
"context"
"github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// ItemInfoAugmenter constructs the service-specific details for a drive item.
type ItemInfoAugmenter interface {
	// AugmentItemInfo will populate a details.<Service>Info struct
	// with properties from the drive item.  ItemSize is passed in
	// separately for restore processes because the local itemable
	// doesn't have its size value updated as a side effect of creation,
	// and kiota drops any SetSize update.
	AugmentItemInfo(
		dii details.ItemInfo,
		item models.DriveItemable,
		size int64,
		parentPath *path.Builder,
	) details.ItemInfo
}
// ---------------------------------------------------------------------------
// backup
// ---------------------------------------------------------------------------
// BackupHandler aggregates the api calls and path helpers needed to run
// a drive-based backup for a single service.
type BackupHandler interface {
	ItemInfoAugmenter
	api.Getter
	GetItemPermissioner
	GetItemer

	// PathPrefix constructs the service and category specific path prefix for
	// the given values.
	PathPrefix(tenantID, resourceOwner, driveID string) (path.Path, error)

	// CanonicalPath constructs the service and category specific path for
	// the given values.
	CanonicalPath(
		folders *path.Builder,
		tenantID, resourceOwner string,
	) (path.Path, error)

	// ServiceCat returns the service and category used by this implementation.
	ServiceCat() (path.ServiceType, path.CategoryType)

	// NewDrivePager produces a pager that enumerates the resource owner's drives.
	NewDrivePager(resourceOwner string, fields []string) api.DrivePager

	// NewItemPager produces a pager that enumerates the items in a drive,
	// optionally resuming from a delta link.
	NewItemPager(driveID, link string, fields []string) api.DriveItemEnumerator

	// FormatDisplayPath creates a human-readable string to represent the
	// provided path.
	FormatDisplayPath(driveName string, parentPath *path.Builder) string

	// NewLocationIDer produces a LocationIDer for the drive and folder elems.
	NewLocationIDer(driveID string, elems ...string) details.LocationIDer
}
// GetItemPermissioner fetches the permissions granted on a single drive item.
type GetItemPermissioner interface {
	GetItemPermission(
		ctx context.Context,
		driveID, itemID string,
	) (models.PermissionCollectionResponseable, error)
}
// GetItemer fetches a single drive item by its id.
type GetItemer interface {
	GetItem(
		ctx context.Context,
		driveID, itemID string,
	) (models.DriveItemable, error)
}
// ---------------------------------------------------------------------------
// restore
// ---------------------------------------------------------------------------
// RestoreHandler aggregates the api calls needed to restore items and
// permissions into a drive.
type RestoreHandler interface {
	DeleteItemPermissioner
	GetFolderByNamer
	GetRootFolderer
	ItemInfoAugmenter
	NewItemContentUploader
	PostItemInContainerer
	UpdateItemPermissioner
}
// NewItemContentUploader creates upload sessions for writing item content.
type NewItemContentUploader interface {
	// NewItemContentUpload creates an upload session which is used as a writer
	// for large item content.
	NewItemContentUpload(
		ctx context.Context,
		driveID, itemID string,
	) (models.UploadSessionable, error)
}
// DeleteItemPermissioner removes a single permission from a drive item.
type DeleteItemPermissioner interface {
	DeleteItemPermission(
		ctx context.Context,
		driveID, itemID, permissionID string,
	) error
}
// UpdateItemPermissioner adds or updates permissions on a drive item
// via the graph invite api.
type UpdateItemPermissioner interface {
	PostItemPermissionUpdate(
		ctx context.Context,
		driveID, itemID string,
		body *drives.ItemItemsItemInvitePostRequestBody,
	) (drives.ItemItemsItemInviteResponseable, error)
}
// PostItemInContainerer creates a new item inside the given parent folder.
type PostItemInContainerer interface {
	PostItemInContainer(
		ctx context.Context,
		driveID, parentFolderID string,
		newItem models.DriveItemable,
	) (models.DriveItemable, error)
}
// GetFolderByNamer looks up a child folder within a parent folder by its
// display name.
type GetFolderByNamer interface {
	// GetFolderByName returns the folder with the given display name
	// inside parentFolderID.
	//
	// Note: the final parameter is a folder name, not an id; the
	// itemRestoreHandler implementation forwards it as folderName.
	GetFolderByName(
		ctx context.Context,
		driveID, parentFolderID, folderName string,
	) (models.DriveItemable, error)
}
// GetRootFolderer fetches the root folder of a drive.
type GetRootFolderer interface {
	// GetRootFolder gets the root folder for the drive.
	GetRootFolder(
		ctx context.Context,
		driveID string,
	) (models.DriveItemable, error)
}

View File

@ -5,17 +5,14 @@ import (
"context"
"encoding/json"
"io"
"net/http"
"strings"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
@ -25,58 +22,64 @@ var downloadURLKeys = []string{
"@content.downloadUrl",
}
// sharePointItemReader will return a io.ReadCloser for the specified item
// It crafts this by querying M365 for a download URL for the item
// and using a http client to initialize a reader
// TODO: Add metadata fetching to SharePoint
func sharePointItemReader(
func downloadItem(
ctx context.Context,
client graph.Requester,
ag api.Getter,
item models.DriveItemable,
) (details.ItemInfo, io.ReadCloser, error) {
resp, err := downloadItem(ctx, client, item)
if err != nil {
return details.ItemInfo{}, nil, clues.Wrap(err, "sharepoint reader")
}
dii := details.ItemInfo{
SharePoint: sharePointItemInfo(item, ptr.Val(item.GetSize())),
}
return dii, resp.Body, nil
}
func oneDriveItemMetaReader(
ctx context.Context,
service graph.Servicer,
driveID string,
item models.DriveItemable,
) (io.ReadCloser, int, error) {
return baseItemMetaReader(ctx, service, driveID, item)
}
func sharePointItemMetaReader(
ctx context.Context,
service graph.Servicer,
driveID string,
item models.DriveItemable,
) (io.ReadCloser, int, error) {
// TODO: include permissions
return baseItemMetaReader(ctx, service, driveID, item)
}
func baseItemMetaReader(
ctx context.Context,
service graph.Servicer,
driveID string,
item models.DriveItemable,
) (io.ReadCloser, int, error) {
) (io.ReadCloser, error) {
var (
perms []metadata.Permission
err error
meta = metadata.Metadata{FileName: ptr.Val(item.GetName())}
rc io.ReadCloser
isFile = item.GetFile() != nil
)
if isFile {
var (
url string
ad = item.GetAdditionalData()
)
for _, key := range downloadURLKeys {
if v, err := str.AnyValueToString(key, ad); err == nil {
url = v
break
}
}
if len(url) == 0 {
return nil, clues.New("extracting file url")
}
resp, err := ag.Get(ctx, url, nil)
if err != nil {
return nil, clues.Wrap(err, "getting item")
}
if graph.IsMalwareResp(ctx, resp) {
return nil, clues.New("malware detected").Label(graph.LabelsMalware)
}
if (resp.StatusCode / 100) != 2 {
// upstream error checks can compare the status with
// clues.HasLabel(err, graph.LabelStatus(http.KnownStatusCode))
return nil, clues.
Wrap(clues.New(resp.Status), "non-2xx http response").
Label(graph.LabelStatus(resp.StatusCode))
}
rc = resp.Body
}
return rc, nil
}
func downloadItemMeta(
ctx context.Context,
gip GetItemPermissioner,
driveID string,
item models.DriveItemable,
) (io.ReadCloser, int, error) {
meta := metadata.Metadata{FileName: ptr.Val(item.GetName())}
if item.GetShared() == nil {
meta.SharingMode = metadata.SharingModeInherited
} else {
@ -84,12 +87,12 @@ func baseItemMetaReader(
}
if meta.SharingMode == metadata.SharingModeCustom {
perms, err = driveItemPermissionInfo(ctx, service, driveID, ptr.Val(item.GetId()))
perm, err := gip.GetItemPermission(ctx, driveID, ptr.Val(item.GetId()))
if err != nil {
return nil, 0, err
}
meta.Permissions = perms
meta.Permissions = metadata.FilterPermissions(ctx, perm.GetValue())
}
metaJSON, err := json.Marshal(meta)
@ -100,283 +103,25 @@ func baseItemMetaReader(
return io.NopCloser(bytes.NewReader(metaJSON)), len(metaJSON), nil
}
// oneDriveItemReader will return a io.ReadCloser for the specified item
// It crafts this by querying M365 for a download URL for the item
// and using a http client to initialize a reader
func oneDriveItemReader(
// driveItemWriter is used to initialize and return an io.Writer to upload data for the specified item
// It does so by creating an upload session and using that URL to initialize an `itemWriter`
// TODO: @vkamra verify if var session is the desired input
func driveItemWriter(
ctx context.Context,
client graph.Requester,
item models.DriveItemable,
) (details.ItemInfo, io.ReadCloser, error) {
var (
rc io.ReadCloser
isFile = item.GetFile() != nil
)
nicu NewItemContentUploader,
driveID, itemID string,
itemSize int64,
) (io.Writer, string, error) {
ctx = clues.Add(ctx, "upload_item_id", itemID)
if isFile {
resp, err := downloadItem(ctx, client, item)
icu, err := nicu.NewItemContentUpload(ctx, driveID, itemID)
if err != nil {
return details.ItemInfo{}, nil, clues.Wrap(err, "onedrive reader")
return nil, "", clues.Stack(err)
}
rc = resp.Body
}
iw := graph.NewLargeItemWriter(itemID, ptr.Val(icu.GetUploadUrl()), itemSize)
dii := details.ItemInfo{
OneDrive: oneDriveItemInfo(item, ptr.Val(item.GetSize())),
}
return dii, rc, nil
}
func downloadItem(
ctx context.Context,
client graph.Requester,
item models.DriveItemable,
) (*http.Response, error) {
var url string
for _, key := range downloadURLKeys {
tmp, ok := item.GetAdditionalData()[key].(*string)
if ok {
url = ptr.Val(tmp)
break
}
}
if len(url) == 0 {
return nil, clues.New("extracting file url").With("item_id", ptr.Val(item.GetId()))
}
resp, err := client.Request(ctx, http.MethodGet, url, nil, nil)
if err != nil {
return nil, err
}
if (resp.StatusCode / 100) == 2 {
return resp, nil
}
if graph.IsMalwareResp(ctx, resp) {
return nil, clues.New("malware detected").Label(graph.LabelsMalware)
}
// upstream error checks can compare the status with
// clues.HasLabel(err, graph.LabelStatus(http.KnownStatusCode))
cerr := clues.Wrap(clues.New(resp.Status), "non-2xx http response").
Label(graph.LabelStatus(resp.StatusCode))
return resp, cerr
}
// oneDriveItemInfo will populate a details.OneDriveInfo struct
// with properties from the drive item. ItemSize is specified
// separately for restore processes because the local itemable
// doesn't have its size value updated as a side effect of creation,
// and kiota drops any SetSize update.
func oneDriveItemInfo(di models.DriveItemable, itemSize int64) *details.OneDriveInfo {
var email, driveName, driveID string
if di.GetCreatedBy() != nil && di.GetCreatedBy().GetUser() != nil {
// User is sometimes not available when created via some
// external applications (like backup/restore solutions)
ed, ok := di.GetCreatedBy().GetUser().GetAdditionalData()["email"]
if ok {
email = *ed.(*string)
}
}
if di.GetParentReference() != nil {
driveID = ptr.Val(di.GetParentReference().GetDriveId())
driveName = strings.TrimSpace(ptr.Val(di.GetParentReference().GetName()))
}
return &details.OneDriveInfo{
Created: ptr.Val(di.GetCreatedDateTime()),
DriveID: driveID,
DriveName: driveName,
ItemName: ptr.Val(di.GetName()),
ItemType: details.OneDriveItem,
Modified: ptr.Val(di.GetLastModifiedDateTime()),
Owner: email,
Size: itemSize,
}
}
// driveItemPermissionInfo will fetch the permission information
// for a drive item given a drive and item id.
func driveItemPermissionInfo(
ctx context.Context,
service graph.Servicer,
driveID string,
itemID string,
) ([]metadata.Permission, error) {
perm, err := api.GetItemPermission(ctx, service, driveID, itemID)
if err != nil {
return nil, err
}
uperms := filterUserPermissions(ctx, perm.GetValue())
return uperms, nil
}
func filterUserPermissions(ctx context.Context, perms []models.Permissionable) []metadata.Permission {
up := []metadata.Permission{}
for _, p := range perms {
if p.GetGrantedToV2() == nil {
// For link shares, we get permissions without a user
// specified
continue
}
var (
// Below are the mapping from roles to "Advanced" permissions
// screen entries:
//
// owner - Full Control
// write - Design | Edit | Contribute (no difference in /permissions api)
// read - Read
// empty - Restricted View
//
// helpful docs:
// https://devblogs.microsoft.com/microsoft365dev/controlling-app-access-on-specific-sharepoint-site-collections/
roles = p.GetRoles()
gv2 = p.GetGrantedToV2()
entityID string
gv2t metadata.GV2Type
)
switch true {
case gv2.GetUser() != nil:
gv2t = metadata.GV2User
entityID = ptr.Val(gv2.GetUser().GetId())
case gv2.GetSiteUser() != nil:
gv2t = metadata.GV2SiteUser
entityID = ptr.Val(gv2.GetSiteUser().GetId())
case gv2.GetGroup() != nil:
gv2t = metadata.GV2Group
entityID = ptr.Val(gv2.GetGroup().GetId())
case gv2.GetSiteGroup() != nil:
gv2t = metadata.GV2SiteGroup
entityID = ptr.Val(gv2.GetSiteGroup().GetId())
case gv2.GetApplication() != nil:
gv2t = metadata.GV2App
entityID = ptr.Val(gv2.GetApplication().GetId())
case gv2.GetDevice() != nil:
gv2t = metadata.GV2Device
entityID = ptr.Val(gv2.GetDevice().GetId())
default:
logger.Ctx(ctx).Info("untracked permission")
}
// Technically GrantedToV2 can also contain devices, but the
// documentation does not mention about devices in permissions
if entityID == "" {
// This should ideally not be hit
continue
}
up = append(up, metadata.Permission{
ID: ptr.Val(p.GetId()),
Roles: roles,
EntityID: entityID,
EntityType: gv2t,
Expiration: p.GetExpirationDateTime(),
})
}
return up
}
// sharePointItemInfo will populate a details.SharePointInfo struct
// with properties from the drive item. ItemSize is specified
// separately for restore processes because the local itemable
// doesn't have its size value updated as a side effect of creation,
// and kiota drops any SetSize update.
// TODO: Update drive name during Issue #2071
func sharePointItemInfo(di models.DriveItemable, itemSize int64) *details.SharePointInfo {
var driveName, siteID, driveID, weburl, creatorEmail string
// TODO: we rely on this info for details/restore lookups,
// so if it's nil we have an issue, and will need an alternative
// way to source the data.
if di.GetCreatedBy() != nil && di.GetCreatedBy().GetUser() != nil {
// User is sometimes not available when created via some
// external applications (like backup/restore solutions)
additionalData := di.GetCreatedBy().GetUser().GetAdditionalData()
ed, ok := additionalData["email"]
if !ok {
ed = additionalData["displayName"]
}
if ed != nil {
creatorEmail = *ed.(*string)
}
}
gsi := di.GetSharepointIds()
if gsi != nil {
siteID = ptr.Val(gsi.GetSiteId())
weburl = ptr.Val(gsi.GetSiteUrl())
if len(weburl) == 0 {
weburl = constructWebURL(di.GetAdditionalData())
}
}
if di.GetParentReference() != nil {
driveID = ptr.Val(di.GetParentReference().GetDriveId())
driveName = strings.TrimSpace(ptr.Val(di.GetParentReference().GetName()))
}
return &details.SharePointInfo{
ItemType: details.SharePointLibrary,
ItemName: ptr.Val(di.GetName()),
Created: ptr.Val(di.GetCreatedDateTime()),
Modified: ptr.Val(di.GetLastModifiedDateTime()),
DriveID: driveID,
DriveName: driveName,
Size: itemSize,
Owner: creatorEmail,
WebURL: weburl,
SiteID: siteID,
}
}
// constructWebURL helper function for recreating the webURL
// for the originating SharePoint site. Uses additional data map
// from a models.DriveItemable that possesses a downloadURL within the map.
// Returns "" if map nil or key is not present.
func constructWebURL(adtl map[string]any) string {
var (
desiredKey = "@microsoft.graph.downloadUrl"
sep = `/_layouts`
url string
)
if adtl == nil {
return url
}
r := adtl[desiredKey]
point, ok := r.(*string)
if !ok {
return url
}
value := ptr.Val(point)
if len(value) == 0 {
return url
}
temp := strings.Split(value, sep)
url = temp[0]
return url
return iw, ptr.Val(icu.GetUploadUrl()), nil
}
func setName(orig models.ItemReferenceable, driveName string) models.ItemReferenceable {

View File

@ -0,0 +1,227 @@
package onedrive
import (
"context"
"net/http"
"strings"
"github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// ---------------------------------------------------------------------------
// backup
// ---------------------------------------------------------------------------
// interface compliance check
var _ BackupHandler = &itemBackupHandler{}

// itemBackupHandler implements BackupHandler for OneDrive backups by
// delegating to the drives api client.
type itemBackupHandler struct {
	ac api.Drives
}
// Get performs an http GET against the given url with the provided
// headers, delegating to the drives api client.
func (h itemBackupHandler) Get(
	ctx context.Context,
	url string,
	headers map[string]string,
) (*http.Response, error) {
	return h.ac.Get(ctx, url, headers)
}
// PathPrefix constructs the OneDrive files-category path prefix for the
// given tenant, resource owner, and drive.
func (h itemBackupHandler) PathPrefix(
	tenantID, resourceOwner, driveID string,
) (path.Path, error) {
	return path.Build(
		tenantID,
		resourceOwner,
		path.OneDriveService,
		path.FilesCategory,
		false,
		odConsts.DrivesPathDir,
		driveID,
		odConsts.RootPathDir)
}
// CanonicalPath converts the folder builder into a fully-qualified
// OneDrive data-layer path for the tenant and resource owner.
func (h itemBackupHandler) CanonicalPath(
	folders *path.Builder,
	tenantID, resourceOwner string,
) (path.Path, error) {
	return folders.ToDataLayerOneDrivePath(tenantID, resourceOwner, false)
}
// ServiceCat returns the service and category handled by this implementation.
func (h itemBackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
	return path.OneDriveService, path.FilesCategory
}
// NewDrivePager produces a pager that enumerates the user's drives.
func (h itemBackupHandler) NewDrivePager(
	resourceOwner string, fields []string,
) api.DrivePager {
	return h.ac.NewUserDrivePager(resourceOwner, fields)
}
// NewItemPager produces a pager that enumerates items within the drive,
// optionally resuming from a prior delta link.
func (h itemBackupHandler) NewItemPager(
	driveID, link string,
	fields []string,
) api.DriveItemEnumerator {
	return h.ac.NewItemPager(driveID, link, fields)
}
// AugmentItemInfo will populate a details.OneDriveInfo struct with
// properties from the drive item.  size is passed in separately because
// the local itemable doesn't have its size value updated as a side
// effect of creation, and kiota drops any SetSize update.
func (h itemBackupHandler) AugmentItemInfo(
	dii details.ItemInfo,
	item models.DriveItemable,
	size int64,
	parentPath *path.Builder,
) details.ItemInfo {
	return augmentItemInfo(dii, item, size, parentPath)
}
// FormatDisplayPath creates a human-readable string to represent the
// provided folder path.
func (h itemBackupHandler) FormatDisplayPath(
	_ string, // drive name not displayed for onedrive
	pb *path.Builder,
) string {
	return "/" + pb.String()
}
// NewLocationIDer produces a OneDrive-specific LocationIDer for the
// drive and folder path elements.
func (h itemBackupHandler) NewLocationIDer(
	driveID string,
	elems ...string,
) details.LocationIDer {
	return details.NewOneDriveLocationIDer(driveID, elems...)
}
// GetItemPermission fetches the permissions granted on a drive item.
func (h itemBackupHandler) GetItemPermission(
	ctx context.Context,
	driveID, itemID string,
) (models.PermissionCollectionResponseable, error) {
	return h.ac.GetItemPermission(ctx, driveID, itemID)
}
// GetItem fetches a single drive item by its id.
func (h itemBackupHandler) GetItem(
	ctx context.Context,
	driveID, itemID string,
) (models.DriveItemable, error) {
	return h.ac.GetItem(ctx, driveID, itemID)
}
// ---------------------------------------------------------------------------
// Restore
// ---------------------------------------------------------------------------
// interface compliance check
var _ RestoreHandler = &itemRestoreHandler{}

// itemRestoreHandler implements RestoreHandler for OneDrive restores by
// delegating to the drives api client.
type itemRestoreHandler struct {
	ac api.Drives
}
// NewRestoreHandler produces an itemRestoreHandler backed by the
// client's drives api.
func NewRestoreHandler(ac api.Client) *itemRestoreHandler {
	return &itemRestoreHandler{ac.Drives()}
}
// AugmentItemInfo will populate a details.OneDriveInfo struct
// with properties from the drive item. ItemSize is specified
// separately for restore processes because the local itemable
// doesn't have its size value updated as a side effect of creation,
// and kiota drops any SetSize update.
func (h itemRestoreHandler) AugmentItemInfo(
	dii details.ItemInfo,
	item models.DriveItemable,
	size int64,
	parentPath *path.Builder,
) details.ItemInfo {
	return augmentItemInfo(dii, item, size, parentPath)
}
// NewItemContentUpload creates an upload session which is used as a
// writer for large item content.
func (h itemRestoreHandler) NewItemContentUpload(
	ctx context.Context,
	driveID, itemID string,
) (models.UploadSessionable, error) {
	return h.ac.NewItemContentUpload(ctx, driveID, itemID)
}
// DeleteItemPermission removes the given permission from a drive item.
func (h itemRestoreHandler) DeleteItemPermission(
	ctx context.Context,
	driveID, itemID, permissionID string,
) error {
	return h.ac.DeleteItemPermission(ctx, driveID, itemID, permissionID)
}
// PostItemPermissionUpdate adds or updates permissions on a drive item
// via the graph invite api.
func (h itemRestoreHandler) PostItemPermissionUpdate(
	ctx context.Context,
	driveID, itemID string,
	body *drives.ItemItemsItemInvitePostRequestBody,
) (drives.ItemItemsItemInviteResponseable, error) {
	return h.ac.PostItemPermissionUpdate(ctx, driveID, itemID, body)
}
// PostItemInContainer creates a new item within the given parent folder.
func (h itemRestoreHandler) PostItemInContainer(
	ctx context.Context,
	driveID, parentFolderID string,
	newItem models.DriveItemable,
) (models.DriveItemable, error) {
	return h.ac.PostItemInContainer(ctx, driveID, parentFolderID, newItem)
}
// GetFolderByName looks up a child folder within the parent folder by
// its display name.
func (h itemRestoreHandler) GetFolderByName(
	ctx context.Context,
	driveID, parentFolderID, folderName string,
) (models.DriveItemable, error) {
	return h.ac.GetFolderByName(ctx, driveID, parentFolderID, folderName)
}
// GetRootFolder gets the root folder for the drive.
func (h itemRestoreHandler) GetRootFolder(
	ctx context.Context,
	driveID string,
) (models.DriveItemable, error) {
	return h.ac.GetRootFolder(ctx, driveID)
}
// ---------------------------------------------------------------------------
// Common
// ---------------------------------------------------------------------------
// augmentItemInfo fills dii.OneDrive with properties from the drive
// item.  size is passed in separately for restore processes because the
// local itemable doesn't have its size value updated as a side effect
// of creation, and kiota drops any SetSize update.
func augmentItemInfo(
	dii details.ItemInfo,
	item models.DriveItemable,
	size int64,
	parentPath *path.Builder,
) details.ItemInfo {
	var email, driveName, driveID string

	if item.GetCreatedBy() != nil && item.GetCreatedBy().GetUser() != nil {
		// User is sometimes not available when created via some
		// external applications (like backup/restore solutions)
		ed, ok := item.GetCreatedBy().GetUser().GetAdditionalData()["email"]
		if ok {
			// Use a checked assertion: an additionalData value of an
			// unexpected type (or a typed-nil *string) would otherwise
			// panic the backup/restore.
			if s, ok := ed.(*string); ok && s != nil {
				email = *s
			}
		}
	}

	if item.GetParentReference() != nil {
		driveID = ptr.Val(item.GetParentReference().GetDriveId())
		driveName = strings.TrimSpace(ptr.Val(item.GetParentReference().GetName()))
	}

	// parentPath is optional; a nil builder yields an empty ParentPath.
	var pps string
	if parentPath != nil {
		pps = parentPath.String()
	}

	dii.OneDrive = &details.OneDriveInfo{
		Created:    ptr.Val(item.GetCreatedDateTime()),
		DriveID:    driveID,
		DriveName:  driveName,
		ItemName:   ptr.Val(item.GetName()),
		ItemType:   details.OneDriveItem,
		Modified:   ptr.Val(item.GetLastModifiedDateTime()),
		Owner:      email,
		ParentPath: pps,
		Size:       size,
	}

	return dii
}

View File

@ -0,0 +1,58 @@
package onedrive
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path"
)
// ItemBackupHandlerUnitSuite exercises itemBackupHandler behaviors that
// don't require a live graph connection.
type ItemBackupHandlerUnitSuite struct {
	tester.Suite
}
// TestItemBackupHandlerUnitSuite runs the itemBackupHandler unit suite.
func TestItemBackupHandlerUnitSuite(t *testing.T) {
	suite.Run(t, &ItemBackupHandlerUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestCanonicalPath verifies that the handler builds the expected
// tenant/onedrive/owner/files data-layer path from a folder builder.
func (suite *ItemBackupHandlerUnitSuite) TestCanonicalPath() {
	tenantID, resourceOwner := "tenant", "resourceOwner"

	table := []struct {
		name      string
		expect    string
		expectErr assert.ErrorAssertionFunc
	}{
		{
			name:      "onedrive",
			expect:    "tenant/onedrive/resourceOwner/files/prefix",
			expectErr: assert.NoError,
		},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			h := itemBackupHandler{}
			p := path.Builder{}.Append("prefix")
			result, err := h.CanonicalPath(p, tenantID, resourceOwner)
			test.expectErr(t, err, clues.ToCore(err))

			// result is nil when path construction errors.
			if result != nil {
				assert.Equal(t, test.expect, result.String())
			}
		})
	}
}
// TestServiceCat verifies the handler reports the OneDrive service and
// files category.
func (suite *ItemBackupHandlerUnitSuite) TestServiceCat() {
	t := suite.T()

	s, c := itemBackupHandler{}.ServiceCat()
	assert.Equal(t, path.OneDriveService, s)
	assert.Equal(t, path.FilesCategory, c)
}

View File

@ -8,14 +8,11 @@ import (
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/services/m365/api"
@ -25,7 +22,7 @@ type ItemIntegrationSuite struct {
tester.Suite
user string
userDriveID string
service graph.Servicer
service *oneDriveService
}
func TestItemIntegrationSuite(t *testing.T) {
@ -46,8 +43,7 @@ func (suite *ItemIntegrationSuite) SetupSuite() {
suite.service = loadTestService(t)
suite.user = tester.SecondaryM365UserID(t)
pager, err := PagerForSource(OneDriveSource, suite.service, suite.user, nil)
require.NoError(t, err, clues.ToCore(err))
pager := suite.service.ac.Drives().NewUserDrivePager(suite.user, nil)
odDrives, err := api.GetAllDrives(ctx, pager, true, maxDrivesRetries)
require.NoError(t, err, clues.ToCore(err))
@ -83,6 +79,10 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
_ bool,
_ *fault.Bus,
) error {
if driveItem != nil {
return nil
}
for _, item := range items {
if item.GetFile() != nil && ptr.Val(item.GetSize()) > 0 {
driveItem = item
@ -92,12 +92,14 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
return nil
}
ip := suite.service.ac.
Drives().
NewItemPager(suite.userDriveID, "", api.DriveItemSelectDefault())
_, _, _, err := collectItems(
ctx,
defaultItemPager(
suite.service,
suite.userDriveID,
""),
ip,
suite.userDriveID,
"General",
itemCollector,
@ -114,19 +116,15 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
suite.user,
suite.userDriveID)
// Read data for the file
itemInfo, itemData, err := oneDriveItemReader(ctx, graph.NewNoTimeoutHTTPWrapper(), driveItem)
bh := itemBackupHandler{suite.service.ac.Drives()}
// Read data for the file
itemData, err := downloadItem(ctx, bh, driveItem)
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, itemInfo.OneDrive)
require.NotEmpty(t, itemInfo.OneDrive.ItemName)
size, err := io.Copy(io.Discard, itemData)
require.NoError(t, err, clues.ToCore(err))
require.NotZero(t, size)
require.Equal(t, size, itemInfo.OneDrive.Size)
t.Logf("Read %d bytes from file %s.", size, itemInfo.OneDrive.ItemName)
}
// TestItemWriter is an integration test for uploading data to OneDrive
@ -148,21 +146,19 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
rh := NewRestoreHandler(suite.service.ac)
ctx, flush := tester.NewContext(t)
defer flush()
srv := suite.service
root, err := api.GetDriveRoot(ctx, srv, test.driveID)
root, err := suite.service.ac.Drives().GetRootFolder(ctx, test.driveID)
require.NoError(t, err, clues.ToCore(err))
newFolderName := tester.DefaultTestRestoreDestination("folder").ContainerName
t.Logf("creating folder %s", newFolderName)
newFolder, err := CreateItem(
newFolder, err := rh.PostItemInContainer(
ctx,
srv,
test.driveID,
ptr.Val(root.GetId()),
newItem(newFolderName, true))
@ -172,9 +168,8 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
newItemName := "testItem_" + dttm.FormatNow(dttm.SafeForTesting)
t.Logf("creating item %s", newItemName)
newItem, err := CreateItem(
newItem, err := rh.PostItemInContainer(
ctx,
srv,
test.driveID,
ptr.Val(newFolder.GetId()),
newItem(newItemName, false))
@ -183,19 +178,24 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
// HACK: Leveraging this to test getFolder behavior for a file. `getFolder()` on the
// newly created item should fail because it's a file not a folder
_, err = api.GetFolderByName(ctx, srv, test.driveID, ptr.Val(newFolder.GetId()), newItemName)
_, err = suite.service.ac.Drives().GetFolderByName(
ctx,
test.driveID,
ptr.Val(newFolder.GetId()),
newItemName)
require.ErrorIs(t, err, api.ErrFolderNotFound, clues.ToCore(err))
// Initialize a 100KB mockDataProvider
td, writeSize := mockDataReader(int64(100 * 1024))
itemID := ptr.Val(newItem.GetId())
r, err := api.PostDriveItem(ctx, srv, test.driveID, itemID)
w, _, err := driveItemWriter(
ctx,
rh,
test.driveID,
ptr.Val(newItem.GetId()),
writeSize)
require.NoError(t, err, clues.ToCore(err))
w := graph.NewLargeItemWriter(itemID, ptr.Val(r.GetUploadUrl()), writeSize)
// Using a 32 KB buffer for the copy allows us to validate the
// multi-part upload. `io.CopyBuffer` will only write 32 KB at
// a time
@ -235,210 +235,24 @@ func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
ctx, flush := tester.NewContext(t)
defer flush()
srv := suite.service
root, err := api.GetDriveRoot(ctx, srv, test.driveID)
root, err := suite.service.ac.Drives().GetRootFolder(ctx, test.driveID)
require.NoError(t, err, clues.ToCore(err))
// Lookup a folder that doesn't exist
_, err = api.GetFolderByName(ctx, srv, test.driveID, ptr.Val(root.GetId()), "FolderDoesNotExist")
_, err = suite.service.ac.Drives().GetFolderByName(
ctx,
test.driveID,
ptr.Val(root.GetId()),
"FolderDoesNotExist")
require.ErrorIs(t, err, api.ErrFolderNotFound, clues.ToCore(err))
// Lookup a folder that does exist
_, err = api.GetFolderByName(ctx, srv, test.driveID, ptr.Val(root.GetId()), "")
_, err = suite.service.ac.Drives().GetFolderByName(
ctx,
test.driveID,
ptr.Val(root.GetId()),
"")
require.NoError(t, err, clues.ToCore(err))
})
}
}
func getPermsAndResourceOwnerPerms(
permID, resourceOwner string,
gv2t metadata.GV2Type,
scopes []string,
) (models.Permissionable, metadata.Permission) {
sharepointIdentitySet := models.NewSharePointIdentitySet()
switch gv2t {
case metadata.GV2App, metadata.GV2Device, metadata.GV2Group, metadata.GV2User:
identity := models.NewIdentity()
identity.SetId(&resourceOwner)
identity.SetAdditionalData(map[string]any{"email": &resourceOwner})
switch gv2t {
case metadata.GV2User:
sharepointIdentitySet.SetUser(identity)
case metadata.GV2Group:
sharepointIdentitySet.SetGroup(identity)
case metadata.GV2App:
sharepointIdentitySet.SetApplication(identity)
case metadata.GV2Device:
sharepointIdentitySet.SetDevice(identity)
}
case metadata.GV2SiteUser, metadata.GV2SiteGroup:
spIdentity := models.NewSharePointIdentity()
spIdentity.SetId(&resourceOwner)
spIdentity.SetAdditionalData(map[string]any{"email": &resourceOwner})
switch gv2t {
case metadata.GV2SiteUser:
sharepointIdentitySet.SetSiteUser(spIdentity)
case metadata.GV2SiteGroup:
sharepointIdentitySet.SetSiteGroup(spIdentity)
}
}
perm := models.NewPermission()
perm.SetId(&permID)
perm.SetRoles([]string{"read"})
perm.SetGrantedToV2(sharepointIdentitySet)
ownersPerm := metadata.Permission{
ID: permID,
Roles: []string{"read"},
EntityID: resourceOwner,
EntityType: gv2t,
}
return perm, ownersPerm
}
type ItemUnitTestSuite struct {
tester.Suite
}
func TestItemUnitTestSuite(t *testing.T) {
suite.Run(t, &ItemUnitTestSuite{Suite: tester.NewUnitSuite(t)})
}
// TestDrivePermissionsFilter exercises filterUserPermissions across each
// identity type (user, site user, group, site group), checking that
// permissions bound to a resolvable identity are parsed and identity-less
// entries (eg: link shares) are dropped.
func (suite *ItemUnitTestSuite) TestDrivePermissionsFilter() {
	var (
		pID  = "fakePermId"
		uID  = "fakeuser@provider.com"
		uID2 = "fakeuser2@provider.com"
		own  = []string{"owner"}
		r    = []string{"read"}
		rw   = []string{"read", "write"}
	)

	// graph permission models paired with the parsed permission each one
	// is expected to produce.
	userOwnerPerm, userOwnerROperm := getPermsAndResourceOwnerPerms(pID, uID, metadata.GV2User, own)
	userReadPerm, userReadROperm := getPermsAndResourceOwnerPerms(pID, uID, metadata.GV2User, r)
	userReadWritePerm, userReadWriteROperm := getPermsAndResourceOwnerPerms(pID, uID2, metadata.GV2User, rw)
	siteUserOwnerPerm, siteUserOwnerROperm := getPermsAndResourceOwnerPerms(pID, uID, metadata.GV2SiteUser, own)
	siteUserReadPerm, siteUserReadROperm := getPermsAndResourceOwnerPerms(pID, uID, metadata.GV2SiteUser, r)
	siteUserReadWritePerm, siteUserReadWriteROperm := getPermsAndResourceOwnerPerms(pID, uID2, metadata.GV2SiteUser, rw)
	groupReadPerm, groupReadROperm := getPermsAndResourceOwnerPerms(pID, uID, metadata.GV2Group, r)
	groupReadWritePerm, groupReadWriteROperm := getPermsAndResourceOwnerPerms(pID, uID2, metadata.GV2Group, rw)
	siteGroupReadPerm, siteGroupReadROperm := getPermsAndResourceOwnerPerms(pID, uID, metadata.GV2SiteGroup, r)
	siteGroupReadWritePerm, siteGroupReadWriteROperm := getPermsAndResourceOwnerPerms(pID, uID2, metadata.GV2SiteGroup, rw)

	// a permission with no GrantedToV2 identity, as produced by link shares;
	// such entries should not appear in the parsed output.
	noPerm, _ := getPermsAndResourceOwnerPerms(pID, uID, "user", []string{"read"})
	noPerm.SetGrantedToV2(nil) // eg: link shares

	cases := []struct {
		name              string
		graphPermissions  []models.Permissionable
		parsedPermissions []metadata.Permission
	}{
		{
			name:              "no perms",
			graphPermissions:  []models.Permissionable{},
			parsedPermissions: []metadata.Permission{},
		},
		{
			name:              "no user bound to perms",
			graphPermissions:  []models.Permissionable{noPerm},
			parsedPermissions: []metadata.Permission{},
		},
		// user
		{
			name:              "user with read permissions",
			graphPermissions:  []models.Permissionable{userReadPerm},
			parsedPermissions: []metadata.Permission{userReadROperm},
		},
		{
			name:              "user with owner permissions",
			graphPermissions:  []models.Permissionable{userOwnerPerm},
			parsedPermissions: []metadata.Permission{userOwnerROperm},
		},
		{
			name:              "user with read and write permissions",
			graphPermissions:  []models.Permissionable{userReadWritePerm},
			parsedPermissions: []metadata.Permission{userReadWriteROperm},
		},
		{
			name:              "multiple users with separate permissions",
			graphPermissions:  []models.Permissionable{userReadPerm, userReadWritePerm},
			parsedPermissions: []metadata.Permission{userReadROperm, userReadWriteROperm},
		},
		// site-user
		{
			name:              "site user with read permissions",
			graphPermissions:  []models.Permissionable{siteUserReadPerm},
			parsedPermissions: []metadata.Permission{siteUserReadROperm},
		},
		{
			name:              "site user with owner permissions",
			graphPermissions:  []models.Permissionable{siteUserOwnerPerm},
			parsedPermissions: []metadata.Permission{siteUserOwnerROperm},
		},
		{
			name:              "site user with read and write permissions",
			graphPermissions:  []models.Permissionable{siteUserReadWritePerm},
			parsedPermissions: []metadata.Permission{siteUserReadWriteROperm},
		},
		{
			name:              "multiple site users with separate permissions",
			graphPermissions:  []models.Permissionable{siteUserReadPerm, siteUserReadWritePerm},
			parsedPermissions: []metadata.Permission{siteUserReadROperm, siteUserReadWriteROperm},
		},
		// group
		{
			name:              "group with read permissions",
			graphPermissions:  []models.Permissionable{groupReadPerm},
			parsedPermissions: []metadata.Permission{groupReadROperm},
		},
		{
			name:              "group with read and write permissions",
			graphPermissions:  []models.Permissionable{groupReadWritePerm},
			parsedPermissions: []metadata.Permission{groupReadWriteROperm},
		},
		{
			name:              "multiple groups with separate permissions",
			graphPermissions:  []models.Permissionable{groupReadPerm, groupReadWritePerm},
			parsedPermissions: []metadata.Permission{groupReadROperm, groupReadWriteROperm},
		},
		// site-group
		{
			name:              "site group with read permissions",
			graphPermissions:  []models.Permissionable{siteGroupReadPerm},
			parsedPermissions: []metadata.Permission{siteGroupReadROperm},
		},
		{
			name:              "site group with read and write permissions",
			graphPermissions:  []models.Permissionable{siteGroupReadWritePerm},
			parsedPermissions: []metadata.Permission{siteGroupReadWriteROperm},
		},
		{
			name:              "multiple site groups with separate permissions",
			graphPermissions:  []models.Permissionable{siteGroupReadPerm, siteGroupReadWritePerm},
			parsedPermissions: []metadata.Permission{siteGroupReadROperm, siteGroupReadWriteROperm},
		},
	}
	for _, tc := range cases {
		suite.Run(tc.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			// order is not guaranteed, hence ElementsMatch over Equal.
			actual := filterUserPermissions(ctx, tc.graphPermissions)
			assert.ElementsMatch(t, tc.parsedPermissions, actual)
		})
	}
}

View File

@ -1,9 +1,14 @@
package metadata
import (
"context"
"time"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/pkg/logger"
)
type SharingMode int
@ -100,3 +105,72 @@ func DiffPermissions(before, after []Permission) ([]Permission, []Permission) {
return added, removed
}
// FilterPermissions converts the graph permission models into the subset
// of Permission records that corso tracks: permissions granted to a
// concrete identity (user, site user, group, site group, application, or
// device).  Entries with no GrantedToV2 identity (eg: link shares) and
// entries whose identity has no ID are dropped.
func FilterPermissions(ctx context.Context, perms []models.Permissionable) []Permission {
	up := []Permission{}

	for _, p := range perms {
		if p.GetGrantedToV2() == nil {
			// For link shares, we get permissions without a user
			// specified
			continue
		}

		var (
			// Below are the mapping from roles to "Advanced" permissions
			// screen entries:
			//
			// owner - Full Control
			// write - Design | Edit | Contribute (no difference in /permissions api)
			// read - Read
			// empty - Restricted View
			//
			// helpful docs:
			// https://devblogs.microsoft.com/microsoft365dev/controlling-app-access-on-specific-sharepoint-site-collections/
			roles    = p.GetRoles()
			gv2      = p.GetGrantedToV2()
			entityID string
			gv2t     GV2Type
		)

		// idiom fix: bare switch instead of `switch true`.
		switch {
		case gv2.GetUser() != nil:
			gv2t = GV2User
			entityID = ptr.Val(gv2.GetUser().GetId())
		case gv2.GetSiteUser() != nil:
			gv2t = GV2SiteUser
			entityID = ptr.Val(gv2.GetSiteUser().GetId())
		case gv2.GetGroup() != nil:
			gv2t = GV2Group
			entityID = ptr.Val(gv2.GetGroup().GetId())
		case gv2.GetSiteGroup() != nil:
			gv2t = GV2SiteGroup
			entityID = ptr.Val(gv2.GetSiteGroup().GetId())
		case gv2.GetApplication() != nil:
			gv2t = GV2App
			entityID = ptr.Val(gv2.GetApplication().GetId())
		case gv2.GetDevice() != nil:
			gv2t = GV2Device
			entityID = ptr.Val(gv2.GetDevice().GetId())
		default:
			logger.Ctx(ctx).Info("untracked permission")
		}

		// Either the identity set matched none of the cases above, or the
		// matched identity carries no ID.  This should ideally not be hit.
		if entityID == "" {
			continue
		}

		up = append(up, Permission{
			ID:         ptr.Val(p.GetId()),
			Roles:      roles,
			EntityID:   entityID,
			EntityType: gv2t,
			Expiration: p.GetExpirationDateTime(),
		})
	}

	return up
}

View File

@ -3,6 +3,7 @@ package metadata
import (
"testing"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
@ -147,3 +148,187 @@ func (suite *PermissionsUnitTestSuite) TestDiffPermissions() {
})
}
}
// getPermsAndResourceOwnerPerms builds a graph permission model and the
// equivalent parsed Permission for the given identity type.
//
// permID is the permission's ID, resourceOwner is the entity granted
// access, gv2t selects which slot of the SharePointIdentitySet gets
// populated, and scopes is the set of granted roles (eg: "read", "write",
// "owner").
func getPermsAndResourceOwnerPerms(
	permID, resourceOwner string,
	gv2t GV2Type,
	scopes []string,
) (models.Permissionable, Permission) {
	sharepointIdentitySet := models.NewSharePointIdentitySet()

	switch gv2t {
	case GV2App, GV2Device, GV2Group, GV2User:
		identity := models.NewIdentity()
		identity.SetId(&resourceOwner)
		identity.SetAdditionalData(map[string]any{"email": &resourceOwner})

		switch gv2t {
		case GV2User:
			sharepointIdentitySet.SetUser(identity)
		case GV2Group:
			sharepointIdentitySet.SetGroup(identity)
		case GV2App:
			sharepointIdentitySet.SetApplication(identity)
		case GV2Device:
			sharepointIdentitySet.SetDevice(identity)
		}

	case GV2SiteUser, GV2SiteGroup:
		spIdentity := models.NewSharePointIdentity()
		spIdentity.SetId(&resourceOwner)
		spIdentity.SetAdditionalData(map[string]any{"email": &resourceOwner})

		switch gv2t {
		case GV2SiteUser:
			sharepointIdentitySet.SetSiteUser(spIdentity)
		case GV2SiteGroup:
			sharepointIdentitySet.SetSiteGroup(spIdentity)
		}
	}

	perm := models.NewPermission()
	perm.SetId(&permID)
	// bug fix: roles were previously hard-coded to []string{"read"},
	// silently ignoring the caller-provided scopes; test cases passing
	// "owner" or "read"+"write" roles were actually exercising "read".
	perm.SetRoles(scopes)
	perm.SetGrantedToV2(sharepointIdentitySet)

	// both values are built from scopes so expected and parsed roles
	// stay consistent for callers comparing the two.
	ownersPerm := Permission{
		ID:         permID,
		Roles:      scopes,
		EntityID:   resourceOwner,
		EntityType: gv2t,
	}

	return perm, ownersPerm
}
// TestDrivePermissionsFilter exercises FilterPermissions across each
// identity type (user, site user, group, site group), checking that
// permissions bound to a resolvable identity are parsed and identity-less
// entries (eg: link shares) are dropped.
func (suite *PermissionsUnitTestSuite) TestDrivePermissionsFilter() {
	var (
		pID  = "fakePermId"
		uID  = "fakeuser@provider.com"
		uID2 = "fakeuser2@provider.com"
		own  = []string{"owner"}
		r    = []string{"read"}
		rw   = []string{"read", "write"}
	)

	// graph permission models paired with the parsed permission each one
	// is expected to produce.
	userOwnerPerm, userOwnerROperm := getPermsAndResourceOwnerPerms(pID, uID, GV2User, own)
	userReadPerm, userReadROperm := getPermsAndResourceOwnerPerms(pID, uID, GV2User, r)
	userReadWritePerm, userReadWriteROperm := getPermsAndResourceOwnerPerms(pID, uID2, GV2User, rw)
	siteUserOwnerPerm, siteUserOwnerROperm := getPermsAndResourceOwnerPerms(pID, uID, GV2SiteUser, own)
	siteUserReadPerm, siteUserReadROperm := getPermsAndResourceOwnerPerms(pID, uID, GV2SiteUser, r)
	siteUserReadWritePerm, siteUserReadWriteROperm := getPermsAndResourceOwnerPerms(pID, uID2, GV2SiteUser, rw)
	groupReadPerm, groupReadROperm := getPermsAndResourceOwnerPerms(pID, uID, GV2Group, r)
	groupReadWritePerm, groupReadWriteROperm := getPermsAndResourceOwnerPerms(pID, uID2, GV2Group, rw)
	siteGroupReadPerm, siteGroupReadROperm := getPermsAndResourceOwnerPerms(pID, uID, GV2SiteGroup, r)
	siteGroupReadWritePerm, siteGroupReadWriteROperm := getPermsAndResourceOwnerPerms(pID, uID2, GV2SiteGroup, rw)

	// a permission with no GrantedToV2 identity, as produced by link shares;
	// such entries should not appear in the parsed output.
	noPerm, _ := getPermsAndResourceOwnerPerms(pID, uID, "user", []string{"read"})
	noPerm.SetGrantedToV2(nil) // eg: link shares

	cases := []struct {
		name              string
		graphPermissions  []models.Permissionable
		parsedPermissions []Permission
	}{
		{
			name:              "no perms",
			graphPermissions:  []models.Permissionable{},
			parsedPermissions: []Permission{},
		},
		{
			name:              "no user bound to perms",
			graphPermissions:  []models.Permissionable{noPerm},
			parsedPermissions: []Permission{},
		},
		// user
		{
			name:              "user with read permissions",
			graphPermissions:  []models.Permissionable{userReadPerm},
			parsedPermissions: []Permission{userReadROperm},
		},
		{
			name:              "user with owner permissions",
			graphPermissions:  []models.Permissionable{userOwnerPerm},
			parsedPermissions: []Permission{userOwnerROperm},
		},
		{
			name:              "user with read and write permissions",
			graphPermissions:  []models.Permissionable{userReadWritePerm},
			parsedPermissions: []Permission{userReadWriteROperm},
		},
		{
			name:              "multiple users with separate permissions",
			graphPermissions:  []models.Permissionable{userReadPerm, userReadWritePerm},
			parsedPermissions: []Permission{userReadROperm, userReadWriteROperm},
		},
		// site-user
		{
			name:              "site user with read permissions",
			graphPermissions:  []models.Permissionable{siteUserReadPerm},
			parsedPermissions: []Permission{siteUserReadROperm},
		},
		{
			name:              "site user with owner permissions",
			graphPermissions:  []models.Permissionable{siteUserOwnerPerm},
			parsedPermissions: []Permission{siteUserOwnerROperm},
		},
		{
			name:              "site user with read and write permissions",
			graphPermissions:  []models.Permissionable{siteUserReadWritePerm},
			parsedPermissions: []Permission{siteUserReadWriteROperm},
		},
		{
			name:              "multiple site users with separate permissions",
			graphPermissions:  []models.Permissionable{siteUserReadPerm, siteUserReadWritePerm},
			parsedPermissions: []Permission{siteUserReadROperm, siteUserReadWriteROperm},
		},
		// group
		{
			name:              "group with read permissions",
			graphPermissions:  []models.Permissionable{groupReadPerm},
			parsedPermissions: []Permission{groupReadROperm},
		},
		{
			name:              "group with read and write permissions",
			graphPermissions:  []models.Permissionable{groupReadWritePerm},
			parsedPermissions: []Permission{groupReadWriteROperm},
		},
		{
			name:              "multiple groups with separate permissions",
			graphPermissions:  []models.Permissionable{groupReadPerm, groupReadWritePerm},
			parsedPermissions: []Permission{groupReadROperm, groupReadWriteROperm},
		},
		// site-group
		{
			name:              "site group with read permissions",
			graphPermissions:  []models.Permissionable{siteGroupReadPerm},
			parsedPermissions: []Permission{siteGroupReadROperm},
		},
		{
			name:              "site group with read and write permissions",
			graphPermissions:  []models.Permissionable{siteGroupReadWritePerm},
			parsedPermissions: []Permission{siteGroupReadWriteROperm},
		},
		{
			name:              "multiple site groups with separate permissions",
			graphPermissions:  []models.Permissionable{siteGroupReadPerm, siteGroupReadWritePerm},
			parsedPermissions: []Permission{siteGroupReadROperm, siteGroupReadWriteROperm},
		},
	}
	for _, tc := range cases {
		suite.Run(tc.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			// order is not guaranteed, hence ElementsMatch over Equal.
			actual := FilterPermissions(ctx, tc.graphPermissions)
			assert.ElementsMatch(t, tc.parsedPermissions, actual)
		})
	}
}

View File

@ -0,0 +1,54 @@
package testdata
import (
"testing"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/assert"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
)
// AssertMetadataEqual checks that two drive item metadata records match:
// file name, sharing mode, and each permission's identifying fields and
// roles.  Permission Expiration is deliberately not compared.
func AssertMetadataEqual(t *testing.T, expect, got metadata.Metadata) {
	assert.Equal(t, expect.FileName, got.FileName, "fileName")
	assert.Equal(t, expect.SharingMode, got.SharingMode, "sharingMode")

	// bug fix: assert.Equal does not halt the test on failure, so a
	// mismatched count previously fell through to the loop below and
	// panicked with an index out of range when got had fewer permissions.
	if !assert.Equal(t, len(expect.Permissions), len(got.Permissions), "permissions count") {
		return
	}

	for i, ep := range expect.Permissions {
		gp := got.Permissions[i]
		assert.Equal(t, ep.EntityType, gp.EntityType, "permission %d entityType", i)
		assert.Equal(t, ep.EntityID, gp.EntityID, "permission %d entityID", i)
		assert.Equal(t, ep.ID, gp.ID, "permission %d ID", i)
		assert.ElementsMatch(t, ep.Roles, gp.Roles, "permission %d roles", i)
	}
}
// NewStubPermissionResponse fabricates a permission collection response
// containing a single permission with the given ID and roles.  Only the
// GV2User identity type populates the identity set; any other type yields
// an empty identity set on the permission.
func NewStubPermissionResponse(
	gv2 metadata.GV2Type,
	permID, entityID string,
	roles []string,
) models.PermissionCollectionResponseable {
	identitySet := models.NewSharePointIdentitySet()

	if gv2 == metadata.GV2User {
		user := models.NewIdentity()
		user.SetId(&entityID)
		user.SetDisplayName(&entityID)
		identitySet.SetUser(user)
	}

	perm := models.NewPermission()
	perm.SetGrantedToV2(identitySet)
	perm.SetId(&permID)
	perm.SetRoles(roles)

	resp := models.NewPermissionCollectionResponse()
	resp.SetValue([]models.Permissionable{perm})

	return resp
}

View File

@ -0,0 +1,217 @@
package mock
import (
"context"
"net/http"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// ---------------------------------------------------------------------------
// Backup Handler
// ---------------------------------------------------------------------------

// BackupHandler stubs the drive backup handler.  Each field either cans a
// return value or injects a fake implementation so tests can steer
// individual calls without a live graph client.
type BackupHandler struct {
	// ItemInfo is returned verbatim by AugmentItemInfo.
	ItemInfo details.ItemInfo

	// GI backs GetItem; GIP backs GetItemPermission.
	GI  GetsItem
	GIP GetsItemPermission

	// PathPrefixErr is layered onto a successful PathPrefixFn result,
	// letting tests force a failure after a valid path build.
	PathPrefixFn  pathPrefixer
	PathPrefixErr error

	// CanonPathErr behaves like PathPrefixErr, for CanonicalPath.
	CanonPathFn  canonPather
	CanonPathErr error

	Service  path.ServiceType
	Category path.CategoryType

	DrivePagerV api.DrivePager
	// driveID -> itemPager
	ItemPagerV map[string]api.DriveItemEnumerator

	LocationIDFn locationIDer

	// getCall indexes into GetResps/GetErrs, advancing once per Get call;
	// populate at least as many entries as Get invocations expected.
	getCall  int
	GetResps []*http.Response
	GetErrs  []error
}
// DefaultOneDriveBH returns a BackupHandler stub preconfigured for
// OneDrive: files category, default path builders, and a "not defined"
// error canned into every mocked getter, so tests only wire up the calls
// they actually exercise.
func DefaultOneDriveBH() *BackupHandler {
	bh := &BackupHandler{}

	bh.ItemInfo = details.ItemInfo{OneDrive: &details.OneDriveInfo{}}
	bh.GI = GetsItem{Err: clues.New("not defined")}
	bh.GIP = GetsItemPermission{Err: clues.New("not defined")}
	bh.PathPrefixFn = defaultOneDrivePathPrefixer
	bh.CanonPathFn = defaultOneDriveCanonPather
	bh.Service = path.OneDriveService
	bh.Category = path.FilesCategory
	bh.LocationIDFn = defaultOneDriveLocationIDer
	bh.GetResps = []*http.Response{nil}
	bh.GetErrs = []error{clues.New("not defined")}

	return bh
}
// DefaultSharePointBH returns a BackupHandler stub preconfigured for
// SharePoint: libraries category, default path builders, and a "not
// defined" error canned into every mocked getter, so tests only wire up
// the calls they actually exercise.
func DefaultSharePointBH() *BackupHandler {
	bh := &BackupHandler{}

	bh.ItemInfo = details.ItemInfo{SharePoint: &details.SharePointInfo{}}
	bh.GI = GetsItem{Err: clues.New("not defined")}
	bh.GIP = GetsItemPermission{Err: clues.New("not defined")}
	bh.PathPrefixFn = defaultSharePointPathPrefixer
	bh.CanonPathFn = defaultSharePointCanonPather
	bh.Service = path.SharePointService
	bh.Category = path.LibrariesCategory
	bh.LocationIDFn = defaultSharePointLocationIDer
	bh.GetResps = []*http.Response{nil}
	bh.GetErrs = []error{clues.New("not defined")}

	return bh
}
// PathPrefix builds the prefix via PathPrefixFn, then substitutes
// PathPrefixErr as the returned error so tests can inject a failure
// alongside a successfully built path.
func (h BackupHandler) PathPrefix(tID, ro, driveID string) (path.Path, error) {
	prefix, err := h.PathPrefixFn(tID, ro, driveID)
	if err != nil {
		return nil, err
	}

	return prefix, h.PathPrefixErr
}
// CanonicalPath canonicalizes pb via CanonPathFn, then substitutes
// CanonPathErr as the returned error so tests can inject a failure
// alongside a successfully built path.
func (h BackupHandler) CanonicalPath(pb *path.Builder, tID, ro string) (path.Path, error) {
	canon, err := h.CanonPathFn(pb, tID, ro)
	if err != nil {
		return nil, err
	}

	return canon, h.CanonPathErr
}
// ServiceCat returns the canned service and category pair.
func (h BackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
	return h.Service, h.Category
}
// NewDrivePager ignores its arguments and returns the canned drive pager.
func (h BackupHandler) NewDrivePager(string, []string) api.DrivePager {
	return h.DrivePagerV
}
// NewItemPager returns the canned item pager registered for driveID.
// Lookups for unregistered drives yield the map's zero value (nil).
func (h BackupHandler) NewItemPager(driveID string, _ string, _ []string) api.DriveItemEnumerator {
	return h.ItemPagerV[driveID]
}
// FormatDisplayPath renders the builder as a rooted display path,
// ignoring the drive-name argument.
func (h BackupHandler) FormatDisplayPath(_ string, pb *path.Builder) string {
	return "/" + pb.String()
}
// NewLocationIDer delegates to the configured LocationIDFn.
func (h BackupHandler) NewLocationIDer(driveID string, elems ...string) details.LocationIDer {
	return h.LocationIDFn(driveID, elems...)
}
// AugmentItemInfo ignores its inputs and returns the canned ItemInfo.
func (h BackupHandler) AugmentItemInfo(details.ItemInfo, models.DriveItemable, int64, *path.Builder) details.ItemInfo {
	return h.ItemInfo
}
// Get replays canned responses and errors in call order: each invocation
// consumes the next entry of GetResps/GetErrs.  Callers must populate at
// least as many entries as the number of Get calls they trigger.
func (h *BackupHandler) Get(context.Context, string, map[string]string) (*http.Response, error) {
	idx := h.getCall
	h.getCall++

	// allows mockers to only populate the errors slice
	if err := h.GetErrs[idx]; err != nil {
		return nil, err
	}

	return h.GetResps[idx], nil
}
// GetItem delegates to the GetsItem stub; the id arguments are ignored.
func (h BackupHandler) GetItem(ctx context.Context, _, _ string) (models.DriveItemable, error) {
	return h.GI.GetItem(ctx, "", "")
}
// GetItemPermission delegates to the GetsItemPermission stub; the id
// arguments are ignored.
func (h BackupHandler) GetItemPermission(
	ctx context.Context,
	_, _ string,
) (models.PermissionCollectionResponseable, error) {
	return h.GIP.GetItemPermission(ctx, "", "")
}
// canonPather builds a canonical data-layer path from a path builder,
// tenant ID, and resource owner.
type canonPather func(*path.Builder, string, string) (path.Path, error)

// defaultOneDriveCanonPather canonicalizes into the OneDrive data layer.
var defaultOneDriveCanonPather = func(pb *path.Builder, tID, ro string) (path.Path, error) {
	return pb.ToDataLayerOneDrivePath(tID, ro, false)
}

// defaultSharePointCanonPather canonicalizes into the SharePoint
// libraries data layer.
var defaultSharePointCanonPather = func(pb *path.Builder, tID, ro string) (path.Path, error) {
	return pb.ToDataLayerSharePointPath(tID, ro, path.LibrariesCategory, false)
}
// pathPrefixer builds the service-specific path prefix for a drive:
// tenant/resourceOwner/service/category/drives/<driveID>/root.
type pathPrefixer func(tID, ro, driveID string) (path.Path, error)

// defaultOneDrivePathPrefixer prefixes into the OneDrive files category.
var defaultOneDrivePathPrefixer = func(tID, ro, driveID string) (path.Path, error) {
	return path.Build(
		tID,
		ro,
		path.OneDriveService,
		path.FilesCategory,
		false,
		odConsts.DrivesPathDir,
		driveID,
		odConsts.RootPathDir)
}

// defaultSharePointPathPrefixer prefixes into the SharePoint libraries
// category.
var defaultSharePointPathPrefixer = func(tID, ro, driveID string) (path.Path, error) {
	return path.Build(
		tID,
		ro,
		path.SharePointService,
		path.LibrariesCategory,
		false,
		odConsts.DrivesPathDir,
		driveID,
		odConsts.RootPathDir)
}
// locationIDer constructs a details.LocationIDer from a drive ID and
// path elements.
type locationIDer func(string, ...string) details.LocationIDer

var defaultOneDriveLocationIDer = func(driveID string, elems ...string) details.LocationIDer {
	return details.NewOneDriveLocationIDer(driveID, elems...)
}

var defaultSharePointLocationIDer = func(driveID string, elems ...string) details.LocationIDer {
	return details.NewSharePointLocationIDer(driveID, elems...)
}
// ---------------------------------------------------------------------------
// Get Itemer
// ---------------------------------------------------------------------------

// GetsItem stubs a single-item getter with canned results.
type GetsItem struct {
	Item models.DriveItemable
	Err  error
}

// GetItem returns the canned item and error, ignoring all arguments.
func (m GetsItem) GetItem(
	_ context.Context,
	_, _ string,
) (models.DriveItemable, error) {
	return m.Item, m.Err
}
// ---------------------------------------------------------------------------
// Get Item Permissioner
// ---------------------------------------------------------------------------

// GetsItemPermission stubs an item-permission getter with canned results.
type GetsItemPermission struct {
	Perm models.PermissionCollectionResponseable
	Err  error
}

// GetItemPermission returns the canned permission collection and error,
// ignoring all arguments.
func (m GetsItemPermission) GetItemPermission(
	_ context.Context,
	_, _ string,
) (models.PermissionCollectionResponseable, error) {
	return m.Perm, m.Err
}

View File

@ -0,0 +1,47 @@
package mock
// DriveFilePayloadData is a canned graph API driveItem JSON payload (an
// image file in a personal OneDrive) for exercising deserialization.
// All IDs, tokens, and URLs are fakes.
//
//nolint:lll
const DriveFilePayloadData = `{
"@odata.context": "https://graph.microsoft.com/v1.0/$metadata#drives('b%22-8wC6Jt04EWvKr1fQUDOyw5Gk8jIUJdEjzqonlSRf48i67LJdwopT4-6kiycJ5AV')/items/$entity",
"@microsoft.graph.downloadUrl": "https://test-my.sharepoint.com/personal/brunhilda_test_onmicrosoft_com/_layouts/15/download.aspx?UniqueId=deadbeef-1b6a-4d13-aae6-bf5f9b07d424&Translate=false&tempauth=eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJhdWQiOiIwMDAwMDAwMy0wMDAwLTBmZjEtY2UwMC0wMDAwMDAwMDAwMDAvMTBycWMyLW15LnNoYXJlcG9pbnQuY29tQGZiOGFmYmFhLWU5NGMtNGVhNS04YThhLTI0YWZmMDRkNzg3NCIsImlzcyI6IjAwMDAwMDAzLTAwMDAtMGZmMS1jZTAwLTAwMDAwMDAwMDAwMCIsIm5iZiI6IjE2ODUxMjk1MzIiLCJleHAiOiIxNjg1MTMzMTMyIiwiZW5kcG9pbnR1cmwiOiJkTStxblBIQitkNDMzS0ErTHVTUVZMRi9IaVliSkI2eHJWN0tuYk45aXQ0PSIsImVuZHBvaW50dXJsTGVuZ3RoIjoiMTYxIiwiaXNsb29wYmFjayI6IlRydWUiLCJjaWQiOiJOVFl4TXpNMFkyWXRZVFk0TVMwMFpXUmxMVGt5TjJZdFlXVmpNVGMwTldWbU16TXgiLCJ2ZXIiOiJoYXNoZWRwcm9vZnRva2VuIiwic2l0ZWlkIjoiWlRnd01tTmpabUl0TnpRNVlpMDBOV1V3TFdGbU1tRXRZbVExWmpReE5EQmpaV05pIiwiYXBwX2Rpc3BsYXluYW1lIjoiS2VlcGVyc19Mb2NhbCIsIm5hbWVpZCI6ImFkYjk3MTQ2LTcxYTctNDkxYS05YWMwLWUzOGFkNzdkZWViNkBmYjhhZmJhYS1lOTRjLTRlYTUtOGE4YS0yNGFmZjA0ZDc4NzQiLCJyb2xlcyI6ImFsbHNpdGVzLndyaXRlIGFsbHNpdGVzLm1hbmFnZSBhbGxmaWxlcy53cml0ZSBhbGxzaXRlcy5mdWxsY29udHJvbCBhbGxwcm9maWxlcy5yZWFkIiwidHQiOiIxIiwidXNlUGVyc2lzdGVudENvb2tpZSI6bnVsbCwiaXBhZGRyIjoiMjA1MTkwLjE1Ny4zMCJ9.lN7Vpfzk1abEyE0M3gyRyZXEaGQ3JMXCyaXUBNbD5Vo&ApiVersion=2.0",
"createdDateTime": "2023-04-25T21:32:58Z",
"eTag": "\"{DEADBEEF-1B6A-4D13-AAE6-BF5F9B07D424},1\"",
"id": "017W47IH3FQVEFI23QCNG2VZV7L6NQPVBE",
"lastModifiedDateTime": "2023-04-25T21:32:58Z",
"name": "huehuehue.GIF",
"webUrl": "https://test-my.sharepoint.com/personal/brunhilda_test_onmicrosoft_com/Documents/test/huehuehue.GIF",
"cTag": "\"c:{DEADBEEF-1B6A-4D13-AAE6-BF5F9B07D424},1\"",
"size": 88843,
"createdBy": {
"user": {
"email": "brunhilda@test.onmicrosoft.com",
"id": "DEADBEEF-4c80-4da4-86ef-a08d8d6f0f94",
"displayName": "BrunHilda"
}
},
"lastModifiedBy": {
"user": {
"email": "brunhilda@10rqc2.onmicrosoft.com",
"id": "DEADBEEF-4c80-4da4-86ef-a08d8d6f0f94",
"displayName": "BrunHilda"
}
},
"parentReference": {
"driveType": "business",
"driveId": "b!-8wC6Jt04EWvKr1fQUDOyw5Gk8jIUJdEjzqonlSRf48i67LJdwopT4-6kiycJ5VA",
"id": "017W47IH6DRQF2GS2N6NGWLZRS7RUJ2DIP",
"path": "/drives/b!-8wC6Jt04EWvKr1fQUDOyw5Gk8jIUJdEjzqonlSRf48i67LJdwopT4-6kiycJ5VA/root:/test",
"siteId": "DEADBEEF-749b-45e0-af2a-bd5f4140cecb"
},
"file": {
"mimeType": "image/gif",
"hashes": {
"quickXorHash": "sU5rmXOvVFn6zJHpCPro9cYaK+Q="
}
},
"fileSystemInfo": {
"createdDateTime": "2023-04-25T21:32:58Z",
"lastModifiedDateTime": "2023-04-25T21:32:58Z"
},
"image": {}
}`

View File

@ -8,13 +8,10 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
func getParentMetadata(
@ -132,12 +129,16 @@ func computeParentPermissions(
}
}
type updateDeleteItemPermissioner interface {
DeleteItemPermissioner
UpdateItemPermissioner
}
// UpdatePermissions takes in the set of permission to be added and
// removed from an item to bring it to the desired state.
func UpdatePermissions(
ctx context.Context,
creds account.M365Config,
service graph.Servicer,
udip updateDeleteItemPermissioner,
driveID string,
itemID string,
permAdded, permRemoved []metadata.Permission,
@ -161,9 +162,8 @@ func UpdatePermissions(
return clues.New("no new permission id").WithClues(ctx)
}
err := api.DeleteDriveItemPermission(
err := udip.DeleteItemPermission(
ictx,
creds,
driveID,
itemID,
pid)
@ -216,7 +216,7 @@ func UpdatePermissions(
pbody.SetRecipients([]models.DriveRecipientable{rec})
newPerm, err := api.PostItemPermissionUpdate(ictx, service, driveID, itemID, pbody)
newPerm, err := udip.PostItemPermissionUpdate(ictx, driveID, itemID, pbody)
if err != nil {
return clues.Stack(err)
}
@ -233,8 +233,7 @@ func UpdatePermissions(
// on onedrive items.
func RestorePermissions(
ctx context.Context,
creds account.M365Config,
service graph.Servicer,
rh RestoreHandler,
driveID string,
itemID string,
itemPath path.Path,
@ -256,8 +255,7 @@ func RestorePermissions(
return UpdatePermissions(
ctx,
creds,
service,
rh,
driveID,
itemID,
permAdded,

View File

@ -1,7 +1,6 @@
package onedrive
import (
"fmt"
"strings"
"testing"
@ -9,6 +8,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path"
@ -36,8 +36,8 @@ func runComputeParentPermissionsTest(
category path.CategoryType,
resourceOwner string,
) {
entryPath := fmt.Sprintf(rootDrivePattern, "drive-id") + "/level0/level1/level2/entry"
rootEntryPath := fmt.Sprintf(rootDrivePattern, "drive-id") + "/entry"
entryPath := odConsts.DriveFolderPrefixBuilder("drive-id").String() + "/level0/level1/level2/entry"
rootEntryPath := odConsts.DriveFolderPrefixBuilder("drive-id").String() + "/entry"
entry, err := path.Build(
"tenant",

View File

@ -22,7 +22,6 @@ import (
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
@ -41,6 +40,7 @@ type restoreCaches struct {
ParentDirToMeta map[string]metadata.Metadata
OldPermIDToNewID map[string]string
DriveIDToRootFolderID map[string]string
pool sync.Pool
}
func NewRestoreCaches() *restoreCaches {
@ -49,20 +49,25 @@ func NewRestoreCaches() *restoreCaches {
ParentDirToMeta: map[string]metadata.Metadata{},
OldPermIDToNewID: map[string]string{},
DriveIDToRootFolderID: map[string]string{},
// Buffer pool for uploads
pool: sync.Pool{
New: func() interface{} {
b := make([]byte, graph.CopyBufferSize)
return &b
},
},
}
}
// RestoreCollections will restore the specified data collections into OneDrive
func RestoreCollections(
ctx context.Context,
creds account.M365Config,
rh RestoreHandler,
backupVersion int,
service graph.Servicer,
dest control.RestoreDestination,
opts control.Options,
dcs []data.RestoreCollection,
deets *details.Builder,
pool *sync.Pool,
errs *fault.Bus,
) (*support.ConnectorOperationStatus, error) {
var (
@ -99,16 +104,13 @@ func RestoreCollections(
metrics, err = RestoreCollection(
ictx,
creds,
rh,
backupVersion,
service,
dc,
caches,
OneDriveSource,
dest.ContainerName,
deets,
opts.RestorePermissions,
pool,
errs)
if err != nil {
el.AddRecoverable(err)
@ -138,16 +140,13 @@ func RestoreCollections(
// - error, if any besides recoverable
func RestoreCollection(
ctx context.Context,
creds account.M365Config,
rh RestoreHandler,
backupVersion int,
service graph.Servicer,
dc data.RestoreCollection,
caches *restoreCaches,
source driveSource,
restoreContainerName string,
deets *details.Builder,
restorePerms bool,
pool *sync.Pool,
errs *fault.Bus,
) (support.CollectionMetrics, error) {
var (
@ -170,7 +169,7 @@ func RestoreCollection(
}
if _, ok := caches.DriveIDToRootFolderID[drivePath.DriveID]; !ok {
root, err := api.GetDriveRoot(ctx, service, drivePath.DriveID)
root, err := rh.GetRootFolder(ctx, drivePath.DriveID)
if err != nil {
return metrics, clues.Wrap(err, "getting drive root id")
}
@ -207,8 +206,7 @@ func RestoreCollection(
// Create restore folders and get the folder ID of the folder the data stream will be restored in
restoreFolderID, err := CreateRestoreFolders(
ctx,
creds,
service,
rh,
drivePath,
restoreDir,
dc.FullPath(),
@ -267,11 +265,10 @@ func RestoreCollection(
defer wg.Done()
defer func() { <-semaphoreCh }()
copyBufferPtr := pool.Get().(*[]byte)
defer pool.Put(copyBufferPtr)
copyBufferPtr := caches.pool.Get().(*[]byte)
defer caches.pool.Put(copyBufferPtr)
copyBuffer := *copyBufferPtr
ictx := clues.Add(ctx, "restore_item_id", itemData.UUID())
itemPath, err := dc.FullPath().AppendItem(itemData.UUID())
@ -282,11 +279,9 @@ func RestoreCollection(
itemInfo, skipped, err := restoreItem(
ictx,
creds,
rh,
dc,
backupVersion,
source,
service,
drivePath,
restoreFolderID,
copyBuffer,
@ -332,11 +327,9 @@ func RestoreCollection(
// returns the item info, a bool (true = restore was skipped), and an error
func restoreItem(
ctx context.Context,
creds account.M365Config,
dc data.RestoreCollection,
rh RestoreHandler,
fibn data.FetchItemByNamer,
backupVersion int,
source driveSource,
service graph.Servicer,
drivePath *path.DrivePath,
restoreFolderID string,
copyBuffer []byte,
@ -351,10 +344,9 @@ func restoreItem(
if backupVersion < version.OneDrive1DataAndMetaFiles {
itemInfo, err := restoreV0File(
ctx,
source,
service,
rh,
drivePath,
dc,
fibn,
restoreFolderID,
copyBuffer,
itemData)
@ -401,11 +393,9 @@ func restoreItem(
if backupVersion < version.OneDrive6NameInMeta {
itemInfo, err := restoreV1File(
ctx,
source,
creds,
service,
rh,
drivePath,
dc,
fibn,
restoreFolderID,
copyBuffer,
restorePerms,
@ -423,11 +413,9 @@ func restoreItem(
itemInfo, err := restoreV6File(
ctx,
source,
creds,
service,
rh,
drivePath,
dc,
fibn,
restoreFolderID,
copyBuffer,
restorePerms,
@ -443,24 +431,22 @@ func restoreItem(
func restoreV0File(
ctx context.Context,
source driveSource,
service graph.Servicer,
rh RestoreHandler,
drivePath *path.DrivePath,
fetcher fileFetcher,
fibn data.FetchItemByNamer,
restoreFolderID string,
copyBuffer []byte,
itemData data.Stream,
) (details.ItemInfo, error) {
_, itemInfo, err := restoreData(
ctx,
service,
fetcher,
rh,
fibn,
itemData.UUID(),
itemData,
drivePath.DriveID,
restoreFolderID,
copyBuffer,
source)
copyBuffer)
if err != nil {
return itemInfo, clues.Wrap(err, "restoring file")
}
@ -468,17 +454,11 @@ func restoreV0File(
return itemInfo, nil
}
type fileFetcher interface {
Fetch(ctx context.Context, name string) (data.Stream, error)
}
func restoreV1File(
ctx context.Context,
source driveSource,
creds account.M365Config,
service graph.Servicer,
rh RestoreHandler,
drivePath *path.DrivePath,
fetcher fileFetcher,
fibn data.FetchItemByNamer,
restoreFolderID string,
copyBuffer []byte,
restorePerms bool,
@ -490,14 +470,13 @@ func restoreV1File(
itemID, itemInfo, err := restoreData(
ctx,
service,
fetcher,
rh,
fibn,
trimmedName,
itemData,
drivePath.DriveID,
restoreFolderID,
copyBuffer,
source)
copyBuffer)
if err != nil {
return details.ItemInfo{}, err
}
@ -511,15 +490,14 @@ func restoreV1File(
// Fetch item permissions from the collection and restore them.
metaName := trimmedName + metadata.MetaFileSuffix
meta, err := fetchAndReadMetadata(ctx, fetcher, metaName)
meta, err := fetchAndReadMetadata(ctx, fibn, metaName)
if err != nil {
return details.ItemInfo{}, clues.Wrap(err, "restoring file")
}
err = RestorePermissions(
ctx,
creds,
service,
rh,
drivePath.DriveID,
itemID,
itemPath,
@ -534,11 +512,9 @@ func restoreV1File(
func restoreV6File(
ctx context.Context,
source driveSource,
creds account.M365Config,
service graph.Servicer,
rh RestoreHandler,
drivePath *path.DrivePath,
fetcher fileFetcher,
fibn data.FetchItemByNamer,
restoreFolderID string,
copyBuffer []byte,
restorePerms bool,
@ -551,7 +527,7 @@ func restoreV6File(
// Get metadata file so we can determine the file name.
metaName := trimmedName + metadata.MetaFileSuffix
meta, err := fetchAndReadMetadata(ctx, fetcher, metaName)
meta, err := fetchAndReadMetadata(ctx, fibn, metaName)
if err != nil {
return details.ItemInfo{}, clues.Wrap(err, "restoring file")
}
@ -574,14 +550,13 @@ func restoreV6File(
itemID, itemInfo, err := restoreData(
ctx,
service,
fetcher,
rh,
fibn,
meta.FileName,
itemData,
drivePath.DriveID,
restoreFolderID,
copyBuffer,
source)
copyBuffer)
if err != nil {
return details.ItemInfo{}, err
}
@ -594,8 +569,7 @@ func restoreV6File(
err = RestorePermissions(
ctx,
creds,
service,
rh,
drivePath.DriveID,
itemID,
itemPath,
@ -615,8 +589,7 @@ func restoreV6File(
// folderCache is mutated, as a side effect of populating the items.
func CreateRestoreFolders(
ctx context.Context,
creds account.M365Config,
service graph.Servicer,
rh RestoreHandler,
drivePath *path.DrivePath,
restoreDir *path.Builder,
folderPath path.Path,
@ -626,7 +599,7 @@ func CreateRestoreFolders(
) (string, error) {
id, err := createRestoreFolders(
ctx,
service,
rh,
drivePath,
restoreDir,
caches)
@ -645,8 +618,7 @@ func CreateRestoreFolders(
err = RestorePermissions(
ctx,
creds,
service,
rh,
drivePath.DriveID,
id,
folderPath,
@ -656,12 +628,17 @@ func CreateRestoreFolders(
return id, err
}
type folderRestorer interface {
GetFolderByNamer
PostItemInContainerer
}
// createRestoreFolders creates the restore folder hierarchy in the specified
// drive and returns the folder ID of the last folder entry in the hierarchy.
// folderCache is mutated, as a side effect of populating the items.
func createRestoreFolders(
ctx context.Context,
service graph.Servicer,
fr folderRestorer,
drivePath *path.DrivePath,
restoreDir *path.Builder,
caches *restoreCaches,
@ -692,7 +669,7 @@ func createRestoreFolders(
continue
}
folderItem, err := api.GetFolderByName(ictx, service, driveID, parentFolderID, folder)
folderItem, err := fr.GetFolderByName(ictx, driveID, parentFolderID, folder)
if err != nil && !errors.Is(err, api.ErrFolderNotFound) {
return "", clues.Wrap(err, "getting folder by display name")
}
@ -706,7 +683,7 @@ func createRestoreFolders(
}
// create the folder if not found
folderItem, err = CreateItem(ictx, service, driveID, parentFolderID, newItem(folder, true))
folderItem, err = fr.PostItemInContainer(ictx, driveID, parentFolderID, newItem(folder, true))
if err != nil {
return "", clues.Wrap(err, "creating folder")
}
@ -720,16 +697,21 @@ func createRestoreFolders(
return parentFolderID, nil
}
type itemRestorer interface {
ItemInfoAugmenter
NewItemContentUploader
PostItemInContainerer
}
// restoreData will create a new item in the specified `parentFolderID` and upload the data.Stream
func restoreData(
ctx context.Context,
service graph.Servicer,
fetcher fileFetcher,
ir itemRestorer,
fibn data.FetchItemByNamer,
name string,
itemData data.Stream,
driveID, parentFolderID string,
copyBuffer []byte,
source driveSource,
) (string, details.ItemInfo, error) {
ctx, end := diagnostics.Span(ctx, "gc:oneDrive:restoreItem", diagnostics.Label("item_uuid", itemData.UUID()))
defer end()
@ -743,17 +725,15 @@ func restoreData(
}
// Create Item
newItem, err := CreateItem(ctx, service, driveID, parentFolderID, newItem(name, false))
newItem, err := ir.PostItemInContainer(ctx, driveID, parentFolderID, newItem(name, false))
if err != nil {
return "", details.ItemInfo{}, err
}
itemID := ptr.Val(newItem.GetId())
ctx = clues.Add(ctx, "upload_item_id", itemID)
r, err := api.PostDriveItem(ctx, service, driveID, itemID)
// Get a drive item writer
w, uploadURL, err := driveItemWriter(ctx, ir, driveID, ptr.Val(newItem.GetId()), ss.Size())
if err != nil {
return "", details.ItemInfo{}, clues.Wrap(err, "get upload session")
return "", details.ItemInfo{}, clues.Wrap(err, "get item upload session")
}
var written int64
@ -765,12 +745,6 @@ func restoreData(
// show "register" any partial file uploads and so if we fail an
// upload the file size will be 0.
for i := 0; i <= maxUploadRetries; i++ {
// Initialize and return an io.Writer to upload data for the
// specified item It does so by creating an upload session and
// using that URL to initialize an `itemWriter`
// TODO: @vkamra verify if var session is the desired input
w := graph.NewLargeItemWriter(itemID, ptr.Val(r.GetUploadUrl()), ss.Size())
pname := name
iReader := itemData.ToReader()
@ -780,7 +754,7 @@ func restoreData(
// If it is not the first try, we have to pull the file
// again from kopia. Ideally we could just seek the stream
// but we don't have a Seeker available here.
itemData, err := fetcher.Fetch(ctx, itemData.UUID())
itemData, err := fibn.FetchItemByName(ctx, itemData.UUID())
if err != nil {
return "", details.ItemInfo{}, clues.Wrap(err, "get data file")
}
@ -803,32 +777,29 @@ func restoreData(
// clear out the bar if err
abort()
// refresh the io.Writer to restart the upload
// TODO: @vkamra verify if var session is the desired input
w = graph.NewLargeItemWriter(ptr.Val(newItem.GetId()), uploadURL, ss.Size())
}
if err != nil {
return "", details.ItemInfo{}, clues.Wrap(err, "uploading file")
}
dii := details.ItemInfo{}
switch source {
case SharePointSource:
dii.SharePoint = sharePointItemInfo(newItem, written)
default:
dii.OneDrive = oneDriveItemInfo(newItem, written)
}
dii := ir.AugmentItemInfo(details.ItemInfo{}, newItem, written, nil)
return ptr.Val(newItem.GetId()), dii, nil
}
func fetchAndReadMetadata(
ctx context.Context,
fetcher fileFetcher,
fibn data.FetchItemByNamer,
metaName string,
) (metadata.Metadata, error) {
ctx = clues.Add(ctx, "meta_file_name", metaName)
metaFile, err := fetcher.Fetch(ctx, metaName)
metaFile, err := fibn.FetchItemByName(ctx, metaName)
if err != nil {
return metadata.Metadata{}, clues.Wrap(err, "getting item metadata")
}

View File

@ -4,55 +4,29 @@ import (
"testing"
"github.com/alcionai/clues"
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type MockGraphService struct{}
func (ms *MockGraphService) Client() *msgraphsdk.GraphServiceClient {
return nil
}
func (ms *MockGraphService) Adapter() *msgraphsdk.GraphRequestAdapter {
return nil
}
var _ graph.Servicer = &oneDriveService{}
// TODO(ashmrtn): Merge with similar structs in graph and exchange packages.
type oneDriveService struct {
client msgraphsdk.GraphServiceClient
adapter msgraphsdk.GraphRequestAdapter
credentials account.M365Config
status support.ConnectorOperationStatus
}
func (ods *oneDriveService) Client() *msgraphsdk.GraphServiceClient {
return &ods.client
}
func (ods *oneDriveService) Adapter() *msgraphsdk.GraphRequestAdapter {
return &ods.adapter
ac api.Client
}
func NewOneDriveService(credentials account.M365Config) (*oneDriveService, error) {
adapter, err := graph.CreateAdapter(
credentials.AzureTenantID,
credentials.AzureClientID,
credentials.AzureClientSecret)
ac, err := api.NewClient(credentials)
if err != nil {
return nil, err
}
service := oneDriveService{
adapter: *adapter,
client: *msgraphsdk.NewGraphServiceClient(adapter),
ac: ac,
credentials: credentials,
}
@ -70,10 +44,10 @@ func (ods *oneDriveService) updateStatus(status *support.ConnectorOperationStatu
func loadTestService(t *testing.T) *oneDriveService {
a := tester.NewM365Account(t)
m365, err := a.M365Config()
creds, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
service, err := NewOneDriveService(m365)
service, err := NewOneDriveService(creds)
require.NoError(t, err, clues.ToCore(err))
return service

View File

@ -0,0 +1,32 @@
package testdata
import (
"time"
"github.com/microsoftgraph/msgraph-sdk-go/models"
)
func NewStubDriveItem(
id, name string,
size int64,
created, modified time.Time,
isFile, isShared bool,
) models.DriveItemable {
stubItem := models.NewDriveItem()
stubItem.SetId(&id)
stubItem.SetName(&name)
stubItem.SetSize(&size)
stubItem.SetCreatedDateTime(&created)
stubItem.SetLastModifiedDateTime(&modified)
stubItem.SetAdditionalData(map[string]any{"@microsoft.graph.downloadUrl": "https://corsobackup.io"})
if isFile {
stubItem.SetFile(models.NewFile())
}
if isShared {
stubItem.SetShared(&models.Shared{})
}
return stubItem
}

View File

@ -9,9 +9,9 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type itemProps struct {
@ -31,8 +31,7 @@ type urlCache struct {
refreshMu sync.Mutex
deltaQueryCount int
svc graph.Servicer
itemPagerFunc driveItemPagerFunc
itemPager api.DriveItemEnumerator
errors *fault.Bus
}
@ -41,15 +40,13 @@ type urlCache struct {
func newURLCache(
driveID string,
refreshInterval time.Duration,
svc graph.Servicer,
itemPager api.DriveItemEnumerator,
errors *fault.Bus,
itemPagerFunc driveItemPagerFunc,
) (*urlCache, error) {
err := validateCacheParams(
driveID,
refreshInterval,
svc,
itemPagerFunc)
itemPager)
if err != nil {
return nil, clues.Wrap(err, "cache params")
}
@ -59,8 +56,7 @@ func newURLCache(
lastRefreshTime: time.Time{},
driveID: driveID,
refreshInterval: refreshInterval,
svc: svc,
itemPagerFunc: itemPagerFunc,
itemPager: itemPager,
errors: errors,
},
nil
@ -70,8 +66,7 @@ func newURLCache(
func validateCacheParams(
driveID string,
refreshInterval time.Duration,
svc graph.Servicer,
itemPagerFunc driveItemPagerFunc,
itemPager api.DriveItemEnumerator,
) error {
if len(driveID) == 0 {
return clues.New("drive id is empty")
@ -81,11 +76,7 @@ func validateCacheParams(
return clues.New("invalid refresh interval")
}
if svc == nil {
return clues.New("nil graph servicer")
}
if itemPagerFunc == nil {
if itemPager == nil {
return clues.New("nil item pager")
}
@ -174,7 +165,7 @@ func (uc *urlCache) deltaQuery(
_, _, _, err := collectItems(
ctx,
uc.itemPagerFunc(uc.svc, uc.driveID, ""),
uc.itemPager,
uc.driveID,
"",
uc.updateCache,

View File

@ -16,13 +16,12 @@ import (
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type URLCacheIntegrationSuite struct {
tester.Suite
service graph.Servicer
ac api.Client
user string
driveID string
}
@ -41,69 +40,60 @@ func (suite *URLCacheIntegrationSuite) SetupSuite() {
ctx, flush := tester.NewContext(t)
defer flush()
suite.service = loadTestService(t)
suite.user = tester.SecondaryM365UserID(t)
pager, err := PagerForSource(OneDriveSource, suite.service, suite.user, nil)
acct := tester.NewM365Account(t)
creds, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
odDrives, err := api.GetAllDrives(ctx, pager, true, maxDrivesRetries)
suite.ac, err = api.NewClient(creds)
require.NoError(t, err, clues.ToCore(err))
require.Greaterf(t, len(odDrives), 0, "user %s does not have a drive", suite.user)
suite.driveID = ptr.Val(odDrives[0].GetId())
drive, err := suite.ac.Users().GetDefaultDrive(ctx, suite.user)
require.NoError(t, err, clues.ToCore(err))
suite.driveID = ptr.Val(drive.GetId())
}
// Basic test for urlCache. Create some files in onedrive, then access them via
// url cache
func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
t := suite.T()
var (
t = suite.T()
ac = suite.ac.Drives()
driveID = suite.driveID
newFolderName = tester.DefaultTestRestoreDestination("folder").ContainerName
driveItemPager = suite.ac.Drives().NewItemPager(driveID, "", api.DriveItemSelectDefault())
)
ctx, flush := tester.NewContext(t)
defer flush()
svc := suite.service
driveID := suite.driveID
// Create a new test folder
root, err := svc.Client().Drives().ByDriveId(driveID).Root().Get(ctx, nil)
root, err := ac.GetRootFolder(ctx, driveID)
require.NoError(t, err, clues.ToCore(err))
newFolderName := tester.DefaultTestRestoreDestination("folder").ContainerName
newFolder, err := CreateItem(
newFolder, err := ac.Drives().PostItemInContainer(
ctx,
svc,
driveID,
ptr.Val(root.GetId()),
newItem(newFolderName, true))
require.NoError(t, err, clues.ToCore(err))
require.NotNil(t, newFolder.GetId())
// Delete folder on exit
defer func() {
ictx := clues.Add(ctx, "folder_id", ptr.Val(newFolder.GetId()))
err := api.DeleteDriveItem(
ictx,
loadTestService(t),
driveID,
ptr.Val(newFolder.GetId()))
if err != nil {
logger.CtxErr(ictx, err).Errorw("deleting folder")
}
}()
nfid := ptr.Val(newFolder.GetId())
// Create a bunch of files in the new folder
var items []models.DriveItemable
for i := 0; i < 10; i++ {
newItemName := "testItem_" + dttm.FormatNow(dttm.SafeForTesting)
newItemName := "test_url_cache_basic_" + dttm.FormatNow(dttm.SafeForTesting)
item, err := CreateItem(
item, err := ac.Drives().PostItemInContainer(
ctx,
svc,
driveID,
ptr.Val(newFolder.GetId()),
nfid,
newItem(newItemName, false))
if err != nil {
// Something bad happened, skip this item
@ -117,12 +107,14 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
cache, err := newURLCache(
suite.driveID,
1*time.Hour,
svc,
fault.New(true),
defaultItemPager)
driveItemPager,
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
err = cache.refreshCache(ctx)
require.NoError(t, err, clues.ToCore(err))
// Launch parallel requests to the cache, one per item
var wg sync.WaitGroup
for i := 0; i < len(items); i++ {

View File

@ -54,7 +54,7 @@ type Collection struct {
jobs []string
// M365 IDs of the items of this collection
category DataCategory
service graph.Servicer
client api.Sites
ctrl control.Options
betaService *betaAPI.BetaService
statusUpdater support.StatusUpdater
@ -63,7 +63,7 @@ type Collection struct {
// NewCollection helper function for creating a Collection
func NewCollection(
folderPath path.Path,
service graph.Servicer,
ac api.Client,
category DataCategory,
statusUpdater support.StatusUpdater,
ctrlOpts control.Options,
@ -72,7 +72,7 @@ func NewCollection(
fullPath: folderPath,
jobs: make([]string, 0),
data: make(chan data.Stream, collectionChannelBufferSize),
service: service,
client: ac.Sites(),
statusUpdater: statusUpdater,
category: category,
ctrl: ctrlOpts,
@ -175,7 +175,10 @@ func (sc *Collection) populate(ctx context.Context, errs *fault.Bus) {
sc.finishPopulation(ctx, metrics)
}
func (sc *Collection) runPopulate(ctx context.Context, errs *fault.Bus) (support.CollectionMetrics, error) {
func (sc *Collection) runPopulate(
ctx context.Context,
errs *fault.Bus,
) (support.CollectionMetrics, error) {
var (
err error
metrics support.CollectionMetrics
@ -197,7 +200,7 @@ func (sc *Collection) runPopulate(ctx context.Context, errs *fault.Bus) (support
case List:
metrics, err = sc.retrieveLists(ctx, writer, colProgress, errs)
case Pages:
metrics, err = sc.retrievePages(ctx, writer, colProgress, errs)
metrics, err = sc.retrievePages(ctx, sc.client, writer, colProgress, errs)
}
return metrics, err
@ -216,7 +219,12 @@ func (sc *Collection) retrieveLists(
el = errs.Local()
)
lists, err := loadSiteLists(ctx, sc.service, sc.fullPath.ResourceOwner(), sc.jobs, errs)
lists, err := loadSiteLists(
ctx,
sc.client.Stable,
sc.fullPath.ResourceOwner(),
sc.jobs,
errs)
if err != nil {
return metrics, err
}
@ -262,6 +270,7 @@ func (sc *Collection) retrieveLists(
func (sc *Collection) retrievePages(
ctx context.Context,
as api.Sites,
wtr *kjson.JsonSerializationWriter,
progress chan<- struct{},
errs *fault.Bus,
@ -276,7 +285,7 @@ func (sc *Collection) retrievePages(
return metrics, clues.New("beta service required").WithClues(ctx)
}
parent, err := api.GetSite(ctx, sc.service, sc.fullPath.ResourceOwner())
parent, err := as.GetByID(ctx, sc.fullPath.ResourceOwner())
if err != nil {
return metrics, err
}

View File

@ -13,7 +13,7 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/sharepoint/api"
betaAPI "github.com/alcionai/corso/src/internal/connector/sharepoint/api"
spMock "github.com/alcionai/corso/src/internal/connector/sharepoint/mock"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester"
@ -21,12 +21,14 @@ import (
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type SharePointCollectionSuite struct {
tester.Suite
siteID string
creds account.M365Config
ac api.Client
}
func (suite *SharePointCollectionSuite) SetupSuite() {
@ -38,6 +40,11 @@ func (suite *SharePointCollectionSuite) SetupSuite() {
require.NoError(t, err, clues.ToCore(err))
suite.creds = m365
ac, err := api.NewClient(m365)
require.NoError(t, err, clues.ToCore(err))
suite.ac = ac
}
func TestSharePointCollectionSuite(t *testing.T) {
@ -67,9 +74,12 @@ func (suite *SharePointCollectionSuite) TestCollection_Item_Read() {
// TestListCollection tests basic functionality to create
// SharePoint collection and to use the data stream channel.
func (suite *SharePointCollectionSuite) TestCollection_Items() {
tenant := "some"
user := "user"
dirRoot := "directory"
var (
tenant = "some"
user = "user"
dirRoot = "directory"
)
tables := []struct {
name, itemName string
category DataCategory
@ -130,13 +140,13 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
},
getItem: func(t *testing.T, itemName string) *Item {
byteArray := spMock.Page(itemName)
page, err := api.CreatePageFromBytes(byteArray)
page, err := betaAPI.CreatePageFromBytes(byteArray)
require.NoError(t, err, clues.ToCore(err))
data := &Item{
id: itemName,
data: io.NopCloser(bytes.NewReader(byteArray)),
info: api.PageInfo(page, int64(len(byteArray))),
info: betaAPI.PageInfo(page, int64(len(byteArray))),
}
return data
@ -151,7 +161,12 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
ctx, flush := tester.NewContext(t)
defer flush()
col := NewCollection(test.getDir(t), nil, test.category, nil, control.Defaults())
col := NewCollection(
test.getDir(t),
suite.ac,
test.category,
nil,
control.Defaults())
col.data <- test.getItem(t, test.itemName)
readItems := []data.Stream{}

View File

@ -19,6 +19,7 @@ import (
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type statusUpdater interface {
@ -29,12 +30,11 @@ type statusUpdater interface {
// for the specified user
func DataCollections(
ctx context.Context,
itemClient graph.Requester,
ac api.Client,
selector selectors.Selector,
site idname.Provider,
metadata []data.RestoreCollection,
creds account.M365Config,
serv graph.Servicer,
su statusUpdater,
ctrlOpts control.Options,
errs *fault.Bus,
@ -72,7 +72,7 @@ func DataCollections(
case path.ListsCategory:
spcs, err = collectLists(
ctx,
serv,
ac,
creds.AzureTenantID,
site,
su,
@ -86,8 +86,7 @@ func DataCollections(
case path.LibrariesCategory:
spcs, err = collectLibraries(
ctx,
itemClient,
serv,
ac.Drives(),
creds.AzureTenantID,
site,
metadata,
@ -105,7 +104,7 @@ func DataCollections(
spcs, err = collectPages(
ctx,
creds,
serv,
ac,
site,
su,
ctrlOpts,
@ -144,7 +143,7 @@ func DataCollections(
func collectLists(
ctx context.Context,
serv graph.Servicer,
ac api.Client,
tenantID string,
site idname.Provider,
updater statusUpdater,
@ -158,7 +157,7 @@ func collectLists(
spcs = make([]data.BackupCollection, 0)
)
lists, err := preFetchLists(ctx, serv, site.ID())
lists, err := preFetchLists(ctx, ac.Stable, site.ID())
if err != nil {
return nil, err
}
@ -179,7 +178,12 @@ func collectLists(
el.AddRecoverable(clues.Wrap(err, "creating list collection path").WithClues(ctx))
}
collection := NewCollection(dir, serv, List, updater.UpdateStatus, ctrlOpts)
collection := NewCollection(
dir,
ac,
List,
updater.UpdateStatus,
ctrlOpts)
collection.AddJob(tuple.id)
spcs = append(spcs, collection)
@ -192,8 +196,7 @@ func collectLists(
// all the drives associated with the site.
func collectLibraries(
ctx context.Context,
itemClient graph.Requester,
serv graph.Servicer,
ad api.Drives,
tenantID string,
site idname.Provider,
metadata []data.RestoreCollection,
@ -208,12 +211,10 @@ func collectLibraries(
var (
collections = []data.BackupCollection{}
colls = onedrive.NewCollections(
itemClient,
&libraryBackupHandler{ad},
tenantID,
site.ID(),
onedrive.SharePointSource,
folderMatcher{scope},
serv,
updater.UpdateStatus,
ctrlOpts)
)
@ -231,7 +232,7 @@ func collectLibraries(
func collectPages(
ctx context.Context,
creds account.M365Config,
serv graph.Servicer,
ac api.Client,
site idname.Provider,
updater statusUpdater,
ctrlOpts control.Options,
@ -277,7 +278,12 @@ func collectPages(
el.AddRecoverable(clues.Wrap(err, "creating page collection path").WithClues(ctx))
}
collection := NewCollection(dir, serv, Pages, updater.UpdateStatus, ctrlOpts)
collection := NewCollection(
dir,
ac,
Pages,
updater.UpdateStatus,
ctrlOpts)
collection.betaService = betaService
collection.AddJob(tuple.ID)

View File

@ -10,21 +10,24 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/idname/mock"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive"
odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// ---------------------------------------------------------------------------
// consts
// ---------------------------------------------------------------------------
const (
testBaseDrivePath = "drives/driveID1/root:"
)
var testBaseDrivePath = path.Builder{}.Append(
odConsts.DrivesPathDir,
"driveID1",
odConsts.RootPathDir)
type testFolderMatcher struct {
scope selectors.SharePointScope
@ -34,8 +37,8 @@ func (fm testFolderMatcher) IsAny() bool {
return fm.scope.IsAny(selectors.SharePointLibraryFolder)
}
func (fm testFolderMatcher) Matches(path string) bool {
return fm.scope.Matches(selectors.SharePointLibraryFolder, path)
func (fm testFolderMatcher) Matches(p string) bool {
return fm.scope.Matches(selectors.SharePointLibraryFolder, p)
}
// ---------------------------------------------------------------------------
@ -54,11 +57,15 @@ func (suite *SharePointLibrariesUnitSuite) TestUpdateCollections() {
anyFolder := (&selectors.SharePointBackup{}).LibraryFolders(selectors.Any())[0]
const (
tenant = "tenant"
tenantID = "tenant"
site = "site"
driveID = "driveID1"
)
pb := path.Builder{}.Append(testBaseDrivePath.Elements()...)
ep, err := libraryBackupHandler{}.CanonicalPath(pb, tenantID, site)
require.NoError(suite.T(), err, clues.ToCore(err))
tests := []struct {
testCase string
items []models.DriveItemable
@ -73,18 +80,13 @@ func (suite *SharePointLibrariesUnitSuite) TestUpdateCollections() {
{
testCase: "Single File",
items: []models.DriveItemable{
driveRootItem("root"),
driveItem("file", testBaseDrivePath, "root", true),
driveRootItem(odConsts.RootID),
driveItem("file", testBaseDrivePath.String(), odConsts.RootID, true),
},
scope: anyFolder,
expect: assert.NoError,
expectedCollectionIDs: []string{"root"},
expectedCollectionPaths: expectedPathAsSlice(
suite.T(),
tenant,
site,
testBaseDrivePath,
),
expectedCollectionIDs: []string{odConsts.RootID},
expectedCollectionPaths: []string{ep.String()},
expectedItemCount: 1,
expectedFileCount: 1,
expectedContainerCount: 1,
@ -111,12 +113,10 @@ func (suite *SharePointLibrariesUnitSuite) TestUpdateCollections() {
)
c := onedrive.NewCollections(
graph.NewNoTimeoutHTTPWrapper(),
tenant,
&libraryBackupHandler{api.Drives{}},
tenantID,
site,
onedrive.SharePointSource,
testFolderMatcher{test.scope},
&MockGraphService{},
nil,
control.Defaults())
@ -203,13 +203,16 @@ func (suite *SharePointPagesSuite) TestCollectPages() {
a = tester.NewM365Account(t)
)
account, err := a.M365Config()
creds, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
ac, err := api.NewClient(creds)
require.NoError(t, err, clues.ToCore(err))
col, err := collectPages(
ctx,
account,
nil,
creds,
ac,
mock.NewProvider(siteID, siteID),
&MockGraphService{},
control.Defaults(),

View File

@ -8,7 +8,6 @@ import (
"github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/onedrive"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/pkg/account"
)
@ -56,16 +55,3 @@ func createTestService(t *testing.T, credentials account.M365Config) *graph.Serv
return graph.NewService(adapter)
}
func expectedPathAsSlice(t *testing.T, tenant, user string, rest ...string) []string {
res := make([]string, 0, len(rest))
for _, r := range rest {
p, err := onedrive.GetCanonicalPath(r, tenant, user, onedrive.SharePointSource)
require.NoError(t, err, clues.ToCore(err))
res = append(res, p.String())
}
return res
}

View File

@ -0,0 +1,275 @@
package sharepoint
import (
"context"
"net/http"
"strings"
"github.com/microsoftgraph/msgraph-sdk-go/drives"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/connector/onedrive"
odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
var _ onedrive.BackupHandler = &libraryBackupHandler{}
type libraryBackupHandler struct {
ac api.Drives
}
func (h libraryBackupHandler) Get(
ctx context.Context,
url string,
headers map[string]string,
) (*http.Response, error) {
return h.ac.Get(ctx, url, headers)
}
func (h libraryBackupHandler) PathPrefix(
tenantID, resourceOwner, driveID string,
) (path.Path, error) {
return path.Build(
tenantID,
resourceOwner,
path.SharePointService,
path.LibrariesCategory,
false,
odConsts.DrivesPathDir,
driveID,
odConsts.RootPathDir)
}
func (h libraryBackupHandler) CanonicalPath(
folders *path.Builder,
tenantID, resourceOwner string,
) (path.Path, error) {
return folders.ToDataLayerSharePointPath(tenantID, resourceOwner, path.LibrariesCategory, false)
}
func (h libraryBackupHandler) ServiceCat() (path.ServiceType, path.CategoryType) {
return path.SharePointService, path.LibrariesCategory
}
func (h libraryBackupHandler) NewDrivePager(
resourceOwner string,
fields []string,
) api.DrivePager {
return h.ac.NewSiteDrivePager(resourceOwner, fields)
}
func (h libraryBackupHandler) NewItemPager(
driveID, link string,
fields []string,
) api.DriveItemEnumerator {
return h.ac.NewItemPager(driveID, link, fields)
}
func (h libraryBackupHandler) AugmentItemInfo(
dii details.ItemInfo,
item models.DriveItemable,
size int64,
parentPath *path.Builder,
) details.ItemInfo {
return augmentItemInfo(dii, item, size, parentPath)
}
// constructWebURL is a helper function for recreating the webURL
// for the originating SharePoint site. Uses the additionalData map
// from a models.DriveItemable that possesses a downloadURL within the map.
// Returns "" if the map is nil or key is not present.
func constructWebURL(adtl map[string]any) string {
var (
desiredKey = "@microsoft.graph.downloadUrl"
sep = `/_layouts`
url string
)
if adtl == nil {
return url
}
r := adtl[desiredKey]
point, ok := r.(*string)
if !ok {
return url
}
value := ptr.Val(point)
if len(value) == 0 {
return url
}
temp := strings.Split(value, sep)
url = temp[0]
return url
}
func (h libraryBackupHandler) FormatDisplayPath(
driveName string,
pb *path.Builder,
) string {
return "/" + driveName + "/" + pb.String()
}
func (h libraryBackupHandler) NewLocationIDer(
driveID string,
elems ...string,
) details.LocationIDer {
return details.NewSharePointLocationIDer(driveID, elems...)
}
func (h libraryBackupHandler) GetItemPermission(
ctx context.Context,
driveID, itemID string,
) (models.PermissionCollectionResponseable, error) {
return h.ac.GetItemPermission(ctx, driveID, itemID)
}
func (h libraryBackupHandler) GetItem(
ctx context.Context,
driveID, itemID string,
) (models.DriveItemable, error) {
return h.ac.GetItem(ctx, driveID, itemID)
}
// ---------------------------------------------------------------------------
// Restore
// ---------------------------------------------------------------------------
var _ onedrive.RestoreHandler = &libraryRestoreHandler{}
type libraryRestoreHandler struct {
ac api.Drives
}
func NewRestoreHandler(ac api.Client) *libraryRestoreHandler {
return &libraryRestoreHandler{ac.Drives()}
}
func (h libraryRestoreHandler) AugmentItemInfo(
dii details.ItemInfo,
item models.DriveItemable,
size int64,
parentPath *path.Builder,
) details.ItemInfo {
return augmentItemInfo(dii, item, size, parentPath)
}
func (h libraryRestoreHandler) NewItemContentUpload(
ctx context.Context,
driveID, itemID string,
) (models.UploadSessionable, error) {
return h.ac.NewItemContentUpload(ctx, driveID, itemID)
}
func (h libraryRestoreHandler) DeleteItemPermission(
ctx context.Context,
driveID, itemID, permissionID string,
) error {
return h.ac.DeleteItemPermission(ctx, driveID, itemID, permissionID)
}
func (h libraryRestoreHandler) PostItemPermissionUpdate(
ctx context.Context,
driveID, itemID string,
body *drives.ItemItemsItemInvitePostRequestBody,
) (drives.ItemItemsItemInviteResponseable, error) {
return h.ac.PostItemPermissionUpdate(ctx, driveID, itemID, body)
}
func (h libraryRestoreHandler) PostItemInContainer(
ctx context.Context,
driveID, parentFolderID string,
newItem models.DriveItemable,
) (models.DriveItemable, error) {
return h.ac.PostItemInContainer(ctx, driveID, parentFolderID, newItem)
}
func (h libraryRestoreHandler) GetFolderByName(
ctx context.Context,
driveID, parentFolderID, folderName string,
) (models.DriveItemable, error) {
return h.ac.GetFolderByName(ctx, driveID, parentFolderID, folderName)
}
func (h libraryRestoreHandler) GetRootFolder(
ctx context.Context,
driveID string,
) (models.DriveItemable, error) {
return h.ac.GetRootFolder(ctx, driveID)
}
// ---------------------------------------------------------------------------
// Common
// ---------------------------------------------------------------------------
func augmentItemInfo(
dii details.ItemInfo,
item models.DriveItemable,
size int64,
parentPath *path.Builder,
) details.ItemInfo {
var driveName, siteID, driveID, weburl, creatorEmail string
// TODO: we rely on this info for details/restore lookups,
// so if it's nil we have an issue, and will need an alternative
// way to source the data.
if item.GetCreatedBy() != nil && item.GetCreatedBy().GetUser() != nil {
// User is sometimes not available when created via some
// external applications (like backup/restore solutions)
additionalData := item.GetCreatedBy().GetUser().GetAdditionalData()
ed, ok := additionalData["email"]
if !ok {
ed = additionalData["displayName"]
}
if ed != nil {
creatorEmail = *ed.(*string)
}
}
gsi := item.GetSharepointIds()
if gsi != nil {
siteID = ptr.Val(gsi.GetSiteId())
weburl = ptr.Val(gsi.GetSiteUrl())
if len(weburl) == 0 {
weburl = constructWebURL(item.GetAdditionalData())
}
}
if item.GetParentReference() != nil {
driveID = ptr.Val(item.GetParentReference().GetDriveId())
driveName = strings.TrimSpace(ptr.Val(item.GetParentReference().GetName()))
}
var pps string
if parentPath != nil {
pps = parentPath.String()
}
dii.SharePoint = &details.SharePointInfo{
Created: ptr.Val(item.GetCreatedDateTime()),
DriveID: driveID,
DriveName: driveName,
ItemName: ptr.Val(item.GetName()),
ItemType: details.SharePointLibrary,
Modified: ptr.Val(item.GetLastModifiedDateTime()),
Owner: creatorEmail,
ParentPath: pps,
SiteID: siteID,
Size: size,
WebURL: weburl,
}
return dii
}

View File

@ -0,0 +1,58 @@
package sharepoint
import (
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path"
)
type LibraryBackupHandlerUnitSuite struct {
tester.Suite
}
func TestLibraryBackupHandlerUnitSuite(t *testing.T) {
suite.Run(t, &LibraryBackupHandlerUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// TestCanonicalPath verifies that the handler builds the expected
// tenant/service/owner/category-prefixed canonical path.
func (suite *LibraryBackupHandlerUnitSuite) TestCanonicalPath() {
	const (
		tenantID      = "tenant"
		resourceOwner = "resourceOwner"
	)

	table := []struct {
		name      string
		expect    string
		expectErr assert.ErrorAssertionFunc
	}{
		{
			name:      "sharepoint",
			expect:    "tenant/sharepoint/resourceOwner/libraries/prefix",
			expectErr: assert.NoError,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			result, err := libraryBackupHandler{}.CanonicalPath(
				path.Builder{}.Append("prefix"),
				tenantID,
				resourceOwner)
			test.expectErr(t, err, clues.ToCore(err))

			if result != nil {
				assert.Equal(t, test.expect, result.String())
			}
		})
	}
}
// TestServiceCat verifies the handler reports the sharepoint service
// and the libraries category.
func (suite *LibraryBackupHandlerUnitSuite) TestServiceCat() {
	t := suite.T()

	service, category := libraryBackupHandler{}.ServiceCat()
	assert.Equal(t, path.SharePointService, service)
	assert.Equal(t, path.LibrariesCategory, category)
}

View File

@ -6,7 +6,6 @@ import (
"fmt"
"io"
"runtime/trace"
"sync"
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
@ -18,12 +17,12 @@ import (
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
//----------------------------------------------------------------------------
@ -43,13 +42,11 @@ import (
func RestoreCollections(
ctx context.Context,
backupVersion int,
creds account.M365Config,
service graph.Servicer,
ac api.Client,
dest control.RestoreDestination,
opts control.Options,
dcs []data.RestoreCollection,
deets *details.Builder,
pool *sync.Pool,
errs *fault.Bus,
) (*support.ConnectorOperationStatus, error) {
var (
@ -83,22 +80,19 @@ func RestoreCollections(
case path.LibrariesCategory:
metrics, err = onedrive.RestoreCollection(
ictx,
creds,
libraryRestoreHandler{ac.Drives()},
backupVersion,
service,
dc,
caches,
onedrive.SharePointSource,
dest.ContainerName,
deets,
opts.RestorePermissions,
pool,
errs)
case path.ListsCategory:
metrics, err = RestoreListCollection(
ictx,
service,
ac.Stable,
dc,
dest.ContainerName,
deets,
@ -107,7 +101,7 @@ func RestoreCollections(
case path.PagesCategory:
metrics, err = RestorePageCollection(
ictx,
creds,
ac.Stable,
dc,
dest.ContainerName,
deets,
@ -292,7 +286,7 @@ func RestoreListCollection(
// - the context cancellation station. True iff context is canceled.
func RestorePageCollection(
ctx context.Context,
creds account.M365Config,
gs graph.Servicer,
dc data.RestoreCollection,
restoreContainerName string,
deets *details.Builder,
@ -309,17 +303,9 @@ func RestorePageCollection(
defer end()
adpt, err := graph.CreateAdapter(
creds.AzureTenantID,
creds.AzureClientID,
creds.AzureClientSecret)
if err != nil {
return metrics, clues.Wrap(err, "constructing graph client")
}
var (
el = errs.Local()
service = betaAPI.NewBetaService(adpt)
service = betaAPI.NewBetaService(gs.Adapter())
items = dc.Items(ctx, errs)
)

View File

@ -70,19 +70,24 @@ type BackupCollection interface {
// RestoreCollection is an extension of Collection that is used during restores.
type RestoreCollection interface {
Collection
FetchItemByNamer
}
type FetchItemByNamer interface {
// Fetch retrieves an item with the given name from the Collection if it
// exists. Items retrieved with Fetch may still appear in the channel returned
// by Items().
Fetch(ctx context.Context, name string) (Stream, error)
FetchItemByName(ctx context.Context, name string) (Stream, error)
}
// NotFoundRestoreCollection is a wrapper for a Collection that returns
// NoFetchRestoreCollection is a wrapper for a Collection that returns
// ErrNotFound for all Fetch calls.
type NotFoundRestoreCollection struct {
type NoFetchRestoreCollection struct {
Collection
FetchItemByNamer
}
func (c NotFoundRestoreCollection) Fetch(context.Context, string) (Stream, error) {
func (c NoFetchRestoreCollection) FetchItemByName(context.Context, string) (Stream, error) {
return nil, ErrNotFound
}

View File

@ -30,7 +30,7 @@ func (kdc *kopiaDataCollection) addStream(
ctx context.Context,
name string,
) error {
s, err := kdc.Fetch(ctx, name)
s, err := kdc.FetchItemByName(ctx, name)
if err != nil {
return err
}
@ -64,7 +64,7 @@ func (kdc kopiaDataCollection) FullPath() path.Path {
// Fetch returns the file with the given name from the collection as a
// data.Stream. Returns a data.ErrNotFound error if the file isn't in the
// collection.
func (kdc kopiaDataCollection) Fetch(
func (kdc kopiaDataCollection) FetchItemByName(
ctx context.Context,
name string,
) (data.Stream, error) {

View File

@ -264,7 +264,7 @@ func (suite *KopiaDataCollectionUnitSuite) TestReturnsStreams() {
}
}
func (suite *KopiaDataCollectionUnitSuite) TestFetch() {
func (suite *KopiaDataCollectionUnitSuite) TestFetchItemByName() {
var (
tenant = "a-tenant"
user = "a-user"
@ -381,7 +381,7 @@ func (suite *KopiaDataCollectionUnitSuite) TestFetch() {
expectedVersion: serializationVersion,
}
s, err := col.Fetch(ctx, test.inputName)
s, err := col.FetchItemByName(ctx, test.inputName)
test.lookupErr(t, err)

View File

@ -86,7 +86,7 @@ func (mc *mergeCollection) Items(
// match found or the first error that is not data.ErrNotFound. If multiple
// collections have the requested item, the instance in the collection with the
// lexicographically smallest storage path is returned.
func (mc *mergeCollection) Fetch(
func (mc *mergeCollection) FetchItemByName(
ctx context.Context,
name string,
) (data.Stream, error) {
@ -99,7 +99,7 @@ func (mc *mergeCollection) Fetch(
logger.Ctx(ictx).Debug("looking for item in merged collection")
s, err := c.Fetch(ictx, name)
s, err := c.FetchItemByName(ictx, name)
if err == nil {
return s, nil
} else if err != nil && !errors.Is(err, data.ErrNotFound) {

View File

@ -76,8 +76,8 @@ func (suite *MergeCollectionUnitSuite) TestItems() {
// Not testing fetch here so safe to use this wrapper.
cols := []data.RestoreCollection{
data.NotFoundRestoreCollection{Collection: c1},
data.NotFoundRestoreCollection{Collection: c2},
data.NoFetchRestoreCollection{Collection: c1},
data.NoFetchRestoreCollection{Collection: c2},
}
dc := &mergeCollection{fullPath: pth}
@ -123,7 +123,7 @@ func (suite *MergeCollectionUnitSuite) TestAddCollection_DifferentPathFails() {
assert.Error(t, err, clues.ToCore(err))
}
func (suite *MergeCollectionUnitSuite) TestFetch() {
func (suite *MergeCollectionUnitSuite) TestFetchItemByName() {
var (
fileData1 = []byte("abcdefghijklmnopqrstuvwxyz")
fileData2 = []byte("zyxwvutsrqponmlkjihgfedcba")
@ -275,7 +275,7 @@ func (suite *MergeCollectionUnitSuite) TestFetch() {
require.NoError(t, err, "adding collection", clues.ToCore(err))
}
s, err := dc.Fetch(ctx, test.fileName)
s, err := dc.FetchItemByName(ctx, test.fileName)
test.expectError(t, err, clues.ToCore(err))
if err != nil {

View File

@ -1465,7 +1465,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections_Path
// TestProduceRestoreCollections_Fetch tests that the Fetch function still works
// properly even with different Restore and Storage paths and items from
// different kopia directories.
func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections_Fetch() {
func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections_FetchItemByName() {
t := suite.T()
ctx, flush := tester.NewContext(t)
@ -1507,7 +1507,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections_Fetc
// Item from first kopia directory.
f := suite.files[suite.testPath1.String()][0]
item, err := result[0].Fetch(ctx, f.itemPath.Item())
item, err := result[0].FetchItemByName(ctx, f.itemPath.Item())
require.NoError(t, err, "fetching file", clues.ToCore(err))
r := item.ToReader()
@ -1520,7 +1520,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections_Fetc
// Item from second kopia directory.
f = suite.files[suite.testPath2.String()][0]
item, err = result[0].Fetch(ctx, f.itemPath.Item())
item, err = result[0].FetchItemByName(ctx, f.itemPath.Item())
require.NoError(t, err, "fetching file", clues.ToCore(err))
r = item.ToReader()

View File

@ -30,6 +30,7 @@ import (
"github.com/alcionai/corso/src/internal/connector/onedrive"
odConsts "github.com/alcionai/corso/src/internal/connector/onedrive/consts"
"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
"github.com/alcionai/corso/src/internal/connector/sharepoint"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/events"
evmock "github.com/alcionai/corso/src/internal/events/mock"
@ -347,7 +348,6 @@ func generateContainerOfItems(
ctx context.Context, //revive:disable-line:context-as-argument
gc *connector.GraphConnector,
service path.ServiceType,
acct account.Account,
cat path.CategoryType,
sel selectors.Selector,
tenantID, resourceOwner, driveID, destFldr string,
@ -397,7 +397,6 @@ func generateContainerOfItems(
deets, err := gc.ConsumeRestoreCollections(
ctx,
backupVersion,
acct,
sel,
dest,
opts,
@ -468,7 +467,7 @@ func buildCollections(
mc.Data[i] = c.items[i].data
}
collections = append(collections, data.NotFoundRestoreCollection{Collection: mc})
collections = append(collections, data.NoFetchRestoreCollection{Collection: mc})
}
return collections
@ -513,6 +512,7 @@ func toDataLayerPath(
type BackupOpIntegrationSuite struct {
tester.Suite
user, site string
ac api.Client
}
func TestBackupOpIntegrationSuite(t *testing.T) {
@ -524,8 +524,18 @@ func TestBackupOpIntegrationSuite(t *testing.T) {
}
func (suite *BackupOpIntegrationSuite) SetupSuite() {
suite.user = tester.M365UserID(suite.T())
suite.site = tester.M365SiteID(suite.T())
t := suite.T()
suite.user = tester.M365UserID(t)
suite.site = tester.M365SiteID(t)
a := tester.NewM365Account(t)
creds, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.ac, err = api.NewClient(creds)
require.NoError(t, err, clues.ToCore(err))
}
func (suite *BackupOpIntegrationSuite) TestNewBackupOperation() {
@ -847,7 +857,6 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
ctx,
gc,
service,
acct,
category,
selectors.NewExchangeRestore([]string{uidn.ID()}).Selector,
m365.AzureTenantID, uidn.ID(), "", destName,
@ -1029,7 +1038,6 @@ func testExchangeContinuousBackups(suite *BackupOpIntegrationSuite, toggles cont
ctx,
gc,
service,
acct,
category,
selectors.NewExchangeRestore([]string{uidn.ID()}).Selector,
m365.AzureTenantID, suite.user, "", container3,
@ -1316,9 +1324,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalOneDrive() {
gtdi := func(
t *testing.T,
ctx context.Context,
gs graph.Servicer,
) string {
d, err := api.GetUsersDrive(ctx, gs, suite.user)
d, err := suite.ac.Users().GetDefaultDrive(ctx, suite.user)
if err != nil {
err = graph.Wrap(ctx, err, "retrieving default user drive").
With("user", suite.user)
@ -1332,6 +1339,10 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalOneDrive() {
return id
}
grh := func(ac api.Client) onedrive.RestoreHandler {
return onedrive.NewRestoreHandler(ac)
}
runDriveIncrementalTest(
suite,
suite.user,
@ -1341,6 +1352,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalOneDrive() {
path.FilesCategory,
ic,
gtdi,
grh,
false)
}
@ -1355,9 +1367,8 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalSharePoint() {
gtdi := func(
t *testing.T,
ctx context.Context,
gs graph.Servicer,
) string {
d, err := api.GetSitesDefaultDrive(ctx, gs, suite.site)
d, err := suite.ac.Sites().GetDefaultDrive(ctx, suite.site)
if err != nil {
err = graph.Wrap(ctx, err, "retrieving default site drive").
With("site", suite.site)
@ -1371,6 +1382,10 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalSharePoint() {
return id
}
grh := func(ac api.Client) onedrive.RestoreHandler {
return sharepoint.NewRestoreHandler(ac)
}
runDriveIncrementalTest(
suite,
suite.site,
@ -1380,6 +1395,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_incrementalSharePoint() {
path.LibrariesCategory,
ic,
gtdi,
grh,
true)
}
@ -1390,7 +1406,8 @@ func runDriveIncrementalTest(
service path.ServiceType,
category path.CategoryType,
includeContainers func([]string) selectors.Selector,
getTestDriveID func(*testing.T, context.Context, graph.Servicer) string,
getTestDriveID func(*testing.T, context.Context) string,
getRestoreHandler func(api.Client) onedrive.RestoreHandler,
skipPermissionsTests bool,
) {
t := suite.T()
@ -1429,12 +1446,14 @@ func runDriveIncrementalTest(
require.NoError(t, err, clues.ToCore(err))
gc, sel := GCWithSelector(t, ctx, acct, resource, sel, nil, nil)
ac := gc.AC.Drives()
rh := getRestoreHandler(gc.AC)
roidn := inMock.NewProvider(sel.ID(), sel.Name())
var (
atid = creds.AzureTenantID
driveID = getTestDriveID(t, ctx, gc.Service)
driveID = getTestDriveID(t, ctx)
fileDBF = func(id, timeStamp, subject, body string) []byte {
return []byte(id + subject)
}
@ -1462,7 +1481,6 @@ func runDriveIncrementalTest(
ctx,
gc,
service,
acct,
category,
sel,
atid, roidn.ID(), driveID, destName,
@ -1488,7 +1506,7 @@ func runDriveIncrementalTest(
// onedrive package `getFolder` function.
itemURL := fmt.Sprintf("https://graph.microsoft.com/v1.0/drives/%s/root:/%s", driveID, destName)
resp, err := drives.
NewItemItemsDriveItemItemRequestBuilder(itemURL, gc.Service.Adapter()).
NewItemItemsDriveItemItemRequestBuilder(itemURL, gc.AC.Stable.Adapter()).
Get(ctx, nil)
require.NoError(t, err, "getting drive folder ID", "folder name", destName, clues.ToCore(err))
@ -1543,9 +1561,8 @@ func runDriveIncrementalTest(
driveItem := models.NewDriveItem()
driveItem.SetName(&newFileName)
driveItem.SetFile(models.NewFile())
newFile, err = onedrive.CreateItem(
newFile, err = ac.PostItemInContainer(
ctx,
gc.Service,
driveID,
targetContainer,
driveItem)
@ -1562,19 +1579,14 @@ func runDriveIncrementalTest(
{
name: "add permission to new file",
updateFiles: func(t *testing.T) {
driveItem := models.NewDriveItem()
driveItem.SetName(&newFileName)
driveItem.SetFile(models.NewFile())
err = onedrive.UpdatePermissions(
ctx,
creds,
gc.Service,
rh,
driveID,
*newFile.GetId(),
ptr.Val(newFile.GetId()),
[]metadata.Permission{writePerm},
[]metadata.Permission{},
permissionIDMappings,
)
permissionIDMappings)
require.NoErrorf(t, err, "adding permission to file %v", clues.ToCore(err))
// no expectedDeets: metadata isn't tracked
},
@ -1585,13 +1597,9 @@ func runDriveIncrementalTest(
{
name: "remove permission from new file",
updateFiles: func(t *testing.T) {
driveItem := models.NewDriveItem()
driveItem.SetName(&newFileName)
driveItem.SetFile(models.NewFile())
err = onedrive.UpdatePermissions(
ctx,
creds,
gc.Service,
rh,
driveID,
*newFile.GetId(),
[]metadata.Permission{},
@ -1608,13 +1616,9 @@ func runDriveIncrementalTest(
name: "add permission to container",
updateFiles: func(t *testing.T) {
targetContainer := containerIDs[container1]
driveItem := models.NewDriveItem()
driveItem.SetName(&newFileName)
driveItem.SetFile(models.NewFile())
err = onedrive.UpdatePermissions(
ctx,
creds,
gc.Service,
rh,
driveID,
targetContainer,
[]metadata.Permission{writePerm},
@ -1631,13 +1635,9 @@ func runDriveIncrementalTest(
name: "remove permission from container",
updateFiles: func(t *testing.T) {
targetContainer := containerIDs[container1]
driveItem := models.NewDriveItem()
driveItem.SetName(&newFileName)
driveItem.SetFile(models.NewFile())
err = onedrive.UpdatePermissions(
ctx,
creds,
gc.Service,
rh,
driveID,
targetContainer,
[]metadata.Permission{},
@ -1653,9 +1653,8 @@ func runDriveIncrementalTest(
{
name: "update contents of a file",
updateFiles: func(t *testing.T) {
err := api.PutDriveItemContent(
err := suite.ac.Drives().PutItemContent(
ctx,
gc.Service,
driveID,
ptr.Val(newFile.GetId()),
[]byte("new content"))
@ -1678,9 +1677,8 @@ func runDriveIncrementalTest(
parentRef.SetId(&container)
driveItem.SetParentReference(parentRef)
err := api.PatchDriveItem(
err := suite.ac.Drives().PatchItem(
ctx,
gc.Service,
driveID,
ptr.Val(newFile.GetId()),
driveItem)
@ -1702,9 +1700,8 @@ func runDriveIncrementalTest(
parentRef.SetId(&dest)
driveItem.SetParentReference(parentRef)
err := api.PatchDriveItem(
err := suite.ac.Drives().PatchItem(
ctx,
gc.Service,
driveID,
ptr.Val(newFile.GetId()),
driveItem)
@ -1723,9 +1720,8 @@ func runDriveIncrementalTest(
{
name: "delete file",
updateFiles: func(t *testing.T) {
err := api.DeleteDriveItem(
err := suite.ac.Drives().DeleteItem(
ctx,
newDeleteServicer(t),
driveID,
ptr.Val(newFile.GetId()))
require.NoErrorf(t, err, "deleting file %v", clues.ToCore(err))
@ -1748,9 +1744,8 @@ func runDriveIncrementalTest(
parentRef.SetId(&parent)
driveItem.SetParentReference(parentRef)
err := api.PatchDriveItem(
err := suite.ac.Drives().PatchItem(
ctx,
gc.Service,
driveID,
child,
driveItem)
@ -1777,9 +1772,8 @@ func runDriveIncrementalTest(
parentRef.SetId(&parent)
driveItem.SetParentReference(parentRef)
err := api.PatchDriveItem(
err := suite.ac.Drives().PatchItem(
ctx,
gc.Service,
driveID,
child,
driveItem)
@ -1800,9 +1794,8 @@ func runDriveIncrementalTest(
name: "delete a folder",
updateFiles: func(t *testing.T) {
container := containerIDs[containerRename]
err := api.DeleteDriveItem(
err := suite.ac.Drives().DeleteItem(
ctx,
newDeleteServicer(t),
driveID,
container)
require.NoError(t, err, "deleting folder", clues.ToCore(err))
@ -1821,7 +1814,6 @@ func runDriveIncrementalTest(
ctx,
gc,
service,
acct,
category,
sel,
atid, roidn.ID(), driveID, container3,
@ -1834,7 +1826,7 @@ func runDriveIncrementalTest(
"https://graph.microsoft.com/v1.0/drives/%s/root:/%s",
driveID,
container3)
resp, err := drives.NewItemItemsDriveItemItemRequestBuilder(itemURL, gc.Service.Adapter()).
resp, err := drives.NewItemItemsDriveItemItemRequestBuilder(itemURL, gc.AC.Stable.Adapter()).
Get(ctx, nil)
require.NoError(t, err, "getting drive folder ID", "folder name", container3, clues.ToCore(err))
@ -1928,7 +1920,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveOwnerMigration() {
connector.Users)
require.NoError(t, err, clues.ToCore(err))
userable, err := gc.Discovery.Users().GetByID(ctx, suite.user)
userable, err := gc.AC.Users().GetByID(ctx, suite.user)
require.NoError(t, err, clues.ToCore(err))
uid := ptr.Val(userable.GetId())
@ -2046,19 +2038,3 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_sharePoint() {
runAndCheckBackup(t, ctx, &bo, mb, false)
checkBackupIsInManifests(t, ctx, kw, &bo, sels, suite.site, path.LibrariesCategory)
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
// newDeleteServicer builds a fresh graph.Servicer from the test M365 account
// credentials, for issuing delete requests against the graph API in tests.
// Fails the test immediately if credentials or adapter creation error out.
func newDeleteServicer(t *testing.T) graph.Servicer {
	acct := tester.NewM365Account(t)
	m365, err := acct.M365Config()
	require.NoError(t, err, clues.ToCore(err))
	// NOTE(review): acct.ID() is passed where other CreateAdapter callers pass
	// the azure tenant ID — confirm acct.ID() matches m365.AzureTenantID.
	a, err := graph.CreateAdapter(acct.ID(), m365.AzureClientID, m365.AzureClientSecret)
	require.NoError(t, err, clues.ToCore(err))
	return graph.NewService(a)
}

View File

@ -727,6 +727,8 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
itemParents1, err := path.GetDriveFolderPath(itemPath1)
require.NoError(suite.T(), err, clues.ToCore(err))
itemParents1String := itemParents1.String()
table := []struct {
name string
populatedModels map[model.StableID]backup.Backup
@ -899,7 +901,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
ItemInfo: details.ItemInfo{
OneDrive: &details.OneDriveInfo{
ItemType: details.OneDriveItem,
ParentPath: itemParents1,
ParentPath: itemParents1String,
Size: 42,
},
},

View File

@ -7,7 +7,6 @@ import (
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
@ -37,7 +36,6 @@ type (
ConsumeRestoreCollections(
ctx context.Context,
backupVersion int,
acct account.Account,
selector selectors.Selector,
dest control.RestoreDestination,
opts control.Options,

View File

@ -553,7 +553,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
mr: mockManifestRestorer{
mockRestoreProducer: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"id": {data.NotFoundRestoreCollection{Collection: mockColl{id: "id_coll"}}},
"id": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id_coll"}}},
},
},
mans: []kopia.ManifestEntry{makeMan(path.EmailCategory, "id", "", "")},
@ -580,8 +580,8 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
mr: mockManifestRestorer{
mockRestoreProducer: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"id": {data.NotFoundRestoreCollection{Collection: mockColl{id: "id_coll"}}},
"incmpl_id": {data.NotFoundRestoreCollection{Collection: mockColl{id: "incmpl_id_coll"}}},
"id": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id_coll"}}},
"incmpl_id": {data.NoFetchRestoreCollection{Collection: mockColl{id: "incmpl_id_coll"}}},
},
},
mans: []kopia.ManifestEntry{
@ -600,7 +600,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
mr: mockManifestRestorer{
mockRestoreProducer: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"id": {data.NotFoundRestoreCollection{Collection: mockColl{id: "id_coll"}}},
"id": {data.NoFetchRestoreCollection{Collection: mockColl{id: "id_coll"}}},
},
},
mans: []kopia.ManifestEntry{makeMan(path.EmailCategory, "id", "", "bid")},
@ -616,8 +616,8 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
mr: mockManifestRestorer{
mockRestoreProducer: mockRestoreProducer{
collsByID: map[string][]data.RestoreCollection{
"mail": {data.NotFoundRestoreCollection{Collection: mockColl{id: "mail_coll"}}},
"contact": {data.NotFoundRestoreCollection{Collection: mockColl{id: "contact_coll"}}},
"mail": {data.NoFetchRestoreCollection{Collection: mockColl{id: "mail_coll"}}},
"contact": {data.NoFetchRestoreCollection{Collection: mockColl{id: "contact_coll"}}},
},
},
mans: []kopia.ManifestEntry{
@ -681,7 +681,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
for _, dc := range dcs {
if !assert.IsTypef(
t,
data.NotFoundRestoreCollection{},
data.NoFetchRestoreCollection{},
dc,
"unexpected type returned [%T]",
dc,
@ -689,7 +689,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
continue
}
tmp := dc.(data.NotFoundRestoreCollection)
tmp := dc.(data.NoFetchRestoreCollection)
if !assert.IsTypef(
t,

View File

@ -36,12 +36,12 @@ type RestoreOperation struct {
operation
BackupID model.StableID `json:"backupID"`
Destination control.RestoreDestination `json:"destination"`
Results RestoreResults `json:"results"`
Selectors selectors.Selector `json:"selectors"`
Destination control.RestoreDestination `json:"destination"`
Version string `json:"version"`
account account.Account
acct account.Account
rc inject.RestoreConsumer
}
@ -66,11 +66,11 @@ func NewRestoreOperation(
) (RestoreOperation, error) {
op := RestoreOperation{
operation: newOperation(opts, bus, kw, sw),
acct: acct,
BackupID: backupID,
Selectors: sel,
Destination: dest,
Selectors: sel,
Version: "v0",
account: acct,
rc: rc,
}
if err := op.validate(); err != nil {
@ -116,7 +116,7 @@ func (op *RestoreOperation) Run(ctx context.Context) (restoreDetails *details.De
restoreID: uuid.NewString(),
}
start = time.Now()
sstore = streamstore.NewStreamer(op.kopia, op.account.ID(), op.Selectors.PathService())
sstore = streamstore.NewStreamer(op.kopia, op.acct.ID(), op.Selectors.PathService())
)
// -----
@ -135,7 +135,7 @@ func (op *RestoreOperation) Run(ctx context.Context) (restoreDetails *details.De
ctx = clues.Add(
ctx,
"tenant_id", clues.Hide(op.account.ID()),
"tenant_id", clues.Hide(op.acct.ID()),
"backup_id", op.BackupID,
"service", op.Selectors.Service,
"destination_container", clues.Hide(op.Destination.ContainerName))
@ -256,7 +256,6 @@ func (op *RestoreOperation) do(
ctx,
op.rc,
bup.Version,
op.account,
op.Selectors,
op.Destination,
op.Options,
@ -314,7 +313,6 @@ func consumeRestoreCollections(
ctx context.Context,
rc inject.RestoreConsumer,
backupVersion int,
acct account.Account,
sel selectors.Selector,
dest control.RestoreDestination,
opts control.Options,
@ -330,7 +328,6 @@ func consumeRestoreCollections(
deets, err := rc.ConsumeRestoreCollections(
ctx,
backupVersion,
acct,
sel,
dest,
opts,

View File

@ -15,7 +15,6 @@ import (
"github.com/alcionai/corso/src/internal/connector"
"github.com/alcionai/corso/src/internal/connector/exchange"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/mock"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/events"
@ -50,7 +49,6 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
kw = &kopia.Wrapper{}
sw = &store.Wrapper{}
gc = &mock.GraphConnector{}
acct = account.Account{}
now = time.Now()
dest = tester.DefaultTestRestoreDestination("")
)
@ -70,7 +68,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
NumBytes: 42,
},
cs: []data.RestoreCollection{
data.NotFoundRestoreCollection{
data.NoFetchRestoreCollection{
Collection: &exchMock.DataCollection{},
},
},
@ -112,7 +110,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
kw,
sw,
gc,
acct,
account.Account{},
"foo",
selectors.Selector{DiscreteOwner: "test"},
dest,
@ -220,7 +218,6 @@ func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() {
kw = &kopia.Wrapper{}
sw = &store.Wrapper{}
gc = &mock.GraphConnector{}
acct = tester.NewM365Account(suite.T())
dest = tester.DefaultTestRestoreDestination("")
opts = control.Defaults()
)
@ -230,18 +227,19 @@ func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() {
kw *kopia.Wrapper
sw *store.Wrapper
rc inject.RestoreConsumer
acct account.Account
targets []string
errCheck assert.ErrorAssertionFunc
}{
{"good", kw, sw, gc, acct, nil, assert.NoError},
{"missing kopia", nil, sw, gc, acct, nil, assert.Error},
{"missing modelstore", kw, nil, gc, acct, nil, assert.Error},
{"missing restore consumer", kw, sw, nil, acct, nil, assert.Error},
{"good", kw, sw, gc, nil, assert.NoError},
{"missing kopia", nil, sw, gc, nil, assert.Error},
{"missing modelstore", kw, nil, gc, nil, assert.Error},
{"missing restore consumer", kw, sw, nil, nil, assert.Error},
}
for _, test := range table {
suite.Run(test.name, func() {
ctx, flush := tester.NewContext(suite.T())
t := suite.T()
ctx, flush := tester.NewContext(t)
defer flush()
_, err := NewRestoreOperation(
@ -250,12 +248,12 @@ func (suite *RestoreOpIntegrationSuite) TestNewRestoreOperation() {
test.kw,
test.sw,
test.rc,
test.acct,
tester.NewM365Account(t),
"backup-id",
selectors.Selector{DiscreteOwner: "test"},
dest,
evmock.NewBus())
test.errCheck(suite.T(), err, clues.ToCore(err))
test.errCheck(t, err, clues.ToCore(err))
})
}
}
@ -346,18 +344,7 @@ func setupSharePointBackup(
evmock.NewBus())
require.NoError(t, err, clues.ToCore(err))
// get the count of drives
m365, err := acct.M365Config()
require.NoError(t, err, clues.ToCore(err))
adpt, err := graph.CreateAdapter(
m365.AzureTenantID,
m365.AzureClientID,
m365.AzureClientSecret)
require.NoError(t, err, clues.ToCore(err))
service := graph.NewService(adpt)
spPgr := api.NewSiteDrivePager(service, owner, []string{"id", "name"})
spPgr := gc.AC.Drives().NewSiteDrivePager(owner, []string{"id", "name"})
drives, err := api.GetAllDrives(ctx, spPgr, true, 3)
require.NoError(t, err, clues.ToCore(err))

View File

@ -30,13 +30,13 @@ func ToDrivePath(p Path) (*DrivePath, error) {
}
// Returns the path to the folder within the drive (i.e. under `root:`)
func GetDriveFolderPath(p Path) (string, error) {
func GetDriveFolderPath(p Path) (*Builder, error) {
drivePath, err := ToDrivePath(p)
if err != nil {
return "", err
return nil, err
}
return Builder{}.Append(drivePath.Folders...).String(), nil
return Builder{}.Append(drivePath.Folders...), nil
}
// BuildDriveLocation takes a driveID and a set of unescaped element names,

View File

@ -450,8 +450,7 @@ func (pb Builder) ToDataLayerPath(
tenant,
service.String(),
user,
category.String(),
),
category.String()),
service: service,
category: category,
hasItem: isItem,

View File

@ -1,6 +1,9 @@
package api
import (
"context"
"net/http"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/connector/graph"
@ -27,6 +30,11 @@ type Client struct {
// downloading large items such as drive item content or outlook
// mail and event attachments.
LargeItem graph.Servicer
// The Requester provides a client specifically for calling
// arbitrary urls instead of constructing queries using the
// graph api client.
Requester graph.Requester
}
// NewClient produces a new exchange api client. Must be used in
@ -42,7 +50,9 @@ func NewClient(creds account.M365Config) (Client, error) {
return Client{}, err
}
return Client{creds, s, li}, nil
rqr := graph.NewNoTimeoutHTTPWrapper()
return Client{creds, s, li, rqr}, nil
}
// Service generates a new graph servicer. New servicers are used for paged
@ -75,3 +85,20 @@ func newLargeItemService(creds account.M365Config) (*graph.Service, error) {
return a, nil
}
// Getter performs an ad-hoc GET request against an arbitrary url with the
// given headers, returning the raw http response.
type Getter interface {
	Get(
		ctx context.Context,
		url string,
		headers map[string]string,
	) (*http.Response, error)
}
// Get performs an ad-hoc get request against the given url using the
// client's graph.Requester, instead of constructing a graph sdk query.
// The caller is responsible for closing the returned response body.
func (c Client) Get(
	ctx context.Context,
	url string,
	headers map[string]string,
) (*http.Response, error) {
	return c.Requester.Request(ctx, http.MethodGet, url, nil, headers)
}

View File

@ -10,14 +10,12 @@ import (
"github.com/stretchr/testify/suite"
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
)
type ExchangeServiceSuite struct {
tester.Suite
gs graph.Servicer
credentials account.M365Config
}
@ -38,14 +36,6 @@ func (suite *ExchangeServiceSuite) SetupSuite() {
require.NoError(t, err, clues.ToCore(err))
suite.credentials = m365
adpt, err := graph.CreateAdapter(
m365.AzureTenantID,
m365.AzureClientID,
m365.AzureClientSecret)
require.NoError(t, err, clues.ToCore(err))
suite.gs = graph.NewService(adpt)
}
//nolint:lll

View File

@ -296,9 +296,8 @@ type contactPager struct {
options *users.ItemContactFoldersItemContactsRequestBuilderGetRequestConfiguration
}
func NewContactPager(
func (c Contacts) NewContactPager(
ctx context.Context,
gs graph.Servicer,
userID, containerID string,
immutableIDs bool,
) itemPager {
@ -309,7 +308,7 @@ func NewContactPager(
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)),
}
builder := gs.
builder := c.Stable.
Client().
Users().
ByUserId(userID).
@ -317,7 +316,7 @@ func NewContactPager(
ByContactFolderId(containerID).
Contacts()
return &contactPager{gs, builder, config}
return &contactPager{c.Stable, builder, config}
}
func (p *contactPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
@ -364,9 +363,8 @@ func getContactDeltaBuilder(
return builder
}
func NewContactDeltaPager(
func (c Contacts) NewContactDeltaPager(
ctx context.Context,
gs graph.Servicer,
userID, containerID, oldDelta string,
immutableIDs bool,
) itemPager {
@ -379,12 +377,12 @@ func NewContactDeltaPager(
var builder *users.ItemContactFoldersItemContactsDeltaRequestBuilder
if oldDelta != "" {
builder = users.NewItemContactFoldersItemContactsDeltaRequestBuilder(oldDelta, gs.Adapter())
builder = users.NewItemContactFoldersItemContactsDeltaRequestBuilder(oldDelta, c.Stable.Adapter())
} else {
builder = getContactDeltaBuilder(ctx, gs, userID, containerID, options)
builder = getContactDeltaBuilder(ctx, c.Stable, userID, containerID, options)
}
return &contactDeltaPager{gs, userID, containerID, builder, options}
return &contactDeltaPager{c.Stable, userID, containerID, builder, options}
}
func (p *contactDeltaPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
@ -419,8 +417,8 @@ func (c Contacts) GetAddedAndRemovedItemIDs(
"category", selectors.ExchangeContact,
"container_id", containerID)
pager := NewContactPager(ctx, c.Stable, userID, containerID, immutableIDs)
deltaPager := NewContactDeltaPager(ctx, c.Stable, userID, containerID, oldDelta, immutableIDs)
pager := c.NewContactPager(ctx, userID, containerID, immutableIDs)
deltaPager := c.NewContactDeltaPager(ctx, userID, containerID, oldDelta, immutableIDs)
return getAddedAndRemovedItemIDs(ctx, c.Stable, pager, deltaPager, oldDelta, canMakeDeltaQueries)
}

View File

@ -9,184 +9,41 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/pkg/account"
)
// ---------------------------------------------------------------------------
// Drives
// controller
// ---------------------------------------------------------------------------
func GetUsersDrive(
ctx context.Context,
srv graph.Servicer,
user string,
) (models.Driveable, error) {
d, err := srv.Client().
Users().
ByUserId(user).
Drive().
Get(ctx, nil)
if err != nil {
return nil, graph.Wrap(ctx, err, "getting user's drive")
}
return d, nil
func (c Client) Drives() Drives {
return Drives{c}
}
func GetSitesDefaultDrive(
ctx context.Context,
srv graph.Servicer,
site string,
) (models.Driveable, error) {
d, err := srv.Client().
Sites().
BySiteId(site).
Drive().
Get(ctx, nil)
if err != nil {
return nil, graph.Wrap(ctx, err, "getting site's drive")
}
return d, nil
}
func GetDriveRoot(
ctx context.Context,
srv graph.Servicer,
driveID string,
) (models.DriveItemable, error) {
root, err := srv.Client().
Drives().
ByDriveId(driveID).
Root().
Get(ctx, nil)
if err != nil {
return nil, graph.Wrap(ctx, err, "getting drive root")
}
return root, nil
// Drives is an interface-compliant provider of the client.
type Drives struct {
Client
}
// ---------------------------------------------------------------------------
// Drive Items
// Folders
// ---------------------------------------------------------------------------
// generic drive item getter
func GetDriveItem(
ctx context.Context,
srv graph.Servicer,
driveID, itemID string,
) (models.DriveItemable, error) {
di, err := srv.Client().
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(itemID).
Get(ctx, nil)
if err != nil {
return nil, graph.Wrap(ctx, err, "getting item")
}
return di, nil
}
func PostDriveItem(
ctx context.Context,
srv graph.Servicer,
driveID, itemID string,
) (models.UploadSessionable, error) {
session := drives.NewItemItemsItemCreateUploadSessionPostRequestBody()
r, err := srv.Client().
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(itemID).
CreateUploadSession().
Post(ctx, session, nil)
if err != nil {
return nil, graph.Wrap(ctx, err, "uploading drive item")
}
return r, nil
}
func PatchDriveItem(
ctx context.Context,
srv graph.Servicer,
driveID, itemID string,
item models.DriveItemable,
) error {
_, err := srv.Client().
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(itemID).
Patch(ctx, item, nil)
if err != nil {
return graph.Wrap(ctx, err, "patching drive item")
}
return nil
}
func PutDriveItemContent(
ctx context.Context,
srv graph.Servicer,
driveID, itemID string,
content []byte,
) error {
_, err := srv.Client().
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(itemID).
Content().
Put(ctx, content, nil)
if err != nil {
return graph.Wrap(ctx, err, "uploading drive item content")
}
return nil
}
// deletes require unique http clients
// https://github.com/alcionai/corso/issues/2707
func DeleteDriveItem(
ctx context.Context,
gs graph.Servicer,
driveID, itemID string,
) error {
err := gs.Client().
Drives().
ByDriveId(driveID).
Items().
ByDriveItemId(itemID).
Delete(ctx, nil)
if err != nil {
return graph.Wrap(ctx, err, "deleting item").With("item_id", itemID)
}
return nil
}
const itemByPathRawURLFmt = "https://graph.microsoft.com/v1.0/drives/%s/items/%s:/%s"
var ErrFolderNotFound = clues.New("folder not found")
// GetFolderByName will lookup the specified folder by name within the parentFolderID folder.
func GetFolderByName(
func (c Drives) GetFolderByName(
ctx context.Context,
srv graph.Servicer,
driveID, parentFolderID, folder string,
driveID, parentFolderID, folderID string,
) (models.DriveItemable, error) {
// The `Children().Get()` API doesn't yet support $filter, so using that to find a folder
// will be sub-optimal.
// Instead, we leverage OneDrive path-based addressing -
// https://learn.microsoft.com/en-us/graph/onedrive-addressing-driveitems#path-based-addressing
// - which allows us to lookup an item by its path relative to the parent ID
rawURL := fmt.Sprintf(itemByPathRawURLFmt, driveID, parentFolderID, folder)
builder := drives.NewItemItemsDriveItemItemRequestBuilder(rawURL, srv.Adapter())
rawURL := fmt.Sprintf(itemByPathRawURLFmt, driveID, parentFolderID, folderID)
builder := drives.NewItemItemsDriveItemItemRequestBuilder(rawURL, c.Stable.Adapter())
foundItem, err := builder.Get(ctx, nil)
if err != nil {
@ -205,16 +62,163 @@ func GetFolderByName(
return foundItem, nil
}
// GetRootFolder fetches the root folder item of the given drive.
func (c Drives) GetRootFolder(
	ctx context.Context,
	driveID string,
) (models.DriveItemable, error) {
	builder := c.Stable.
		Client().
		Drives().
		ByDriveId(driveID).
		Root()

	root, err := builder.Get(ctx, nil)
	if err != nil {
		return nil, graph.Wrap(ctx, err, "getting drive root")
	}

	return root, nil
}
// ---------------------------------------------------------------------------
// Items
// ---------------------------------------------------------------------------
// GetItem retrieves a single drive item by its ID within the given drive.
func (c Drives) GetItem(
	ctx context.Context,
	driveID, itemID string,
) (models.DriveItemable, error) {
	builder := c.Stable.
		Client().
		Drives().
		ByDriveId(driveID).
		Items().
		ByDriveItemId(itemID)

	di, err := builder.Get(ctx, nil)
	if err != nil {
		return nil, graph.Wrap(ctx, err, "getting item")
	}

	return di, nil
}
// NewItemContentUpload opens an upload session for writing content into
// the identified drive item.
func (c Drives) NewItemContentUpload(
	ctx context.Context,
	driveID, itemID string,
) (models.UploadSessionable, error) {
	body := drives.NewItemItemsItemCreateUploadSessionPostRequestBody()

	builder := c.Stable.
		Client().
		Drives().
		ByDriveId(driveID).
		Items().
		ByDriveItemId(itemID).
		CreateUploadSession()

	session, err := builder.Post(ctx, body, nil)
	if err != nil {
		return nil, graph.Wrap(ctx, err, "uploading drive item")
	}

	return session, nil
}
const itemChildrenRawURLFmt = "https://graph.microsoft.com/v1.0/drives/%s/items/%s/children"
// PostItemInContainer creates newItem as a child of parentFolderID in the
// given drive, returning the item as materialized by the service.
func (c Drives) PostItemInContainer(
	ctx context.Context,
	driveID, parentFolderID string,
	newItem models.DriveItemable,
) (models.DriveItemable, error) {
	// The Graph SDK doesn't yet expose a POST on `/children`, so the rawUrl
	// is built by hand, as recommended here:
	// https://github.com/microsoftgraph/msgraph-sdk-go/issues/155#issuecomment-1136254310
	rawURL := fmt.Sprintf(itemChildrenRawURLFmt, driveID, parentFolderID)
	builder := drives.NewItemItemsRequestBuilder(rawURL, c.Stable.Adapter())

	created, err := builder.Post(ctx, newItem, nil)
	if err != nil {
		return nil, graph.Wrap(ctx, err, "creating item in folder")
	}

	return created, nil
}
// PatchItem applies the given item's properties to the identified drive
// item. The patched result from the service is discarded.
func (c Drives) PatchItem(
	ctx context.Context,
	driveID, itemID string,
	item models.DriveItemable,
) error {
	builder := c.Stable.
		Client().
		Drives().
		ByDriveId(driveID).
		Items().
		ByDriveItemId(itemID)

	if _, err := builder.Patch(ctx, item, nil); err != nil {
		return graph.Wrap(ctx, err, "patching drive item")
	}

	return nil
}
// PutItemContent uploads the byte content for the identified drive item.
// The driveItem returned by the service is discarded.
func (c Drives) PutItemContent(
	ctx context.Context,
	driveID, itemID string,
	content []byte,
) error {
	builder := c.Stable.
		Client().
		Drives().
		ByDriveId(driveID).
		Items().
		ByDriveItemId(itemID).
		Content()

	if _, err := builder.Put(ctx, content, nil); err != nil {
		return graph.Wrap(ctx, err, "uploading drive item content")
	}

	return nil
}
// DeleteItem removes the identified item from the drive.
//
// Deletes require unique http clients, so a fresh service is created per
// call instead of reusing c.Stable.
// https://github.com/alcionai/corso/issues/2707
func (c Drives) DeleteItem(
	ctx context.Context,
	driveID, itemID string,
) error {
	srv, err := c.Service()
	if err != nil {
		// NOTE: message previously said "delete item permission", a
		// copy-paste from DeleteItemPermission; this func deletes the item.
		return graph.Wrap(ctx, err, "creating service to delete item")
	}

	err = srv.
		Client().
		Drives().
		ByDriveId(driveID).
		Items().
		ByDriveItemId(itemID).
		Delete(ctx, nil)
	if err != nil {
		return graph.Wrap(ctx, err, "deleting item").With("item_id", itemID)
	}

	return nil
}
// ---------------------------------------------------------------------------
// Permissions
// ---------------------------------------------------------------------------
func GetItemPermission(
func (c Drives) GetItemPermission(
ctx context.Context,
service graph.Servicer,
driveID, itemID string,
) (models.PermissionCollectionResponseable, error) {
perm, err := service.
perm, err := c.Stable.
Client().
Drives().
ByDriveId(driveID).
@ -229,15 +233,15 @@ func GetItemPermission(
return perm, nil
}
func PostItemPermissionUpdate(
func (c Drives) PostItemPermissionUpdate(
ctx context.Context,
service graph.Servicer,
driveID, itemID string,
body *drives.ItemItemsItemInvitePostRequestBody,
) (drives.ItemItemsItemInviteResponseable, error) {
ctx = graph.ConsumeNTokens(ctx, graph.PermissionsLC)
itm, err := service.Client().
itm, err := c.Stable.
Client().
Drives().
ByDriveId(driveID).
Items().
@ -251,17 +255,18 @@ func PostItemPermissionUpdate(
return itm, nil
}
func DeleteDriveItemPermission(
func (c Drives) DeleteItemPermission(
ctx context.Context,
creds account.M365Config,
driveID, itemID, permissionID string,
) error {
a, err := graph.CreateAdapter(creds.AzureTenantID, creds.AzureClientID, creds.AzureClientSecret)
// deletes require unique http clients
// https://github.com/alcionai/corso/issues/2707
srv, err := c.Service()
if err != nil {
return graph.Wrap(ctx, err, "creating adapter to delete item permission")
}
err = graph.NewService(a).
err = srv.
Client().
Drives().
ByDriveId(driveID).

View File

@ -21,18 +21,26 @@ import (
// item pager
// ---------------------------------------------------------------------------
type driveItemPager struct {
type DriveItemEnumerator interface {
GetPage(context.Context) (DeltaPageLinker, error)
SetNext(nextLink string)
Reset()
ValuesIn(DeltaPageLinker) ([]models.DriveItemable, error)
}
var _ DriveItemEnumerator = &DriveItemPager{}
type DriveItemPager struct {
gs graph.Servicer
driveID string
builder *drives.ItemItemsItemDeltaRequestBuilder
options *drives.ItemItemsItemDeltaRequestBuilderGetRequestConfiguration
}
func NewItemPager(
gs graph.Servicer,
func (c Drives) NewItemPager(
driveID, link string,
selectFields []string,
) *driveItemPager {
) *DriveItemPager {
preferHeaderItems := []string{
"deltashowremovedasdeleted",
"deltatraversepermissiongaps",
@ -48,24 +56,25 @@ func NewItemPager(
},
}
res := &driveItemPager{
gs: gs,
res := &DriveItemPager{
gs: c.Stable,
driveID: driveID,
options: requestConfig,
builder: gs.Client().
builder: c.Stable.
Client().
Drives().
ByDriveId(driveID).
Items().ByDriveItemId(onedrive.RootID).Delta(),
}
if len(link) > 0 {
res.builder = drives.NewItemItemsItemDeltaRequestBuilder(link, gs.Adapter())
res.builder = drives.NewItemItemsItemDeltaRequestBuilder(link, c.Stable.Adapter())
}
return res
}
func (p *driveItemPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
func (p *DriveItemPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
var (
resp DeltaPageLinker
err error
@ -79,11 +88,11 @@ func (p *driveItemPager) GetPage(ctx context.Context) (DeltaPageLinker, error) {
return resp, nil
}
func (p *driveItemPager) SetNext(link string) {
func (p *DriveItemPager) SetNext(link string) {
p.builder = drives.NewItemItemsItemDeltaRequestBuilder(link, p.gs.Adapter())
}
func (p *driveItemPager) Reset() {
func (p *DriveItemPager) Reset() {
p.builder = p.gs.Client().
Drives().
ByDriveId(p.driveID).
@ -92,7 +101,7 @@ func (p *driveItemPager) Reset() {
Delta()
}
func (p *driveItemPager) ValuesIn(l DeltaPageLinker) ([]models.DriveItemable, error) {
func (p *DriveItemPager) ValuesIn(l DeltaPageLinker) ([]models.DriveItemable, error) {
return getValues[models.DriveItemable](l)
}
@ -100,6 +109,8 @@ func (p *driveItemPager) ValuesIn(l DeltaPageLinker) ([]models.DriveItemable, er
// user pager
// ---------------------------------------------------------------------------
var _ DrivePager = &userDrivePager{}
type userDrivePager struct {
userID string
gs graph.Servicer
@ -107,8 +118,7 @@ type userDrivePager struct {
options *users.ItemDrivesRequestBuilderGetRequestConfiguration
}
func NewUserDrivePager(
gs graph.Servicer,
func (c Drives) NewUserDrivePager(
userID string,
fields []string,
) *userDrivePager {
@ -120,9 +130,13 @@ func NewUserDrivePager(
res := &userDrivePager{
userID: userID,
gs: gs,
gs: c.Stable,
options: requestConfig,
builder: gs.Client().Users().ByUserId(userID).Drives(),
builder: c.Stable.
Client().
Users().
ByUserId(userID).
Drives(),
}
return res
@ -140,7 +154,12 @@ func (p *userDrivePager) GetPage(ctx context.Context) (PageLinker, error) {
err error
)
d, err := p.gs.Client().Users().ByUserId(p.userID).Drive().Get(ctx, nil)
d, err := p.gs.
Client().
Users().
ByUserId(p.userID).
Drive().
Get(ctx, nil)
if err != nil {
return nil, graph.Stack(ctx, err)
}
@ -180,6 +199,8 @@ func (p *userDrivePager) ValuesIn(l PageLinker) ([]models.Driveable, error) {
// site pager
// ---------------------------------------------------------------------------
var _ DrivePager = &siteDrivePager{}
type siteDrivePager struct {
gs graph.Servicer
builder *sites.ItemDrivesRequestBuilder
@ -191,8 +212,7 @@ type siteDrivePager struct {
// in a query. NOTE: Fields are case-sensitive. Incorrect field settings will
// cause errors during later paging.
// Available fields: https://learn.microsoft.com/en-us/graph/api/resources/drive?view=graph-rest-1.0
func NewSiteDrivePager(
gs graph.Servicer,
func (c Drives) NewSiteDrivePager(
siteID string,
fields []string,
) *siteDrivePager {
@ -203,9 +223,13 @@ func NewSiteDrivePager(
}
res := &siteDrivePager{
gs: gs,
gs: c.Stable,
options: requestConfig,
builder: gs.Client().Sites().BySiteId(siteID).Drives(),
builder: c.Stable.
Client().
Sites().
BySiteId(siteID).
Drives(),
}
return res
@ -313,7 +337,8 @@ func GetAllDrives(
func getValues[T any](l PageLinker) ([]T, error) {
page, ok := l.(interface{ GetValue() []T })
if !ok {
return nil, clues.New("page does not comply with GetValue() interface").With("page_item_type", fmt.Sprintf("%T", l))
return nil, clues.New("page does not comply with GetValue() interface").
With("page_item_type", fmt.Sprintf("%T", l))
}
return page.GetValue(), nil

View File

@ -8,7 +8,6 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/services/m365/api"
@ -17,23 +16,18 @@ import (
type OneDriveAPISuite struct {
tester.Suite
creds account.M365Config
service graph.Servicer
ac api.Client
}
func (suite *OneDriveAPISuite) SetupSuite() {
t := suite.T()
a := tester.NewM365Account(t)
m365, err := a.M365Config()
creds, err := a.M365Config()
require.NoError(t, err, clues.ToCore(err))
suite.creds = m365
adpt, err := graph.CreateAdapter(
m365.AzureTenantID,
m365.AzureClientID,
m365.AzureClientSecret)
suite.creds = creds
suite.ac, err = api.NewClient(creds)
require.NoError(t, err, clues.ToCore(err))
suite.service = graph.NewService(adpt)
}
func TestOneDriveAPIs(t *testing.T) {
@ -51,7 +45,8 @@ func (suite *OneDriveAPISuite) TestCreatePagerAndGetPage() {
defer flush()
siteID := tester.M365SiteID(t)
pager := api.NewSiteDrivePager(suite.service, siteID, []string{"name"})
pager := suite.ac.Drives().NewSiteDrivePager(siteID, []string{"name"})
a, err := pager.GetPage(ctx)
assert.NoError(t, err, clues.ToCore(err))
assert.NotNil(t, a)

View File

@ -446,9 +446,8 @@ type eventPager struct {
options *users.ItemCalendarsItemEventsRequestBuilderGetRequestConfiguration
}
func NewEventPager(
func (c Events) NewEventPager(
ctx context.Context,
gs graph.Servicer,
userID, containerID string,
immutableIDs bool,
) (itemPager, error) {
@ -456,7 +455,7 @@ func NewEventPager(
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)),
}
builder := gs.
builder := c.Stable.
Client().
Users().
ByUserId(userID).
@ -464,7 +463,7 @@ func NewEventPager(
ByCalendarId(containerID).
Events()
return &eventPager{gs, builder, options}, nil
return &eventPager{c.Stable, builder, options}, nil
}
func (p *eventPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
@ -501,9 +500,8 @@ type eventDeltaPager struct {
options *users.ItemCalendarsItemEventsDeltaRequestBuilderGetRequestConfiguration
}
func NewEventDeltaPager(
func (c Events) NewEventDeltaPager(
ctx context.Context,
gs graph.Servicer,
userID, containerID, oldDelta string,
immutableIDs bool,
) (itemPager, error) {
@ -514,12 +512,12 @@ func NewEventDeltaPager(
var builder *users.ItemCalendarsItemEventsDeltaRequestBuilder
if oldDelta == "" {
builder = getEventDeltaBuilder(ctx, gs, userID, containerID, options)
builder = getEventDeltaBuilder(ctx, c.Stable, userID, containerID, options)
} else {
builder = users.NewItemCalendarsItemEventsDeltaRequestBuilder(oldDelta, gs.Adapter())
builder = users.NewItemCalendarsItemEventsDeltaRequestBuilder(oldDelta, c.Stable.Adapter())
}
return &eventDeltaPager{gs, userID, containerID, builder, options}, nil
return &eventDeltaPager{c.Stable, userID, containerID, builder, options}, nil
}
func getEventDeltaBuilder(
@ -571,12 +569,12 @@ func (c Events) GetAddedAndRemovedItemIDs(
) ([]string, []string, DeltaUpdate, error) {
ctx = clues.Add(ctx, "container_id", containerID)
pager, err := NewEventPager(ctx, c.Stable, userID, containerID, immutableIDs)
pager, err := c.NewEventPager(ctx, userID, containerID, immutableIDs)
if err != nil {
return nil, nil, DeltaUpdate{}, graph.Wrap(ctx, err, "creating non-delta pager")
}
deltaPager, err := NewEventDeltaPager(ctx, c.Stable, userID, containerID, oldDelta, immutableIDs)
deltaPager, err := c.NewEventDeltaPager(ctx, userID, containerID, oldDelta, immutableIDs)
if err != nil {
return nil, nil, DeltaUpdate{}, graph.Wrap(ctx, err, "creating delta pager")
}

View File

@ -197,12 +197,12 @@ type mailFolderPager struct {
builder *users.ItemMailFoldersRequestBuilder
}
func NewMailFolderPager(service graph.Servicer, userID string) mailFolderPager {
func (c Mail) NewMailFolderPager(userID string) mailFolderPager {
// v1.0 non delta /mailFolders endpoint does not return any of the nested folders
rawURL := fmt.Sprintf(mailFoldersBetaURLTemplate, userID)
builder := users.NewItemMailFoldersRequestBuilder(rawURL, service.Adapter())
builder := users.NewItemMailFoldersRequestBuilder(rawURL, c.Stable.Adapter())
return mailFolderPager{service, builder}
return mailFolderPager{c.Stable, builder}
}
func (p *mailFolderPager) getPage(ctx context.Context) (PageLinker, error) {
@ -241,7 +241,7 @@ func (c Mail) EnumerateContainers(
errs *fault.Bus,
) error {
el := errs.Local()
pgr := NewMailFolderPager(c.Stable, userID)
pgr := c.NewMailFolderPager(userID)
for {
if el.Failure() != nil {
@ -544,9 +544,8 @@ type mailPager struct {
options *users.ItemMailFoldersItemMessagesRequestBuilderGetRequestConfiguration
}
func NewMailPager(
func (c Mail) NewMailPager(
ctx context.Context,
gs graph.Servicer,
userID, containerID string,
immutableIDs bool,
) itemPager {
@ -557,7 +556,7 @@ func NewMailPager(
Headers: newPreferHeaders(preferPageSize(maxNonDeltaPageSize), preferImmutableIDs(immutableIDs)),
}
builder := gs.
builder := c.Stable.
Client().
Users().
ByUserId(userID).
@ -565,7 +564,7 @@ func NewMailPager(
ByMailFolderId(containerID).
Messages()
return &mailPager{gs, builder, config}
return &mailPager{c.Stable, builder, config}
}
func (p *mailPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
@ -620,9 +619,8 @@ func getMailDeltaBuilder(
return builder
}
func NewMailDeltaPager(
func (c Mail) NewMailDeltaPager(
ctx context.Context,
gs graph.Servicer,
userID, containerID, oldDelta string,
immutableIDs bool,
) itemPager {
@ -636,12 +634,12 @@ func NewMailDeltaPager(
var builder *users.ItemMailFoldersItemMessagesDeltaRequestBuilder
if len(oldDelta) > 0 {
builder = users.NewItemMailFoldersItemMessagesDeltaRequestBuilder(oldDelta, gs.Adapter())
builder = users.NewItemMailFoldersItemMessagesDeltaRequestBuilder(oldDelta, c.Stable.Adapter())
} else {
builder = getMailDeltaBuilder(ctx, gs, userID, containerID, config)
builder = getMailDeltaBuilder(ctx, c.Stable, userID, containerID, config)
}
return &mailDeltaPager{gs, userID, containerID, builder, config}
return &mailDeltaPager{c.Stable, userID, containerID, builder, config}
}
func (p *mailDeltaPager) getPage(ctx context.Context) (DeltaPageLinker, error) {
@ -683,8 +681,8 @@ func (c Mail) GetAddedAndRemovedItemIDs(
"category", selectors.ExchangeMail,
"container_id", containerID)
pager := NewMailPager(ctx, c.Stable, userID, containerID, immutableIDs)
deltaPager := NewMailDeltaPager(ctx, c.Stable, userID, containerID, oldDelta, immutableIDs)
pager := c.NewMailPager(ctx, userID, containerID, immutableIDs)
deltaPager := c.NewMailDeltaPager(ctx, userID, containerID, oldDelta, immutableIDs)
return getAddedAndRemovedItemIDs(ctx, c.Stable, pager, deltaPager, oldDelta, canMakeDeltaQueries)
}

View File

@ -32,23 +32,9 @@ type Sites struct {
}
// ---------------------------------------------------------------------------
// methods
// api calls
// ---------------------------------------------------------------------------
// GetSite returns a minimal Site with the SiteID and the WebURL
// TODO: delete in favor of sites.GetByID()
func GetSite(ctx context.Context, gs graph.Servicer, siteID string) (models.Siteable, error) {
resp, err := gs.Client().
Sites().
BySiteId(siteID).
Get(ctx, nil)
if err != nil {
return nil, graph.Stack(ctx, err)
}
return resp, nil
}
// GetAll retrieves all sites.
func (c Sites) GetAll(ctx context.Context, errs *fault.Bus) ([]models.Siteable, error) {
service, err := c.Service()
@ -171,6 +157,27 @@ func (c Sites) GetIDAndName(ctx context.Context, siteID string) (string, string,
return ptr.Val(s.GetId()), ptr.Val(s.GetWebUrl()), nil
}
// ---------------------------------------------------------------------------
// Info
// ---------------------------------------------------------------------------
// GetDefaultDrive fetches the site's default drive.
func (c Sites) GetDefaultDrive(
	ctx context.Context,
	site string,
) (models.Driveable, error) {
	builder := c.Stable.
		Client().
		Sites().
		BySiteId(site).
		Drive()

	d, err := builder.Get(ctx, nil)
	if err != nil {
		return nil, graph.Wrap(ctx, err, "getting site's default drive")
	}

	return d, nil
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------

View File

@ -0,0 +1,187 @@
package api
import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/common/tform"
"github.com/alcionai/corso/src/pkg/path"
)
// ---------------------------------------------------------------------------
// User Info
// ---------------------------------------------------------------------------
// UserInfo aggregates per-user details: which services are enabled for
// backup, plus the user's mailbox configuration.
type UserInfo struct {
	// ServicesEnabled holds one entry per service enabled for the user;
	// absence of an entry means the service is treated as not enabled.
	ServicesEnabled map[path.ServiceType]struct{}
	Mailbox         MailboxInfo
}

// MailboxInfo holds mailbox settings parsed out of the user's graph API
// additionalData (see parseMailboxSettings).
type MailboxInfo struct {
	Purpose                    string
	ArchiveFolder              string
	DateFormat                 string
	TimeFormat                 string
	DelegateMeetMsgDeliveryOpt string
	Timezone                   string
	AutomaticRepliesSetting    AutomaticRepliesSettings
	Language                   Language
	WorkingHours               WorkingHours
	// ErrGetMailBoxSetting accumulates the individual lookup errors hit
	// while parsing settings; other fields still populate best-effort.
	ErrGetMailBoxSetting []error
	// QuotaExceeded flags a full mailbox, which blocks delta queries.
	QuotaExceeded bool
}

// AutomaticRepliesSettings describes the mailbox's auto-reply
// (out-of-office) configuration.
type AutomaticRepliesSettings struct {
	ExternalAudience       string
	ExternalReplyMessage   string
	InternalReplyMessage   string
	ScheduledEndDateTime   timeInfo
	ScheduledStartDateTime timeInfo
	Status                 string
}

// timeInfo pairs a datetime string with its timezone name.
type timeInfo struct {
	DateTime string
	Timezone string
}

// Language holds the mailbox's locale settings.
type Language struct {
	Locale      string
	DisplayName string
}

// WorkingHours describes the user's configured working-hours settings.
type WorkingHours struct {
	DaysOfWeek []string
	StartTime  string
	EndTime    string
	TimeZone   struct {
		Name string
	}
}
// newUserInfo constructs a UserInfo that optimistically marks all
// supported services (exchange, onedrive) as enabled; callers then
// filter down to what the user actually has enabled.
func newUserInfo() *UserInfo {
	enabled := make(map[path.ServiceType]struct{}, 2)
	enabled[path.ExchangeService] = struct{}{}
	enabled[path.OneDriveService] = struct{}{}

	return &UserInfo{ServicesEnabled: enabled}
}
// ServiceEnabled returns true if the UserInfo has an entry for the
// service. A nil receiver, or a missing entry, reports not-enabled.
func (ui *UserInfo) ServiceEnabled(service path.ServiceType) bool {
	if ui == nil {
		return false
	}

	// lookup on a nil or empty map safely yields ok=false.
	_, ok := ui.ServicesEnabled[service]

	return ok
}
// CanMakeDeltaQueries reports whether delta queries can run against the
// mailbox. A mailbox over quota (QuotaExceeded) cannot serve them.
func (ui *UserInfo) CanMakeDeltaQueries() bool {
	if ui.Mailbox.QuotaExceeded {
		return false
	}

	return true
}
// parseMailboxSettings extracts mailbox settings from the additionalData
// of the given user, populating mi on a best-effort basis. Every failed
// lookup is appended to mi.ErrGetMailBoxSetting rather than aborting, so
// partial results are still returned.
func parseMailboxSettings(
	settings models.Userable,
	mi MailboxInfo,
) MailboxInfo {
	additionalData := settings.GetAdditionalData()

	// getStr fetches a string value from data, recording any lookup or
	// conversion error; on error the zero value is returned, matching the
	// prior inline behavior.
	getStr := func(data map[string]any, key string) string {
		v, err := str.AnyValueToString(key, data)
		mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)

		return v
	}

	// getMap fetches a nested map from data, recording any error.
	getMap := func(data map[string]any, key string) map[string]any {
		m, err := tform.AnyValueToT[map[string]any](key, data)
		mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)

		return m
	}

	mi.ArchiveFolder = getStr(additionalData, "archiveFolder")
	mi.Timezone = getStr(additionalData, "timeZone")
	mi.DateFormat = getStr(additionalData, "dateFormat")
	mi.TimeFormat = getStr(additionalData, "timeFormat")
	mi.Purpose = getStr(additionalData, "userPurpose")
	mi.DelegateMeetMsgDeliveryOpt = getStr(additionalData, "delegateMeetingMessageDeliveryOptions")

	// automatic replies settings
	replySetting := getMap(additionalData, "automaticRepliesSetting")
	ars := &mi.AutomaticRepliesSetting
	ars.Status = getStr(replySetting, "status")
	ars.ExternalAudience = getStr(replySetting, "externalAudience")
	ars.ExternalReplyMessage = getStr(replySetting, "externalReplyMessage")
	ars.InternalReplyMessage = getStr(replySetting, "internalReplyMessage")

	startDateTime := getMap(replySetting, "scheduledStartDateTime")
	ars.ScheduledStartDateTime.DateTime = getStr(startDateTime, "dateTime")
	ars.ScheduledStartDateTime.Timezone = getStr(startDateTime, "timeZone")

	endDateTime := getMap(replySetting, "scheduledEndDateTime")
	ars.ScheduledEndDateTime.DateTime = getStr(endDateTime, "dateTime")
	ars.ScheduledEndDateTime.Timezone = getStr(endDateTime, "timeZone")

	// language
	language := getMap(additionalData, "language")
	mi.Language.DisplayName = getStr(language, "displayName")
	mi.Language.Locale = getStr(language, "locale")

	// working hours
	workingHours := getMap(additionalData, "workingHours")
	mi.WorkingHours.StartTime = getStr(workingHours, "startTime")
	mi.WorkingHours.EndTime = getStr(workingHours, "endTime")

	timeZone := getMap(workingHours, "timeZone")
	mi.WorkingHours.TimeZone.Name = getStr(timeZone, "name")

	days, err := tform.AnyValueToT[[]any]("daysOfWeek", workingHours)
	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)

	for _, day := range days {
		s, err := str.AnyToString(day)
		mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
		// the (possibly empty) value is appended even on error, matching
		// the original behavior.
		mi.WorkingHours.DaysOfWeek = append(mi.WorkingHours.DaysOfWeek, s)
	}

	return mi
}

View File

@ -12,8 +12,6 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/common/tform"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger"
@ -39,85 +37,7 @@ type Users struct {
}
// ---------------------------------------------------------------------------
// structs
// ---------------------------------------------------------------------------
type UserInfo struct {
ServicesEnabled map[path.ServiceType]struct{}
Mailbox MailboxInfo
}
type MailboxInfo struct {
Purpose string
ArchiveFolder string
DateFormat string
TimeFormat string
DelegateMeetMsgDeliveryOpt string
Timezone string
AutomaticRepliesSetting AutomaticRepliesSettings
Language Language
WorkingHours WorkingHours
ErrGetMailBoxSetting []error
QuotaExceeded bool
}
type AutomaticRepliesSettings struct {
ExternalAudience string
ExternalReplyMessage string
InternalReplyMessage string
ScheduledEndDateTime timeInfo
ScheduledStartDateTime timeInfo
Status string
}
type timeInfo struct {
DateTime string
Timezone string
}
type Language struct {
Locale string
DisplayName string
}
type WorkingHours struct {
DaysOfWeek []string
StartTime string
EndTime string
TimeZone struct {
Name string
}
}
func newUserInfo() *UserInfo {
return &UserInfo{
ServicesEnabled: map[path.ServiceType]struct{}{
path.ExchangeService: {},
path.OneDriveService: {},
},
}
}
// ServiceEnabled returns true if the UserInfo has an entry for the
// service. If no entry exists, the service is assumed to not be enabled.
func (ui *UserInfo) ServiceEnabled(service path.ServiceType) bool {
if ui == nil || len(ui.ServicesEnabled) == 0 {
return false
}
_, ok := ui.ServicesEnabled[service]
return ok
}
// Returns if we can run delta queries on a mailbox. We cannot run
// them if the mailbox is full which is indicated by QuotaExceeded.
func (ui *UserInfo) CanMakeDeltaQueries() bool {
return !ui.Mailbox.QuotaExceeded
}
// ---------------------------------------------------------------------------
// methods
// User CRUD
// ---------------------------------------------------------------------------
// Filter out both guest users, and (for on-prem installations) non-synced users.
@ -133,28 +53,26 @@ func (ui *UserInfo) CanMakeDeltaQueries() bool {
// OData $filter expression applied to user-listing requests.
//nolint:lll
var userFilterNoGuests = "onPremisesSyncEnabled eq true OR userType ne 'Guest'"
// userOptions assembles the request configuration for user-listing queries:
// eventual-consistency headers, an id/principalName/displayName selection,
// the provided filter, and an inline count.
func userOptions(fs *string) *users.UsersRequestBuilderGetRequestConfiguration {
	params := users.UsersRequestBuilderGetQueryParameters{
		Select: idAnd(userPrincipalName, displayName),
		Filter: fs,
		Count:  ptr.To(true),
	}

	return &users.UsersRequestBuilderGetRequestConfiguration{
		Headers:         newEventualConsistencyHeaders(),
		QueryParameters: &params,
	}
}
// GetAll retrieves all users.
func (c Users) GetAll(ctx context.Context, errs *fault.Bus) ([]models.Userable, error) {
func (c Users) GetAll(
ctx context.Context,
errs *fault.Bus,
) ([]models.Userable, error) {
service, err := c.Service()
if err != nil {
return nil, err
}
var resp models.UserCollectionResponseable
resp, err = service.Client().Users().Get(ctx, userOptions(&userFilterNoGuests))
config := &users.UsersRequestBuilderGetRequestConfiguration{
Headers: newEventualConsistencyHeaders(),
QueryParameters: &users.UsersRequestBuilderGetQueryParameters{
Select: idAnd(userPrincipalName, displayName),
Filter: &userFilterNoGuests,
Count: ptr.To(true),
},
}
resp, err := service.Client().Users().Get(ctx, config)
if err != nil {
return nil, graph.Wrap(ctx, err, "getting all users")
}
@ -241,238 +159,6 @@ func (c Users) GetAllIDsAndNames(ctx context.Context, errs *fault.Bus) (idname.C
return idname.NewCache(idToName), nil
}
// GetInfo produces a UserInfo describing which services the user can access,
// along with their mailbox settings.  All services are assumed enabled until
// a probe proves otherwise.
func (c Users) GetInfo(ctx context.Context, userID string) (*UserInfo, error) {
	// Assume all services are enabled
	// then filter down to only services the user has enabled
	userInfo := newUserInfo()

	// probe exchange access: listing even one mail folder proves the user
	// has a reachable mailbox.
	requestParameters := users.ItemMailFoldersRequestBuilderGetQueryParameters{
		Select: idAnd(),
		Top:    ptr.To[int32](1), // if we get any folders, then we have access.
	}

	options := users.ItemMailFoldersRequestBuilderGetRequestConfiguration{
		QueryParameters: &requestParameters,
	}

	mfs, err := c.GetMailFolders(ctx, userID, options)
	if err != nil {
		logger.CtxErr(ctx, err).Error("getting user's mail folders")

		if graph.IsErrUserNotFound(err) {
			return nil, clues.Stack(graph.ErrResourceOwnerNotFound, err)
		}

		if !graph.IsErrExchangeMailFolderNotFound(err) {
			return nil, clues.Stack(err)
		}

		delete(userInfo.ServicesEnabled, path.ExchangeService)
	}

	// probe onedrive access: a mysite-not-found label means the user has
	// no drive, which we treat as onedrive being disabled.
	if _, err := c.GetDrives(ctx, userID); err != nil {
		logger.CtxErr(ctx, err).Error("getting user's drives")

		if graph.IsErrUserNotFound(err) {
			return nil, clues.Stack(graph.ErrResourceOwnerNotFound, err)
		}

		if !clues.HasLabel(err, graph.LabelsMysiteNotFound) {
			return nil, clues.Stack(err)
		}

		delete(userInfo.ServicesEnabled, path.OneDriveService)
	}

	mbxInfo, err := c.getMailboxSettings(ctx, userID)
	if err != nil {
		return nil, err
	}

	userInfo.Mailbox = mbxInfo

	// TODO: This tries to determine if the user has hit their mailbox
	// limit by trying to fetch an item and seeing if we get the quota
	// exceeded error. Ideally(if available) we should convert this to
	// pull the user's usage via an api and compare if they have used
	// up their quota.
	//
	// guard the slice access: a successful response carrying zero folders
	// would otherwise panic on the [0] index below.
	if mfs != nil && len(mfs.GetValue()) > 0 {
		mf := mfs.GetValue()[0]

		options := &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration{
			QueryParameters: &users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetQueryParameters{
				Top: ptr.To[int32](1), // just one item is enough
			},
		}

		_, err = c.Stable.Client().
			Users().
			ByUserId(userID).
			MailFolders().
			ByMailFolderId(ptr.Val(mf.GetId())).
			Messages().
			Delta().
			Get(ctx, options)
		if err != nil && !graph.IsErrQuotaExceeded(err) {
			return nil, err
		}

		userInfo.Mailbox.QuotaExceeded = graph.IsErrQuotaExceeded(err)
	}

	return userInfo, nil
}
// TODO: remove when exchange api goes into this package
// GetMailFolders lists the user's mail folders using the provided request
// configuration.
//
// TODO: remove when exchange api goes into this package
func (c Users) GetMailFolders(
	ctx context.Context,
	userID string,
	options users.ItemMailFoldersRequestBuilderGetRequestConfiguration,
) (models.MailFolderCollectionResponseable, error) {
	resp, err := c.Stable.
		Client().
		Users().
		ByUserId(userID).
		MailFolders().
		Get(ctx, &options)
	if err != nil {
		return nil, graph.Wrap(ctx, err, "getting MailFolders")
	}

	return resp, nil
}
// GetDrives lists all drives owned by the user.
//
// TODO: remove when drive api goes into this package
func (c Users) GetDrives(ctx context.Context, userID string) (models.DriveCollectionResponseable, error) {
	resp, err := c.Stable.
		Client().
		Users().
		ByUserId(userID).
		Drives().
		Get(ctx, nil)
	if err != nil {
		return nil, graph.Wrap(ctx, err, "getting drives")
	}

	return resp, nil
}
// getMailboxSettings fetches the user's mailboxSettings resource and
// flattens it into a MailboxInfo.  Extraction failures for individual
// fields are accumulated into mi.ErrGetMailBoxSetting rather than aborting
// the whole fetch.  Access-denied and folder-not-found responses are
// returned as a populated error list with a nil error.
func (c Users) getMailboxSettings(
	ctx context.Context,
	userID string,
) (MailboxInfo, error) {
	var (
		rawURL  = fmt.Sprintf("https://graph.microsoft.com/v1.0/users/%s/mailboxSettings", userID)
		adapter = c.Stable.Adapter()
		mi      = MailboxInfo{
			ErrGetMailBoxSetting: []error{},
		}
	)

	settings, err := users.NewUserItemRequestBuilder(rawURL, adapter).Get(ctx, nil)
	if err != nil && !(graph.IsErrAccessDenied(err) || graph.IsErrExchangeMailFolderNotFound(err)) {
		logger.CtxErr(ctx, err).Error("getting mailbox settings")
		return mi, graph.Wrap(ctx, err, "getting additional data")
	}

	if graph.IsErrAccessDenied(err) {
		logger.Ctx(ctx).Info("err getting additional data: access denied")
		mi.ErrGetMailBoxSetting = append(mi.ErrGetMailBoxSetting, clues.New("access denied"))

		return mi, nil
	}

	if graph.IsErrExchangeMailFolderNotFound(err) {
		logger.Ctx(ctx).Info("mailfolders not found")
		mi.ErrGetMailBoxSetting = append(mi.ErrGetMailBoxSetting, ErrMailBoxSettingsNotFound)

		return mi, nil
	}

	// getStr extracts a string value from m, recording any extraction
	// error in mi.ErrGetMailBoxSetting.
	getStr := func(m map[string]any, key string) string {
		v, err := str.AnyValueToString(key, m)
		mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)

		return v
	}

	// getMap extracts a nested map from m, recording any extraction
	// error in mi.ErrGetMailBoxSetting.
	getMap := func(m map[string]any, key string) map[string]any {
		v, err := tform.AnyValueToT[map[string]any](key, m)
		mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)

		return v
	}

	additionalData := settings.GetAdditionalData()

	mi.ArchiveFolder = getStr(additionalData, "archiveFolder")
	mi.Timezone = getStr(additionalData, "timeZone")
	mi.DateFormat = getStr(additionalData, "dateFormat")
	mi.TimeFormat = getStr(additionalData, "timeFormat")
	mi.Purpose = getStr(additionalData, "userPurpose")
	mi.DelegateMeetMsgDeliveryOpt = getStr(additionalData, "delegateMeetingMessageDeliveryOptions")

	// decode automatic replies settings
	replySetting := getMap(additionalData, "automaticRepliesSetting")
	mi.AutomaticRepliesSetting.Status = getStr(replySetting, "status")
	mi.AutomaticRepliesSetting.ExternalAudience = getStr(replySetting, "externalAudience")
	mi.AutomaticRepliesSetting.ExternalReplyMessage = getStr(replySetting, "externalReplyMessage")
	mi.AutomaticRepliesSetting.InternalReplyMessage = getStr(replySetting, "internalReplyMessage")

	// decode scheduledStartDateTime
	startDateTime := getMap(replySetting, "scheduledStartDateTime")
	mi.AutomaticRepliesSetting.ScheduledStartDateTime.DateTime = getStr(startDateTime, "dateTime")
	mi.AutomaticRepliesSetting.ScheduledStartDateTime.Timezone = getStr(startDateTime, "timeZone")

	endDateTime := getMap(replySetting, "scheduledEndDateTime")
	mi.AutomaticRepliesSetting.ScheduledEndDateTime.DateTime = getStr(endDateTime, "dateTime")
	mi.AutomaticRepliesSetting.ScheduledEndDateTime.Timezone = getStr(endDateTime, "timeZone")

	// Language decode
	language := getMap(additionalData, "language")
	mi.Language.DisplayName = getStr(language, "displayName")
	mi.Language.Locale = getStr(language, "locale")

	// working hours
	workingHours := getMap(additionalData, "workingHours")
	mi.WorkingHours.StartTime = getStr(workingHours, "startTime")
	mi.WorkingHours.EndTime = getStr(workingHours, "endTime")

	timeZone := getMap(workingHours, "timeZone")
	mi.WorkingHours.TimeZone.Name = getStr(timeZone, "name")

	days, err := tform.AnyValueToT[[]any]("daysOfWeek", workingHours)
	mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)

	for _, day := range days {
		s, err := str.AnyToString(day)
		mi.ErrGetMailBoxSetting = appendIfErr(mi.ErrGetMailBoxSetting, err)
		mi.WorkingHours.DaysOfWeek = append(mi.WorkingHours.DaysOfWeek, s)
	}

	return mi, nil
}
// appendIfErr appends err to errs only when err is non-nil, returning the
// (possibly unchanged) slice.
func appendIfErr(errs []error, err error) []error {
	if err == nil {
		return errs
@ -481,6 +167,177 @@ func appendIfErr(errs []error, err error) []error {
	return append(errs, err)
}
// ---------------------------------------------------------------------------
// Info
// ---------------------------------------------------------------------------

// GetInfo produces a UserInfo describing which services the user is able to
// access, along with their mailbox settings.  All services are assumed
// enabled until a probe proves otherwise.
func (c Users) GetInfo(ctx context.Context, userID string) (*UserInfo, error) {
	var (
		// Assume all services are enabled
		// then filter down to only services the user has enabled
		userInfo        = newUserInfo()
		mailFolderFound = true
	)

	// check whether the user is able to access their onedrive drive.
	// if they cannot, we can assume they are ineligible for onedrive backups.
	if _, err := c.GetDefaultDrive(ctx, userID); err != nil {
		if !clues.HasLabel(err, graph.LabelsMysiteNotFound) {
			logger.CtxErr(ctx, err).Error("getting user's drive")
			return nil, graph.Wrap(ctx, err, "getting user's drive")
		}

		logger.Ctx(ctx).Info("resource owner does not have a drive")
		delete(userInfo.ServicesEnabled, path.OneDriveService)
	}

	// check whether the user is able to access their inbox.
	// if they cannot, we can assume they are ineligible for exchange backups.
	inbx, err := c.GetMailInbox(ctx, userID)
	if err != nil {
		err = graph.Stack(ctx, err)

		if graph.IsErrUserNotFound(err) {
			logger.CtxErr(ctx, err).Error("user not found")
			return nil, clues.Stack(graph.ErrResourceOwnerNotFound, err)
		}

		if !graph.IsErrExchangeMailFolderNotFound(err) {
			logger.CtxErr(ctx, err).Error("getting user's mail folder")
			return nil, err
		}

		logger.Ctx(ctx).Info("resource owner does not have a mailbox enabled")
		delete(userInfo.ServicesEnabled, path.ExchangeService)

		mailFolderFound = false
	}

	// check whether the user has accessible mailbox settings.
	// if they do, aggregate them in the MailboxInfo
	mi := MailboxInfo{
		ErrGetMailBoxSetting: []error{},
	}

	if !mailFolderFound {
		mi.ErrGetMailBoxSetting = append(mi.ErrGetMailBoxSetting, ErrMailBoxSettingsNotFound)
		userInfo.Mailbox = mi

		return userInfo, nil
	}

	mboxSettings, err := c.getMailboxSettings(ctx, userID)
	if err != nil {
		logger.CtxErr(ctx, err).Info("err getting user's mailbox settings")

		if !graph.IsErrAccessDenied(err) {
			return nil, graph.Wrap(ctx, err, "getting user's mailbox settings")
		}

		mi.ErrGetMailBoxSetting = append(mi.ErrGetMailBoxSetting, clues.New("access denied"))
	} else {
		mi = parseMailboxSettings(mboxSettings, mi)
	}

	// probe for a full mailbox by fetching a single message.
	if err := c.getFirstInboxMessage(ctx, userID, ptr.Val(inbx.GetId())); err != nil {
		if !graph.IsErrQuotaExceeded(err) {
			return nil, err
		}

		// record the flag on mi, which becomes userInfo.Mailbox below.
		// The previous code set userInfo.Mailbox.QuotaExceeded here and
		// then overwrote the whole Mailbox struct with mi, silently
		// discarding the flag.
		mi.QuotaExceeded = true
	}

	userInfo.Mailbox = mi

	return userInfo, nil
}
// getMailboxSettings fetches the raw mailboxSettings resource for the user
// via a manually-built request url (the sdk does not expose the endpoint
// directly).
func (c Users) getMailboxSettings(
	ctx context.Context,
	userID string,
) (models.Userable, error) {
	rawURL := fmt.Sprintf("https://graph.microsoft.com/v1.0/users/%s/mailboxSettings", userID)
	builder := users.NewUserItemRequestBuilder(rawURL, c.Stable.Adapter())

	settings, err := builder.Get(ctx, nil)
	if err != nil {
		return nil, graph.Stack(ctx, err)
	}

	return settings, nil
}
// GetMailInbox fetches the user's inbox mail folder.  It doubles as an
// access probe: an exchange folder-not-found error indicates the user has
// no usable mailbox.
func (c Users) GetMailInbox(
	ctx context.Context,
	userID string,
) (models.MailFolderable, error) {
	inbox, err := c.Stable.
		Client().
		Users().
		ByUserId(userID).
		MailFolders().
		ByMailFolderId("inbox").
		Get(ctx, nil)
	if err != nil {
		// describe the action taken; "getting MailFolders" was a
		// copy-paste leftover from the folder-listing call.
		return nil, graph.Wrap(ctx, err, "getting mail inbox")
	}

	return inbox, nil
}
// GetDefaultDrive fetches the user's default onedrive drive.
func (c Users) GetDefaultDrive(
	ctx context.Context,
	userID string,
) (models.Driveable, error) {
	builder := c.Stable.
		Client().
		Users().
		ByUserId(userID).
		Drive()

	resp, err := builder.Get(ctx, nil)
	if err != nil {
		return nil, graph.Wrap(ctx, err, "getting user's drive")
	}

	return resp, nil
}
// getFirstInboxMessage attempts to read a single message (id only) from the
// given inbox via a delta query, returning any error produced.
//
// TODO: This tries to determine if the user has hit their mailbox
// limit by trying to fetch an item and seeing if we get the quota
// exceeded error. Ideally(if available) we should convert this to
// pull the user's usage via an api and compare if they have used
// up their quota.
func (c Users) getFirstInboxMessage(
	ctx context.Context,
	userID, inboxID string,
) error {
	qp := users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetQueryParameters{
		Select: idAnd(),
	}

	config := users.ItemMailFoldersItemMessagesDeltaRequestBuilderGetRequestConfiguration{
		QueryParameters: &qp,
		Headers:         newPreferHeaders(preferPageSize(1)),
	}

	builder := c.Stable.
		Client().
		Users().
		ByUserId(userID).
		MailFolders().
		ByMailFolderId(inboxID).
		Messages().
		Delta()

	if _, err := builder.Get(ctx, &config); err != nil {
		return graph.Stack(ctx, err)
	}

	return nil
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------

View File

@ -5,7 +5,6 @@ import (
"github.com/alcionai/clues"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/microsoftgraph/msgraph-sdk-go/users"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/ptr"
@ -79,16 +78,7 @@ func UserHasMailbox(ctx context.Context, acct account.Account, userID string) (b
return false, clues.Wrap(err, "getting mailbox").WithClues(ctx)
}
requestParameters := users.ItemMailFoldersRequestBuilderGetQueryParameters{
Select: []string{"id"},
Top: ptr.To[int32](1), // if we get any folders, then we have access.
}
options := users.ItemMailFoldersRequestBuilderGetRequestConfiguration{
QueryParameters: &requestParameters,
}
_, err = uapi.GetMailFolders(ctx, userID, options)
_, err = uapi.GetMailInbox(ctx, userID)
if err != nil {
// we consider this a non-error case, since it
// answers the question the caller is asking.
@ -100,6 +90,10 @@ func UserHasMailbox(ctx context.Context, acct account.Account, userID string) (b
return false, clues.Stack(graph.ErrResourceOwnerNotFound, err)
}
if graph.IsErrExchangeMailFolderNotFound(err) {
return false, nil
}
return false, clues.Stack(err)
}
@ -114,7 +108,7 @@ func UserHasDrives(ctx context.Context, acct account.Account, userID string) (bo
return false, clues.Wrap(err, "getting drives").WithClues(ctx)
}
_, err = uapi.GetDrives(ctx, userID)
_, err = uapi.GetDefaultDrive(ctx, userID)
if err != nil {
// we consider this a non-error case, since it
// answers the question the caller is asking.