populate sharepoint itemInfo (#1687)

## Description

Currently, all drive backup and restore actions
populate a details.OneDriveInfo struct. This
change branches that struct between OneDrive
and SharePoint info, depending on the current
source.
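
A minimal sketch of the new shape, using simplified stand-ins for `details.ItemInfo`, `details.OneDriveInfo`, and `details.SharePointInfo` (the real structs carry more fields, such as ParentPath, Size, timestamps, Owner, and WebURL):

```go
package main

import "fmt"

type driveSource int

const (
	OneDriveSource driveSource = iota
	SharePointSource
)

type oneDriveInfo struct{ ItemName string }
type sharePointInfo struct{ ItemName string }

// itemInfo carries exactly one populated branch, chosen by the source of
// the drive being backed up or restored.
type itemInfo struct {
	OneDrive   *oneDriveInfo
	SharePoint *sharePointInfo
}

func newItemInfo(source driveSource, name string) itemInfo {
	switch source {
	case SharePointSource:
		return itemInfo{SharePoint: &sharePointInfo{ItemName: name}}
	default:
		return itemInfo{OneDrive: &oneDriveInfo{ItemName: name}}
	}
}

func main() {
	fmt.Printf("%+v\n", newItemInfo(SharePointSource, "report.docx").SharePoint)
}
```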

## Type of change

- [x] 🌻 Feature

## Issue(s)

* #1616

## Test Plan

- [x]  Unit test
Keepers 2022-12-08 10:26:56 -07:00 committed by GitHub
parent 9cab212776
commit 4c976298d4
7 changed files with 391 additions and 185 deletions


@ -49,6 +49,7 @@ type Collection struct {
driveItemIDs []string
// M365 ID of the drive this collection was created from
driveID string
source driveSource
service graph.Service
statusUpdater support.StatusUpdater
itemReader itemReaderFunc
@ -59,7 +60,7 @@ type itemReaderFunc func(
ctx context.Context,
service graph.Service,
driveID, itemID string,
) (itemInfo *details.OneDriveInfo, itemData io.ReadCloser, err error)
) (itemInfo details.ItemInfo, itemData io.ReadCloser, err error)
// NewCollection creates a Collection
func NewCollection(
@ -67,17 +68,25 @@ func NewCollection(
driveID string,
service graph.Service,
statusUpdater support.StatusUpdater,
source driveSource,
) *Collection {
c := &Collection{
folderPath: folderPath,
driveItemIDs: []string{},
driveID: driveID,
source: source,
service: service,
data: make(chan data.Stream, collectionChannelBufferSize),
statusUpdater: statusUpdater,
}
// Allows tests to set a mock populator
c.itemReader = driveItemReader
switch source {
case SharePointSource:
c.itemReader = sharePointItemReader
default:
c.itemReader = oneDriveItemReader
}
return c
}
@ -114,7 +123,7 @@ func (oc Collection) State() data.CollectionState {
type Item struct {
id string
data io.ReadCloser
info *details.OneDriveInfo
info details.ItemInfo
}
func (od *Item) UUID() string {
@ -131,7 +140,7 @@ func (od Item) Deleted() bool {
}
func (od *Item) Info() details.ItemInfo {
return details.ItemInfo{OneDrive: od.info}
return od.info
}
// TODO(ashmrtn): Uncomment when #1702 is resolved.
@ -190,7 +199,7 @@ func (oc *Collection) populateItems(ctx context.Context) {
// Read the item
var (
itemInfo *details.OneDriveInfo
itemInfo details.ItemInfo
itemData io.ReadCloser
err error
)
@ -213,18 +222,32 @@ func (oc *Collection) populateItems(ctx context.Context) {
return
}
var (
itemName string
itemSize int64
)
switch oc.source {
case SharePointSource:
itemInfo.SharePoint.ParentPath = parentPathString
itemName = itemInfo.SharePoint.ItemName
itemSize = itemInfo.SharePoint.Size
default:
itemInfo.OneDrive.ParentPath = parentPathString
itemName = itemInfo.OneDrive.ItemName
itemSize = itemInfo.OneDrive.Size
}
progReader, closer := observe.ItemProgress(itemData, observe.ItemBackupMsg, itemName, itemSize)
go closer()
// Item read successfully, add to collection
atomic.AddInt64(&itemsRead, 1)
// byteCount iteration
atomic.AddInt64(&byteCount, itemInfo.Size)
itemInfo.ParentPath = parentPathString
progReader, closer := observe.ItemProgress(itemData, observe.ItemBackupMsg, itemInfo.ItemName, itemInfo.Size)
go closer()
atomic.AddInt64(&byteCount, itemSize)
oc.data <- &Item{
id: itemInfo.ItemName,
id: itemName,
data: progReader,
info: itemInfo,
}
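
For the backup path, `populateItems` now pulls the item name, size, and parent path from whichever branch the item reader populated, switching on `oc.source`. A self-contained sketch of the consumer side, using stand-in types and keying off the non-nil branch rather than the source enum:

```go
package main

import "fmt"

// Simplified stand-ins for the details structs this hunk branches on.
type oneDriveInfo struct {
	ItemName   string
	ParentPath string
	Size       int64
}

type sharePointInfo struct {
	ItemName   string
	ParentPath string
	Size       int64
}

type itemInfo struct {
	OneDrive   *oneDriveInfo
	SharePoint *sharePointInfo
}

// nameAndSize returns the display name and byte count from whichever
// branch was populated; populateItems makes the same choice via oc.source.
func nameAndSize(ii itemInfo) (string, int64) {
	if ii.SharePoint != nil {
		return ii.SharePoint.ItemName, ii.SharePoint.Size
	}
	return ii.OneDrive.ItemName, ii.OneDrive.Size
}

func main() {
	sp := itemInfo{SharePoint: &sharePointInfo{
		ItemName:   "report.docx",
		ParentPath: "dir1/dir2/dir3",
		Size:       1024,
	}}
	name, size := nameAndSize(sp)
	fmt.Println(name, size)
}
```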


@ -7,7 +7,6 @@ import (
"io"
"sync"
"testing"
"time"
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
"github.com/stretchr/testify/assert"
@ -20,31 +19,31 @@ import (
"github.com/alcionai/corso/src/pkg/backup/details"
)
type OneDriveCollectionSuite struct {
type CollectionUnitTestSuite struct {
suite.Suite
}
// Allows `*OneDriveCollectionSuite` to be used as a graph.Service
// Allows `*CollectionUnitTestSuite` to be used as a graph.Service
// TODO: Implement these methods
func (suite *OneDriveCollectionSuite) Client() *msgraphsdk.GraphServiceClient {
func (suite *CollectionUnitTestSuite) Client() *msgraphsdk.GraphServiceClient {
return nil
}
func (suite *OneDriveCollectionSuite) Adapter() *msgraphsdk.GraphRequestAdapter {
func (suite *CollectionUnitTestSuite) Adapter() *msgraphsdk.GraphRequestAdapter {
return nil
}
func (suite *OneDriveCollectionSuite) ErrPolicy() bool {
func (suite *CollectionUnitTestSuite) ErrPolicy() bool {
return false
}
func TestOneDriveCollectionSuite(t *testing.T) {
suite.Run(t, new(OneDriveCollectionSuite))
func TestCollectionUnitTestSuite(t *testing.T) {
suite.Run(t, new(CollectionUnitTestSuite))
}
// Returns a status update function that signals the specified WaitGroup when it is done
func (suite *OneDriveCollectionSuite) testStatusUpdater(
func (suite *CollectionUnitTestSuite) testStatusUpdater(
wg *sync.WaitGroup,
statusToUpdate *support.ConnectorOperationStatus,
) support.StatusUpdater {
@ -56,93 +55,139 @@ func (suite *OneDriveCollectionSuite) testStatusUpdater(
}
}
func (suite *OneDriveCollectionSuite) TestOneDriveCollection() {
t := suite.T()
wg := sync.WaitGroup{}
collStatus := support.ConnectorOperationStatus{}
now := time.Now()
func (suite *CollectionUnitTestSuite) TestCollection() {
var (
testItemID = "fakeItemID"
testItemName = "itemName"
testItemData = []byte("testdata")
)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/dir1/dir2/dir3", "a-tenant", "a-user", OneDriveSource)
require.NoError(t, err)
driveFolderPath, err := getDriveFolderPath(folderPath)
require.NoError(t, err)
coll := NewCollection(folderPath, "fakeDriveID", suite, suite.testStatusUpdater(&wg, &collStatus))
require.NotNil(t, coll)
assert.Equal(t, folderPath, coll.FullPath())
testItemID := "fakeItemID"
testItemName := "itemName"
testItemData := []byte("testdata")
// Set an item reader, add an item and validate we get the item back
coll.Add(testItemID)
coll.itemReader = func(context.Context, graph.Service, string, string) (*details.OneDriveInfo, io.ReadCloser, error) {
return &details.OneDriveInfo{
ItemName: testItemName,
Modified: now,
}, io.NopCloser(bytes.NewReader(testItemData)), nil
table := []struct {
name string
source driveSource
itemReader itemReaderFunc
infoFrom func(*testing.T, details.ItemInfo) (string, string)
}{
{
name: "oneDrive",
source: OneDriveSource,
itemReader: func(context.Context, graph.Service, string, string) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: testItemName}},
io.NopCloser(bytes.NewReader(testItemData)),
nil
},
infoFrom: func(t *testing.T, dii details.ItemInfo) (string, string) {
require.NotNil(t, dii.OneDrive)
return dii.OneDrive.ItemName, dii.OneDrive.ParentPath
},
},
{
name: "sharePoint",
source: SharePointSource,
itemReader: func(context.Context, graph.Service, string, string) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: testItemName}},
io.NopCloser(bytes.NewReader(testItemData)),
nil
},
infoFrom: func(t *testing.T, dii details.ItemInfo) (string, string) {
require.NotNil(t, dii.SharePoint)
return dii.SharePoint.ItemName, dii.SharePoint.ParentPath
},
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
var (
wg = sync.WaitGroup{}
collStatus = support.ConnectorOperationStatus{}
readItems = []data.Stream{}
)
// Read items from the collection
wg.Add(1)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/dir1/dir2/dir3", "tenant", "owner", test.source)
require.NoError(t, err)
driveFolderPath, err := getDriveFolderPath(folderPath)
require.NoError(t, err)
readItems := []data.Stream{}
coll := NewCollection(folderPath, "drive-id", suite, suite.testStatusUpdater(&wg, &collStatus), test.source)
require.NotNil(t, coll)
assert.Equal(t, folderPath, coll.FullPath())
for item := range coll.Items() {
readItems = append(readItems, item)
// Set an item reader, add an item and validate we get the item back
coll.Add(testItemID)
coll.itemReader = test.itemReader
// Read items from the collection
wg.Add(1)
for item := range coll.Items() {
readItems = append(readItems, item)
}
wg.Wait()
// Expect only 1 item
require.Len(t, readItems, 1)
require.Equal(t, 1, collStatus.ObjectCount)
require.Equal(t, 1, collStatus.Successful)
// Validate item info and data
readItem := readItems[0]
readItemInfo := readItem.(data.StreamInfo)
assert.Equal(t, testItemName, readItem.UUID())
readData, err := io.ReadAll(readItem.ToReader())
require.NoError(t, err)
name, parentPath := test.infoFrom(t, readItemInfo.Info())
assert.Equal(t, testItemData, readData)
assert.Equal(t, testItemName, name)
assert.Equal(t, driveFolderPath, parentPath)
})
}
wg.Wait()
// Expect only 1 item
require.Len(t, readItems, 1)
require.Equal(t, 1, collStatus.ObjectCount)
require.Equal(t, 1, collStatus.Successful)
// Validate item info and data
readItem := readItems[0]
readItemInfo := readItem.(data.StreamInfo)
assert.Equal(t, testItemName, readItem.UUID())
// TODO(ashmrtn): Uncomment when #1702 is resolved.
// require.Implements(t, (*data.StreamModTime)(nil), readItem)
// mt := readItem.(data.StreamModTime)
// assert.Equal(t, now, mt.ModTime())
readData, err := io.ReadAll(readItem.ToReader())
require.NoError(t, err)
assert.Equal(t, testItemData, readData)
require.NotNil(t, readItemInfo.Info())
require.NotNil(t, readItemInfo.Info().OneDrive)
assert.Equal(t, testItemName, readItemInfo.Info().OneDrive.ItemName)
assert.Equal(t, driveFolderPath, readItemInfo.Info().OneDrive.ParentPath)
}
func (suite *OneDriveCollectionSuite) TestOneDriveCollectionReadError() {
t := suite.T()
collStatus := support.ConnectorOperationStatus{}
wg := sync.WaitGroup{}
wg.Add(1)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", OneDriveSource)
require.NoError(t, err)
coll := NewCollection(folderPath, "fakeDriveID", suite, suite.testStatusUpdater(&wg, &collStatus))
coll.Add("testItemID")
readError := errors.New("Test error")
coll.itemReader = func(context.Context, graph.Service, string, string) (*details.OneDriveInfo, io.ReadCloser, error) {
return nil, nil, readError
func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
table := []struct {
name string
source driveSource
}{
{
name: "oneDrive",
source: OneDriveSource,
},
{
name: "sharePoint",
source: SharePointSource,
},
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
var (
collStatus = support.ConnectorOperationStatus{}
wg = sync.WaitGroup{}
)
coll.Items()
wg.Wait()
// Expect no items
require.Equal(t, 1, collStatus.ObjectCount)
require.Equal(t, 0, collStatus.Successful)
wg.Add(1)
folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
require.NoError(t, err)
coll := NewCollection(folderPath, "fakeDriveID", suite, suite.testStatusUpdater(&wg, &collStatus), test.source)
coll.Add("testItemID")
readError := errors.New("Test error")
coll.itemReader = func(context.Context, graph.Service, string, string) (details.ItemInfo, io.ReadCloser, error) {
return details.ItemInfo{}, nil, readError
}
coll.Items()
wg.Wait()
// Expect no items
require.Equal(t, 1, collStatus.ObjectCount)
require.Equal(t, 0, collStatus.Successful)
})
}
}


@ -138,6 +138,7 @@ func (c *Collections) UpdateCollections(ctx context.Context, driveID string, ite
driveID,
c.service,
c.statusUpdater,
c.source,
)
c.CollectionMap[collectionPath.String()] = col


@ -11,7 +11,6 @@ import (
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/connector/uploadsession"
@ -25,23 +24,61 @@ const (
downloadURLKey = "@microsoft.graph.downloadUrl"
)
// itemReader will return a io.ReadCloser for the specified item
// sharePointItemReader will return a io.ReadCloser for the specified item
// It crafts this by querying M365 for a download URL for the item
// and using a http client to initialize a reader
func sharePointItemReader(
ctx context.Context,
service graph.Service,
driveID, itemID string,
) (details.ItemInfo, io.ReadCloser, error) {
item, rc, err := driveItemReader(ctx, service, driveID, itemID)
if err != nil {
return details.ItemInfo{}, nil, err
}
dii := details.ItemInfo{
SharePoint: sharePointItemInfo(item, *item.GetSize()),
}
return dii, rc, nil
}
// oneDriveItemReader will return a io.ReadCloser for the specified item
// It crafts this by querying M365 for a download URL for the item
// and using a http client to initialize a reader
func oneDriveItemReader(
ctx context.Context,
service graph.Service,
driveID, itemID string,
) (details.ItemInfo, io.ReadCloser, error) {
item, rc, err := driveItemReader(ctx, service, driveID, itemID)
if err != nil {
return details.ItemInfo{}, nil, err
}
dii := details.ItemInfo{
OneDrive: oneDriveItemInfo(item, *item.GetSize()),
}
return dii, rc, nil
}
// driveItemReader will return a io.ReadCloser for the specified item
// It crafts this by querying M365 for a download URL for the item
// and using a http client to initialize a reader
func driveItemReader(
ctx context.Context,
service graph.Service,
driveID, itemID string,
) (*details.OneDriveInfo, io.ReadCloser, error) {
logger.Ctx(ctx).Debugf("Reading Item %s at %s", itemID, time.Now())
) (models.DriveItemable, io.ReadCloser, error) {
logger.Ctx(ctx).Debugw("Reading Item", "id", itemID, "time", time.Now())
item, err := service.Client().DrivesById(driveID).ItemsById(itemID).Get(ctx, nil)
if err != nil {
return nil, nil, errors.Wrapf(err, "failed to get item %s", itemID)
}
logger.Ctx(ctx).Debugw("reading item", "name", *item.GetName(), "time", common.Now())
// Get the download URL - https://docs.microsoft.com/en-us/graph/api/driveitem-get-content
// These URLs are pre-authenticated and can be used to download the data using the standard
// http client
@ -63,15 +100,15 @@ func driveItemReader(
return nil, nil, errors.Wrapf(err, "failed to download file from %s", *downloadURL)
}
return driveItemInfo(item, *item.GetSize()), resp.Body, nil
return item, resp.Body, nil
}
// driveItemInfo will populate a details.OneDriveInfo struct
// oneDriveItemInfo will populate a details.OneDriveInfo struct
// with properties from the drive item. ItemSize is specified
// separately for restore processes because the local itemable
// doesn't have its size value updated as a side effect of creation,
// and kiota drops any SetSize update.
func driveItemInfo(di models.DriveItemable, itemSize int64) *details.OneDriveInfo {
func oneDriveItemInfo(di models.DriveItemable, itemSize int64) *details.OneDriveInfo {
ed, ok := di.GetCreatedBy().GetUser().GetAdditionalData()["email"]
email := ""
@ -89,6 +126,39 @@ func driveItemInfo(di models.DriveItemable, itemSize int64) *details.OneDriveInf
}
}
// sharePointItemInfo will populate a details.SharePointInfo struct
// with properties from the drive item. ItemSize is specified
// separately for restore processes because the local itemable
// doesn't have its size value updated as a side effect of creation,
// and kiota drops any SetSize update.
func sharePointItemInfo(di models.DriveItemable, itemSize int64) *details.SharePointInfo {
var (
id string
url string
)
gsi := di.GetSharepointIds()
if gsi != nil {
if gsi.GetSiteId() != nil {
id = *gsi.GetSiteId()
}
if gsi.GetSiteUrl() != nil {
url = *gsi.GetSiteUrl()
}
}
return &details.SharePointInfo{
ItemType: details.OneDriveItem,
ItemName: *di.GetName(),
Created: *di.GetCreatedDateTime(),
Modified: *di.GetLastModifiedDateTime(),
Size: itemSize,
Owner: id,
WebURL: url,
}
}
// driveItemWriter is used to initialize and return an io.Writer to upload data for the specified item
// It does so by creating an upload session and using that URL to initialize an `itemWriter`
func driveItemWriter(


@ -5,23 +5,25 @@ import (
"context"
"io"
"testing"
"time"
msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
"github.com/microsoftgraph/msgraph-sdk-go/models"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester"
)
type ItemIntegrationSuite struct {
suite.Suite
user string
driveID string
client *msgraphsdk.GraphServiceClient
adapter *msgraphsdk.GraphRequestAdapter
// site string
// siteDriveID string
user string
userDriveID string
client *msgraphsdk.GraphServiceClient
adapter *msgraphsdk.GraphRequestAdapter
}
func (suite *ItemIntegrationSuite) Client() *msgraphsdk.GraphServiceClient {
@ -49,31 +51,45 @@ func TestItemIntegrationSuite(t *testing.T) {
}
func (suite *ItemIntegrationSuite) SetupSuite() {
t := suite.T()
ctx, flush := tester.NewContext()
defer flush()
_, err := tester.GetRequiredEnvVars(tester.M365AcctCredEnvs...)
require.NoError(suite.T(), err)
require.NoError(t, err)
a := tester.NewM365Account(suite.T())
a := tester.NewM365Account(t)
m365, err := a.M365Config()
require.NoError(suite.T(), err)
require.NoError(t, err)
adapter, err := graph.CreateAdapter(m365.AzureTenantID, m365.AzureClientID, m365.AzureClientSecret)
require.NoError(suite.T(), err)
require.NoError(t, err)
suite.client = msgraphsdk.NewGraphServiceClient(adapter)
suite.adapter = adapter
suite.user = tester.SecondaryM365UserID(suite.T())
// TODO: fulfill file preconditions required for testing (expected files w/in drive
// and guaranteed drive read-write access)
// suite.site = tester.M365SiteID(t)
// spDrives, err := drives(ctx, suite, suite.site, SharePointSource)
// require.NoError(t, err)
// // Test Requirement 1: Need a drive
// require.Greaterf(t, len(spDrives), 0, "site %s does not have a drive", suite.site)
drives, err := drives(ctx, suite, suite.user, OneDriveSource)
require.NoError(suite.T(), err)
// // Pick the first drive
// suite.siteDriveID = *spDrives[0].GetId()
suite.user = tester.SecondaryM365UserID(t)
odDrives, err := drives(ctx, suite, suite.user, OneDriveSource)
require.NoError(t, err)
// Test Requirement 1: Need a drive
require.Greaterf(suite.T(), len(drives), 0, "user %s does not have a drive", suite.user)
require.Greaterf(t, len(odDrives), 0, "user %s does not have a drive", suite.user)
// Pick the first drive
suite.driveID = *drives[0].GetId()
suite.userDriveID = *odDrives[0].GetId()
}
// TestItemReader is an integration test that makes a few assumptions
@ -81,7 +97,7 @@ func (suite *ItemIntegrationSuite) SetupSuite() {
// 1) It assumes the test user has a drive
// 2) It assumes the drive has a file it can use to test `driveItemReader`
// The test checks these in below
func (suite *ItemIntegrationSuite) TestItemReader() {
func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
ctx, flush := tester.NewContext()
defer flush()
@ -97,7 +113,7 @@ func (suite *ItemIntegrationSuite) TestItemReader() {
return nil
}
err := collectItems(ctx, suite, suite.driveID, itemCollector)
err := collectItems(ctx, suite, suite.userDriveID, itemCollector)
require.NoError(suite.T(), err)
// Test Requirement 2: Need a file
@ -106,73 +122,90 @@ func (suite *ItemIntegrationSuite) TestItemReader() {
driveItemID,
"no file item found for user %s drive %s",
suite.user,
suite.driveID,
suite.userDriveID,
)
// Read data for the file
itemInfo, itemData, err := driveItemReader(ctx, suite, suite.driveID, driveItemID)
itemInfo, itemData, err := oneDriveItemReader(ctx, suite, suite.userDriveID, driveItemID)
require.NoError(suite.T(), err)
require.NotNil(suite.T(), itemInfo)
require.NotEmpty(suite.T(), itemInfo.ItemName)
require.NotNil(suite.T(), itemInfo.OneDrive)
require.NotEmpty(suite.T(), itemInfo.OneDrive.ItemName)
size, err := io.Copy(io.Discard, itemData)
require.NoError(suite.T(), err)
require.NotZero(suite.T(), size)
require.Equal(suite.T(), size, itemInfo.Size)
suite.T().Logf("Read %d bytes from file %s.", size, itemInfo.ItemName)
require.Equal(suite.T(), size, itemInfo.OneDrive.Size)
suite.T().Logf("Read %d bytes from file %s.", size, itemInfo.OneDrive.ItemName)
}
// TestItemWriter is an integration test for uploading data to OneDrive
// It creates a new `testfolder_<timestamp>` folder with a new
// testitem_<timestamp> item and writes data to it
func (suite *ItemIntegrationSuite) TestItemWriter() {
ctx, flush := tester.NewContext()
defer flush()
table := []struct {
name string
driveID string
}{
{
name: "",
driveID: suite.userDriveID,
},
// {
// name: "sharePoint",
// driveID: suite.siteDriveID,
// },
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
ctx, flush := tester.NewContext()
defer flush()
root, err := suite.Client().DrivesById(suite.driveID).Root().Get(ctx, nil)
require.NoError(suite.T(), err)
root, err := suite.Client().DrivesById(test.driveID).Root().Get(ctx, nil)
require.NoError(suite.T(), err)
// Test Requirement 2: "Test Folder" should exist
folder, err := getFolder(ctx, suite, suite.driveID, *root.GetId(), "Test Folder")
require.NoError(suite.T(), err)
// Test Requirement 2: "Test Folder" should exist
folder, err := getFolder(ctx, suite, test.driveID, *root.GetId(), "Test Folder")
require.NoError(suite.T(), err)
newFolderName := "testfolder_" + time.Now().Format("2006-01-02T15-04-05")
suite.T().Logf("Test will create folder %s", newFolderName)
newFolderName := "testfolder_" + common.FormatNow(common.SimpleTimeTesting)
suite.T().Logf("Test will create folder %s", newFolderName)
newFolder, err := createItem(ctx, suite, suite.driveID, *folder.GetId(), newItem(newFolderName, true))
require.NoError(suite.T(), err)
newFolder, err := createItem(ctx, suite, test.driveID, *folder.GetId(), newItem(newFolderName, true))
require.NoError(suite.T(), err)
require.NotNil(suite.T(), newFolder.GetId())
require.NotNil(suite.T(), newFolder.GetId())
newItemName := "testItem_" + time.Now().Format("2006-01-02T15-04-05")
suite.T().Logf("Test will create item %s", newItemName)
newItemName := "testItem_" + common.FormatNow(common.SimpleTimeTesting)
suite.T().Logf("Test will create item %s", newItemName)
newItem, err := createItem(ctx, suite, suite.driveID, *newFolder.GetId(), newItem(newItemName, false))
require.NoError(suite.T(), err)
newItem, err := createItem(ctx, suite, test.driveID, *newFolder.GetId(), newItem(newItemName, false))
require.NoError(suite.T(), err)
require.NotNil(suite.T(), newItem.GetId())
require.NotNil(suite.T(), newItem.GetId())
// HACK: Leveraging this to test getFolder behavior for a file. `getFolder()` on the
// newly created item should fail because it's a file not a folder
_, err = getFolder(ctx, suite, suite.driveID, *newFolder.GetId(), newItemName)
require.ErrorIs(suite.T(), err, errFolderNotFound)
// HACK: Leveraging this to test getFolder behavior for a file. `getFolder()` on the
// newly created item should fail because it's a file not a folder
_, err = getFolder(ctx, suite, test.driveID, *newFolder.GetId(), newItemName)
require.ErrorIs(suite.T(), err, errFolderNotFound)
// Initialize a 100KB mockDataProvider
td, writeSize := mockDataReader(int64(100 * 1024))
// Initialize a 100KB mockDataProvider
td, writeSize := mockDataReader(int64(100 * 1024))
w, err := driveItemWriter(ctx, suite, suite.driveID, *newItem.GetId(), writeSize)
require.NoError(suite.T(), err)
w, err := driveItemWriter(ctx, suite, test.driveID, *newItem.GetId(), writeSize)
require.NoError(suite.T(), err)
// Using a 32 KB buffer for the copy allows us to validate the
// multi-part upload. `io.CopyBuffer` will only write 32 KB at
// a time
copyBuffer := make([]byte, 32*1024)
// Using a 32 KB buffer for the copy allows us to validate the
// multi-part upload. `io.CopyBuffer` will only write 32 KB at
// a time
copyBuffer := make([]byte, 32*1024)
size, err := io.CopyBuffer(w, td, copyBuffer)
require.NoError(suite.T(), err)
size, err := io.CopyBuffer(w, td, copyBuffer)
require.NoError(suite.T(), err)
require.Equal(suite.T(), writeSize, size)
require.Equal(suite.T(), writeSize, size)
})
}
}
func mockDataReader(size int64) (io.Reader, int64) {
@ -181,17 +214,34 @@ func mockDataReader(size int64) (io.Reader, int64) {
}
func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
ctx, flush := tester.NewContext()
defer flush()
table := []struct {
name string
driveID string
}{
{
name: "oneDrive",
driveID: suite.userDriveID,
},
// {
// name: "sharePoint",
// driveID: suite.siteDriveID,
// },
}
for _, test := range table {
suite.T().Run(test.name, func(t *testing.T) {
ctx, flush := tester.NewContext()
defer flush()
root, err := suite.Client().DrivesById(suite.driveID).Root().Get(ctx, nil)
require.NoError(suite.T(), err)
root, err := suite.Client().DrivesById(test.driveID).Root().Get(ctx, nil)
require.NoError(suite.T(), err)
// Lookup a folder that doesn't exist
_, err = getFolder(ctx, suite, suite.driveID, *root.GetId(), "FolderDoesNotExist")
require.ErrorIs(suite.T(), err, errFolderNotFound)
// Lookup a folder that doesn't exist
_, err = getFolder(ctx, suite, test.driveID, *root.GetId(), "FolderDoesNotExist")
require.ErrorIs(suite.T(), err, errFolderNotFound)
// Lookup a folder that does exist
_, err = getFolder(ctx, suite, suite.driveID, *root.GetId(), "")
require.NoError(suite.T(), err)
// Lookup a folder that does exist
_, err = getFolder(ctx, suite, test.driveID, *root.GetId(), "")
require.NoError(suite.T(), err)
})
}
}


@ -66,7 +66,7 @@ func RestoreCollections(
// Iterate through the data collections and restore the contents of each
for _, dc := range dcs {
temp, canceled := RestoreCollection(ctx, service, dc, dest.ContainerName, deets, errUpdater)
temp, canceled := RestoreCollection(ctx, service, dc, OneDriveSource, dest.ContainerName, deets, errUpdater)
restoreMetrics.Combine(temp)
@ -93,6 +93,7 @@ func RestoreCollection(
ctx context.Context,
service graph.Service,
dc data.Collection,
source driveSource,
restoreContainerName string,
deets *details.Details,
errUpdater func(string, error),
@ -151,7 +152,8 @@ func RestoreCollection(
itemData,
drivePath.driveID,
restoreFolderID,
copyBuffer)
copyBuffer,
source)
if err != nil {
errUpdater(itemData.UUID(), err)
continue
@ -169,9 +171,7 @@ func RestoreCollection(
itemPath.String(),
itemPath.ShortRef(),
"",
details.ItemInfo{
OneDrive: itemInfo,
})
itemInfo)
metrics.Successes++
}
@ -230,7 +230,8 @@ func restoreItem(
itemData data.Stream,
driveID, parentFolderID string,
copyBuffer []byte,
) (*details.OneDriveInfo, error) {
source driveSource,
) (details.ItemInfo, error) {
ctx, end := D.Span(ctx, "gc:oneDrive:restoreItem", D.Label("item_uuid", itemData.UUID()))
defer end()
@ -240,19 +241,19 @@ func restoreItem(
// Get the stream size (needed to create the upload session)
ss, ok := itemData.(data.StreamSize)
if !ok {
return nil, errors.Errorf("item %q does not implement DataStreamInfo", itemName)
return details.ItemInfo{}, errors.Errorf("item %q does not implement DataStreamInfo", itemName)
}
// Create Item
newItem, err := createItem(ctx, service, driveID, parentFolderID, newItem(itemData.UUID(), false))
if err != nil {
return nil, errors.Wrapf(err, "failed to create item %s", itemName)
return details.ItemInfo{}, errors.Wrapf(err, "failed to create item %s", itemName)
}
// Get a drive item writer
w, err := driveItemWriter(ctx, service, driveID, *newItem.GetId(), ss.Size())
if err != nil {
return nil, errors.Wrapf(err, "failed to create item upload session %s", itemName)
return details.ItemInfo{}, errors.Wrapf(err, "failed to create item upload session %s", itemName)
}
iReader := itemData.ToReader()
@ -263,8 +264,17 @@ func restoreItem(
// Upload the stream data
written, err := io.CopyBuffer(w, progReader, copyBuffer)
if err != nil {
return nil, errors.Wrapf(err, "failed to upload data: item %s", itemName)
return details.ItemInfo{}, errors.Wrapf(err, "failed to upload data: item %s", itemName)
}
return driveItemInfo(newItem, written), nil
dii := details.ItemInfo{}
switch source {
case SharePointSource:
dii.SharePoint = sharePointItemInfo(newItem, written)
default:
dii.OneDrive = oneDriveItemInfo(newItem, written)
}
return dii, nil
}


@ -40,7 +40,14 @@ func RestoreCollections(
switch dc.FullPath().Category() {
case path.LibrariesCategory:
metrics, canceled = onedrive.RestoreCollection(ctx, service, dc, dest.ContainerName, deets, errUpdater)
metrics, canceled = onedrive.RestoreCollection(
ctx,
service,
dc,
onedrive.OneDriveSource,
dest.ContainerName,
deets,
errUpdater)
default:
return nil, errors.Errorf("category %s not supported", dc.FullPath().Category())
}
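
Note that the libraries branch above still passes `onedrive.OneDriveSource` into `RestoreCollection`, so restored library items are recorded with OneDrive info for now. A self-contained sketch of a per-category source pick that a follow-up could adopt (stand-in types and illustrative category names, not part of this change):

```go
package main

import "fmt"

// Stand-ins for onedrive.driveSource and the restore path categories.
type driveSource int

const (
	unknownSource driveSource = iota
	OneDriveSource
	SharePointSource
)

type category string

const (
	FilesCategory     category = "files"
	LibrariesCategory category = "libraries"
)

// sourceForCategory sketches how a restore dispatcher could choose the
// source handed to RestoreCollection; the merged code passes OneDriveSource
// at both restore call sites today.
func sourceForCategory(c category) (driveSource, error) {
	switch c {
	case FilesCategory:
		return OneDriveSource, nil
	case LibrariesCategory:
		return SharePointSource, nil
	default:
		return unknownSource, fmt.Errorf("category %s not supported", c)
	}
}

func main() {
	src, err := sourceForCategory(LibrariesCategory)
	fmt.Println(src, err)
}
```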