populate sharepoint itemInfo (#1687)
## Description

Currently, all drive backup and restore actions populate a details.OneDriveInfo struct. This change branches that struct between OneDrive and SharePoint info, depending on the current source.

## Type of change

- [x] 🌻 Feature

## Issue(s)

* #1616

## Test Plan

- [x] ⚡ Unit test
parent: 9cab212776, commit: 4c976298d4
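The change in a nutshell: `details.ItemInfo` acts as a small union, holding either a `*details.OneDriveInfo` or a `*details.SharePointInfo`, and the drive `source` decides which branch gets filled. A minimal sketch of that idea follows; the helper itself is illustrative only and not part of the commit, but the struct and field names match those used in the diffs below.

```go
// Sketch only: not in the commit. Shows how a details.ItemInfo is expected
// to be populated for each drive source, mirroring the switches added in
// populateItems and restoreItem below.
func itemInfoFor(source driveSource, name string, size int64) details.ItemInfo {
	switch source {
	case SharePointSource:
		return details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: name, Size: size}}
	default:
		return details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: name, Size: size}}
	}
}
```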
@@ -49,6 +49,7 @@ type Collection struct {
 	driveItemIDs []string
 	// M365 ID of the drive this collection was created from
 	driveID string
+	source driveSource
 	service graph.Service
 	statusUpdater support.StatusUpdater
 	itemReader itemReaderFunc
@@ -59,7 +60,7 @@ type itemReaderFunc func(
 	ctx context.Context,
 	service graph.Service,
 	driveID, itemID string,
-) (itemInfo *details.OneDriveInfo, itemData io.ReadCloser, err error)
+) (itemInfo details.ItemInfo, itemData io.ReadCloser, err error)

 // NewCollection creates a Collection
 func NewCollection(
@@ -67,17 +68,25 @@ func NewCollection(
 	driveID string,
 	service graph.Service,
 	statusUpdater support.StatusUpdater,
+	source driveSource,
 ) *Collection {
 	c := &Collection{
 		folderPath: folderPath,
 		driveItemIDs: []string{},
 		driveID: driveID,
+		source: source,
 		service: service,
 		data: make(chan data.Stream, collectionChannelBufferSize),
 		statusUpdater: statusUpdater,
 	}

 	// Allows tests to set a mock populator
-	c.itemReader = driveItemReader
+	switch source {
+	case SharePointSource:
+		c.itemReader = sharePointItemReader
+	default:
+		c.itemReader = oneDriveItemReader
+	}

 	return c
 }
@@ -114,7 +123,7 @@ func (oc Collection) State() data.CollectionState {
 type Item struct {
 	id string
 	data io.ReadCloser
-	info *details.OneDriveInfo
+	info details.ItemInfo
 }

 func (od *Item) UUID() string {
@@ -131,7 +140,7 @@ func (od Item) Deleted() bool {
 }

 func (od *Item) Info() details.ItemInfo {
-	return details.ItemInfo{OneDrive: od.info}
+	return od.info
 }

 // TODO(ashmrtn): Uncomment when #1702 is resolved.
@@ -190,7 +199,7 @@ func (oc *Collection) populateItems(ctx context.Context) {

 		// Read the item
 		var (
-			itemInfo *details.OneDriveInfo
+			itemInfo details.ItemInfo
 			itemData io.ReadCloser
 			err error
 		)
@@ -213,18 +222,32 @@ func (oc *Collection) populateItems(ctx context.Context) {
 			return
 		}

+		var (
+			itemName string
+			itemSize int64
+		)
+
+		switch oc.source {
+		case SharePointSource:
+			itemInfo.SharePoint.ParentPath = parentPathString
+			itemName = itemInfo.SharePoint.ItemName
+			itemSize = itemInfo.SharePoint.Size
+		default:
+			itemInfo.OneDrive.ParentPath = parentPathString
+			itemName = itemInfo.OneDrive.ItemName
+			itemSize = itemInfo.OneDrive.Size
+		}
+
+		progReader, closer := observe.ItemProgress(itemData, observe.ItemBackupMsg, itemName, itemSize)
+		go closer()
+
 		// Item read successfully, add to collection
 		atomic.AddInt64(&itemsRead, 1)
 		// byteCount iteration
-		atomic.AddInt64(&byteCount, itemInfo.Size)
-
-		itemInfo.ParentPath = parentPathString
-
-		progReader, closer := observe.ItemProgress(itemData, observe.ItemBackupMsg, itemInfo.ItemName, itemInfo.Size)
-
-		go closer()
+		atomic.AddInt64(&byteCount, itemSize)

 		oc.data <- &Item{
-			id: itemInfo.ItemName,
+			id: itemName,
 			data: progReader,
 			info: itemInfo,
 		}
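Because `Item.Info()` now returns the `details.ItemInfo` as-is, consumers have to check which branch is populated before reading item properties. A minimal sketch of such a consumer, assuming exactly one branch is non-nil (as the switch in `populateItems` above arranges); the helper is illustrative and not part of this commit.

```go
// Sketch only: reads whichever branch of a details.ItemInfo was populated.
func nameAndSize(info details.ItemInfo) (string, int64) {
	if info.SharePoint != nil {
		return info.SharePoint.ItemName, info.SharePoint.Size
	}

	if info.OneDrive != nil {
		return info.OneDrive.ItemName, info.OneDrive.Size
	}

	return "", 0
}
```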
@@ -7,7 +7,6 @@ import (
 	"io"
 	"sync"
 	"testing"
-	"time"

 	msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
 	"github.com/stretchr/testify/assert"
@@ -20,31 +19,31 @@ import (
 	"github.com/alcionai/corso/src/pkg/backup/details"
 )

-type OneDriveCollectionSuite struct {
+type CollectionUnitTestSuite struct {
 	suite.Suite
 }

-// Allows `*OneDriveCollectionSuite` to be used as a graph.Service
+// Allows `*CollectionUnitTestSuite` to be used as a graph.Service
 // TODO: Implement these methods

-func (suite *OneDriveCollectionSuite) Client() *msgraphsdk.GraphServiceClient {
+func (suite *CollectionUnitTestSuite) Client() *msgraphsdk.GraphServiceClient {
 	return nil
 }

-func (suite *OneDriveCollectionSuite) Adapter() *msgraphsdk.GraphRequestAdapter {
+func (suite *CollectionUnitTestSuite) Adapter() *msgraphsdk.GraphRequestAdapter {
 	return nil
 }

-func (suite *OneDriveCollectionSuite) ErrPolicy() bool {
+func (suite *CollectionUnitTestSuite) ErrPolicy() bool {
 	return false
 }

-func TestOneDriveCollectionSuite(t *testing.T) {
-	suite.Run(t, new(OneDriveCollectionSuite))
+func TestCollectionUnitTestSuite(t *testing.T) {
+	suite.Run(t, new(CollectionUnitTestSuite))
 }

 // Returns a status update function that signals the specified WaitGroup when it is done
-func (suite *OneDriveCollectionSuite) testStatusUpdater(
+func (suite *CollectionUnitTestSuite) testStatusUpdater(
 	wg *sync.WaitGroup,
 	statusToUpdate *support.ConnectorOperationStatus,
 ) support.StatusUpdater {
@@ -56,40 +55,70 @@ func (suite *OneDriveCollectionSuite) testStatusUpdater(
 	}
 }

-func (suite *OneDriveCollectionSuite) TestOneDriveCollection() {
-	t := suite.T()
-	wg := sync.WaitGroup{}
-	collStatus := support.ConnectorOperationStatus{}
-	now := time.Now()
+func (suite *CollectionUnitTestSuite) TestCollection() {
+	var (
+		testItemID   = "fakeItemID"
+		testItemName = "itemName"
+		testItemData = []byte("testdata")
+	)

-	folderPath, err := GetCanonicalPath("drive/driveID1/root:/dir1/dir2/dir3", "a-tenant", "a-user", OneDriveSource)
+	table := []struct {
+		name       string
+		source     driveSource
+		itemReader itemReaderFunc
+		infoFrom   func(*testing.T, details.ItemInfo) (string, string)
+	}{
+		{
+			name:   "oneDrive",
+			source: OneDriveSource,
+			itemReader: func(context.Context, graph.Service, string, string) (details.ItemInfo, io.ReadCloser, error) {
+				return details.ItemInfo{OneDrive: &details.OneDriveInfo{ItemName: testItemName}},
+					io.NopCloser(bytes.NewReader(testItemData)),
+					nil
+			},
+			infoFrom: func(t *testing.T, dii details.ItemInfo) (string, string) {
+				require.NotNil(t, dii.OneDrive)
+				return dii.OneDrive.ItemName, dii.OneDrive.ParentPath
+			},
+		},
+		{
+			name:   "sharePoint",
+			source: SharePointSource,
+			itemReader: func(context.Context, graph.Service, string, string) (details.ItemInfo, io.ReadCloser, error) {
+				return details.ItemInfo{SharePoint: &details.SharePointInfo{ItemName: testItemName}},
+					io.NopCloser(bytes.NewReader(testItemData)),
+					nil
+			},
+			infoFrom: func(t *testing.T, dii details.ItemInfo) (string, string) {
+				require.NotNil(t, dii.SharePoint)
+				return dii.SharePoint.ItemName, dii.SharePoint.ParentPath
+			},
+		},
+	}
+	for _, test := range table {
+		suite.T().Run(test.name, func(t *testing.T) {
+			var (
+				wg         = sync.WaitGroup{}
+				collStatus = support.ConnectorOperationStatus{}
+				readItems  = []data.Stream{}
+			)
+
+			folderPath, err := GetCanonicalPath("drive/driveID1/root:/dir1/dir2/dir3", "tenant", "owner", test.source)
 			require.NoError(t, err)
 			driveFolderPath, err := getDriveFolderPath(folderPath)
 			require.NoError(t, err)

-	coll := NewCollection(folderPath, "fakeDriveID", suite, suite.testStatusUpdater(&wg, &collStatus))
+			coll := NewCollection(folderPath, "drive-id", suite, suite.testStatusUpdater(&wg, &collStatus), test.source)
 			require.NotNil(t, coll)
 			assert.Equal(t, folderPath, coll.FullPath())

-	testItemID := "fakeItemID"
-	testItemName := "itemName"
-	testItemData := []byte("testdata")
-
 			// Set a item reader, add an item and validate we get the item back
 			coll.Add(testItemID)
-	coll.itemReader = func(context.Context, graph.Service, string, string) (*details.OneDriveInfo, io.ReadCloser, error) {
-		return &details.OneDriveInfo{
-			ItemName: testItemName,
-			Modified: now,
-		}, io.NopCloser(bytes.NewReader(testItemData)), nil
-	}
+			coll.itemReader = test.itemReader

 			// Read items from the collection
 			wg.Add(1)

-	readItems := []data.Stream{}
-
 			for item := range coll.Items() {
 				readItems = append(readItems, item)
 			}
@@ -106,43 +135,59 @@ func (suite *OneDriveCollectionSuite) TestOneDriveCollection() {
 			readItemInfo := readItem.(data.StreamInfo)

 			assert.Equal(t, testItemName, readItem.UUID())

-	// TODO(ashmrtn): Uncomment when #1702 is resolved.
-	// require.Implements(t, (*data.StreamModTime)(nil), readItem)
-	// mt := readItem.(data.StreamModTime)
-	// assert.Equal(t, now, mt.ModTime())
-
 			readData, err := io.ReadAll(readItem.ToReader())
 			require.NoError(t, err)

+			name, parentPath := test.infoFrom(t, readItemInfo.Info())
+
 			assert.Equal(t, testItemData, readData)
-	require.NotNil(t, readItemInfo.Info())
-	require.NotNil(t, readItemInfo.Info().OneDrive)
-	assert.Equal(t, testItemName, readItemInfo.Info().OneDrive.ItemName)
-	assert.Equal(t, driveFolderPath, readItemInfo.Info().OneDrive.ParentPath)
+			assert.Equal(t, testItemName, name)
+			assert.Equal(t, driveFolderPath, parentPath)
+		})
+	}
 }

-func (suite *OneDriveCollectionSuite) TestOneDriveCollectionReadError() {
-	t := suite.T()
-	collStatus := support.ConnectorOperationStatus{}
-	wg := sync.WaitGroup{}
+func (suite *CollectionUnitTestSuite) TestCollectionReadError() {
+	table := []struct {
+		name   string
+		source driveSource
+	}{
+		{
+			name:   "oneDrive",
+			source: OneDriveSource,
+		},
+		{
+			name:   "sharePoint",
+			source: SharePointSource,
+		},
+	}
+	for _, test := range table {
+		suite.T().Run(test.name, func(t *testing.T) {
+			var (
+				collStatus = support.ConnectorOperationStatus{}
+				wg         = sync.WaitGroup{}
+			)

 			wg.Add(1)

-	folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", OneDriveSource)
+			folderPath, err := GetCanonicalPath("drive/driveID1/root:/folderPath", "a-tenant", "a-user", test.source)
 			require.NoError(t, err)

-	coll := NewCollection(folderPath, "fakeDriveID", suite, suite.testStatusUpdater(&wg, &collStatus))
+			coll := NewCollection(folderPath, "fakeDriveID", suite, suite.testStatusUpdater(&wg, &collStatus), test.source)
 			coll.Add("testItemID")

 			readError := errors.New("Test error")

-	coll.itemReader = func(context.Context, graph.Service, string, string) (*details.OneDriveInfo, io.ReadCloser, error) {
-		return nil, nil, readError
-	}
+			coll.itemReader = func(context.Context, graph.Service, string, string) (details.ItemInfo, io.ReadCloser, error) {
+				return details.ItemInfo{}, nil, readError
+			}

 			coll.Items()
 			wg.Wait()

 			// Expect no items
 			require.Equal(t, 1, collStatus.ObjectCount)
 			require.Equal(t, 0, collStatus.Successful)
+		})
+	}
 }
@@ -138,6 +138,7 @@ func (c *Collections) UpdateCollections(ctx context.Context, driveID string, ite
 			driveID,
 			c.service,
 			c.statusUpdater,
+			c.source,
 		)

 		c.CollectionMap[collectionPath.String()] = col
@@ -11,7 +11,6 @@ import (
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
 	"github.com/pkg/errors"

-	"github.com/alcionai/corso/src/internal/common"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/connector/uploadsession"
@@ -25,23 +24,61 @@ const (
 	downloadURLKey = "@microsoft.graph.downloadUrl"
 )

-// itemReader will return a io.ReadCloser for the specified item
+// sharePointItemReader will return a io.ReadCloser for the specified item
+// It crafts this by querying M365 for a download URL for the item
+// and using a http client to initialize a reader
+func sharePointItemReader(
+	ctx context.Context,
+	service graph.Service,
+	driveID, itemID string,
+) (details.ItemInfo, io.ReadCloser, error) {
+	item, rc, err := driveItemReader(ctx, service, driveID, itemID)
+	if err != nil {
+		return details.ItemInfo{}, nil, err
+	}
+
+	dii := details.ItemInfo{
+		SharePoint: sharePointItemInfo(item, *item.GetSize()),
+	}
+
+	return dii, rc, nil
+}
+
+// oneDriveItemReader will return a io.ReadCloser for the specified item
+// It crafts this by querying M365 for a download URL for the item
+// and using a http client to initialize a reader
+func oneDriveItemReader(
+	ctx context.Context,
+	service graph.Service,
+	driveID, itemID string,
+) (details.ItemInfo, io.ReadCloser, error) {
+	item, rc, err := driveItemReader(ctx, service, driveID, itemID)
+	if err != nil {
+		return details.ItemInfo{}, nil, err
+	}
+
+	dii := details.ItemInfo{
+		OneDrive: oneDriveItemInfo(item, *item.GetSize()),
+	}
+
+	return dii, rc, nil
+}
+
+// driveItemReader will return a io.ReadCloser for the specified item
 // It crafts this by querying M365 for a download URL for the item
 // and using a http client to initialize a reader
 func driveItemReader(
 	ctx context.Context,
 	service graph.Service,
 	driveID, itemID string,
-) (*details.OneDriveInfo, io.ReadCloser, error) {
-	logger.Ctx(ctx).Debugf("Reading Item %s at %s", itemID, time.Now())
+) (models.DriveItemable, io.ReadCloser, error) {
+	logger.Ctx(ctx).Debugw("Reading Item", "id", itemID, "time", time.Now())

 	item, err := service.Client().DrivesById(driveID).ItemsById(itemID).Get(ctx, nil)
 	if err != nil {
 		return nil, nil, errors.Wrapf(err, "failed to get item %s", itemID)
 	}

-	logger.Ctx(ctx).Debugw("reading item", "name", *item.GetName(), "time", common.Now())
-
 	// Get the download URL - https://docs.microsoft.com/en-us/graph/api/driveitem-get-content
 	// These URLs are pre-authenticated and can be used to download the data using the standard
 	// http client
@@ -63,15 +100,15 @@ func driveItemReader(
 		return nil, nil, errors.Wrapf(err, "failed to download file from %s", *downloadURL)
 	}

-	return driveItemInfo(item, *item.GetSize()), resp.Body, nil
+	return item, resp.Body, nil
 }

-// driveItemInfo will populate a details.OneDriveInfo struct
+// oneDriveItemInfo will populate a details.OneDriveInfo struct
 // with properties from the drive item. ItemSize is specified
 // separately for restore processes because the local itemable
 // doesn't have its size value updated as a side effect of creation,
 // and kiota drops any SetSize update.
-func driveItemInfo(di models.DriveItemable, itemSize int64) *details.OneDriveInfo {
+func oneDriveItemInfo(di models.DriveItemable, itemSize int64) *details.OneDriveInfo {
 	ed, ok := di.GetCreatedBy().GetUser().GetAdditionalData()["email"]

 	email := ""
@@ -89,6 +126,39 @@ func driveItemInfo(di models.DriveItemable, itemSize int64) *details.OneDriveInf
 		}
 	}
 }
+
+// sharePointItemInfo will populate a details.SharePointInfo struct
+// with properties from the drive item. ItemSize is specified
+// separately for restore processes because the local itemable
+// doesn't have its size value updated as a side effect of creation,
+// and kiota drops any SetSize update.
+func sharePointItemInfo(di models.DriveItemable, itemSize int64) *details.SharePointInfo {
+	var (
+		id  string
+		url string
+	)
+
+	gsi := di.GetSharepointIds()
+	if gsi != nil {
+		if gsi.GetSiteId() != nil {
+			id = *gsi.GetSiteId()
+		}
+
+		if gsi.GetSiteUrl() != nil {
+			url = *gsi.GetSiteUrl()
+		}
+	}
+
+	return &details.SharePointInfo{
+		ItemType: details.OneDriveItem,
+		ItemName: *di.GetName(),
+		Created:  *di.GetCreatedDateTime(),
+		Modified: *di.GetLastModifiedDateTime(),
+		Size:     itemSize,
+		Owner:    id,
+		WebURL:   url,
+	}
+}

 // driveItemWriter is used to initialize and return an io.Writer to upload data for the specified item
 // It does so by creating an upload session and using that URL to initialize an `itemWriter`
 func driveItemWriter(
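Both wrappers above delegate to `driveItemReader`, which now hands back the raw `models.DriveItemable`; only the wrapper decides whether to build a `OneDriveInfo` or a `SharePointInfo`. A small usage sketch (illustrative only, not part of the commit) of selecting the right reader for a source, matching the switch that `NewCollection` performs earlier in this commit:

```go
// Sketch only: picks the reader that matches the drive source.
// Both functions satisfy the itemReaderFunc signature defined in the
// collection diff above.
func readerFor(source driveSource) itemReaderFunc {
	if source == SharePointSource {
		return sharePointItemReader
	}

	return oneDriveItemReader
}
```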
@@ -5,21 +5,23 @@ import (
 	"context"
 	"io"
 	"testing"
-	"time"

 	msgraphsdk "github.com/microsoftgraph/msgraph-sdk-go"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"

+	"github.com/alcionai/corso/src/internal/common"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/tester"
 )

 type ItemIntegrationSuite struct {
 	suite.Suite
+	// site string
+	// siteDriveID string
 	user string
-	driveID string
+	userDriveID string
 	client *msgraphsdk.GraphServiceClient
 	adapter *msgraphsdk.GraphRequestAdapter
 }
@@ -49,31 +51,45 @@ func TestItemIntegrationSuite(t *testing.T) {
 }

 func (suite *ItemIntegrationSuite) SetupSuite() {
+	t := suite.T()
+
 	ctx, flush := tester.NewContext()
 	defer flush()

 	_, err := tester.GetRequiredEnvVars(tester.M365AcctCredEnvs...)
-	require.NoError(suite.T(), err)
+	require.NoError(t, err)

-	a := tester.NewM365Account(suite.T())
+	a := tester.NewM365Account(t)

 	m365, err := a.M365Config()
-	require.NoError(suite.T(), err)
+	require.NoError(t, err)

 	adapter, err := graph.CreateAdapter(m365.AzureTenantID, m365.AzureClientID, m365.AzureClientSecret)
-	require.NoError(suite.T(), err)
+	require.NoError(t, err)

 	suite.client = msgraphsdk.NewGraphServiceClient(adapter)
 	suite.adapter = adapter

-	suite.user = tester.SecondaryM365UserID(suite.T())
+	// TODO: fulfill file preconditions required for testing (expected files w/in drive
+	// and guarateed drive read-write access)
+	// suite.site = tester.M365SiteID(t)
+	// spDrives, err := drives(ctx, suite, suite.site, SharePointSource)
+	// require.NoError(t, err)
+	// // Test Requirement 1: Need a drive
+	// require.Greaterf(t, len(spDrives), 0, "site %s does not have a drive", suite.site)
+	// // Pick the first drive
+	// suite.siteDriveID = *spDrives[0].GetId()

-	drives, err := drives(ctx, suite, suite.user, OneDriveSource)
-	require.NoError(suite.T(), err)
+	suite.user = tester.SecondaryM365UserID(t)
+
+	odDrives, err := drives(ctx, suite, suite.user, OneDriveSource)
+	require.NoError(t, err)
 	// Test Requirement 1: Need a drive
-	require.Greaterf(suite.T(), len(drives), 0, "user %s does not have a drive", suite.user)
+	require.Greaterf(t, len(odDrives), 0, "user %s does not have a drive", suite.user)

 	// Pick the first drive
-	suite.driveID = *drives[0].GetId()
+	suite.userDriveID = *odDrives[0].GetId()
 }

 // TestItemReader is an integration test that makes a few assumptions
@@ -81,7 +97,7 @@ func (suite *ItemIntegrationSuite) SetupSuite() {
 // 1) It assumes the test user has a drive
 // 2) It assumes the drive has a file it can use to test `driveItemReader`
 // The test checks these in below
-func (suite *ItemIntegrationSuite) TestItemReader() {
+func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
 	ctx, flush := tester.NewContext()
 	defer flush()

@@ -97,7 +113,7 @@ func (suite *ItemIntegrationSuite) TestItemReader() {

 		return nil
 	}
-	err := collectItems(ctx, suite, suite.driveID, itemCollector)
+	err := collectItems(ctx, suite, suite.userDriveID, itemCollector)
 	require.NoError(suite.T(), err)

 	// Test Requirement 2: Need a file
@@ -106,62 +122,77 @@ func (suite *ItemIntegrationSuite) TestItemReader() {
 		driveItemID,
 		"no file item found for user %s drive %s",
 		suite.user,
-		suite.driveID,
+		suite.userDriveID,
 	)

 	// Read data for the file

-	itemInfo, itemData, err := driveItemReader(ctx, suite, suite.driveID, driveItemID)
+	itemInfo, itemData, err := oneDriveItemReader(ctx, suite, suite.userDriveID, driveItemID)
 	require.NoError(suite.T(), err)
-	require.NotNil(suite.T(), itemInfo)
-	require.NotEmpty(suite.T(), itemInfo.ItemName)
+	require.NotNil(suite.T(), itemInfo.OneDrive)
+	require.NotEmpty(suite.T(), itemInfo.OneDrive.ItemName)

 	size, err := io.Copy(io.Discard, itemData)
 	require.NoError(suite.T(), err)
 	require.NotZero(suite.T(), size)
-	require.Equal(suite.T(), size, itemInfo.Size)
-	suite.T().Logf("Read %d bytes from file %s.", size, itemInfo.ItemName)
+	require.Equal(suite.T(), size, itemInfo.OneDrive.Size)
+	suite.T().Logf("Read %d bytes from file %s.", size, itemInfo.OneDrive.ItemName)
 }

 // TestItemWriter is an integration test for uploading data to OneDrive
 // It creates a new `testfolder_<timestamp` folder with a new
 // testitem_<timestamp> item and writes data to it
 func (suite *ItemIntegrationSuite) TestItemWriter() {
+	table := []struct {
+		name    string
+		driveID string
+	}{
+		{
+			name:    "",
+			driveID: suite.userDriveID,
+		},
+		// {
+		// 	name: "sharePoint",
+		// 	driveID: suite.siteDriveID,
+		// },
+	}
+	for _, test := range table {
+		suite.T().Run(test.name, func(t *testing.T) {
 			ctx, flush := tester.NewContext()
 			defer flush()

-	root, err := suite.Client().DrivesById(suite.driveID).Root().Get(ctx, nil)
+			root, err := suite.Client().DrivesById(test.driveID).Root().Get(ctx, nil)
 			require.NoError(suite.T(), err)

 			// Test Requirement 2: "Test Folder" should exist
-	folder, err := getFolder(ctx, suite, suite.driveID, *root.GetId(), "Test Folder")
+			folder, err := getFolder(ctx, suite, test.driveID, *root.GetId(), "Test Folder")
 			require.NoError(suite.T(), err)

-	newFolderName := "testfolder_" + time.Now().Format("2006-01-02T15-04-05")
+			newFolderName := "testfolder_" + common.FormatNow(common.SimpleTimeTesting)
 			suite.T().Logf("Test will create folder %s", newFolderName)

-	newFolder, err := createItem(ctx, suite, suite.driveID, *folder.GetId(), newItem(newFolderName, true))
+			newFolder, err := createItem(ctx, suite, test.driveID, *folder.GetId(), newItem(newFolderName, true))
 			require.NoError(suite.T(), err)

 			require.NotNil(suite.T(), newFolder.GetId())

-	newItemName := "testItem_" + time.Now().Format("2006-01-02T15-04-05")
+			newItemName := "testItem_" + common.FormatNow(common.SimpleTimeTesting)
 			suite.T().Logf("Test will create item %s", newItemName)

-	newItem, err := createItem(ctx, suite, suite.driveID, *newFolder.GetId(), newItem(newItemName, false))
+			newItem, err := createItem(ctx, suite, test.driveID, *newFolder.GetId(), newItem(newItemName, false))
 			require.NoError(suite.T(), err)

 			require.NotNil(suite.T(), newItem.GetId())

 			// HACK: Leveraging this to test getFolder behavior for a file. `getFolder()` on the
 			// newly created item should fail because it's a file not a folder
-	_, err = getFolder(ctx, suite, suite.driveID, *newFolder.GetId(), newItemName)
+			_, err = getFolder(ctx, suite, test.driveID, *newFolder.GetId(), newItemName)
 			require.ErrorIs(suite.T(), err, errFolderNotFound)

 			// Initialize a 100KB mockDataProvider
 			td, writeSize := mockDataReader(int64(100 * 1024))

-	w, err := driveItemWriter(ctx, suite, suite.driveID, *newItem.GetId(), writeSize)
+			w, err := driveItemWriter(ctx, suite, test.driveID, *newItem.GetId(), writeSize)
 			require.NoError(suite.T(), err)

 			// Using a 32 KB buffer for the copy allows us to validate the
@@ -173,6 +204,8 @@ func (suite *ItemIntegrationSuite) TestItemWriter() {
 			require.NoError(suite.T(), err)

 			require.Equal(suite.T(), writeSize, size)
+		})
+	}
 }

 func mockDataReader(size int64) (io.Reader, int64) {
@@ -181,17 +214,34 @@ func mockDataReader(size int64) (io.Reader, int64) {
 }

 func (suite *ItemIntegrationSuite) TestDriveGetFolder() {
+	table := []struct {
+		name    string
+		driveID string
+	}{
+		{
+			name:    "oneDrive",
+			driveID: suite.userDriveID,
+		},
+		// {
+		// 	name: "sharePoint",
+		// 	driveID: suite.siteDriveID,
+		// },
+	}
+	for _, test := range table {
+		suite.T().Run(test.name, func(t *testing.T) {
 			ctx, flush := tester.NewContext()
 			defer flush()

-	root, err := suite.Client().DrivesById(suite.driveID).Root().Get(ctx, nil)
+			root, err := suite.Client().DrivesById(test.driveID).Root().Get(ctx, nil)
 			require.NoError(suite.T(), err)

 			// Lookup a folder that doesn't exist
-	_, err = getFolder(ctx, suite, suite.driveID, *root.GetId(), "FolderDoesNotExist")
+			_, err = getFolder(ctx, suite, test.driveID, *root.GetId(), "FolderDoesNotExist")
 			require.ErrorIs(suite.T(), err, errFolderNotFound)

 			// Lookup a folder that does exist
-	_, err = getFolder(ctx, suite, suite.driveID, *root.GetId(), "")
+			_, err = getFolder(ctx, suite, test.driveID, *root.GetId(), "")
 			require.NoError(suite.T(), err)
+		})
+	}
 }
@@ -66,7 +66,7 @@ func RestoreCollections(

 	// Iterate through the data collections and restore the contents of each
 	for _, dc := range dcs {
-		temp, canceled := RestoreCollection(ctx, service, dc, dest.ContainerName, deets, errUpdater)
+		temp, canceled := RestoreCollection(ctx, service, dc, OneDriveSource, dest.ContainerName, deets, errUpdater)

 		restoreMetrics.Combine(temp)

@@ -93,6 +93,7 @@ func RestoreCollection(
 	ctx context.Context,
 	service graph.Service,
 	dc data.Collection,
+	source driveSource,
 	restoreContainerName string,
 	deets *details.Details,
 	errUpdater func(string, error),
@@ -151,7 +152,8 @@ func RestoreCollection(
 			itemData,
 			drivePath.driveID,
 			restoreFolderID,
-			copyBuffer)
+			copyBuffer,
+			source)
 		if err != nil {
 			errUpdater(itemData.UUID(), err)
 			continue
@@ -169,9 +171,7 @@ func RestoreCollection(
 			itemPath.String(),
 			itemPath.ShortRef(),
 			"",
-			details.ItemInfo{
-				OneDrive: itemInfo,
-			})
+			itemInfo)

 		metrics.Successes++
 	}
@@ -230,7 +230,8 @@ func restoreItem(
 	itemData data.Stream,
 	driveID, parentFolderID string,
 	copyBuffer []byte,
-) (*details.OneDriveInfo, error) {
+	source driveSource,
+) (details.ItemInfo, error) {
 	ctx, end := D.Span(ctx, "gc:oneDrive:restoreItem", D.Label("item_uuid", itemData.UUID()))
 	defer end()

@@ -240,19 +241,19 @@ func restoreItem(
 	// Get the stream size (needed to create the upload session)
 	ss, ok := itemData.(data.StreamSize)
 	if !ok {
-		return nil, errors.Errorf("item %q does not implement DataStreamInfo", itemName)
+		return details.ItemInfo{}, errors.Errorf("item %q does not implement DataStreamInfo", itemName)
 	}

 	// Create Item
 	newItem, err := createItem(ctx, service, driveID, parentFolderID, newItem(itemData.UUID(), false))
 	if err != nil {
-		return nil, errors.Wrapf(err, "failed to create item %s", itemName)
+		return details.ItemInfo{}, errors.Wrapf(err, "failed to create item %s", itemName)
 	}

 	// Get a drive item writer
 	w, err := driveItemWriter(ctx, service, driveID, *newItem.GetId(), ss.Size())
 	if err != nil {
-		return nil, errors.Wrapf(err, "failed to create item upload session %s", itemName)
+		return details.ItemInfo{}, errors.Wrapf(err, "failed to create item upload session %s", itemName)
 	}

 	iReader := itemData.ToReader()
@@ -263,8 +264,17 @@ func restoreItem(
 	// Upload the stream data
 	written, err := io.CopyBuffer(w, progReader, copyBuffer)
 	if err != nil {
-		return nil, errors.Wrapf(err, "failed to upload data: item %s", itemName)
+		return details.ItemInfo{}, errors.Wrapf(err, "failed to upload data: item %s", itemName)
 	}

-	return driveItemInfo(newItem, written), nil
+	dii := details.ItemInfo{}
+
+	switch source {
+	case SharePointSource:
+		dii.SharePoint = sharePointItemInfo(newItem, written)
+	default:
+		dii.OneDrive = oneDriveItemInfo(newItem, written)
+	}
+
+	return dii, nil
 }
@@ -40,7 +40,14 @@ func RestoreCollections(

 		switch dc.FullPath().Category() {
 		case path.LibrariesCategory:
-			metrics, canceled = onedrive.RestoreCollection(ctx, service, dc, dest.ContainerName, deets, errUpdater)
+			metrics, canceled = onedrive.RestoreCollection(
+				ctx,
+				service,
+				dc,
+				onedrive.OneDriveSource,
+				dest.ContainerName,
+				deets,
+				errUpdater)
 		default:
 			return nil, errors.Errorf("category %s not supported", dc.FullPath().Category())
 		}