GC: OneDrive: Efficient Drive Name Update (#2733)

Updates `populateItems()` for OneDrive to reduce the overall number of Graph API calls made to obtain the drive name.

---
### Brief Description
- The drive name is saved with a collection field, `driveMap`.
- `driveMap` is a map where the key is the drive's M365 ID and the value is the drive's display name.
- During `populateItems()`, the drive name from the map is sufficient for OneDrive, but SharePoint requires updates: during SharePoint processing, the map is updated as additional drive libraries are referenced. A sketch of the lookup follows.
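
A rough sketch of the lookup this enables (illustrative only; the map literal and sample IDs below are invented, not the exact PR code):

```go
package main

import "fmt"

func main() {
	// Hypothetical shape of the driveMap described above:
	// key = drive M365 ID, value = drive display name.
	driveMap := map[string]string{}

	// Filled in once per drive while enumerating drives.
	driveMap["b!drive-m365-id"] = "Documents"

	// During populateItems(), resolving the name is a local map read
	// rather than a Graph API call per item.
	if name, ok := driveMap["b!drive-m365-id"]; ok {
		fmt.Println("drive name:", name)
	}

	// For SharePoint, new entries are added as additional drive
	// libraries are referenced during processing.
}
```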

#### Does this PR need a docs update or release note?


- [x]  No

#### Type of change


- [x] 🐛 Bugfix
#### Issue(s)

* closes #2732

#### Test Plan

- [x]  Unit test
Danny 2023-03-10 20:18:08 -05:00, committed by GitHub
commit 09124e7a73 (parent fd661d216a)
8 changed files with 187 additions and 161 deletions

View File

@@ -72,8 +72,11 @@ type Collection struct {
 	folderPath path.Path
 	// M365 IDs of file items within this collection
 	driveItems map[string]models.DriveItemable
-	// M365 ID of the drive this collection was created from
+	// Primary M365 ID of the drive this collection was created from
 	driveID string
+	// Display Name of the associated drive
+	driveName string
 	source driveSource
 	service graph.Servicer
 	statusUpdater support.StatusUpdater
@@ -337,6 +340,7 @@ func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) {
 		itemMeta     io.ReadCloser
 		itemMetaSize int
 		metaSuffix   string
+		err          error
 	)

 	ctx = clues.Add(ctx,
@@ -345,16 +349,7 @@ func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) {
 		"backup_item_size", itemSize,
 	)

-	// TODO: Removing the logic below because it introduces an extra Graph API call for
-	// every item being backed up. This can lead to throttling errors.
-	//
-	// pr, err := fetchParentReference(ctx, oc.service, item.GetParentReference())
-	// if err != nil {
-	// 	el.AddRecoverable(clues.Wrap(err, "getting parent reference").Label(fault.LabelForceNoBackupCreation))
-	// 	return
-	// }
-	// item.SetParentReference(pr)
+	item.SetParentReference(setName(item.GetParentReference(), oc.driveName))

 	isFile := item.GetFile() != nil

View File

@@ -69,8 +69,9 @@ type Collections struct {
 	ctrl control.Options

 	// collectionMap allows lookup of the data.BackupCollection
-	// for a OneDrive folder
-	CollectionMap map[string]*Collection
+	// for a OneDrive folder.
+	// driveID -> itemID -> collection
+	CollectionMap map[string]map[string]*Collection

 	// Not the most ideal, but allows us to change the pager function for testing
 	// as needed. This will allow us to mock out some scenarios during testing.
@@ -107,7 +108,7 @@ func NewCollections(
 		resourceOwner: resourceOwner,
 		source:        source,
 		matcher:       matcher,
-		CollectionMap: map[string]*Collection{},
+		CollectionMap: map[string]map[string]*Collection{},
 		drivePagerFunc: PagerForSource,
 		itemPagerFunc:  defaultItemPager,
 		service:        service,
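
For context, a minimal, hypothetical sketch of the new two-level map shape; the stand-in `Collection` type below is not the package's real one:

```go
package main

import "fmt"

// Collection stands in for the package's real type in this sketch.
type Collection struct{ fullPath string }

func main() {
	// driveID -> itemID -> collection, mirroring the new CollectionMap.
	cmap := map[string]map[string]*Collection{}

	driveID, itemID := "drive1", "folder1"

	// The inner map must exist before the first write for a drive;
	// Get() adds exactly this guard (see the hunk below).
	if _, ok := cmap[driveID]; !ok {
		cmap[driveID] = map[string]*Collection{}
	}

	cmap[driveID][itemID] = &Collection{fullPath: "/folder"}

	// Reads are safe even for unseen drives: indexing a nil inner map
	// yields the zero value and ok == false.
	_, ok := cmap["other-drive"]["x"]
	fmt.Println(ok) // false
}
```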
@@ -283,6 +284,7 @@ func (c *Collections) Get(
 		// Drive ID -> folder ID -> folder path
 		folderPaths = map[string]map[string]string{}
 		// Items that should be excluded when sourcing data from the base backup.
+		// Parent Path -> item ID -> {}
 		excludedItems = map[string]map[string]struct{}{}
 	)
@@ -293,19 +295,24 @@ func (c *Collections) Get(
 			prevDelta   = prevDeltas[driveID]
 			oldPaths    = oldPathsByDriveID[driveID]
 			numOldDelta = 0
+			ictx        = clues.Add(ctx, "drive_id", driveID, "drive_name", driveName)
 		)

+		if _, ok := c.CollectionMap[driveID]; !ok {
+			c.CollectionMap[driveID] = map[string]*Collection{}
+		}
+
 		if len(prevDelta) > 0 {
 			numOldDelta++
 		}

-		logger.Ctx(ctx).Infow(
+		logger.Ctx(ictx).Infow(
 			"previous metadata for drive",
 			"num_paths_entries", len(oldPaths),
 			"num_deltas_entries", numOldDelta)

 		delta, paths, excluded, err := collectItems(
-			ctx,
+			ictx,
 			c.itemPagerFunc(c.service, driveID, ""),
 			driveID,
 			driveName,
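
The `ictx` introduced above attaches the drive identifiers once per loop pass, so logs and wrapped errors inherit them. A small standalone sketch using the `clues` calls visible in this hunk (sample values invented; exact error formatting may differ):

```go
package main

import (
	"context"
	"errors"
	"fmt"

	"github.com/alcionai/clues"
)

func main() {
	// Attach the drive identifiers once; errors wrapped with
	// WithClues(ictx) then carry them automatically.
	ctx := context.Background()
	ictx := clues.Add(ctx, "drive_id", "d1", "drive_name", "Documents")

	err := errors.New("graph call failed")
	err = clues.Wrap(err, "making exclude prefix").WithClues(ictx)

	fmt.Println(err) // wrapped message, annotated with the ictx values
}
```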
@@ -337,12 +344,10 @@ func (c *Collections) Get(
 		folderPaths[driveID] = map[string]string{}
 		maps.Copy(folderPaths[driveID], paths)

-		logger.Ctx(ctx).Infow(
+		logger.Ctx(ictx).Infow(
 			"persisted metadata for drive",
-			"num_paths_entries",
-			len(paths),
-			"num_deltas_entries",
-			numDeltas)
+			"num_paths_entries", len(paths),
+			"num_deltas_entries", numDeltas)

 		if !delta.Reset {
 			p, err := GetCanonicalPath(
@@ -352,10 +357,18 @@ func (c *Collections) Get(
 				c.source)
 			if err != nil {
 				return nil, nil,
-					clues.Wrap(err, "making exclude prefix for drive").WithClues(ctx).With("drive_id", driveID)
+					clues.Wrap(err, "making exclude prefix").WithClues(ictx)
 			}

-			excludedItems[p.String()] = excluded
+			pstr := p.String()
+
+			eidi, ok := excludedItems[pstr]
+			if !ok {
+				eidi = map[string]struct{}{}
+			}
+
+			maps.Copy(eidi, excluded)
+			excludedItems[pstr] = eidi

 			continue
 		}
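
The plain assignment became a merge because an exclude prefix may already hold entries from an earlier pass; overwriting would silently drop them. A self-contained sketch of the merge using the stdlib `maps` package (the repo's actual import may differ; sample data invented):

```go
package main

import (
	"fmt"
	"maps"
)

func main() {
	// Parent path -> item ID -> {}; values here are made up.
	excludedItems := map[string]map[string]struct{}{}

	pstr := "drives/d1/root:"
	excluded := map[string]struct{}{"itemA": {}, "itemB": {}}

	// Merge into any existing set for pstr instead of clobbering it.
	eidi, ok := excludedItems[pstr]
	if !ok {
		eidi = map[string]struct{}{}
	}

	maps.Copy(eidi, excluded)
	excludedItems[pstr] = eidi

	fmt.Println(len(excludedItems[pstr])) // 2
}
```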
@@ -363,29 +376,27 @@ func (c *Collections) Get(
 		// Set all folders in previous backup but not in the current
 		// one with state deleted
 		modifiedPaths := map[string]struct{}{}
-		for _, p := range c.CollectionMap {
+		for _, p := range c.CollectionMap[driveID] {
 			modifiedPaths[p.FullPath().String()] = struct{}{}
 		}

-		for i, p := range oldPaths {
-			_, found := paths[i]
-			if found {
+		for fldID, p := range oldPaths {
+			if _, ok := paths[fldID]; ok {
 				continue
 			}

-			_, found = modifiedPaths[p]
-			if found {
+			if _, ok := modifiedPaths[p]; ok {
 				// Original folder was deleted and new folder with the
 				// same name/path was created in its place
 				continue
 			}

-			delete(paths, i)
+			delete(paths, fldID)

 			prevPath, err := path.FromDataLayerPath(p, false)
 			if err != nil {
-				return nil, nil,
-					clues.Wrap(err, "invalid previous path").WithClues(ctx).With("deleted_path", p)
+				err = clues.Wrap(err, "invalid previous path").WithClues(ictx).With("deleted_path", p)
+				return nil, map[string]map[string]struct{}{}, err
 			}

 			col := NewCollection(
@@ -397,18 +408,21 @@ func (c *Collections) Get(
 				c.statusUpdater,
 				c.source,
 				c.ctrl,
-				true,
-			)
+				true)

-			c.CollectionMap[i] = col
+			c.CollectionMap[driveID][fldID] = col
 		}
 	}

 	observe.Message(ctx, observe.Safe(fmt.Sprintf("Discovered %d items to backup", c.NumItems)))

 	// Add an extra for the metadata collection.
-	collections := make([]data.BackupCollection, 0, len(c.CollectionMap)+1)
-	for _, coll := range c.CollectionMap {
-		collections = append(collections, coll)
+	collections := []data.BackupCollection{}
+
+	for _, driveColls := range c.CollectionMap {
+		for _, coll := range driveColls {
+			collections = append(collections, coll)
+		}
 	}

 	service, category := c.source.toPathServiceCat()
@@ -439,13 +453,13 @@
 	}
 }

 func updateCollectionPaths(
-	id string,
-	cmap map[string]*Collection,
+	driveID, itemID string,
+	cmap map[string]map[string]*Collection,
 	curPath path.Path,
 ) (bool, error) {
 	var initialCurPath path.Path

-	col, found := cmap[id]
+	col, found := cmap[driveID][itemID]
 	if found {
 		initialCurPath = col.FullPath()
 		if initialCurPath.String() == curPath.String() {
@@ -459,8 +473,8 @@ func updateCollectionPaths(
 		return found, nil
 	}

-	for i, c := range cmap {
-		if i == id {
+	for iID, c := range cmap[driveID] {
+		if iID == itemID {
 			continue
 		}
@@ -502,7 +516,10 @@ func (c *Collections) handleDelete(
 		prevPath, err = path.FromDataLayerPath(prevPathStr, false)
 		if err != nil {
 			return clues.Wrap(err, "invalid previous path").
-				With("collection_id", itemID, "path_string", prevPathStr)
+				With(
+					"drive_id", driveID,
+					"item_id", itemID,
+					"path_string", prevPathStr)
 		}
 	}
@@ -529,10 +546,9 @@ func (c *Collections) handleDelete(
 		c.source,
 		c.ctrl,
 		// DoNotMerge is not checked for deleted items.
-		false,
-	)
+		false)

-	c.CollectionMap[itemID] = col
+	c.CollectionMap[driveID][itemID] = col

 	return nil
 }
@@ -604,7 +620,7 @@ func (c *Collections) UpdateCollections(
 	oldPaths map[string]string,
 	newPaths map[string]string,
 	excluded map[string]struct{},
-	itemCollection map[string]string,
+	itemCollection map[string]map[string]string,
 	invalidPrevDelta bool,
 	errs *fault.Bus,
 ) error {
@@ -688,7 +704,7 @@ func (c *Collections) UpdateCollections(
 			// update newPaths so we don't accidentally clobber previous deletes.
 			updatePath(newPaths, itemID, collectionPath.String())

-			found, err := updateCollectionPaths(itemID, c.CollectionMap, collectionPath)
+			found, err := updateCollectionPaths(driveID, itemID, c.CollectionMap, collectionPath)
 			if err != nil {
 				return clues.Stack(err).WithClues(ictx)
 			}
@@ -708,7 +724,9 @@ func (c *Collections) UpdateCollections(
 				c.ctrl,
 				invalidPrevDelta,
 			)
-			c.CollectionMap[itemID] = col
+
+			col.driveName = driveName
+			c.CollectionMap[driveID][itemID] = col
 			c.NumContainers++

 			if c.source != OneDriveSource || item.GetRoot() != nil {
@@ -729,10 +747,10 @@ func (c *Collections) UpdateCollections(
 		}

 		// Get the collection for this item.
-		collectionID := ptr.Val(item.GetParentReference().GetId())
-		ictx = clues.Add(ictx, "collection_id", collectionID)
+		parentID := ptr.Val(item.GetParentReference().GetId())
+		ictx = clues.Add(ictx, "parent_id", parentID)

-		collection, found := c.CollectionMap[collectionID]
+		collection, found := c.CollectionMap[driveID][parentID]
 		if !found {
 			return clues.New("item seen before parent folder").WithClues(ictx)
 		}
@@ -740,9 +758,9 @@ func (c *Collections) UpdateCollections(
 		// Delete the file from previous collection. This will
 		// only kick in if the file was moved multiple times
 		// within a single delta query
-		itemColID, found := itemCollection[itemID]
+		icID, found := itemCollection[driveID][itemID]
 		if found {
-			pcollection, found := c.CollectionMap[itemColID]
+			pcollection, found := c.CollectionMap[driveID][icID]
 			if !found {
 				return clues.New("previous collection not found").WithClues(ictx)
 			}
@@ -753,7 +771,7 @@
 			}
 		}

-		itemCollection[itemID] = collectionID
+		itemCollection[driveID][itemID] = parentID
 		if collection.Add(item) {
 			c.NumItems++
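
For context, a hypothetical walkthrough of the `itemCollection` bookkeeping (driveID -> itemID -> parent collection ID) threaded through `UpdateCollections`: per the comments above, it lets a file that moves more than once within a single delta query be removed from its prior collection first. IDs below are invented:

```go
package main

import "fmt"

func main() {
	// driveID -> itemID -> parent (collection) ID.
	itemCollection := map[string]map[string]string{
		"drive1": {},
	}

	// First sighting within a delta query: file1 lands under folderA.
	itemCollection["drive1"]["file1"] = "folderA"

	// A later page of the same delta reports file1 under folderB;
	// the stale entry says which collection to remove it from first.
	if prev, ok := itemCollection["drive1"]["file1"]; ok {
		fmt.Println("remove file1 from:", prev) // folderA
	}

	itemCollection["drive1"]["file1"] = "folderB"
}
```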

View File

@@ -87,15 +87,15 @@ func getExpectedPathGenerator(t *testing.T,
 	}
 }

-type OneDriveCollectionsSuite struct {
+type OneDriveCollectionsUnitSuite struct {
 	tester.Suite
 }

-func TestOneDriveCollectionsSuite(t *testing.T) {
-	suite.Run(t, &OneDriveCollectionsSuite{Suite: tester.NewUnitSuite(t)})
+func TestOneDriveCollectionsUnitSuite(t *testing.T) {
+	suite.Run(t, &OneDriveCollectionsUnitSuite{Suite: tester.NewUnitSuite(t)})
 }

-func (suite *OneDriveCollectionsSuite) TestGetCanonicalPath() {
+func (suite *OneDriveCollectionsUnitSuite) TestGetCanonicalPath() {
 	tenant, resourceOwner := "tenant", "resourceOwner"

 	table := []struct {
@@ -150,10 +150,11 @@ func getDelList(files ...string) map[string]struct{} {
 	return delList
 }

-func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
+func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
 	anyFolder := (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0]

 	const (
+		driveID = "driveID1"
 		tenant  = "tenant"
 		user    = "user"
 		folder  = "/folder"
@@ -758,14 +759,21 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
 	for _, tt := range tests {
 		suite.Run(tt.testCase, func() {
-			t := suite.T()
-
 			ctx, flush := tester.NewContext()
 			defer flush()

-			excludes := map[string]struct{}{}
-			outputFolderMap := map[string]string{}
+			var (
+				t               = suite.T()
+				excludes        = map[string]struct{}{}
+				outputFolderMap = map[string]string{}
+				itemCollection  = map[string]map[string]string{
+					driveID: {},
+				}
+				errs = fault.New(true)
+			)
+
 			maps.Copy(outputFolderMap, tt.inputFolderMap)

 			c := NewCollections(
 				graph.HTTPClient(graph.NoTimeout()),
 				tenant,
@@ -776,12 +784,11 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
 				nil,
 				control.Options{ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}})

-			itemCollection := map[string]string{}
-			errs := fault.New(true)
+			c.CollectionMap[driveID] = map[string]*Collection{}

 			err := c.UpdateCollections(
 				ctx,
-				"driveID1",
+				driveID,
 				"General",
 				tt.items,
 				tt.inputFolderMap,
@@ -791,21 +798,21 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
 				false,
 				errs)
 			tt.expect(t, err)
-			assert.Equal(t, len(tt.expectedCollectionIDs), len(c.CollectionMap), "total collections")
+			assert.Equal(t, len(tt.expectedCollectionIDs), len(c.CollectionMap[driveID]), "total collections")
 			assert.Equal(t, tt.expectedItemCount, c.NumItems, "item count")
 			assert.Equal(t, tt.expectedFileCount, c.NumFiles, "file count")
 			assert.Equal(t, tt.expectedContainerCount, c.NumContainers, "container count")
 			assert.Equal(t, tt.expectedSkippedCount, len(errs.Skipped()), "skipped items")

 			for id, sp := range tt.expectedCollectionIDs {
-				if !assert.Containsf(t, c.CollectionMap, id, "missing collection with id %s", id) {
+				if !assert.Containsf(t, c.CollectionMap[driveID], id, "missing collection with id %s", id) {
 					// Skip collections we don't find so we don't get an NPE.
 					continue
 				}

-				assert.Equalf(t, sp.state, c.CollectionMap[id].State(), "state for collection %s", id)
-				assert.Equalf(t, sp.curPath, c.CollectionMap[id].FullPath(), "current path for collection %s", id)
-				assert.Equalf(t, sp.prevPath, c.CollectionMap[id].PreviousPath(), "prev path for collection %s", id)
+				assert.Equalf(t, sp.state, c.CollectionMap[driveID][id].State(), "state for collection %s", id)
+				assert.Equalf(t, sp.curPath, c.CollectionMap[driveID][id].FullPath(), "current path for collection %s", id)
+				assert.Equalf(t, sp.prevPath, c.CollectionMap[driveID][id].PreviousPath(), "prev path for collection %s", id)
 			}

 			assert.Equal(t, tt.expectedMetadataPaths, outputFolderMap, "metadata paths")
@@ -814,7 +821,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
 	}
 }

-func (suite *OneDriveCollectionsSuite) TestDeserializeMetadata() {
+func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
 	tenant := "a-tenant"
 	user := "a-user"
 	driveID1 := "1"
@@ -1216,11 +1223,16 @@ func (p *mockItemPager) ValuesIn(gapi.DeltaPageLinker) ([]models.DriveItemable,
 	return p.toReturn[idx].items, nil
 }

-func (suite *OneDriveCollectionsSuite) TestGet() {
-	anyFolder := (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0]
-
-	tenant := "a-tenant"
-	user := "a-user"
+func (suite *OneDriveCollectionsUnitSuite) TestGet() {
+	var (
+		anyFolder = (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0]
+		tenant    = "a-tenant"
+		user      = "a-user"
+		empty     = ""
+		next      = "next"
+		delta     = "delta1"
+		delta2    = "delta2"
+	)

 	metadataPath, err := path.Builder{}.ToServiceCategoryMetadataPath(
 		tenant,
@@ -1231,11 +1243,6 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
 	)
 	require.NoError(suite.T(), err, "making metadata path")

-	empty := ""
-	next := "next"
-	delta := "delta1"
-	delta2 := "delta2"
-
 	driveID1 := uuid.NewString()
 	drive1 := models.NewDrive()
 	drive1.SetId(&driveID1)
@@ -1246,17 +1253,19 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
 	drive2.SetId(&driveID2)
 	drive2.SetName(&driveID2)

-	driveBasePath1 := fmt.Sprintf(rootDrivePattern, driveID1)
-	driveBasePath2 := fmt.Sprintf(rootDrivePattern, driveID2)
-
-	expectedPath1 := getExpectedPathGenerator(suite.T(), tenant, user, driveBasePath1)
-	expectedPath2 := getExpectedPathGenerator(suite.T(), tenant, user, driveBasePath2)
-
-	rootFolderPath1 := expectedPath1("")
-	folderPath1 := expectedPath1("/folder")
-
-	rootFolderPath2 := expectedPath2("")
-	folderPath2 := expectedPath2("/folder")
+	var (
+		driveBasePath1 = fmt.Sprintf(rootDrivePattern, driveID1)
+		driveBasePath2 = fmt.Sprintf(rootDrivePattern, driveID2)
+
+		expectedPath1 = getExpectedPathGenerator(suite.T(), tenant, user, driveBasePath1)
+		expectedPath2 = getExpectedPathGenerator(suite.T(), tenant, user, driveBasePath2)
+
+		rootFolderPath1 = expectedPath1("")
+		folderPath1     = expectedPath1("/folder")
+
+		rootFolderPath2 = expectedPath2("")
+		folderPath2     = expectedPath2("/folder")
+	)

 	table := []struct {
 		name string
@@ -2095,7 +2104,7 @@ func getDeltaError() error {
 	return deltaError
 }

-func (suite *OneDriveCollectionsSuite) TestCollectItems() {
+func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
 	next := "next"
 	delta := "delta"
 	prevDelta := "prev-delta"
@@ -2175,7 +2184,7 @@ func (suite *OneDriveCollectionsSuite) TestCollectItems() {
 		oldPaths map[string]string,
 		newPaths map[string]string,
 		excluded map[string]struct{},
-		itemCollection map[string]string,
+		itemCollection map[string]map[string]string,
 		doNotMergeItems bool,
 		errs *fault.Bus,
 	) error {

View File

@@ -77,12 +77,12 @@ func DataCollections(
 		collections = append(collections, odcs...)

-		for k, v := range excludes {
+		for k, ex := range excludes {
 			if _, ok := allExcludes[k]; !ok {
 				allExcludes[k] = map[string]struct{}{}
 			}

-			maps.Copy(allExcludes[k], v)
+			maps.Copy(allExcludes[k], ex)
 		}
 	}

View File

@@ -137,7 +137,7 @@ type itemCollector func(
 	oldPaths map[string]string,
 	newPaths map[string]string,
 	excluded map[string]struct{},
-	fileCollectionMap map[string]string,
+	itemCollections map[string]map[string]string,
 	validPrevDelta bool,
 	errs *fault.Bus,
 ) error
@@ -199,7 +199,10 @@ func collectItems(
 		// file belongs to. This is useful to delete a file from the
 		// collection it was previously in, in case it was moved to a
 		// different collection within the same delta query
-		itemCollection = map[string]string{}
+		// drive ID -> item ID -> item ID
+		itemCollection = map[string]map[string]string{
+			driveID: {},
+		}
 	)

 	if !invalidPrevDelta {
@@ -373,15 +376,15 @@ func GetAllFolders(
 		ictx := clues.Add(ctx, "drive_id", id, "drive_name", name) // TODO: pii

 		collector := func(
-			innerCtx context.Context,
-			driveID, driveName string,
+			_ context.Context,
+			_, _ string,
 			items []models.DriveItemable,
-			oldPaths map[string]string,
-			newPaths map[string]string,
-			excluded map[string]struct{},
-			itemCollection map[string]string,
-			doNotMergeItems bool,
-			errs *fault.Bus,
+			_ map[string]string,
+			_ map[string]string,
+			_ map[string]struct{},
+			_ map[string]map[string]string,
+			_ bool,
+			_ *fault.Bus,
 		) error {
 			for _, item := range items {
 				// Skip the root item.
@@ -412,7 +415,15 @@ func GetAllFolders(
 			return nil
 		}

-		_, _, _, err = collectItems(ictx, defaultItemPager(gs, id, ""), id, name, collector, map[string]string{}, "", errs)
+		_, _, _, err = collectItems(
+			ictx,
+			defaultItemPager(gs, id, ""),
+			id,
+			name,
+			collector,
+			map[string]string{},
+			"",
+			errs)
 		if err != nil {
 			el.AddRecoverable(clues.Wrap(err, "enumerating items in drive"))
 		}

View File

@@ -405,33 +405,12 @@ func constructWebURL(adtl map[string]any) string {
 	return url
 }

-// func fetchParentReference(
-// 	ctx context.Context,
-// 	service graph.Servicer,
-// 	orig models.ItemReferenceable,
-// ) (models.ItemReferenceable, error) {
-// 	if orig == nil || service == nil || ptr.Val(orig.GetName()) != "" {
-// 		return orig, nil
-// 	}
-//
-// 	options := &msdrives.DriveItemRequestBuilderGetRequestConfiguration{
-// 		QueryParameters: &msdrives.DriveItemRequestBuilderGetQueryParameters{
-// 			Select: []string{"name"},
-// 		},
-// 	}
-//
-// 	driveID := ptr.Val(orig.GetDriveId())
-//
-// 	if driveID == "" {
-// 		return orig, nil
-// 	}
-//
-// 	drive, err := service.Client().DrivesById(driveID).Get(ctx, options)
-// 	if err != nil {
-// 		return nil, graph.Stack(ctx, err)
-// 	}
-//
-// 	orig.SetName(drive.GetName())
-//
-// 	return orig, nil
-// }
+func setName(orig models.ItemReferenceable, driveName string) models.ItemReferenceable {
+	if orig == nil {
+		return nil
+	}
+
+	orig.SetName(&driveName)
+
+	return orig
+}
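
A hedged usage sketch of the new `setName` helper: the function body is copied from the hunk above so the example compiles on its own, and `models` is assumed to be the msgraph-sdk-go models package. The nil guard lets callers (like `populateItems`) skip their own check:

```go
package main

import (
	"fmt"

	"github.com/microsoftgraph/msgraph-sdk-go/models"
)

// setName as added above, repeated here so the sketch is self-contained.
func setName(orig models.ItemReferenceable, driveName string) models.ItemReferenceable {
	if orig == nil {
		return nil
	}

	orig.SetName(&driveName)

	return orig
}

func main() {
	// Nil parent references pass through without a caller-side check.
	fmt.Println(setName(nil, "Documents")) // <nil>

	// Otherwise the cached drive name is stamped locally, with no
	// Graph round trip per item.
	ref := models.NewItemReference()
	fmt.Println(*setName(ref, "Documents").GetName()) // Documents
}
```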

View File

@@ -67,15 +67,15 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
 	var driveItem models.DriveItemable

 	// This item collector tries to find "a" drive item that is a file to test the reader function
 	itemCollector := func(
-		ctx context.Context,
-		driveID, driveName string,
+		_ context.Context,
+		_, _ string,
 		items []models.DriveItemable,
-		oldPaths map[string]string,
-		newPaths map[string]string,
-		excluded map[string]struct{},
-		itemCollection map[string]string,
-		doNotMergeItems bool,
-		errs *fault.Bus,
+		_ map[string]string,
+		_ map[string]string,
+		_ map[string]struct{},
+		_ map[string]map[string]string,
+		_ bool,
+		_ *fault.Bus,
 	) error {
 		for _, item := range items {
 			if item.GetFile() != nil {
@@ -91,8 +91,7 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
 		defaultItemPager(
 			suite.service,
 			suite.userDriveID,
-			"",
-		),
+			""),
 		suite.userDriveID,
 		"General",
 		itemCollector,

View File

@@ -40,20 +40,21 @@ func (fm testFolderMatcher) Matches(path string) bool {
 // tests
 // ---------------------------------------------------------------------------

-type SharePointLibrariesSuite struct {
+type SharePointLibrariesUnitSuite struct {
 	tester.Suite
 }

-func TestSharePointLibrariesSuite(t *testing.T) {
-	suite.Run(t, &SharePointLibrariesSuite{Suite: tester.NewUnitSuite(t)})
+func TestSharePointLibrariesUnitSuite(t *testing.T) {
+	suite.Run(t, &SharePointLibrariesUnitSuite{Suite: tester.NewUnitSuite(t)})
 }

-func (suite *SharePointLibrariesSuite) TestUpdateCollections() {
+func (suite *SharePointLibrariesUnitSuite) TestUpdateCollections() {
 	anyFolder := (&selectors.SharePointBackup{}).Libraries(selectors.Any())[0]

 	const (
 		tenant = "tenant"
 		site   = "site"
+		driveID = "driveID1"
 	)

 	tests := []struct {
@@ -90,14 +91,22 @@ func (suite *SharePointLibrariesSuite) TestUpdateCollections() {
 	for _, test := range tests {
 		suite.Run(test.testCase, func() {
-			t := suite.T()
-
 			ctx, flush := tester.NewContext()
 			defer flush()

-			paths := map[string]string{}
-			newPaths := map[string]string{}
-			excluded := map[string]struct{}{}
+			var (
+				t         = suite.T()
+				paths     = map[string]string{}
+				newPaths  = map[string]string{}
+				excluded  = map[string]struct{}{}
+				itemColls = map[string]map[string]string{
+					driveID: {},
+				}
+				collMap = map[string]map[string]*onedrive.Collection{
+					driveID: {},
+				}
+			)

 			c := onedrive.NewCollections(
 				graph.HTTPClient(graph.NoTimeout()),
 				tenant,
@@ -107,26 +116,32 @@ func (suite *SharePointLibrariesSuite) TestUpdateCollections() {
 				&MockGraphService{},
 				nil,
 				control.Options{})

+			c.CollectionMap = collMap
+
 			err := c.UpdateCollections(
 				ctx,
-				"driveID1",
+				driveID,
 				"General",
 				test.items,
 				paths,
 				newPaths,
 				excluded,
-				map[string]string{},
+				itemColls,
 				true,
 				fault.New(true))
 			test.expect(t, err)
 			assert.Equal(t, len(test.expectedCollectionIDs), len(c.CollectionMap), "collection paths")
 			assert.Equal(t, test.expectedItemCount, c.NumItems, "item count")
 			assert.Equal(t, test.expectedFileCount, c.NumFiles, "file count")
 			assert.Equal(t, test.expectedContainerCount, c.NumContainers, "container count")

 			for _, collPath := range test.expectedCollectionIDs {
-				assert.Contains(t, c.CollectionMap, collPath)
+				assert.Contains(t, c.CollectionMap[driveID], collPath)
 			}

-			for _, col := range c.CollectionMap {
+			for _, col := range c.CollectionMap[driveID] {
 				assert.Contains(t, test.expectedCollectionPaths, col.FullPath().String())
 			}
 		})