GC: OneDrive: Efficient Drive Name Update (#2733)
Updates the `populateItems()` call path for OneDrive, reducing the overall number of Graph API calls made to obtain the drive name.

---

### Brief Description

- The drive name is saved to a collection field, populated from `driveMap`.
- `driveMap` is a map whose key is the drive's M365 ID and whose value is the drive's display name.
- During `populateItems()`, the drive name from the map is sufficient for OneDrive, but SharePoint requires an update: while SharePoint is processed, the map is updated as additional drive libraries are referenced.

#### Does this PR need a docs update or release note?

- [x] ⛔ No

#### Type of change

- [x] 🐛 Bugfix

#### Issue(s)

* closes #2732

#### Test Plan

- [x] ⚡ Unit test
parent fd661d216a
commit 09124e7a73
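Before the diff, a minimal standalone sketch of the idea (hypothetical, simplified types — not Corso's actual ones): the drive's display name is resolved once per drive and cached on the collection, and each backed-up item's parent reference is stamped locally, where the removed `fetchParentReference` helper previously made one Graph API call per item.

```go
package main

import "fmt"

// itemReference is a simplified stand-in for the Graph SDK's ItemReferenceable.
type itemReference struct{ name string }

// collection caches the drive's display name once, at enumeration time,
// instead of resolving it per item with a Graph API call (the removed
// fetchParentReference path, which risked throttling).
type collection struct {
	driveID   string
	driveName string
}

// setName mirrors the new helper in spirit: a purely local mutation, no network call.
func setName(ref *itemReference, driveName string) *itemReference {
	if ref == nil {
		return nil
	}

	ref.name = driveName

	return ref
}

func main() {
	c := collection{driveID: "m365-drive-id", driveName: "Documents"}

	// populateItems-style loop: every item reuses the cached name,
	// so backing up N items costs zero extra Graph calls.
	items := []*itemReference{{}, {}, {}}
	for _, ref := range items {
		setName(ref, c.driveName)
	}

	fmt.Println(items[0].name) // Documents
}
```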
```diff
@@ -72,8 +72,11 @@ type Collection struct {
 	folderPath path.Path
 	// M365 IDs of file items within this collection
 	driveItems map[string]models.DriveItemable
-	// M365 ID of the drive this collection was created from
-	driveID string
+	// Primary M365 ID of the drive this collection was created from
+	driveID string
+
+	// Display Name of the associated drive
+	driveName string
 	source        driveSource
 	service       graph.Servicer
 	statusUpdater support.StatusUpdater
@@ -337,6 +340,7 @@ func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) {
 			itemMeta     io.ReadCloser
 			itemMetaSize int
 			metaSuffix   string
+			err          error
 		)

 		ctx = clues.Add(ctx,
@@ -345,16 +349,7 @@ func (oc *Collection) populateItems(ctx context.Context, errs *fault.Bus) {
 			"backup_item_size", itemSize,
 		)

-		// TODO: Removing the logic below because it introduces an extra Graph API call for
-		// every item being backed up. This can lead to throttling errors.
-		//
-		// pr, err := fetchParentReference(ctx, oc.service, item.GetParentReference())
-		// if err != nil {
-		// 	el.AddRecoverable(clues.Wrap(err, "getting parent reference").Label(fault.LabelForceNoBackupCreation))
-		// 	return
-		// }
-
-		// item.SetParentReference(pr)
+		item.SetParentReference(setName(item.GetParentReference(), oc.driveName))

 		isFile := item.GetFile() != nil
```
```diff
@@ -69,8 +69,9 @@ type Collections struct {
 	ctrl control.Options

 	// collectionMap allows lookup of the data.BackupCollection
-	// for a OneDrive folder
-	CollectionMap map[string]*Collection
+	// for a OneDrive folder.
+	// driveID -> itemID -> collection
+	CollectionMap map[string]map[string]*Collection

 	// Not the most ideal, but allows us to change the pager function for testing
 	// as needed. This will allow us to mock out some scenarios during testing.
@@ -107,7 +108,7 @@ func NewCollections(
 		resourceOwner:  resourceOwner,
 		source:         source,
 		matcher:        matcher,
-		CollectionMap:  map[string]*Collection{},
+		CollectionMap:  map[string]map[string]*Collection{},
 		drivePagerFunc: PagerForSource,
 		itemPagerFunc:  defaultItemPager,
 		service:        service,
@@ -283,6 +284,7 @@ func (c *Collections) Get(
 		// Drive ID -> folder ID -> folder path
 		folderPaths = map[string]map[string]string{}
 		// Items that should be excluded when sourcing data from the base backup.
+		// Parent Path -> item ID -> {}
 		excludedItems = map[string]map[string]struct{}{}
 	)

```
```diff
@@ -293,19 +295,24 @@ func (c *Collections) Get(
 			prevDelta   = prevDeltas[driveID]
 			oldPaths    = oldPathsByDriveID[driveID]
 			numOldDelta = 0
+			ictx        = clues.Add(ctx, "drive_id", driveID, "drive_name", driveName)
 		)

+		if _, ok := c.CollectionMap[driveID]; !ok {
+			c.CollectionMap[driveID] = map[string]*Collection{}
+		}
+
 		if len(prevDelta) > 0 {
 			numOldDelta++
 		}

-		logger.Ctx(ctx).Infow(
+		logger.Ctx(ictx).Infow(
 			"previous metadata for drive",
 			"num_paths_entries", len(oldPaths),
 			"num_deltas_entries", numOldDelta)

 		delta, paths, excluded, err := collectItems(
-			ctx,
+			ictx,
 			c.itemPagerFunc(c.service, driveID, ""),
 			driveID,
 			driveName,
@@ -337,12 +344,10 @@ func (c *Collections) Get(
 		folderPaths[driveID] = map[string]string{}
 		maps.Copy(folderPaths[driveID], paths)

-		logger.Ctx(ctx).Infow(
+		logger.Ctx(ictx).Infow(
 			"persisted metadata for drive",
-			"num_paths_entries",
-			len(paths),
-			"num_deltas_entries",
-			numDeltas)
+			"num_paths_entries", len(paths),
+			"num_deltas_entries", numDeltas)

 		if !delta.Reset {
 			p, err := GetCanonicalPath(
@@ -352,10 +357,18 @@ func (c *Collections) Get(
 				c.source)
 			if err != nil {
 				return nil, nil,
-					clues.Wrap(err, "making exclude prefix for drive").WithClues(ctx).With("drive_id", driveID)
+					clues.Wrap(err, "making exclude prefix").WithClues(ictx)
 			}

-			excludedItems[p.String()] = excluded
+			pstr := p.String()
+
+			eidi, ok := excludedItems[pstr]
+			if !ok {
+				eidi = map[string]struct{}{}
+			}
+
+			maps.Copy(eidi, excluded)
+			excludedItems[pstr] = eidi

 			continue
 		}
```
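A side note on the `CollectionMap[driveID]` seeding above: with the new two-level map, Go allows reads through a missing inner map but panics on writes into one, so the guard must run before any `c.CollectionMap[driveID][itemID] = col` assignment. A small standalone illustration (hypothetical keys):

```go
package main

import "fmt"

func main() {
	cmap := map[string]map[string]string{}

	// Reading through an absent inner map is safe: it yields the zero value.
	v, ok := cmap["driveID"]["itemID"]
	fmt.Printf("%q %v\n", v, ok) // "" false

	// Writing into a nil inner map panics ("assignment to entry in nil map"),
	// so the inner map is seeded first, mirroring the guard in Get() above.
	if _, ok := cmap["driveID"]; !ok {
		cmap["driveID"] = map[string]string{}
	}

	cmap["driveID"]["itemID"] = "collection"
	fmt.Println(cmap["driveID"]["itemID"]) // collection
}
```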
```diff
@@ -363,29 +376,27 @@ func (c *Collections) Get(
 		// Set all folders in previous backup but not in the current
 		// one with state deleted
 		modifiedPaths := map[string]struct{}{}
-		for _, p := range c.CollectionMap {
+		for _, p := range c.CollectionMap[driveID] {
 			modifiedPaths[p.FullPath().String()] = struct{}{}
 		}

-		for i, p := range oldPaths {
-			_, found := paths[i]
-			if found {
+		for fldID, p := range oldPaths {
+			if _, ok := paths[fldID]; ok {
 				continue
 			}

-			_, found = modifiedPaths[p]
-			if found {
+			if _, ok := modifiedPaths[p]; ok {
 				// Original folder was deleted and new folder with the
 				// same name/path was created in its place
 				continue
 			}

-			delete(paths, i)
+			delete(paths, fldID)

 			prevPath, err := path.FromDataLayerPath(p, false)
 			if err != nil {
-				return nil, nil,
-					clues.Wrap(err, "invalid previous path").WithClues(ctx).With("deleted_path", p)
+				err = clues.Wrap(err, "invalid previous path").WithClues(ictx).With("deleted_path", p)
+				return nil, map[string]map[string]struct{}{}, err
 			}

 			col := NewCollection(
```
```diff
@@ -397,18 +408,21 @@ func (c *Collections) Get(
 				c.statusUpdater,
 				c.source,
 				c.ctrl,
-				true,
-			)
-			c.CollectionMap[i] = col
+				true)
+
+			c.CollectionMap[driveID][fldID] = col
 		}
 	}

 	observe.Message(ctx, observe.Safe(fmt.Sprintf("Discovered %d items to backup", c.NumItems)))

 	// Add an extra for the metadata collection.
-	collections := make([]data.BackupCollection, 0, len(c.CollectionMap)+1)
-	for _, coll := range c.CollectionMap {
-		collections = append(collections, coll)
+	collections := []data.BackupCollection{}
+
+	for _, driveColls := range c.CollectionMap {
+		for _, coll := range driveColls {
+			collections = append(collections, coll)
+		}
 	}

 	service, category := c.source.toPathServiceCat()
```
```diff
@@ -439,13 +453,13 @@ func (c *Collections) Get(
 }

 func updateCollectionPaths(
-	id string,
-	cmap map[string]*Collection,
+	driveID, itemID string,
+	cmap map[string]map[string]*Collection,
 	curPath path.Path,
 ) (bool, error) {
 	var initialCurPath path.Path

-	col, found := cmap[id]
+	col, found := cmap[driveID][itemID]
 	if found {
 		initialCurPath = col.FullPath()
 		if initialCurPath.String() == curPath.String() {
@@ -459,8 +473,8 @@ func updateCollectionPaths(
 		return found, nil
 	}

-	for i, c := range cmap {
-		if i == id {
+	for iID, c := range cmap[driveID] {
+		if iID == itemID {
 			continue
 		}

```
```diff
@@ -502,7 +516,10 @@ func (c *Collections) handleDelete(
 		prevPath, err = path.FromDataLayerPath(prevPathStr, false)
 		if err != nil {
 			return clues.Wrap(err, "invalid previous path").
-				With("collection_id", itemID, "path_string", prevPathStr)
+				With(
+					"drive_id", driveID,
+					"item_id", itemID,
+					"path_string", prevPathStr)
 		}
 	}

@@ -529,10 +546,9 @@ func (c *Collections) handleDelete(
 		c.source,
 		c.ctrl,
 		// DoNotMerge is not checked for deleted items.
-		false,
-	)
+		false)

-	c.CollectionMap[itemID] = col
+	c.CollectionMap[driveID][itemID] = col

 	return nil
 }
```
```diff
@@ -604,7 +620,7 @@ func (c *Collections) UpdateCollections(
 	oldPaths map[string]string,
 	newPaths map[string]string,
 	excluded map[string]struct{},
-	itemCollection map[string]string,
+	itemCollection map[string]map[string]string,
 	invalidPrevDelta bool,
 	errs *fault.Bus,
 ) error {
@@ -688,7 +704,7 @@ func (c *Collections) UpdateCollections(
 			// update newPaths so we don't accidentally clobber previous deletes.
 			updatePath(newPaths, itemID, collectionPath.String())

-			found, err := updateCollectionPaths(itemID, c.CollectionMap, collectionPath)
+			found, err := updateCollectionPaths(driveID, itemID, c.CollectionMap, collectionPath)
 			if err != nil {
 				return clues.Stack(err).WithClues(ictx)
 			}
@@ -708,7 +724,9 @@ func (c *Collections) UpdateCollections(
 				c.ctrl,
 				invalidPrevDelta,
 			)
-			c.CollectionMap[itemID] = col
+			col.driveName = driveName
+
+			c.CollectionMap[driveID][itemID] = col
 			c.NumContainers++

 			if c.source != OneDriveSource || item.GetRoot() != nil {
@@ -729,10 +747,10 @@ func (c *Collections) UpdateCollections(
 			}

 			// Get the collection for this item.
-			collectionID := ptr.Val(item.GetParentReference().GetId())
-			ictx = clues.Add(ictx, "collection_id", collectionID)
+			parentID := ptr.Val(item.GetParentReference().GetId())
+			ictx = clues.Add(ictx, "parent_id", parentID)

-			collection, found := c.CollectionMap[collectionID]
+			collection, found := c.CollectionMap[driveID][parentID]
 			if !found {
 				return clues.New("item seen before parent folder").WithClues(ictx)
 			}
@@ -740,9 +758,9 @@ func (c *Collections) UpdateCollections(
 			// Delete the file from previous collection. This will
 			// only kick in if the file was moved multiple times
 			// within a single delta query
-			itemColID, found := itemCollection[itemID]
+			icID, found := itemCollection[driveID][itemID]
 			if found {
-				pcollection, found := c.CollectionMap[itemColID]
+				pcollection, found := c.CollectionMap[driveID][icID]
 				if !found {
 					return clues.New("previous collection not found").WithClues(ictx)
 				}
@@ -753,7 +771,7 @@ func (c *Collections) UpdateCollections(
 				}
 			}

-			itemCollection[itemID] = collectionID
+			itemCollection[driveID][itemID] = parentID

 			if collection.Add(item) {
 				c.NumItems++
```
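The `itemCollection` bookkeeping above exists to catch a file that moves between folders within a single delta enumeration: the map remembers which collection last claimed each item, so a later sighting can evict it from the stale collection before re-adding it. A compact sketch of that mechanism (hypothetical structures, outside Corso's types):

```go
package main

import "fmt"

func main() {
	// driveID -> itemID -> ID of the collection (folder) that currently owns the item.
	itemCollection := map[string]map[string]string{"drive1": {}}

	// collections: folder ID -> set of item IDs it holds.
	collections := map[string]map[string]struct{}{
		"folderA": {},
		"folderB": {},
	}

	add := func(driveID, itemID, parentID string) {
		// If an earlier page of the same delta already placed the item,
		// remove it from that collection before re-adding it here.
		if prev, ok := itemCollection[driveID][itemID]; ok {
			delete(collections[prev], itemID)
		}

		collections[parentID][itemID] = struct{}{}
		itemCollection[driveID][itemID] = parentID
	}

	add("drive1", "item1", "folderA") // first sighting
	add("drive1", "item1", "folderB") // moved within the same delta query

	fmt.Println(len(collections["folderA"]), len(collections["folderB"])) // 0 1
}
```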
```diff
@@ -87,15 +87,15 @@ func getExpectedPathGenerator(t *testing.T,
 	}
 }

-type OneDriveCollectionsSuite struct {
+type OneDriveCollectionsUnitSuite struct {
 	tester.Suite
 }

-func TestOneDriveCollectionsSuite(t *testing.T) {
-	suite.Run(t, &OneDriveCollectionsSuite{Suite: tester.NewUnitSuite(t)})
+func TestOneDriveCollectionsUnitSuite(t *testing.T) {
+	suite.Run(t, &OneDriveCollectionsUnitSuite{Suite: tester.NewUnitSuite(t)})
 }

-func (suite *OneDriveCollectionsSuite) TestGetCanonicalPath() {
+func (suite *OneDriveCollectionsUnitSuite) TestGetCanonicalPath() {
 	tenant, resourceOwner := "tenant", "resourceOwner"

 	table := []struct {
@@ -150,10 +150,11 @@ func getDelList(files ...string) map[string]struct{} {
 	return delList
 }

-func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
+func (suite *OneDriveCollectionsUnitSuite) TestUpdateCollections() {
 	anyFolder := (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0]

 	const (
+		driveID = "driveID1"
 		tenant  = "tenant"
 		user    = "user"
 		folder  = "/folder"
```
```diff
@@ -758,14 +759,21 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {

 	for _, tt := range tests {
 		suite.Run(tt.testCase, func() {
-			t := suite.T()
-
 			ctx, flush := tester.NewContext()
 			defer flush()

-			excludes := map[string]struct{}{}
-			outputFolderMap := map[string]string{}
+			var (
+				t               = suite.T()
+				excludes        = map[string]struct{}{}
+				outputFolderMap = map[string]string{}
+				itemCollection  = map[string]map[string]string{
+					driveID: {},
+				}
+				errs = fault.New(true)
+			)
+
 			maps.Copy(outputFolderMap, tt.inputFolderMap)

 			c := NewCollections(
 				graph.HTTPClient(graph.NoTimeout()),
 				tenant,
@@ -776,12 +784,11 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
 				nil,
 				control.Options{ToggleFeatures: control.Toggles{EnablePermissionsBackup: true}})

-			itemCollection := map[string]string{}
-			errs := fault.New(true)
+			c.CollectionMap[driveID] = map[string]*Collection{}

 			err := c.UpdateCollections(
 				ctx,
-				"driveID1",
+				driveID,
 				"General",
 				tt.items,
 				tt.inputFolderMap,
@@ -791,21 +798,21 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
 				false,
 				errs)
 			tt.expect(t, err)
-			assert.Equal(t, len(tt.expectedCollectionIDs), len(c.CollectionMap), "total collections")
+			assert.Equal(t, len(tt.expectedCollectionIDs), len(c.CollectionMap[driveID]), "total collections")
 			assert.Equal(t, tt.expectedItemCount, c.NumItems, "item count")
 			assert.Equal(t, tt.expectedFileCount, c.NumFiles, "file count")
 			assert.Equal(t, tt.expectedContainerCount, c.NumContainers, "container count")
 			assert.Equal(t, tt.expectedSkippedCount, len(errs.Skipped()), "skipped items")

 			for id, sp := range tt.expectedCollectionIDs {
-				if !assert.Containsf(t, c.CollectionMap, id, "missing collection with id %s", id) {
+				if !assert.Containsf(t, c.CollectionMap[driveID], id, "missing collection with id %s", id) {
 					// Skip collections we don't find so we don't get an NPE.
 					continue
 				}

-				assert.Equalf(t, sp.state, c.CollectionMap[id].State(), "state for collection %s", id)
-				assert.Equalf(t, sp.curPath, c.CollectionMap[id].FullPath(), "current path for collection %s", id)
-				assert.Equalf(t, sp.prevPath, c.CollectionMap[id].PreviousPath(), "prev path for collection %s", id)
+				assert.Equalf(t, sp.state, c.CollectionMap[driveID][id].State(), "state for collection %s", id)
+				assert.Equalf(t, sp.curPath, c.CollectionMap[driveID][id].FullPath(), "current path for collection %s", id)
+				assert.Equalf(t, sp.prevPath, c.CollectionMap[driveID][id].PreviousPath(), "prev path for collection %s", id)
 			}

 			assert.Equal(t, tt.expectedMetadataPaths, outputFolderMap, "metadata paths")
@@ -814,7 +821,7 @@ func (suite *OneDriveCollectionsSuite) TestUpdateCollections() {
 	}
 }

-func (suite *OneDriveCollectionsSuite) TestDeserializeMetadata() {
+func (suite *OneDriveCollectionsUnitSuite) TestDeserializeMetadata() {
 	tenant := "a-tenant"
 	user := "a-user"
 	driveID1 := "1"
```
```diff
@@ -1216,11 +1223,16 @@ func (p *mockItemPager) ValuesIn(gapi.DeltaPageLinker) ([]models.DriveItemable,
 	return p.toReturn[idx].items, nil
 }

-func (suite *OneDriveCollectionsSuite) TestGet() {
-	anyFolder := (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0]
-
-	tenant := "a-tenant"
-	user := "a-user"
+func (suite *OneDriveCollectionsUnitSuite) TestGet() {
+	var (
+		anyFolder = (&selectors.OneDriveBackup{}).Folders(selectors.Any())[0]
+		tenant    = "a-tenant"
+		user      = "a-user"
+		empty     = ""
+		next      = "next"
+		delta     = "delta1"
+		delta2    = "delta2"
+	)

 	metadataPath, err := path.Builder{}.ToServiceCategoryMetadataPath(
 		tenant,
@@ -1231,11 +1243,6 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
 	)
 	require.NoError(suite.T(), err, "making metadata path")

-	empty := ""
-	next := "next"
-	delta := "delta1"
-	delta2 := "delta2"
-
 	driveID1 := uuid.NewString()
 	drive1 := models.NewDrive()
 	drive1.SetId(&driveID1)
@@ -1246,17 +1253,19 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
 	drive2.SetId(&driveID2)
 	drive2.SetName(&driveID2)

-	driveBasePath1 := fmt.Sprintf(rootDrivePattern, driveID1)
-	driveBasePath2 := fmt.Sprintf(rootDrivePattern, driveID2)
+	var (
+		driveBasePath1 = fmt.Sprintf(rootDrivePattern, driveID1)
+		driveBasePath2 = fmt.Sprintf(rootDrivePattern, driveID2)

-	expectedPath1 := getExpectedPathGenerator(suite.T(), tenant, user, driveBasePath1)
-	expectedPath2 := getExpectedPathGenerator(suite.T(), tenant, user, driveBasePath2)
+		expectedPath1 = getExpectedPathGenerator(suite.T(), tenant, user, driveBasePath1)
+		expectedPath2 = getExpectedPathGenerator(suite.T(), tenant, user, driveBasePath2)

-	rootFolderPath1 := expectedPath1("")
-	folderPath1 := expectedPath1("/folder")
+		rootFolderPath1 = expectedPath1("")
+		folderPath1     = expectedPath1("/folder")

-	rootFolderPath2 := expectedPath2("")
-	folderPath2 := expectedPath2("/folder")
+		rootFolderPath2 = expectedPath2("")
+		folderPath2     = expectedPath2("/folder")
+	)

 	table := []struct {
 		name string
```
```diff
@@ -2095,7 +2104,7 @@ func getDeltaError() error {
 	return deltaError
 }

-func (suite *OneDriveCollectionsSuite) TestCollectItems() {
+func (suite *OneDriveCollectionsUnitSuite) TestCollectItems() {
 	next := "next"
 	delta := "delta"
 	prevDelta := "prev-delta"
@@ -2175,7 +2184,7 @@ func (suite *OneDriveCollectionsSuite) TestCollectItems() {
 		oldPaths map[string]string,
 		newPaths map[string]string,
 		excluded map[string]struct{},
-		itemCollection map[string]string,
+		itemCollection map[string]map[string]string,
 		doNotMergeItems bool,
 		errs *fault.Bus,
 	) error {
```
```diff
@@ -77,12 +77,12 @@ func DataCollections(

 		collections = append(collections, odcs...)

-		for k, v := range excludes {
+		for k, ex := range excludes {
 			if _, ok := allExcludes[k]; !ok {
 				allExcludes[k] = map[string]struct{}{}
 			}

-			maps.Copy(allExcludes[k], v)
+			maps.Copy(allExcludes[k], ex)
 		}
 	}

```
```diff
@@ -137,7 +137,7 @@ type itemCollector func(
 	oldPaths map[string]string,
 	newPaths map[string]string,
 	excluded map[string]struct{},
-	fileCollectionMap map[string]string,
+	itemCollections map[string]map[string]string,
 	validPrevDelta bool,
 	errs *fault.Bus,
 ) error
@@ -199,7 +199,10 @@ func collectItems(
 		// file belongs to. This is useful to delete a file from the
 		// collection it was previously in, in case it was moved to a
 		// different collection within the same delta query
-		itemCollection = map[string]string{}
+		// drive ID -> item ID -> item ID
+		itemCollection = map[string]map[string]string{
+			driveID: {},
+		}
 	)

 	if !invalidPrevDelta {
@@ -373,15 +376,15 @@ func GetAllFolders(

 		ictx := clues.Add(ctx, "drive_id", id, "drive_name", name) // TODO: pii
 		collector := func(
-			innerCtx context.Context,
-			driveID, driveName string,
+			_ context.Context,
+			_, _ string,
 			items []models.DriveItemable,
-			oldPaths map[string]string,
-			newPaths map[string]string,
-			excluded map[string]struct{},
-			itemCollection map[string]string,
-			doNotMergeItems bool,
-			errs *fault.Bus,
+			_ map[string]string,
+			_ map[string]string,
+			_ map[string]struct{},
+			_ map[string]map[string]string,
+			_ bool,
+			_ *fault.Bus,
 		) error {
 			for _, item := range items {
 				// Skip the root item.
@@ -412,7 +415,15 @@ func GetAllFolders(
 			return nil
 		}

-		_, _, _, err = collectItems(ictx, defaultItemPager(gs, id, ""), id, name, collector, map[string]string{}, "", errs)
+		_, _, _, err = collectItems(
+			ictx,
+			defaultItemPager(gs, id, ""),
+			id,
+			name,
+			collector,
+			map[string]string{},
+			"",
+			errs)
 		if err != nil {
 			el.AddRecoverable(clues.Wrap(err, "enumerating items in drive"))
 		}

```
```diff
@@ -405,33 +405,12 @@ func constructWebURL(adtl map[string]any) string {
 	return url
 }

-// func fetchParentReference(
-// 	ctx context.Context,
-// 	service graph.Servicer,
-// 	orig models.ItemReferenceable,
-// ) (models.ItemReferenceable, error) {
-// 	if orig == nil || service == nil || ptr.Val(orig.GetName()) != "" {
-// 		return orig, nil
-// 	}
-
-// 	options := &msdrives.DriveItemRequestBuilderGetRequestConfiguration{
-// 		QueryParameters: &msdrives.DriveItemRequestBuilderGetQueryParameters{
-// 			Select: []string{"name"},
-// 		},
-// 	}
-
-// 	driveID := ptr.Val(orig.GetDriveId())
-
-// 	if driveID == "" {
-// 		return orig, nil
-// 	}
-
-// 	drive, err := service.Client().DrivesById(driveID).Get(ctx, options)
-// 	if err != nil {
-// 		return nil, graph.Stack(ctx, err)
-// 	}
-
-// 	orig.SetName(drive.GetName())
-
-// 	return orig, nil
-// }
+func setName(orig models.ItemReferenceable, driveName string) models.ItemReferenceable {
+	if orig == nil {
+		return nil
+	}
+
+	orig.SetName(&driveName)
+
+	return orig
+}
```
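For reference, a sketch of how the new helper behaves in isolation, assuming the `models` package here is msgraph-sdk-go's (the same one the diff imports); the `main` wrapper is only for illustration:

```go
package main

import (
	"fmt"

	"github.com/microsoftgraph/msgraph-sdk-go/models"
)

// Same shape as the helper added above: stamp the cached drive name onto a
// parent reference with a purely local mutation, no Graph round trip.
func setName(orig models.ItemReferenceable, driveName string) models.ItemReferenceable {
	if orig == nil {
		return nil
	}

	orig.SetName(&driveName)

	return orig
}

func main() {
	// A nil parent reference passes through untouched.
	fmt.Println(setName(nil, "Documents") == nil) // true

	ref := setName(models.NewItemReference(), "Documents")
	fmt.Println(*ref.GetName()) // Documents
}
```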
```diff
@@ -67,15 +67,15 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
 	var driveItem models.DriveItemable
 	// This item collector tries to find "a" drive item that is a file to test the reader function
 	itemCollector := func(
-		ctx context.Context,
-		driveID, driveName string,
+		_ context.Context,
+		_, _ string,
 		items []models.DriveItemable,
-		oldPaths map[string]string,
-		newPaths map[string]string,
-		excluded map[string]struct{},
-		itemCollection map[string]string,
-		doNotMergeItems bool,
-		errs *fault.Bus,
+		_ map[string]string,
+		_ map[string]string,
+		_ map[string]struct{},
+		_ map[string]map[string]string,
+		_ bool,
+		_ *fault.Bus,
 	) error {
 		for _, item := range items {
 			if item.GetFile() != nil {
@@ -91,8 +91,7 @@ func (suite *ItemIntegrationSuite) TestItemReader_oneDrive() {
 		defaultItemPager(
 			suite.service,
 			suite.userDriveID,
-			"",
-		),
+			""),
 		suite.userDriveID,
 		"General",
 		itemCollector,
```
```diff
@@ -40,20 +40,21 @@ func (fm testFolderMatcher) Matches(path string) bool {
 // tests
 // ---------------------------------------------------------------------------

-type SharePointLibrariesSuite struct {
+type SharePointLibrariesUnitSuite struct {
 	tester.Suite
 }

-func TestSharePointLibrariesSuite(t *testing.T) {
-	suite.Run(t, &SharePointLibrariesSuite{Suite: tester.NewUnitSuite(t)})
+func TestSharePointLibrariesUnitSuite(t *testing.T) {
+	suite.Run(t, &SharePointLibrariesUnitSuite{Suite: tester.NewUnitSuite(t)})
 }

-func (suite *SharePointLibrariesSuite) TestUpdateCollections() {
+func (suite *SharePointLibrariesUnitSuite) TestUpdateCollections() {
 	anyFolder := (&selectors.SharePointBackup{}).Libraries(selectors.Any())[0]

 	const (
-		tenant = "tenant"
-		site   = "site"
+		tenant  = "tenant"
+		site    = "site"
+		driveID = "driveID1"
 	)

 	tests := []struct {
@@ -90,14 +91,22 @@ func (suite *SharePointLibrariesSuite) TestUpdateCollections() {

 	for _, test := range tests {
 		suite.Run(test.testCase, func() {
-			t := suite.T()
-
 			ctx, flush := tester.NewContext()
 			defer flush()

-			paths := map[string]string{}
-			newPaths := map[string]string{}
-			excluded := map[string]struct{}{}
+			var (
+				t        = suite.T()
+				paths    = map[string]string{}
+				newPaths = map[string]string{}
+				excluded = map[string]struct{}{}
+				itemColls = map[string]map[string]string{
+					driveID: {},
+				}
+				collMap = map[string]map[string]*onedrive.Collection{
+					driveID: {},
+				}
+			)

 			c := onedrive.NewCollections(
 				graph.HTTPClient(graph.NoTimeout()),
 				tenant,
@@ -107,26 +116,32 @@ func (suite *SharePointLibrariesSuite) TestUpdateCollections() {
 				&MockGraphService{},
 				nil,
 				control.Options{})

+			c.CollectionMap = collMap
+
 			err := c.UpdateCollections(
 				ctx,
-				"driveID1",
+				driveID,
 				"General",
 				test.items,
 				paths,
 				newPaths,
 				excluded,
-				map[string]string{},
+				itemColls,
 				true,
 				fault.New(true))

 			test.expect(t, err)
 			assert.Equal(t, len(test.expectedCollectionIDs), len(c.CollectionMap), "collection paths")
 			assert.Equal(t, test.expectedItemCount, c.NumItems, "item count")
 			assert.Equal(t, test.expectedFileCount, c.NumFiles, "file count")
 			assert.Equal(t, test.expectedContainerCount, c.NumContainers, "container count")

 			for _, collPath := range test.expectedCollectionIDs {
-				assert.Contains(t, c.CollectionMap, collPath)
+				assert.Contains(t, c.CollectionMap[driveID], collPath)
 			}
-			for _, col := range c.CollectionMap {
+
+			for _, col := range c.CollectionMap[driveID] {
 				assert.Contains(t, test.expectedCollectionPaths, col.FullPath().String())
 			}
 		})
```