Restore for Groups SharePoint Sites (#4209)

A few items are still pending, such as CLI handling and restoring specific items; these will be addressed in a follow-up PR.

---

#### Does this PR need a docs update or release note?

- [ ]  Yes, it's included
- [x] 🕐 Yes, but in a later PR
- [ ]  No

#### Type of change

<!--- Please check the type of change your PR introduces: --->
- [x] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup

#### Issue(s)

<!-- Can reference multiple issues. Use one of the following "magic words" - "closes, fixes" to auto-close the Github issue. -->
* https://github.com/alcionai/corso/issues/3992

#### Test Plan

<!-- How will this be tested prior to merging.-->
- [ ] 💪 Manual
- [x]  Unit test
- [ ] 💚 E2E
commit 80f11d9876 (parent 6e1be1f08c)
Author: Abin Simon, 2023-09-14 14:19:34 +05:30 (committed via GitHub)
11 changed files with 293 additions and 19 deletions

View File

@@ -5,8 +5,8 @@ import (
 	"github.com/spf13/pflag"
 
 	"github.com/alcionai/corso/src/cli/flags"
-	. "github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
+	"github.com/alcionai/corso/src/internal/common/dttm"
 )
 
 // called by restore.go to map subcommands to provider-specific handling.
@@ -79,5 +79,25 @@ func restoreGroupsCmd(cmd *cobra.Command, args []string) error {
 		return nil
 	}
 
-	return Only(ctx, utils.ErrNotYetImplemented)
+	opts := utils.MakeGroupsOpts(cmd)
+	opts.RestoreCfg.DTTMFormat = dttm.HumanReadableDriveItem
+
+	if flags.RunModeFV == flags.RunModeFlagTest {
+		return nil
+	}
+
+	if err := utils.ValidateGroupsRestoreFlags(flags.BackupIDFV, opts); err != nil {
+		return err
+	}
+
+	sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)
+	utils.FilterGroupsRestoreInfoSelectors(sel, opts)
+
+	return runRestore(
+		ctx,
+		cmd,
+		opts.RestoreCfg,
+		sel.Selector,
+		flags.BackupIDFV,
+		"Groups")
 }
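
The new command body returns early when the CLI runs in flag-test mode, which is what lets the unit test in the next file execute the command and only verify flag wiring. Below is a minimal, self-contained sketch of that pattern; the flag names, the "flag-test" value, and the `restoreGroups` function are illustrative stand-ins, not corso's actual flags or functions.

```go
// Sketch of a flag-test run mode: the command parses and validates flags,
// then returns before doing any real work when run in "flag-test" mode,
// so unit tests can exercise flag parsing alone. Names are hypothetical.
package main

import (
	"errors"
	"fmt"
	"os"

	"github.com/spf13/cobra"
)

var (
	runMode  string
	backupID string
)

func restoreGroups(cmd *cobra.Command, args []string) error {
	// Validation still runs in every mode.
	if backupID == "" {
		return errors.New("a backup ID is required")
	}

	// In flag-test mode, stop after flags are parsed and validated.
	if runMode == "flag-test" {
		return nil
	}

	// A real command would kick off the restore here.
	fmt.Println("restoring groups backup", backupID)

	return nil
}

func main() {
	cmd := &cobra.Command{Use: "groups", RunE: restoreGroups}
	cmd.Flags().StringVar(&runMode, "run-mode", "run", "run or flag-test")
	cmd.Flags().StringVar(&backupID, "backup", "", "ID of the backup to restore")

	if err := cmd.Execute(); err != nil {
		os.Exit(1)
	}
}
```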

View File

@@ -83,8 +83,7 @@ func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
 			cmd.SetOut(new(bytes.Buffer)) // drop output
 			cmd.SetErr(new(bytes.Buffer)) // drop output
 			err := cmd.Execute()
-			// assert.NoError(t, err, clues.ToCore(err))
-			assert.ErrorIs(t, err, utils.ErrNotYetImplemented, clues.ToCore(err))
+			assert.NoError(t, err, clues.ToCore(err))
 
 			opts := utils.MakeGroupsOpts(cmd)
 			assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)

View File

@@ -161,6 +161,10 @@ func (ctrl *Controller) incrementAwaitingMessages() {
 }
 
 func (ctrl *Controller) CacheItemInfo(dii details.ItemInfo) {
+	if dii.Groups != nil {
+		ctrl.backupDriveIDNames.Add(dii.Groups.DriveID, dii.Groups.DriveName)
+	}
+
 	if dii.SharePoint != nil {
 		ctrl.backupDriveIDNames.Add(dii.SharePoint.DriveID, dii.SharePoint.DriveName)
 	}
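
`CacheItemInfo` now records the drive ID and name carried by Groups item details, alongside the existing SharePoint and OneDrive cases, so restore can later resolve a readable drive name from an ID. The sketch below shows that ID-to-name caching idea in isolation; the `driveNameCache` type is hypothetical, while corso's real cache comes from `internal/common/idname`.

```go
// Minimal ID-to-name cache sketch: backup-time item details carry both a
// drive ID and its display name, and recording the pair lets restore map
// an ID back to a human-readable name later. Hypothetical type names.
package main

import "fmt"

type driveNameCache struct {
	idToName map[string]string
}

func newDriveNameCache() *driveNameCache {
	return &driveNameCache{idToName: map[string]string{}}
}

// Add records the drive ID -> name pair, ignoring empty IDs.
func (c *driveNameCache) Add(id, name string) {
	if id == "" {
		return
	}

	c.idToName[id] = name
}

// NameOf returns the cached name for the ID, if any.
func (c *driveNameCache) NameOf(id string) (string, bool) {
	name, ok := c.idToName[id]
	return name, ok
}

func main() {
	cache := newDriveNameCache()
	cache.Add("gp-drive-id", "Group Documents")

	if name, ok := cache.NameOf("gp-drive-id"); ok {
		fmt.Println(name) // Group Documents
	}
}
```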

View File

@@ -270,6 +270,8 @@ func (suite *ControllerUnitSuite) TestController_CacheItemInfo() {
 		odname = "od-name"
 		spid   = "sp-id"
 		spname = "sp-name"
+		gpid   = "gp-id"
+		gpname = "gp-name"
 		// intentionally declared outside the test loop
 		ctrl = &Controller{
 			wg: &sync.WaitGroup{},
@@ -324,6 +326,17 @@ func (suite *ControllerUnitSuite) TestController_CacheItemInfo() {
 			expectID:   spid,
 			expectName: spname,
 		},
+		{
+			name: "groups",
+			dii: details.ItemInfo{
+				Groups: &details.GroupsInfo{
+					DriveID:   gpid,
+					DriveName: gpname,
+				},
+			},
+			expectID:   gpid,
+			expectName: gpname,
+		},
 	}
 	for _, test := range table {
 		suite.Run(test.name, func() {
@@ -423,6 +436,20 @@ func (suite *ControllerIntegrationSuite) TestEmptyCollections() {
 				Service: selectors.ServiceSharePoint,
 			},
 		},
+		{
+			name: "GroupsNil",
+			col:  nil,
+			sel: selectors.Selector{
+				Service: selectors.ServiceGroups,
+			},
+		},
+		{
+			name: "GroupsEmpty",
+			col:  []data.RestoreCollection{},
+			sel: selectors.Selector{
+				Service: selectors.ServiceGroups,
+			},
+		},
 	}
 
 	for _, test := range table {
@@ -1249,6 +1276,28 @@ func (suite *ControllerIntegrationSuite) TestBackup_CreatesPrefixCollections() {
 				// path.ListsCategory.String(),
 			},
 		},
+		{
+			name:        "Groups",
+			resourceCat: resource.Sites,
+			selectorFunc: func(t *testing.T) selectors.Selector {
+				sel := selectors.NewGroupsBackup([]string{tconfig.M365TeamID(t)})
+				sel.Include(
+					sel.LibraryFolders([]string{selectors.NoneTgt}),
+					// not yet in use
+					// sel.Pages([]string{selectors.NoneTgt}),
+					// sel.Lists([]string{selectors.NoneTgt}),
+				)
+
+				return sel.Selector
+			},
+			service: path.GroupsService,
+			categories: []string{
+				path.LibrariesCategory.String(),
+				// not yet in use
+				// path.PagesCategory.String(),
+				// path.ListsCategory.String(),
+			},
+		},
 	}
 
 	for _, test := range table {
@@ -1381,12 +1430,14 @@ func (suite *DisconnectedUnitSuite) TestController_Status() {
 func (suite *DisconnectedUnitSuite) TestVerifyBackupInputs_allServices() {
 	sites := []string{"abc.site.foo", "bar.site.baz"}
+	groups := []string{"123", "456"}
 
 	tests := []struct {
 		name       string
 		excludes   func(t *testing.T) selectors.Selector
 		filters    func(t *testing.T) selectors.Selector
 		includes   func(t *testing.T) selectors.Selector
+		cachedIDs  []string
 		checkError assert.ErrorAssertionFunc
 	}{
 		{
@@ -1433,6 +1484,7 @@ func (suite *DisconnectedUnitSuite) TestVerifyBackupInputs_allServices() {
 		{
 			name:       "valid sites",
 			checkError: assert.NoError,
+			cachedIDs:  sites,
 			excludes: func(t *testing.T) selectors.Selector {
 				sel := selectors.NewSharePointBackup([]string{"abc.site.foo", "bar.site.baz"})
 				sel.DiscreteOwner = "abc.site.foo"
@@ -1455,6 +1507,7 @@ func (suite *DisconnectedUnitSuite) TestVerifyBackupInputs_allServices() {
 		{
 			name:       "invalid sites",
 			checkError: assert.Error,
+			cachedIDs:  sites,
 			excludes: func(t *testing.T) selectors.Selector {
 				sel := selectors.NewSharePointBackup([]string{"fnords.smarfs.brawnhilda"})
 				sel.Exclude(sel.AllData())
@@ -1471,17 +1524,61 @@ func (suite *DisconnectedUnitSuite) TestVerifyBackupInputs_allServices() {
 				return sel.Selector
 			},
 		},
+		{
+			name:       "valid groups",
+			checkError: assert.NoError,
+			cachedIDs:  groups,
+			excludes: func(t *testing.T) selectors.Selector {
+				sel := selectors.NewGroupsBackup([]string{"123", "456"})
+				sel.DiscreteOwner = "123"
+				sel.Exclude(sel.AllData())
+				return sel.Selector
+			},
+			filters: func(t *testing.T) selectors.Selector {
+				sel := selectors.NewGroupsBackup([]string{"123", "456"})
+				sel.DiscreteOwner = "123"
+				sel.Filter(sel.AllData())
+				return sel.Selector
+			},
+			includes: func(t *testing.T) selectors.Selector {
+				sel := selectors.NewGroupsBackup([]string{"123", "456"})
+				sel.DiscreteOwner = "123"
+				sel.Include(sel.AllData())
+				return sel.Selector
+			},
+		},
+		{
+			name:       "invalid groups",
+			checkError: assert.Error,
+			cachedIDs:  groups,
+			excludes: func(t *testing.T) selectors.Selector {
+				sel := selectors.NewGroupsBackup([]string{"789"})
+				sel.Exclude(sel.AllData())
+				return sel.Selector
+			},
+			filters: func(t *testing.T) selectors.Selector {
+				sel := selectors.NewGroupsBackup([]string{"789"})
+				sel.Filter(sel.AllData())
+				return sel.Selector
+			},
+			includes: func(t *testing.T) selectors.Selector {
+				sel := selectors.NewGroupsBackup([]string{"789"})
+				sel.Include(sel.AllData())
+				return sel.Selector
+			},
+		},
 	}
 
 	for _, test := range tests {
 		suite.Run(test.name, func() {
 			t := suite.T()
-			err := verifyBackupInputs(test.excludes(t), sites)
+			err := verifyBackupInputs(test.excludes(t), test.cachedIDs)
 			test.checkError(t, err, clues.ToCore(err))
 
-			err = verifyBackupInputs(test.filters(t), sites)
+			err = verifyBackupInputs(test.filters(t), test.cachedIDs)
 			test.checkError(t, err, clues.ToCore(err))
 
-			err = verifyBackupInputs(test.includes(t), sites)
+			err = verifyBackupInputs(test.includes(t), test.cachedIDs)
 			test.checkError(t, err, clues.ToCore(err))
 		})
 	}

View File

@@ -10,6 +10,7 @@ import (
 	"github.com/alcionai/corso/src/internal/m365/collection/drive"
 	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/internal/m365/service/exchange"
+	"github.com/alcionai/corso/src/internal/m365/service/groups"
 	"github.com/alcionai/corso/src/internal/m365/service/onedrive"
 	"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
 	"github.com/alcionai/corso/src/internal/m365/support"
@@ -77,6 +78,16 @@ func (ctrl *Controller) ConsumeRestoreCollections(
 			deets,
 			errs,
 			ctr)
+	case path.GroupsService:
+		status, err = groups.ConsumeRestoreCollections(
+			ctx,
+			rcc,
+			ctrl.AC,
+			ctrl.backupDriveIDNames,
+			dcs,
+			deets,
+			errs,
+			ctr)
 	default:
 		err = clues.Wrap(clues.New(service.String()), "service not supported")
 	}

View File

@@ -5,12 +5,17 @@ import (
 	"errors"
 
 	"github.com/alcionai/clues"
+	"github.com/microsoftgraph/msgraph-sdk-go/models"
 
+	"github.com/alcionai/corso/src/internal/common/dttm"
 	"github.com/alcionai/corso/src/internal/common/idname"
+	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/m365/collection/drive"
 	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/internal/operations/inject"
 	"github.com/alcionai/corso/src/pkg/backup/details"
+	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/count"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"
@@ -30,7 +35,8 @@ func ConsumeRestoreCollections(
 ) (*support.ControllerOperationStatus, error) {
 	var (
 		restoreMetrics support.CollectionMetrics
-		// caches      = onedrive.NewRestoreCaches(backupDriveIDNames)
+		caches         = drive.NewRestoreCaches(backupDriveIDNames)
+		lrh            = drive.NewLibraryRestoreHandler(ac, rcc.Selector.PathService())
 		el             = errs.Local()
 	)
@@ -52,6 +58,7 @@ func ConsumeRestoreCollections(
 		var (
 			err      error
+			resp     models.Siteable
 			category = dc.FullPath().Category()
 			metrics  support.CollectionMetrics
 			ictx     = clues.Add(ctx,
@@ -63,8 +70,39 @@ func ConsumeRestoreCollections(
 		switch dc.FullPath().Category() {
 		case path.LibrariesCategory:
-			// TODO
+			// TODO(meain): As of now we only restore the root site
+			// and that too to whatever is currently the root site of the
+			// group and not the original one. Not sure if the
+			// original can be changed.
+			resp, err = ac.Groups().GetRootSite(ctx, rcc.ProtectedResource.ID())
+			if err != nil {
+				return nil, err
+			}
+
+			pr := idname.NewProvider(ptr.Val(resp.GetId()), ptr.Val(resp.GetName()))
+			srcc := inject.RestoreConsumerConfig{
+				BackupVersion:     rcc.BackupVersion,
+				Options:           rcc.Options,
+				ProtectedResource: pr,
+				RestoreConfig:     rcc.RestoreConfig,
+				Selector:          rcc.Selector,
+			}
+
+			err = caches.Populate(ctx, lrh, srcc.ProtectedResource.ID())
+			if err != nil {
+				return nil, clues.Wrap(err, "initializing restore caches")
+			}
+
+			metrics, err = drive.RestoreCollection(
+				ictx,
+				lrh,
+				srcc,
+				dc,
+				caches,
+				deets,
+				control.DefaultRestoreContainerName(dttm.HumanReadableDriveItem),
+				errs,
+				ctr)
 		default:
 			return nil, clues.New("data category not supported").
 				With("category", category).
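
The Graph SDK's `Siteable` model exposes its fields through pointer-returning getters, which is why the root site's ID and name are read through `ptr.Val` above. The snippet below is a hypothetical, generic stand-in that shows the nil-safe dereference behavior being assumed; the real helper lives in `internal/common/ptr`.

```go
// Hypothetical stand-in for a ptr.Val-style helper: return the pointed-to
// value, or the type's zero value when the pointer is nil, so SDK getters
// can be read without nil checks at every call site.
package main

import "fmt"

func val[T any](p *T) T {
	if p == nil {
		var zero T
		return zero
	}

	return *p
}

func main() {
	id := "root-site-id"

	fmt.Printf("%q\n", val(&id))         // "root-site-id"
	fmt.Printf("%q\n", val[string](nil)) // "" (zero value, no panic)
}
```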

View File

@@ -28,7 +28,7 @@ func locationRef(
 	// was in the root of the data type.
 	elems := repoRef.Folders()
 
-	if ent.OneDrive != nil || ent.SharePoint != nil {
+	if ent.OneDrive != nil || ent.SharePoint != nil || ent.Groups != nil {
 		dp, err := path.ToDrivePath(repoRef)
 		if err != nil {
 			return nil, clues.Wrap(err, "fallback for LocationRef")
@@ -73,6 +73,8 @@ func drivePathMerge(
 	if ent.SharePoint != nil {
 		driveID = ent.SharePoint.DriveID
+	} else if ent.Groups != nil {
+		driveID = ent.Groups.DriveID
 	} else if ent.OneDrive != nil {
 		driveID = ent.OneDrive.DriveID
 	}
@@ -87,9 +89,21 @@ func drivePathMerge(
 		driveID = odp.DriveID
 	}
 
-	return basicLocationPath(
-		repoRef,
-		path.BuildDriveLocation(driveID, locRef.Elements()...))
+	driveLoc := path.BuildDriveLocation(driveID, locRef.Elements()...)
+
+	if ent.Groups != nil {
+		siteID := ent.Groups.SiteID
+
+		// Fallback to getting from RepoRef.
+		if len(siteID) == 0 {
+			folders := repoRef.Folders()
+			siteID = folders[1]
+		}
+
+		driveLoc = path.BuildGroupsDriveLocation(siteID, driveID, locRef.Elements()...)
+	}
+
+	return basicLocationPath(repoRef, driveLoc)
 }
 
 func makeRestorePathsForEntry(
@@ -129,13 +143,14 @@ func makeRestorePathsForEntry(
 	// * Exchange Email/Contacts
 	// * OneDrive/SharePoint (needs drive information)
 	switch true {
-	case ent.Exchange != nil || ent.Groups != nil:
+	case ent.Exchange != nil:
 		// TODO(ashmrtn): Eventually make Events have it's own function to handle
 		// setting the restore destination properly.
 		res.RestorePath, err = basicLocationPath(repoRef, locRef)
 	case ent.OneDrive != nil ||
 		(ent.SharePoint != nil && ent.SharePoint.ItemType == details.SharePointLibrary) ||
-		(ent.SharePoint != nil && ent.SharePoint.ItemType == details.OneDriveItem):
+		(ent.SharePoint != nil && ent.SharePoint.ItemType == details.OneDriveItem) ||
+		(ent.Groups != nil && ent.Groups.ItemType == details.SharePointLibrary):
 		res.RestorePath, err = drivePathMerge(ent, repoRef, locRef)
 	default:
 		return res, clues.New("unknown entry type").WithClues(ctx)
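
For Groups entries, `drivePathMerge` anchors the restore location under the owning site as well as the drive, falling back to the site ID embedded in the RepoRef when the entry does not carry one. The self-contained sketch below mirrors only the shape of that logic to make the resulting location format visible; it does not use corso's `path.Builder`, and the helper name is hypothetical.

```go
// Sketch of the Groups drive-location shape: the drive-relative location is
// prefixed with "sites/<siteID>/drives/<driveID>", and when the entry lacks
// a SiteID the site is read from the repo-ref folders (index 1, matching
// "sites/<siteID>/drives/...").
package main

import (
	"fmt"
	"strings"
)

func groupsDriveLocation(siteID, driveID string, repoRefFolders, locElems []string) string {
	// Fall back to the site ID embedded in the repo ref.
	if siteID == "" && len(repoRefFolders) > 1 {
		siteID = repoRefFolders[1]
	}

	elems := append([]string{"sites", siteID, "drives", driveID}, locElems...)

	return strings.Join(elems, "/")
}

func main() {
	repoRefFolders := []string{"sites", "site-id", "drives", "drive-id", "root:"}

	fmt.Println(groupsDriveLocation("", "drive-id", repoRefFolders, []string{"root:", "folder"}))
	// sites/site-id/drives/drive-id/root:/folder
}
```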

View File

@@ -48,8 +48,10 @@ func (suite *RestorePathTransformerUnitSuite) TestGetPaths() {
 	var (
 		driveID       = "some-drive-id"
+		siteID        = "some-site-id"
 		extraItemName = "some-item"
 
 		SharePointRootItemPath = testdata.SharePointRootPath.MustAppend(extraItemName, true)
+		GroupsRootItemPath     = testdata.GroupsRootPath.MustAppend(extraItemName, true)
 	)
 
 	table := []struct {
@@ -59,6 +61,67 @@ func (suite *RestorePathTransformerUnitSuite) TestGetPaths() {
 		expectErr     assert.ErrorAssertionFunc
 		expected      []expectPaths
 	}{
+		{
+			name: "Groups List Errors",
+			// No version bump for the change so we always have to check for this.
+			backupVersion: version.All8MigrateUserPNToID,
+			input: []*details.Entry{
+				{
+					RepoRef:     GroupsRootItemPath.RR.String(),
+					LocationRef: GroupsRootItemPath.Loc.String(),
+					ItemInfo: details.ItemInfo{
+						Groups: &details.GroupsInfo{
+							ItemType: details.SharePointList,
+						},
+					},
+				},
+			},
+			expectErr: assert.Error,
+		},
+		{
+			name: "Groups Page Errors",
+			// No version bump for the change so we always have to check for this.
+			backupVersion: version.All8MigrateUserPNToID,
+			input: []*details.Entry{
+				{
+					RepoRef:     GroupsRootItemPath.RR.String(),
+					LocationRef: GroupsRootItemPath.Loc.String(),
+					ItemInfo: details.ItemInfo{
+						Groups: &details.GroupsInfo{
+							ItemType: details.SharePointPage,
+						},
+					},
+				},
+			},
+			expectErr: assert.Error,
+		},
+		{
+			name:          "Groups, no LocationRef, no DriveID, item in root",
+			backupVersion: version.OneDrive6NameInMeta,
+			input: []*details.Entry{
+				{
+					RepoRef: GroupsRootItemPath.RR.String(),
+					ItemInfo: details.ItemInfo{
+						Groups: &details.GroupsInfo{
+							ItemType: details.SharePointLibrary,
+							SiteID:   siteID,
+						},
+					},
+				},
+			},
+			expectErr: assert.NoError,
+			expected: []expectPaths{
+				{
+					storage: GroupsRootItemPath.RR.String(),
+					restore: toRestore(
+						GroupsRootItemPath.RR,
+						append(
+							[]string{"sites", siteID, "drives"},
+							// testdata path has '.d' on the drives folder we need to remove.
+							GroupsRootItemPath.RR.Folders()[3:]...)...),
+				},
+			},
+		},
 		{
 			name: "SharePoint List Errors",
 			// No version bump for the change so we always have to check for this.

View File

@@ -428,7 +428,9 @@ func formatDetailsForRestoration(
 		return nil, clues.Wrap(err, "getting restore paths")
 	}
 
-	if sel.Service == selectors.ServiceOneDrive || sel.Service == selectors.ServiceSharePoint {
+	if sel.Service == selectors.ServiceOneDrive ||
+		sel.Service == selectors.ServiceSharePoint ||
+		sel.Service == selectors.ServiceGroups {
 		paths, err = onedrive.AugmentRestorePaths(backupVersion, paths)
 		if err != nil {
 			return nil, clues.Wrap(err, "augmenting paths")

View File

@@ -45,7 +45,9 @@ func locFromRepo(rr path.Path, isItem bool) *path.Builder {
 		loc = loc.Append(strings.TrimSuffix(e, folderSuffix))
 	}
 
-	if rr.Service() == path.OneDriveService || rr.Category() == path.LibrariesCategory {
+	if rr.Service() == path.GroupsService {
+		loc = loc.PopFront().PopFront().PopFront()
+	} else if rr.Service() == path.OneDriveService || rr.Category() == path.LibrariesCategory {
 		loc = loc.PopFront()
 	}
@@ -730,6 +732,8 @@
 		},
 	}
 
+	GroupsRootPath = mustPathRep("tenant-id/groups/group-id/libraries/sites/site-id/drives/foo/root:", false)
+
 	SharePointRootPath    = mustPathRep("tenant-id/sharepoint/site-id/libraries/drives/foo/root:", false)
 	SharePointLibraryPath = SharePointRootPath.MustAppend("library", false)
 	SharePointBasePath1   = SharePointLibraryPath.MustAppend("a", false)

View File

@@ -1,6 +1,8 @@
 package path
 
-import "github.com/alcionai/clues"
+import (
+	"github.com/alcionai/clues"
+)
 
 // TODO: Move this into m365/collection/drive
 // drivePath is used to represent path components
@@ -27,6 +29,15 @@ func ToDrivePath(p Path) (*DrivePath, error) {
 			With("path_folders", p.Folder(false))
 	}
 
+	// FIXME(meain): Don't have any service specific code within this
+	// function. Change this to either accept only the fragment of the
+	// path that is the drive path or have a separate function for each
+	// service.
+	if p.Service() == GroupsService {
+		// Groups have an extra /sites/<siteID> in the path
+		return &DrivePath{DriveID: folders[3], Root: folders[4], Folders: folders[5:]}, nil
+	}
+
 	return &DrivePath{DriveID: folders[1], Root: folders[2], Folders: folders[3:]}, nil
 }
@@ -49,3 +60,13 @@ func BuildDriveLocation(
 ) *Builder {
 	return Builder{}.Append("drives", driveID).Append(unescapedElements...)
 }
+
+// BuildGroupsDriveLocation is same as BuildDriveLocation, but for
+// group drives and thus includes siteID.
+func BuildGroupsDriveLocation(
+	siteID string,
+	driveID string,
+	unescapedElements ...string,
+) *Builder {
+	return Builder{}.Append("sites", siteID, "drives", driveID).Append(unescapedElements...)
+}
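
Because a Groups repo ref carries an extra `sites/<siteID>` pair ahead of the `drives` segment, `ToDrivePath` shifts its indices by two for that service, and `BuildGroupsDriveLocation` prepends the same pair when building locations. The standalone sketch below illustrates that index mapping with hypothetical helper names; it is not corso's actual parser.

```go
// Sketch of the index shift for Groups drive paths: the folder list gains a
// leading "sites/<siteID>" pair, so drive ID, root, and remaining folders
// move from indices 1/2/3+ to 3/4/5+.
package main

import "fmt"

type drivePath struct {
	DriveID string
	Root    string
	Folders []string
}

func parseDriveFolders(folders []string, isGroups bool) drivePath {
	if isGroups {
		// Groups: sites/<siteID>/drives/<driveID>/<root>/<folders...>
		return drivePath{DriveID: folders[3], Root: folders[4], Folders: folders[5:]}
	}

	// OneDrive/SharePoint: drives/<driveID>/<root>/<folders...>
	return drivePath{DriveID: folders[1], Root: folders[2], Folders: folders[3:]}
}

func main() {
	groups := []string{"sites", "site-id", "drives", "drive-id", "root:", "a", "b"}
	sharepoint := []string{"drives", "drive-id", "root:", "a", "b"}

	fmt.Printf("%+v\n", parseDriveFolders(groups, true))
	// {DriveID:drive-id Root:root: Folders:[a b]}
	fmt.Printf("%+v\n", parseDriveFolders(sharepoint, false))
	// {DriveID:drive-id Root:root: Folders:[a b]}
}
```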