Create service-specific handlers that know how to run an export (#4491)

First step in reducing the number of places we have to check the service type manually. Creates a way to get a handle to a service-specific handler, and implements exports through those handlers.

---

#### Does this PR need a docs update or release note?
- [ ] ✅ Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] ⛔ No

#### Type of change
- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)
* #4254

#### Test Plan
- [ ] 💪 Manual
- [x] ⚡ Unit test
- [x] 💚 E2E
Commit 74cf0ab737 · Parent 3df3a44c7b
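Before the diff, the gist of the new pattern as a sketch: the operation layer asks the controller for a handler once, then calls service-agnostic methods on it, so only the factory switches on the service type. The names `Controller.NewServiceHandler`, `inject.ServiceHandler`, and `ProduceExportCollections` match the diff below, but the `runExport` wrapper and its argument plumbing are illustrative only:

```go
package example

import (
	"context"

	"github.com/alcionai/clues"

	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/internal/m365"
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/export"
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/path"
)

// runExport is a hypothetical wrapper showing the resulting call path:
// resolve a service-specific handler once, then run the export through it.
func runExport(
	ctx context.Context,
	ctrl *m365.Controller,
	opts control.Options,
	service path.ServiceType,
	backupVersion int,
	exportCfg control.ExportConfig,
	dcs []data.RestoreCollection,
) ([]export.Collectioner, error) {
	// One switch over the service type, hidden behind the factory.
	handler, err := ctrl.NewServiceHandler(opts, service)
	if err != nil {
		return nil, clues.Stack(err)
	}

	stats := data.ExportStats{}

	// The handler already knows its service; no selector or options
	// need to be threaded through the export call anymore.
	return handler.ProduceExportCollections(
		ctx,
		backupVersion,
		exportCfg,
		dcs,
		&stats,
		fault.New(true))
}
```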
@@ -25,9 +25,9 @@ var ErrNoResourceLookup = clues.New("missing resource lookup client")
 // must comply with BackupProducer and RestoreConsumer
 var (
     _ inject.BackupProducer  = &Controller{}
     _ inject.RestoreConsumer = &Controller{}
-    _ inject.ExportConsumer   = &Controller{}
+    _ inject.ToServiceHandler = &Controller{}
 )
 
 // Controller is a struct used to wrap the GraphServiceClient and
@@ -1,89 +1,33 @@
 package m365
 
 import (
-    "context"
-
     "github.com/alcionai/clues"
 
-    "github.com/alcionai/corso/src/internal/data"
-    "github.com/alcionai/corso/src/internal/diagnostics"
-    "github.com/alcionai/corso/src/internal/m365/graph"
     "github.com/alcionai/corso/src/internal/m365/service/groups"
     "github.com/alcionai/corso/src/internal/m365/service/onedrive"
     "github.com/alcionai/corso/src/internal/m365/service/sharepoint"
-    "github.com/alcionai/corso/src/internal/m365/support"
-    "github.com/alcionai/corso/src/pkg/backup/details"
+    "github.com/alcionai/corso/src/internal/operations/inject"
     "github.com/alcionai/corso/src/pkg/control"
-    "github.com/alcionai/corso/src/pkg/export"
-    "github.com/alcionai/corso/src/pkg/fault"
-    "github.com/alcionai/corso/src/pkg/selectors"
+    "github.com/alcionai/corso/src/pkg/path"
 )
 
-// ProduceExportCollections exports data from the specified collections
-func (ctrl *Controller) ProduceExportCollections(
-    ctx context.Context,
-    backupVersion int,
-    sels selectors.Selector,
-    exportCfg control.ExportConfig,
-    opts control.Options,
-    dcs []data.RestoreCollection,
-    stats *data.ExportStats,
-    errs *fault.Bus,
-) ([]export.Collectioner, error) {
-    ctx, end := diagnostics.Span(ctx, "m365:export")
-    defer end()
-
-    ctx = graph.BindRateLimiterConfig(ctx, graph.LimiterCfg{Service: sels.PathService()})
-    ctx = clues.Add(ctx, "export_config", exportCfg)
-
-    var (
-        expCollections []export.Collectioner
-        status         *support.ControllerOperationStatus
-        deets          = &details.Builder{}
-        err            error
-    )
-
-    switch sels.Service {
-    case selectors.ServiceOneDrive:
-        expCollections, err = onedrive.ProduceExportCollections(
-            ctx,
-            backupVersion,
-            exportCfg,
-            opts,
-            dcs,
-            deets,
-            stats,
-            errs)
-    case selectors.ServiceSharePoint:
-        expCollections, err = sharepoint.ProduceExportCollections(
-            ctx,
-            backupVersion,
-            exportCfg,
-            opts,
-            dcs,
-            ctrl.backupDriveIDNames,
-            deets,
-            stats,
-            errs)
-    case selectors.ServiceGroups:
-        expCollections, err = groups.ProduceExportCollections(
-            ctx,
-            backupVersion,
-            exportCfg,
-            opts,
-            dcs,
-            ctrl.backupDriveIDNames,
-            ctrl.backupSiteIDWebURL,
-            deets,
-            stats,
-            errs)
-
-    default:
-        err = clues.Wrap(clues.New(sels.Service.String()), "service not supported")
-    }
-
-    ctrl.incrementAwaitingMessages()
-    ctrl.UpdateStatus(status)
-
-    return expCollections, err
-}
+// NewServiceHandler returns an instance of a struct capable of running various
+// operations for a given service.
+func (ctrl *Controller) NewServiceHandler(
+    opts control.Options,
+    service path.ServiceType,
+) (inject.ServiceHandler, error) {
+    switch service {
+    case path.OneDriveService:
+        return onedrive.NewOneDriveHandler(opts), nil
+
+    case path.SharePointService:
+        return sharepoint.NewSharePointHandler(opts), nil
+
+    case path.GroupsService:
+        return groups.NewGroupsHandler(opts), nil
+    }
+
+    return nil, clues.New("unrecognized service").
+        With("service_type", service.String())
+}
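The factory also gives callers a clean failure mode for unsupported services. A unit-test-style sketch of that contract (hypothetical test, not part of this PR; it assumes a zero-value Controller is safe here because the factory only switches on the service type):

```go
package example

import (
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/alcionai/corso/src/internal/m365"
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/path"
)

// Sketch: the factory returns a clues error for services without a
// handler, so callers can fail fast before building an operation.
func TestNewServiceHandler_unsupported(t *testing.T) {
	// assumption: zero value suffices since only the switch runs
	ctrl := &m365.Controller{}

	_, err := ctrl.NewServiceHandler(
		control.DefaultOptions(),
		path.ExchangeService) // no handler registered in this PR

	assert.Error(t, err, "expected unrecognized service error")
}
```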
@@ -17,7 +17,6 @@ import (
     "github.com/alcionai/corso/src/pkg/export"
     "github.com/alcionai/corso/src/pkg/fault"
     "github.com/alcionai/corso/src/pkg/path"
-    "github.com/alcionai/corso/src/pkg/selectors"
 )
 
 var _ inject.BackupProducer = &Controller{}
@@ -87,9 +86,7 @@ func (ctrl Controller) CacheItemInfo(dii details.ItemInfo) {}
 func (ctrl Controller) ProduceExportCollections(
     _ context.Context,
     _ int,
-    _ selectors.Selector,
     _ control.ExportConfig,
-    _ control.Options,
     _ []data.RestoreCollection,
     _ *data.ExportStats,
     _ *fault.Bus,
@@ -10,6 +10,7 @@ import (
     "github.com/alcionai/corso/src/internal/data"
     "github.com/alcionai/corso/src/internal/m365/collection/drive"
     "github.com/alcionai/corso/src/internal/m365/collection/groups"
+    "github.com/alcionai/corso/src/internal/operations/inject"
     "github.com/alcionai/corso/src/pkg/backup/details"
     "github.com/alcionai/corso/src/pkg/control"
     "github.com/alcionai/corso/src/pkg/export"
@@ -18,17 +19,41 @@ import (
     "github.com/alcionai/corso/src/pkg/path"
 )
 
+var _ inject.ServiceHandler = &groupsHandler{}
+
+func NewGroupsHandler(
+    opts control.Options,
+) *groupsHandler {
+    return &groupsHandler{
+        opts:               opts,
+        backupDriveIDNames: idname.NewCache(nil),
+        backupSiteIDWebURL: idname.NewCache(nil),
+    }
+}
+
+type groupsHandler struct {
+    opts control.Options
+
+    backupDriveIDNames idname.CacheBuilder
+    backupSiteIDWebURL idname.CacheBuilder
+}
+
+func (h *groupsHandler) CacheItemInfo(v details.ItemInfo) {
+    if v.Groups == nil {
+        return
+    }
+
+    h.backupDriveIDNames.Add(v.Groups.DriveID, v.Groups.DriveName)
+    h.backupSiteIDWebURL.Add(v.Groups.SiteID, v.Groups.WebURL)
+}
+
 // ProduceExportCollections will create the export collections for the
 // given restore collections.
-func ProduceExportCollections(
+func (h *groupsHandler) ProduceExportCollections(
     ctx context.Context,
     backupVersion int,
     exportCfg control.ExportConfig,
-    opts control.Options,
     dcs []data.RestoreCollection,
-    backupDriveIDNames idname.Cacher,
-    backupSiteIDWebURL idname.Cacher,
-    deets *details.Builder,
     stats *data.ExportStats,
     errs *fault.Bus,
 ) ([]export.Collectioner, error) {
@@ -55,13 +80,14 @@ func ProduceExportCollections(
                 backupVersion,
                 exportCfg,
                 stats)
 
         case path.LibrariesCategory:
             drivePath, err := path.ToDrivePath(restoreColl.FullPath())
             if err != nil {
                 return nil, clues.Wrap(err, "transforming path to drive path").WithClues(ctx)
             }
 
-            driveName, ok := backupDriveIDNames.NameOf(drivePath.DriveID)
+            driveName, ok := h.backupDriveIDNames.NameOf(drivePath.DriveID)
             if !ok {
                 // This should not happen, but just in case
                 logger.Ctx(ctx).With("drive_id", drivePath.DriveID).Info("drive name not found, using drive id")
@@ -71,7 +97,7 @@ func ProduceExportCollections(
             rfds := restoreColl.FullPath().Folders()
             siteName := rfds[1] // use siteID by default
 
-            webURL, ok := backupSiteIDWebURL.NameOf(siteName)
+            webURL, ok := h.backupSiteIDWebURL.NameOf(siteName)
             if !ok {
                 // This should not happen, but just in case
                 logger.Ctx(ctx).With("site_id", rfds[1]).Info("site weburl not found, using site id")
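Note the shape change in this file: the drive-id-to-name and site-id-to-webURL caches move from explicit `ProduceExportCollections` parameters into handler state, populated through `CacheItemInfo`. A minimal sketch of the intended flow (the `warmGroupsCaches` helper is illustrative only; in the real code path the operation invokes `CacheItemInfo` while enumerating backup details):

```go
package example

import (
	"github.com/alcionai/corso/src/internal/m365/service/groups"
	"github.com/alcionai/corso/src/internal/operations/inject"
	"github.com/alcionai/corso/src/pkg/backup/details"
	"github.com/alcionai/corso/src/pkg/control"
)

// Illustrative only: feed every ItemInfo seen during details enumeration
// to the handler. CacheItemInfo ignores entries with no Groups info, so
// the caller doesn't need to pre-filter by service.
func warmGroupsCaches(infos []details.ItemInfo) inject.ServiceHandler {
	handler := groups.NewGroupsHandler(control.DefaultOptions())

	for _, info := range infos {
		handler.CacheItemInfo(info)
	}

	// By the time exports humanize paths, NameOf lookups hit warm caches.
	return handler
}
```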
@@ -4,21 +4,19 @@ import (
     "bytes"
     "context"
     "io"
-    "strings"
     "testing"
 
     "github.com/alcionai/clues"
     "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/suite"
 
-    "github.com/alcionai/corso/src/internal/common/idname"
     "github.com/alcionai/corso/src/internal/data"
     dataMock "github.com/alcionai/corso/src/internal/data/mock"
     groupMock "github.com/alcionai/corso/src/internal/m365/service/groups/mock"
     odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
-    odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/internal/version"
+    "github.com/alcionai/corso/src/pkg/backup/details"
     "github.com/alcionai/corso/src/pkg/control"
     "github.com/alcionai/corso/src/pkg/export"
     "github.com/alcionai/corso/src/pkg/fault"
@@ -87,9 +85,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {
             Path: p,
             ItemData: []data.Item{
                 &dataMock.Item{
                     ItemID: itemID,
                     Reader: body,
-                    ItemInfo: dii,
                 },
             },
         },
@@ -99,17 +96,14 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {
 
     stats := data.ExportStats{}
 
-    ecs, err := ProduceExportCollections(
-        ctx,
-        int(version.Backup),
-        exportCfg,
-        control.DefaultOptions(),
-        dcs,
-        nil,
-        nil,
-        nil,
-        &stats,
-        fault.New(true))
+    ecs, err := NewGroupsHandler(control.DefaultOptions()).
+        ProduceExportCollections(
+            ctx,
+            int(version.Backup),
+            exportCfg,
+            dcs,
+            &stats,
+            fault.New(true))
+
     assert.NoError(t, err, "export collections error")
     assert.Len(t, ecs, 1, "num of collections")
@@ -154,13 +148,19 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
         driveName = "driveName1"
         exportCfg = control.ExportConfig{}
         dpb       = odConsts.DriveFolderPrefixBuilder(driveID)
-        driveNameCache = idname.NewCache(
-            // Cache check with lowercased ids
-            map[string]string{strings.ToLower(driveID): driveName})
-        siteWebURLCache = idname.NewCache(
-            // Cache check with lowercased ids
-            map[string]string{strings.ToLower(siteID): siteWebURL})
-        dii = odStub.DriveItemInfo()
+        dii = details.ItemInfo{
+            Groups: &details.GroupsInfo{
+                ItemType:  details.SharePointLibrary,
+                ItemName:  "name1",
+                Size:      1,
+                DriveName: driveName,
+                DriveID:   driveID,
+                SiteID:    siteID,
+                WebURL:    siteWebURL,
+            },
+        }
 
         expectedPath  = "Libraries/" + siteEscapedName + "/" + driveName
         expectedItems = []export.Item{
             {
@@ -171,8 +171,6 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
         }
     )
 
-    dii.OneDrive.ItemName = "name1"
-
     p, err := dpb.ToDataLayerPath(
         "t",
         "u",
@@ -189,9 +187,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
             Path: p,
             ItemData: []data.Item{
                 &dataMock.Item{
                     ItemID: "id1.data",
                     Reader: io.NopCloser(bytes.NewBufferString("body1")),
-                    ItemInfo: dii,
                 },
             },
         },
@@ -199,17 +196,16 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
         },
     }
 
+    handler := NewGroupsHandler(control.DefaultOptions())
+    handler.CacheItemInfo(dii)
+
     stats := data.ExportStats{}
 
-    ecs, err := ProduceExportCollections(
+    ecs, err := handler.ProduceExportCollections(
         ctx,
         int(version.Backup),
         exportCfg,
-        control.DefaultOptions(),
        dcs,
-        driveNameCache,
-        siteWebURLCache,
-        nil,
         &stats,
         fault.New(true))
     assert.NoError(t, err, "export collections error")
@@ -7,6 +7,7 @@ import (
 
     "github.com/alcionai/corso/src/internal/data"
     "github.com/alcionai/corso/src/internal/m365/collection/drive"
+    "github.com/alcionai/corso/src/internal/operations/inject"
     "github.com/alcionai/corso/src/pkg/backup/details"
     "github.com/alcionai/corso/src/pkg/control"
     "github.com/alcionai/corso/src/pkg/export"
@@ -14,15 +15,29 @@ import (
     "github.com/alcionai/corso/src/pkg/path"
 )
 
+var _ inject.ServiceHandler = &onedriveHandler{}
+
+func NewOneDriveHandler(
+    opts control.Options,
+) *onedriveHandler {
+    return &onedriveHandler{
+        opts: opts,
+    }
+}
+
+type onedriveHandler struct {
+    opts control.Options
+}
+
+func (h *onedriveHandler) CacheItemInfo(v details.ItemInfo) {}
+
 // ProduceExportCollections will create the export collections for the
 // given restore collections.
-func ProduceExportCollections(
+func (h *onedriveHandler) ProduceExportCollections(
     ctx context.Context,
     backupVersion int,
     exportCfg control.ExportConfig,
-    opts control.Options,
     dcs []data.RestoreCollection,
-    deets *details.Builder,
     stats *data.ExportStats,
     errs *fault.Bus,
 ) ([]export.Collectioner, error) {
@@ -14,7 +14,6 @@ import (
     dataMock "github.com/alcionai/corso/src/internal/data/mock"
     "github.com/alcionai/corso/src/internal/m365/collection/drive"
     odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
-    odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/internal/version"
     "github.com/alcionai/corso/src/pkg/control"
@@ -313,7 +312,6 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
     var (
         exportCfg = control.ExportConfig{}
         dpb       = odConsts.DriveFolderPrefixBuilder("driveID1")
-        dii       = odStub.DriveItemInfo()
         expectedItems = []export.Item{
             {
                 ID: "id1.data",
@@ -323,8 +321,6 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
         }
     )
 
-    dii.OneDrive.ItemName = "name1"
-
     p, err := dpb.ToDataLayerOneDrivePath("t", "u", false)
     assert.NoError(t, err, "build path")
 
@@ -334,9 +330,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
             Path: p,
             ItemData: []data.Item{
                 &dataMock.Item{
                     ItemID: "id1.data",
                     Reader: io.NopCloser(bytes.NewBufferString("body1")),
-                    ItemInfo: dii,
                 },
             },
         },
@@ -346,15 +341,14 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 
     stats := data.ExportStats{}
 
-    ecs, err := ProduceExportCollections(
-        ctx,
-        int(version.Backup),
-        exportCfg,
-        control.DefaultOptions(),
-        dcs,
-        nil,
-        &stats,
-        fault.New(true))
+    ecs, err := NewOneDriveHandler(control.DefaultOptions()).
+        ProduceExportCollections(
+            ctx,
+            int(version.Backup),
+            exportCfg,
+            dcs,
+            &stats,
+            fault.New(true))
+
     assert.NoError(t, err, "export collections error")
     assert.Len(t, ecs, 1, "num of collections")
@@ -8,6 +8,7 @@ import (
     "github.com/alcionai/corso/src/internal/common/idname"
     "github.com/alcionai/corso/src/internal/data"
     "github.com/alcionai/corso/src/internal/m365/collection/drive"
+    "github.com/alcionai/corso/src/internal/operations/inject"
     "github.com/alcionai/corso/src/pkg/backup/details"
     "github.com/alcionai/corso/src/pkg/control"
     "github.com/alcionai/corso/src/pkg/export"
@@ -16,16 +17,40 @@ import (
     "github.com/alcionai/corso/src/pkg/path"
 )
 
+var _ inject.ServiceHandler = &sharepointHandler{}
+
+func NewSharePointHandler(
+    opts control.Options,
+) *sharepointHandler {
+    return &sharepointHandler{
+        opts:               opts,
+        backupDriveIDNames: idname.NewCache(nil),
+    }
+}
+
+type sharepointHandler struct {
+    opts               control.Options
+    backupDriveIDNames idname.CacheBuilder
+}
+
+func (h *sharepointHandler) CacheItemInfo(v details.ItemInfo) {
+    // Old versions would store SharePoint data as OneDrive.
+    switch {
+    case v.SharePoint != nil:
+        h.backupDriveIDNames.Add(v.SharePoint.DriveID, v.SharePoint.DriveName)
+
+    case v.OneDrive != nil:
+        h.backupDriveIDNames.Add(v.OneDrive.DriveID, v.OneDrive.DriveName)
+    }
+}
+
 // ProduceExportCollections will create the export collections for the
 // given restore collections.
-func ProduceExportCollections(
+func (h *sharepointHandler) ProduceExportCollections(
     ctx context.Context,
     backupVersion int,
     exportCfg control.ExportConfig,
-    opts control.Options,
     dcs []data.RestoreCollection,
-    backupDriveIDNames idname.CacheBuilder,
-    deets *details.Builder,
     stats *data.ExportStats,
     errs *fault.Bus,
 ) ([]export.Collectioner, error) {
@@ -40,7 +65,7 @@ func ProduceExportCollections(
         return nil, clues.Wrap(err, "transforming path to drive path").WithClues(ctx)
     }
 
-    driveName, ok := backupDriveIDNames.NameOf(drivePath.DriveID)
+    driveName, ok := h.backupDriveIDNames.NameOf(drivePath.DriveID)
     if !ok {
         // This should not happen, but just in case
         logger.Ctx(ctx).With("drive_id", drivePath.DriveID).Info("drive name not found, using drive id")
@@ -4,20 +4,18 @@ import (
     "bytes"
     "context"
     "io"
-    "strings"
     "testing"
 
     "github.com/alcionai/clues"
     "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/suite"
 
-    "github.com/alcionai/corso/src/internal/common/idname"
     "github.com/alcionai/corso/src/internal/data"
     dataMock "github.com/alcionai/corso/src/internal/data/mock"
     odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
-    odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
     "github.com/alcionai/corso/src/internal/tester"
     "github.com/alcionai/corso/src/internal/version"
+    "github.com/alcionai/corso/src/pkg/backup/details"
     "github.com/alcionai/corso/src/pkg/control"
     "github.com/alcionai/corso/src/pkg/export"
     "github.com/alcionai/corso/src/pkg/fault"
@@ -60,81 +58,111 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
     defer flush()
 
     var (
         driveID   = "driveID1"
         driveName = "driveName1"
+        itemName  = "name1"
         exportCfg = control.ExportConfig{}
         dpb       = odConsts.DriveFolderPrefixBuilder(driveID)
-        cache     = idname.NewCache(
-            // Cache check with lowercased ids
-            map[string]string{strings.ToLower(driveID): driveName})
-        dii = odStub.DriveItemInfo()
 
         expectedPath  = path.LibrariesCategory.HumanString() + "/" + driveName
         expectedItems = []export.Item{
             {
                 ID:   "id1.data",
-                Name: "name1",
+                Name: itemName,
                 Body: io.NopCloser((bytes.NewBufferString("body1"))),
             },
         }
     )
 
-    dii.OneDrive.ItemName = "name1"
-
     p, err := dpb.ToDataLayerSharePointPath("t", "u", path.LibrariesCategory, false)
     assert.NoError(t, err, "build path")
 
-    dcs := []data.RestoreCollection{
-        data.FetchRestoreCollection{
-            Collection: dataMock.Collection{
-                Path: p,
-                ItemData: []data.Item{
-                    &dataMock.Item{
-                        ItemID:   "id1.data",
-                        Reader:   io.NopCloser(bytes.NewBufferString("body1")),
-                        ItemInfo: dii,
-                    },
-                },
-            },
-            FetchItemByNamer: finD{id: "id1.meta", name: "name1"},
-        },
-    }
-
-    stats := data.ExportStats{}
-
-    ecs, err := ProduceExportCollections(
-        ctx,
-        int(version.Backup),
-        exportCfg,
-        control.DefaultOptions(),
-        dcs,
-        cache,
-        nil,
-        &stats,
-        fault.New(true))
-    assert.NoError(t, err, "export collections error")
-    assert.Len(t, ecs, 1, "num of collections")
-
-    assert.Equal(t, expectedPath, ecs[0].BasePath(), "base dir")
-
-    fitems := []export.Item{}
-    size := 0
-
-    for item := range ecs[0].Items(ctx) {
-        // unwrap the body from stats reader
-        b, err := io.ReadAll(item.Body)
-        assert.NoError(t, err, clues.ToCore(err))
-
-        size += len(b)
-        bitem := io.NopCloser(bytes.NewBuffer(b))
-        item.Body = bitem
-
-        fitems = append(fitems, item)
-    }
-
-    assert.Equal(t, expectedItems, fitems, "items")
-
-    expectedStats := data.ExportStats{}
-    expectedStats.UpdateBytes(path.FilesCategory, int64(size))
-    expectedStats.UpdateResourceCount(path.FilesCategory)
-    assert.Equal(t, expectedStats, stats, "stats")
+    table := []struct {
+        name     string
+        itemInfo details.ItemInfo
+    }{
+        {
+            name: "OneDriveLegacyItemInfo",
+            itemInfo: details.ItemInfo{
+                OneDrive: &details.OneDriveInfo{
+                    ItemType:  details.OneDriveItem,
+                    ItemName:  itemName,
+                    Size:      1,
+                    DriveName: driveName,
+                    DriveID:   driveID,
+                },
+            },
+        },
+        {
+            name: "SharePointItemInfo",
+            itemInfo: details.ItemInfo{
+                SharePoint: &details.SharePointInfo{
+                    ItemType:  details.SharePointLibrary,
+                    ItemName:  itemName,
+                    Size:      1,
+                    DriveName: driveName,
+                    DriveID:   driveID,
+                },
+            },
+        },
+    }
+
+    for _, test := range table {
+        suite.Run(test.name, func() {
+            t := suite.T()
+
+            dcs := []data.RestoreCollection{
+                data.FetchRestoreCollection{
+                    Collection: dataMock.Collection{
+                        Path: p,
+                        ItemData: []data.Item{
+                            &dataMock.Item{
+                                ItemID: "id1.data",
+                                Reader: io.NopCloser(bytes.NewBufferString("body1")),
+                            },
+                        },
+                    },
+                    FetchItemByNamer: finD{id: "id1.meta", name: itemName},
+                },
+            }
+
+            handler := NewSharePointHandler(control.DefaultOptions())
+            handler.CacheItemInfo(test.itemInfo)
+
+            stats := data.ExportStats{}
+
+            ecs, err := handler.ProduceExportCollections(
+                ctx,
+                int(version.Backup),
+                exportCfg,
+                dcs,
+                &stats,
+                fault.New(true))
+            assert.NoError(t, err, "export collections error")
+            assert.Len(t, ecs, 1, "num of collections")
+
+            assert.Equal(t, expectedPath, ecs[0].BasePath(), "base dir")
+
+            fitems := []export.Item{}
+            size := 0
+
+            for item := range ecs[0].Items(ctx) {
+                // unwrap the body from stats reader
+                b, err := io.ReadAll(item.Body)
+                assert.NoError(t, err, clues.ToCore(err))
+
+                size += len(b)
+                bitem := io.NopCloser(bytes.NewBuffer(b))
+                item.Body = bitem
+
+                fitems = append(fitems, item)
+            }
+
+            assert.Equal(t, expectedItems, fitems, "items")
+
+            expectedStats := data.ExportStats{}
+            expectedStats.UpdateBytes(path.FilesCategory, int64(size))
+            expectedStats.UpdateResourceCount(path.FilesCategory)
+            assert.Equal(t, expectedStats, stats, "stats")
+        })
+    }
 }
@@ -261,9 +261,7 @@ func (op *ExportOperation) do(
         ctx,
         op.ec,
         bup.Version,
-        op.Selectors,
         op.ExportCfg,
-        op.Options,
         dcs,
         // We also have opStats, but that tracks different data.
         // Maybe we can look into merging them some time in the future.
@@ -329,9 +327,7 @@ func produceExportCollections(
     ctx context.Context,
     ec inject.ExportConsumer,
     backupVersion int,
-    sel selectors.Selector,
     exportCfg control.ExportConfig,
-    opts control.Options,
     dcs []data.RestoreCollection,
     exportStats *data.ExportStats,
     errs *fault.Bus,
@@ -342,12 +338,15 @@ func produceExportCollections(
         close(complete)
     }()
 
+    ctx, end := diagnostics.Span(ctx, "m365:export")
+    defer end()
+
+    ctx = clues.Add(ctx, "export_config", exportCfg)
+
     expCollections, err := ec.ProduceExportCollections(
         ctx,
         backupVersion,
-        sel,
         exportCfg,
-        opts,
         dcs,
         exportStats,
         errs)
@@ -37,7 +37,7 @@ func TestExportUnitSuite(t *testing.T) {
     suite.Run(t, &ExportUnitSuite{Suite: tester.NewUnitSuite(t)})
 }
 
-func (suite *ExportUnitSuite) TestExportOperation_PersistResults() {
+func (suite *ExportUnitSuite) TestExportOperation_Export() {
     var (
         kw = &kopia.Wrapper{}
         sw = store.NewWrapper(&kopia.ModelStore{})
@@ -15,7 +15,6 @@ import (
     "github.com/alcionai/corso/src/pkg/export"
     "github.com/alcionai/corso/src/pkg/fault"
     "github.com/alcionai/corso/src/pkg/path"
-    "github.com/alcionai/corso/src/pkg/selectors"
 )
 
 type (
@@ -85,16 +84,12 @@ type (
         ProduceExportCollections(
             ctx context.Context,
             backupVersion int,
-            selector selectors.Selector,
             exportCfg control.ExportConfig,
-            opts control.Options,
             dcs []data.RestoreCollection,
             stats *data.ExportStats,
             errs *fault.Bus,
         ) ([]export.Collectioner, error)
 
-        Wait() *data.CollectionStats
-
         CacheItemInfoer
     }
 
@@ -117,4 +112,17 @@ type (
     RepoMaintenancer interface {
         RepoMaintenance(ctx context.Context, opts repository.Maintenance) error
     }
+
+    // ServiceHandler contains the set of functions required to implement all
+    // service-specific functionality for backups, restores, and exports.
+    ServiceHandler interface {
+        ExportConsumer
+    }
+
+    ToServiceHandler interface {
+        NewServiceHandler(
+            opts control.Options,
+            service path.ServiceType,
+        ) (ServiceHandler, error)
+    }
 )
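Since `ServiceHandler` is currently just `ExportConsumer`, wiring a new service into this system means satisfying two methods. A hypothetical skeleton (the `exampleHandler` type and its stubbed bodies are not in this PR; only the signatures mirror the interface above):

```go
package example

import (
	"context"

	"github.com/alcionai/clues"

	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/internal/operations/inject"
	"github.com/alcionai/corso/src/pkg/backup/details"
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/export"
	"github.com/alcionai/corso/src/pkg/fault"
)

// Sketch of the minimal surface a new service package would implement
// to satisfy inject.ServiceHandler as defined in this PR.
type exampleHandler struct {
	opts control.Options
}

var _ inject.ServiceHandler = &exampleHandler{}

func (h *exampleHandler) CacheItemInfo(v details.ItemInfo) {
	// no-op: this example service has no id-to-name caches to warm.
}

func (h *exampleHandler) ProduceExportCollections(
	ctx context.Context,
	backupVersion int,
	exportCfg control.ExportConfig,
	dcs []data.RestoreCollection,
	stats *data.ExportStats,
	errs *fault.Bus,
) ([]export.Collectioner, error) {
	// stub body; a real handler would build collections from dcs.
	return nil, clues.New("not implemented")
}
```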
@@ -15,9 +15,10 @@ import (
 
 type DataProvider interface {
     inject.BackupProducer
-    inject.ExportConsumer
     inject.RestoreConsumer
 
+    inject.ToServiceHandler
+
     VerifyAccess(ctx context.Context) error
 }
@@ -3,6 +3,8 @@ package repository
 import (
     "context"
 
+    "github.com/alcionai/clues"
+
     "github.com/alcionai/corso/src/internal/model"
     "github.com/alcionai/corso/src/internal/operations"
     "github.com/alcionai/corso/src/pkg/control"
@@ -26,12 +28,17 @@ func (r repository) NewExport(
     sel selectors.Selector,
     exportCfg control.ExportConfig,
 ) (operations.ExportOperation, error) {
+    handler, err := r.Provider.NewServiceHandler(r.Opts, sel.PathService())
+    if err != nil {
+        return operations.ExportOperation{}, clues.Stack(err)
+    }
+
     return operations.NewExportOperation(
         ctx,
         r.Opts,
         r.dataLayer,
         store.NewWrapper(r.modelStore),
-        r.Provider,
+        handler,
         r.Account,
         model.StableID(backupID),
         sel,
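From the client's perspective `NewExport` keeps its signature; the only visible change is that an unsupported service now fails during construction instead of mid-run. A hedged sketch of the calling side (treating `repository.Repository` as the interface the unexported type satisfies is an assumption, and the wrapper itself is illustrative):

```go
package example

import (
	"context"

	"github.com/alcionai/clues"

	"github.com/alcionai/corso/src/internal/operations"
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/repository"
	"github.com/alcionai/corso/src/pkg/selectors"
)

// newExport shows the client-facing behavior after this change: handler
// resolution happens inside NewExport, so a selector for an unsupported
// service errors here, before any export work begins.
func newExport(
	ctx context.Context,
	r repository.Repository,
	backupID string,
	sel selectors.Selector,
	exportCfg control.ExportConfig,
) (operations.ExportOperation, error) {
	op, err := r.NewExport(ctx, backupID, sel, exportCfg)
	if err != nil {
		return operations.ExportOperation{}, clues.Stack(err)
	}

	return op, nil
}
```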