some code cleanup before groups export (#4202)
various tidbits of data cleanup before moving forward with adding export behavior to groups.

* move duplicate collections mocks into data/mock
* move the export collection struct into pkg/export (to prevent future duplicates in the next PR)
* rename export.Collection to Collectioner, because it's an interface.
* some other non-logic rearrangement

---

#### Does this PR need a docs update or release note?

- [x] ⛔ No

#### Type of change

- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #3991

#### Test Plan

- [x] ⚡ Unit test
- [x] 💚 E2E
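For reviewers, here is the shape `pkg/export` lands on after this change, condensed into one sketch (comments are mine, added for this summary; the `src/pkg/export` hunks below are the authoritative version):

```go
package export

import (
	"context"
	"io"

	"github.com/alcionai/corso/src/internal/data"
)

// Collectioner (formerly Collection) is the interface returned to SDK consumers.
type Collectioner interface {
	BasePath() string
	Items(context.Context) <-chan Item
}

// Item flattens the old Item/ItemData pair into a single struct.
type Item struct {
	ID    string
	Name  string
	Body  io.ReadCloser
	Error error
}

// BaseCollection is the shared struct that services now construct; each
// service supplies a Stream func that pumps its backing data into the channel.
type BaseCollection struct {
	BaseDir           string
	BackingCollection []data.RestoreCollection
	BackupVersion     int
	Stream            func(context.Context, []data.RestoreCollection, int, chan<- Item)
}

func (bc BaseCollection) BasePath() string { return bc.BaseDir }

func (bc BaseCollection) Items(ctx context.Context) <-chan Item {
	ch := make(chan Item)
	go bc.Stream(ctx, bc.BackingCollection, bc.BackupVersion, ch)

	return ch
}
```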
This commit is contained in:
parent a2e80a178a
commit 9a8c413b52
@@ -6,7 +6,6 @@ import (
    "github.com/spf13/pflag"

    "github.com/alcionai/corso/src/cli/flags"
    . "github.com/alcionai/corso/src/cli/print"
    "github.com/alcionai/corso/src/cli/utils"
)

@@ -80,5 +79,18 @@ func exportGroupsCmd(cmd *cobra.Command, args []string) error {
        return nil
    }

    return Only(ctx, utils.ErrNotYetImplemented)
    opts := utils.MakeGroupsOpts(cmd)

    if flags.RunModeFV == flags.RunModeFlagTest {
        return nil
    }

    if err := utils.ValidateGroupsRestoreFlags(flags.BackupIDFV, opts); err != nil {
        return err
    }

    sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)
    utils.FilterGroupsRestoreInfoSelectors(sel, opts)

    return runExport(ctx, cmd, args, opts.ExportCfg, sel.Selector, flags.BackupIDFV, "Groups")
}

@@ -76,8 +76,7 @@ func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
    cmd.SetOut(new(bytes.Buffer)) // drop output
    cmd.SetErr(new(bytes.Buffer)) // drop output
    err := cmd.Execute()
    // assert.NoError(t, err, clues.ToCore(err))
    assert.ErrorIs(t, err, utils.ErrNotYetImplemented, clues.ToCore(err))
    assert.NoError(t, err, clues.ToCore(err))

    opts := utils.MakeGroupsOpts(cmd)
    assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)
@@ -6,7 +6,6 @@ import (
    "github.com/spf13/pflag"

    "github.com/alcionai/corso/src/cli/flags"
    . "github.com/alcionai/corso/src/cli/print"
    "github.com/alcionai/corso/src/cli/utils"
)

@@ -80,5 +79,18 @@ func exportTeamsCmd(cmd *cobra.Command, args []string) error {
        return nil
    }

    return Only(ctx, utils.ErrNotYetImplemented)
    opts := utils.MakeGroupsOpts(cmd)

    if flags.RunModeFV == flags.RunModeFlagTest {
        return nil
    }

    if err := utils.ValidateGroupsRestoreFlags(flags.BackupIDFV, opts); err != nil {
        return err
    }

    sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)
    utils.FilterGroupsRestoreInfoSelectors(sel, opts)

    return runExport(ctx, cmd, args, opts.ExportCfg, sel.Selector, flags.BackupIDFV, "Teams")
}

@@ -76,10 +76,9 @@ func (suite *TeamsUnitSuite) TestAddTeamsCommands() {
    cmd.SetOut(new(bytes.Buffer)) // drop output
    cmd.SetErr(new(bytes.Buffer)) // drop output
    err := cmd.Execute()
    // assert.NoError(t, err, clues.ToCore(err))
    assert.ErrorIs(t, err, utils.ErrNotYetImplemented, clues.ToCore(err))
    assert.NoError(t, err, clues.ToCore(err))

    opts := utils.MakeTeamsOpts(cmd)
    opts := utils.MakeGroupsOpts(cmd)
    assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)

    assert.Equal(t, testdata.Archive, opts.ExportCfg.Archive)

@@ -86,7 +86,7 @@ func (suite *TeamsUnitSuite) TestAddTeamsCommands() {
    // assert.NoError(t, err, clues.ToCore(err))
    assert.ErrorIs(t, err, utils.ErrNotYetImplemented, clues.ToCore(err))

    opts := utils.MakeTeamsOpts(cmd)
    opts := utils.MakeGroupsOpts(cmd)
    assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)

    assert.Equal(t, testdata.Collisions, opts.RestoreCfg.Collisions)
@@ -1,30 +0,0 @@
package utils

import (
    "github.com/spf13/cobra"

    "github.com/alcionai/corso/src/cli/flags"
)

type TeamsOpts struct {
    Teams []string

    RestoreCfg RestoreCfgOpts
    ExportCfg  ExportCfgOpts

    Populated flags.PopulatedFlags
}

func MakeTeamsOpts(cmd *cobra.Command) TeamsOpts {
    return TeamsOpts{
        Teams: flags.UserFV,

        RestoreCfg: makeRestoreCfgOpts(cmd),
        ExportCfg:  makeExportCfgOpts(cmd),

        // populated contains the list of flags that appear in the
        // command, according to pflags. Use this to differentiate
        // between an "empty" and a "missing" value.
        Populated: flags.GetPopulatedFlags(cmd),
    }
}
@@ -32,10 +32,8 @@ func (z zipCollection) Items(ctx context.Context) <-chan export.Item {
        defer close(rc)

        rc <- export.Item{
            Data: export.ItemData{
                Name: "Corso_Export_" + dttm.FormatNow(dttm.HumanReadable) + ".zip",
                Body: z.reader,
            },
            Name: "Corso_Export_" + dttm.FormatNow(dttm.HumanReadable) + ".zip",
            Body: z.reader,
        }

        return rc

@@ -45,8 +43,8 @@ func (z zipCollection) Items(ctx context.Context) <-chan export.Item {
// them into a single collection.
func ZipExportCollection(
    ctx context.Context,
    expCollections []export.Collection,
) (export.Collection, error) {
    expCollections []export.Collectioner,
) (export.Collectioner, error) {
    if len(expCollections) == 0 {
        return nil, clues.New("no export collections provided")
    }

@@ -71,7 +69,7 @@ func ZipExportCollection(
                return
            }

            name := item.Data.Name
            name := item.Name

            // We assume folder and name to not contain any path separators.
            // Also, this should always use `/` as this is

@@ -86,7 +84,7 @@ func ZipExportCollection(
                return
            }

            _, err = io.CopyBuffer(f, item.Data.Body, buf)
            _, err = io.CopyBuffer(f, item.Body, buf)
            if err != nil {
                writer.CloseWithError(clues.Wrap(err, "writing zip entry").With("name", name).With("id", item.ID))
                return
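After the rename, `ZipExportCollection` takes and returns `export.Collectioner`, so many collections reduce to a single zip-backed one. A minimal, hypothetical caller for illustration (the `archive` import path and the `writeZip` helper are assumptions, not part of this PR):

```go
package example

import (
	"context"
	"io"

	"github.com/alcionai/clues"

	"github.com/alcionai/corso/src/internal/archive" // path assumed for illustration
	"github.com/alcionai/corso/src/pkg/export"
)

// writeZip reduces many collections to the single zip-backed Collectioner
// and copies its one item (the archive stream) into w.
func writeZip(ctx context.Context, colls []export.Collectioner, w io.Writer) error {
	zc, err := archive.ZipExportCollection(ctx, colls)
	if err != nil {
		return clues.Wrap(err, "zipping export collections")
	}

	for item := range zc.Items(ctx) {
		if item.Error != nil {
			return item.Error
		}

		// item.Name looks like "Corso_Export_<timestamp>.zip".
		_, err := io.Copy(w, item.Body)
		item.Body.Close()

		if err != nil {
			return clues.Wrap(err, "copying archive")
		}
	}

	return nil
}
```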
@@ -10,6 +10,10 @@ import (
    "github.com/alcionai/corso/src/pkg/path"
)

// ---------------------------------------------------------------------------
// Collections
// ---------------------------------------------------------------------------

// A Collection represents the set of data within a single logical location
// denoted by FullPath.
type Collection interface {

@@ -56,12 +60,9 @@ type RestoreCollection interface {
    FetchItemByNamer
}

type FetchItemByNamer interface {
    // Fetch retrieves an item with the given name from the Collection if it
    // exists. Items retrieved with Fetch may still appear in the channel returned
    // by Items().
    FetchItemByName(ctx context.Context, name string) (Item, error)
}
// ---------------------------------------------------------------------------
// Items
// ---------------------------------------------------------------------------

// Item represents a single item within a Collection
type Item interface {

@@ -74,23 +75,6 @@ type Item interface {
    Deleted() bool
}

// LocationPather provides a LocationPath describing the path with Display Names
// instead of canonical IDs
type LocationPather interface {
    LocationPath() *path.Builder
}

// PreviousLocationPather provides both the current location of the collection
// as well as the location of the item in the previous backup.
//
// TODO(ashmrtn): If we guarantee that we persist the location of collections in
// addition to the path of the item then we could just have a single
// *LocationPather interface with current and previous location functions.
type PreviousLocationPather interface {
    LocationPather
    PreviousLocationPath() details.LocationIDer
}

// ItemInfo returns the details.ItemInfo for the item.
type ItemInfo interface {
    Info() (details.ItemInfo, error)

@@ -108,3 +92,31 @@ type ItemSize interface {
type ItemModTime interface {
    ModTime() time.Time
}

type FetchItemByNamer interface {
    // Fetch retrieves an item with the given name from the Collection if it
    // exists. Items retrieved with Fetch may still appear in the channel returned
    // by Items().
    FetchItemByName(ctx context.Context, name string) (Item, error)
}

// ---------------------------------------------------------------------------
// Paths
// ---------------------------------------------------------------------------

// LocationPather provides a LocationPath describing the path with Display Names
// instead of canonical IDs
type LocationPather interface {
    LocationPath() *path.Builder
}

// PreviousLocationPather provides both the current location of the collection
// as well as the location of the item in the previous backup.
//
// TODO(ashmrtn): If we guarantee that we persist the location of collections in
// addition to the path of the item then we could just have a single
// *LocationPather interface with current and previous location functions.
type PreviousLocationPather interface {
    LocationPather
    PreviousLocationPath() details.LocationIDer
}
@@ -78,14 +78,39 @@ var (
    _ data.RestoreCollection = &Collection{}
)

type Collection struct{}
type Collection struct {
    Path                 path.Path
    ItemData             []*Item
    ItemsRecoverableErrs []error
}

func (c Collection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
    return nil
    ch := make(chan data.Item)

    go func() {
        defer close(ch)

        el := errs.Local()

        for _, item := range c.ItemData {
            if item.ReadErr != nil {
                el.AddRecoverable(ctx, item.ReadErr)
                continue
            }

            ch <- item
        }
    }()

    for _, err := range c.ItemsRecoverableErrs {
        errs.AddRecoverable(ctx, err)
    }

    return ch
}

func (c Collection) FullPath() path.Path {
    return nil
    return c.Path
}

func (c Collection) PreviousPath() path.Path {
@@ -13,83 +13,61 @@ import (
    "github.com/alcionai/corso/src/pkg/fault"
)

var _ export.Collection = &ExportCollection{}

// ExportCollection is the implementation of export.ExportCollection for OneDrive
type ExportCollection struct {
    // baseDir contains the path of the collection
    baseDir string

    // backingCollection is the restore collection from which we will
    // create the export collection.
    backingCollection data.RestoreCollection

    // backupVersion is the backupVersion of the backup this collection was part
    // of. This is required to figure out how to get the name of the
    // item.
    backupVersion int
}

func NewExportCollection(
    baseDir string,
    backingCollection data.RestoreCollection,
    backingCollection []data.RestoreCollection,
    backupVersion int,
) ExportCollection {
    return ExportCollection{
        baseDir:           baseDir,
        backingCollection: backingCollection,
        backupVersion:     backupVersion,
) export.Collectioner {
    return export.BaseCollection{
        BaseDir:           baseDir,
        BackingCollection: backingCollection,
        BackupVersion:     backupVersion,
        Stream:            streamItems,
    }
}

func (ec ExportCollection) BasePath() string {
    return ec.baseDir
}

func (ec ExportCollection) Items(ctx context.Context) <-chan export.Item {
    ch := make(chan export.Item)
    go items(ctx, ec, ch)

    return ch
}

// items converts items in backing collection to export items
func items(ctx context.Context, ec ExportCollection, ch chan<- export.Item) {
// streamItems streams the streamItems in the backingCollection into the export stream chan
func streamItems(
    ctx context.Context,
    drc []data.RestoreCollection,
    backupVersion int,
    ch chan<- export.Item,
) {
    defer close(ch)

    errs := fault.New(false)

    for item := range ec.backingCollection.Items(ctx, errs) {
        itemUUID := item.ID()
        if isMetadataFile(itemUUID, ec.backupVersion) {
            continue
    for _, rc := range drc {
        for item := range rc.Items(ctx, errs) {
            itemUUID := item.ID()
            if isMetadataFile(itemUUID, backupVersion) {
                continue
            }

            name, err := getItemName(ctx, itemUUID, backupVersion, rc)

            ch <- export.Item{
                ID:    itemUUID,
                Name:  name,
                Body:  item.ToReader(),
                Error: err,
            }
        }

        name, err := getItemName(ctx, itemUUID, ec.backupVersion, ec.backingCollection)
        items, recovered := errs.ItemsAndRecovered()

        ch <- export.Item{
            ID: itemUUID,
            Data: export.ItemData{
                Name: name,
                Body: item.ToReader(),
            },
            Error: err,
        // Return all the items that we failed to source from the persistence layer
        for _, err := range items {
            ch <- export.Item{
                ID:    err.ID,
                Error: &err,
            }
        }
    }

    eitems, erecovereable := errs.ItemsAndRecovered()

    // Return all the items that we failed to source from the persistence layer
    for _, err := range eitems {
        ch <- export.Item{
            ID:    err.ID,
            Error: &err,
        }
    }

    for _, ec := range erecovereable {
        ch <- export.Item{
            Error: ec,
        for _, err := range recovered {
            ch <- export.Item{
                Error: err,
            }
        }
    }
}

@@ -6,6 +6,7 @@ import (
    "io"
    "testing"

    "github.com/alcionai/clues"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/suite"

@@ -58,7 +59,11 @@ func (suite *ExportUnitSuite) TestIsMetadataFile() {

    for _, test := range table {
        suite.Run(test.name, func() {
            assert.Equal(suite.T(), test.isMeta, isMetadataFile(test.id, test.backupVersion), "is metadata")
            assert.Equal(
                suite.T(),
                test.isMeta,
                isMetadataFile(test.id, test.backupVersion),
                "is metadata")
        })
    }
}

@@ -86,47 +91,47 @@ func (fd finD) FetchItemByName(ctx context.Context, name string) (data.Item, err

func (suite *ExportUnitSuite) TestGetItemName() {
    table := []struct {
        tname         string
        name          string
        id            string
        backupVersion int
        name          string
        expectName    string
        fin           data.FetchItemByNamer
        errFunc       assert.ErrorAssertionFunc
        expectErr     assert.ErrorAssertionFunc
    }{
        {
            tname:         "legacy",
            name:          "legacy",
            id:            "name",
            backupVersion: version.OneDrive1DataAndMetaFiles,
            name:          "name",
            errFunc:       assert.NoError,
            expectName:    "name",
            expectErr:     assert.NoError,
        },
        {
            tname:         "name in filename",
            name:          "name in filename",
            id:            "name.data",
            backupVersion: version.OneDrive4DirIncludesPermissions,
            name:          "name",
            errFunc:       assert.NoError,
            expectName:    "name",
            expectErr:     assert.NoError,
        },
        {
            tname:         "name in metadata",
            name:          "name in metadata",
            id:            "id.data",
            backupVersion: version.Backup,
            name:          "name",
            expectName:    "name",
            fin:           finD{id: "id.meta", name: "name"},
            errFunc:       assert.NoError,
            expectErr:     assert.NoError,
        },
        {
            tname:         "name in metadata but error",
            name:          "name in metadata but error",
            id:            "id.data",
            backupVersion: version.Backup,
            name:          "",
            expectName:    "",
            fin:           finD{err: assert.AnError},
            errFunc:       assert.Error,
            expectErr:     assert.Error,
        },
    }

    for _, test := range table {
        suite.Run(test.tname, func() {
        suite.Run(test.name, func() {
            t := suite.T()

            ctx, flush := tester.NewContext(t)

@@ -137,9 +142,9 @@ func (suite *ExportUnitSuite) TestGetItemName() {
                test.id,
                test.backupVersion,
                test.fin)
            test.errFunc(t, err)
            test.expectErr(t, err, clues.ToCore(err))

            assert.Equal(t, test.name, name, "name")
            assert.Equal(t, test.expectName, name, "name")
        })
    }
}
65  src/internal/m365/collection/groups/export.go  Normal file
@@ -0,0 +1,65 @@
package groups

import (
    "context"

    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
)

func NewExportCollection(
    baseDir string,
    backingCollections []data.RestoreCollection,
    backupVersion int,
) export.Collectioner {
    return export.BaseCollection{
        BaseDir:           baseDir,
        BackingCollection: backingCollections,
        BackupVersion:     backupVersion,
        Stream:            streamItems,
    }
}

// streamItems streams the items in the backingCollection into the export stream chan
func streamItems(
    ctx context.Context,
    drc []data.RestoreCollection,
    backupVersion int,
    ch chan<- export.Item,
) {
    defer close(ch)

    errs := fault.New(false)

    for _, rc := range drc {
        for item := range rc.Items(ctx, errs) {
            itemID := item.ID()

            // channel message items have no name
            name := itemID

            ch <- export.Item{
                ID:   itemID,
                Name: name,
                Body: item.ToReader(),
            }
        }

        items, recovered := errs.ItemsAndRecovered()

        // Return all the items that we failed to source from the persistence layer
        for _, err := range items {
            ch <- export.Item{
                ID:    err.ID,
                Error: &err,
            }
        }

        for _, err := range recovered {
            ch <- export.Item{
                Error: err,
            }
        }
    }
}
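Since `NewExportCollection` now returns an `export.Collectioner` backed by `export.BaseCollection`, consumption is uniform across services. A small hedged sketch of the streaming contract (`drainGroupExports` and the package name are illustrative only, not part of this PR):

```go
package example

import (
	"context"
	"fmt"

	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/internal/m365/collection/groups"
)

// drainGroupExports shows the contract: items arrive on a channel, and
// sourcing failures surface as Items with a populated Error field. The
// channel is drained fully so the producer goroutine can exit.
func drainGroupExports(
	ctx context.Context,
	rcs []data.RestoreCollection,
	backupVersion int,
) error {
	coll := groups.NewExportCollection("channelMessages", rcs, backupVersion)

	var firstErr error

	for item := range coll.Items(ctx) {
		if item.Error != nil {
			if firstErr == nil {
				firstErr = item.Error
			}

			continue
		}

		// channel message items use the item ID as the name
		fmt.Println(item.Name)
		item.Body.Close()
	}

	return firstErr
}
```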
99  src/internal/m365/collection/groups/export_test.go  Normal file
@@ -0,0 +1,99 @@
package groups

import (
    "testing"

    "github.com/alcionai/clues"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/suite"

    "github.com/alcionai/corso/src/internal/data"
    dataMock "github.com/alcionai/corso/src/internal/data/mock"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/internal/version"
    "github.com/alcionai/corso/src/pkg/export"
)

type ExportUnitSuite struct {
    tester.Suite
}

func TestExportUnitSuite(t *testing.T) {
    suite.Run(t, &ExportUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *ExportUnitSuite) TestStreamItems() {
    table := []struct {
        name        string
        backingColl dataMock.Collection
        expectName  string
        expectErr   assert.ErrorAssertionFunc
    }{
        {
            name: "no errors",
            backingColl: dataMock.Collection{
                ItemData: []*dataMock.Item{
                    {ItemID: "zim"},
                },
            },
            expectName: "zim",
            expectErr:  assert.NoError,
        },
        {
            name: "only recoverable errors",
            backingColl: dataMock.Collection{
                ItemsRecoverableErrs: []error{
                    clues.New("The knowledge... it fills me! It is neat!"),
                },
            },
            expectErr: assert.Error,
        },
        {
            name: "items and recoverable errors",
            backingColl: dataMock.Collection{
                ItemData: []*dataMock.Item{
                    {ItemID: "gir"},
                },
                ItemsRecoverableErrs: []error{
                    clues.New("I miss my cupcake."),
                },
            },
            expectName: "gir",
            expectErr:  assert.Error,
        },
    }

    for _, test := range table {
        suite.Run(test.name, func() {
            t := suite.T()

            ctx, flush := tester.NewContext(t)
            defer flush()

            ch := make(chan export.Item)

            go streamItems(
                ctx,
                []data.RestoreCollection{test.backingColl},
                version.NoBackup,
                ch)

            var (
                itm export.Item
                err error
            )

            for i := range ch {
                if i.Error == nil {
                    itm = i
                } else {
                    err = i.Error
                }
            }

            test.expectErr(t, err, clues.ToCore(err))

            assert.Equal(t, test.expectName, itm.Name, "item name")
        })
    }
}
@@ -8,6 +8,7 @@ import (
    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/internal/diagnostics"
    "github.com/alcionai/corso/src/internal/m365/graph"
    "github.com/alcionai/corso/src/internal/m365/service/groups"
    "github.com/alcionai/corso/src/internal/m365/service/onedrive"
    "github.com/alcionai/corso/src/internal/m365/service/sharepoint"
    "github.com/alcionai/corso/src/internal/m365/support"

@@ -27,7 +28,7 @@ func (ctrl *Controller) ProduceExportCollections(
    opts control.Options,
    dcs []data.RestoreCollection,
    errs *fault.Bus,
) ([]export.Collection, error) {
) ([]export.Collectioner, error) {
    ctx, end := diagnostics.Span(ctx, "m365:export")
    defer end()

@@ -35,7 +36,7 @@ func (ctrl *Controller) ProduceExportCollections(
    ctx = clues.Add(ctx, "export_config", exportCfg) // TODO(meain): needs PII control

    var (
        expCollections []export.Collection
        expCollections []export.Collectioner
        status         *support.ControllerOperationStatus
        deets          = &details.Builder{}
        err            error

@@ -61,6 +62,15 @@ func (ctrl *Controller) ProduceExportCollections(
            ctrl.backupDriveIDNames,
            deets,
            errs)
    case selectors.ServiceGroups:
        expCollections, err = groups.ProduceExportCollections(
            ctx,
            backupVersion,
            exportCfg,
            opts,
            dcs,
            deets,
            errs)

    default:
        err = clues.Wrap(clues.New(sels.Service.String()), "service not supported")

@@ -78,7 +78,7 @@ func (ctrl Controller) ProduceExportCollections(
    _ control.Options,
    _ []data.RestoreCollection,
    _ *fault.Bus,
) ([]export.Collection, error) {
) ([]export.Collectioner, error) {
    return nil, ctrl.Err
}
52  src/internal/m365/service/groups/export.go  Normal file
@@ -0,0 +1,52 @@
package groups

import (
    "context"

    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/internal/m365/collection/groups"
    "github.com/alcionai/corso/src/pkg/backup/details"
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/path"
)

// ProduceExportCollections will create the export collections for the
// given restore collections.
func ProduceExportCollections(
    ctx context.Context,
    backupVersion int,
    exportCfg control.ExportConfig,
    opts control.Options,
    dcs []data.RestoreCollection,
    deets *details.Builder,
    errs *fault.Bus,
) ([]export.Collectioner, error) {
    var (
        el = errs.Local()
        ec = make([]export.Collectioner, 0, len(dcs))
    )

    for _, restoreColl := range dcs {
        var (
            fp      = restoreColl.FullPath()
            cat     = fp.Category()
            folders = []string{cat.String()}
        )

        switch cat {
        case path.ChannelMessagesCategory:
            folders = append(folders, fp.Folders()...)
        }

        coll := groups.NewExportCollection(
            path.Builder{}.Append(folders...).String(),
            []data.RestoreCollection{restoreColl},
            backupVersion)

        ec = append(ec, coll)
    }

    return ec, el.Failure()
}
111  src/internal/m365/service/groups/export_test.go  Normal file
@@ -0,0 +1,111 @@
package groups

import (
    "bytes"
    "context"
    "io"
    "testing"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/suite"

    "github.com/alcionai/corso/src/internal/data"
    dataMock "github.com/alcionai/corso/src/internal/data/mock"
    groupMock "github.com/alcionai/corso/src/internal/m365/service/groups/mock"
    "github.com/alcionai/corso/src/internal/tester"
    "github.com/alcionai/corso/src/internal/version"
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/path"
)

type ExportUnitSuite struct {
    tester.Suite
}

func TestExportUnitSuite(t *testing.T) {
    suite.Run(t, &ExportUnitSuite{Suite: tester.NewUnitSuite(t)})
}

type finD struct {
    id   string
    name string
    err  error
}

func (fd finD) FetchItemByName(ctx context.Context, name string) (data.Item, error) {
    if fd.err != nil {
        return nil, fd.err
    }

    if name == fd.id {
        return &dataMock.Item{
            ItemID: fd.id,
            Reader: io.NopCloser(bytes.NewBufferString(`{"displayname": "` + fd.name + `"}`)),
        }, nil
    }

    return nil, assert.AnError
}

func (suite *ExportUnitSuite) TestExportRestoreCollections() {
    t := suite.T()

    ctx, flush := tester.NewContext(t)
    defer flush()

    var (
        itemID        = "itemID"
        containerName = "channelID"
        exportCfg     = control.ExportConfig{}
        dii           = groupMock.ItemInfo()
        expectedPath  = path.ChannelMessagesCategory.String() + "/" + containerName
        expectedItems = []export.Item{
            {
                ID:   itemID,
                Name: dii.Groups.ItemName,
                Body: io.NopCloser((bytes.NewBufferString("body1"))),
            },
        }
    )

    p, err := path.Build("t", "pr", path.GroupsService, path.ChannelMessagesCategory, false, containerName)
    assert.NoError(t, err, "build path")

    dcs := []data.RestoreCollection{
        data.FetchRestoreCollection{
            Collection: dataMock.Collection{
                Path: p,
                ItemData: []*dataMock.Item{
                    {
                        ItemID:   itemID,
                        Reader:   io.NopCloser(bytes.NewBufferString("body1")),
                        ItemInfo: dii,
                    },
                },
            },
            FetchItemByNamer: finD{id: itemID, name: dii.Groups.ItemName},
        },
    }

    ecs, err := ProduceExportCollections(
        ctx,
        int(version.Backup),
        exportCfg,
        control.DefaultOptions(),
        dcs,
        nil,
        fault.New(true))
    assert.NoError(t, err, "export collections error")
    assert.Len(t, ecs, 1, "num of collections")

    assert.Equal(t, expectedPath, ecs[0].BasePath(), "base dir")

    fitems := []export.Item{}
    for item := range ecs[0].Items(ctx) {
        fitems = append(fitems, item)
    }

    assert.Equal(t, expectedItems, fitems, "items")
}
15  src/internal/m365/service/groups/mock/mock.go  Normal file
@@ -0,0 +1,15 @@
package stub

import (
    "github.com/alcionai/corso/src/pkg/backup/details"
)

func ItemInfo() details.ItemInfo {
    return details.ItemInfo{
        Groups: &details.GroupsInfo{
            ItemType: details.GroupsChannelMessage,
            ItemName: "itemID",
            Size:     1,
        },
    }
}
@@ -24,10 +24,10 @@ func ProduceExportCollections(
    dcs []data.RestoreCollection,
    deets *details.Builder,
    errs *fault.Bus,
) ([]export.Collection, error) {
) ([]export.Collectioner, error) {
    var (
        el = errs.Local()
        ec = make([]export.Collection, 0, len(dcs))
        ec = make([]export.Collectioner, 0, len(dcs))
    )

    for _, dc := range dcs {

@@ -38,7 +38,12 @@ func ProduceExportCollections(

        baseDir := path.Builder{}.Append(drivePath.Folders...)

        ec = append(ec, drive.NewExportCollection(baseDir.String(), dc, backupVersion))
        ec = append(
            ec,
            drive.NewExportCollection(
                baseDir.String(),
                []data.RestoreCollection{dc},
                backupVersion))
    }

    return ec, el.Failure()
@@ -19,7 +19,6 @@ import (
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/path"
)

type ExportUnitSuite struct {

@@ -51,36 +50,6 @@ func (fd finD) FetchItemByName(ctx context.Context, name string) (data.Item, err
    return nil, assert.AnError
}

type mockRestoreCollection struct {
    path  path.Path
    items []*dataMock.Item
}

func (rc mockRestoreCollection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
    ch := make(chan data.Item)

    go func() {
        defer close(ch)

        el := errs.Local()

        for _, item := range rc.items {
            if item.ReadErr != nil {
                el.AddRecoverable(ctx, item.ReadErr)
                continue
            }

            ch <- item
        }
    }()

    return ch
}

func (rc mockRestoreCollection) FullPath() path.Path {
    return rc.path
}

func (suite *ExportUnitSuite) TestGetItems() {
    table := []struct {
        name string

@@ -92,8 +61,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
            name:    "single item",
            version: 1,
            backingCollection: data.NoFetchRestoreCollection{
                Collection: mockRestoreCollection{
                    items: []*dataMock.Item{
                Collection: dataMock.Collection{
                    ItemData: []*dataMock.Item{
                        {
                            ItemID: "name1",
                            Reader: io.NopCloser(bytes.NewBufferString("body1")),

@@ -103,11 +72,9 @@ func (suite *ExportUnitSuite) TestGetItems() {
            },
            expectedItems: []export.Item{
                {
                    ID: "name1",
                    Data: export.ItemData{
                        Name: "name1",
                        Body: io.NopCloser((bytes.NewBufferString("body1"))),
                    },
                    ID:   "name1",
                    Name: "name1",
                    Body: io.NopCloser((bytes.NewBufferString("body1"))),
                },
            },
        },

@@ -115,8 +82,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
            name:    "multiple items",
            version: 1,
            backingCollection: data.NoFetchRestoreCollection{
                Collection: mockRestoreCollection{
                    items: []*dataMock.Item{
                Collection: dataMock.Collection{
                    ItemData: []*dataMock.Item{
                        {
                            ItemID: "name1",
                            Reader: io.NopCloser(bytes.NewBufferString("body1")),

@@ -130,18 +97,14 @@ func (suite *ExportUnitSuite) TestGetItems() {
            },
            expectedItems: []export.Item{
                {
                    ID: "name1",
                    Data: export.ItemData{
                        Name: "name1",
                        Body: io.NopCloser((bytes.NewBufferString("body1"))),
                    },
                    ID:   "name1",
                    Name: "name1",
                    Body: io.NopCloser((bytes.NewBufferString("body1"))),
                },
                {
                    ID: "name2",
                    Data: export.ItemData{
                        Name: "name2",
                        Body: io.NopCloser((bytes.NewBufferString("body2"))),
                    },
                    ID:   "name2",
                    Name: "name2",
                    Body: io.NopCloser((bytes.NewBufferString("body2"))),
                },
            },
        },

@@ -149,8 +112,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
            name:    "single item with data suffix",
            version: 2,
            backingCollection: data.NoFetchRestoreCollection{
                Collection: mockRestoreCollection{
                    items: []*dataMock.Item{
                Collection: dataMock.Collection{
                    ItemData: []*dataMock.Item{
                        {
                            ItemID: "name1.data",
                            Reader: io.NopCloser(bytes.NewBufferString("body1")),

@@ -160,11 +123,9 @@ func (suite *ExportUnitSuite) TestGetItems() {
            },
            expectedItems: []export.Item{
                {
                    ID: "name1.data",
                    Data: export.ItemData{
                        Name: "name1",
                        Body: io.NopCloser((bytes.NewBufferString("body1"))),
                    },
                    ID:   "name1.data",
                    Name: "name1",
                    Body: io.NopCloser((bytes.NewBufferString("body1"))),
                },
            },
        },

@@ -172,8 +133,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
            name:    "single item name from metadata",
            version: version.Backup,
            backingCollection: data.FetchRestoreCollection{
                Collection: mockRestoreCollection{
                    items: []*dataMock.Item{
                Collection: dataMock.Collection{
                    ItemData: []*dataMock.Item{
                        {
                            ItemID: "id1.data",
                            Reader: io.NopCloser(bytes.NewBufferString("body1")),

@@ -184,11 +145,9 @@ func (suite *ExportUnitSuite) TestGetItems() {
            },
            expectedItems: []export.Item{
                {
                    ID: "id1.data",
                    Data: export.ItemData{
                        Name: "name1",
                        Body: io.NopCloser((bytes.NewBufferString("body1"))),
                    },
                    ID:   "id1.data",
                    Name: "name1",
                    Body: io.NopCloser((bytes.NewBufferString("body1"))),
                },
            },
        },

@@ -196,8 +155,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
            name:    "single item name from metadata with error",
            version: version.Backup,
            backingCollection: data.FetchRestoreCollection{
                Collection: mockRestoreCollection{
                    items: []*dataMock.Item{
                Collection: dataMock.Collection{
                    ItemData: []*dataMock.Item{
                        {ItemID: "id1.data"},
                    },
                },

@@ -214,8 +173,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
            name:    "items with success and metadata read error",
            version: version.Backup,
            backingCollection: data.FetchRestoreCollection{
                Collection: mockRestoreCollection{
                    items: []*dataMock.Item{
                Collection: dataMock.Collection{
                    ItemData: []*dataMock.Item{
                        {
                            ItemID: "missing.data",
                        },

@@ -233,11 +192,9 @@ func (suite *ExportUnitSuite) TestGetItems() {
                    Error: assert.AnError,
                },
                {
                    ID: "id1.data",
                    Data: export.ItemData{
                        Name: "name1",
                        Body: io.NopCloser(bytes.NewBufferString("body1")),
                    },
                    ID:   "id1.data",
                    Name: "name1",
                    Body: io.NopCloser(bytes.NewBufferString("body1")),
                },
            },
        },

@@ -245,8 +202,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
            name:    "items with success and fetch error",
            version: version.OneDrive1DataAndMetaFiles,
            backingCollection: data.FetchRestoreCollection{
                Collection: mockRestoreCollection{
                    items: []*dataMock.Item{
                Collection: dataMock.Collection{
                    ItemData: []*dataMock.Item{
                        {
                            ItemID: "name0",
                            Reader: io.NopCloser(bytes.NewBufferString("body0")),

@@ -264,18 +221,14 @@ func (suite *ExportUnitSuite) TestGetItems() {
            },
            expectedItems: []export.Item{
                {
                    ID: "name0",
                    Data: export.ItemData{
                        Name: "name0",
                        Body: io.NopCloser(bytes.NewBufferString("body0")),
                    },
                    ID:   "name0",
                    Name: "name0",
                    Body: io.NopCloser(bytes.NewBufferString("body0")),
                },
                {
                    ID: "name2",
                    Data: export.ItemData{
                        Name: "name2",
                        Body: io.NopCloser(bytes.NewBufferString("body2")),
                    },
                    ID:   "name2",
                    Name: "name2",
                    Body: io.NopCloser(bytes.NewBufferString("body2")),
                },
                {
                    ID: "",

@@ -292,7 +245,10 @@ func (suite *ExportUnitSuite) TestGetItems() {
            ctx, flush := tester.NewContext(t)
            defer flush()

            ec := drive.NewExportCollection("", test.backingCollection, test.version)
            ec := drive.NewExportCollection(
                "",
                []data.RestoreCollection{test.backingCollection},
                test.version)

            items := ec.Items(ctx)

@@ -308,8 +264,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
            // to simplify testing.
            for i, item := range fitems {
                assert.Equal(t, test.expectedItems[i].ID, item.ID, "id")
                assert.Equal(t, test.expectedItems[i].Data.Name, item.Data.Name, "name")
                assert.Equal(t, test.expectedItems[i].Data.Body, item.Data.Body, "body")
                assert.Equal(t, test.expectedItems[i].Name, item.Name, "name")
                assert.Equal(t, test.expectedItems[i].Body, item.Body, "body")
                assert.ErrorIs(t, item.Error, test.expectedItems[i].Error)
            }
        })

@@ -328,11 +284,9 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
        dii           = odStub.DriveItemInfo()
        expectedItems = []export.Item{
            {
                ID: "id1.data",
                Data: export.ItemData{
                    Name: "name1",
                    Body: io.NopCloser((bytes.NewBufferString("body1"))),
                },
                ID:   "id1.data",
                Name: "name1",
                Body: io.NopCloser((bytes.NewBufferString("body1"))),
            },
        }
    )

@@ -344,9 +298,9 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {

    dcs := []data.RestoreCollection{
        data.FetchRestoreCollection{
            Collection: mockRestoreCollection{
                path: p,
                items: []*dataMock.Item{
            Collection: dataMock.Collection{
                Path: p,
                ItemData: []*dataMock.Item{
                    {
                        ItemID: "id1.data",
                        Reader: io.NopCloser(bytes.NewBufferString("body1")),
@@ -27,10 +27,10 @@ func ProduceExportCollections(
    backupDriveIDNames idname.CacheBuilder,
    deets *details.Builder,
    errs *fault.Bus,
) ([]export.Collection, error) {
) ([]export.Collectioner, error) {
    var (
        el = errs.Local()
        ec = make([]export.Collection, 0, len(dcs))
        ec = make([]export.Collectioner, 0, len(dcs))
    )

    for _, dc := range dcs {

@@ -51,7 +51,12 @@ func ProduceExportCollections(
            Append(driveName).
            Append(drivePath.Folders...)

        ec = append(ec, drive.NewExportCollection(baseDir.String(), dc, backupVersion))
        ec = append(
            ec,
            drive.NewExportCollection(
                baseDir.String(),
                []data.RestoreCollection{dc},
                backupVersion))
    }

    return ec, el.Failure()
@@ -20,7 +20,6 @@ import (
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/path"
)

type ExportUnitSuite struct {

@@ -52,36 +51,6 @@ func (fd finD) FetchItemByName(ctx context.Context, name string) (data.Item, err
    return nil, assert.AnError
}

type mockRestoreCollection struct {
    path  path.Path
    items []*dataMock.Item
}

func (rc mockRestoreCollection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
    ch := make(chan data.Item)

    go func() {
        defer close(ch)

        el := errs.Local()

        for _, item := range rc.items {
            if item.ReadErr != nil {
                el.AddRecoverable(ctx, item.ReadErr)
                continue
            }

            ch <- item
        }
    }()

    return ch
}

func (rc mockRestoreCollection) FullPath() path.Path {
    return rc.path
}

func (suite *ExportUnitSuite) TestExportRestoreCollections() {
    t := suite.T()

@@ -100,11 +69,9 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
        expectedPath  = "Libraries/" + driveName
        expectedItems = []export.Item{
            {
                ID: "id1.data",
                Data: export.ItemData{
                    Name: "name1",
                    Body: io.NopCloser((bytes.NewBufferString("body1"))),
                },
                ID:   "id1.data",
                Name: "name1",
                Body: io.NopCloser((bytes.NewBufferString("body1"))),
            },
        }
    )

@@ -116,9 +83,9 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {

    dcs := []data.RestoreCollection{
        data.FetchRestoreCollection{
            Collection: mockRestoreCollection{
                path: p,
                items: []*dataMock.Item{
            Collection: dataMock.Collection{
                Path: p,
                ItemData: []*dataMock.Item{
                    {
                        ItemID: "id1.data",
                        Reader: io.NopCloser(bytes.NewBufferString("body1")),
@@ -105,7 +105,7 @@ type exportStats struct {

// Run begins a synchronous export operation.
func (op *ExportOperation) Run(ctx context.Context) (
    expColl []export.Collection,
    expColl []export.Collectioner,
    err error,
) {
    defer func() {

@@ -199,7 +199,7 @@ func (op *ExportOperation) do(
    opStats *exportStats,
    detailsStore streamstore.Reader,
    start time.Time,
) ([]export.Collection, error) {
) ([]export.Collectioner, error) {
    logger.Ctx(ctx).
        With("control_options", op.Options, "selectors", op.Selectors).
        Info("exporting selection")

@@ -268,7 +268,7 @@ func (op *ExportOperation) do(
        dcs,
        op.Errors)
    if err != nil {
        return nil, clues.Wrap(err, "exporting collections")
        return nil, clues.Stack(err)
    }

    opStats.ctrl = op.ec.Wait()

@@ -281,7 +281,7 @@ func (op *ExportOperation) do(
        return nil, clues.Wrap(err, "zipping export collections")
    }

    return []export.Collection{zc}, nil
    return []export.Collectioner{zc}, nil
}

    return expCollections, nil

@@ -334,7 +334,7 @@ func exportRestoreCollections(
    opts control.Options,
    dcs []data.RestoreCollection,
    errs *fault.Bus,
) ([]export.Collection, error) {
) ([]export.Collectioner, error) {
    complete := observe.MessageWithCompletion(ctx, "Preparing export")
    defer func() {
        complete <- struct{}{}
@@ -29,15 +29,15 @@ import (
    "github.com/alcionai/corso/src/pkg/store"
)

type ExportOpSuite struct {
type ExportUnitSuite struct {
    tester.Suite
}

func TestExportOpSuite(t *testing.T) {
    suite.Run(t, &ExportOpSuite{Suite: tester.NewUnitSuite(t)})
func TestExportUnitSuite(t *testing.T) {
    suite.Run(t, &ExportUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *ExportOpSuite) TestExportOperation_PersistResults() {
func (suite *ExportUnitSuite) TestExportOperation_PersistResults() {
    var (
        kw = &kopia.Wrapper{}
        sw = store.NewWrapper(&kopia.ModelStore{})

@@ -163,56 +163,57 @@ func (r *ReadSeekCloser) Close() error {
    return nil
}

func (suite *ExportOpSuite) TestZipExports() {
func (suite *ExportUnitSuite) TestZipExports() {
    table := []struct {
        name       string
        collection []export.Collection
        shouldErr  bool
        readErr    bool
        name          string
        inputColls    []export.Collectioner
        expectZipErr  assert.ErrorAssertionFunc
        expectReadErr assert.ErrorAssertionFunc
    }{
        {
            name:       "nothing",
            collection: []export.Collection{},
            shouldErr:  true,
            name:          "nothing",
            inputColls:    []export.Collectioner{},
            expectZipErr:  assert.Error,
            expectReadErr: assert.NoError,
        },
        {
            name: "empty",
            collection: []export.Collection{
            inputColls: []export.Collectioner{
                expCol{
                    base:  "",
                    items: []export.Item{},
                },
            },
            expectZipErr:  assert.NoError,
            expectReadErr: assert.NoError,
        },
        {
            name: "one item",
            collection: []export.Collection{
            inputColls: []export.Collectioner{
                expCol{
                    base: "",
                    items: []export.Item{
                        {
                            ID: "id1",
                            Data: export.ItemData{
                                Name: "test",
                                Body: NewReadSeekCloser([]byte("test")),
                            },
                            ID:   "id1",
                            Name: "test",
                            Body: NewReadSeekCloser([]byte("test")),
                        },
                    },
                },
            },
            expectZipErr:  assert.NoError,
            expectReadErr: assert.NoError,
        },
        {
            name: "multiple items",
            collection: []export.Collection{
            inputColls: []export.Collectioner{
                expCol{
                    base: "",
                    items: []export.Item{
                        {
                            ID: "id1",
                            Data: export.ItemData{
                                Name: "test",
                                Body: NewReadSeekCloser([]byte("test")),
                            },
                            ID:   "id1",
                            Name: "test",
                            Body: NewReadSeekCloser([]byte("test")),
                        },
                    },
                },

@@ -220,19 +221,19 @@ func (suite *ExportOpSuite) TestZipExports() {
                    base: "/fold",
                    items: []export.Item{
                        {
                            ID: "id2",
                            Data: export.ItemData{
                                Name: "test2",
                                Body: NewReadSeekCloser([]byte("test2")),
                            },
                            ID:   "id2",
                            Name: "test2",
                            Body: NewReadSeekCloser([]byte("test2")),
                        },
                    },
                },
            },
            expectZipErr:  assert.NoError,
            expectReadErr: assert.NoError,
        },
        {
            name: "one item with err",
            collection: []export.Collection{
            inputColls: []export.Collectioner{
                expCol{
                    base:  "",
                    items: []export.Item{

@@ -243,7 +244,8 @@ func (suite *ExportOpSuite) TestZipExports() {
                    },
                },
            },
            readErr: true,
            expectZipErr:  assert.NoError,
            expectReadErr: assert.Error,
        },
    }

@@ -254,46 +256,46 @@ func (suite *ExportOpSuite) TestZipExports() {
            ctx, flush := tester.NewContext(t)
            defer flush()

            zc, err := archive.ZipExportCollection(ctx, test.collection)
            zc, err := archive.ZipExportCollection(ctx, test.inputColls)
            test.expectZipErr(t, err, clues.ToCore(err))

            if test.shouldErr {
                assert.Error(t, err, "error")
            if err != nil {
                return
            }

            require.NoError(t, err, "error")
            assert.Empty(t, zc.BasePath(), "base path")

            zippedItems := []export.ItemData{}
            zippedItems := []export.Item{}

            count := 0
            for item := range zc.Items(ctx) {
                assert.True(t, strings.HasPrefix(item.Data.Name, "Corso_Export_"), "name prefix")
                assert.True(t, strings.HasSuffix(item.Data.Name, ".zip"), "name suffix")
                assert.True(t, strings.HasPrefix(item.Name, "Corso_Export_"), "name prefix")
                assert.True(t, strings.HasSuffix(item.Name, ".zip"), "name suffix")

                data, err := io.ReadAll(item.Data.Body)
                if test.readErr {
                    assert.Error(t, err, "read error")
                data, err := io.ReadAll(item.Body)
                test.expectReadErr(t, err, clues.ToCore(err))

                if err != nil {
                    return
                }

                size := int64(len(data))
                assert.NotEmpty(t, item.Name, "item name")

                item.Data.Body.Close()
                item.Body.Close()

                reader, err := zip.NewReader(bytes.NewReader(data), size)
                require.NoError(t, err, "zip reader")
                reader, err := zip.NewReader(bytes.NewReader(data), int64(len(data)))
                require.NoError(t, err, clues.ToCore(err))

                for _, f := range reader.File {
                    rc, err := f.Open()
                    assert.NoError(t, err, "open file in zip")
                    assert.NoError(t, err, clues.ToCore(err))

                    data, err := io.ReadAll(rc)
                    require.NoError(t, err, "read zip file content")
                    require.NoError(t, err, clues.ToCore(err))

                    rc.Close()

                    zippedItems = append(zippedItems, export.ItemData{
                    zippedItems = append(zippedItems, export.Item{
                        Name: f.Name,
                        Body: NewReadSeekCloser([]byte(data)),
                    })

@@ -304,17 +306,28 @@ func (suite *ExportOpSuite) TestZipExports() {

            assert.Equal(t, 1, count, "single item")

            expectedZippedItems := []export.ItemData{}
            for _, col := range test.collection {
            expectedZippedItems := []export.Item{}

            for _, col := range test.inputColls {
                for item := range col.Items(ctx) {
                    if col.BasePath() != "" {
                        item.Data.Name = strings.Join([]string{col.BasePath(), item.Data.Name}, "/")
                    expected := export.Item{
                        Name: item.Name,
                        Body: item.Body,
                    }
                    _, err := item.Data.Body.(io.ReadSeeker).Seek(0, io.SeekStart)
                    require.NoError(t, err, "seek")
                    expectedZippedItems = append(expectedZippedItems, item.Data)

                    if col.BasePath() != "" {
                        expected.Name = strings.Join([]string{col.BasePath(), item.Name}, "/")
                    }

                    _, err := expected.Body.(io.ReadSeeker).Seek(0, io.SeekStart)
                    require.NoError(t, err, clues.ToCore(err))

                    expected.ID = ""

                    expectedZippedItems = append(expectedZippedItems, expected)
                }
            }

            assert.Equal(t, expectedZippedItems, zippedItems, "items")
        })
    }
@@ -74,7 +74,7 @@ type (
        opts control.Options,
        dcs []data.RestoreCollection,
        errs *fault.Bus,
    ) ([]export.Collection, error)
    ) ([]export.Collectioner, error)

    Wait() *data.CollectionStats
@@ -15,7 +15,7 @@ import (
func ConsumeExportCollections(
    ctx context.Context,
    exportLocation string,
    expColl []Collection,
    expColl []Collectioner,
    errs *fault.Bus,
) error {
    el := errs.Local()

@@ -36,7 +36,7 @@ func ConsumeExportCollections(
            if err := writeItem(ictx, item, folder); err != nil {
                el.AddRecoverable(
                    ictx,
                    clues.Wrap(err, "writing item").With("file_name", item.Data.Name).WithClues(ctx))
                    clues.Wrap(err, "writing item").With("file_name", item.Name).WithClues(ctx))
            }
        }
    }

@@ -46,16 +46,16 @@ func ConsumeExportCollections(

// writeItem writes an ExportItem to disk in the specified folder.
func writeItem(ctx context.Context, item Item, folder string) error {
    name := item.Data.Name
    name := item.Name
    fpath := filepath.Join(folder, name)

    progReader, pclose := observe.ItemSpinner(
        ctx,
        item.Data.Body,
        item.Body,
        observe.ItemExportMsg,
        clues.Hide(name))

    defer item.Data.Body.Close()
    defer item.Body.Close()
    defer pclose()

    err := os.MkdirAll(folder, os.ModePerm)

@@ -132,15 +132,13 @@ func (suite *ExportE2ESuite) TestConsumeExportCollection() {
            ctx, flush := tester.NewContext(t)
            defer flush()

            ecs := []Collection{}
            ecs := []Collectioner{}
            for _, col := range test.cols {
                items := []Item{}
                for _, item := range col.items {
                    items = append(items, Item{
                        Data: ItemData{
                            Name: item.name,
                            Body: io.NopCloser((bytes.NewBufferString(item.body))),
                        },
                        Name: item.name,
                        Body: io.NopCloser((bytes.NewBufferString(item.body))),
                    })
                }
@@ -3,10 +3,16 @@ package export
import (
    "context"
    "io"

    "github.com/alcionai/corso/src/internal/data"
)

// Collection is the interface that is returned to the SDK consumer
type Collection interface {
// ---------------------------------------------------------------------------
// Collections
// ---------------------------------------------------------------------------

// Collectioner is the interface that is returned to the SDK consumer
type Collectioner interface {
    // BasePath gets the base path of the collection. This is derived
    // from FullPath, but trim out thing like drive id or any other part
    // that is not needed to show the path to the collection.

@@ -16,17 +22,36 @@ type Collection interface {
    Items(context.Context) <-chan Item
}

// ItemData is the data for an individual item.
type ItemData struct {
    // Name is the name of the item. This is the name that the item
    // would have had in the service.
    Name string
// BaseCollection holds the foundational details of an export collection.
type BaseCollection struct {
    // BaseDir contains the destination path of the collection.
    BaseDir string

    // Body is the body of the item. This is an io.ReadCloser and the
    // SDK consumer is responsible for closing it.
    Body io.ReadCloser
    // BackingCollection is the restore collection from which we will
    // create the export collection.
    BackingCollection []data.RestoreCollection

    // BackupVersion is the backupVersion of the data source.
    BackupVersion int

    Stream func(context.Context, []data.RestoreCollection, int, chan<- Item)
}

func (bc BaseCollection) BasePath() string {
    return bc.BaseDir
}

func (bc BaseCollection) Items(ctx context.Context) <-chan Item {
    ch := make(chan Item)
    go bc.Stream(ctx, bc.BackingCollection, bc.BackupVersion, ch)

    return ch
}

// ---------------------------------------------------------------------------
// Items
// ---------------------------------------------------------------------------

// Item is the item that is returned to the SDK consumer
type Item struct {
    // ID will be a unique id for the item. This is same as the id

@@ -34,10 +59,13 @@ type Item struct {
    // mostly used just for tracking.
    ID string

    // Data contains the actual data of the item. It will have both
    // the name of the item and an io.ReadCloser which contains the
    // body of the item.
    Data ItemData
    // Name is the name of the item. This is the name that the item
    // would have had in the service.
    Name string

    // Body is the body of the item. This is an io.ReadCloser and the
    // SDK consumer is responsible for closing it.
    Body io.ReadCloser

    // Error will contain any error that happened while trying to get
    // the item/items like when trying to resolve the name of the item.
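With Name and Body flattened onto Item, the disk-writing path in ConsumeExportCollections becomes a straight read of `item.Name` and `item.Body`. A simplified sketch of that loop (fault/observe plumbing omitted; `writeCollections` is a made-up name for illustration):

```go
package example

import (
	"context"
	"io"
	"os"
	"path/filepath"

	"github.com/alcionai/corso/src/pkg/export"
)

// writeCollections writes every item in every collection beneath root,
// mirroring ConsumeExportCollections without its error-bus and progress UI.
// (A fuller implementation would drain each channel before returning early.)
func writeCollections(ctx context.Context, root string, colls []export.Collectioner) error {
	for _, coll := range colls {
		folder := filepath.Join(root, coll.BasePath())

		if err := os.MkdirAll(folder, os.ModePerm); err != nil {
			return err
		}

		for item := range coll.Items(ctx) {
			if item.Error != nil {
				return item.Error
			}

			f, err := os.Create(filepath.Join(folder, item.Name))
			if err != nil {
				item.Body.Close()
				return err
			}

			_, copyErr := io.Copy(f, item.Body)

			item.Body.Close()
			f.Close()

			if copyErr != nil {
				return copyErr
			}
		}
	}

	return nil
}
```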