some code cleanup before groups export (#4202)
Various tidbits of data cleanup before moving forward with adding export behavior to groups.

* move duplicate collections mocks into data/mock
* move the export collection struct into pkg/export (to prevent future duplicates in the next PR)
* rename export.Collection to Collectioner, because it's an interface.
* some other non-logic rearrangement

---

#### Does this PR need a docs update or release note?

- [x] ⛔ No

#### Type of change

- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #3991

#### Test Plan

- [x] ⚡ Unit test
- [x] 💚 E2E
This commit is contained in:
parent: a2e80a178a
commit: 9a8c413b52
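The structural change at the center of this cleanup — the `export.Collection` interface renamed to `Collectioner`, plus the shared `BaseCollection` the service packages now construct — is easiest to see in isolation. The sketch below is a minimal, self-contained approximation assembled from the call sites visible in this diff (`BasePath()`, `Items(ctx)`, the `Stream` field); it is not the verbatim `pkg/export` source, and the `RestoreCollection` placeholder stands in for `data.RestoreCollection`, which lives in another package.

```go
package export

import (
	"context"
	"io"
)

// RestoreCollection is a placeholder for data.RestoreCollection, which the
// real code imports from src/internal/data (simplified here for illustration).
type RestoreCollection interface{}

// Item is the unit streamed out of an export collection. This diff flattens
// the old nested ItemData struct, so Name and Body now sit directly on Item.
type Item struct {
	ID    string
	Name  string
	Body  io.ReadCloser
	Error error
}

// Collectioner is the interface formerly named export.Collection; the rename
// marks it as an interface, per the PR description.
type Collectioner interface {
	BasePath() string
	Items(ctx context.Context) <-chan Item
}

// BaseCollection is a sketch of the shared implementation that OneDrive,
// SharePoint, and Groups each construct, delegating item production to a
// service-specific Stream func (the streamItems funcs in the diff below).
type BaseCollection struct {
	BaseDir           string
	BackingCollection []RestoreCollection
	BackupVersion     int
	Stream            func(context.Context, []RestoreCollection, int, chan<- Item)
}

func (c BaseCollection) BasePath() string { return c.BaseDir }

func (c BaseCollection) Items(ctx context.Context) <-chan Item {
	ch := make(chan Item)

	// The Stream func owns the channel and closes it when done, mirroring
	// the defer close(ch) in each service's streamItems implementation.
	go c.Stream(ctx, c.BackingCollection, c.BackupVersion, ch)

	return ch
}
```

With that shape in place, each service only has to supply a stream func, which is why the per-service `ProduceExportCollections` implementations in this diff shrink to path assembly plus a `BaseCollection` literal.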
@@ -6,7 +6,6 @@ import (
 	"github.com/spf13/pflag"
 
 	"github.com/alcionai/corso/src/cli/flags"
-	. "github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
 )
 
@@ -80,5 +79,18 @@ func exportGroupsCmd(cmd *cobra.Command, args []string) error {
 		return nil
 	}
 
-	return Only(ctx, utils.ErrNotYetImplemented)
+	opts := utils.MakeGroupsOpts(cmd)
+
+	if flags.RunModeFV == flags.RunModeFlagTest {
+		return nil
+	}
+
+	if err := utils.ValidateGroupsRestoreFlags(flags.BackupIDFV, opts); err != nil {
+		return err
+	}
+
+	sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)
+	utils.FilterGroupsRestoreInfoSelectors(sel, opts)
+
+	return runExport(ctx, cmd, args, opts.ExportCfg, sel.Selector, flags.BackupIDFV, "Groups")
 }
@@ -76,8 +76,7 @@ func (suite *GroupsUnitSuite) TestAddGroupsCommands() {
 			cmd.SetOut(new(bytes.Buffer)) // drop output
 			cmd.SetErr(new(bytes.Buffer)) // drop output
 			err := cmd.Execute()
-			// assert.NoError(t, err, clues.ToCore(err))
-			assert.ErrorIs(t, err, utils.ErrNotYetImplemented, clues.ToCore(err))
+			assert.NoError(t, err, clues.ToCore(err))
 
 			opts := utils.MakeGroupsOpts(cmd)
 			assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)
@@ -6,7 +6,6 @@ import (
 	"github.com/spf13/pflag"
 
 	"github.com/alcionai/corso/src/cli/flags"
-	. "github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
 )
 
@@ -80,5 +79,18 @@ func exportTeamsCmd(cmd *cobra.Command, args []string) error {
 		return nil
 	}
 
-	return Only(ctx, utils.ErrNotYetImplemented)
+	opts := utils.MakeGroupsOpts(cmd)
+
+	if flags.RunModeFV == flags.RunModeFlagTest {
+		return nil
+	}
+
+	if err := utils.ValidateGroupsRestoreFlags(flags.BackupIDFV, opts); err != nil {
+		return err
+	}
+
+	sel := utils.IncludeGroupsRestoreDataSelectors(ctx, opts)
+	utils.FilterGroupsRestoreInfoSelectors(sel, opts)
+
+	return runExport(ctx, cmd, args, opts.ExportCfg, sel.Selector, flags.BackupIDFV, "Teams")
 }
@@ -76,10 +76,9 @@ func (suite *TeamsUnitSuite) TestAddTeamsCommands() {
 			cmd.SetOut(new(bytes.Buffer)) // drop output
 			cmd.SetErr(new(bytes.Buffer)) // drop output
 			err := cmd.Execute()
-			// assert.NoError(t, err, clues.ToCore(err))
-			assert.ErrorIs(t, err, utils.ErrNotYetImplemented, clues.ToCore(err))
+			assert.NoError(t, err, clues.ToCore(err))
 
-			opts := utils.MakeTeamsOpts(cmd)
+			opts := utils.MakeGroupsOpts(cmd)
 			assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)
 
 			assert.Equal(t, testdata.Archive, opts.ExportCfg.Archive)
@@ -86,7 +86,7 @@ func (suite *TeamsUnitSuite) TestAddTeamsCommands() {
 			// assert.NoError(t, err, clues.ToCore(err))
 			assert.ErrorIs(t, err, utils.ErrNotYetImplemented, clues.ToCore(err))
 
-			opts := utils.MakeTeamsOpts(cmd)
+			opts := utils.MakeGroupsOpts(cmd)
 			assert.Equal(t, testdata.BackupInput, flags.BackupIDFV)
 
 			assert.Equal(t, testdata.Collisions, opts.RestoreCfg.Collisions)
@@ -1,30 +0,0 @@ (file deleted)
-package utils
-
-import (
-	"github.com/spf13/cobra"
-
-	"github.com/alcionai/corso/src/cli/flags"
-)
-
-type TeamsOpts struct {
-	Teams []string
-
-	RestoreCfg RestoreCfgOpts
-	ExportCfg  ExportCfgOpts
-
-	Populated flags.PopulatedFlags
-}
-
-func MakeTeamsOpts(cmd *cobra.Command) TeamsOpts {
-	return TeamsOpts{
-		Teams: flags.UserFV,
-
-		RestoreCfg: makeRestoreCfgOpts(cmd),
-		ExportCfg:  makeExportCfgOpts(cmd),
-
-		// populated contains the list of flags that appear in the
-		// command, according to pflags. Use this to differentiate
-		// between an "empty" and a "missing" value.
-		Populated: flags.GetPopulatedFlags(cmd),
-	}
-}
@@ -32,10 +32,8 @@ func (z zipCollection) Items(ctx context.Context) <-chan export.Item {
 		defer close(rc)
 
 		rc <- export.Item{
-			Data: export.ItemData{
-				Name: "Corso_Export_" + dttm.FormatNow(dttm.HumanReadable) + ".zip",
-				Body: z.reader,
-			},
+			Name: "Corso_Export_" + dttm.FormatNow(dttm.HumanReadable) + ".zip",
+			Body: z.reader,
 		}
 
 		return rc
@@ -45,8 +43,8 @@ func (z zipCollection) Items(ctx context.Context) <-chan export.Item {
 // them into a single collection.
 func ZipExportCollection(
 	ctx context.Context,
-	expCollections []export.Collection,
-) (export.Collection, error) {
+	expCollections []export.Collectioner,
+) (export.Collectioner, error) {
 	if len(expCollections) == 0 {
 		return nil, clues.New("no export collections provided")
 	}
@@ -71,7 +69,7 @@ func ZipExportCollection(
 			return
 		}
 
-		name := item.Data.Name
+		name := item.Name
 
 		// We assume folder and name to not contain any path separators.
 		// Also, this should always use `/` as this is
@@ -86,7 +84,7 @@ func ZipExportCollection(
 			return
 		}
 
-		_, err = io.CopyBuffer(f, item.Data.Body, buf)
+		_, err = io.CopyBuffer(f, item.Body, buf)
 		if err != nil {
 			writer.CloseWithError(clues.Wrap(err, "writing zip entry").With("name", name).With("id", item.ID))
 			return
@@ -10,6 +10,10 @@ import (
 	"github.com/alcionai/corso/src/pkg/path"
 )
 
+// ---------------------------------------------------------------------------
+// Collections
+// ---------------------------------------------------------------------------
+
 // A Collection represents the set of data within a single logical location
 // denoted by FullPath.
 type Collection interface {
@@ -56,12 +60,9 @@ type RestoreCollection interface {
 	FetchItemByNamer
 }
 
-type FetchItemByNamer interface {
-	// Fetch retrieves an item with the given name from the Collection if it
-	// exists. Items retrieved with Fetch may still appear in the channel returned
-	// by Items().
-	FetchItemByName(ctx context.Context, name string) (Item, error)
-}
+// ---------------------------------------------------------------------------
+// Items
+// ---------------------------------------------------------------------------
 
 // Item represents a single item within a Collection
 type Item interface {
@@ -74,23 +75,6 @@ type Item interface {
 	Deleted() bool
 }
 
-// LocationPather provides a LocationPath describing the path with Display Names
-// instead of canonical IDs
-type LocationPather interface {
-	LocationPath() *path.Builder
-}
-
-// PreviousLocationPather provides both the current location of the collection
-// as well as the location of the item in the previous backup.
-//
-// TODO(ashmrtn): If we guarantee that we persist the location of collections in
-// addition to the path of the item then we could just have a single
-// *LocationPather interface with current and previous location functions.
-type PreviousLocationPather interface {
-	LocationPather
-	PreviousLocationPath() details.LocationIDer
-}
-
 // ItemInfo returns the details.ItemInfo for the item.
 type ItemInfo interface {
 	Info() (details.ItemInfo, error)
@@ -108,3 +92,31 @@ type ItemSize interface {
 type ItemModTime interface {
 	ModTime() time.Time
 }
+
+type FetchItemByNamer interface {
+	// Fetch retrieves an item with the given name from the Collection if it
+	// exists. Items retrieved with Fetch may still appear in the channel returned
+	// by Items().
+	FetchItemByName(ctx context.Context, name string) (Item, error)
+}
+
+// ---------------------------------------------------------------------------
+// Paths
+// ---------------------------------------------------------------------------
+
+// LocationPather provides a LocationPath describing the path with Display Names
+// instead of canonical IDs
+type LocationPather interface {
+	LocationPath() *path.Builder
+}
+
+// PreviousLocationPather provides both the current location of the collection
+// as well as the location of the item in the previous backup.
+//
+// TODO(ashmrtn): If we guarantee that we persist the location of collections in
+// addition to the path of the item then we could just have a single
+// *LocationPather interface with current and previous location functions.
+type PreviousLocationPather interface {
+	LocationPather
+	PreviousLocationPath() details.LocationIDer
+}
@@ -78,14 +78,39 @@ var (
 	_ data.RestoreCollection = &Collection{}
 )
 
-type Collection struct{}
+type Collection struct {
+	Path                 path.Path
+	ItemData             []*Item
+	ItemsRecoverableErrs []error
+}
 
 func (c Collection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
-	return nil
+	ch := make(chan data.Item)
+
+	go func() {
+		defer close(ch)
+
+		el := errs.Local()
+
+		for _, item := range c.ItemData {
+			if item.ReadErr != nil {
+				el.AddRecoverable(ctx, item.ReadErr)
+				continue
+			}
+
+			ch <- item
+		}
+	}()
+
+	for _, err := range c.ItemsRecoverableErrs {
+		errs.AddRecoverable(ctx, err)
+	}
+
+	return ch
 }
 
 func (c Collection) FullPath() path.Path {
-	return nil
+	return c.Path
 }
 
 func (c Collection) PreviousPath() path.Path {
@@ -13,83 +13,61 @@ import (
 	"github.com/alcionai/corso/src/pkg/fault"
 )
 
-var _ export.Collection = &ExportCollection{}
-
-// ExportCollection is the implementation of export.ExportCollection for OneDrive
-type ExportCollection struct {
-	// baseDir contains the path of the collection
-	baseDir string
-
-	// backingCollection is the restore collection from which we will
-	// create the export collection.
-	backingCollection data.RestoreCollection
-
-	// backupVersion is the backupVersion of the backup this collection was part
-	// of. This is required to figure out how to get the name of the
-	// item.
-	backupVersion int
-}
-
 func NewExportCollection(
 	baseDir string,
-	backingCollection data.RestoreCollection,
+	backingCollection []data.RestoreCollection,
 	backupVersion int,
-) ExportCollection {
-	return ExportCollection{
-		baseDir:           baseDir,
-		backingCollection: backingCollection,
-		backupVersion:     backupVersion,
+) export.Collectioner {
+	return export.BaseCollection{
+		BaseDir:           baseDir,
+		BackingCollection: backingCollection,
+		BackupVersion:     backupVersion,
+		Stream:            streamItems,
 	}
 }
 
-func (ec ExportCollection) BasePath() string {
-	return ec.baseDir
-}
-
-func (ec ExportCollection) Items(ctx context.Context) <-chan export.Item {
-	ch := make(chan export.Item)
-	go items(ctx, ec, ch)
-
-	return ch
-}
-
-// items converts items in backing collection to export items
-func items(ctx context.Context, ec ExportCollection, ch chan<- export.Item) {
+// streamItems streams the streamItems in the backingCollection into the export stream chan
+func streamItems(
+	ctx context.Context,
+	drc []data.RestoreCollection,
+	backupVersion int,
+	ch chan<- export.Item,
+) {
 	defer close(ch)
 
 	errs := fault.New(false)
 
-	for item := range ec.backingCollection.Items(ctx, errs) {
-		itemUUID := item.ID()
-		if isMetadataFile(itemUUID, ec.backupVersion) {
-			continue
-		}
-
-		name, err := getItemName(ctx, itemUUID, ec.backupVersion, ec.backingCollection)
-
-		ch <- export.Item{
-			ID: itemUUID,
-			Data: export.ItemData{
-				Name: name,
-				Body: item.ToReader(),
-			},
-			Error: err,
-		}
-	}
-
-	eitems, erecovereable := errs.ItemsAndRecovered()
-
-	// Return all the items that we failed to source from the persistence layer
-	for _, err := range eitems {
-		ch <- export.Item{
-			ID:    err.ID,
-			Error: &err,
-		}
-	}
-
-	for _, ec := range erecovereable {
-		ch <- export.Item{
-			Error: ec,
+	for _, rc := range drc {
+		for item := range rc.Items(ctx, errs) {
+			itemUUID := item.ID()
+			if isMetadataFile(itemUUID, backupVersion) {
+				continue
+			}
+
+			name, err := getItemName(ctx, itemUUID, backupVersion, rc)
+
+			ch <- export.Item{
+				ID:    itemUUID,
+				Name:  name,
+				Body:  item.ToReader(),
+				Error: err,
+			}
+		}
+
+		items, recovered := errs.ItemsAndRecovered()
+
+		// Return all the items that we failed to source from the persistence layer
+		for _, err := range items {
+			ch <- export.Item{
+				ID:    err.ID,
+				Error: &err,
+			}
+		}
+
+		for _, err := range recovered {
+			ch <- export.Item{
+				Error: err,
+			}
 		}
 	}
 }
@@ -6,6 +6,7 @@ import (
 	"io"
 	"testing"
 
+	"github.com/alcionai/clues"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/suite"
 
@@ -58,7 +59,11 @@ func (suite *ExportUnitSuite) TestIsMetadataFile() {
 
 	for _, test := range table {
 		suite.Run(test.name, func() {
-			assert.Equal(suite.T(), test.isMeta, isMetadataFile(test.id, test.backupVersion), "is metadata")
+			assert.Equal(
+				suite.T(),
+				test.isMeta,
+				isMetadataFile(test.id, test.backupVersion),
+				"is metadata")
 		})
 	}
 }
@@ -86,47 +91,47 @@ func (fd finD) FetchItemByName(ctx context.Context, name string) (data.Item, err
 
 func (suite *ExportUnitSuite) TestGetItemName() {
 	table := []struct {
-		tname         string
+		name          string
 		id            string
 		backupVersion int
-		name          string
+		expectName    string
 		fin           data.FetchItemByNamer
-		errFunc       assert.ErrorAssertionFunc
+		expectErr     assert.ErrorAssertionFunc
 	}{
 		{
-			tname:         "legacy",
+			name:          "legacy",
 			id:            "name",
 			backupVersion: version.OneDrive1DataAndMetaFiles,
-			name:          "name",
-			errFunc:       assert.NoError,
+			expectName:    "name",
+			expectErr:     assert.NoError,
 		},
 		{
-			tname:         "name in filename",
+			name:          "name in filename",
 			id:            "name.data",
 			backupVersion: version.OneDrive4DirIncludesPermissions,
-			name:          "name",
-			errFunc:       assert.NoError,
+			expectName:    "name",
+			expectErr:     assert.NoError,
 		},
 		{
-			tname:         "name in metadata",
+			name:          "name in metadata",
 			id:            "id.data",
 			backupVersion: version.Backup,
-			name:          "name",
+			expectName:    "name",
 			fin:           finD{id: "id.meta", name: "name"},
-			errFunc:       assert.NoError,
+			expectErr:     assert.NoError,
 		},
 		{
-			tname:         "name in metadata but error",
+			name:          "name in metadata but error",
 			id:            "id.data",
 			backupVersion: version.Backup,
-			name:          "",
+			expectName:    "",
 			fin:           finD{err: assert.AnError},
-			errFunc:       assert.Error,
+			expectErr:     assert.Error,
 		},
 	}
 
 	for _, test := range table {
-		suite.Run(test.tname, func() {
+		suite.Run(test.name, func() {
 			t := suite.T()
 
 			ctx, flush := tester.NewContext(t)
@@ -137,9 +142,9 @@ func (suite *ExportUnitSuite) TestGetItemName() {
 				test.id,
 				test.backupVersion,
 				test.fin)
-			test.errFunc(t, err)
+			test.expectErr(t, err, clues.ToCore(err))
 
-			assert.Equal(t, test.name, name, "name")
+			assert.Equal(t, test.expectName, name, "name")
 		})
 	}
 }
src/internal/m365/collection/groups/export.go (new file, 65 lines)
@@ -0,0 +1,65 @@
+package groups
+
+import (
+	"context"
+
+	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/pkg/export"
+	"github.com/alcionai/corso/src/pkg/fault"
+)
+
+func NewExportCollection(
+	baseDir string,
+	backingCollections []data.RestoreCollection,
+	backupVersion int,
+) export.Collectioner {
+	return export.BaseCollection{
+		BaseDir:           baseDir,
+		BackingCollection: backingCollections,
+		BackupVersion:     backupVersion,
+		Stream:            streamItems,
+	}
+}
+
+// streamItems streams the items in the backingCollection into the export stream chan
+func streamItems(
+	ctx context.Context,
+	drc []data.RestoreCollection,
+	backupVersion int,
+	ch chan<- export.Item,
+) {
+	defer close(ch)
+
+	errs := fault.New(false)
+
+	for _, rc := range drc {
+		for item := range rc.Items(ctx, errs) {
+			itemID := item.ID()
+
+			// channel message items have no name
+			name := itemID
+
+			ch <- export.Item{
+				ID:   itemID,
+				Name: name,
+				Body: item.ToReader(),
+			}
+		}
+
+		items, recovered := errs.ItemsAndRecovered()
+
+		// Return all the items that we failed to source from the persistence layer
+		for _, err := range items {
+			ch <- export.Item{
+				ID:    err.ID,
+				Error: &err,
+			}
+		}
+
+		for _, err := range recovered {
+			ch <- export.Item{
+				Error: err,
+			}
+		}
+	}
+}
src/internal/m365/collection/groups/export_test.go (new file, 99 lines)
@@ -0,0 +1,99 @@
+package groups
+
+import (
+	"testing"
+
+	"github.com/alcionai/clues"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/suite"
+
+	"github.com/alcionai/corso/src/internal/data"
+	dataMock "github.com/alcionai/corso/src/internal/data/mock"
+	"github.com/alcionai/corso/src/internal/tester"
+	"github.com/alcionai/corso/src/internal/version"
+	"github.com/alcionai/corso/src/pkg/export"
+)
+
+type ExportUnitSuite struct {
+	tester.Suite
+}
+
+func TestExportUnitSuite(t *testing.T) {
+	suite.Run(t, &ExportUnitSuite{Suite: tester.NewUnitSuite(t)})
+}
+
+func (suite *ExportUnitSuite) TestStreamItems() {
+	table := []struct {
+		name        string
+		backingColl dataMock.Collection
+		expectName  string
+		expectErr   assert.ErrorAssertionFunc
+	}{
+		{
+			name: "no errors",
+			backingColl: dataMock.Collection{
+				ItemData: []*dataMock.Item{
+					{ItemID: "zim"},
+				},
+			},
+			expectName: "zim",
+			expectErr:  assert.NoError,
+		},
+		{
+			name: "only recoverable errors",
+			backingColl: dataMock.Collection{
+				ItemsRecoverableErrs: []error{
+					clues.New("The knowledge... it fills me! It is neat!"),
+				},
+			},
+			expectErr: assert.Error,
+		},
+		{
+			name: "items and recoverable errors",
+			backingColl: dataMock.Collection{
+				ItemData: []*dataMock.Item{
+					{ItemID: "gir"},
+				},
+				ItemsRecoverableErrs: []error{
+					clues.New("I miss my cupcake."),
+				},
+			},
+			expectName: "gir",
+			expectErr:  assert.Error,
+		},
+	}
+
+	for _, test := range table {
+		suite.Run(test.name, func() {
+			t := suite.T()
+
+			ctx, flush := tester.NewContext(t)
+			defer flush()
+
+			ch := make(chan export.Item)
+
+			go streamItems(
+				ctx,
+				[]data.RestoreCollection{test.backingColl},
+				version.NoBackup,
+				ch)
+
+			var (
+				itm export.Item
+				err error
+			)
+
+			for i := range ch {
+				if i.Error == nil {
+					itm = i
+				} else {
+					err = i.Error
+				}
+			}
+
+			test.expectErr(t, err, clues.ToCore(err))
+
+			assert.Equal(t, test.expectName, itm.Name, "item name")
+		})
+	}
+}
@@ -8,6 +8,7 @@ import (
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/diagnostics"
 	"github.com/alcionai/corso/src/internal/m365/graph"
+	"github.com/alcionai/corso/src/internal/m365/service/groups"
 	"github.com/alcionai/corso/src/internal/m365/service/onedrive"
 	"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
 	"github.com/alcionai/corso/src/internal/m365/support"
@@ -27,7 +28,7 @@ func (ctrl *Controller) ProduceExportCollections(
 	opts control.Options,
 	dcs []data.RestoreCollection,
 	errs *fault.Bus,
-) ([]export.Collection, error) {
+) ([]export.Collectioner, error) {
 	ctx, end := diagnostics.Span(ctx, "m365:export")
 	defer end()
 
@@ -35,7 +36,7 @@ func (ctrl *Controller) ProduceExportCollections(
 	ctx = clues.Add(ctx, "export_config", exportCfg) // TODO(meain): needs PII control
 
 	var (
-		expCollections []export.Collection
+		expCollections []export.Collectioner
 		status         *support.ControllerOperationStatus
 		deets          = &details.Builder{}
 		err            error
@@ -61,6 +62,15 @@ func (ctrl *Controller) ProduceExportCollections(
 			ctrl.backupDriveIDNames,
 			deets,
 			errs)
+	case selectors.ServiceGroups:
+		expCollections, err = groups.ProduceExportCollections(
+			ctx,
+			backupVersion,
+			exportCfg,
+			opts,
+			dcs,
+			deets,
+			errs)
+
 	default:
 		err = clues.Wrap(clues.New(sels.Service.String()), "service not supported")
@@ -78,7 +78,7 @@ func (ctrl Controller) ProduceExportCollections(
 	_ control.Options,
 	_ []data.RestoreCollection,
 	_ *fault.Bus,
-) ([]export.Collection, error) {
+) ([]export.Collectioner, error) {
 	return nil, ctrl.Err
 }
 
src/internal/m365/service/groups/export.go (new file, 52 lines)
@@ -0,0 +1,52 @@
+package groups
+
+import (
+	"context"
+
+	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/m365/collection/groups"
+	"github.com/alcionai/corso/src/pkg/backup/details"
+	"github.com/alcionai/corso/src/pkg/control"
+	"github.com/alcionai/corso/src/pkg/export"
+	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/path"
+)
+
+// ProduceExportCollections will create the export collections for the
+// given restore collections.
+func ProduceExportCollections(
+	ctx context.Context,
+	backupVersion int,
+	exportCfg control.ExportConfig,
+	opts control.Options,
+	dcs []data.RestoreCollection,
+	deets *details.Builder,
+	errs *fault.Bus,
+) ([]export.Collectioner, error) {
+	var (
+		el = errs.Local()
+		ec = make([]export.Collectioner, 0, len(dcs))
+	)
+
+	for _, restoreColl := range dcs {
+		var (
+			fp      = restoreColl.FullPath()
+			cat     = fp.Category()
+			folders = []string{cat.String()}
+		)
+
+		switch cat {
+		case path.ChannelMessagesCategory:
+			folders = append(folders, fp.Folders()...)
+		}
+
+		coll := groups.NewExportCollection(
+			path.Builder{}.Append(folders...).String(),
+			[]data.RestoreCollection{restoreColl},
+			backupVersion)
+
+		ec = append(ec, coll)
+	}
+
+	return ec, el.Failure()
+}
src/internal/m365/service/groups/export_test.go (new file, 111 lines)
@@ -0,0 +1,111 @@
+package groups
+
+import (
+	"bytes"
+	"context"
+	"io"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/suite"
+
+	"github.com/alcionai/corso/src/internal/data"
+	dataMock "github.com/alcionai/corso/src/internal/data/mock"
+	groupMock "github.com/alcionai/corso/src/internal/m365/service/groups/mock"
+	"github.com/alcionai/corso/src/internal/tester"
+	"github.com/alcionai/corso/src/internal/version"
+	"github.com/alcionai/corso/src/pkg/control"
+	"github.com/alcionai/corso/src/pkg/export"
+	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/path"
+)
+
+type ExportUnitSuite struct {
+	tester.Suite
+}
+
+func TestExportUnitSuite(t *testing.T) {
+	suite.Run(t, &ExportUnitSuite{Suite: tester.NewUnitSuite(t)})
+}
+
+type finD struct {
+	id   string
+	name string
+	err  error
+}
+
+func (fd finD) FetchItemByName(ctx context.Context, name string) (data.Item, error) {
+	if fd.err != nil {
+		return nil, fd.err
+	}
+
+	if name == fd.id {
+		return &dataMock.Item{
+			ItemID: fd.id,
+			Reader: io.NopCloser(bytes.NewBufferString(`{"displayname": "` + fd.name + `"}`)),
+		}, nil
+	}
+
+	return nil, assert.AnError
+}
+
+func (suite *ExportUnitSuite) TestExportRestoreCollections() {
+	t := suite.T()
+
+	ctx, flush := tester.NewContext(t)
+	defer flush()
+
+	var (
+		itemID        = "itemID"
+		containerName = "channelID"
+		exportCfg     = control.ExportConfig{}
+		dii           = groupMock.ItemInfo()
+		expectedPath  = path.ChannelMessagesCategory.String() + "/" + containerName
+		expectedItems = []export.Item{
+			{
+				ID:   itemID,
+				Name: dii.Groups.ItemName,
+				Body: io.NopCloser((bytes.NewBufferString("body1"))),
+			},
+		}
+	)
+
+	p, err := path.Build("t", "pr", path.GroupsService, path.ChannelMessagesCategory, false, containerName)
+	assert.NoError(t, err, "build path")
+
+	dcs := []data.RestoreCollection{
+		data.FetchRestoreCollection{
+			Collection: dataMock.Collection{
+				Path: p,
+				ItemData: []*dataMock.Item{
+					{
+						ItemID:   itemID,
+						Reader:   io.NopCloser(bytes.NewBufferString("body1")),
+						ItemInfo: dii,
+					},
+				},
+			},
+			FetchItemByNamer: finD{id: itemID, name: dii.Groups.ItemName},
+		},
+	}
+
+	ecs, err := ProduceExportCollections(
+		ctx,
+		int(version.Backup),
+		exportCfg,
+		control.DefaultOptions(),
+		dcs,
+		nil,
+		fault.New(true))
+	assert.NoError(t, err, "export collections error")
+	assert.Len(t, ecs, 1, "num of collections")
+
+	assert.Equal(t, expectedPath, ecs[0].BasePath(), "base dir")
+
+	fitems := []export.Item{}
+	for item := range ecs[0].Items(ctx) {
+		fitems = append(fitems, item)
+	}
+
+	assert.Equal(t, expectedItems, fitems, "items")
+}
src/internal/m365/service/groups/mock/mock.go (new file, 15 lines)
@@ -0,0 +1,15 @@
+package stub
+
+import (
+	"github.com/alcionai/corso/src/pkg/backup/details"
+)
+
+func ItemInfo() details.ItemInfo {
+	return details.ItemInfo{
+		Groups: &details.GroupsInfo{
+			ItemType: details.GroupsChannelMessage,
+			ItemName: "itemID",
+			Size:     1,
+		},
+	}
+}
@@ -24,10 +24,10 @@ func ProduceExportCollections(
 	dcs []data.RestoreCollection,
 	deets *details.Builder,
 	errs *fault.Bus,
-) ([]export.Collection, error) {
+) ([]export.Collectioner, error) {
 	var (
 		el = errs.Local()
-		ec = make([]export.Collection, 0, len(dcs))
+		ec = make([]export.Collectioner, 0, len(dcs))
 	)
 
 	for _, dc := range dcs {
@@ -38,7 +38,12 @@ func ProduceExportCollections(
 
 		baseDir := path.Builder{}.Append(drivePath.Folders...)
 
-		ec = append(ec, drive.NewExportCollection(baseDir.String(), dc, backupVersion))
+		ec = append(
+			ec,
+			drive.NewExportCollection(
+				baseDir.String(),
+				[]data.RestoreCollection{dc},
+				backupVersion))
 	}
 
 	return ec, el.Failure()
@@ -19,7 +19,6 @@ import (
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/export"
 	"github.com/alcionai/corso/src/pkg/fault"
-	"github.com/alcionai/corso/src/pkg/path"
 )
 
 type ExportUnitSuite struct {
@@ -51,36 +50,6 @@ func (fd finD) FetchItemByName(ctx context.Context, name string) (data.Item, err
 	return nil, assert.AnError
 }
 
-type mockRestoreCollection struct {
-	path  path.Path
-	items []*dataMock.Item
-}
-
-func (rc mockRestoreCollection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
-	ch := make(chan data.Item)
-
-	go func() {
-		defer close(ch)
-
-		el := errs.Local()
-
-		for _, item := range rc.items {
-			if item.ReadErr != nil {
-				el.AddRecoverable(ctx, item.ReadErr)
-				continue
-			}
-
-			ch <- item
-		}
-	}()
-
-	return ch
-}
-
-func (rc mockRestoreCollection) FullPath() path.Path {
-	return rc.path
-}
-
 func (suite *ExportUnitSuite) TestGetItems() {
 	table := []struct {
 		name string
@@ -92,8 +61,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			name:    "single item",
 			version: 1,
 			backingCollection: data.NoFetchRestoreCollection{
-				Collection: mockRestoreCollection{
-					items: []*dataMock.Item{
+				Collection: dataMock.Collection{
+					ItemData: []*dataMock.Item{
 						{
 							ItemID: "name1",
 							Reader: io.NopCloser(bytes.NewBufferString("body1")),
@@ -103,11 +72,9 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			},
 			expectedItems: []export.Item{
 				{
 					ID: "name1",
-					Data: export.ItemData{
-						Name: "name1",
-						Body: io.NopCloser((bytes.NewBufferString("body1"))),
-					},
+					Name: "name1",
+					Body: io.NopCloser((bytes.NewBufferString("body1"))),
 				},
 			},
 		},
@@ -115,8 +82,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			name:    "multiple items",
 			version: 1,
 			backingCollection: data.NoFetchRestoreCollection{
-				Collection: mockRestoreCollection{
-					items: []*dataMock.Item{
+				Collection: dataMock.Collection{
+					ItemData: []*dataMock.Item{
 						{
 							ItemID: "name1",
 							Reader: io.NopCloser(bytes.NewBufferString("body1")),
@@ -130,18 +97,14 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			},
 			expectedItems: []export.Item{
 				{
 					ID: "name1",
-					Data: export.ItemData{
-						Name: "name1",
-						Body: io.NopCloser((bytes.NewBufferString("body1"))),
-					},
+					Name: "name1",
+					Body: io.NopCloser((bytes.NewBufferString("body1"))),
 				},
 				{
 					ID: "name2",
-					Data: export.ItemData{
-						Name: "name2",
-						Body: io.NopCloser((bytes.NewBufferString("body2"))),
-					},
+					Name: "name2",
+					Body: io.NopCloser((bytes.NewBufferString("body2"))),
 				},
 			},
 		},
@@ -149,8 +112,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			name:    "single item with data suffix",
 			version: 2,
 			backingCollection: data.NoFetchRestoreCollection{
-				Collection: mockRestoreCollection{
-					items: []*dataMock.Item{
+				Collection: dataMock.Collection{
+					ItemData: []*dataMock.Item{
 						{
 							ItemID: "name1.data",
 							Reader: io.NopCloser(bytes.NewBufferString("body1")),
@@ -160,11 +123,9 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			},
 			expectedItems: []export.Item{
 				{
 					ID: "name1.data",
-					Data: export.ItemData{
-						Name: "name1",
-						Body: io.NopCloser((bytes.NewBufferString("body1"))),
-					},
+					Name: "name1",
+					Body: io.NopCloser((bytes.NewBufferString("body1"))),
 				},
 			},
 		},
@@ -172,8 +133,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			name:    "single item name from metadata",
 			version: version.Backup,
 			backingCollection: data.FetchRestoreCollection{
-				Collection: mockRestoreCollection{
-					items: []*dataMock.Item{
+				Collection: dataMock.Collection{
+					ItemData: []*dataMock.Item{
 						{
 							ItemID: "id1.data",
 							Reader: io.NopCloser(bytes.NewBufferString("body1")),
@@ -184,11 +145,9 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			},
 			expectedItems: []export.Item{
 				{
 					ID: "id1.data",
-					Data: export.ItemData{
-						Name: "name1",
-						Body: io.NopCloser((bytes.NewBufferString("body1"))),
-					},
+					Name: "name1",
+					Body: io.NopCloser((bytes.NewBufferString("body1"))),
 				},
 			},
 		},
@@ -196,8 +155,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			name:    "single item name from metadata with error",
 			version: version.Backup,
 			backingCollection: data.FetchRestoreCollection{
-				Collection: mockRestoreCollection{
-					items: []*dataMock.Item{
+				Collection: dataMock.Collection{
+					ItemData: []*dataMock.Item{
 						{ItemID: "id1.data"},
 					},
 				},
@@ -214,8 +173,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			name:    "items with success and metadata read error",
 			version: version.Backup,
 			backingCollection: data.FetchRestoreCollection{
-				Collection: mockRestoreCollection{
-					items: []*dataMock.Item{
+				Collection: dataMock.Collection{
+					ItemData: []*dataMock.Item{
 						{
 							ItemID: "missing.data",
 						},
@@ -233,11 +192,9 @@ func (suite *ExportUnitSuite) TestGetItems() {
 					Error: assert.AnError,
 				},
 				{
 					ID: "id1.data",
-					Data: export.ItemData{
-						Name: "name1",
-						Body: io.NopCloser(bytes.NewBufferString("body1")),
-					},
+					Name: "name1",
+					Body: io.NopCloser(bytes.NewBufferString("body1")),
 				},
 			},
 		},
@@ -245,8 +202,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			name:    "items with success and fetch error",
 			version: version.OneDrive1DataAndMetaFiles,
 			backingCollection: data.FetchRestoreCollection{
-				Collection: mockRestoreCollection{
-					items: []*dataMock.Item{
+				Collection: dataMock.Collection{
+					ItemData: []*dataMock.Item{
 						{
 							ItemID: "name0",
 							Reader: io.NopCloser(bytes.NewBufferString("body0")),
@@ -264,18 +221,14 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			},
 			expectedItems: []export.Item{
 				{
 					ID: "name0",
-					Data: export.ItemData{
-						Name: "name0",
-						Body: io.NopCloser(bytes.NewBufferString("body0")),
-					},
+					Name: "name0",
+					Body: io.NopCloser(bytes.NewBufferString("body0")),
 				},
 				{
 					ID: "name2",
-					Data: export.ItemData{
-						Name: "name2",
-						Body: io.NopCloser(bytes.NewBufferString("body2")),
-					},
+					Name: "name2",
+					Body: io.NopCloser(bytes.NewBufferString("body2")),
 				},
 				{
 					ID: "",
@@ -292,7 +245,10 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			ctx, flush := tester.NewContext(t)
 			defer flush()
 
-			ec := drive.NewExportCollection("", test.backingCollection, test.version)
+			ec := drive.NewExportCollection(
+				"",
+				[]data.RestoreCollection{test.backingCollection},
+				test.version)
 
 			items := ec.Items(ctx)
 
@@ -308,8 +264,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			// to simplify testing.
 			for i, item := range fitems {
 				assert.Equal(t, test.expectedItems[i].ID, item.ID, "id")
-				assert.Equal(t, test.expectedItems[i].Data.Name, item.Data.Name, "name")
-				assert.Equal(t, test.expectedItems[i].Data.Body, item.Data.Body, "body")
+				assert.Equal(t, test.expectedItems[i].Name, item.Name, "name")
+				assert.Equal(t, test.expectedItems[i].Body, item.Body, "body")
 				assert.ErrorIs(t, item.Error, test.expectedItems[i].Error)
 			}
 		})
@@ -328,11 +284,9 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 		dii           = odStub.DriveItemInfo()
 		expectedItems = []export.Item{
 			{
 				ID: "id1.data",
-				Data: export.ItemData{
-					Name: "name1",
-					Body: io.NopCloser((bytes.NewBufferString("body1"))),
-				},
+				Name: "name1",
+				Body: io.NopCloser((bytes.NewBufferString("body1"))),
 			},
 		}
 	)
@@ -344,9 +298,9 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 
 	dcs := []data.RestoreCollection{
 		data.FetchRestoreCollection{
-			Collection: mockRestoreCollection{
-				path:  p,
-				items: []*dataMock.Item{
+			Collection: dataMock.Collection{
+				Path:     p,
+				ItemData: []*dataMock.Item{
 					{
 						ItemID: "id1.data",
 						Reader: io.NopCloser(bytes.NewBufferString("body1")),
@@ -27,10 +27,10 @@ func ProduceExportCollections(
 	backupDriveIDNames idname.CacheBuilder,
 	deets *details.Builder,
 	errs *fault.Bus,
-) ([]export.Collection, error) {
+) ([]export.Collectioner, error) {
 	var (
 		el = errs.Local()
-		ec = make([]export.Collection, 0, len(dcs))
+		ec = make([]export.Collectioner, 0, len(dcs))
 	)
 
 	for _, dc := range dcs {
@@ -51,7 +51,12 @@ func ProduceExportCollections(
 			Append(driveName).
 			Append(drivePath.Folders...)
 
-		ec = append(ec, drive.NewExportCollection(baseDir.String(), dc, backupVersion))
+		ec = append(
+			ec,
+			drive.NewExportCollection(
+				baseDir.String(),
+				[]data.RestoreCollection{dc},
+				backupVersion))
 	}
 
 	return ec, el.Failure()
@@ -20,7 +20,6 @@ import (
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/export"
 	"github.com/alcionai/corso/src/pkg/fault"
-	"github.com/alcionai/corso/src/pkg/path"
 )
 
 type ExportUnitSuite struct {
@@ -52,36 +51,6 @@ func (fd finD) FetchItemByName(ctx context.Context, name string) (data.Item, err
 	return nil, assert.AnError
 }
 
-type mockRestoreCollection struct {
-	path  path.Path
-	items []*dataMock.Item
-}
-
-func (rc mockRestoreCollection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
-	ch := make(chan data.Item)
-
-	go func() {
-		defer close(ch)
-
-		el := errs.Local()
-
-		for _, item := range rc.items {
-			if item.ReadErr != nil {
-				el.AddRecoverable(ctx, item.ReadErr)
-				continue
-			}
-
-			ch <- item
-		}
-	}()
-
-	return ch
-}
-
-func (rc mockRestoreCollection) FullPath() path.Path {
-	return rc.path
-}
-
 func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 	t := suite.T()
 
@@ -100,11 +69,9 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 		expectedPath  = "Libraries/" + driveName
 		expectedItems = []export.Item{
 			{
 				ID: "id1.data",
-				Data: export.ItemData{
-					Name: "name1",
-					Body: io.NopCloser((bytes.NewBufferString("body1"))),
-				},
+				Name: "name1",
+				Body: io.NopCloser((bytes.NewBufferString("body1"))),
 			},
 		}
 	)
@@ -116,9 +83,9 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 
 	dcs := []data.RestoreCollection{
 		data.FetchRestoreCollection{
-			Collection: mockRestoreCollection{
-				path:  p,
-				items: []*dataMock.Item{
+			Collection: dataMock.Collection{
+				Path:     p,
+				ItemData: []*dataMock.Item{
 					{
 						ItemID: "id1.data",
 						Reader: io.NopCloser(bytes.NewBufferString("body1")),
@@ -105,7 +105,7 @@ type exportStats struct {
 
 // Run begins a synchronous export operation.
 func (op *ExportOperation) Run(ctx context.Context) (
-	expColl []export.Collection,
+	expColl []export.Collectioner,
 	err error,
 ) {
 	defer func() {
@@ -199,7 +199,7 @@ func (op *ExportOperation) do(
 	opStats *exportStats,
 	detailsStore streamstore.Reader,
 	start time.Time,
-) ([]export.Collection, error) {
+) ([]export.Collectioner, error) {
 	logger.Ctx(ctx).
 		With("control_options", op.Options, "selectors", op.Selectors).
 		Info("exporting selection")
@@ -268,7 +268,7 @@ func (op *ExportOperation) do(
 		dcs,
 		op.Errors)
 	if err != nil {
-		return nil, clues.Wrap(err, "exporting collections")
+		return nil, clues.Stack(err)
 	}
 
 	opStats.ctrl = op.ec.Wait()
@@ -281,7 +281,7 @@ func (op *ExportOperation) do(
 		return nil, clues.Wrap(err, "zipping export collections")
 	}
 
-	return []export.Collection{zc}, nil
+	return []export.Collectioner{zc}, nil
 }
 
 	return expCollections, nil
@@ -334,7 +334,7 @@ func exportRestoreCollections(
 	opts control.Options,
 	dcs []data.RestoreCollection,
 	errs *fault.Bus,
-) ([]export.Collection, error) {
+) ([]export.Collectioner, error) {
 	complete := observe.MessageWithCompletion(ctx, "Preparing export")
 	defer func() {
 		complete <- struct{}{}
@ -29,15 +29,15 @@ import (
|
|||||||
"github.com/alcionai/corso/src/pkg/store"
|
"github.com/alcionai/corso/src/pkg/store"
|
||||||
)
|
)
|
||||||
|
|
||||||
type ExportOpSuite struct {
|
type ExportUnitSuite struct {
|
||||||
tester.Suite
|
tester.Suite
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestExportOpSuite(t *testing.T) {
|
func TestExportUnitSuite(t *testing.T) {
|
||||||
suite.Run(t, &ExportOpSuite{Suite: tester.NewUnitSuite(t)})
|
suite.Run(t, &ExportUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (suite *ExportOpSuite) TestExportOperation_PersistResults() {
|
func (suite *ExportUnitSuite) TestExportOperation_PersistResults() {
|
||||||
var (
|
var (
|
||||||
kw = &kopia.Wrapper{}
|
kw = &kopia.Wrapper{}
|
||||||
sw = store.NewWrapper(&kopia.ModelStore{})
|
sw = store.NewWrapper(&kopia.ModelStore{})
|
||||||
@@ -163,56 +163,57 @@ func (r *ReadSeekCloser) Close() error {
 	return nil
 }
 
-func (suite *ExportOpSuite) TestZipExports() {
+func (suite *ExportUnitSuite) TestZipExports() {
 	table := []struct {
 		name string
-		collection []export.Collection
-		shouldErr bool
-		readErr bool
+		inputColls []export.Collectioner
+		expectZipErr assert.ErrorAssertionFunc
+		expectReadErr assert.ErrorAssertionFunc
 	}{
 		{
 			name: "nothing",
-			collection: []export.Collection{},
-			shouldErr: true,
+			inputColls: []export.Collectioner{},
+			expectZipErr: assert.Error,
+			expectReadErr: assert.NoError,
 		},
 		{
 			name: "empty",
-			collection: []export.Collection{
+			inputColls: []export.Collectioner{
 				expCol{
 					base: "",
 					items: []export.Item{},
 				},
 			},
+			expectZipErr: assert.NoError,
+			expectReadErr: assert.NoError,
 		},
 		{
 			name: "one item",
-			collection: []export.Collection{
+			inputColls: []export.Collectioner{
 				expCol{
 					base: "",
 					items: []export.Item{
 						{
 							ID: "id1",
-							Data: export.ItemData{
-								Name: "test",
-								Body: NewReadSeekCloser([]byte("test")),
-							},
+							Name: "test",
+							Body: NewReadSeekCloser([]byte("test")),
 						},
 					},
 				},
 			},
+			expectZipErr: assert.NoError,
+			expectReadErr: assert.NoError,
 		},
 		{
 			name: "multiple items",
-			collection: []export.Collection{
+			inputColls: []export.Collectioner{
 				expCol{
 					base: "",
 					items: []export.Item{
 						{
 							ID: "id1",
-							Data: export.ItemData{
-								Name: "test",
-								Body: NewReadSeekCloser([]byte("test")),
-							},
+							Name: "test",
+							Body: NewReadSeekCloser([]byte("test")),
 						},
 					},
 				},
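The table refactor above replaces the `shouldErr`/`readErr` booleans with testify's `assert.ErrorAssertionFunc`, so each case carries its assertion instead of branching on a flag. A standalone sketch of the pattern:

```go
package example

import (
	"errors"
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestErrorAssertions(t *testing.T) {
	table := []struct {
		name      string
		err       error
		expectErr assert.ErrorAssertionFunc
	}{
		{name: "no error", err: nil, expectErr: assert.NoError},
		{name: "some error", err: errors.New("boom"), expectErr: assert.Error},
	}

	for _, test := range table {
		t.Run(test.name, func(t *testing.T) {
			// assert.Error and assert.NoError both satisfy
			// assert.ErrorAssertionFunc, so no if/else is needed here.
			test.expectErr(t, test.err)
		})
	}
}
```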
@@ -220,19 +221,19 @@ func (suite *ExportOpSuite) TestZipExports() {
 					base: "/fold",
 					items: []export.Item{
 						{
 							ID: "id2",
-							Data: export.ItemData{
-								Name: "test2",
-								Body: NewReadSeekCloser([]byte("test2")),
-							},
+							Name: "test2",
+							Body: NewReadSeekCloser([]byte("test2")),
 						},
 					},
 				},
 			},
+			expectZipErr: assert.NoError,
+			expectReadErr: assert.NoError,
 		},
 		{
 			name: "one item with err",
-			collection: []export.Collection{
+			inputColls: []export.Collectioner{
 				expCol{
 					base: "",
 					items: []export.Item{
@@ -243,7 +244,8 @@ func (suite *ExportOpSuite) TestZipExports() {
 					},
 				},
 			},
-			readErr: true,
+			expectZipErr: assert.NoError,
+			expectReadErr: assert.Error,
 		},
 	}
 
@@ -254,46 +256,46 @@ func (suite *ExportOpSuite) TestZipExports() {
 			ctx, flush := tester.NewContext(t)
 			defer flush()
 
-			zc, err := archive.ZipExportCollection(ctx, test.collection)
+			zc, err := archive.ZipExportCollection(ctx, test.inputColls)
+			test.expectZipErr(t, err, clues.ToCore(err))
 
-			if test.shouldErr {
-				assert.Error(t, err, "error")
+			if err != nil {
 				return
 			}
 
-			require.NoError(t, err, "error")
 			assert.Empty(t, zc.BasePath(), "base path")
 
-			zippedItems := []export.ItemData{}
+			zippedItems := []export.Item{}
 
 			count := 0
 			for item := range zc.Items(ctx) {
-				assert.True(t, strings.HasPrefix(item.Data.Name, "Corso_Export_"), "name prefix")
-				assert.True(t, strings.HasSuffix(item.Data.Name, ".zip"), "name suffix")
+				assert.True(t, strings.HasPrefix(item.Name, "Corso_Export_"), "name prefix")
+				assert.True(t, strings.HasSuffix(item.Name, ".zip"), "name suffix")
 
-				data, err := io.ReadAll(item.Data.Body)
-				if test.readErr {
-					assert.Error(t, err, "read error")
+				data, err := io.ReadAll(item.Body)
+				test.expectReadErr(t, err, clues.ToCore(err))
+
+				if err != nil {
 					return
 				}
 
-				size := int64(len(data))
+				assert.NotEmpty(t, item.Name, "item name")
 
-				item.Data.Body.Close()
+				item.Body.Close()
 
-				reader, err := zip.NewReader(bytes.NewReader(data), size)
-				require.NoError(t, err, "zip reader")
+				reader, err := zip.NewReader(bytes.NewReader(data), int64(len(data)))
+				require.NoError(t, err, clues.ToCore(err))
 
 				for _, f := range reader.File {
 					rc, err := f.Open()
-					assert.NoError(t, err, "open file in zip")
+					assert.NoError(t, err, clues.ToCore(err))
 
 					data, err := io.ReadAll(rc)
-					require.NoError(t, err, "read zip file content")
+					require.NoError(t, err, clues.ToCore(err))
 
 					rc.Close()
 
-					zippedItems = append(zippedItems, export.ItemData{
+					zippedItems = append(zippedItems, export.Item{
 						Name: f.Name,
 						Body: NewReadSeekCloser([]byte(data)),
 					})
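The read-back loop above leans on a quirk of `archive/zip`: the reader needs random access plus the total archive size, hence `zip.NewReader(bytes.NewReader(data), int64(len(data)))` rather than a plain stream. A self-contained round-trip sketch using only the standard library:

```go
package main

import (
	"archive/zip"
	"bytes"
	"fmt"
	"io"
)

func main() {
	buf := new(bytes.Buffer)

	zw := zip.NewWriter(buf)
	w, _ := zw.Create("fold/test.txt")
	io.WriteString(w, "test")
	zw.Close() // writes the central directory; required before reading

	data := buf.Bytes()

	// zip.NewReader needs an io.ReaderAt and the archive size, not a stream.
	zr, err := zip.NewReader(bytes.NewReader(data), int64(len(data)))
	if err != nil {
		panic(err)
	}

	for _, f := range zr.File {
		rc, _ := f.Open()
		body, _ := io.ReadAll(rc)
		rc.Close()
		fmt.Printf("%s: %s\n", f.Name, body)
	}
}
```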
@@ -304,17 +306,28 @@ func (suite *ExportOpSuite) TestZipExports() {
 
 			assert.Equal(t, 1, count, "single item")
 
-			expectedZippedItems := []export.ItemData{}
-			for _, col := range test.collection {
+			expectedZippedItems := []export.Item{}
+
+			for _, col := range test.inputColls {
 				for item := range col.Items(ctx) {
-					if col.BasePath() != "" {
-						item.Data.Name = strings.Join([]string{col.BasePath(), item.Data.Name}, "/")
+					expected := export.Item{
+						Name: item.Name,
+						Body: item.Body,
 					}
-					_, err := item.Data.Body.(io.ReadSeeker).Seek(0, io.SeekStart)
-					require.NoError(t, err, "seek")
-					expectedZippedItems = append(expectedZippedItems, item.Data)
+
+					if col.BasePath() != "" {
+						expected.Name = strings.Join([]string{col.BasePath(), item.Name}, "/")
+					}
+
+					_, err := expected.Body.(io.ReadSeeker).Seek(0, io.SeekStart)
+					require.NoError(t, err, clues.ToCore(err))
+
+					expected.ID = ""
+
+					expectedZippedItems = append(expectedZippedItems, expected)
 				}
 			}
 
 			assert.Equal(t, expectedZippedItems, zippedItems, "items")
 		})
 	}
@@ -74,7 +74,7 @@ type (
 		opts control.Options,
 		dcs []data.RestoreCollection,
 		errs *fault.Bus,
-	) ([]export.Collection, error)
+	) ([]export.Collectioner, error)
 
 	Wait() *data.CollectionStats
 
@@ -15,7 +15,7 @@ import (
 func ConsumeExportCollections(
 	ctx context.Context,
 	exportLocation string,
-	expColl []Collection,
+	expColl []Collectioner,
 	errs *fault.Bus,
 ) error {
 	el := errs.Local()
@@ -36,7 +36,7 @@ func ConsumeExportCollections(
 			if err := writeItem(ictx, item, folder); err != nil {
 				el.AddRecoverable(
 					ictx,
-					clues.Wrap(err, "writing item").With("file_name", item.Data.Name).WithClues(ctx))
+					clues.Wrap(err, "writing item").With("file_name", item.Name).WithClues(ctx))
 			}
 		}
 	}
@@ -46,16 +46,16 @@ func ConsumeExportCollections(
 
 // writeItem writes an ExportItem to disk in the specified folder.
 func writeItem(ctx context.Context, item Item, folder string) error {
-	name := item.Data.Name
+	name := item.Name
 	fpath := filepath.Join(folder, name)
 
 	progReader, pclose := observe.ItemSpinner(
 		ctx,
-		item.Data.Body,
+		item.Body,
 		observe.ItemExportMsg,
 		clues.Hide(name))
 
-	defer item.Data.Body.Close()
+	defer item.Body.Close()
 	defer pclose()
 
 	err := os.MkdirAll(folder, os.ModePerm)
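A hypothetical caller sketch for the updated `ConsumeExportCollections` signature shown above; the destination path, the empty collection slice, and the `fault.New(false)` bus constructor are illustrative assumptions, not code from this PR:

```go
package main

import (
	"context"
	"fmt"

	"github.com/alcionai/corso/src/pkg/export"
	"github.com/alcionai/corso/src/pkg/fault"
)

func main() {
	ctx := context.Background()

	// assumption: the usual bus constructor; recoverable per-item write
	// failures accumulate on the bus instead of aborting the export.
	errs := fault.New(false)

	// In practice this slice comes back from an export operation.
	var colls []export.Collectioner

	if err := export.ConsumeExportCollections(ctx, "/tmp/corso-export", colls, errs); err != nil {
		fmt.Println("export failed:", err)
	}
}
```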
@@ -132,15 +132,13 @@ func (suite *ExportE2ESuite) TestConsumeExportCollection() {
 			ctx, flush := tester.NewContext(t)
 			defer flush()
 
-			ecs := []Collection{}
+			ecs := []Collectioner{}
 			for _, col := range test.cols {
 				items := []Item{}
 				for _, item := range col.items {
 					items = append(items, Item{
-						Data: ItemData{
-							Name: item.name,
-							Body: io.NopCloser((bytes.NewBufferString(item.body))),
-						},
+						Name: item.name,
+						Body: io.NopCloser((bytes.NewBufferString(item.body))),
 					})
 				}
 
@@ -3,10 +3,16 @@ package export
 import (
 	"context"
 	"io"
+
+	"github.com/alcionai/corso/src/internal/data"
 )
 
-// Collection is the interface that is returned to the SDK consumer
-type Collection interface {
+// ---------------------------------------------------------------------------
+// Collections
+// ---------------------------------------------------------------------------
+
+// Collectioner is the interface that is returned to the SDK consumer
+type Collectioner interface {
 	// BasePath gets the base path of the collection. This is derived
 	// from FullPath, but trims out things like the drive id or any other part
 	// that is not needed to show the path to the collection.
@@ -16,17 +22,36 @@ type Collection interface {
 	Items(context.Context) <-chan Item
 }
 
-// ItemData is the data for an individual item.
-type ItemData struct {
-	// Name is the name of the item. This is the name that the item
-	// would have had in the service.
-	Name string
-
-	// Body is the body of the item. This is an io.ReadCloser and the
-	// SDK consumer is responsible for closing it.
-	Body io.ReadCloser
+// BaseCollection holds the foundational details of an export collection.
+type BaseCollection struct {
+	// BaseDir contains the destination path of the collection.
+	BaseDir string
+
+	// BackingCollection is the restore collection from which we will
+	// create the export collection.
+	BackingCollection []data.RestoreCollection
+
+	// BackupVersion is the backup version of the data source.
+	BackupVersion int
+
+	Stream func(context.Context, []data.RestoreCollection, int, chan<- Item)
 }
+
+func (bc BaseCollection) BasePath() string {
+	return bc.BaseDir
+}
+
+func (bc BaseCollection) Items(ctx context.Context) <-chan Item {
+	ch := make(chan Item)
+	go bc.Stream(ctx, bc.BackingCollection, bc.BackupVersion, ch)
+
+	return ch
+}
+
+// ---------------------------------------------------------------------------
+// Items
+// ---------------------------------------------------------------------------
 
 // Item is the item that is returned to the SDK consumer
 type Item struct {
 	// ID will be a unique id for the item. This is the same as the id
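To make the new `BaseCollection` concrete: a service backend only supplies a `Stream` func and inherits `BasePath`/`Items` for free. A toy sketch using just the fields and methods added above; the stream contents and the channel-closing responsibility are assumptions for illustration:

```go
package main

import (
	"context"
	"fmt"
	"io"
	"strings"

	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/pkg/export"
)

// toyStream ignores the backing collections and emits one static item; a
// real service would translate restore items into export items here.
func toyStream(
	ctx context.Context,
	_ []data.RestoreCollection,
	_ int,
	ch chan<- export.Item,
) {
	defer close(ch) // assumption: the stream func owns closing the channel

	ch <- export.Item{
		ID:   "id1",
		Name: "hello.txt",
		Body: io.NopCloser(strings.NewReader("hello")),
	}
}

func main() {
	col := export.BaseCollection{
		BaseDir: "groups/demo",
		Stream:  toyStream,
	}

	// BasePath and Items come from BaseCollection; no per-service plumbing.
	for item := range col.Items(context.Background()) {
		fmt.Println(col.BasePath(), item.Name)
	}
}
```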
@@ -34,10 +59,13 @@ type Item struct {
 	// mostly used just for tracking.
 	ID string
 
-	// Data contains the actual data of the item. It will have both
-	// the name of the item and an io.ReadCloser which contains the
-	// body of the item.
-	Data ItemData
+	// Name is the name of the item. This is the name that the item
+	// would have had in the service.
+	Name string
+
+	// Body is the body of the item. This is an io.ReadCloser and the
+	// SDK consumer is responsible for closing it.
+	Body io.ReadCloser
 
 	// Error will contain any error that happened while trying to get
 	// the item/items like when trying to resolve the name of the item.
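And a tiny consumer-side sketch of the flattened `Item`: `Name` and `Body` now hang directly off the struct rather than a nested `ItemData`, and closing `Body` remains the consumer's job per the doc comment. The values here are illustrative only:

```go
package main

import (
	"fmt"
	"io"
	"strings"

	"github.com/alcionai/corso/src/pkg/export"
)

func main() {
	item := export.Item{
		ID:   "id1",
		Name: "report.txt",
		Body: io.NopCloser(strings.NewReader("contents")),
	}
	defer item.Body.Close() // the consumer closes Body

	body, err := io.ReadAll(item.Body)
	if err != nil {
		panic(err)
	}

	fmt.Printf("%s (%d bytes)\n", item.Name, len(body))
}
```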