From 040257f8be95f924fce3adcb1159422a1f22b45c Mon Sep 17 00:00:00 2001 From: Abin Simon Date: Wed, 11 Oct 2023 11:24:17 +0530 Subject: [PATCH] Add stats to export operations (#4461) Have a way to gather stats about the exported data. Users can now call `ExportOperation.GetStats()` at the end of the run to get the stats for the operations. The data will be in the format `map[path.CategoryType]data.KindStats` where `KindStats` is: ```go type KindStats struct { BytesRead int64 ResourceCount int64 } ``` --- #### Does this PR need a docs update or release note? - [ ] :white_check_mark: Yes, it's included - [ ] :clock1: Yes, but in a later PR - [x] :no_entry: No #### Type of change - [x] :sunflower: Feature - [ ] :bug: Bugfix - [ ] :world_map: Documentation - [ ] :robot: Supportability/Tests - [ ] :computer: CI/Deployment - [ ] :broom: Tech Debt/Cleanup #### Issue(s) * https://github.com/alcionai/corso/issues/4311 #### Test Plan - [ ] :muscle: Manual - [x] :zap: Unit test - [ ] :green_heart: E2E --- CHANGELOG.md | 1 + src/cli/export/export.go | 10 +++ src/internal/data/metrics.go | 72 +++++++++++++++++++ src/internal/m365/collection/drive/export.go | 17 ++++- src/internal/m365/collection/groups/export.go | 7 ++ .../m365/collection/groups/export_test.go | 3 +- src/internal/m365/export.go | 4 ++ src/internal/m365/mock/connector.go | 1 + src/internal/m365/service/groups/export.go | 7 +- .../m365/service/groups/export_test.go | 39 +++++++++- src/internal/m365/service/onedrive/export.go | 4 +- .../m365/service/onedrive/export_test.go | 52 +++++++++++++- .../m365/service/sharepoint/export.go | 4 +- .../m365/service/sharepoint/export_test.go | 19 +++++ src/internal/operations/export.go | 20 +++++- src/internal/operations/inject/inject.go | 1 + src/pkg/export/export.go | 7 +- 17 files changed, 255 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f7dc82166..5ce255cf4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ and this project adheres
to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added - Skips graph calls for expired item download URLs. +- Export operation now shows the stats at the end of the run ### Fixed - Catch and report cases where a protected resource is locked out of access. SDK consumers have a new errs sentinel that allows them to check for this case. diff --git a/src/cli/export/export.go b/src/cli/export/export.go index 8415caea3..aeaf8f3e7 100644 --- a/src/cli/export/export.go +++ b/src/cli/export/export.go @@ -5,6 +5,7 @@ import ( "errors" "github.com/alcionai/clues" + "github.com/dustin/go-humanize" "github.com/spf13/cobra" "github.com/alcionai/corso/src/cli/flags" @@ -110,5 +111,14 @@ func runExport( return Only(ctx, err) } + stats := eo.GetStats() + if len(stats) > 0 { + Infof(ctx, "\nExport details") + } + + for k, s := range stats { + Infof(ctx, "%s: %d items (%s)", k.HumanString(), s.ResourceCount, humanize.Bytes(uint64(s.BytesRead))) + } + return nil } diff --git a/src/internal/data/metrics.go b/src/internal/data/metrics.go index f34d20a16..e07ad584e 100644 --- a/src/internal/data/metrics.go +++ b/src/internal/data/metrics.go @@ -1,5 +1,12 @@ package data +import ( + "io" + "sync/atomic" + + "github.com/alcionai/corso/src/pkg/path" +) + type CollectionStats struct { Folders, Objects, @@ -15,3 +22,68 @@ func (cs CollectionStats) IsZero() bool { func (cs CollectionStats) String() string { return cs.Details } + +type KindStats struct { + BytesRead int64 + ResourceCount int64 +} + +type ExportStats struct { + // data is kept private so that we can enforce atomic int updates + data map[path.CategoryType]KindStats +} + +func (es *ExportStats) UpdateBytes(kind path.CategoryType, bytesRead int64) { + if es.data == nil { + es.data = map[path.CategoryType]KindStats{} + } + + ks := es.data[kind] + atomic.AddInt64(&ks.BytesRead, bytesRead) + es.data[kind] = ks +} + +func (es *ExportStats) UpdateResourceCount(kind path.CategoryType) { + if es.data == nil { + es.data = 
map[path.CategoryType]KindStats{} + } + + ks := es.data[kind] + atomic.AddInt64(&ks.ResourceCount, 1) + es.data[kind] = ks +} + +func (es *ExportStats) GetStats() map[path.CategoryType]KindStats { + return es.data +} + +type statsReader struct { + io.ReadCloser + kind path.CategoryType + stats *ExportStats +} + +func (sr *statsReader) Read(p []byte) (int, error) { + n, err := sr.ReadCloser.Read(p) + sr.stats.UpdateBytes(sr.kind, int64(n)) + + return n, err +} + +// Create a function that will take a reader and return a reader that +// will update the stats +func ReaderWithStats( + reader io.ReadCloser, + kind path.CategoryType, + stats *ExportStats, +) io.ReadCloser { + if reader == nil { + return nil + } + + return &statsReader{ + ReadCloser: reader, + kind: kind, + stats: stats, + } +} diff --git a/src/internal/m365/collection/drive/export.go b/src/internal/m365/collection/drive/export.go index 6c2200854..d21c950ff 100644 --- a/src/internal/m365/collection/drive/export.go +++ b/src/internal/m365/collection/drive/export.go @@ -12,18 +12,21 @@ import ( "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/export" "github.com/alcionai/corso/src/pkg/fault" + "github.com/alcionai/corso/src/pkg/path" ) func NewExportCollection( baseDir string, backingCollection []data.RestoreCollection, backupVersion int, + stats *data.ExportStats, ) export.Collectioner { return export.BaseCollection{ BaseDir: baseDir, BackingCollection: backingCollection, BackupVersion: backupVersion, Stream: streamItems, + Stats: stats, } } @@ -34,6 +37,7 @@ func streamItems( backupVersion int, cec control.ExportConfig, ch chan<- export.Item, + stats *data.ExportStats, ) { defer close(ch) @@ -47,11 +51,22 @@ func streamItems( } name, err := getItemName(ctx, itemUUID, backupVersion, rc) + if err != nil { + ch <- export.Item{ + ID: itemUUID, + Error: err, + } + + continue + } + + stats.UpdateResourceCount(path.FilesCategory) + body := data.ReaderWithStats(item.ToReader(), 
path.FilesCategory, stats) ch <- export.Item{ ID: itemUUID, Name: name, - Body: item.ToReader(), + Body: body, Error: err, } } diff --git a/src/internal/m365/collection/groups/export.go b/src/internal/m365/collection/groups/export.go index ecc0a3410..590bacd48 100644 --- a/src/internal/m365/collection/groups/export.go +++ b/src/internal/m365/collection/groups/export.go @@ -15,6 +15,7 @@ import ( "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/export" "github.com/alcionai/corso/src/pkg/fault" + "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/services/m365/api" ) @@ -23,6 +24,7 @@ func NewExportCollection( backingCollections []data.RestoreCollection, backupVersion int, cec control.ExportConfig, + stats *data.ExportStats, ) export.Collectioner { return export.BaseCollection{ BaseDir: baseDir, @@ -30,6 +32,7 @@ func NewExportCollection( BackupVersion: backupVersion, Cfg: cec, Stream: streamItems, + Stats: stats, } } @@ -40,6 +43,7 @@ func streamItems( backupVersion int, cec control.ExportConfig, ch chan<- export.Item, + stats *data.ExportStats, ) { defer close(ch) @@ -54,6 +58,9 @@ func streamItems( Error: err, } } else { + stats.UpdateResourceCount(path.ChannelMessagesCategory) + body = data.ReaderWithStats(body, path.ChannelMessagesCategory, stats) + ch <- export.Item{ ID: item.ID(), // channel message items have no name diff --git a/src/internal/m365/collection/groups/export_test.go b/src/internal/m365/collection/groups/export_test.go index a98ca7aba..34430ba77 100644 --- a/src/internal/m365/collection/groups/export_test.go +++ b/src/internal/m365/collection/groups/export_test.go @@ -90,7 +90,8 @@ func (suite *ExportUnitSuite) TestStreamItems() { []data.RestoreCollection{test.backingColl}, version.NoBackup, control.DefaultExportConfig(), - ch) + ch, + &data.ExportStats{}) var ( itm export.Item diff --git a/src/internal/m365/export.go b/src/internal/m365/export.go index ab7a94ceb..ddf512611 100644 --- 
a/src/internal/m365/export.go +++ b/src/internal/m365/export.go @@ -27,6 +27,7 @@ func (ctrl *Controller) ProduceExportCollections( exportCfg control.ExportConfig, opts control.Options, dcs []data.RestoreCollection, + stats *data.ExportStats, errs *fault.Bus, ) ([]export.Collectioner, error) { ctx, end := diagnostics.Span(ctx, "m365:export") @@ -51,6 +52,7 @@ func (ctrl *Controller) ProduceExportCollections( opts, dcs, deets, + stats, errs) case selectors.ServiceSharePoint: expCollections, err = sharepoint.ProduceExportCollections( @@ -61,6 +63,7 @@ func (ctrl *Controller) ProduceExportCollections( dcs, ctrl.backupDriveIDNames, deets, + stats, errs) case selectors.ServiceGroups: expCollections, err = groups.ProduceExportCollections( @@ -72,6 +75,7 @@ func (ctrl *Controller) ProduceExportCollections( ctrl.backupDriveIDNames, ctrl.backupSiteIDWebURL, deets, + stats, errs) default: diff --git a/src/internal/m365/mock/connector.go b/src/internal/m365/mock/connector.go index ed04f1d3e..e10a48819 100644 --- a/src/internal/m365/mock/connector.go +++ b/src/internal/m365/mock/connector.go @@ -90,6 +90,7 @@ func (ctrl Controller) ProduceExportCollections( _ control.ExportConfig, _ control.Options, _ []data.RestoreCollection, + _ *data.ExportStats, _ *fault.Bus, ) ([]export.Collectioner, error) { return nil, ctrl.Err diff --git a/src/internal/m365/service/groups/export.go b/src/internal/m365/service/groups/export.go index f4345d0ba..09b0fbf92 100644 --- a/src/internal/m365/service/groups/export.go +++ b/src/internal/m365/service/groups/export.go @@ -29,6 +29,7 @@ func ProduceExportCollections( backupDriveIDNames idname.Cacher, backupSiteIDWebURL idname.Cacher, deets *details.Builder, + stats *data.ExportStats, errs *fault.Bus, ) ([]export.Collectioner, error) { var ( @@ -52,7 +53,8 @@ func ProduceExportCollections( path.Builder{}.Append(folders...).String(), []data.RestoreCollection{restoreColl}, backupVersion, - exportCfg) + exportCfg, + stats) case path.LibrariesCategory: 
drivePath, err := path.ToDrivePath(restoreColl.FullPath()) if err != nil { @@ -91,7 +93,8 @@ func ProduceExportCollections( coll = drive.NewExportCollection( baseDir.String(), []data.RestoreCollection{restoreColl}, - backupVersion) + backupVersion, + stats) default: el.AddRecoverable( ctx, diff --git a/src/internal/m365/service/groups/export_test.go b/src/internal/m365/service/groups/export_test.go index ffcc54c9b..bc633cde3 100644 --- a/src/internal/m365/service/groups/export_test.go +++ b/src/internal/m365/service/groups/export_test.go @@ -7,6 +7,7 @@ import ( "strings" "testing" + "github.com/alcionai/clues" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" @@ -64,8 +65,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() { itemID = "itemID" containerName = "channelID" dii = groupMock.ItemInfo() - body = io.NopCloser(bytes.NewBufferString( - `{"displayname": "` + dii.Groups.ItemName + `"}`)) + content = `{"displayname": "` + dii.Groups.ItemName + `"}` + body = io.NopCloser(bytes.NewBufferString(content)) exportCfg = control.ExportConfig{} expectedPath = path.ChannelMessagesCategory.HumanString() + "/" + containerName expectedItems = []export.Item{ @@ -96,6 +97,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() { }, } + stats := data.ExportStats{} + ecs, err := ProduceExportCollections( ctx, int(version.Backup), @@ -105,6 +108,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() { nil, nil, nil, + &stats, fault.New(true)) assert.NoError(t, err, "export collections error") assert.Len(t, ecs, 1, "num of collections") @@ -113,7 +117,15 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() { fitems := []export.Item{} + size := 0 + for item := range ecs[0].Items(ctx) { + b, err := io.ReadAll(item.Body) + assert.NoError(t, err, clues.ToCore(err)) + + // count up size for tests + size += len(b) + // have to nil out body, otherwise assert fails due to // pointer 
memory location differences item.Body = nil @@ -121,6 +133,11 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() { } assert.Equal(t, expectedItems, fitems, "items") + + expectedStats := data.ExportStats{} + expectedStats.UpdateBytes(path.ChannelMessagesCategory, int64(size)) + expectedStats.UpdateResourceCount(path.ChannelMessagesCategory) + assert.Equal(t, expectedStats, stats, "stats") } func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() { @@ -182,6 +199,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() { }, } + stats := data.ExportStats{} + ecs, err := ProduceExportCollections( ctx, int(version.Backup), @@ -191,6 +210,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() { driveNameCache, siteWebURLCache, nil, + &stats, fault.New(true)) assert.NoError(t, err, "export collections error") assert.Len(t, ecs, 1, "num of collections") @@ -199,9 +219,24 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() { fitems := []export.Item{} + size := 0 + for item := range ecs[0].Items(ctx) { + // unwrap the body from stats reader + b, err := io.ReadAll(item.Body) + assert.NoError(t, err, clues.ToCore(err)) + + size += len(b) + bitem := io.NopCloser(bytes.NewBuffer(b)) + item.Body = bitem + fitems = append(fitems, item) } assert.Equal(t, expectedItems, fitems, "items") + + expectedStats := data.ExportStats{} + expectedStats.UpdateBytes(path.FilesCategory, int64(size)) + expectedStats.UpdateResourceCount(path.FilesCategory) + assert.Equal(t, expectedStats, stats, "stats") } diff --git a/src/internal/m365/service/onedrive/export.go b/src/internal/m365/service/onedrive/export.go index 48df5e7ce..9b985b608 100644 --- a/src/internal/m365/service/onedrive/export.go +++ b/src/internal/m365/service/onedrive/export.go @@ -23,6 +23,7 @@ func ProduceExportCollections( opts control.Options, dcs []data.RestoreCollection, deets *details.Builder, + stats *data.ExportStats, errs 
*fault.Bus, ) ([]export.Collectioner, error) { var ( @@ -43,7 +44,8 @@ func ProduceExportCollections( drive.NewExportCollection( baseDir.String(), []data.RestoreCollection{dc}, - backupVersion)) + backupVersion, + stats)) } return ec, el.Failure() diff --git a/src/internal/m365/service/onedrive/export_test.go b/src/internal/m365/service/onedrive/export_test.go index 7ff9ea069..9d941cf3b 100644 --- a/src/internal/m365/service/onedrive/export_test.go +++ b/src/internal/m365/service/onedrive/export_test.go @@ -6,6 +6,7 @@ import ( "io" "testing" + "github.com/alcionai/clues" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" @@ -19,6 +20,7 @@ import ( "github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/export" "github.com/alcionai/corso/src/pkg/fault" + "github.com/alcionai/corso/src/pkg/path" ) type ExportUnitSuite struct { @@ -245,15 +247,32 @@ func (suite *ExportUnitSuite) TestGetItems() { ctx, flush := tester.NewContext(t) defer flush() + stats := data.ExportStats{} ec := drive.NewExportCollection( "", []data.RestoreCollection{test.backingCollection}, - test.version) + test.version, + &stats) items := ec.Items(ctx) + count := 0 + size := 0 fitems := []export.Item{} + for item := range items { + if item.Error == nil { + count++ + } + + if item.Body != nil { + b, err := io.ReadAll(item.Body) + assert.NoError(t, err, clues.ToCore(err)) + + size += len(b) + item.Body = io.NopCloser(bytes.NewBuffer(b)) + } + fitems = append(fitems, item) } @@ -268,6 +287,19 @@ func (suite *ExportUnitSuite) TestGetItems() { assert.Equal(t, test.expectedItems[i].Body, item.Body, "body") assert.ErrorIs(t, item.Error, test.expectedItems[i].Error) } + + var expectedStats data.ExportStats + + if size+count > 0 { // it is only initialized if we have something + expectedStats = data.ExportStats{} + expectedStats.UpdateBytes(path.FilesCategory, int64(size)) + + for i := 0; i < count; i++ { + expectedStats.UpdateResourceCount(path.FilesCategory) + 
} + } + + assert.Equal(t, expectedStats, stats, "stats") }) } } @@ -312,6 +344,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() { }, } + stats := data.ExportStats{} + ecs, err := ProduceExportCollections( ctx, int(version.Backup), @@ -319,14 +353,30 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() { control.DefaultOptions(), dcs, nil, + &stats, fault.New(true)) assert.NoError(t, err, "export collections error") assert.Len(t, ecs, 1, "num of collections") fitems := []export.Item{} + size := 0 + for item := range ecs[0].Items(ctx) { + // unwrap the body from stats reader + b, err := io.ReadAll(item.Body) + assert.NoError(t, err, clues.ToCore(err)) + + size += len(b) + bitem := io.NopCloser(bytes.NewBuffer(b)) + item.Body = bitem + fitems = append(fitems, item) } assert.Equal(t, expectedItems, fitems, "items") + + expectedStats := data.ExportStats{} + expectedStats.UpdateBytes(path.FilesCategory, int64(size)) + expectedStats.UpdateResourceCount(path.FilesCategory) + assert.Equal(t, expectedStats, stats, "stats") } diff --git a/src/internal/m365/service/sharepoint/export.go b/src/internal/m365/service/sharepoint/export.go index 7eb840bb7..eb52647cd 100644 --- a/src/internal/m365/service/sharepoint/export.go +++ b/src/internal/m365/service/sharepoint/export.go @@ -26,6 +26,7 @@ func ProduceExportCollections( dcs []data.RestoreCollection, backupDriveIDNames idname.CacheBuilder, deets *details.Builder, + stats *data.ExportStats, errs *fault.Bus, ) ([]export.Collectioner, error) { var ( @@ -56,7 +57,8 @@ func ProduceExportCollections( drive.NewExportCollection( baseDir.String(), []data.RestoreCollection{dc}, - backupVersion)) + backupVersion, + stats)) } return ec, el.Failure() diff --git a/src/internal/m365/service/sharepoint/export_test.go b/src/internal/m365/service/sharepoint/export_test.go index 6becb725a..6de83ab7f 100644 --- a/src/internal/m365/service/sharepoint/export_test.go +++ b/src/internal/m365/service/sharepoint/export_test.go 
@@ -7,6 +7,7 @@ import ( "strings" "testing" + "github.com/alcionai/clues" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" @@ -98,6 +99,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() { }, } + stats := data.ExportStats{} + ecs, err := ProduceExportCollections( ctx, int(version.Backup), @@ -106,6 +109,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() { dcs, cache, nil, + &stats, fault.New(true)) assert.NoError(t, err, "export collections error") assert.Len(t, ecs, 1, "num of collections") @@ -113,9 +117,24 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() { assert.Equal(t, expectedPath, ecs[0].BasePath(), "base dir") fitems := []export.Item{} + size := 0 + for item := range ecs[0].Items(ctx) { + // unwrap the body from stats reader + b, err := io.ReadAll(item.Body) + assert.NoError(t, err, clues.ToCore(err)) + + size += len(b) + bitem := io.NopCloser(bytes.NewBuffer(b)) + item.Body = bitem + fitems = append(fitems, item) } assert.Equal(t, expectedItems, fitems, "items") + + expectedStats := data.ExportStats{} + expectedStats.UpdateBytes(path.FilesCategory, int64(size)) + expectedStats.UpdateResourceCount(path.FilesCategory) + assert.Equal(t, expectedStats, stats, "stats") } diff --git a/src/internal/operations/export.go b/src/internal/operations/export.go index fe807d25b..74fc1a44f 100644 --- a/src/internal/operations/export.go +++ b/src/internal/operations/export.go @@ -27,6 +27,7 @@ import ( "github.com/alcionai/corso/src/pkg/export" "github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/logger" + "github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/store" ) @@ -46,6 +47,7 @@ type ExportOperation struct { Selectors selectors.Selector ExportCfg control.ExportConfig Version string + stats data.ExportStats acct account.Account ec inject.ExportConsumer @@ -72,6 +74,7 @@ func NewExportOperation( Selectors: 
sel, Version: "v0", ec: ec, + stats: data.ExportStats{}, } if err := op.validate(); err != nil { return ExportOperation{}, err } @@ -247,7 +250,7 @@ func (op *ExportOperation) do( opStats.resourceCount = 1 opStats.cs = dcs - expCollections, err := exportRestoreCollections( + expCollections, err := produceExportCollections( ctx, op.ec, bup.Version, @@ -255,6 +258,9 @@ op.ExportCfg, op.Options, dcs, + // We also have opStats, but that tracks different data. + // Maybe we can look into merging them some time in the future. + &op.stats, op.Errors) if err != nil { return nil, clues.Stack(err) @@ -310,11 +316,19 @@ func (op *ExportOperation) finalizeMetrics( return op.Errors.Failure() } +// GetStats returns the stats of the export operation. You should only +// be calling this once the export collections have been read and processed +// as the data that will be available here will be the data that was read +// and processed. +func (op *ExportOperation) GetStats() map[path.CategoryType]data.KindStats { + return op.stats.GetStats() +} + // --------------------------------------------------------------------------- // Exporter funcs // --------------------------------------------------------------------------- -func exportRestoreCollections( +func produceExportCollections( ctx context.Context, ec inject.ExportConsumer, backupVersion int, @@ -322,6 +336,7 @@ exportCfg control.ExportConfig, opts control.Options, dcs []data.RestoreCollection, + exportStats *data.ExportStats, errs *fault.Bus, ) ([]export.Collectioner, error) { complete := observe.MessageWithCompletion(ctx, "Preparing export") @@ -337,6 +352,7 @@ exportCfg, opts, dcs, + exportStats, errs) if err != nil { return nil, clues.Wrap(err, "exporting collections") diff --git a/src/internal/operations/inject/inject.go index 92d74d334..298e224b8 100644 ---
a/src/internal/operations/inject/inject.go +++ b/src/internal/operations/inject/inject.go @@ -88,6 +88,7 @@ type ( exportCfg control.ExportConfig, opts control.Options, dcs []data.RestoreCollection, + stats *data.ExportStats, errs *fault.Bus, ) ([]export.Collectioner, error) diff --git a/src/pkg/export/export.go b/src/pkg/export/export.go index 42da0bdc2..7b998d30e 100644 --- a/src/pkg/export/export.go +++ b/src/pkg/export/export.go @@ -28,7 +28,8 @@ type itemStreamer func( backingColls []data.RestoreCollection, backupVersion int, cfg control.ExportConfig, - ch chan<- Item) + ch chan<- Item, + stats *data.ExportStats) // BaseCollection holds the foundational details of an export collection. type BaseCollection struct { @@ -45,6 +46,8 @@ type BaseCollection struct { Cfg control.ExportConfig Stream itemStreamer + + Stats *data.ExportStats } func (bc BaseCollection) BasePath() string { @@ -53,7 +56,7 @@ func (bc BaseCollection) BasePath() string { func (bc BaseCollection) Items(ctx context.Context) <-chan Item { ch := make(chan Item) - go bc.Stream(ctx, bc.BackingCollection, bc.BackupVersion, bc.Cfg, ch) + go bc.Stream(ctx, bc.BackingCollection, bc.BackupVersion, bc.Cfg, ch, bc.Stats) return ch }