Add stats to export operations (#4461)
Adds a way to gather stats about the exported data.
Users can now call `ExportOperation.GetStats()` at the end of the run to get the stats for the operation. The data will be in the format `map[path.CategoryType]data.KindStats`, where `KindStats` is:
```go
type KindStats struct {
	BytesRead     int64
	ResourceCount int64
}
```
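For illustration, a minimal sketch of reading those stats once the run has finished (a hypothetical fragment, assuming `eo` is an `ExportOperation` whose export collections have already been consumed; `fmt` import elided):

```go
// Print the per-category totals accumulated during the export.
for category, ks := range eo.GetStats() {
	fmt.Printf("%s: %d items, %d bytes\n",
		category.HumanString(), ks.ResourceCount, ks.BytesRead)
}
```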
---
#### Does this PR need a docs update or release note?
- [ ] ✅ Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] ⛔ No
#### Type of change
<!--- Please check the type of change your PR introduces: --->
- [x] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup
#### Issue(s)
<!-- Can reference multiple issues. Use one of the following "magic words" - "closes, fixes" to auto-close the Github issue. -->
* https://github.com/alcionai/corso/issues/4311
#### Test Plan
<!-- How will this be tested prior to merging.-->
- [ ] 💪 Manual
- [x] ⚡ Unit test
- [ ] 💚 E2E
Parent: 107b6883d5
Commit: 040257f8be
@@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Added
 
 - Skips graph calls for expired item download URLs.
+- Export operation now shows the stats at the end of the run
 
 ### Fixed
 
 - Catch and report cases where a protected resource is locked out of access. SDK consumers have a new errs sentinel that allows them to check for this case.
@@ -5,6 +5,7 @@ import (
 	"errors"
 
 	"github.com/alcionai/clues"
+	"github.com/dustin/go-humanize"
 	"github.com/spf13/cobra"
 
 	"github.com/alcionai/corso/src/cli/flags"
@@ -110,5 +111,14 @@ func runExport(
 		return Only(ctx, err)
 	}
 
+	stats := eo.GetStats()
+	if len(stats) > 0 {
+		Infof(ctx, "\nExport details")
+	}
+
+	for k, s := range stats {
+		Infof(ctx, "%s: %d items (%s)", k.HumanString(), s.ResourceCount, humanize.Bytes(uint64(s.BytesRead)))
+	}
+
 	return nil
 }
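Given the format string above, the end-of-run CLI output would look something like this (category names and values are illustrative, not taken from a real run):

```
Export details
Files: 2 items (5 B)
```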
@@ -1,5 +1,12 @@
 package data
 
 import (
 	"io"
+	"sync/atomic"
+
+	"github.com/alcionai/corso/src/pkg/path"
 )
 
 type CollectionStats struct {
 	Folders,
 	Objects,
@@ -15,3 +22,68 @@ func (cs CollectionStats) IsZero() bool {
 func (cs CollectionStats) String() string {
 	return cs.Details
 }
+
+type KindStats struct {
+	BytesRead     int64
+	ResourceCount int64
+}
+
+type ExportStats struct {
+	// data is kept private so that we can enforce atomic int updates
+	data map[path.CategoryType]KindStats
+}
+
+func (es *ExportStats) UpdateBytes(kind path.CategoryType, bytesRead int64) {
+	if es.data == nil {
+		es.data = map[path.CategoryType]KindStats{}
+	}
+
+	ks := es.data[kind]
+	atomic.AddInt64(&ks.BytesRead, bytesRead)
+	es.data[kind] = ks
+}
+
+func (es *ExportStats) UpdateResourceCount(kind path.CategoryType) {
+	if es.data == nil {
+		es.data = map[path.CategoryType]KindStats{}
+	}
+
+	ks := es.data[kind]
+	atomic.AddInt64(&ks.ResourceCount, 1)
+	es.data[kind] = ks
+}
+
+func (es *ExportStats) GetStats() map[path.CategoryType]KindStats {
+	return es.data
+}
+
+type statsReader struct {
+	io.ReadCloser
+	kind  path.CategoryType
+	stats *ExportStats
+}
+
+func (sr *statsReader) Read(p []byte) (int, error) {
+	n, err := sr.ReadCloser.Read(p)
+	sr.stats.UpdateBytes(sr.kind, int64(n))
+
+	return n, err
+}
+
+// ReaderWithStats wraps a reader so that every read is tallied
+// into the given stats.
+func ReaderWithStats(
+	reader io.ReadCloser,
+	kind path.CategoryType,
+	stats *ExportStats,
+) io.ReadCloser {
+	if reader == nil {
+		return nil
+	}
+
+	return &statsReader{
+		ReadCloser: reader,
+		kind:       kind,
+		stats:      stats,
+	}
+}
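A quick sketch of how the wrapper accumulates bytes as an item body is drained (hypothetical snippet; the in-memory string reader stands in for real item content, and the `bytes`/`io` imports are assumed):

```go
stats := &data.ExportStats{}

// Wrap a 5-byte body; each Read tallies into stats under FilesCategory.
body := data.ReaderWithStats(
	io.NopCloser(bytes.NewBufferString("hello")),
	path.FilesCategory,
	stats)

if _, err := io.ReadAll(body); err == nil {
	// stats.GetStats()[path.FilesCategory].BytesRead is now 5.
}
```

Note that the atomic add covers only the integer field on the copied `KindStats` value; the write back into the map (`es.data[kind] = ks`) is not itself synchronized.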
@@ -12,18 +12,21 @@ import (
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/export"
 	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/path"
 )
 
 func NewExportCollection(
 	baseDir string,
 	backingCollection []data.RestoreCollection,
 	backupVersion int,
+	stats *data.ExportStats,
 ) export.Collectioner {
 	return export.BaseCollection{
 		BaseDir:           baseDir,
 		BackingCollection: backingCollection,
 		BackupVersion:     backupVersion,
 		Stream:            streamItems,
+		Stats:             stats,
 	}
 }
 
@@ -34,6 +37,7 @@ func streamItems(
 	backupVersion int,
 	cec control.ExportConfig,
 	ch chan<- export.Item,
+	stats *data.ExportStats,
 ) {
 	defer close(ch)
 
@@ -47,11 +51,22 @@
 			}
 
 			name, err := getItemName(ctx, itemUUID, backupVersion, rc)
 			if err != nil {
 				ch <- export.Item{
 					ID:    itemUUID,
 					Error: err,
 				}
 
 				continue
 			}
 
+			stats.UpdateResourceCount(path.FilesCategory)
+			body := data.ReaderWithStats(item.ToReader(), path.FilesCategory, stats)
+
 			ch <- export.Item{
 				ID:    itemUUID,
 				Name:  name,
-				Body:  item.ToReader(),
+				Body:  body,
 				Error: err,
 			}
 		}
@@ -15,6 +15,7 @@ import (
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/export"
 	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/services/m365/api"
 )
 
@@ -23,6 +24,7 @@ func NewExportCollection(
 	backingCollections []data.RestoreCollection,
 	backupVersion int,
 	cec control.ExportConfig,
+	stats *data.ExportStats,
 ) export.Collectioner {
 	return export.BaseCollection{
 		BaseDir: baseDir,
@@ -30,6 +32,7 @@ func NewExportCollection(
 		BackupVersion: backupVersion,
 		Cfg:           cec,
 		Stream:        streamItems,
+		Stats:         stats,
 	}
 }
 
@@ -40,6 +43,7 @@ func streamItems(
 	backupVersion int,
 	cec control.ExportConfig,
 	ch chan<- export.Item,
+	stats *data.ExportStats,
 ) {
 	defer close(ch)
 
@@ -54,6 +58,9 @@ func streamItems(
 					Error: err,
 				}
 			} else {
+				stats.UpdateResourceCount(path.ChannelMessagesCategory)
+				body = data.ReaderWithStats(body, path.ChannelMessagesCategory, stats)
+
 				ch <- export.Item{
 					ID: item.ID(),
 					// channel message items have no name
@@ -90,7 +90,8 @@ func (suite *ExportUnitSuite) TestStreamItems() {
 				[]data.RestoreCollection{test.backingColl},
 				version.NoBackup,
 				control.DefaultExportConfig(),
-				ch)
+				ch,
+				&data.ExportStats{})
 
 			var (
 				itm export.Item
@@ -27,6 +27,7 @@ func (ctrl *Controller) ProduceExportCollections(
 	exportCfg control.ExportConfig,
 	opts control.Options,
 	dcs []data.RestoreCollection,
+	stats *data.ExportStats,
 	errs *fault.Bus,
 ) ([]export.Collectioner, error) {
 	ctx, end := diagnostics.Span(ctx, "m365:export")
@@ -51,6 +52,7 @@ func (ctrl *Controller) ProduceExportCollections(
 			opts,
 			dcs,
 			deets,
+			stats,
 			errs)
 	case selectors.ServiceSharePoint:
 		expCollections, err = sharepoint.ProduceExportCollections(
@@ -61,6 +63,7 @@ func (ctrl *Controller) ProduceExportCollections(
 			dcs,
 			ctrl.backupDriveIDNames,
 			deets,
+			stats,
 			errs)
 	case selectors.ServiceGroups:
 		expCollections, err = groups.ProduceExportCollections(
@@ -72,6 +75,7 @@ func (ctrl *Controller) ProduceExportCollections(
 			ctrl.backupDriveIDNames,
 			ctrl.backupSiteIDWebURL,
 			deets,
+			stats,
 			errs)
 
 	default:
@@ -90,6 +90,7 @@ func (ctrl Controller) ProduceExportCollections(
 	_ control.ExportConfig,
 	_ control.Options,
 	_ []data.RestoreCollection,
+	_ *data.ExportStats,
 	_ *fault.Bus,
 ) ([]export.Collectioner, error) {
 	return nil, ctrl.Err
@@ -29,6 +29,7 @@ func ProduceExportCollections(
 	backupDriveIDNames idname.Cacher,
 	backupSiteIDWebURL idname.Cacher,
 	deets *details.Builder,
+	stats *data.ExportStats,
 	errs *fault.Bus,
 ) ([]export.Collectioner, error) {
 	var (
@@ -52,7 +53,8 @@ func ProduceExportCollections(
 				path.Builder{}.Append(folders...).String(),
 				[]data.RestoreCollection{restoreColl},
 				backupVersion,
-				exportCfg)
+				exportCfg,
+				stats)
 		case path.LibrariesCategory:
 			drivePath, err := path.ToDrivePath(restoreColl.FullPath())
 			if err != nil {
@@ -91,7 +93,8 @@ func ProduceExportCollections(
 			coll = drive.NewExportCollection(
 				baseDir.String(),
 				[]data.RestoreCollection{restoreColl},
-				backupVersion)
+				backupVersion,
+				stats)
 		default:
 			el.AddRecoverable(
 				ctx,
@@ -7,6 +7,7 @@ import (
 	"strings"
 	"testing"
 
+	"github.com/alcionai/clues"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/suite"
 
@@ -64,8 +65,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {
 		itemID        = "itemID"
 		containerName = "channelID"
 		dii           = groupMock.ItemInfo()
-		body          = io.NopCloser(bytes.NewBufferString(
-			`{"displayname": "` + dii.Groups.ItemName + `"}`))
+		content       = `{"displayname": "` + dii.Groups.ItemName + `"}`
+		body          = io.NopCloser(bytes.NewBufferString(content))
 		exportCfg     = control.ExportConfig{}
 		expectedPath  = path.ChannelMessagesCategory.HumanString() + "/" + containerName
 		expectedItems = []export.Item{
@@ -96,6 +97,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {
 		},
 	}
 
+	stats := data.ExportStats{}
+
 	ecs, err := ProduceExportCollections(
 		ctx,
 		int(version.Backup),
@@ -105,6 +108,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {
 		nil,
 		nil,
 		nil,
+		&stats,
 		fault.New(true))
 	assert.NoError(t, err, "export collections error")
 	assert.Len(t, ecs, 1, "num of collections")
@@ -113,7 +117,15 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {
 
 	fitems := []export.Item{}
 
+	size := 0
+
 	for item := range ecs[0].Items(ctx) {
+		b, err := io.ReadAll(item.Body)
+		assert.NoError(t, err, clues.ToCore(err))
+
+		// count up size for tests
+		size += len(b)
+
 		// have to nil out body, otherwise assert fails due to
 		// pointer memory location differences
 		item.Body = nil
@@ -121,6 +133,11 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {
 	}
 
 	assert.Equal(t, expectedItems, fitems, "items")
+
+	expectedStats := data.ExportStats{}
+	expectedStats.UpdateBytes(path.ChannelMessagesCategory, int64(size))
+	expectedStats.UpdateResourceCount(path.ChannelMessagesCategory)
+	assert.Equal(t, expectedStats, stats, "stats")
 }
 
 func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
@@ -182,6 +199,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
 		},
 	}
 
+	stats := data.ExportStats{}
+
 	ecs, err := ProduceExportCollections(
 		ctx,
 		int(version.Backup),
@@ -191,6 +210,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
 		driveNameCache,
 		siteWebURLCache,
 		nil,
+		&stats,
 		fault.New(true))
 	assert.NoError(t, err, "export collections error")
 	assert.Len(t, ecs, 1, "num of collections")
@@ -199,9 +219,24 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
 
 	fitems := []export.Item{}
 
+	size := 0
+
 	for item := range ecs[0].Items(ctx) {
+		// unwrap the body from stats reader
+		b, err := io.ReadAll(item.Body)
+		assert.NoError(t, err, clues.ToCore(err))
+
+		size += len(b)
+		bitem := io.NopCloser(bytes.NewBuffer(b))
+		item.Body = bitem
+
 		fitems = append(fitems, item)
 	}
 
 	assert.Equal(t, expectedItems, fitems, "items")
+
+	expectedStats := data.ExportStats{}
+	expectedStats.UpdateBytes(path.FilesCategory, int64(size))
+	expectedStats.UpdateResourceCount(path.FilesCategory)
+	assert.Equal(t, expectedStats, stats, "stats")
 }
@@ -23,6 +23,7 @@ func ProduceExportCollections(
 	opts control.Options,
 	dcs []data.RestoreCollection,
 	deets *details.Builder,
+	stats *data.ExportStats,
 	errs *fault.Bus,
 ) ([]export.Collectioner, error) {
 	var (
@@ -43,7 +44,8 @@ func ProduceExportCollections(
 			drive.NewExportCollection(
 				baseDir.String(),
 				[]data.RestoreCollection{dc},
-				backupVersion))
+				backupVersion,
+				stats))
 	}
 
 	return ec, el.Failure()
@@ -6,6 +6,7 @@ import (
 	"io"
 	"testing"
 
+	"github.com/alcionai/clues"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/suite"
 
@@ -19,6 +20,7 @@ import (
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/export"
 	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/path"
 )
 
 type ExportUnitSuite struct {
@@ -245,15 +247,32 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			ctx, flush := tester.NewContext(t)
 			defer flush()
 
+			stats := data.ExportStats{}
 			ec := drive.NewExportCollection(
 				"",
 				[]data.RestoreCollection{test.backingCollection},
-				test.version)
+				test.version,
+				&stats)
 
 			items := ec.Items(ctx)
 
 			count := 0
+			size := 0
 			fitems := []export.Item{}
 
 			for item := range items {
 				if item.Error == nil {
 					count++
 				}
 
+				if item.Body != nil {
+					b, err := io.ReadAll(item.Body)
+					assert.NoError(t, err, clues.ToCore(err))
+
+					size += len(b)
+					item.Body = io.NopCloser(bytes.NewBuffer(b))
+				}
+
 				fitems = append(fitems, item)
 			}
 
@@ -268,6 +287,19 @@ func (suite *ExportUnitSuite) TestGetItems() {
 				assert.Equal(t, test.expectedItems[i].Body, item.Body, "body")
 				assert.ErrorIs(t, item.Error, test.expectedItems[i].Error)
 			}
+
+			var expectedStats data.ExportStats
+
+			if size+count > 0 { // it is only initialized if we have something
+				expectedStats = data.ExportStats{}
+				expectedStats.UpdateBytes(path.FilesCategory, int64(size))
+
+				for i := 0; i < count; i++ {
+					expectedStats.UpdateResourceCount(path.FilesCategory)
+				}
+			}
+
+			assert.Equal(t, expectedStats, stats, "stats")
 		})
 	}
 }
@@ -312,6 +344,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 		},
 	}
 
+	stats := data.ExportStats{}
+
 	ecs, err := ProduceExportCollections(
 		ctx,
 		int(version.Backup),
@@ -319,14 +353,30 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 		control.DefaultOptions(),
 		dcs,
 		nil,
+		&stats,
 		fault.New(true))
 	assert.NoError(t, err, "export collections error")
 	assert.Len(t, ecs, 1, "num of collections")
 
 	fitems := []export.Item{}
+	size := 0
 
 	for item := range ecs[0].Items(ctx) {
+		// unwrap the body from stats reader
+		b, err := io.ReadAll(item.Body)
+		assert.NoError(t, err, clues.ToCore(err))
+
+		size += len(b)
+		bitem := io.NopCloser(bytes.NewBuffer(b))
+		item.Body = bitem
+
 		fitems = append(fitems, item)
 	}
 
 	assert.Equal(t, expectedItems, fitems, "items")
+
+	expectedStats := data.ExportStats{}
+	expectedStats.UpdateBytes(path.FilesCategory, int64(size))
+	expectedStats.UpdateResourceCount(path.FilesCategory)
+	assert.Equal(t, expectedStats, stats, "stats")
 }
@@ -26,6 +26,7 @@ func ProduceExportCollections(
 	dcs []data.RestoreCollection,
 	backupDriveIDNames idname.CacheBuilder,
 	deets *details.Builder,
+	stats *data.ExportStats,
 	errs *fault.Bus,
 ) ([]export.Collectioner, error) {
 	var (
@@ -56,7 +57,8 @@ func ProduceExportCollections(
 			drive.NewExportCollection(
 				baseDir.String(),
 				[]data.RestoreCollection{dc},
-				backupVersion))
+				backupVersion,
+				stats))
 	}
 
 	return ec, el.Failure()
@@ -7,6 +7,7 @@ import (
 	"strings"
 	"testing"
 
+	"github.com/alcionai/clues"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/suite"
 
@@ -98,6 +99,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 		},
 	}
 
+	stats := data.ExportStats{}
+
 	ecs, err := ProduceExportCollections(
 		ctx,
 		int(version.Backup),
@@ -106,6 +109,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 		dcs,
 		cache,
 		nil,
+		&stats,
 		fault.New(true))
 	assert.NoError(t, err, "export collections error")
 	assert.Len(t, ecs, 1, "num of collections")
@@ -113,9 +117,24 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 	assert.Equal(t, expectedPath, ecs[0].BasePath(), "base dir")
 
 	fitems := []export.Item{}
+	size := 0
 
 	for item := range ecs[0].Items(ctx) {
+		// unwrap the body from stats reader
+		b, err := io.ReadAll(item.Body)
+		assert.NoError(t, err, clues.ToCore(err))
+
+		size += len(b)
+		bitem := io.NopCloser(bytes.NewBuffer(b))
+		item.Body = bitem
+
 		fitems = append(fitems, item)
 	}
 
 	assert.Equal(t, expectedItems, fitems, "items")
+
+	expectedStats := data.ExportStats{}
+	expectedStats.UpdateBytes(path.FilesCategory, int64(size))
+	expectedStats.UpdateResourceCount(path.FilesCategory)
+	assert.Equal(t, expectedStats, stats, "stats")
 }
@@ -27,6 +27,7 @@ import (
 	"github.com/alcionai/corso/src/pkg/export"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
+	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
 	"github.com/alcionai/corso/src/pkg/store"
 )
@@ -46,6 +47,7 @@ type ExportOperation struct {
 	Selectors selectors.Selector
 	ExportCfg control.ExportConfig
 	Version   string
+	stats     data.ExportStats
 
 	acct account.Account
 	ec   inject.ExportConsumer
@@ -72,6 +74,7 @@ func NewExportOperation(
 		Selectors: sel,
 		Version:   "v0",
 		ec:        ec,
+		stats:     data.ExportStats{},
 	}
 	if err := op.validate(); err != nil {
 		return ExportOperation{}, err
@@ -247,7 +250,7 @@ func (op *ExportOperation) do(
 	opStats.resourceCount = 1
 	opStats.cs = dcs
 
-	expCollections, err := exportRestoreCollections(
+	expCollections, err := produceExportCollections(
 		ctx,
 		op.ec,
 		bup.Version,
@@ -255,6 +258,9 @@ func (op *ExportOperation) do(
 		op.ExportCfg,
 		op.Options,
 		dcs,
+		// We also have opStats, but that tracks different data.
+		// Maybe we can look into merging them some time in the future.
+		&op.stats,
 		op.Errors)
 	if err != nil {
 		return nil, clues.Stack(err)
@@ -310,11 +316,19 @@ func (op *ExportOperation) finalizeMetrics(
 	return op.Errors.Failure()
 }
 
+// GetStats returns the stats of the export operation. It should only be
+// called once the export collections have been read and processed, since
+// only data that has already been read gets counted.
+func (op *ExportOperation) GetStats() map[path.CategoryType]data.KindStats {
+	return op.stats.GetStats()
+}
+
 // ---------------------------------------------------------------------------
 // Exporter funcs
 // ---------------------------------------------------------------------------
 
-func exportRestoreCollections(
+func produceExportCollections(
 	ctx context.Context,
 	ec inject.ExportConsumer,
 	backupVersion int,
@@ -322,6 +336,7 @@ func exportRestoreCollections(
 	exportCfg control.ExportConfig,
 	opts control.Options,
 	dcs []data.RestoreCollection,
+	exportStats *data.ExportStats,
 	errs *fault.Bus,
 ) ([]export.Collectioner, error) {
 	complete := observe.MessageWithCompletion(ctx, "Preparing export")
@@ -337,6 +352,7 @@
 		exportCfg,
 		opts,
 		dcs,
+		exportStats,
 		errs)
 	if err != nil {
 		return nil, clues.Wrap(err, "exporting collections")
@@ -88,6 +88,7 @@ type (
 		exportCfg control.ExportConfig,
 		opts control.Options,
 		dcs []data.RestoreCollection,
+		stats *data.ExportStats,
 		errs *fault.Bus,
 	) ([]export.Collectioner, error)
@@ -28,7 +28,8 @@ type itemStreamer func(
 	backingColls []data.RestoreCollection,
 	backupVersion int,
 	cfg control.ExportConfig,
-	ch chan<- Item)
+	ch chan<- Item,
+	stats *data.ExportStats)
 
 // BaseCollection holds the foundational details of an export collection.
 type BaseCollection struct {
@@ -45,6 +46,8 @@ type BaseCollection struct {
 	Cfg control.ExportConfig
 
 	Stream itemStreamer
+
+	Stats *data.ExportStats
 }
 
 func (bc BaseCollection) BasePath() string {
@@ -53,7 +56,7 @@ func (bc BaseCollection) BasePath() string {
 
 func (bc BaseCollection) Items(ctx context.Context) <-chan Item {
 	ch := make(chan Item)
-	go bc.Stream(ctx, bc.BackingCollection, bc.BackupVersion, bc.Cfg, ch)
+	go bc.Stream(ctx, bc.BackingCollection, bc.BackupVersion, bc.Cfg, ch, bc.Stats)
 
 	return ch
 }
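For completeness, a hypothetical sketch of how a consumer drains a collection; reading each `Body` is what drives the `statsReader` and, through it, the operation's `ExportStats` (`coll` is an assumed `export.Collectioner`, and the `io` import is elided):

```go
// Drain every item in the collection; byte counts accumulate in the
// operation's stats as a side effect of reading the wrapped bodies.
for item := range coll.Items(ctx) {
	if item.Error != nil || item.Body == nil {
		continue
	}

	if _, err := io.Copy(io.Discard, item.Body); err != nil {
		// handle the read failure
	}

	item.Body.Close()
}
```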