Add tests to check for invalid email addresses in eml export (#4881)

<!-- PR description-->
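Adds a table-driven `TestConvert_edge_cases` unit test that covers EML conversion edge cases around sender addresses: a sender with only a display name, a sender with an invalid address, and an empty attachment (replacing the previous `TestConvert_empty_attachment_no_err` test). Also moves the `ExportStats`/`KindStats`/`ReaderWithStats` helpers out of `internal/data` into a new `pkg/metrics` package and updates the export handlers, collections, and tests to use it.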

---

#### Does this PR need a docs update or release note?

- [ ] Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] No

#### Type of change

<!--- Please check the type of change your PR introduces: --->
- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [x] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup

#### Issue(s)

<!-- Can reference multiple issues. Use one of the following "magic words" - "closes, fixes" to auto-close the Github issue. -->
* #<issue>

#### Test Plan

<!-- How will this be tested prior to merging.-->
- [ ] 💪 Manual
- [x] Unit test
- [ ] 💚 E2E
Commit b896405e92 (parent 6f1c5c6249) by Abin Simon, 2023-12-20 01:11:42 +05:30, committed by GitHub
19 changed files with 174 additions and 128 deletions

View File

@@ -131,31 +131,60 @@ func (suite *EMLUnitSuite) TestConvert_messageble_to_eml() {
    assert.Equal(t, source, target)
}

-func (suite *EMLUnitSuite) TestConvert_empty_attachment_no_err() {
-    t := suite.T()
-
-    ctx, flush := tester.NewContext(t)
-    defer flush()
-
-    body := []byte(testdata.EmailWithAttachments)
-
-    msg, err := api.BytesToMessageable(body)
-    require.NoError(t, err, "creating message")
-
-    attachments := msg.GetAttachments()
-    err = attachments[0].GetBackingStore().Set("contentBytes", []uint8{})
-    require.NoError(t, err, "setting content bytes")
-
-    writer := kjson.NewJsonSerializationWriter()
-    defer writer.Close()
-
-    err = writer.WriteObjectValue("", msg)
-    require.NoError(t, err, "serializing message")
-
-    nbody, err := writer.GetSerializedContent()
-    require.NoError(t, err, "getting serialized content")
-
-    _, err = FromJSON(ctx, nbody)
-    assert.NoError(t, err, "converting to eml")
+func (suite *EMLUnitSuite) TestConvert_edge_cases() {
+    tests := []struct {
+        name      string
+        transform func(models.Messageable)
+    }{
+        {
+            name: "just a name",
+            transform: func(msg models.Messageable) {
+                msg.GetFrom().GetEmailAddress().SetName(ptr.To("alphabob"))
+                msg.GetFrom().GetEmailAddress().SetAddress(nil)
+            },
+        },
+        {
+            name: "incorrect address",
+            transform: func(msg models.Messageable) {
+                msg.GetFrom().GetEmailAddress().SetAddress(ptr.To("invalid"))
+            },
+        },
+        {
+            name: "empty attachment",
+            transform: func(msg models.Messageable) {
+                attachments := msg.GetAttachments()
+
+                err := attachments[0].GetBackingStore().Set("contentBytes", []uint8{})
+                require.NoError(suite.T(), err, "setting attachment content")
+            },
+        },
+    }
+
+    for _, test := range tests {
+        suite.Run(test.name, func() {
+            t := suite.T()
+
+            ctx, flush := tester.NewContext(t)
+            defer flush()
+
+            body := []byte(testdata.EmailWithAttachments)
+
+            msg, err := api.BytesToMessageable(body)
+            require.NoError(t, err, "creating message")
+
+            test.transform(msg)
+
+            writer := kjson.NewJsonSerializationWriter()
+            defer writer.Close()
+
+            err = writer.WriteObjectValue("", msg)
+            require.NoError(t, err, "serializing message")
+
+            nbody, err := writer.GetSerializedContent()
+            require.NoError(t, err, "getting serialized content")
+
+            _, err = FromJSON(ctx, nbody)
+            assert.NoError(t, err, "converting to eml")
+        })
+    }
}

View File

@@ -1,12 +1,5 @@
package data

-import (
-    "io"
-    "sync/atomic"
-
-    "github.com/alcionai/corso/src/pkg/path"
-)
-
type CollectionStats struct {
    Folders,
    Objects,
@@ -22,68 +15,3 @@ func (cs CollectionStats) IsZero() bool {
func (cs CollectionStats) String() string {
    return cs.Details
}
-
-type KindStats struct {
-    BytesRead     int64
-    ResourceCount int64
-}
-
-type ExportStats struct {
-    // data is kept private so that we can enforce atomic int updates
-    data map[path.CategoryType]KindStats
-}
-
-func (es *ExportStats) UpdateBytes(kind path.CategoryType, bytesRead int64) {
-    if es.data == nil {
-        es.data = map[path.CategoryType]KindStats{}
-    }
-
-    ks := es.data[kind]
-    atomic.AddInt64(&ks.BytesRead, bytesRead)
-    es.data[kind] = ks
-}
-
-func (es *ExportStats) UpdateResourceCount(kind path.CategoryType) {
-    if es.data == nil {
-        es.data = map[path.CategoryType]KindStats{}
-    }
-
-    ks := es.data[kind]
-    atomic.AddInt64(&ks.ResourceCount, 1)
-    es.data[kind] = ks
-}
-
-func (es *ExportStats) GetStats() map[path.CategoryType]KindStats {
-    return es.data
-}
-
-type statsReader struct {
-    io.ReadCloser
-    kind  path.CategoryType
-    stats *ExportStats
-}
-
-func (sr *statsReader) Read(p []byte) (int, error) {
-    n, err := sr.ReadCloser.Read(p)
-    sr.stats.UpdateBytes(sr.kind, int64(n))
-
-    return n, err
-}
-
-// Create a function that will take a reader and return a reader that
-// will update the stats
-func ReaderWithStats(
-    reader io.ReadCloser,
-    kind path.CategoryType,
-    stats *ExportStats,
-) io.ReadCloser {
-    if reader == nil {
-        return nil
-    }
-
-    return &statsReader{
-        ReadCloser: reader,
-        kind:       kind,
-        stats:      stats,
-    }
-}

View File

@@ -12,6 +12,7 @@ import (
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
+    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
)
@@ -19,7 +20,7 @@ func NewExportCollection(
    baseDir string,
    backingCollection []data.RestoreCollection,
    backupVersion int,
-    stats *data.ExportStats,
+    stats *metrics.ExportStats,
) export.Collectioner {
    return export.BaseCollection{
        BaseDir: baseDir,
@@ -37,7 +38,7 @@ func streamItems(
    backupVersion int,
    cec control.ExportConfig,
    ch chan<- export.Item,
-    stats *data.ExportStats,
+    stats *metrics.ExportStats,
) {
    defer close(ch)
@@ -61,7 +62,7 @@ func streamItems(
        }
        stats.UpdateResourceCount(path.FilesCategory)
-        body := data.ReaderWithStats(item.ToReader(), path.FilesCategory, stats)
+        body := metrics.ReaderWithStats(item.ToReader(), path.FilesCategory, stats)
        ch <- export.Item{
            ID: itemUUID,

View File

@@ -12,6 +12,7 @@ import (
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
+    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
)
@@ -19,7 +20,7 @@ func NewExportCollection(
    baseDir string,
    backingCollection []data.RestoreCollection,
    backupVersion int,
-    stats *data.ExportStats,
+    stats *metrics.ExportStats,
) export.Collectioner {
    return export.BaseCollection{
        BaseDir: baseDir,
@@ -37,7 +38,7 @@ func streamItems(
    backupVersion int,
    config control.ExportConfig,
    ch chan<- export.Item,
-    stats *data.ExportStats,
+    stats *metrics.ExportStats,
) {
    defer close(ch)
@@ -77,7 +78,7 @@ func streamItems(
        }
        emlReader := io.NopCloser(bytes.NewReader([]byte(email)))
-        body := data.ReaderWithStats(emlReader, path.EmailCategory, stats)
+        body := metrics.ReaderWithStats(emlReader, path.EmailCategory, stats)
        ch <- export.Item{
            ID: id,

View File

@@ -15,6 +15,7 @@ import (
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
+    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)
@@ -24,7 +25,7 @@ func NewExportCollection(
    backingCollections []data.RestoreCollection,
    backupVersion int,
    cec control.ExportConfig,
-    stats *data.ExportStats,
+    stats *metrics.ExportStats,
) export.Collectioner {
    return export.BaseCollection{
        BaseDir: baseDir,
@@ -43,7 +44,7 @@ func streamItems(
    backupVersion int,
    cec control.ExportConfig,
    ch chan<- export.Item,
-    stats *data.ExportStats,
+    stats *metrics.ExportStats,
) {
    defer close(ch)
@@ -59,7 +60,7 @@ func streamItems(
        }
    } else {
        stats.UpdateResourceCount(path.ChannelMessagesCategory)
-        body = metrics.ReaderWithStats(body, path.ChannelMessagesCategory, stats)
+        body = metrics.ReaderWithStats(body, path.ChannelMessagesCategory, stats)
        // messages are exported as json and should be named as such
        name := item.ID() + ".json"

View File

@@ -15,6 +15,7 @@ import (
    "github.com/alcionai/corso/src/internal/version"
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/export"
+    "github.com/alcionai/corso/src/pkg/metrics"
)
type ExportUnitSuite struct {
@@ -91,7 +92,7 @@ func (suite *ExportUnitSuite) TestStreamItems() {
        version.NoBackup,
        control.DefaultExportConfig(),
        ch,
-        &data.ExportStats{})
+        &metrics.ExportStats{})
    var (
        itm export.Item

View File

@@ -15,6 +15,7 @@ import (
    "github.com/alcionai/corso/src/pkg/count"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
+    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
)
@@ -87,7 +88,7 @@ func (ctrl Controller) ProduceExportCollections(
    _ int,
    _ control.ExportConfig,
    _ []data.RestoreCollection,
-    _ *data.ExportStats,
+    _ *metrics.ExportStats,
    _ *fault.Bus,
) ([]export.Collectioner, error) {
    return nil, ctrl.Err

View File

@@ -15,6 +15,7 @@ import (
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/logger"
+    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)
@@ -54,7 +55,7 @@ func (h *baseExchangeHandler) ProduceExportCollections(
    backupVersion int,
    exportCfg control.ExportConfig,
    dcs []data.RestoreCollection,
-    stats *data.ExportStats,
+    stats *metrics.ExportStats,
    errs *fault.Bus,
) ([]export.Collectioner, error) {
    var (

View File

@@ -18,6 +18,7 @@ import (
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
+    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)
@@ -144,7 +145,7 @@ func (suite *ExportUnitSuite) TestGetItems() {
            ctx, flush := tester.NewContext(t)
            defer flush()
-            stats := data.ExportStats{}
+            stats := metrics.ExportStats{}
            ec := exchange.NewExportCollection(
                "",
                []data.RestoreCollection{test.backingCollection},
@@ -184,10 +185,10 @@ func (suite *ExportUnitSuite) TestGetItems() {
                assert.ErrorIs(t, item.Error, test.expectedItems[i].Error)
            }
-            var expectedStats data.ExportStats
+            var expectedStats metrics.ExportStats
            if size+count > 0 { // it is only initialized if we have something
-                expectedStats = data.ExportStats{}
+                expectedStats = metrics.ExportStats{}
                expectedStats.UpdateBytes(path.EmailCategory, int64(size))
                for i := 0; i < count; i++ {
@@ -379,7 +380,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            exportCfg := control.ExportConfig{}
-            stats := data.ExportStats{}
+            stats := metrics.ExportStats{}
            ecs, err := NewExchangeHandler(control.DefaultOptions(), api.Client{}, nil).
                ProduceExportCollections(
@@ -398,7 +399,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
            assert.NoError(t, err, "export collections error")
            assert.Len(t, ecs, len(tt.expectedItems), "num of collections")
-            expectedStats := data.ExportStats{}
+            expectedStats := metrics.ExportStats{}
            // We are dependent on the order the collections are
            // returned in the test which is not necessary for the

View File

@@ -17,6 +17,7 @@ import (
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/logger"
+    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)
@@ -68,7 +69,7 @@ func (h *baseGroupsHandler) ProduceExportCollections(
    backupVersion int,
    exportCfg control.ExportConfig,
    dcs []data.RestoreCollection,
-    stats *data.ExportStats,
+    stats *metrics.ExportStats,
    errs *fault.Bus,
) ([]export.Collectioner, error) {
    var (

View File

@@ -20,6 +20,7 @@ import (
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
+    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)
@@ -95,7 +96,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {
        },
    }
-    stats := data.ExportStats{}
+    stats := metrics.ExportStats{}
    ecs, err := NewGroupsHandler(control.DefaultOptions(), api.Client{}, nil).
        ProduceExportCollections(
@@ -129,7 +130,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_messages() {
    assert.Equal(t, expectedItems, fitems, "items")
-    expectedStats := data.ExportStats{}
+    expectedStats := metrics.ExportStats{}
    expectedStats.UpdateBytes(path.ChannelMessagesCategory, int64(size))
    expectedStats.UpdateResourceCount(path.ChannelMessagesCategory)
    assert.Equal(t, expectedStats, stats, "stats")
@@ -200,7 +201,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
    handler := NewGroupsHandler(control.DefaultOptions(), api.Client{}, nil)
    handler.CacheItemInfo(dii)
-    stats := data.ExportStats{}
+    stats := metrics.ExportStats{}
    ecs, err := handler.ProduceExportCollections(
        ctx,
@@ -232,7 +233,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections_libraries() {
    assert.Equal(t, expectedItems, fitems, "items")
-    expectedStats := data.ExportStats{}
+    expectedStats := metrics.ExportStats{}
    expectedStats.UpdateBytes(path.FilesCategory, int64(size))
    expectedStats.UpdateResourceCount(path.FilesCategory)
    assert.Equal(t, expectedStats, stats, "stats")

View File

@@ -14,6 +14,7 @@ import (
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
+    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)
@@ -61,7 +62,7 @@ func (h *baseOneDriveHandler) ProduceExportCollections(
    backupVersion int,
    exportCfg control.ExportConfig,
    dcs []data.RestoreCollection,
-    stats *data.ExportStats,
+    stats *metrics.ExportStats,
    errs *fault.Bus,
) ([]export.Collectioner, error) {
    var (

View File

@@ -19,6 +19,7 @@ import (
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
+    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)
@@ -247,7 +248,7 @@ func (suite *ExportUnitSuite) TestGetItems() {
            ctx, flush := tester.NewContext(t)
            defer flush()
-            stats := data.ExportStats{}
+            stats := metrics.ExportStats{}
            ec := drive.NewExportCollection(
                "",
                []data.RestoreCollection{test.backingCollection},
@@ -288,10 +289,10 @@ func (suite *ExportUnitSuite) TestGetItems() {
                assert.ErrorIs(t, item.Error, test.expectedItems[i].Error)
            }
-            var expectedStats data.ExportStats
+            var expectedStats metrics.ExportStats
            if size+count > 0 { // it is only initialized if we have something
-                expectedStats = data.ExportStats{}
+                expectedStats = metrics.ExportStats{}
                expectedStats.UpdateBytes(path.FilesCategory, int64(size))
                for i := 0; i < count; i++ {
@@ -340,7 +341,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
        },
    }
-    stats := data.ExportStats{}
+    stats := metrics.ExportStats{}
    ecs, err := NewOneDriveHandler(control.DefaultOptions(), api.Client{}, nil).
        ProduceExportCollections(
@@ -370,7 +371,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
    assert.Equal(t, expectedItems, fitems, "items")
-    expectedStats := data.ExportStats{}
+    expectedStats := metrics.ExportStats{}
    expectedStats.UpdateBytes(path.FilesCategory, int64(size))
    expectedStats.UpdateResourceCount(path.FilesCategory)
    assert.Equal(t, expectedStats, stats, "stats")

View File

@@ -15,6 +15,7 @@ import (
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/logger"
+    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)
@@ -65,7 +66,7 @@ func (h *baseSharePointHandler) ProduceExportCollections(
    backupVersion int,
    exportCfg control.ExportConfig,
    dcs []data.RestoreCollection,
-    stats *data.ExportStats,
+    stats *metrics.ExportStats,
    errs *fault.Bus,
) ([]export.Collectioner, error) {
    var (

View File

@@ -19,6 +19,7 @@ import (
    "github.com/alcionai/corso/src/pkg/control"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
+    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/services/m365/api"
)
@@ -129,7 +130,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
    handler := NewSharePointHandler(control.DefaultOptions(), api.Client{}, nil)
    handler.CacheItemInfo(test.itemInfo)
-    stats := data.ExportStats{}
+    stats := metrics.ExportStats{}
    ecs, err := handler.ProduceExportCollections(
        ctx,
@@ -160,7 +161,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
    assert.Equal(t, expectedItems, fitems, "items")
-    expectedStats := data.ExportStats{}
+    expectedStats := metrics.ExportStats{}
    expectedStats.UpdateBytes(path.FilesCategory, int64(size))
    expectedStats.UpdateResourceCount(path.FilesCategory)
    assert.Equal(t, expectedStats, stats, "stats")

View File

@@ -27,6 +27,7 @@ import (
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
    "github.com/alcionai/corso/src/pkg/logger"
+    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
    "github.com/alcionai/corso/src/pkg/selectors"
    "github.com/alcionai/corso/src/pkg/store"
@@ -47,7 +48,7 @@ type ExportOperation struct {
    Selectors selectors.Selector
    ExportCfg control.ExportConfig
    Version   string
-    stats     data.ExportStats
+    stats     metrics.ExportStats
    acct account.Account
    ec   inject.ExportConsumer
@@ -74,7 +75,7 @@ func NewExportOperation(
        Selectors: sel,
        Version:   "v0",
        ec:        ec,
-        stats:     data.ExportStats{},
+        stats:     metrics.ExportStats{},
    }
    if err := op.validate(); err != nil {
        return ExportOperation{}, err
@@ -322,7 +323,7 @@ func (op *ExportOperation) finalizeMetrics(
// be calling this once the export collections have been read and process
// as the data that will be available here will be the data that was read
// and processed.
-func (op *ExportOperation) GetStats() map[path.CategoryType]data.KindStats {
+func (op *ExportOperation) GetStats() map[path.CategoryType]metrics.KindStats {
    return op.stats.GetStats()
}
@@ -336,7 +337,7 @@ func produceExportCollections(
    backupVersion int,
    exportCfg control.ExportConfig,
    dcs []data.RestoreCollection,
-    exportStats *data.ExportStats,
+    exportStats *metrics.ExportStats,
    errs *fault.Bus,
) ([]export.Collectioner, error) {
    complete := observe.MessageWithCompletion(ctx, observe.ProgressCfg{}, "Preparing export")

View File

@@ -16,6 +16,7 @@ import (
    "github.com/alcionai/corso/src/pkg/count"
    "github.com/alcionai/corso/src/pkg/export"
    "github.com/alcionai/corso/src/pkg/fault"
+    "github.com/alcionai/corso/src/pkg/metrics"
    "github.com/alcionai/corso/src/pkg/path"
)
@@ -96,7 +97,7 @@ type (
        backupVersion int,
        exportCfg control.ExportConfig,
        dcs []data.RestoreCollection,
-        stats *data.ExportStats,
+        stats *metrics.ExportStats,
        errs *fault.Bus,
    ) ([]export.Collectioner, error)

View File

@@ -6,6 +6,7 @@ import (
    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/pkg/control"
+    "github.com/alcionai/corso/src/pkg/metrics"
)
// ---------------------------------------------------------------------------
@@ -29,7 +30,7 @@ type itemStreamer func(
    backupVersion int,
    cfg control.ExportConfig,
    ch chan<- Item,
-    stats *data.ExportStats)
+    stats *metrics.ExportStats)
// BaseCollection holds the foundational details of an export collection.
type BaseCollection struct {
@@ -47,7 +48,7 @@ type BaseCollection struct {
    Stream itemStreamer
-    Stats *data.ExportStats
+    Stats *metrics.ExportStats
}
func (bc BaseCollection) BasePath() string {

View File

@@ -0,0 +1,73 @@
+package metrics
+
+import (
+    "io"
+    "sync/atomic"
+
+    "github.com/alcionai/corso/src/pkg/path"
+)
+
+type KindStats struct {
+    BytesRead     int64
+    ResourceCount int64
+}
+
+type ExportStats struct {
+    // data is kept private so that we can enforce atomic int updates
+    data map[path.CategoryType]KindStats
+}
+
+func (es *ExportStats) UpdateBytes(kind path.CategoryType, bytesRead int64) {
+    if es.data == nil {
+        es.data = map[path.CategoryType]KindStats{}
+    }
+
+    ks := es.data[kind]
+    atomic.AddInt64(&ks.BytesRead, bytesRead)
+    es.data[kind] = ks
+}
+
+func (es *ExportStats) UpdateResourceCount(kind path.CategoryType) {
+    if es.data == nil {
+        es.data = map[path.CategoryType]KindStats{}
+    }
+
+    ks := es.data[kind]
+    atomic.AddInt64(&ks.ResourceCount, 1)
+    es.data[kind] = ks
+}
+
+func (es *ExportStats) GetStats() map[path.CategoryType]KindStats {
+    return es.data
+}
+
+type statsReader struct {
+    io.ReadCloser
+    kind  path.CategoryType
+    stats *ExportStats
+}
+
+func (sr *statsReader) Read(p []byte) (int, error) {
+    n, err := sr.ReadCloser.Read(p)
+    sr.stats.UpdateBytes(sr.kind, int64(n))
+
+    return n, err
+}
+
+// Create a function that will take a reader and return a reader that
+// will update the stats
+func ReaderWithStats(
+    reader io.ReadCloser,
+    kind path.CategoryType,
+    stats *ExportStats,
+) io.ReadCloser {
+    if reader == nil {
+        return nil
+    }
+
+    return &statsReader{
+        ReadCloser: reader,
+        kind:       kind,
+        stats:      stats,
+    }
+}
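
For context, here is a minimal sketch (not part of this diff) of how the relocated helpers fit together: `ReaderWithStats` wraps an item's reader so bytes are tallied per category as they are consumed, and `UpdateResourceCount`/`GetStats` complete the accounting. The package paths and `path.FilesCategory` come from the diff above; everything else is illustrative.

```go
package main

import (
	"fmt"
	"io"
	"strings"

	"github.com/alcionai/corso/src/pkg/metrics"
	"github.com/alcionai/corso/src/pkg/path"
)

func main() {
	// A single ExportStats value is shared across collections.
	stats := &metrics.ExportStats{}

	// Wrap an item body; reads pass through while BytesRead accumulates.
	body := io.NopCloser(strings.NewReader("hello, export"))
	rc := metrics.ReaderWithStats(body, path.FilesCategory, stats)

	_, _ = io.ReadAll(rc)
	_ = rc.Close()

	// Count the item itself once it has been streamed.
	stats.UpdateResourceCount(path.FilesCategory)

	ks := stats.GetStats()[path.FilesCategory]
	fmt.Printf("bytes=%d resources=%d\n", ks.BytesRead, ks.ResourceCount)
}
```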