Data Collection --> Collection refactor (#415)

DataCollection was renamed to Collection throughout the repository, and all associated references were updated to reflect the change. The related DataStream and DataStreamInfo interfaces were likewise renamed to Stream and StreamInfo.
Danny 2022-07-27 12:04:31 -04:00 committed by GitHub
parent 9b28d71705
commit 34a7a1a80c
14 changed files with 86 additions and 66 deletions
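The rename is purely mechanical; no behavior changes. As a rough sketch of what a call site inside the corso module reads like after this commit (the countItems helper below is hypothetical and only illustrates the new identifiers; internal/data is an internal package, so this compiles only from within the repository):

package example

import "github.com/alcionai/corso/internal/data"

// countItems is a hypothetical helper showing the renamed identifiers in use.
// Before this commit it would have taken a data.DataCollection and ranged over
// a <-chan data.DataStream; only the type names differ now.
func countItems(c data.Collection) int {
	n := 0
	for range c.Items() { // Items() now returns <-chan data.Stream
		n++
	}
	return n
}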

View File

@@ -9,9 +9,9 @@ import (
 	"github.com/alcionai/corso/pkg/backup/details"
 )
 
-var _ data.DataCollection = &ExchangeDataCollection{}
-var _ data.DataStream = &ExchangeData{}
-var _ data.DataStreamInfo = &ExchangeData{}
+var _ data.Collection = &ExchangeDataCollection{}
+var _ data.Stream = &ExchangeData{}
+var _ data.StreamInfo = &ExchangeData{}
 
 const (
 	collectionChannelBufferSize = 120
@@ -24,7 +24,7 @@ const (
 type ExchangeDataCollection struct {
 	// M365 user
 	user string
-	data chan data.DataStream
+	data chan data.Stream
 	tasks []string
 	updateCh chan support.ConnectorOperationStatus
 	service graphService
@@ -39,7 +39,7 @@ type ExchangeDataCollection struct {
 func NewExchangeDataCollection(aUser string, pathRepresentation []string) ExchangeDataCollection {
 	collection := ExchangeDataCollection{
 		user: aUser,
-		data: make(chan data.DataStream, collectionChannelBufferSize),
+		data: make(chan data.Stream, collectionChannelBufferSize),
 		fullPath: pathRepresentation,
 	}
 	return collection
@@ -57,7 +57,7 @@ func (edc *ExchangeDataCollection) FinishPopulation() {
 	}
 }
 
-func (edc *ExchangeDataCollection) Items() <-chan data.DataStream {
+func (edc *ExchangeDataCollection) Items() <-chan data.Stream {
 	return edc.data
 }

View File

@@ -56,7 +56,7 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_NewExchange
 func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_PopulateCollection() {
 	inputStrings := []string{"Jack", "and", "Jill", "went", "up", "the", "hill to",
-		"fetch", "a", "pale", "of", "water"}
+		"fetch", "a", "pail", "of", "water"}
 	expected := len(inputStrings) / 2 // We are using pairs
 	edc := NewExchangeDataCollection("Fletcher", []string{"sugar", "horses", "painted red"})
 	for i := 0; i < expected; i++ {
@@ -64,3 +64,23 @@ func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_PopulateCol
 	}
 	suite.Equal(expected, len(edc.data))
 }
+
+func (suite *ExchangeDataCollectionSuite) TestExchangeDataCollection_Items() {
+	inputStrings := []string{"Jack", "and", "Jill", "went", "up", "the", "hill to",
+		"fetch", "a", "pail", "of", "water"}
+	expected := len(inputStrings) / 2 // We are using pairs
+	edc := NewExchangeDataCollection("Fletcher", []string{"sugar", "horses", "painted red"})
+	for i := 0; i < expected; i++ {
+		edc.data <- &ExchangeData{id: inputStrings[i*2], message: []byte(inputStrings[i*2+1])}
+	}
+	close(edc.data)
+	suite.Equal(expected, len(edc.data))
+	streams := edc.Items()
+	suite.Equal(expected, len(streams))
+	count := 0
+	for item := range streams {
+		assert.NotNil(suite.T(), item)
+		count++
+	}
+	suite.Equal(count, expected)
+}

View File

@@ -191,13 +191,13 @@ func buildFromMap(isKey bool, mapping map[string]string) []string {
 // use to read mailbox data out for the specified user
 // Assumption: User exists
 // Add iota to this call -> mail, contacts, calendar, etc.
-func (gc *GraphConnector) ExchangeDataCollection(ctx context.Context, selector selectors.Selector) ([]data.DataCollection, error) {
+func (gc *GraphConnector) ExchangeDataCollection(ctx context.Context, selector selectors.Selector) ([]data.Collection, error) {
 	eb, err := selector.ToExchangeBackup()
 	if err != nil {
 		return nil, errors.Wrap(err, "collecting exchange data")
 	}
-	collections := []data.DataCollection{}
+	collections := []data.Collection{}
 	scopes := eb.Scopes()
 	var errs error
@@ -236,7 +236,7 @@ func (gc *GraphConnector) ExchangeDataCollection(ctx context.Context, selector s
 // RestoreMessages: Utility function to connect to M365 backstore
 // and upload messages from DataCollection.
 // FullPath: tenantId, userId, <mailCategory>, FolderId
-func (gc *GraphConnector) RestoreMessages(ctx context.Context, dcs []data.DataCollection) error {
+func (gc *GraphConnector) RestoreMessages(ctx context.Context, dcs []data.Collection) error {
 	var (
 		pathCounter = map[string]bool{}
 		attempts, successes int
@@ -422,7 +422,7 @@ func messageToDataCollection(
 	client *msgraphsdk.GraphServiceClient,
 	ctx context.Context,
 	objectWriter *kw.JsonSerializationWriter,
-	dataChannel chan<- data.DataStream,
+	dataChannel chan<- data.Stream,
 	message models.Messageable,
 	user string,
 ) error {

View File

@@ -89,7 +89,7 @@ func (suite *GraphConnectorIntegrationSuite) TestGraphConnector_restoreMessages(
 	edc := NewExchangeDataCollection("tenant", []string{"tenantId", evs[user], mailCategory, "Inbox"})
 	edc.PopulateCollection(&ds)
 	edc.FinishPopulation()
-	err = suite.connector.RestoreMessages(context.Background(), []data.DataCollection{&edc})
+	err = suite.connector.RestoreMessages(context.Background(), []data.Collection{&edc})
 	assert.NoError(suite.T(), err)
 }
@@ -173,7 +173,7 @@ func (suite *DisconnectedGraphConnectorSuite) TestBuild() {
 }
 
 func (suite *DisconnectedGraphConnectorSuite) TestInterfaceAlignment() {
-	var dc data.DataCollection
+	var dc data.Collection
 	concrete := NewExchangeDataCollection("Check", []string{"interface", "works"})
 	dc = &concrete
 	assert.NotNil(suite.T(), dc)

View File

@@ -20,9 +20,9 @@ type MockExchangeDataCollection struct {
 }
 
 var (
-	_ data.DataCollection = &MockExchangeDataCollection{}
-	_ data.DataStream = &MockExchangeData{}
-	_ data.DataStreamInfo = &MockExchangeData{}
+	_ data.Collection = &MockExchangeDataCollection{}
+	_ data.Stream = &MockExchangeData{}
+	_ data.StreamInfo = &MockExchangeData{}
 )
 
 // NewMockExchangeDataCollection creates an data collection that will return the specified number of
@@ -49,8 +49,8 @@ func (medc *MockExchangeDataCollection) FullPath() []string {
 // Items returns a channel that has the next items in the collection. The
 // channel is closed when there are no more items available.
-func (medc *MockExchangeDataCollection) Items() <-chan data.DataStream {
-	res := make(chan data.DataStream)
+func (medc *MockExchangeDataCollection) Items() <-chan data.Stream {
+	res := make(chan data.Stream)
 	go func() {
 		defer close(res)
View File

@@ -6,14 +6,14 @@ import (
 	"github.com/alcionai/corso/pkg/backup/details"
 )
 
-// A DataCollection represents a collection of data of the
-// same type (e.g. mail)
-type DataCollection interface {
+// A Collection represents a compilation of data from the
+// same type application (e.g. mail)
+type Collection interface {
 	// Items returns a channel from which items in the collection can be read.
 	// Each returned struct contains the next item in the collection
 	// The channel is closed when there are no more items in the collection or if
 	// an unrecoverable error caused an early termination in the sender.
-	Items() <-chan DataStream
+	Items() <-chan Stream
 	// FullPath returns a slice of strings that act as metadata tags for this
 	// DataCollection. Returned items should be ordered from most generic to least
 	// generic. For example, a DataCollection for emails from a specific user
@@ -23,7 +23,7 @@ type DataCollection interface {
 
 // DataStream represents a single item within a DataCollection
 // that can be consumed as a stream (it embeds io.Reader)
-type DataStream interface {
+type Stream interface {
 	// ToReader returns an io.Reader for the DataStream
 	ToReader() io.ReadCloser
 	// UUID provides a unique identifier for this data
@@ -32,6 +32,6 @@ type DataStream interface {
 
 // DataStreamInfo is used to provide service specific
 // information about the DataStream
-type DataStreamInfo interface {
+type StreamInfo interface {
 	Info() details.ItemInfo
 }
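The doc comments above carry the whole contract: Items yields each element and the sender closes the channel when done (or on an unrecoverable error), FullPath orders metadata tags from most to least generic, and StreamInfo adds optional service-specific detail. A minimal, self-contained sketch of that contract follows; it mirrors the renamed interfaces with local stand-ins (method signatures inferred from the comments, e.g. UUID() string) rather than importing corso's internal data package:

package main

import (
	"bytes"
	"fmt"
	"io"
)

// Local stand-ins for data.Stream and data.Collection, trimmed to the
// methods this commit shows; these are not the corso definitions themselves.
type Stream interface {
	ToReader() io.ReadCloser
	UUID() string
}

type Collection interface {
	Items() <-chan Stream
	FullPath() []string
}

type memStream struct {
	id   string
	body []byte
}

func (m memStream) ToReader() io.ReadCloser { return io.NopCloser(bytes.NewReader(m.body)) }
func (m memStream) UUID() string            { return m.id }

type memCollection struct {
	path    []string // most generic first, e.g. tenant, user, category, folder
	streams []Stream
}

func (c memCollection) FullPath() []string { return c.path }

// Items streams the backing slice and closes the channel when it is
// exhausted, the same producer pattern the Exchange, mock, and kopia
// collections in this diff use.
func (c memCollection) Items() <-chan Stream {
	res := make(chan Stream)
	go func() {
		defer close(res)
		for _, s := range c.streams {
			res <- s
		}
	}()
	return res
}

func main() {
	c := memCollection{
		path:    []string{"tenant", "user", "mail", "Inbox"},
		streams: []Stream{memStream{id: "msg-1", body: []byte("hello")}},
	}
	for s := range c.Items() {
		b, _ := io.ReadAll(s.ToReader())
		fmt.Println(s.UUID(), len(b))
	}
}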

View File

@@ -6,10 +6,10 @@ import (
 	"github.com/stretchr/testify/suite"
 )
 
-type DataCollectionSuite struct {
+type CollectionSuite struct {
 	suite.Suite
 }
 
 func TestDataCollectionSuite(t *testing.T) {
-	suite.Run(t, new(DataCollectionSuite))
+	suite.Run(t, new(CollectionSuite))
 }

View File

@@ -6,16 +6,16 @@ import (
 	"github.com/alcionai/corso/internal/data"
 )
 
-var _ data.DataCollection = &kopiaDataCollection{}
-var _ data.DataStream = &kopiaDataStream{}
+var _ data.Collection = &kopiaDataCollection{}
+var _ data.Stream = &kopiaDataStream{}
 
 type kopiaDataCollection struct {
 	path []string
-	streams []data.DataStream
+	streams []data.Stream
 }
 
-func (kdc *kopiaDataCollection) Items() <-chan data.DataStream {
-	res := make(chan data.DataStream)
+func (kdc *kopiaDataCollection) Items() <-chan data.Stream {
+	res := make(chan data.Stream)
 	go func() {
 		defer close(res)

View File

@@ -30,7 +30,7 @@ func (suite *KopiaDataCollectionUnitSuite) TestReturnsPath() {
 	path := []string{"some", "path", "for", "data"}
 
 	c := kopiaDataCollection{
-		streams: []data.DataStream{},
+		streams: []data.Stream{},
 		path: path,
 	}
@@ -50,11 +50,11 @@ func (suite *KopiaDataCollectionUnitSuite) TestReturnsStreams() {
 	table := []struct {
 		name string
-		streams []data.DataStream
+		streams []data.Stream
 	}{
 		{
 			name: "SingleStream",
-			streams: []data.DataStream{
+			streams: []data.Stream{
 				&kopiaDataStream{
 					reader: io.NopCloser(bytes.NewReader(testData[0])),
 					uuid: uuids[0],
@@ -63,7 +63,7 @@ func (suite *KopiaDataCollectionUnitSuite) TestReturnsStreams() {
 		},
 		{
 			name: "MultipleStreams",
-			streams: []data.DataStream{
+			streams: []data.Stream{
 				&kopiaDataStream{
 					reader: io.NopCloser(bytes.NewReader(testData[0])),
 					uuid: uuids[0],

View File

@@ -80,7 +80,7 @@ func (w *Wrapper) Close(ctx context.Context) error {
 // kopia callbacks on directory entries. It binds the directory to the given
 // DataCollection.
 func getStreamItemFunc(
-	collection data.DataCollection,
+	collection data.Collection,
 	details *details.Details,
 ) func(context.Context, func(context.Context, fs.Entry) error) error {
 	return func(ctx context.Context, cb func(context.Context, fs.Entry) error) error {
@@ -93,7 +93,7 @@ func getStreamItemFunc(
 			if !ok {
 				return nil
 			}
-			ei, ok := e.(data.DataStreamInfo)
+			ei, ok := e.(data.StreamInfo)
 			if !ok {
 				return errors.New("item does not implement DataStreamInfo")
 			}
@@ -143,7 +143,7 @@ func buildKopiaDirs(dirName string, dir *treeMap, details *details.Details) (fs.
 type treeMap struct {
 	childDirs map[string]*treeMap
-	collection data.DataCollection
+	collection data.Collection
 }
@@ -156,7 +156,7 @@ func newTreeMap() *treeMap {
 // ancestor of the streams and uses virtualfs.StaticDirectory for internal nodes
 // in the hierarchy. Leaf nodes are virtualfs.StreamingDirectory with the given
 // DataCollections.
-func inflateDirTree(ctx context.Context, collections []data.DataCollection, details *details.Details) (fs.Directory, error) {
+func inflateDirTree(ctx context.Context, collections []data.Collection, details *details.Details) (fs.Directory, error) {
 	roots := make(map[string]*treeMap)
 
 	for _, s := range collections {
@@ -228,7 +228,7 @@ func inflateDirTree(ctx context.Context, collections []data.DataCollection, deta
 func (w Wrapper) BackupCollections(
 	ctx context.Context,
-	collections []data.DataCollection,
+	collections []data.Collection,
 ) (*BackupStats, *details.Details, error) {
 	if w.c == nil {
 		return nil, nil, errNotConnected
@@ -332,7 +332,7 @@ func (w Wrapper) collectItems(
 	snapshotID string,
 	itemPath []string,
 	isDirectory bool,
-) ([]data.DataCollection, error) {
+) ([]data.Collection, error) {
 	e, err := w.getEntry(ctx, snapshotID, itemPath)
 	if err != nil {
 		return nil, err
@@ -362,7 +362,7 @@ func (w Wrapper) collectItems(
 		return nil, err
 	}
-	return []data.DataCollection{c}, nil
+	return []data.Collection{c}, nil
 }
 
 // RestoreSingleItem looks up the item at the given path in the snapshot with id
@@ -376,7 +376,7 @@ func (w Wrapper) RestoreSingleItem(
 	ctx context.Context,
 	snapshotID string,
 	itemPath []string,
-) (data.DataCollection, error) {
+) (data.Collection, error) {
 	c, err := w.collectItems(ctx, snapshotID, itemPath, false)
 	if err != nil {
 		return nil, err
@@ -396,14 +396,14 @@ func restoreSingleItem(
 	ctx context.Context,
 	f fs.File,
 	itemPath []string,
-) (data.DataCollection, error) {
+) (data.Collection, error) {
 	r, err := f.Open(ctx)
 	if err != nil {
 		return nil, errors.Wrap(err, "opening file")
 	}
 
 	return &kopiaDataCollection{
-		streams: []data.DataStream{
+		streams: []data.Stream{
 			&kopiaDataStream{
 				uuid: f.Name(),
 				reader: r,
@@ -457,8 +457,8 @@ func restoreSubtree(
 	ctx context.Context,
 	dir fs.Directory,
 	relativePath []string,
-) ([]data.DataCollection, *multierror.Error) {
-	collections := []data.DataCollection{}
+) ([]data.Collection, *multierror.Error) {
+	collections := []data.Collection{}
 	// Want a local copy of relativePath with our new element.
 	fullPath := append(append([]string{}, relativePath...), dir.Name())
 	var errs *multierror.Error
@@ -475,7 +475,7 @@ func restoreSubtree(
 		return nil, errs
 	}
-	streams := make([]data.DataStream, 0, len(files))
+	streams := make([]data.Stream, 0, len(files))
 	for _, f := range files {
 		r, err := f.Open(ctx)
@@ -524,7 +524,7 @@ func (w Wrapper) RestoreDirectory(
 	ctx context.Context,
 	snapshotID string,
 	basePath []string,
-) ([]data.DataCollection, error) {
+) ([]data.Collection, error) {
 	return w.collectItems(ctx, snapshotID, basePath, true)
 }
@@ -539,9 +539,9 @@ func (w Wrapper) RestoreMultipleItems(
 	ctx context.Context,
 	snapshotID string,
 	paths [][]string,
-) ([]data.DataCollection, error) {
+) ([]data.Collection, error) {
 	var (
-		dcs = []data.DataCollection{}
+		dcs = []data.Collection{}
 		errs *multierror.Error
 	)
 	for _, path := range paths {

View File

@@ -63,7 +63,7 @@ func entriesToNames(entries []fs.Entry) []string {
 func testForFiles(
 	t *testing.T,
 	expected map[string][]byte,
-	collections []data.DataCollection,
+	collections []data.Collection,
 ) {
 	count := 0
 	for _, c := range collections {
@@ -119,7 +119,7 @@ func (suite *KopiaUnitSuite) TestBuildDirectoryTree() {
 	details := &details.Details{}
 
-	collections := []data.DataCollection{
+	collections := []data.Collection{
 		mockconnector.NewMockExchangeDataCollection(
 			[]string{tenant, user1, emails},
 			expectedFileCount[user1],
@@ -181,7 +181,7 @@ func (suite *KopiaUnitSuite) TestBuildDirectoryTree_NoAncestorDirs() {
 	expectedFileCount := 42
 	details := &details.Details{}
 
-	collections := []data.DataCollection{
+	collections := []data.Collection{
 		mockconnector.NewMockExchangeDataCollection(
 			[]string{emails},
 			expectedFileCount,
@@ -203,7 +203,7 @@ func (suite *KopiaUnitSuite) TestBuildDirectoryTree_NoAncestorDirs() {
 func (suite *KopiaUnitSuite) TestBuildDirectoryTree_Fails() {
 	table := []struct {
 		name string
-		layout []data.DataCollection
+		layout []data.Collection
 	}{
 		{
 			"MultipleRoots",
@@ -214,7 +214,7 @@ func (suite *KopiaUnitSuite) TestBuildDirectoryTree_Fails() {
 			// - user2
 			// - emails
 			// - 42 separate files
-			[]data.DataCollection{
+			[]data.Collection{
 				mockconnector.NewMockExchangeDataCollection(
 					[]string{"user1", "emails"},
 					5,
@@ -227,7 +227,7 @@ func (suite *KopiaUnitSuite) TestBuildDirectoryTree_Fails() {
 		},
 		{
 			"NoCollectionPath",
-			[]data.DataCollection{
+			[]data.Collection{
 				mockconnector.NewMockExchangeDataCollection(
 					nil,
 					5,
@@ -242,7 +242,7 @@ func (suite *KopiaUnitSuite) TestBuildDirectoryTree_Fails() {
 			// - emails
 			// - 5 separate files
 			// - 42 separate files
-			[]data.DataCollection{
+			[]data.Collection{
 				mockconnector.NewMockExchangeDataCollection(
 					[]string{"a-tenant", "user1", "emails"},
 					5,
@@ -401,7 +401,7 @@ func (suite *KopiaIntegrationSuite) TearDownTest() {
 func (suite *KopiaIntegrationSuite) TestBackupCollections() {
 	t := suite.T()
 
-	collections := []data.DataCollection{
+	collections := []data.Collection{
 		mockconnector.NewMockExchangeDataCollection(
 			[]string{"a-tenant", "user1", "emails"},
 			5,
@@ -456,10 +456,10 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupTest() {
 	suite.w = &Wrapper{c}
 
-	collections := []data.DataCollection{
+	collections := []data.Collection{
 		&kopiaDataCollection{
 			path: testPath,
-			streams: []data.DataStream{
+			streams: []data.Stream{
 				&mockconnector.MockExchangeData{
 					ID: testFileName,
 					Reader: io.NopCloser(bytes.NewReader(testFileData)),
@@ -472,7 +472,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupTest() {
 		},
 		&kopiaDataCollection{
 			path: testPath2,
-			streams: []data.DataStream{
+			streams: []data.Stream{
 				&mockconnector.MockExchangeData{
 					ID: testFileName3,
 					Reader: io.NopCloser(bytes.NewReader(testFileData3)),
@@ -688,7 +688,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestRestoreMultipleItems() {
 	fp1 := append(p1, dc1.Names[0])
 	fp2 := append(p2, dc2.Names[0])
 
-	stats, _, err := w.BackupCollections(ctx, []data.DataCollection{dc1, dc2})
+	stats, _, err := w.BackupCollections(ctx, []data.Collection{dc1, dc2})
 	require.NoError(t, err)
 
 	expected := map[string][]byte{

View File

@@ -99,7 +99,7 @@ func (op *BackupOperation) Run(ctx context.Context) (err error) {
 		return errors.Wrap(err, "connecting to graph api")
 	}
 
-	var cs []data.DataCollection
+	var cs []data.Collection
 	cs, err = gc.ExchangeDataCollection(ctx, op.Selectors)
 	if err != nil {
 		stats.readErr = err

View File

@@ -69,7 +69,7 @@ func (op RestoreOperation) validate() error {
 // pointer wrapping the values, while those values
 // get populated asynchronously.
 type restoreStats struct {
-	cs []data.DataCollection
+	cs []data.Collection
 	gc *support.ConnectorOperationStatus
 	readErr, writeErr error
 }

View File

@@ -46,7 +46,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
 	stats = restoreStats{
 		readErr: multierror.Append(nil, assert.AnError),
 		writeErr: assert.AnError,
-		cs: []data.DataCollection{&connector.ExchangeDataCollection{}},
+		cs: []data.Collection{&connector.ExchangeDataCollection{}},
 		gc: &support.ConnectorOperationStatus{
 			ObjectCount: 1,
 		},