Split collection interface (#2415)

## Description

Split the collection interface into the parts used during backup and the parts used during restore. Does not change other code beyond fixing types.

## Does this PR need a docs update or release note?

- [ ]  Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x]  No 

## Type of change

- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Test
- [ ] 💻 CI/Deployment
- [x] 🧹 Tech Debt/Cleanup

## Issue(s)

* closes #1944

## Test Plan

- [ ] 💪 Manual
- [x]  Unit test
- [x] 💚 E2E
This commit is contained in:
ashmrtn 2023-02-07 14:15:48 -08:00 committed by GitHub
parent 7f2a8735ef
commit 373f0458a7
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
35 changed files with 240 additions and 218 deletions

View File

@ -152,8 +152,8 @@ func buildCollections(
tenant, user string, tenant, user string,
dest control.RestoreDestination, dest control.RestoreDestination,
colls []collection, colls []collection,
) ([]data.Collection, error) { ) ([]data.RestoreCollection, error) {
collections := make([]data.Collection, 0, len(colls)) collections := make([]data.RestoreCollection, 0, len(colls))
for _, c := range colls { for _, c := range colls {
pth, err := toDataLayerPath( pth, err := toDataLayerPath(

View File

@ -34,9 +34,9 @@ import (
func (gc *GraphConnector) DataCollections( func (gc *GraphConnector) DataCollections(
ctx context.Context, ctx context.Context,
sels selectors.Selector, sels selectors.Selector,
metadata []data.Collection, metadata []data.RestoreCollection,
ctrlOpts control.Options, ctrlOpts control.Options,
) ([]data.Collection, map[string]struct{}, error) { ) ([]data.BackupCollection, map[string]struct{}, error) {
ctx, end := D.Span(ctx, "gc:dataCollections", D.Index("service", sels.Service.String())) ctx, end := D.Span(ctx, "gc:dataCollections", D.Index("service", sels.Service.String()))
defer end() defer end()
@ -51,7 +51,7 @@ func (gc *GraphConnector) DataCollections(
} }
if !serviceEnabled { if !serviceEnabled {
return []data.Collection{}, nil, nil return []data.BackupCollection{}, nil, nil
} }
switch sels.Service { switch sels.Service {
@ -182,9 +182,9 @@ func (fm odFolderMatcher) Matches(dir string) bool {
func (gc *GraphConnector) OneDriveDataCollections( func (gc *GraphConnector) OneDriveDataCollections(
ctx context.Context, ctx context.Context,
selector selectors.Selector, selector selectors.Selector,
metadata []data.Collection, metadata []data.RestoreCollection,
ctrlOpts control.Options, ctrlOpts control.Options,
) ([]data.Collection, map[string]struct{}, error) { ) ([]data.BackupCollection, map[string]struct{}, error) {
odb, err := selector.ToOneDriveBackup() odb, err := selector.ToOneDriveBackup()
if err != nil { if err != nil {
return nil, nil, errors.Wrap(err, "oneDriveDataCollection: parsing selector") return nil, nil, errors.Wrap(err, "oneDriveDataCollection: parsing selector")
@ -192,7 +192,7 @@ func (gc *GraphConnector) OneDriveDataCollections(
var ( var (
user = selector.DiscreteOwner user = selector.DiscreteOwner
collections = []data.Collection{} collections = []data.BackupCollection{}
allExcludes = map[string]struct{}{} allExcludes = map[string]struct{}{}
errs error errs error
) )

View File

@ -63,7 +63,7 @@ type DeltaPath struct {
// and path lookup maps. // and path lookup maps.
func parseMetadataCollections( func parseMetadataCollections(
ctx context.Context, ctx context.Context,
colls []data.Collection, colls []data.RestoreCollection,
) (CatDeltaPaths, error) { ) (CatDeltaPaths, error) {
// cdp stores metadata // cdp stores metadata
cdp := CatDeltaPaths{ cdp := CatDeltaPaths{
@ -163,11 +163,11 @@ func parseMetadataCollections(
func DataCollections( func DataCollections(
ctx context.Context, ctx context.Context,
selector selectors.Selector, selector selectors.Selector,
metadata []data.Collection, metadata []data.RestoreCollection,
acct account.M365Config, acct account.M365Config,
su support.StatusUpdater, su support.StatusUpdater,
ctrlOpts control.Options, ctrlOpts control.Options,
) ([]data.Collection, map[string]struct{}, error) { ) ([]data.BackupCollection, map[string]struct{}, error) {
eb, err := selector.ToExchangeBackup() eb, err := selector.ToExchangeBackup()
if err != nil { if err != nil {
return nil, nil, errors.Wrap(err, "exchangeDataCollection: parsing selector") return nil, nil, errors.Wrap(err, "exchangeDataCollection: parsing selector")
@ -175,7 +175,7 @@ func DataCollections(
var ( var (
user = selector.DiscreteOwner user = selector.DiscreteOwner
collections = []data.Collection{} collections = []data.BackupCollection{}
errs error errs error
) )
@ -231,10 +231,10 @@ func createCollections(
dps DeltaPaths, dps DeltaPaths,
ctrlOpts control.Options, ctrlOpts control.Options,
su support.StatusUpdater, su support.StatusUpdater,
) ([]data.Collection, error) { ) ([]data.BackupCollection, error) {
var ( var (
errs *multierror.Error errs *multierror.Error
allCollections = make([]data.Collection, 0) allCollections = make([]data.BackupCollection, 0)
ac = api.Client{Credentials: creds} ac = api.Client{Credentials: creds}
category = scope.Category().PathType() category = scope.Category().PathType()
) )
@ -245,7 +245,7 @@ func createCollections(
} }
// Create collection of ExchangeDataCollection // Create collection of ExchangeDataCollection
collections := make(map[string]data.Collection) collections := make(map[string]data.BackupCollection)
qp := graph.QueryParams{ qp := graph.QueryParams{
Category: category, Category: category,

View File

@ -174,7 +174,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
) )
require.NoError(t, err) require.NoError(t, err)
cdps, err := parseMetadataCollections(ctx, []data.Collection{coll}) cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{coll})
test.expectError(t, err) test.expectError(t, err)
emails := cdps[path.EmailCategory] emails := cdps[path.EmailCategory]
@ -335,7 +335,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
require.NoError(t, err) require.NoError(t, err)
assert.Less(t, 1, len(collections), "retrieved metadata and data collections") assert.Less(t, 1, len(collections), "retrieved metadata and data collections")
var metadata data.Collection var metadata data.BackupCollection
for _, coll := range collections { for _, coll := range collections {
if coll.FullPath().Service() == path.ExchangeMetadataService { if coll.FullPath().Service() == path.ExchangeMetadataService {
@ -345,7 +345,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
require.NotNil(t, metadata, "collections contains a metadata collection") require.NotNil(t, metadata, "collections contains a metadata collection")
cdps, err := parseMetadataCollections(ctx, []data.Collection{metadata}) cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{metadata})
require.NoError(t, err) require.NoError(t, err)
dps := cdps[test.scope.Category().PathType()] dps := cdps[test.scope.Category().PathType()]

View File

@ -24,10 +24,10 @@ import (
) )
var ( var (
_ data.Collection = &Collection{} _ data.BackupCollection = &Collection{}
_ data.Stream = &Stream{} _ data.Stream = &Stream{}
_ data.StreamInfo = &Stream{} _ data.StreamInfo = &Stream{}
_ data.StreamModTime = &Stream{} _ data.StreamModTime = &Stream{}
) )
const ( const (

View File

@ -25,14 +25,14 @@ type addedAndRemovedItemIDsGetter interface {
// filterContainersAndFillCollections is a utility function // filterContainersAndFillCollections is a utility function
// that places the M365 object ids belonging to specific directories // that places the M365 object ids belonging to specific directories
// into a Collection. Messages outside of those directories are omitted. // into a BackupCollection. Messages outside of those directories are omitted.
// @param collection is filled with during this function. // @param collection is filled with during this function.
// Supports all exchange applications: Contacts, Events, and Mail // Supports all exchange applications: Contacts, Events, and Mail
func filterContainersAndFillCollections( func filterContainersAndFillCollections(
ctx context.Context, ctx context.Context,
qp graph.QueryParams, qp graph.QueryParams,
getter addedAndRemovedItemIDsGetter, getter addedAndRemovedItemIDsGetter,
collections map[string]data.Collection, collections map[string]data.BackupCollection,
statusUpdater support.StatusUpdater, statusUpdater support.StatusUpdater,
resolver graph.ContainerResolver, resolver graph.ContainerResolver,
scope selectors.ExchangeScope, scope selectors.ExchangeScope,

View File

@ -280,7 +280,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
collections := map[string]data.Collection{} collections := map[string]data.BackupCollection{}
err := filterContainersAndFillCollections( err := filterContainersAndFillCollections(
ctx, ctx,
@ -433,7 +433,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea
resolver = newMockResolver(container1) resolver = newMockResolver(container1)
) )
collections := map[string]data.Collection{} collections := map[string]data.BackupCollection{}
err := filterContainersAndFillCollections( err := filterContainersAndFillCollections(
ctx, ctx,
@ -785,7 +785,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incre
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
collections := map[string]data.Collection{} collections := map[string]data.BackupCollection{}
err := filterContainersAndFillCollections( err := filterContainersAndFillCollections(
ctx, ctx,

View File

@ -297,7 +297,7 @@ func SendMailToBackStore(
return errs return errs
} }
// RestoreExchangeDataCollections restores M365 objects in data.Collection to MSFT // RestoreExchangeDataCollections restores M365 objects in data.RestoreCollection to MSFT
// store through GraphAPI. // store through GraphAPI.
// @param dest: container destination to M365 // @param dest: container destination to M365
func RestoreExchangeDataCollections( func RestoreExchangeDataCollections(
@ -305,7 +305,7 @@ func RestoreExchangeDataCollections(
creds account.M365Config, creds account.M365Config,
gs graph.Servicer, gs graph.Servicer,
dest control.RestoreDestination, dest control.RestoreDestination,
dcs []data.Collection, dcs []data.RestoreCollection,
deets *details.Builder, deets *details.Builder,
) (*support.ConnectorOperationStatus, error) { ) (*support.ConnectorOperationStatus, error) {
var ( var (
@ -364,7 +364,7 @@ func RestoreExchangeDataCollections(
func restoreCollection( func restoreCollection(
ctx context.Context, ctx context.Context,
gs graph.Servicer, gs graph.Servicer,
dc data.Collection, dc data.RestoreCollection,
folderID string, folderID string,
policy control.CollisionPolicy, policy control.CollisionPolicy,
deets *details.Builder, deets *details.Builder,

View File

@ -14,8 +14,8 @@ import (
) )
var ( var (
_ data.Collection = &MetadataCollection{} _ data.BackupCollection = &MetadataCollection{}
_ data.Stream = &MetadataItem{} _ data.Stream = &MetadataItem{}
) )
// MetadataCollection is a simple collection that assumes all items to be // MetadataCollection is a simple collection that assumes all items to be
@ -67,7 +67,7 @@ func MakeMetadataCollection(
cat path.CategoryType, cat path.CategoryType,
metadata []MetadataCollectionEntry, metadata []MetadataCollectionEntry,
statusUpdater support.StatusUpdater, statusUpdater support.StatusUpdater,
) (data.Collection, error) { ) (data.BackupCollection, error) {
if len(metadata) == 0 { if len(metadata) == 0 {
return nil, nil return nil, nil
} }

View File

@ -271,7 +271,7 @@ func (gc *GraphConnector) RestoreDataCollections(
selector selectors.Selector, selector selectors.Selector,
dest control.RestoreDestination, dest control.RestoreDestination,
opts control.Options, opts control.Options,
dcs []data.Collection, dcs []data.RestoreCollection,
) (*details.Details, error) { ) (*details.Details, error) {
ctx, end := D.Span(ctx, "connector:restore") ctx, end := D.Span(ctx, "connector:restore")
defer end() defer end()

View File

@ -740,7 +740,7 @@ func compareItem(
func checkHasCollections( func checkHasCollections(
t *testing.T, t *testing.T,
expected map[string]map[string][]byte, expected map[string]map[string][]byte,
got []data.Collection, got []data.BackupCollection,
) { ) {
t.Helper() t.Helper()
@ -762,10 +762,10 @@ func checkCollections(
t *testing.T, t *testing.T,
expectedItems int, expectedItems int,
expected map[string]map[string][]byte, expected map[string]map[string][]byte,
got []data.Collection, got []data.BackupCollection,
restorePermissions bool, restorePermissions bool,
) int { ) int {
collectionsWithItems := []data.Collection{} collectionsWithItems := []data.BackupCollection{}
skipped := 0 skipped := 0
gotItems := 0 gotItems := 0
@ -950,8 +950,8 @@ func collectionsForInfo(
tenant, user string, tenant, user string,
dest control.RestoreDestination, dest control.RestoreDestination,
allInfo []colInfo, allInfo []colInfo,
) (int, int, []data.Collection, map[string]map[string][]byte) { ) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
collections := make([]data.Collection, 0, len(allInfo)) collections := make([]data.RestoreCollection, 0, len(allInfo))
expectedData := make(map[string]map[string][]byte, len(allInfo)) expectedData := make(map[string]map[string][]byte, len(allInfo))
totalItems := 0 totalItems := 0
kopiaEntries := 0 kopiaEntries := 0
@ -1002,8 +1002,8 @@ func collectionsForInfoVersion0(
tenant, user string, tenant, user string,
dest control.RestoreDestination, dest control.RestoreDestination,
allInfo []colInfo, allInfo []colInfo,
) (int, int, []data.Collection, map[string]map[string][]byte) { ) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
collections := make([]data.Collection, 0, len(allInfo)) collections := make([]data.RestoreCollection, 0, len(allInfo))
expectedData := make(map[string]map[string][]byte, len(allInfo)) expectedData := make(map[string]map[string][]byte, len(allInfo))
totalItems := 0 totalItems := 0
kopiaEntries := 0 kopiaEntries := 0

View File

@ -257,7 +257,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
dest := tester.DefaultTestRestoreDestination() dest := tester.DefaultTestRestoreDestination()
table := []struct { table := []struct {
name string name string
col []data.Collection col []data.RestoreCollection
sel selectors.Selector sel selectors.Selector
}{ }{
{ {
@ -269,7 +269,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
}, },
{ {
name: "ExchangeEmpty", name: "ExchangeEmpty",
col: []data.Collection{}, col: []data.RestoreCollection{},
sel: selectors.Selector{ sel: selectors.Selector{
Service: selectors.ServiceExchange, Service: selectors.ServiceExchange,
}, },
@ -283,7 +283,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
}, },
{ {
name: "OneDriveEmpty", name: "OneDriveEmpty",
col: []data.Collection{}, col: []data.RestoreCollection{},
sel: selectors.Selector{ sel: selectors.Selector{
Service: selectors.ServiceOneDrive, Service: selectors.ServiceOneDrive,
}, },
@ -297,7 +297,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
}, },
{ {
name: "SharePointEmpty", name: "SharePointEmpty",
col: []data.Collection{}, col: []data.RestoreCollection{},
sel: selectors.Selector{ sel: selectors.Selector{
Service: selectors.ServiceSharePoint, Service: selectors.ServiceSharePoint,
}, },
@ -370,7 +370,7 @@ func runRestoreBackupTest(
opts control.Options, opts control.Options,
) { ) {
var ( var (
collections []data.Collection collections []data.RestoreCollection
expectedData = map[string]map[string][]byte{} expectedData = map[string]map[string][]byte{}
totalItems = 0 totalItems = 0
totalKopiaItems = 0 totalKopiaItems = 0
@ -495,7 +495,7 @@ func runRestoreBackupTestVersion0(
opts control.Options, opts control.Options,
) { ) {
var ( var (
collections []data.Collection collections []data.RestoreCollection
expectedData = map[string]map[string][]byte{} expectedData = map[string]map[string][]byte{}
totalItems = 0 totalItems = 0
totalKopiaItems = 0 totalKopiaItems = 0

View File

@ -27,10 +27,13 @@ type MockExchangeDataCollection struct {
} }
var ( var (
_ data.Collection = &MockExchangeDataCollection{} // Needs to implement both backup and restore interfaces so we can use it in
_ data.Stream = &MockExchangeData{} // integration tests.
_ data.StreamInfo = &MockExchangeData{} _ data.BackupCollection = &MockExchangeDataCollection{}
_ data.StreamSize = &MockExchangeData{} _ data.RestoreCollection = &MockExchangeDataCollection{}
_ data.Stream = &MockExchangeData{}
_ data.StreamInfo = &MockExchangeData{}
_ data.StreamSize = &MockExchangeData{}
) )
// NewMockExchangeDataCollection creates a data collection that will return the specified number of // NewMockExchangeDataCollection creates a data collection that will return the specified number of

View File

@ -14,8 +14,8 @@ import (
) )
var ( var (
_ data.Stream = &MockListData{} _ data.Stream = &MockListData{}
_ data.Collection = &MockListCollection{} _ data.BackupCollection = &MockListCollection{}
) )
type MockListCollection struct { type MockListCollection struct {

View File

@ -42,10 +42,10 @@ const (
) )
var ( var (
_ data.Collection = &Collection{} _ data.BackupCollection = &Collection{}
_ data.Stream = &Item{} _ data.Stream = &Item{}
_ data.StreamInfo = &Item{} _ data.StreamInfo = &Item{}
_ data.StreamModTime = &Item{} _ data.StreamModTime = &Item{}
) )
// Collection represents a set of OneDrive objects retrieved from M365 // Collection represents a set of OneDrive objects retrieved from M365

View File

@ -61,9 +61,9 @@ type Collections struct {
ctrl control.Options ctrl control.Options
// collectionMap allows lookup of the data.Collection // collectionMap allows lookup of the data.BackupCollection
// for a OneDrive folder // for a OneDrive folder
CollectionMap map[string]data.Collection CollectionMap map[string]data.BackupCollection
// Not the most ideal, but allows us to change the pager function for testing // Not the most ideal, but allows us to change the pager function for testing
// as needed. This will allow us to mock out some scenarios during testing. // as needed. This will allow us to mock out some scenarios during testing.
@ -100,7 +100,7 @@ func NewCollections(
resourceOwner: resourceOwner, resourceOwner: resourceOwner,
source: source, source: source,
matcher: matcher, matcher: matcher,
CollectionMap: map[string]data.Collection{}, CollectionMap: map[string]data.BackupCollection{},
drivePagerFunc: PagerForSource, drivePagerFunc: PagerForSource,
itemPagerFunc: defaultItemPager, itemPagerFunc: defaultItemPager,
service: service, service: service,
@ -111,7 +111,7 @@ func NewCollections(
func deserializeMetadata( func deserializeMetadata(
ctx context.Context, ctx context.Context,
cols []data.Collection, cols []data.RestoreCollection,
) (map[string]string, map[string]map[string]string, error) { ) (map[string]string, map[string]map[string]string, error) {
logger.Ctx(ctx).Infow( logger.Ctx(ctx).Infow(
"deserialzing previous backup metadata", "deserialzing previous backup metadata",
@ -249,8 +249,8 @@ func deserializeMap[T any](reader io.ReadCloser, alreadyFound map[string]T) erro
// be excluded from the upcoming backup. // be excluded from the upcoming backup.
func (c *Collections) Get( func (c *Collections) Get(
ctx context.Context, ctx context.Context,
prevMetadata []data.Collection, prevMetadata []data.RestoreCollection,
) ([]data.Collection, map[string]struct{}, error) { ) ([]data.BackupCollection, map[string]struct{}, error) {
prevDeltas, _, err := deserializeMetadata(ctx, prevMetadata) prevDeltas, _, err := deserializeMetadata(ctx, prevMetadata)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
@ -327,7 +327,7 @@ func (c *Collections) Get(
observe.Message(ctx, observe.Safe(fmt.Sprintf("Discovered %d items to backup", c.NumItems))) observe.Message(ctx, observe.Safe(fmt.Sprintf("Discovered %d items to backup", c.NumItems)))
// Add an extra for the metadata collection. // Add an extra for the metadata collection.
collections := make([]data.Collection, 0, len(c.CollectionMap)+1) collections := make([]data.BackupCollection, 0, len(c.CollectionMap)+1)
for _, coll := range c.CollectionMap { for _, coll := range c.CollectionMap {
collections = append(collections, coll) collections = append(collections, coll)
} }

View File

@ -983,7 +983,7 @@ func (suite *OneDriveCollectionsSuite) TestDeserializeMetadata() {
ctx, flush := tester.NewContext() ctx, flush := tester.NewContext()
defer flush() defer flush()
cols := []data.Collection{} cols := []data.RestoreCollection{}
for _, c := range test.cols { for _, c := range test.cols {
mc, err := graph.MakeMetadataCollection( mc, err := graph.MakeMetadataCollection(
@ -1529,7 +1529,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
for _, baseCol := range cols { for _, baseCol := range cols {
folderPath := baseCol.FullPath().String() folderPath := baseCol.FullPath().String()
if folderPath == metadataPath.String() { if folderPath == metadataPath.String() {
deltas, paths, err := deserializeMetadata(ctx, []data.Collection{baseCol}) deltas, paths, err := deserializeMetadata(ctx, []data.RestoreCollection{baseCol})
if !assert.NoError(t, err, "deserializing metadata") { if !assert.NoError(t, err, "deserializing metadata") {
continue continue
} }

View File

@ -64,7 +64,7 @@ func RestoreCollections(
service graph.Servicer, service graph.Servicer,
dest control.RestoreDestination, dest control.RestoreDestination,
opts control.Options, opts control.Options,
dcs []data.Collection, dcs []data.RestoreCollection,
deets *details.Builder, deets *details.Builder,
) (*support.ConnectorOperationStatus, error) { ) (*support.ConnectorOperationStatus, error) {
var ( var (
@ -148,7 +148,7 @@ func RestoreCollection(
ctx context.Context, ctx context.Context,
backupVersion int, backupVersion int,
service graph.Servicer, service graph.Servicer,
dc data.Collection, dc data.RestoreCollection,
parentPerms []UserPermission, parentPerms []UserPermission,
source driveSource, source driveSource,
restoreContainerName string, restoreContainerName string,

View File

@ -30,10 +30,10 @@ const (
) )
var ( var (
_ data.Collection = &Collection{} _ data.BackupCollection = &Collection{}
_ data.Stream = &Item{} _ data.Stream = &Item{}
_ data.StreamInfo = &Item{} _ data.StreamInfo = &Item{}
_ data.StreamModTime = &Item{} _ data.StreamModTime = &Item{}
) )
// Collection is the SharePoint.List implementation of data.Collection. SharePoint.Libraries collections are supported // Collection is the SharePoint.List implementation of data.Collection. SharePoint.Libraries collections are supported

View File

@ -34,7 +34,7 @@ func DataCollections(
serv graph.Servicer, serv graph.Servicer,
su statusUpdater, su statusUpdater,
ctrlOpts control.Options, ctrlOpts control.Options,
) ([]data.Collection, map[string]struct{}, error) { ) ([]data.BackupCollection, map[string]struct{}, error) {
b, err := selector.ToSharePointBackup() b, err := selector.ToSharePointBackup()
if err != nil { if err != nil {
return nil, nil, errors.Wrap(err, "sharePointDataCollection: parsing selector") return nil, nil, errors.Wrap(err, "sharePointDataCollection: parsing selector")
@ -42,7 +42,7 @@ func DataCollections(
var ( var (
site = b.DiscreteOwner site = b.DiscreteOwner
collections = []data.Collection{} collections = []data.BackupCollection{}
errs error errs error
) )
@ -54,7 +54,7 @@ func DataCollections(
defer closer() defer closer()
defer close(foldersComplete) defer close(foldersComplete)
var spcs []data.Collection var spcs []data.BackupCollection
switch scope.Category().PathType() { switch scope.Category().PathType() {
case path.ListsCategory: case path.ListsCategory:
@ -97,10 +97,10 @@ func collectLists(
tenantID, siteID string, tenantID, siteID string,
updater statusUpdater, updater statusUpdater,
ctrlOpts control.Options, ctrlOpts control.Options,
) ([]data.Collection, error) { ) ([]data.BackupCollection, error) {
logger.Ctx(ctx).With("site", siteID).Debug("Creating SharePoint List Collections") logger.Ctx(ctx).With("site", siteID).Debug("Creating SharePoint List Collections")
spcs := make([]data.Collection, 0) spcs := make([]data.BackupCollection, 0)
tuples, err := preFetchLists(ctx, serv, siteID) tuples, err := preFetchLists(ctx, serv, siteID)
if err != nil { if err != nil {
@ -137,9 +137,9 @@ func collectLibraries(
scope selectors.SharePointScope, scope selectors.SharePointScope,
updater statusUpdater, updater statusUpdater,
ctrlOpts control.Options, ctrlOpts control.Options,
) ([]data.Collection, map[string]struct{}, error) { ) ([]data.BackupCollection, map[string]struct{}, error) {
var ( var (
collections = []data.Collection{} collections = []data.BackupCollection{}
errs error errs error
) )
@ -175,10 +175,10 @@ func collectPages(
scope selectors.SharePointScope, scope selectors.SharePointScope,
updater statusUpdater, updater statusUpdater,
ctrlOpts control.Options, ctrlOpts control.Options,
) ([]data.Collection, error) { ) ([]data.BackupCollection, error) {
logger.Ctx(ctx).With("site", siteID).Debug("Creating SharePoint Pages collections") logger.Ctx(ctx).With("site", siteID).Debug("Creating SharePoint Pages collections")
spcs := make([]data.Collection, 0) spcs := make([]data.BackupCollection, 0)
// make the betaClient // make the betaClient
adpt, err := graph.CreateAdapter(creds.AzureTenantID, creds.AzureClientID, creds.AzureClientSecret) adpt, err := graph.CreateAdapter(creds.AzureTenantID, creds.AzureClientID, creds.AzureClientSecret)

View File

@ -30,7 +30,7 @@ import (
// -- Switch: // -- Switch:
// ---- Libraries restored via the same workflow as oneDrive // ---- Libraries restored via the same workflow as oneDrive
// ---- Lists call RestoreCollection() // ---- Lists call RestoreCollection()
// ----> for each data.Stream within Collection.Items() // ----> for each data.Stream within RestoreCollection.Items()
// ----> restoreListItems() is called // ----> restoreListItems() is called
// Restored List can be found in the Site's `Site content` page // Restored List can be found in the Site's `Site content` page
// Restored Libraries can be found within the Site's `Pages` page // Restored Libraries can be found within the Site's `Pages` page
@ -43,7 +43,7 @@ func RestoreCollections(
creds account.M365Config, creds account.M365Config,
service graph.Servicer, service graph.Servicer,
dest control.RestoreDestination, dest control.RestoreDestination,
dcs []data.Collection, dcs []data.RestoreCollection,
deets *details.Builder, deets *details.Builder,
) (*support.ConnectorOperationStatus, error) { ) (*support.ConnectorOperationStatus, error) {
var ( var (
@ -219,7 +219,7 @@ func restoreListItem(
func RestoreListCollection( func RestoreListCollection(
ctx context.Context, ctx context.Context,
service graph.Servicer, service graph.Servicer,
dc data.Collection, dc data.RestoreCollection,
restoreContainerName string, restoreContainerName string,
deets *details.Builder, deets *details.Builder,
errUpdater func(string, error), errUpdater func(string, error),
@ -291,7 +291,7 @@ func RestoreListCollection(
func RestorePageCollection( func RestorePageCollection(
ctx context.Context, ctx context.Context,
creds account.M365Config, creds account.M365Config,
dc data.Collection, dc data.RestoreCollection,
restoreContainerName string, restoreContainerName string,
deets *details.Builder, deets *details.Builder,
errUpdater func(string, error), errUpdater func(string, error),

View File

@ -21,8 +21,8 @@ const (
DeletedState DeletedState
) )
// A Collection represents a compilation of data from the // A Collection represents the set of data within a single logical location
// same type application (e.g. mail) // denoted by FullPath.
type Collection interface { type Collection interface {
// Items returns a channel from which items in the collection can be read. // Items returns a channel from which items in the collection can be read.
// Each returned struct contains the next item in the collection // Each returned struct contains the next item in the collection
@ -30,10 +30,13 @@ type Collection interface {
// an unrecoverable error caused an early termination in the sender. // an unrecoverable error caused an early termination in the sender.
Items() <-chan Stream Items() <-chan Stream
// FullPath returns a path struct that acts as a metadata tag for this // FullPath returns a path struct that acts as a metadata tag for this
// DataCollection. Returned items should be ordered from most generic to least // Collection.
// generic. For example, a DataCollection for emails from a specific user
// would be {"<tenant id>", "exchange", "<user ID>", "emails"}.
FullPath() path.Path FullPath() path.Path
}
// BackupCollection is an extension of Collection that is used during backups.
type BackupCollection interface {
Collection
// PreviousPath returns the path.Path this collection used to reside at // PreviousPath returns the path.Path this collection used to reside at
// (according to the M365 ID for the container) if the collection was moved or // (according to the M365 ID for the container) if the collection was moved or
// renamed. Returns nil if the collection is new. // renamed. Returns nil if the collection is new.
@ -58,6 +61,11 @@ type Collection interface {
DoNotMergeItems() bool DoNotMergeItems() bool
} }
// RestoreCollection is an extension of Collection that is used during restores.
type RestoreCollection interface {
Collection
}
// Stream represents a single item within a Collection // Stream represents a single item within a Collection
// that can be consumed as a stream (it embeds io.Reader) // that can be consumed as a stream (it embeds io.Reader)
type Stream interface { type Stream interface {

View File

@ -8,8 +8,8 @@ import (
) )
var ( var (
_ data.Collection = &kopiaDataCollection{} _ data.RestoreCollection = &kopiaDataCollection{}
_ data.Stream = &kopiaDataStream{} _ data.Stream = &kopiaDataStream{}
) )
type kopiaDataCollection struct { type kopiaDataCollection struct {
@ -35,18 +35,6 @@ func (kdc kopiaDataCollection) FullPath() path.Path {
return kdc.path return kdc.path
} }
func (kdc kopiaDataCollection) PreviousPath() path.Path {
return nil
}
func (kdc kopiaDataCollection) State() data.CollectionState {
return data.NewState
}
func (kdc kopiaDataCollection) DoNotMergeItems() bool {
return false
}
type kopiaDataStream struct { type kopiaDataStream struct {
reader io.ReadCloser reader io.ReadCloser
uuid string uuid string

View File

@ -254,7 +254,7 @@ func (cp *corsoProgress) get(k string) *itemDetails {
func collectionEntries( func collectionEntries(
ctx context.Context, ctx context.Context,
cb func(context.Context, fs.Entry) error, cb func(context.Context, fs.Entry) error,
streamedEnts data.Collection, streamedEnts data.BackupCollection,
progress *corsoProgress, progress *corsoProgress,
) (map[string]struct{}, *multierror.Error) { ) (map[string]struct{}, *multierror.Error) {
if streamedEnts == nil { if streamedEnts == nil {
@ -442,7 +442,7 @@ func getStreamItemFunc(
curPath path.Path, curPath path.Path,
prevPath path.Path, prevPath path.Path,
staticEnts []fs.Entry, staticEnts []fs.Entry,
streamedEnts data.Collection, streamedEnts data.BackupCollection,
baseDir fs.Directory, baseDir fs.Directory,
globalExcludeSet map[string]struct{}, globalExcludeSet map[string]struct{},
progress *corsoProgress, progress *corsoProgress,
@ -540,7 +540,7 @@ type treeMap struct {
childDirs map[string]*treeMap childDirs map[string]*treeMap
// Reference to data pulled from the external service. Contains only items in // Reference to data pulled from the external service. Contains only items in
// this directory. Does not contain references to subdirectories. // this directory. Does not contain references to subdirectories.
collection data.Collection collection data.BackupCollection
// Reference to directory in base snapshot. The referenced directory itself // Reference to directory in base snapshot. The referenced directory itself
// may contain files and subdirectories, but the subdirectories should // may contain files and subdirectories, but the subdirectories should
// eventually be added when walking the base snapshot to build the hierarchy, // eventually be added when walking the base snapshot to build the hierarchy,
@ -617,7 +617,7 @@ func getTreeNode(roots map[string]*treeMap, pathElements []string) *treeMap {
func inflateCollectionTree( func inflateCollectionTree(
ctx context.Context, ctx context.Context,
collections []data.Collection, collections []data.BackupCollection,
) (map[string]*treeMap, map[string]path.Path, error) { ) (map[string]*treeMap, map[string]path.Path, error) {
roots := make(map[string]*treeMap) roots := make(map[string]*treeMap)
// Contains the old path for collections that have been moved or renamed. // Contains the old path for collections that have been moved or renamed.
@ -911,13 +911,13 @@ func inflateBaseTree(
// exclude from base directories when uploading the snapshot. As items in *all* // exclude from base directories when uploading the snapshot. As items in *all*
// base directories will be checked for in every base directory, this assumes // base directories will be checked for in every base directory, this assumes
// that items in the bases are unique. Deletions of directories or subtrees // that items in the bases are unique. Deletions of directories or subtrees
// should be represented as changes in the status of a Collection, not an entry // should be represented as changes in the status of a BackupCollection, not an
// in the globalExcludeSet. // entry in the globalExcludeSet.
func inflateDirTree( func inflateDirTree(
ctx context.Context, ctx context.Context,
loader snapshotLoader, loader snapshotLoader,
baseSnaps []IncrementalBase, baseSnaps []IncrementalBase,
collections []data.Collection, collections []data.BackupCollection,
globalExcludeSet map[string]struct{}, globalExcludeSet map[string]struct{},
progress *corsoProgress, progress *corsoProgress,
) (fs.Directory, error) { ) (fs.Directory, error) {

View File

@ -683,7 +683,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree() {
progress := &corsoProgress{pending: map[string]*itemDetails{}} progress := &corsoProgress{pending: map[string]*itemDetails{}}
collections := []data.Collection{ collections := []data.BackupCollection{
mockconnector.NewMockExchangeCollection( mockconnector.NewMockExchangeCollection(
suite.testPath, suite.testPath,
expectedFileCount[user1Encoded], expectedFileCount[user1Encoded],
@ -759,11 +759,11 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_MixedDirectory()
// - 42 separate files // - 42 separate files
table := []struct { table := []struct {
name string name string
layout []data.Collection layout []data.BackupCollection
}{ }{
{ {
name: "SubdirFirst", name: "SubdirFirst",
layout: []data.Collection{ layout: []data.BackupCollection{
mockconnector.NewMockExchangeCollection( mockconnector.NewMockExchangeCollection(
p2, p2,
5, 5,
@ -776,7 +776,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_MixedDirectory()
}, },
{ {
name: "SubdirLast", name: "SubdirLast",
layout: []data.Collection{ layout: []data.BackupCollection{
mockconnector.NewMockExchangeCollection( mockconnector.NewMockExchangeCollection(
suite.testPath, suite.testPath,
42, 42,
@ -845,7 +845,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_Fails() {
table := []struct { table := []struct {
name string name string
layout []data.Collection layout []data.BackupCollection
}{ }{
{ {
"MultipleRoots", "MultipleRoots",
@ -862,7 +862,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_Fails() {
// - emails // - emails
// - Inbox // - Inbox
// - 42 separate files // - 42 separate files
[]data.Collection{ []data.BackupCollection{
mockconnector.NewMockExchangeCollection( mockconnector.NewMockExchangeCollection(
suite.testPath, suite.testPath,
5, 5,
@ -875,7 +875,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_Fails() {
}, },
{ {
"NoCollectionPath", "NoCollectionPath",
[]data.Collection{ []data.BackupCollection{
mockconnector.NewMockExchangeCollection( mockconnector.NewMockExchangeCollection(
nil, nil,
5, 5,
@ -973,7 +973,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeErrors() {
progress := &corsoProgress{pending: map[string]*itemDetails{}} progress := &corsoProgress{pending: map[string]*itemDetails{}}
cols := []data.Collection{} cols := []data.BackupCollection{}
for _, s := range test.states { for _, s := range test.states {
prevPath := dirPath prevPath := dirPath
nowPath := dirPath nowPath := dirPath
@ -1037,17 +1037,17 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
table := []struct { table := []struct {
name string name string
inputCollections func() []data.Collection inputCollections func() []data.BackupCollection
expected *expectedNode expected *expectedNode
}{ }{
{ {
name: "SkipsDeletedItems", name: "SkipsDeletedItems",
inputCollections: func() []data.Collection { inputCollections: func() []data.BackupCollection {
mc := mockconnector.NewMockExchangeCollection(dirPath, 1) mc := mockconnector.NewMockExchangeCollection(dirPath, 1)
mc.Names[0] = testFileName mc.Names[0] = testFileName
mc.DeletedItems[0] = true mc.DeletedItems[0] = true
return []data.Collection{mc} return []data.BackupCollection{mc}
}, },
expected: expectedTreeWithChildren( expected: expectedTreeWithChildren(
[]string{ []string{
@ -1066,13 +1066,13 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
}, },
{ {
name: "AddsNewItems", name: "AddsNewItems",
inputCollections: func() []data.Collection { inputCollections: func() []data.BackupCollection {
mc := mockconnector.NewMockExchangeCollection(dirPath, 1) mc := mockconnector.NewMockExchangeCollection(dirPath, 1)
mc.Names[0] = testFileName2 mc.Names[0] = testFileName2
mc.Data[0] = testFileData2 mc.Data[0] = testFileData2
mc.ColState = data.NotMovedState mc.ColState = data.NotMovedState
return []data.Collection{mc} return []data.BackupCollection{mc}
}, },
expected: expectedTreeWithChildren( expected: expectedTreeWithChildren(
[]string{ []string{
@ -1101,13 +1101,13 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
}, },
{ {
name: "SkipsUpdatedItems", name: "SkipsUpdatedItems",
inputCollections: func() []data.Collection { inputCollections: func() []data.BackupCollection {
mc := mockconnector.NewMockExchangeCollection(dirPath, 1) mc := mockconnector.NewMockExchangeCollection(dirPath, 1)
mc.Names[0] = testFileName mc.Names[0] = testFileName
mc.Data[0] = testFileData2 mc.Data[0] = testFileData2
mc.ColState = data.NotMovedState mc.ColState = data.NotMovedState
return []data.Collection{mc} return []data.BackupCollection{mc}
}, },
expected: expectedTreeWithChildren( expected: expectedTreeWithChildren(
[]string{ []string{
@ -1132,7 +1132,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
}, },
{ {
name: "DeleteAndNew", name: "DeleteAndNew",
inputCollections: func() []data.Collection { inputCollections: func() []data.BackupCollection {
mc1 := mockconnector.NewMockExchangeCollection(dirPath, 0) mc1 := mockconnector.NewMockExchangeCollection(dirPath, 0)
mc1.ColState = data.DeletedState mc1.ColState = data.DeletedState
mc1.PrevPath = dirPath mc1.PrevPath = dirPath
@ -1142,7 +1142,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
mc2.Names[0] = testFileName2 mc2.Names[0] = testFileName2
mc2.Data[0] = testFileData2 mc2.Data[0] = testFileData2
return []data.Collection{mc1, mc2} return []data.BackupCollection{mc1, mc2}
}, },
expected: expectedTreeWithChildren( expected: expectedTreeWithChildren(
[]string{ []string{
@ -1167,7 +1167,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
}, },
{ {
name: "MovedAndNew", name: "MovedAndNew",
inputCollections: func() []data.Collection { inputCollections: func() []data.BackupCollection {
mc1 := mockconnector.NewMockExchangeCollection(dirPath2, 0) mc1 := mockconnector.NewMockExchangeCollection(dirPath2, 0)
mc1.ColState = data.MovedState mc1.ColState = data.MovedState
mc1.PrevPath = dirPath mc1.PrevPath = dirPath
@ -1177,7 +1177,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
mc2.Names[0] = testFileName2 mc2.Names[0] = testFileName2
mc2.Data[0] = testFileData2 mc2.Data[0] = testFileData2
return []data.Collection{mc1, mc2} return []data.BackupCollection{mc1, mc2}
}, },
expected: expectedTreeWithChildren( expected: expectedTreeWithChildren(
[]string{ []string{
@ -1211,13 +1211,13 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
}, },
{ {
name: "NewDoesntMerge", name: "NewDoesntMerge",
inputCollections: func() []data.Collection { inputCollections: func() []data.BackupCollection {
mc1 := mockconnector.NewMockExchangeCollection(dirPath, 1) mc1 := mockconnector.NewMockExchangeCollection(dirPath, 1)
mc1.ColState = data.NewState mc1.ColState = data.NewState
mc1.Names[0] = testFileName2 mc1.Names[0] = testFileName2
mc1.Data[0] = testFileData2 mc1.Data[0] = testFileData2
return []data.Collection{mc1} return []data.BackupCollection{mc1}
}, },
expected: expectedTreeWithChildren( expected: expectedTreeWithChildren(
[]string{ []string{
@ -1369,13 +1369,13 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
table := []struct { table := []struct {
name string name string
inputCollections func(t *testing.T) []data.Collection inputCollections func(t *testing.T) []data.BackupCollection
inputExcludes map[string]struct{} inputExcludes map[string]struct{}
expected *expectedNode expected *expectedNode
}{ }{
{ {
name: "GlobalExcludeSet", name: "GlobalExcludeSet",
inputCollections: func(t *testing.T) []data.Collection { inputCollections: func(t *testing.T) []data.BackupCollection {
return nil return nil
}, },
inputExcludes: map[string]struct{}{ inputExcludes: map[string]struct{}{
@ -1417,7 +1417,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
}, },
{ {
name: "MovesSubtree", name: "MovesSubtree",
inputCollections: func(t *testing.T) []data.Collection { inputCollections: func(t *testing.T) []data.BackupCollection {
newPath := makePath( newPath := makePath(
t, t,
[]string{testTenant, service, testUser, category, testInboxDir + "2"}, []string{testTenant, service, testUser, category, testInboxDir + "2"},
@ -1428,7 +1428,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
mc.PrevPath = inboxPath mc.PrevPath = inboxPath
mc.ColState = data.MovedState mc.ColState = data.MovedState
return []data.Collection{mc} return []data.BackupCollection{mc}
}, },
expected: expectedTreeWithChildren( expected: expectedTreeWithChildren(
[]string{ []string{
@ -1474,7 +1474,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
}, },
{ {
name: "MovesChildAfterAncestorMove", name: "MovesChildAfterAncestorMove",
inputCollections: func(t *testing.T) []data.Collection { inputCollections: func(t *testing.T) []data.BackupCollection {
newInboxPath := makePath( newInboxPath := makePath(
t, t,
[]string{testTenant, service, testUser, category, testInboxDir + "2"}, []string{testTenant, service, testUser, category, testInboxDir + "2"},
@ -1494,7 +1494,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
work.PrevPath = workPath work.PrevPath = workPath
work.ColState = data.MovedState work.ColState = data.MovedState
return []data.Collection{inbox, work} return []data.BackupCollection{inbox, work}
}, },
expected: expectedTreeWithChildren( expected: expectedTreeWithChildren(
[]string{ []string{
@ -1540,7 +1540,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
}, },
{ {
name: "MovesChildAfterAncestorDelete", name: "MovesChildAfterAncestorDelete",
inputCollections: func(t *testing.T) []data.Collection { inputCollections: func(t *testing.T) []data.BackupCollection {
newWorkPath := makePath( newWorkPath := makePath(
t, t,
[]string{testTenant, service, testUser, category, workDir}, []string{testTenant, service, testUser, category, workDir},
@ -1555,7 +1555,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
work.PrevPath = workPath work.PrevPath = workPath
work.ColState = data.MovedState work.ColState = data.MovedState
return []data.Collection{inbox, work} return []data.BackupCollection{inbox, work}
}, },
expected: expectedTreeWithChildren( expected: expectedTreeWithChildren(
[]string{ []string{
@ -1579,7 +1579,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
}, },
{ {
name: "ReplaceDeletedDirectory", name: "ReplaceDeletedDirectory",
inputCollections: func(t *testing.T) []data.Collection { inputCollections: func(t *testing.T) []data.BackupCollection {
personal := mockconnector.NewMockExchangeCollection(personalPath, 0) personal := mockconnector.NewMockExchangeCollection(personalPath, 0)
personal.PrevPath = personalPath personal.PrevPath = personalPath
personal.ColState = data.DeletedState personal.ColState = data.DeletedState
@ -1588,7 +1588,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
work.PrevPath = workPath work.PrevPath = workPath
work.ColState = data.MovedState work.ColState = data.MovedState
return []data.Collection{personal, work} return []data.BackupCollection{personal, work}
}, },
expected: expectedTreeWithChildren( expected: expectedTreeWithChildren(
[]string{ []string{
@ -1620,7 +1620,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
}, },
{ {
name: "ReplaceDeletedDirectoryWithNew", name: "ReplaceDeletedDirectoryWithNew",
inputCollections: func(t *testing.T) []data.Collection { inputCollections: func(t *testing.T) []data.BackupCollection {
personal := mockconnector.NewMockExchangeCollection(personalPath, 0) personal := mockconnector.NewMockExchangeCollection(personalPath, 0)
personal.PrevPath = personalPath personal.PrevPath = personalPath
personal.ColState = data.DeletedState personal.ColState = data.DeletedState
@ -1630,7 +1630,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
newCol.Names[0] = workFileName2 newCol.Names[0] = workFileName2
newCol.Data[0] = workFileData2 newCol.Data[0] = workFileData2
return []data.Collection{personal, newCol} return []data.BackupCollection{personal, newCol}
}, },
expected: expectedTreeWithChildren( expected: expectedTreeWithChildren(
[]string{ []string{
@ -1671,7 +1671,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
}, },
{ {
name: "ReplaceMovedDirectory", name: "ReplaceMovedDirectory",
inputCollections: func(t *testing.T) []data.Collection { inputCollections: func(t *testing.T) []data.BackupCollection {
newPersonalPath := makePath( newPersonalPath := makePath(
t, t,
[]string{testTenant, service, testUser, category, personalDir}, []string{testTenant, service, testUser, category, personalDir},
@ -1686,7 +1686,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
work.PrevPath = workPath work.PrevPath = workPath
work.ColState = data.MovedState work.ColState = data.MovedState
return []data.Collection{personal, work} return []data.BackupCollection{personal, work}
}, },
expected: expectedTreeWithChildren( expected: expectedTreeWithChildren(
[]string{ []string{
@ -1729,7 +1729,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
}, },
{ {
name: "MoveDirectoryAndMergeItems", name: "MoveDirectoryAndMergeItems",
inputCollections: func(t *testing.T) []data.Collection { inputCollections: func(t *testing.T) []data.BackupCollection {
newPersonalPath := makePath( newPersonalPath := makePath(
t, t,
[]string{testTenant, service, testUser, category, workDir}, []string{testTenant, service, testUser, category, workDir},
@ -1744,7 +1744,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
personal.Names[1] = testFileName4 personal.Names[1] = testFileName4
personal.Data[1] = testFileData4 personal.Data[1] = testFileData4
return []data.Collection{personal} return []data.BackupCollection{personal}
}, },
expected: expectedTreeWithChildren( expected: expectedTreeWithChildren(
[]string{ []string{
@ -1793,7 +1793,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
}, },
{ {
name: "MoveParentDeleteFileNoMergeSubtreeMerge", name: "MoveParentDeleteFileNoMergeSubtreeMerge",
inputCollections: func(t *testing.T) []data.Collection { inputCollections: func(t *testing.T) []data.BackupCollection {
newInboxPath := makePath( newInboxPath := makePath(
t, t,
[]string{testTenant, service, testUser, category, personalDir}, []string{testTenant, service, testUser, category, personalDir},
@ -1824,7 +1824,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
work.Names[0] = testFileName6 work.Names[0] = testFileName6
work.Data[0] = testFileData6 work.Data[0] = testFileData6
return []data.Collection{inbox, work} return []data.BackupCollection{inbox, work}
}, },
expected: expectedTreeWithChildren( expected: expectedTreeWithChildren(
[]string{ []string{
@ -1876,7 +1876,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
}, },
{ {
name: "NoMoveParentDeleteFileNoMergeSubtreeMerge", name: "NoMoveParentDeleteFileNoMergeSubtreeMerge",
inputCollections: func(t *testing.T) []data.Collection { inputCollections: func(t *testing.T) []data.BackupCollection {
inbox := mockconnector.NewMockExchangeCollection(inboxPath, 1) inbox := mockconnector.NewMockExchangeCollection(inboxPath, 1)
inbox.PrevPath = inboxPath inbox.PrevPath = inboxPath
inbox.ColState = data.NotMovedState inbox.ColState = data.NotMovedState
@ -1892,7 +1892,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
work.Names[0] = testFileName6 work.Names[0] = testFileName6
work.Data[0] = testFileData6 work.Data[0] = testFileData6
return []data.Collection{inbox, work} return []data.BackupCollection{inbox, work}
}, },
expected: expectedTreeWithChildren( expected: expectedTreeWithChildren(
[]string{ []string{
@ -2105,7 +2105,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre
snapshotRoot: getBaseSnapshot(), snapshotRoot: getBaseSnapshot(),
} }
collections := []data.Collection{mc} collections := []data.BackupCollection{mc}
// Returned directory structure should look like: // Returned directory structure should look like:
// - a-tenant // - a-tenant
@ -2361,7 +2361,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt
}, },
} }
collections := []data.Collection{mc} collections := []data.BackupCollection{mc}
dirTree, err := inflateDirTree( dirTree, err := inflateDirTree(
ctx, ctx,

View File

@ -118,7 +118,7 @@ type IncrementalBase struct {
func (w Wrapper) BackupCollections( func (w Wrapper) BackupCollections(
ctx context.Context, ctx context.Context,
previousSnapshots []IncrementalBase, previousSnapshots []IncrementalBase,
collections []data.Collection, collections []data.BackupCollection,
globalExcludeSet map[string]struct{}, globalExcludeSet map[string]struct{},
tags map[string]string, tags map[string]string,
buildTreeWithBase bool, buildTreeWithBase bool,
@ -368,7 +368,7 @@ func (w Wrapper) RestoreMultipleItems(
snapshotID string, snapshotID string,
paths []path.Path, paths []path.Path,
bcounter ByteCounter, bcounter ByteCounter,
) ([]data.Collection, error) { ) ([]data.RestoreCollection, error) {
ctx, end := D.Span(ctx, "kopia:restoreMultipleItems") ctx, end := D.Span(ctx, "kopia:restoreMultipleItems")
defer end() defer end()
@ -409,7 +409,7 @@ func (w Wrapper) RestoreMultipleItems(
c.streams = append(c.streams, ds) c.streams = append(c.streams, ds)
} }
res := make([]data.Collection, 0, len(cols)) res := make([]data.RestoreCollection, 0, len(cols))
for _, c := range cols { for _, c := range cols {
res = append(res, c) res = append(res, c)
} }

View File

@ -52,7 +52,7 @@ var (
func testForFiles( func testForFiles(
t *testing.T, t *testing.T,
expected map[string][]byte, expected map[string][]byte,
collections []data.Collection, collections []data.RestoreCollection,
) { ) {
t.Helper() t.Helper()
@ -196,7 +196,7 @@ func (suite *KopiaIntegrationSuite) TearDownTest() {
} }
func (suite *KopiaIntegrationSuite) TestBackupCollections() { func (suite *KopiaIntegrationSuite) TestBackupCollections() {
collections := []data.Collection{ collections := []data.BackupCollection{
mockconnector.NewMockExchangeCollection( mockconnector.NewMockExchangeCollection(
suite.testPath1, suite.testPath1,
5, 5,
@ -353,7 +353,7 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
stats, _, _, err := w.BackupCollections( stats, _, _, err := w.BackupCollections(
ctx, ctx,
nil, nil,
[]data.Collection{dc1, dc2}, []data.BackupCollection{dc1, dc2},
nil, nil,
tags, tags,
true, true,
@ -382,6 +382,41 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
testForFiles(t, expected, result) testForFiles(t, expected, result)
} }
type mockBackupCollection struct {
path path.Path
streams []data.Stream
}
func (c *mockBackupCollection) Items() <-chan data.Stream {
res := make(chan data.Stream)
go func() {
defer close(res)
for _, s := range c.streams {
res <- s
}
}()
return res
}
func (c mockBackupCollection) FullPath() path.Path {
return c.path
}
func (c mockBackupCollection) PreviousPath() path.Path {
return nil
}
func (c mockBackupCollection) State() data.CollectionState {
return data.NewState
}
func (c mockBackupCollection) DoNotMergeItems() bool {
return false
}
func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() { func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
t := suite.T() t := suite.T()
@ -396,8 +431,8 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
tags[k] = "" tags[k] = ""
} }
collections := []data.Collection{ collections := []data.BackupCollection{
&kopiaDataCollection{ &mockBackupCollection{
path: suite.testPath1, path: suite.testPath1,
streams: []data.Stream{ streams: []data.Stream{
&mockconnector.MockExchangeData{ &mockconnector.MockExchangeData{
@ -410,7 +445,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
}, },
}, },
}, },
&kopiaDataCollection{ &mockBackupCollection{
path: suite.testPath2, path: suite.testPath2,
streams: []data.Stream{ streams: []data.Stream{
&mockconnector.MockExchangeData{ &mockconnector.MockExchangeData{
@ -477,7 +512,7 @@ type backedupFile struct {
func (suite *KopiaIntegrationSuite) TestBackupCollectionsHandlesNoCollections() { func (suite *KopiaIntegrationSuite) TestBackupCollectionsHandlesNoCollections() {
table := []struct { table := []struct {
name string name string
collections []data.Collection collections []data.BackupCollection
}{ }{
{ {
name: "NilCollections", name: "NilCollections",
@ -485,7 +520,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollectionsHandlesNoCollections()
}, },
{ {
name: "EmptyCollections", name: "EmptyCollections",
collections: []data.Collection{}, collections: []data.BackupCollection{},
}, },
} }
@ -624,10 +659,10 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupTest() {
suite.w = &Wrapper{c} suite.w = &Wrapper{c}
collections := []data.Collection{} collections := []data.BackupCollection{}
for _, parent := range []path.Path{suite.testPath1, suite.testPath2} { for _, parent := range []path.Path{suite.testPath1, suite.testPath2} {
collection := &kopiaDataCollection{path: parent} collection := &mockBackupCollection{path: parent}
for _, item := range suite.files[parent.String()] { for _, item := range suite.files[parent.String()] {
collection.streams = append( collection.streams = append(
@ -723,7 +758,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
excludeItem bool excludeItem bool
expectedCachedItems int expectedCachedItems int
expectedUncachedItems int expectedUncachedItems int
cols func() []data.Collection cols func() []data.BackupCollection
backupIDCheck require.ValueAssertionFunc backupIDCheck require.ValueAssertionFunc
restoreCheck assert.ErrorAssertionFunc restoreCheck assert.ErrorAssertionFunc
}{ }{
@ -732,7 +767,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
excludeItem: true, excludeItem: true,
expectedCachedItems: len(suite.filesByPath) - 1, expectedCachedItems: len(suite.filesByPath) - 1,
expectedUncachedItems: 0, expectedUncachedItems: 0,
cols: func() []data.Collection { cols: func() []data.BackupCollection {
return nil return nil
}, },
backupIDCheck: require.NotEmpty, backupIDCheck: require.NotEmpty,
@ -743,7 +778,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
// No snapshot should be made since there were no changes. // No snapshot should be made since there were no changes.
expectedCachedItems: 0, expectedCachedItems: 0,
expectedUncachedItems: 0, expectedUncachedItems: 0,
cols: func() []data.Collection { cols: func() []data.BackupCollection {
return nil return nil
}, },
// Backup doesn't run. // Backup doesn't run.
@ -753,14 +788,14 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
name: "NoExcludeItemWithChanges", name: "NoExcludeItemWithChanges",
expectedCachedItems: len(suite.filesByPath), expectedCachedItems: len(suite.filesByPath),
expectedUncachedItems: 1, expectedUncachedItems: 1,
cols: func() []data.Collection { cols: func() []data.BackupCollection {
c := mockconnector.NewMockExchangeCollection( c := mockconnector.NewMockExchangeCollection(
suite.testPath1, suite.testPath1,
1, 1,
) )
c.ColState = data.NotMovedState c.ColState = data.NotMovedState
return []data.Collection{c} return []data.BackupCollection{c}
}, },
backupIDCheck: require.NotEmpty, backupIDCheck: require.NotEmpty,
restoreCheck: assert.NoError, restoreCheck: assert.NoError,

View File

@ -308,9 +308,9 @@ func produceBackupDataCollections(
ctx context.Context, ctx context.Context,
gc *connector.GraphConnector, gc *connector.GraphConnector,
sel selectors.Selector, sel selectors.Selector,
metadata []data.Collection, metadata []data.RestoreCollection,
ctrlOpts control.Options, ctrlOpts control.Options,
) ([]data.Collection, map[string]struct{}, error) { ) ([]data.BackupCollection, map[string]struct{}, error) {
complete, closer := observe.MessageWithCompletion(ctx, observe.Safe("Discovering items to backup")) complete, closer := observe.MessageWithCompletion(ctx, observe.Safe("Discovering items to backup"))
defer func() { defer func() {
complete <- struct{}{} complete <- struct{}{}
@ -331,7 +331,7 @@ type backuper interface {
BackupCollections( BackupCollections(
ctx context.Context, ctx context.Context,
bases []kopia.IncrementalBase, bases []kopia.IncrementalBase,
cs []data.Collection, cs []data.BackupCollection,
excluded map[string]struct{}, excluded map[string]struct{},
tags map[string]string, tags map[string]string,
buildTreeWithBase bool, buildTreeWithBase bool,
@ -389,7 +389,7 @@ func consumeBackupDataCollections(
tenantID string, tenantID string,
reasons []kopia.Reason, reasons []kopia.Reason,
mans []*kopia.ManifestEntry, mans []*kopia.ManifestEntry,
cs []data.Collection, cs []data.BackupCollection,
excludes map[string]struct{}, excludes map[string]struct{},
backupID model.StableID, backupID model.StableID,
isIncremental bool, isIncremental bool,

View File

@ -387,10 +387,10 @@ func buildCollections(
tenant, user string, tenant, user string,
dest control.RestoreDestination, dest control.RestoreDestination,
colls []incrementalCollection, colls []incrementalCollection,
) []data.Collection { ) []data.RestoreCollection {
t.Helper() t.Helper()
collections := make([]data.Collection, 0, len(colls)) collections := make([]data.RestoreCollection, 0, len(colls))
for _, c := range colls { for _, c := range colls {
pth := toDataLayerPath( pth := toDataLayerPath(

View File

@ -36,20 +36,20 @@ import (
type mockRestorer struct { type mockRestorer struct {
gotPaths []path.Path gotPaths []path.Path
colls []data.Collection colls []data.RestoreCollection
collsByID map[string][]data.Collection // snapshotID: []Collection collsByID map[string][]data.RestoreCollection // snapshotID: []RestoreCollection
err error err error
onRestore restoreFunc onRestore restoreFunc
} }
type restoreFunc func(id string, ps []path.Path) ([]data.Collection, error) type restoreFunc func(id string, ps []path.Path) ([]data.RestoreCollection, error)
func (mr *mockRestorer) buildRestoreFunc( func (mr *mockRestorer) buildRestoreFunc(
t *testing.T, t *testing.T,
oid string, oid string,
ops []path.Path, ops []path.Path,
) { ) {
mr.onRestore = func(id string, ps []path.Path) ([]data.Collection, error) { mr.onRestore = func(id string, ps []path.Path) ([]data.RestoreCollection, error) {
assert.Equal(t, oid, id, "manifest id") assert.Equal(t, oid, id, "manifest id")
checkPaths(t, ops, ps) checkPaths(t, ops, ps)
@ -62,7 +62,7 @@ func (mr *mockRestorer) RestoreMultipleItems(
snapshotID string, snapshotID string,
paths []path.Path, paths []path.Path,
bc kopia.ByteCounter, bc kopia.ByteCounter,
) ([]data.Collection, error) { ) ([]data.RestoreCollection, error) {
mr.gotPaths = append(mr.gotPaths, paths...) mr.gotPaths = append(mr.gotPaths, paths...)
if mr.onRestore != nil { if mr.onRestore != nil {
@ -85,7 +85,7 @@ func checkPaths(t *testing.T, expected, got []path.Path) {
type mockBackuper struct { type mockBackuper struct {
checkFunc func( checkFunc func(
bases []kopia.IncrementalBase, bases []kopia.IncrementalBase,
cs []data.Collection, cs []data.BackupCollection,
tags map[string]string, tags map[string]string,
buildTreeWithBase bool, buildTreeWithBase bool,
) )
@ -94,7 +94,7 @@ type mockBackuper struct {
func (mbu mockBackuper) BackupCollections( func (mbu mockBackuper) BackupCollections(
ctx context.Context, ctx context.Context,
bases []kopia.IncrementalBase, bases []kopia.IncrementalBase,
cs []data.Collection, cs []data.BackupCollection,
excluded map[string]struct{}, excluded map[string]struct{},
tags map[string]string, tags map[string]string,
buildTreeWithBase bool, buildTreeWithBase bool,
@ -559,7 +559,7 @@ func (suite *BackupOpSuite) TestBackupOperation_ConsumeBackupDataCollections_Pat
mbu := &mockBackuper{ mbu := &mockBackuper{
checkFunc: func( checkFunc: func(
bases []kopia.IncrementalBase, bases []kopia.IncrementalBase,
cs []data.Collection, cs []data.BackupCollection,
tags map[string]string, tags map[string]string,
buildTreeWithBase bool, buildTreeWithBase bool,
) { ) {

View File

@ -46,10 +46,10 @@ func produceManifestsAndMetadata(
tenantID string, tenantID string,
getMetadata bool, getMetadata bool,
errs fault.Adder, errs fault.Adder,
) ([]*kopia.ManifestEntry, []data.Collection, bool, error) { ) ([]*kopia.ManifestEntry, []data.RestoreCollection, bool, error) {
var ( var (
metadataFiles = graph.AllMetadataFileNames() metadataFiles = graph.AllMetadataFileNames()
collections []data.Collection collections []data.RestoreCollection
) )
ms, err := mr.FetchPrevSnapshotManifests( ms, err := mr.FetchPrevSnapshotManifests(
@ -183,7 +183,7 @@ func collectMetadata(
man *kopia.ManifestEntry, man *kopia.ManifestEntry,
fileNames []string, fileNames []string,
tenantID string, tenantID string,
) ([]data.Collection, error) { ) ([]data.RestoreCollection, error) {
paths := []path.Path{} paths := []path.Path{}
for _, fn := range fileNames { for _, fn := range fileNames {

View File

@ -49,9 +49,8 @@ func (mg mockGetDetailsIDer) GetDetailsIDFromBackupID(
} }
type mockColl struct { type mockColl struct {
id string // for comparisons id string // for comparisons
p path.Path p path.Path
prevP path.Path
} }
func (mc mockColl) Items() <-chan data.Stream { func (mc mockColl) Items() <-chan data.Stream {
@ -62,18 +61,6 @@ func (mc mockColl) FullPath() path.Path {
return mc.p return mc.p
} }
func (mc mockColl) PreviousPath() path.Path {
return mc.prevP
}
func (mc mockColl) State() data.CollectionState {
return data.NewState
}
func (mc mockColl) DoNotMergeItems() bool {
return false
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// tests // tests
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -447,7 +434,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
getMeta bool getMeta bool
assertErr assert.ErrorAssertionFunc assertErr assert.ErrorAssertionFunc
assertB assert.BoolAssertionFunc assertB assert.BoolAssertionFunc
expectDCS []data.Collection expectDCS []data.RestoreCollection
expectNilMans bool expectNilMans bool
}{ }{
{ {
@ -550,7 +537,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
{ {
name: "man missing backup id", name: "man missing backup id",
mr: mockManifestRestorer{ mr: mockManifestRestorer{
mockRestorer: mockRestorer{collsByID: map[string][]data.Collection{ mockRestorer: mockRestorer{collsByID: map[string][]data.RestoreCollection{
"id": {mockColl{id: "id_coll"}}, "id": {mockColl{id: "id_coll"}},
}}, }},
mans: []*kopia.ManifestEntry{makeMan(path.EmailCategory, "id", "", "")}, mans: []*kopia.ManifestEntry{makeMan(path.EmailCategory, "id", "", "")},
@ -577,7 +564,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
{ {
name: "one complete, one incomplete", name: "one complete, one incomplete",
mr: mockManifestRestorer{ mr: mockManifestRestorer{
mockRestorer: mockRestorer{collsByID: map[string][]data.Collection{ mockRestorer: mockRestorer{collsByID: map[string][]data.RestoreCollection{
"id": {mockColl{id: "id_coll"}}, "id": {mockColl{id: "id_coll"}},
"incmpl_id": {mockColl{id: "incmpl_id_coll"}}, "incmpl_id": {mockColl{id: "incmpl_id_coll"}},
}}, }},
@ -591,12 +578,12 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []data.Collection{mockColl{id: "id_coll"}}, expectDCS: []data.RestoreCollection{mockColl{id: "id_coll"}},
}, },
{ {
name: "single valid man", name: "single valid man",
mr: mockManifestRestorer{ mr: mockManifestRestorer{
mockRestorer: mockRestorer{collsByID: map[string][]data.Collection{ mockRestorer: mockRestorer{collsByID: map[string][]data.RestoreCollection{
"id": {mockColl{id: "id_coll"}}, "id": {mockColl{id: "id_coll"}},
}}, }},
mans: []*kopia.ManifestEntry{makeMan(path.EmailCategory, "id", "", "bid")}, mans: []*kopia.ManifestEntry{makeMan(path.EmailCategory, "id", "", "bid")},
@ -606,12 +593,12 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []data.Collection{mockColl{id: "id_coll"}}, expectDCS: []data.RestoreCollection{mockColl{id: "id_coll"}},
}, },
{ {
name: "multiple valid mans", name: "multiple valid mans",
mr: mockManifestRestorer{ mr: mockManifestRestorer{
mockRestorer: mockRestorer{collsByID: map[string][]data.Collection{ mockRestorer: mockRestorer{collsByID: map[string][]data.RestoreCollection{
"mail": {mockColl{id: "mail_coll"}}, "mail": {mockColl{id: "mail_coll"}},
"contact": {mockColl{id: "contact_coll"}}, "contact": {mockColl{id: "contact_coll"}},
}}, }},
@ -625,7 +612,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
assertB: assert.True, assertB: assert.True,
expectDCS: []data.Collection{ expectDCS: []data.RestoreCollection{
mockColl{id: "mail_coll"}, mockColl{id: "mail_coll"},
mockColl{id: "contact_coll"}, mockColl{id: "contact_coll"},
}, },

View File

@ -88,7 +88,7 @@ func (op RestoreOperation) validate() error {
// pointer wrapping the values, while those values // pointer wrapping the values, while those values
// get populated asynchronously. // get populated asynchronously.
type restoreStats struct { type restoreStats struct {
cs []data.Collection cs []data.RestoreCollection
gc *support.ConnectorOperationStatus gc *support.ConnectorOperationStatus
bytesRead *stats.ByteCounter bytesRead *stats.ByteCounter
resourceCount int resourceCount int
@ -104,7 +104,7 @@ type restorer interface {
snapshotID string, snapshotID string,
paths []path.Path, paths []path.Path,
bc kopia.ByteCounter, bc kopia.ByteCounter,
) ([]data.Collection, error) ) ([]data.RestoreCollection, error)
} }
// Run begins a synchronous restore operation. // Run begins a synchronous restore operation.

View File

@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/exchange" "github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/support" "github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/events" "github.com/alcionai/corso/src/internal/events"
@ -61,7 +62,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
bytesRead: &stats.ByteCounter{ bytesRead: &stats.ByteCounter{
NumBytes: 42, NumBytes: 42,
}, },
cs: []data.Collection{&exchange.Collection{}}, cs: []data.RestoreCollection{&mockconnector.MockExchangeDataCollection{}},
gc: &support.ConnectorOperationStatus{ gc: &support.ConnectorOperationStatus{
ObjectCount: 1, ObjectCount: 1,
Successful: 1, Successful: 1,
@ -82,7 +83,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
expectErr: assert.NoError, expectErr: assert.NoError,
stats: restoreStats{ stats: restoreStats{
bytesRead: &stats.ByteCounter{}, bytesRead: &stats.ByteCounter{},
cs: []data.Collection{}, cs: []data.RestoreCollection{},
gc: &support.ConnectorOperationStatus{}, gc: &support.ConnectorOperationStatus{},
}, },
}, },

View File

@ -76,7 +76,7 @@ func (ss *streamStore) WriteBackupDetails(
backupStats, _, _, err := ss.kw.BackupCollections( backupStats, _, _, err := ss.kw.BackupCollections(
ctx, ctx,
nil, nil,
[]data.Collection{dc}, []data.BackupCollection{dc},
nil, nil,
nil, nil,
false) false)
@ -164,7 +164,7 @@ func (ss *streamStore) DeleteBackupDetails(
return nil return nil
} }
// streamCollection is a data.Collection used to persist // streamCollection is a data.BackupCollection used to persist
// a single data stream // a single data stream
type streamCollection struct { type streamCollection struct {
// folderPath indicates what level in the hierarchy this collection // folderPath indicates what level in the hierarchy this collection