Split collection interface (#2415)

## Description

Split the collection interface into the parts used during backup and the parts used during restore. Does not change other code beyond fixing types.
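
For reference, the resulting interfaces (condensed from the `internal/data` hunk further down; doc comments trimmed) look like this:

```go
// Collection keeps only what backup and restore share.
type Collection interface {
	Items() <-chan Stream
	FullPath() path.Path
}

// BackupCollection layers on the state tracking that backup needs.
type BackupCollection interface {
	Collection
	PreviousPath() path.Path
	State() CollectionState
	DoNotMergeItems() bool
}

// RestoreCollection adds nothing yet; it marks the restore-side contract.
type RestoreCollection interface {
	Collection
}
```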

## Does this PR need a docs update or release note?

- [ ] Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] No

## Type of change

- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Test
- [ ] 💻 CI/Deployment
- [x] 🧹 Tech Debt/Cleanup

## Issue(s)

* closes #1944

## Test Plan

- [ ] 💪 Manual
- [x] Unit test
- [x] 💚 E2E
ashmrtn 2023-02-07 14:15:48 -08:00 committed by GitHub
parent 7f2a8735ef
commit 373f0458a7
35 changed files with 240 additions and 218 deletions

View File

@ -152,8 +152,8 @@ func buildCollections(
tenant, user string,
dest control.RestoreDestination,
colls []collection,
) ([]data.Collection, error) {
collections := make([]data.Collection, 0, len(colls))
) ([]data.RestoreCollection, error) {
collections := make([]data.RestoreCollection, 0, len(colls))
for _, c := range colls {
pth, err := toDataLayerPath(

View File

@ -34,9 +34,9 @@ import (
func (gc *GraphConnector) DataCollections(
ctx context.Context,
sels selectors.Selector,
metadata []data.Collection,
metadata []data.RestoreCollection,
ctrlOpts control.Options,
) ([]data.Collection, map[string]struct{}, error) {
) ([]data.BackupCollection, map[string]struct{}, error) {
ctx, end := D.Span(ctx, "gc:dataCollections", D.Index("service", sels.Service.String()))
defer end()
@ -51,7 +51,7 @@ func (gc *GraphConnector) DataCollections(
}
if !serviceEnabled {
return []data.Collection{}, nil, nil
return []data.BackupCollection{}, nil, nil
}
switch sels.Service {
@ -182,9 +182,9 @@ func (fm odFolderMatcher) Matches(dir string) bool {
func (gc *GraphConnector) OneDriveDataCollections(
ctx context.Context,
selector selectors.Selector,
metadata []data.Collection,
metadata []data.RestoreCollection,
ctrlOpts control.Options,
) ([]data.Collection, map[string]struct{}, error) {
) ([]data.BackupCollection, map[string]struct{}, error) {
odb, err := selector.ToOneDriveBackup()
if err != nil {
return nil, nil, errors.Wrap(err, "oneDriveDataCollection: parsing selector")
@ -192,7 +192,7 @@ func (gc *GraphConnector) OneDriveDataCollections(
var (
user = selector.DiscreteOwner
collections = []data.Collection{}
collections = []data.BackupCollection{}
allExcludes = map[string]struct{}{}
errs error
)
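
The signature change above captures the metadata round trip: metadata produced by a previous backup is read back from storage as `RestoreCollection`s and fed into the next backup, which emits `BackupCollection`s. A hedged sketch of that flow (`gc`, `sels`, `prevMetadata`, and `ctrlOpts` are placeholders):

```go
// prevMetadata was read back from the prior snapshot's metadata files.
var prevMetadata []data.RestoreCollection

// The producer consumes restore collections and yields backup collections.
colls, excludes, err := gc.DataCollections(ctx, sels, prevMetadata, ctrlOpts)
if err != nil {
	return nil, nil, err
}

// colls is []data.BackupCollection; colls and excludes feed the next
// kopia backup.
return colls, excludes, nil
```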

View File

@ -63,7 +63,7 @@ type DeltaPath struct {
// and path lookup maps.
func parseMetadataCollections(
ctx context.Context,
colls []data.Collection,
colls []data.RestoreCollection,
) (CatDeltaPaths, error) {
// cdp stores metadata
cdp := CatDeltaPaths{
@ -163,11 +163,11 @@ func parseMetadataCollections(
func DataCollections(
ctx context.Context,
selector selectors.Selector,
metadata []data.Collection,
metadata []data.RestoreCollection,
acct account.M365Config,
su support.StatusUpdater,
ctrlOpts control.Options,
) ([]data.Collection, map[string]struct{}, error) {
) ([]data.BackupCollection, map[string]struct{}, error) {
eb, err := selector.ToExchangeBackup()
if err != nil {
return nil, nil, errors.Wrap(err, "exchangeDataCollection: parsing selector")
@ -175,7 +175,7 @@ func DataCollections(
var (
user = selector.DiscreteOwner
collections = []data.Collection{}
collections = []data.BackupCollection{}
errs error
)
@ -231,10 +231,10 @@ func createCollections(
dps DeltaPaths,
ctrlOpts control.Options,
su support.StatusUpdater,
) ([]data.Collection, error) {
) ([]data.BackupCollection, error) {
var (
errs *multierror.Error
allCollections = make([]data.Collection, 0)
allCollections = make([]data.BackupCollection, 0)
ac = api.Client{Credentials: creds}
category = scope.Category().PathType()
)
@ -245,7 +245,7 @@ func createCollections(
}
// Create collection of ExchangeDataCollection
collections := make(map[string]data.Collection)
collections := make(map[string]data.BackupCollection)
qp := graph.QueryParams{
Category: category,

View File

@ -174,7 +174,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
)
require.NoError(t, err)
cdps, err := parseMetadataCollections(ctx, []data.Collection{coll})
cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{coll})
test.expectError(t, err)
emails := cdps[path.EmailCategory]
@ -335,7 +335,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
require.NoError(t, err)
assert.Less(t, 1, len(collections), "retrieved metadata and data collections")
var metadata data.Collection
var metadata data.BackupCollection
for _, coll := range collections {
if coll.FullPath().Service() == path.ExchangeMetadataService {
@ -345,7 +345,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
require.NotNil(t, metadata, "collections contains a metadata collection")
cdps, err := parseMetadataCollections(ctx, []data.Collection{metadata})
cdps, err := parseMetadataCollections(ctx, []data.RestoreCollection{metadata})
require.NoError(t, err)
dps := cdps[test.scope.Category().PathType()]

View File

@ -24,10 +24,10 @@ import (
)
var (
_ data.Collection = &Collection{}
_ data.Stream = &Stream{}
_ data.StreamInfo = &Stream{}
_ data.StreamModTime = &Stream{}
_ data.BackupCollection = &Collection{}
_ data.Stream = &Stream{}
_ data.StreamInfo = &Stream{}
_ data.StreamModTime = &Stream{}
)
const (

View File

@ -25,14 +25,14 @@ type addedAndRemovedItemIDsGetter interface {
// filterContainersAndFillCollections is a utility function
// that places the M365 object ids belonging to specific directories
// into a Collection. Messages outside of those directories are omitted.
// into a BackupCollection. Messages outside of those directories are omitted.
// @param collection is filled with during this function.
// Supports all exchange applications: Contacts, Events, and Mail
func filterContainersAndFillCollections(
ctx context.Context,
qp graph.QueryParams,
getter addedAndRemovedItemIDsGetter,
collections map[string]data.Collection,
collections map[string]data.BackupCollection,
statusUpdater support.StatusUpdater,
resolver graph.ContainerResolver,
scope selectors.ExchangeScope,

View File

@ -280,7 +280,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
ctx, flush := tester.NewContext()
defer flush()
collections := map[string]data.Collection{}
collections := map[string]data.BackupCollection{}
err := filterContainersAndFillCollections(
ctx,
@ -433,7 +433,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea
resolver = newMockResolver(container1)
)
collections := map[string]data.Collection{}
collections := map[string]data.BackupCollection{}
err := filterContainersAndFillCollections(
ctx,
@ -785,7 +785,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incre
ctx, flush := tester.NewContext()
defer flush()
collections := map[string]data.Collection{}
collections := map[string]data.BackupCollection{}
err := filterContainersAndFillCollections(
ctx,

View File

@ -297,7 +297,7 @@ func SendMailToBackStore(
return errs
}
// RestoreExchangeDataCollections restores M365 objects in data.Collection to MSFT
// RestoreExchangeDataCollections restores M365 objects in data.RestoreCollection to MSFT
// store through GraphAPI.
// @param dest: container destination to M365
func RestoreExchangeDataCollections(
@ -305,7 +305,7 @@ func RestoreExchangeDataCollections(
creds account.M365Config,
gs graph.Servicer,
dest control.RestoreDestination,
dcs []data.Collection,
dcs []data.RestoreCollection,
deets *details.Builder,
) (*support.ConnectorOperationStatus, error) {
var (
@ -364,7 +364,7 @@ func RestoreExchangeDataCollections(
func restoreCollection(
ctx context.Context,
gs graph.Servicer,
dc data.Collection,
dc data.RestoreCollection,
folderID string,
policy control.CollisionPolicy,
deets *details.Builder,

View File

@ -14,8 +14,8 @@ import (
)
var (
_ data.Collection = &MetadataCollection{}
_ data.Stream = &MetadataItem{}
_ data.BackupCollection = &MetadataCollection{}
_ data.Stream = &MetadataItem{}
)
// MetadataCollection in a simple collection that assumes all items to be
@ -67,7 +67,7 @@ func MakeMetadataCollection(
cat path.CategoryType,
metadata []MetadataCollectionEntry,
statusUpdater support.StatusUpdater,
) (data.Collection, error) {
) (data.BackupCollection, error) {
if len(metadata) == 0 {
return nil, nil
}

View File

@ -271,7 +271,7 @@ func (gc *GraphConnector) RestoreDataCollections(
selector selectors.Selector,
dest control.RestoreDestination,
opts control.Options,
dcs []data.Collection,
dcs []data.RestoreCollection,
) (*details.Details, error) {
ctx, end := D.Span(ctx, "connector:restore")
defer end()

View File

@ -740,7 +740,7 @@ func compareItem(
func checkHasCollections(
t *testing.T,
expected map[string]map[string][]byte,
got []data.Collection,
got []data.BackupCollection,
) {
t.Helper()
@ -762,10 +762,10 @@ func checkCollections(
t *testing.T,
expectedItems int,
expected map[string]map[string][]byte,
got []data.Collection,
got []data.BackupCollection,
restorePermissions bool,
) int {
collectionsWithItems := []data.Collection{}
collectionsWithItems := []data.BackupCollection{}
skipped := 0
gotItems := 0
@ -950,8 +950,8 @@ func collectionsForInfo(
tenant, user string,
dest control.RestoreDestination,
allInfo []colInfo,
) (int, int, []data.Collection, map[string]map[string][]byte) {
collections := make([]data.Collection, 0, len(allInfo))
) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
collections := make([]data.RestoreCollection, 0, len(allInfo))
expectedData := make(map[string]map[string][]byte, len(allInfo))
totalItems := 0
kopiaEntries := 0
@ -1002,8 +1002,8 @@ func collectionsForInfoVersion0(
tenant, user string,
dest control.RestoreDestination,
allInfo []colInfo,
) (int, int, []data.Collection, map[string]map[string][]byte) {
collections := make([]data.Collection, 0, len(allInfo))
) (int, int, []data.RestoreCollection, map[string]map[string][]byte) {
collections := make([]data.RestoreCollection, 0, len(allInfo))
expectedData := make(map[string]map[string][]byte, len(allInfo))
totalItems := 0
kopiaEntries := 0

View File

@ -257,7 +257,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
dest := tester.DefaultTestRestoreDestination()
table := []struct {
name string
col []data.Collection
col []data.RestoreCollection
sel selectors.Selector
}{
{
@ -269,7 +269,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
},
{
name: "ExchangeEmpty",
col: []data.Collection{},
col: []data.RestoreCollection{},
sel: selectors.Selector{
Service: selectors.ServiceExchange,
},
@ -283,7 +283,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
},
{
name: "OneDriveEmpty",
col: []data.Collection{},
col: []data.RestoreCollection{},
sel: selectors.Selector{
Service: selectors.ServiceOneDrive,
},
@ -297,7 +297,7 @@ func (suite *GraphConnectorIntegrationSuite) TestEmptyCollections() {
},
{
name: "SharePointEmpty",
col: []data.Collection{},
col: []data.RestoreCollection{},
sel: selectors.Selector{
Service: selectors.ServiceSharePoint,
},
@ -370,7 +370,7 @@ func runRestoreBackupTest(
opts control.Options,
) {
var (
collections []data.Collection
collections []data.RestoreCollection
expectedData = map[string]map[string][]byte{}
totalItems = 0
totalKopiaItems = 0
@ -495,7 +495,7 @@ func runRestoreBackupTestVersion0(
opts control.Options,
) {
var (
collections []data.Collection
collections []data.RestoreCollection
expectedData = map[string]map[string][]byte{}
totalItems = 0
totalKopiaItems = 0

View File

@ -27,10 +27,13 @@ type MockExchangeDataCollection struct {
}
var (
_ data.Collection = &MockExchangeDataCollection{}
_ data.Stream = &MockExchangeData{}
_ data.StreamInfo = &MockExchangeData{}
_ data.StreamSize = &MockExchangeData{}
// Needs to implement both backup and restore interfaces so we can use it in
// integration tests.
_ data.BackupCollection = &MockExchangeDataCollection{}
_ data.RestoreCollection = &MockExchangeDataCollection{}
_ data.Stream = &MockExchangeData{}
_ data.StreamInfo = &MockExchangeData{}
_ data.StreamSize = &MockExchangeData{}
)
// NewMockExchangeDataCollection creates an data collection that will return the specified number of

View File

@ -14,8 +14,8 @@ import (
)
var (
_ data.Stream = &MockListData{}
_ data.Collection = &MockListCollection{}
_ data.Stream = &MockListData{}
_ data.BackupCollection = &MockListCollection{}
)
type MockListCollection struct {

View File

@ -42,10 +42,10 @@ const (
)
var (
_ data.Collection = &Collection{}
_ data.Stream = &Item{}
_ data.StreamInfo = &Item{}
_ data.StreamModTime = &Item{}
_ data.BackupCollection = &Collection{}
_ data.Stream = &Item{}
_ data.StreamInfo = &Item{}
_ data.StreamModTime = &Item{}
)
// Collection represents a set of OneDrive objects retrieved from M365

View File

@ -61,9 +61,9 @@ type Collections struct {
ctrl control.Options
// collectionMap allows lookup of the data.Collection
// collectionMap allows lookup of the data.BackupCollection
// for a OneDrive folder
CollectionMap map[string]data.Collection
CollectionMap map[string]data.BackupCollection
// Not the most ideal, but allows us to change the pager function for testing
// as needed. This will allow us to mock out some scenarios during testing.
@ -100,7 +100,7 @@ func NewCollections(
resourceOwner: resourceOwner,
source: source,
matcher: matcher,
CollectionMap: map[string]data.Collection{},
CollectionMap: map[string]data.BackupCollection{},
drivePagerFunc: PagerForSource,
itemPagerFunc: defaultItemPager,
service: service,
@ -111,7 +111,7 @@ func NewCollections(
func deserializeMetadata(
ctx context.Context,
cols []data.Collection,
cols []data.RestoreCollection,
) (map[string]string, map[string]map[string]string, error) {
logger.Ctx(ctx).Infow(
"deserialzing previous backup metadata",
@ -249,8 +249,8 @@ func deserializeMap[T any](reader io.ReadCloser, alreadyFound map[string]T) erro
// be excluded from the upcoming backup.
func (c *Collections) Get(
ctx context.Context,
prevMetadata []data.Collection,
) ([]data.Collection, map[string]struct{}, error) {
prevMetadata []data.RestoreCollection,
) ([]data.BackupCollection, map[string]struct{}, error) {
prevDeltas, _, err := deserializeMetadata(ctx, prevMetadata)
if err != nil {
return nil, nil, err
@ -327,7 +327,7 @@ func (c *Collections) Get(
observe.Message(ctx, observe.Safe(fmt.Sprintf("Discovered %d items to backup", c.NumItems)))
// Add an extra for the metadata collection.
collections := make([]data.Collection, 0, len(c.CollectionMap)+1)
collections := make([]data.BackupCollection, 0, len(c.CollectionMap)+1)
for _, coll := range c.CollectionMap {
collections = append(collections, coll)
}

View File

@ -983,7 +983,7 @@ func (suite *OneDriveCollectionsSuite) TestDeserializeMetadata() {
ctx, flush := tester.NewContext()
defer flush()
cols := []data.Collection{}
cols := []data.RestoreCollection{}
for _, c := range test.cols {
mc, err := graph.MakeMetadataCollection(
@ -1529,7 +1529,7 @@ func (suite *OneDriveCollectionsSuite) TestGet() {
for _, baseCol := range cols {
folderPath := baseCol.FullPath().String()
if folderPath == metadataPath.String() {
deltas, paths, err := deserializeMetadata(ctx, []data.Collection{baseCol})
deltas, paths, err := deserializeMetadata(ctx, []data.RestoreCollection{baseCol})
if !assert.NoError(t, err, "deserializing metadata") {
continue
}

View File

@ -64,7 +64,7 @@ func RestoreCollections(
service graph.Servicer,
dest control.RestoreDestination,
opts control.Options,
dcs []data.Collection,
dcs []data.RestoreCollection,
deets *details.Builder,
) (*support.ConnectorOperationStatus, error) {
var (
@ -148,7 +148,7 @@ func RestoreCollection(
ctx context.Context,
backupVersion int,
service graph.Servicer,
dc data.Collection,
dc data.RestoreCollection,
parentPerms []UserPermission,
source driveSource,
restoreContainerName string,

View File

@ -30,10 +30,10 @@ const (
)
var (
_ data.Collection = &Collection{}
_ data.Stream = &Item{}
_ data.StreamInfo = &Item{}
_ data.StreamModTime = &Item{}
_ data.BackupCollection = &Collection{}
_ data.Stream = &Item{}
_ data.StreamInfo = &Item{}
_ data.StreamModTime = &Item{}
)
// Collection is the SharePoint.List implementation of data.Collection. SharePoint.Libraries collections are supported

View File

@ -34,7 +34,7 @@ func DataCollections(
serv graph.Servicer,
su statusUpdater,
ctrlOpts control.Options,
) ([]data.Collection, map[string]struct{}, error) {
) ([]data.BackupCollection, map[string]struct{}, error) {
b, err := selector.ToSharePointBackup()
if err != nil {
return nil, nil, errors.Wrap(err, "sharePointDataCollection: parsing selector")
@ -42,7 +42,7 @@ func DataCollections(
var (
site = b.DiscreteOwner
collections = []data.Collection{}
collections = []data.BackupCollection{}
errs error
)
@ -54,7 +54,7 @@ func DataCollections(
defer closer()
defer close(foldersComplete)
var spcs []data.Collection
var spcs []data.BackupCollection
switch scope.Category().PathType() {
case path.ListsCategory:
@ -97,10 +97,10 @@ func collectLists(
tenantID, siteID string,
updater statusUpdater,
ctrlOpts control.Options,
) ([]data.Collection, error) {
) ([]data.BackupCollection, error) {
logger.Ctx(ctx).With("site", siteID).Debug("Creating SharePoint List Collections")
spcs := make([]data.Collection, 0)
spcs := make([]data.BackupCollection, 0)
tuples, err := preFetchLists(ctx, serv, siteID)
if err != nil {
@ -137,9 +137,9 @@ func collectLibraries(
scope selectors.SharePointScope,
updater statusUpdater,
ctrlOpts control.Options,
) ([]data.Collection, map[string]struct{}, error) {
) ([]data.BackupCollection, map[string]struct{}, error) {
var (
collections = []data.Collection{}
collections = []data.BackupCollection{}
errs error
)
@ -175,10 +175,10 @@ func collectPages(
scope selectors.SharePointScope,
updater statusUpdater,
ctrlOpts control.Options,
) ([]data.Collection, error) {
) ([]data.BackupCollection, error) {
logger.Ctx(ctx).With("site", siteID).Debug("Creating SharePoint Pages collections")
spcs := make([]data.Collection, 0)
spcs := make([]data.BackupCollection, 0)
// make the betaClient
adpt, err := graph.CreateAdapter(creds.AzureTenantID, creds.AzureClientID, creds.AzureClientSecret)

View File

@ -30,7 +30,7 @@ import (
// -- Switch:
// ---- Libraries restored via the same workflow as oneDrive
// ---- Lists call RestoreCollection()
// ----> for each data.Stream within Collection.Items()
// ----> for each data.Stream within RestoreCollection.Items()
// ----> restoreListItems() is called
// Restored List can be found in the Site's `Site content` page
// Restored Libraries can be found within the Site's `Pages` page
@ -43,7 +43,7 @@ func RestoreCollections(
creds account.M365Config,
service graph.Servicer,
dest control.RestoreDestination,
dcs []data.Collection,
dcs []data.RestoreCollection,
deets *details.Builder,
) (*support.ConnectorOperationStatus, error) {
var (
@ -219,7 +219,7 @@ func restoreListItem(
func RestoreListCollection(
ctx context.Context,
service graph.Servicer,
dc data.Collection,
dc data.RestoreCollection,
restoreContainerName string,
deets *details.Builder,
errUpdater func(string, error),
@ -291,7 +291,7 @@ func RestoreListCollection(
func RestorePageCollection(
ctx context.Context,
creds account.M365Config,
dc data.Collection,
dc data.RestoreCollection,
restoreContainerName string,
deets *details.Builder,
errUpdater func(string, error),

View File

@ -21,8 +21,8 @@ const (
DeletedState
)
// A Collection represents a compilation of data from the
// same type application (e.g. mail)
// A Collection represents the set of data within a single logical location
// denoted by FullPath.
type Collection interface {
// Items returns a channel from which items in the collection can be read.
// Each returned struct contains the next item in the collection
@ -30,10 +30,13 @@ type Collection interface {
// an unrecoverable error caused an early termination in the sender.
Items() <-chan Stream
// FullPath returns a path struct that acts as a metadata tag for this
// DataCollection. Returned items should be ordered from most generic to least
// generic. For example, a DataCollection for emails from a specific user
// would be {"<tenant id>", "exchange", "<user ID>", "emails"}.
// Collection.
FullPath() path.Path
}
// BackupCollection is an extension of Collection that is used during backups.
type BackupCollection interface {
Collection
// PreviousPath returns the path.Path this collection used to reside at
// (according to the M365 ID for the container) if the collection was moved or
// renamed. Returns nil if the collection is new.
@ -58,6 +61,11 @@ type Collection interface {
DoNotMergeItems() bool
}
// RestoreCollection is an extension of Collection that is used during restores.
type RestoreCollection interface {
Collection
}
// Stream represents a single item within a Collection
// that can be consumed as a stream (it embeds io.Reader)
type Stream interface {
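
One payoff of the split, visible in the `kopiaDataCollection` hunk just below: a restore-side type now satisfies its interface with only `Items` and `FullPath`. A minimal sketch (the type, its fields, and the `path` import location are assumptions, not code from this PR):

```go
package restoreexample

import (
	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/pkg/path" // assumed location of the path package
)

// restoreOnlyCollection is a hypothetical restore-side collection.
type restoreOnlyCollection struct {
	fullPath path.Path
	streams  []data.Stream
}

// Compile-time assertion in the style used throughout the codebase.
var _ data.RestoreCollection = &restoreOnlyCollection{}

func (c *restoreOnlyCollection) Items() <-chan data.Stream {
	res := make(chan data.Stream)

	go func() {
		defer close(res)

		for _, s := range c.streams {
			res <- s
		}
	}()

	return res
}

func (c *restoreOnlyCollection) FullPath() path.Path {
	return c.fullPath
}
```

No `PreviousPath`, `State`, or `DoNotMergeItems` stubs are required anymore, which is exactly what the deletions in the next file show.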

View File

@ -8,8 +8,8 @@ import (
)
var (
_ data.Collection = &kopiaDataCollection{}
_ data.Stream = &kopiaDataStream{}
_ data.RestoreCollection = &kopiaDataCollection{}
_ data.Stream = &kopiaDataStream{}
)
type kopiaDataCollection struct {
@ -35,18 +35,6 @@ func (kdc kopiaDataCollection) FullPath() path.Path {
return kdc.path
}
func (kdc kopiaDataCollection) PreviousPath() path.Path {
return nil
}
func (kdc kopiaDataCollection) State() data.CollectionState {
return data.NewState
}
func (kdc kopiaDataCollection) DoNotMergeItems() bool {
return false
}
type kopiaDataStream struct {
reader io.ReadCloser
uuid string

View File

@ -254,7 +254,7 @@ func (cp *corsoProgress) get(k string) *itemDetails {
func collectionEntries(
ctx context.Context,
cb func(context.Context, fs.Entry) error,
streamedEnts data.Collection,
streamedEnts data.BackupCollection,
progress *corsoProgress,
) (map[string]struct{}, *multierror.Error) {
if streamedEnts == nil {
@ -442,7 +442,7 @@ func getStreamItemFunc(
curPath path.Path,
prevPath path.Path,
staticEnts []fs.Entry,
streamedEnts data.Collection,
streamedEnts data.BackupCollection,
baseDir fs.Directory,
globalExcludeSet map[string]struct{},
progress *corsoProgress,
@ -540,7 +540,7 @@ type treeMap struct {
childDirs map[string]*treeMap
// Reference to data pulled from the external service. Contains only items in
// this directory. Does not contain references to subdirectories.
collection data.Collection
collection data.BackupCollection
// Reference to directory in base snapshot. The referenced directory itself
// may contain files and subdirectories, but the subdirectories should
// eventually be added when walking the base snapshot to build the hierarchy,
@ -617,7 +617,7 @@ func getTreeNode(roots map[string]*treeMap, pathElements []string) *treeMap {
func inflateCollectionTree(
ctx context.Context,
collections []data.Collection,
collections []data.BackupCollection,
) (map[string]*treeMap, map[string]path.Path, error) {
roots := make(map[string]*treeMap)
// Contains the old path for collections that have been moved or renamed.
@ -911,13 +911,13 @@ func inflateBaseTree(
// exclude from base directories when uploading the snapshot. As items in *all*
// base directories will be checked for in every base directory, this assumes
// that items in the bases are unique. Deletions of directories or subtrees
// should be represented as changes in the status of a Collection, not an entry
// in the globalExcludeSet.
// should be represented as changes in the status of a BackupCollection, not an
// entry in the globalExcludeSet.
func inflateDirTree(
ctx context.Context,
loader snapshotLoader,
baseSnaps []IncrementalBase,
collections []data.Collection,
collections []data.BackupCollection,
globalExcludeSet map[string]struct{},
progress *corsoProgress,
) (fs.Directory, error) {

View File

@ -683,7 +683,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree() {
progress := &corsoProgress{pending: map[string]*itemDetails{}}
collections := []data.Collection{
collections := []data.BackupCollection{
mockconnector.NewMockExchangeCollection(
suite.testPath,
expectedFileCount[user1Encoded],
@ -759,11 +759,11 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_MixedDirectory()
// - 42 separate files
table := []struct {
name string
layout []data.Collection
layout []data.BackupCollection
}{
{
name: "SubdirFirst",
layout: []data.Collection{
layout: []data.BackupCollection{
mockconnector.NewMockExchangeCollection(
p2,
5,
@ -776,7 +776,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_MixedDirectory()
},
{
name: "SubdirLast",
layout: []data.Collection{
layout: []data.BackupCollection{
mockconnector.NewMockExchangeCollection(
suite.testPath,
42,
@ -845,7 +845,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_Fails() {
table := []struct {
name string
layout []data.Collection
layout []data.BackupCollection
}{
{
"MultipleRoots",
@ -862,7 +862,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_Fails() {
// - emails
// - Inbox
// - 42 separate files
[]data.Collection{
[]data.BackupCollection{
mockconnector.NewMockExchangeCollection(
suite.testPath,
5,
@ -875,7 +875,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_Fails() {
},
{
"NoCollectionPath",
[]data.Collection{
[]data.BackupCollection{
mockconnector.NewMockExchangeCollection(
nil,
5,
@ -973,7 +973,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeErrors() {
progress := &corsoProgress{pending: map[string]*itemDetails{}}
cols := []data.Collection{}
cols := []data.BackupCollection{}
for _, s := range test.states {
prevPath := dirPath
nowPath := dirPath
@ -1037,17 +1037,17 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
table := []struct {
name string
inputCollections func() []data.Collection
inputCollections func() []data.BackupCollection
expected *expectedNode
}{
{
name: "SkipsDeletedItems",
inputCollections: func() []data.Collection {
inputCollections: func() []data.BackupCollection {
mc := mockconnector.NewMockExchangeCollection(dirPath, 1)
mc.Names[0] = testFileName
mc.DeletedItems[0] = true
return []data.Collection{mc}
return []data.BackupCollection{mc}
},
expected: expectedTreeWithChildren(
[]string{
@ -1066,13 +1066,13 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
},
{
name: "AddsNewItems",
inputCollections: func() []data.Collection {
inputCollections: func() []data.BackupCollection {
mc := mockconnector.NewMockExchangeCollection(dirPath, 1)
mc.Names[0] = testFileName2
mc.Data[0] = testFileData2
mc.ColState = data.NotMovedState
return []data.Collection{mc}
return []data.BackupCollection{mc}
},
expected: expectedTreeWithChildren(
[]string{
@ -1101,13 +1101,13 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
},
{
name: "SkipsUpdatedItems",
inputCollections: func() []data.Collection {
inputCollections: func() []data.BackupCollection {
mc := mockconnector.NewMockExchangeCollection(dirPath, 1)
mc.Names[0] = testFileName
mc.Data[0] = testFileData2
mc.ColState = data.NotMovedState
return []data.Collection{mc}
return []data.BackupCollection{mc}
},
expected: expectedTreeWithChildren(
[]string{
@ -1132,7 +1132,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
},
{
name: "DeleteAndNew",
inputCollections: func() []data.Collection {
inputCollections: func() []data.BackupCollection {
mc1 := mockconnector.NewMockExchangeCollection(dirPath, 0)
mc1.ColState = data.DeletedState
mc1.PrevPath = dirPath
@ -1142,7 +1142,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
mc2.Names[0] = testFileName2
mc2.Data[0] = testFileData2
return []data.Collection{mc1, mc2}
return []data.BackupCollection{mc1, mc2}
},
expected: expectedTreeWithChildren(
[]string{
@ -1167,7 +1167,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
},
{
name: "MovedAndNew",
inputCollections: func() []data.Collection {
inputCollections: func() []data.BackupCollection {
mc1 := mockconnector.NewMockExchangeCollection(dirPath2, 0)
mc1.ColState = data.MovedState
mc1.PrevPath = dirPath
@ -1177,7 +1177,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
mc2.Names[0] = testFileName2
mc2.Data[0] = testFileData2
return []data.Collection{mc1, mc2}
return []data.BackupCollection{mc1, mc2}
},
expected: expectedTreeWithChildren(
[]string{
@ -1211,13 +1211,13 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
},
{
name: "NewDoesntMerge",
inputCollections: func() []data.Collection {
inputCollections: func() []data.BackupCollection {
mc1 := mockconnector.NewMockExchangeCollection(dirPath, 1)
mc1.ColState = data.NewState
mc1.Names[0] = testFileName2
mc1.Data[0] = testFileData2
return []data.Collection{mc1}
return []data.BackupCollection{mc1}
},
expected: expectedTreeWithChildren(
[]string{
@ -1369,13 +1369,13 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
table := []struct {
name string
inputCollections func(t *testing.T) []data.Collection
inputCollections func(t *testing.T) []data.BackupCollection
inputExcludes map[string]struct{}
expected *expectedNode
}{
{
name: "GlobalExcludeSet",
inputCollections: func(t *testing.T) []data.Collection {
inputCollections: func(t *testing.T) []data.BackupCollection {
return nil
},
inputExcludes: map[string]struct{}{
@ -1417,7 +1417,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
{
name: "MovesSubtree",
inputCollections: func(t *testing.T) []data.Collection {
inputCollections: func(t *testing.T) []data.BackupCollection {
newPath := makePath(
t,
[]string{testTenant, service, testUser, category, testInboxDir + "2"},
@ -1428,7 +1428,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
mc.PrevPath = inboxPath
mc.ColState = data.MovedState
return []data.Collection{mc}
return []data.BackupCollection{mc}
},
expected: expectedTreeWithChildren(
[]string{
@ -1474,7 +1474,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
{
name: "MovesChildAfterAncestorMove",
inputCollections: func(t *testing.T) []data.Collection {
inputCollections: func(t *testing.T) []data.BackupCollection {
newInboxPath := makePath(
t,
[]string{testTenant, service, testUser, category, testInboxDir + "2"},
@ -1494,7 +1494,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
work.PrevPath = workPath
work.ColState = data.MovedState
return []data.Collection{inbox, work}
return []data.BackupCollection{inbox, work}
},
expected: expectedTreeWithChildren(
[]string{
@ -1540,7 +1540,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
{
name: "MovesChildAfterAncestorDelete",
inputCollections: func(t *testing.T) []data.Collection {
inputCollections: func(t *testing.T) []data.BackupCollection {
newWorkPath := makePath(
t,
[]string{testTenant, service, testUser, category, workDir},
@ -1555,7 +1555,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
work.PrevPath = workPath
work.ColState = data.MovedState
return []data.Collection{inbox, work}
return []data.BackupCollection{inbox, work}
},
expected: expectedTreeWithChildren(
[]string{
@ -1579,7 +1579,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
{
name: "ReplaceDeletedDirectory",
inputCollections: func(t *testing.T) []data.Collection {
inputCollections: func(t *testing.T) []data.BackupCollection {
personal := mockconnector.NewMockExchangeCollection(personalPath, 0)
personal.PrevPath = personalPath
personal.ColState = data.DeletedState
@ -1588,7 +1588,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
work.PrevPath = workPath
work.ColState = data.MovedState
return []data.Collection{personal, work}
return []data.BackupCollection{personal, work}
},
expected: expectedTreeWithChildren(
[]string{
@ -1620,7 +1620,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
{
name: "ReplaceDeletedDirectoryWithNew",
inputCollections: func(t *testing.T) []data.Collection {
inputCollections: func(t *testing.T) []data.BackupCollection {
personal := mockconnector.NewMockExchangeCollection(personalPath, 0)
personal.PrevPath = personalPath
personal.ColState = data.DeletedState
@ -1630,7 +1630,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
newCol.Names[0] = workFileName2
newCol.Data[0] = workFileData2
return []data.Collection{personal, newCol}
return []data.BackupCollection{personal, newCol}
},
expected: expectedTreeWithChildren(
[]string{
@ -1671,7 +1671,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
{
name: "ReplaceMovedDirectory",
inputCollections: func(t *testing.T) []data.Collection {
inputCollections: func(t *testing.T) []data.BackupCollection {
newPersonalPath := makePath(
t,
[]string{testTenant, service, testUser, category, personalDir},
@ -1686,7 +1686,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
work.PrevPath = workPath
work.ColState = data.MovedState
return []data.Collection{personal, work}
return []data.BackupCollection{personal, work}
},
expected: expectedTreeWithChildren(
[]string{
@ -1729,7 +1729,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
{
name: "MoveDirectoryAndMergeItems",
inputCollections: func(t *testing.T) []data.Collection {
inputCollections: func(t *testing.T) []data.BackupCollection {
newPersonalPath := makePath(
t,
[]string{testTenant, service, testUser, category, workDir},
@ -1744,7 +1744,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
personal.Names[1] = testFileName4
personal.Data[1] = testFileData4
return []data.Collection{personal}
return []data.BackupCollection{personal}
},
expected: expectedTreeWithChildren(
[]string{
@ -1793,7 +1793,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
{
name: "MoveParentDeleteFileNoMergeSubtreeMerge",
inputCollections: func(t *testing.T) []data.Collection {
inputCollections: func(t *testing.T) []data.BackupCollection {
newInboxPath := makePath(
t,
[]string{testTenant, service, testUser, category, personalDir},
@ -1824,7 +1824,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
work.Names[0] = testFileName6
work.Data[0] = testFileData6
return []data.Collection{inbox, work}
return []data.BackupCollection{inbox, work}
},
expected: expectedTreeWithChildren(
[]string{
@ -1876,7 +1876,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
},
{
name: "NoMoveParentDeleteFileNoMergeSubtreeMerge",
inputCollections: func(t *testing.T) []data.Collection {
inputCollections: func(t *testing.T) []data.BackupCollection {
inbox := mockconnector.NewMockExchangeCollection(inboxPath, 1)
inbox.PrevPath = inboxPath
inbox.ColState = data.NotMovedState
@ -1892,7 +1892,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
work.Names[0] = testFileName6
work.Data[0] = testFileData6
return []data.Collection{inbox, work}
return []data.BackupCollection{inbox, work}
},
expected: expectedTreeWithChildren(
[]string{
@ -2105,7 +2105,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre
snapshotRoot: getBaseSnapshot(),
}
collections := []data.Collection{mc}
collections := []data.BackupCollection{mc}
// Returned directory structure should look like:
// - a-tenant
@ -2361,7 +2361,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt
},
}
collections := []data.Collection{mc}
collections := []data.BackupCollection{mc}
dirTree, err := inflateDirTree(
ctx,

View File

@ -118,7 +118,7 @@ type IncrementalBase struct {
func (w Wrapper) BackupCollections(
ctx context.Context,
previousSnapshots []IncrementalBase,
collections []data.Collection,
collections []data.BackupCollection,
globalExcludeSet map[string]struct{},
tags map[string]string,
buildTreeWithBase bool,
@ -368,7 +368,7 @@ func (w Wrapper) RestoreMultipleItems(
snapshotID string,
paths []path.Path,
bcounter ByteCounter,
) ([]data.Collection, error) {
) ([]data.RestoreCollection, error) {
ctx, end := D.Span(ctx, "kopia:restoreMultipleItems")
defer end()
@ -409,7 +409,7 @@ func (w Wrapper) RestoreMultipleItems(
c.streams = append(c.streams, ds)
}
res := make([]data.Collection, 0, len(cols))
res := make([]data.RestoreCollection, 0, len(cols))
for _, c := range cols {
res = append(res, c)
}
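
With both changes in place, the kopia `Wrapper` round trip reads directionally: backup consumes `BackupCollection`s, restore returns `RestoreCollection`s. A hedged usage sketch (`w`, `backupColls`, `tags`, `snapshotID`, `paths`, and `bc` are placeholders):

```go
// Backup: write the collections into a new snapshot.
backupStats, _, _, err := w.BackupCollections(ctx, nil, backupColls, nil, tags, false)
if err != nil {
	return err
}

// Restore: pull the items behind a set of paths back out.
restored, err := w.RestoreMultipleItems(ctx, snapshotID, paths, bc)
if err != nil {
	return err
}

for _, rc := range restored { // rc is a data.RestoreCollection
	for item := range rc.Items() { // item is a data.Stream
		_ = item
	}
}

_ = backupStats
```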

View File

@ -52,7 +52,7 @@ var (
func testForFiles(
t *testing.T,
expected map[string][]byte,
collections []data.Collection,
collections []data.RestoreCollection,
) {
t.Helper()
@ -196,7 +196,7 @@ func (suite *KopiaIntegrationSuite) TearDownTest() {
}
func (suite *KopiaIntegrationSuite) TestBackupCollections() {
collections := []data.Collection{
collections := []data.BackupCollection{
mockconnector.NewMockExchangeCollection(
suite.testPath1,
5,
@ -353,7 +353,7 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
stats, _, _, err := w.BackupCollections(
ctx,
nil,
[]data.Collection{dc1, dc2},
[]data.BackupCollection{dc1, dc2},
nil,
tags,
true,
@ -382,6 +382,41 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
testForFiles(t, expected, result)
}
type mockBackupCollection struct {
path path.Path
streams []data.Stream
}
func (c *mockBackupCollection) Items() <-chan data.Stream {
res := make(chan data.Stream)
go func() {
defer close(res)
for _, s := range c.streams {
res <- s
}
}()
return res
}
func (c mockBackupCollection) FullPath() path.Path {
return c.path
}
func (c mockBackupCollection) PreviousPath() path.Path {
return nil
}
func (c mockBackupCollection) State() data.CollectionState {
return data.NewState
}
func (c mockBackupCollection) DoNotMergeItems() bool {
return false
}
func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
t := suite.T()
@ -396,8 +431,8 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
tags[k] = ""
}
collections := []data.Collection{
&kopiaDataCollection{
collections := []data.BackupCollection{
&mockBackupCollection{
path: suite.testPath1,
streams: []data.Stream{
&mockconnector.MockExchangeData{
@ -410,7 +445,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
},
},
},
&kopiaDataCollection{
&mockBackupCollection{
path: suite.testPath2,
streams: []data.Stream{
&mockconnector.MockExchangeData{
@ -477,7 +512,7 @@ type backedupFile struct {
func (suite *KopiaIntegrationSuite) TestBackupCollectionsHandlesNoCollections() {
table := []struct {
name string
collections []data.Collection
collections []data.BackupCollection
}{
{
name: "NilCollections",
@ -485,7 +520,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollectionsHandlesNoCollections()
},
{
name: "EmptyCollections",
collections: []data.Collection{},
collections: []data.BackupCollection{},
},
}
@ -624,10 +659,10 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupTest() {
suite.w = &Wrapper{c}
collections := []data.Collection{}
collections := []data.BackupCollection{}
for _, parent := range []path.Path{suite.testPath1, suite.testPath2} {
collection := &kopiaDataCollection{path: parent}
collection := &mockBackupCollection{path: parent}
for _, item := range suite.files[parent.String()] {
collection.streams = append(
@ -723,7 +758,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
excludeItem bool
expectedCachedItems int
expectedUncachedItems int
cols func() []data.Collection
cols func() []data.BackupCollection
backupIDCheck require.ValueAssertionFunc
restoreCheck assert.ErrorAssertionFunc
}{
@ -732,7 +767,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
excludeItem: true,
expectedCachedItems: len(suite.filesByPath) - 1,
expectedUncachedItems: 0,
cols: func() []data.Collection {
cols: func() []data.BackupCollection {
return nil
},
backupIDCheck: require.NotEmpty,
@ -743,7 +778,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
// No snapshot should be made since there were no changes.
expectedCachedItems: 0,
expectedUncachedItems: 0,
cols: func() []data.Collection {
cols: func() []data.BackupCollection {
return nil
},
// Backup doesn't run.
@ -753,14 +788,14 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
name: "NoExcludeItemWithChanges",
expectedCachedItems: len(suite.filesByPath),
expectedUncachedItems: 1,
cols: func() []data.Collection {
cols: func() []data.BackupCollection {
c := mockconnector.NewMockExchangeCollection(
suite.testPath1,
1,
)
c.ColState = data.NotMovedState
return []data.Collection{c}
return []data.BackupCollection{c}
},
backupIDCheck: require.NotEmpty,
restoreCheck: assert.NoError,

View File

@ -308,9 +308,9 @@ func produceBackupDataCollections(
ctx context.Context,
gc *connector.GraphConnector,
sel selectors.Selector,
metadata []data.Collection,
metadata []data.RestoreCollection,
ctrlOpts control.Options,
) ([]data.Collection, map[string]struct{}, error) {
) ([]data.BackupCollection, map[string]struct{}, error) {
complete, closer := observe.MessageWithCompletion(ctx, observe.Safe("Discovering items to backup"))
defer func() {
complete <- struct{}{}
@ -331,7 +331,7 @@ type backuper interface {
BackupCollections(
ctx context.Context,
bases []kopia.IncrementalBase,
cs []data.Collection,
cs []data.BackupCollection,
excluded map[string]struct{},
tags map[string]string,
buildTreeWithBase bool,
@ -389,7 +389,7 @@ func consumeBackupDataCollections(
tenantID string,
reasons []kopia.Reason,
mans []*kopia.ManifestEntry,
cs []data.Collection,
cs []data.BackupCollection,
excludes map[string]struct{},
backupID model.StableID,
isIncremental bool,

View File

@ -387,10 +387,10 @@ func buildCollections(
tenant, user string,
dest control.RestoreDestination,
colls []incrementalCollection,
) []data.Collection {
) []data.RestoreCollection {
t.Helper()
collections := make([]data.Collection, 0, len(colls))
collections := make([]data.RestoreCollection, 0, len(colls))
for _, c := range colls {
pth := toDataLayerPath(

View File

@ -36,20 +36,20 @@ import (
type mockRestorer struct {
gotPaths []path.Path
colls []data.Collection
collsByID map[string][]data.Collection // snapshotID: []Collection
colls []data.RestoreCollection
collsByID map[string][]data.RestoreCollection // snapshotID: []RestoreCollection
err error
onRestore restoreFunc
}
type restoreFunc func(id string, ps []path.Path) ([]data.Collection, error)
type restoreFunc func(id string, ps []path.Path) ([]data.RestoreCollection, error)
func (mr *mockRestorer) buildRestoreFunc(
t *testing.T,
oid string,
ops []path.Path,
) {
mr.onRestore = func(id string, ps []path.Path) ([]data.Collection, error) {
mr.onRestore = func(id string, ps []path.Path) ([]data.RestoreCollection, error) {
assert.Equal(t, oid, id, "manifest id")
checkPaths(t, ops, ps)
@ -62,7 +62,7 @@ func (mr *mockRestorer) RestoreMultipleItems(
snapshotID string,
paths []path.Path,
bc kopia.ByteCounter,
) ([]data.Collection, error) {
) ([]data.RestoreCollection, error) {
mr.gotPaths = append(mr.gotPaths, paths...)
if mr.onRestore != nil {
@ -85,7 +85,7 @@ func checkPaths(t *testing.T, expected, got []path.Path) {
type mockBackuper struct {
checkFunc func(
bases []kopia.IncrementalBase,
cs []data.Collection,
cs []data.BackupCollection,
tags map[string]string,
buildTreeWithBase bool,
)
@ -94,7 +94,7 @@ type mockBackuper struct {
func (mbu mockBackuper) BackupCollections(
ctx context.Context,
bases []kopia.IncrementalBase,
cs []data.Collection,
cs []data.BackupCollection,
excluded map[string]struct{},
tags map[string]string,
buildTreeWithBase bool,
@ -559,7 +559,7 @@ func (suite *BackupOpSuite) TestBackupOperation_ConsumeBackupDataCollections_Pat
mbu := &mockBackuper{
checkFunc: func(
bases []kopia.IncrementalBase,
cs []data.Collection,
cs []data.BackupCollection,
tags map[string]string,
buildTreeWithBase bool,
) {

View File

@ -46,10 +46,10 @@ func produceManifestsAndMetadata(
tenantID string,
getMetadata bool,
errs fault.Adder,
) ([]*kopia.ManifestEntry, []data.Collection, bool, error) {
) ([]*kopia.ManifestEntry, []data.RestoreCollection, bool, error) {
var (
metadataFiles = graph.AllMetadataFileNames()
collections []data.Collection
collections []data.RestoreCollection
)
ms, err := mr.FetchPrevSnapshotManifests(
@ -183,7 +183,7 @@ func collectMetadata(
man *kopia.ManifestEntry,
fileNames []string,
tenantID string,
) ([]data.Collection, error) {
) ([]data.RestoreCollection, error) {
paths := []path.Path{}
for _, fn := range fileNames {

View File

@ -49,9 +49,8 @@ func (mg mockGetDetailsIDer) GetDetailsIDFromBackupID(
}
type mockColl struct {
id string // for comparisons
p path.Path
prevP path.Path
id string // for comparisons
p path.Path
}
func (mc mockColl) Items() <-chan data.Stream {
@ -62,18 +61,6 @@ func (mc mockColl) FullPath() path.Path {
return mc.p
}
func (mc mockColl) PreviousPath() path.Path {
return mc.prevP
}
func (mc mockColl) State() data.CollectionState {
return data.NewState
}
func (mc mockColl) DoNotMergeItems() bool {
return false
}
// ---------------------------------------------------------------------------
// tests
// ---------------------------------------------------------------------------
@ -447,7 +434,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
getMeta bool
assertErr assert.ErrorAssertionFunc
assertB assert.BoolAssertionFunc
expectDCS []data.Collection
expectDCS []data.RestoreCollection
expectNilMans bool
}{
{
@ -550,7 +537,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
{
name: "man missing backup id",
mr: mockManifestRestorer{
mockRestorer: mockRestorer{collsByID: map[string][]data.Collection{
mockRestorer: mockRestorer{collsByID: map[string][]data.RestoreCollection{
"id": {mockColl{id: "id_coll"}},
}},
mans: []*kopia.ManifestEntry{makeMan(path.EmailCategory, "id", "", "")},
@ -577,7 +564,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
{
name: "one complete, one incomplete",
mr: mockManifestRestorer{
mockRestorer: mockRestorer{collsByID: map[string][]data.Collection{
mockRestorer: mockRestorer{collsByID: map[string][]data.RestoreCollection{
"id": {mockColl{id: "id_coll"}},
"incmpl_id": {mockColl{id: "incmpl_id_coll"}},
}},
@ -591,12 +578,12 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
getMeta: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []data.Collection{mockColl{id: "id_coll"}},
expectDCS: []data.RestoreCollection{mockColl{id: "id_coll"}},
},
{
name: "single valid man",
mr: mockManifestRestorer{
mockRestorer: mockRestorer{collsByID: map[string][]data.Collection{
mockRestorer: mockRestorer{collsByID: map[string][]data.RestoreCollection{
"id": {mockColl{id: "id_coll"}},
}},
mans: []*kopia.ManifestEntry{makeMan(path.EmailCategory, "id", "", "bid")},
@ -606,12 +593,12 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
getMeta: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []data.Collection{mockColl{id: "id_coll"}},
expectDCS: []data.RestoreCollection{mockColl{id: "id_coll"}},
},
{
name: "multiple valid mans",
mr: mockManifestRestorer{
mockRestorer: mockRestorer{collsByID: map[string][]data.Collection{
mockRestorer: mockRestorer{collsByID: map[string][]data.RestoreCollection{
"mail": {mockColl{id: "mail_coll"}},
"contact": {mockColl{id: "contact_coll"}},
}},
@ -625,7 +612,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
getMeta: true,
assertErr: assert.NoError,
assertB: assert.True,
expectDCS: []data.Collection{
expectDCS: []data.RestoreCollection{
mockColl{id: "mail_coll"},
mockColl{id: "contact_coll"},
},

View File

@ -88,7 +88,7 @@ func (op RestoreOperation) validate() error {
// pointer wrapping the values, while those values
// get populated asynchronously.
type restoreStats struct {
cs []data.Collection
cs []data.RestoreCollection
gc *support.ConnectorOperationStatus
bytesRead *stats.ByteCounter
resourceCount int
@ -104,7 +104,7 @@ type restorer interface {
snapshotID string,
paths []path.Path,
bc kopia.ByteCounter,
) ([]data.Collection, error)
) ([]data.RestoreCollection, error)
}
// Run begins a synchronous restore operation.

View File

@ -10,6 +10,7 @@ import (
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/exchange"
"github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/events"
@ -61,7 +62,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
bytesRead: &stats.ByteCounter{
NumBytes: 42,
},
cs: []data.Collection{&exchange.Collection{}},
cs: []data.RestoreCollection{&mockconnector.MockExchangeDataCollection{}},
gc: &support.ConnectorOperationStatus{
ObjectCount: 1,
Successful: 1,
@ -82,7 +83,7 @@ func (suite *RestoreOpSuite) TestRestoreOperation_PersistResults() {
expectErr: assert.NoError,
stats: restoreStats{
bytesRead: &stats.ByteCounter{},
cs: []data.Collection{},
cs: []data.RestoreCollection{},
gc: &support.ConnectorOperationStatus{},
},
},

View File

@ -76,7 +76,7 @@ func (ss *streamStore) WriteBackupDetails(
backupStats, _, _, err := ss.kw.BackupCollections(
ctx,
nil,
[]data.Collection{dc},
[]data.BackupCollection{dc},
nil,
nil,
false)
@ -164,7 +164,7 @@ func (ss *streamStore) DeleteBackupDetails(
return nil
}
// streamCollection is a data.Collection used to persist
// streamCollection is a data.BackupCollection used to persist
// a single data stream
type streamCollection struct {
// folderPath indicates what level in the hierarchy this collection