Incrementals for Groups SharePoint backup (#4205)
---

#### Does this PR need a docs update or release note?

- [ ] ✅ Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] ⛔ No

#### Type of change

- [x] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup

#### Issue(s)

* https://github.com/alcionai/corso/issues/3990

#### Test Plan

- [ ] 💪 Manual
- [x] ⚡ Unit test
- [ ] 💚 E2E
This commit is contained in:
parent 248fc80fcd
commit 6e1be1f08c
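The heart of the change: incremental Groups backups need the previous backup's metadata, but for Groups those files live under a per-site `sites/<siteID>/` prefix rather than at a fixed location, so the backup producer now exposes `GetMetadataPaths` and the Groups service fans out one `delta`/`previouspath` pair per SharePoint site (see `groups.MetadataFiles` in the diff below). The snippet here is a minimal standalone sketch of that fan-out, not the Corso implementation itself; `groupsMetadataFilePaths` is a hypothetical helper, while the `sites`, `delta`, and `previouspath` names come straight from the diff and its unit tests.

```go
package main

import "fmt"

// Hypothetical helper mirroring the fan-out groups.MetadataFiles performs:
// for every site discovered in the previous backup's "previouspath" file,
// emit the path elements of each metadata file under sites/<siteID>/.
func groupsMetadataFilePaths(siteIDs, fileNames []string) [][]string {
	filePaths := [][]string{}

	for _, id := range siteIDs {
		for _, fn := range fileNames {
			filePaths = append(filePaths, []string{"sites", id, fn})
		}
	}

	return filePaths
}

func main() {
	// Mirrors the "multiple sites" case in backup_test.go.
	fmt.Println(groupsMetadataFilePaths(
		[]string{"id1", "id2"},
		[]string{"delta", "previouspath"}))
	// [[sites id1 delta] [sites id1 previouspath] [sites id2 delta] [sites id2 previouspath]]
}
```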
@@ -5,8 +5,6 @@ import (
 	"io"
 	"time"
 
-	"github.com/alcionai/clues"
-
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/fault"
@@ -80,8 +78,13 @@ var (
 
 type Collection struct {
 	Path path.Path
-	ItemData []*Item
+	Loc  *path.Builder
+	ItemData []data.Item
 	ItemsRecoverableErrs []error
+	CState data.CollectionState
+
+	// For restore
+	AuxItems map[string]data.Item
 }
 
 func (c Collection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
@@ -93,8 +96,9 @@ func (c Collection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item
 	el := errs.Local()
 
 	for _, item := range c.ItemData {
-		if item.ReadErr != nil {
-			el.AddRecoverable(ctx, item.ReadErr)
+		it, ok := item.(*Item)
+		if ok && it.ReadErr != nil {
+			el.AddRecoverable(ctx, it.ReadErr)
 			continue
 		}
 
@@ -114,17 +118,48 @@ func (c Collection) FullPath() path.Path {
 }
 
 func (c Collection) PreviousPath() path.Path {
-	return nil
+	return c.Path
+}
+
+func (c Collection) LocationPath() *path.Builder {
+	return c.Loc
 }
 
 func (c Collection) State() data.CollectionState {
-	return data.NewState
+	return c.CState
 }
 
 func (c Collection) DoNotMergeItems() bool {
-	return true
+	return false
 }
 
-func (c Collection) FetchItemByName(ctx context.Context, name string) (data.Item, error) {
-	return &Item{}, clues.New("not implemented")
+func (c Collection) FetchItemByName(
+	ctx context.Context,
+	name string,
+) (data.Item, error) {
+	res := c.AuxItems[name]
+	if res == nil {
+		return nil, data.ErrNotFound
+	}
+
+	return res, nil
+}
+
+var _ data.RestoreCollection = &RestoreCollection{}
+
+type RestoreCollection struct {
+	data.Collection
+	AuxItems map[string]data.Item
+}
+
+func (rc RestoreCollection) FetchItemByName(
+	ctx context.Context,
+	name string,
+) (data.Item, error) {
+	res := rc.AuxItems[name]
+	if res == nil {
+		return nil, data.ErrNotFound
+	}
+
+	return res, nil
 }
@@ -26,7 +26,6 @@ import (
 	"github.com/alcionai/corso/src/internal/data"
 	dataMock "github.com/alcionai/corso/src/internal/data/mock"
 	"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
-	m365Mock "github.com/alcionai/corso/src/internal/m365/mock"
 	exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/backup/details"
@@ -1126,10 +1125,10 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
 		streams = append(streams, ms)
 	}
 
-	mc := &m365Mock.BackupCollection{
+	mc := &dataMock.Collection{
 		Path: storePath,
 		Loc:  locPath,
-		Streams: streams,
+		ItemData: streams,
 	}
 
 	return []data.BackupCollection{mc}
@@ -1153,11 +1152,11 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
 		ItemInfo: details.ItemInfo{OneDrive: &info},
 	}
 
-	mc := &m365Mock.BackupCollection{
+	mc := &dataMock.Collection{
 		Path: storePath,
 		Loc:  locPath,
-		Streams: []data.Item{ms},
+		ItemData: []data.Item{ms},
 		CState: data.NotMovedState,
 	}
 
 	return []data.BackupCollection{mc}
@@ -1296,10 +1295,10 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
 	r := NewReason(testTenant, testUser, path.ExchangeService, path.EmailCategory)
 
 	collections := []data.BackupCollection{
-		&m365Mock.BackupCollection{
+		&dataMock.Collection{
 			Path: suite.storePath1,
 			Loc:  loc1,
-			Streams: []data.Item{
+			ItemData: []data.Item{
 				&dataMock.Item{
 					ItemID: testFileName,
 					Reader: io.NopCloser(bytes.NewReader(testFileData)),
@@ -1312,10 +1311,10 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
 				},
 			},
 		},
-		&m365Mock.BackupCollection{
+		&dataMock.Collection{
 			Path: suite.storePath2,
 			Loc:  loc2,
-			Streams: []data.Item{
+			ItemData: []data.Item{
 				&dataMock.Item{
 					ItemID: testFileName3,
 					Reader: io.NopCloser(bytes.NewReader(testFileData3)),
@@ -1340,6 +1339,8 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
 		},
 	}
 
+	errs := fault.New(true)
+
 	stats, deets, _, err := suite.w.ConsumeBackupCollections(
 		suite.ctx,
 		[]identity.Reasoner{r},
@@ -1348,13 +1349,14 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
 		nil,
 		nil,
 		true,
-		fault.New(true))
+		errs)
 	require.Error(t, err, clues.ToCore(err))
-	assert.Equal(t, 0, stats.ErrorCount)
-	assert.Equal(t, 5, stats.TotalFileCount)
-	assert.Equal(t, 6, stats.TotalDirectoryCount)
-	assert.Equal(t, 1, stats.IgnoredErrorCount)
-	assert.False(t, stats.Incomplete)
+	assert.Equal(t, 0, stats.ErrorCount, "error count")
+	assert.Equal(t, 5, stats.TotalFileCount, "total files")
+	assert.Equal(t, 6, stats.TotalDirectoryCount, "total directories")
+	assert.Equal(t, 0, stats.IgnoredErrorCount, "ignored errors")
+	assert.Equal(t, 1, len(errs.Errors().Recovered), "recovered errors")
+	assert.False(t, stats.Incomplete, "incomplete")
 	// 5 file and 2 folder entries.
 	assert.Len(t, deets.Details().Entries, 5+2)
 
@@ -1373,7 +1375,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
 
 	require.Len(t, dcs, 1, "number of restore collections")
 
-	errs := fault.New(true)
+	errs = fault.New(true)
 	items := dcs[0].Items(suite.ctx, errs)
 
 	// Get all the items from channel
@@ -1555,11 +1557,11 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupTest() {
 
 	for _, parent := range []path.Path{suite.testPath1, suite.testPath2} {
 		loc := path.Builder{}.Append(parent.Folders()...)
-		collection := &m365Mock.BackupCollection{Path: parent, Loc: loc}
+		collection := &dataMock.Collection{Path: parent, Loc: loc}
 
 		for _, item := range suite.files[parent.String()] {
-			collection.Streams = append(
-				collection.Streams,
+			collection.ItemData = append(
+				collection.ItemData,
 				&dataMock.Item{
 					ItemID: item.itemPath.Item(),
 					Reader: io.NopCloser(bytes.NewReader(item.data)),
@@ -8,12 +8,15 @@ import (
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/diagnostics"
+	"github.com/alcionai/corso/src/internal/kopia"
+	kinject "github.com/alcionai/corso/src/internal/kopia/inject"
 	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/internal/m365/service/exchange"
 	"github.com/alcionai/corso/src/internal/m365/service/groups"
 	"github.com/alcionai/corso/src/internal/m365/service/onedrive"
 	"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
 	"github.com/alcionai/corso/src/internal/operations/inject"
+	bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/filters"
 	"github.com/alcionai/corso/src/pkg/logger"
@@ -186,3 +189,55 @@ func verifyBackupInputs(sels selectors.Selector, cachedIDs []string) error {
 
 	return nil
 }
+
+func (ctrl *Controller) GetMetadataPaths(
+	ctx context.Context,
+	r kinject.RestoreProducer,
+	man kopia.ManifestEntry,
+	errs *fault.Bus,
+) ([]path.RestorePaths, error) {
+	var (
+		paths = []path.RestorePaths{}
+		err   error
+	)
+
+	for _, reason := range man.Reasons {
+		filePaths := [][]string{}
+
+		switch reason.Service() {
+		case path.GroupsService:
+			filePaths, err = groups.MetadataFiles(ctx, reason, r, man.ID, errs)
+			if err != nil {
+				return nil, err
+			}
+		default:
+			for _, fn := range bupMD.AllMetadataFileNames() {
+				filePaths = append(filePaths, []string{fn})
+			}
+		}
+
+		for _, fp := range filePaths {
+			pth, err := path.BuildMetadata(
+				reason.Tenant(),
+				reason.ProtectedResource(),
+				reason.Service(),
+				reason.Category(),
+				true,
+				fp...)
+			if err != nil {
+				return nil, err
+			}
+
+			dir, err := pth.Dir()
+			if err != nil {
+				return nil, clues.
+					Wrap(err, "building metadata collection path").
+					With("metadata_file", fp)
+			}
+
+			paths = append(paths, path.RestorePaths{StoragePath: pth, RestorePath: dir})
+		}
+	}
+
+	return paths, nil
+}
@@ -122,10 +122,10 @@ func deserializeMetadata(
 
 		switch item.ID() {
 		case bupMD.PreviousPathFileName:
-			err = deserializeMap(item.ToReader(), prevFolders)
+			err = DeserializeMap(item.ToReader(), prevFolders)
 
 		case bupMD.DeltaURLsFileName:
-			err = deserializeMap(item.ToReader(), prevDeltas)
+			err = DeserializeMap(item.ToReader(), prevDeltas)
 
 		default:
 			logger.Ctx(ictx).Infow(
@@ -191,11 +191,11 @@ func deserializeMetadata(
 
 var errExistingMapping = clues.New("mapping already exists for same drive ID")
 
-// deserializeMap takes an reader and a map of already deserialized items and
+// DeserializeMap takes an reader and a map of already deserialized items and
 // adds the newly deserialized items to alreadyFound. Items are only added to
 // alreadyFound if none of the keys in the freshly deserialized map already
 // exist in alreadyFound. reader is closed at the end of this function.
-func deserializeMap[T any](reader io.ReadCloser, alreadyFound map[string]T) error {
+func DeserializeMap[T any](reader io.ReadCloser, alreadyFound map[string]T) error {
 	defer reader.Close()
 
 	tmp := map[string]T{}
@@ -32,8 +32,8 @@ func (suite *ExportUnitSuite) TestStreamItems() {
 		{
 			name: "no errors",
 			backingColl: dataMock.Collection{
-				ItemData: []*dataMock.Item{
-					{ItemID: "zim"},
+				ItemData: []data.Item{
+					&dataMock.Item{ItemID: "zim"},
 				},
 			},
 			expectName: "zim",
@@ -51,8 +51,8 @@ func (suite *ExportUnitSuite) TestStreamItems() {
 		{
 			name: "items and recoverable errors",
 			backingColl: dataMock.Collection{
-				ItemData: []*dataMock.Item{
-					{ItemID: "gir"},
+				ItemData: []data.Item{
+					&dataMock.Item{ItemID: "gir"},
 				},
 				ItemsRecoverableErrs: []error{
 					clues.New("I miss my cupcake."),
@@ -1,67 +0,0 @@
-package mock
-
-import (
-	"context"
-
-	"github.com/alcionai/corso/src/internal/data"
-	"github.com/alcionai/corso/src/pkg/fault"
-	"github.com/alcionai/corso/src/pkg/path"
-)
-
-type RestoreCollection struct {
-	data.Collection
-	AuxItems map[string]data.Item
-}
-
-func (rc RestoreCollection) FetchItemByName(
-	ctx context.Context,
-	name string,
-) (data.Item, error) {
-	res := rc.AuxItems[name]
-	if res == nil {
-		return nil, data.ErrNotFound
-	}
-
-	return res, nil
-}
-
-type BackupCollection struct {
-	Path    path.Path
-	Loc     *path.Builder
-	Streams []data.Item
-	CState  data.CollectionState
-}
-
-func (c *BackupCollection) Items(context.Context, *fault.Bus) <-chan data.Item {
-	res := make(chan data.Item)
-
-	go func() {
-		defer close(res)
-
-		for _, s := range c.Streams {
-			res <- s
-		}
-	}()
-
-	return res
-}
-
-func (c BackupCollection) FullPath() path.Path {
-	return c.Path
-}
-
-func (c BackupCollection) PreviousPath() path.Path {
-	return c.Path
-}
-
-func (c BackupCollection) LocationPath() *path.Builder {
-	return c.Loc
-}
-
-func (c BackupCollection) State() data.CollectionState {
-	return c.CState
-}
-
-func (c BackupCollection) DoNotMergeItems() bool {
-	return false
-}
@@ -3,9 +3,13 @@ package mock
 import (
 	"context"
 
+	"github.com/alcionai/clues"
+
 	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
 	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/kopia"
+	kinject "github.com/alcionai/corso/src/internal/kopia/inject"
 	"github.com/alcionai/corso/src/internal/operations/inject"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/control"
@@ -46,6 +50,15 @@ func (ctrl Controller) ProduceBackupCollections(
 	return ctrl.Collections, ctrl.Exclude, ctrl.Err == nil, ctrl.Err
 }
 
+func (ctrl *Controller) GetMetadataPaths(
+	ctx context.Context,
+	r kinject.RestoreProducer,
+	man kopia.ManifestEntry,
+	errs *fault.Bus,
+) ([]path.RestorePaths, error) {
+	return nil, clues.New("not implemented")
+}
+
 func (ctrl Controller) IsServiceEnabled(
 	_ context.Context,
 	_ path.ServiceType,
@@ -4,12 +4,14 @@ import (
 	"context"
 
 	"github.com/alcionai/clues"
+	"github.com/kopia/kopia/repo/manifest"
 	"golang.org/x/exp/slices"
 
 	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/data"
+	kinject "github.com/alcionai/corso/src/internal/kopia/inject"
 	"github.com/alcionai/corso/src/internal/m365/collection/drive"
 	"github.com/alcionai/corso/src/internal/m365/collection/groups"
 	"github.com/alcionai/corso/src/internal/m365/collection/site"
@@ -19,6 +21,7 @@ import (
 	"github.com/alcionai/corso/src/internal/observe"
 	"github.com/alcionai/corso/src/internal/operations/inject"
 	"github.com/alcionai/corso/src/pkg/account"
+	"github.com/alcionai/corso/src/pkg/backup/identity"
 	"github.com/alcionai/corso/src/pkg/backup/metadata"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
@@ -96,12 +99,21 @@ func ProduceBackupCollections(
 		return nil, nil, false, err
 	}
 
+	siteMetadataCollection := map[string][]data.RestoreCollection{}
+
+	// Once we have metadata collections for chat as well, we will have to filter those out
+	for _, c := range bpc.MetadataCollections {
+		siteID := c.FullPath().Elements().Last()
+		siteMetadataCollection[siteID] = append(siteMetadataCollection[siteID], c)
+	}
+
 	pr := idname.NewProvider(ptr.Val(resp.GetId()), ptr.Val(resp.GetName()))
 	sbpc := inject.BackupProducerConfig{
 		LastBackupVersion: bpc.LastBackupVersion,
 		Options:           bpc.Options,
 		ProtectedResource: pr,
 		Selector:          bpc.Selector,
+		MetadataCollections: siteMetadataCollection[ptr.Val(resp.GetId())],
 	}
 
 	bh := drive.NewGroupBackupHandler(
@@ -211,3 +223,125 @@ func getSitesMetadataCollection(
 
 	return md, err
 }
+
+func MetadataFiles(
+	ctx context.Context,
+	reason identity.Reasoner,
+	r kinject.RestoreProducer,
+	manID manifest.ID,
+	errs *fault.Bus,
+) ([][]string, error) {
+	pth, err := path.BuildMetadata(
+		reason.Tenant(),
+		reason.ProtectedResource(),
+		reason.Service(),
+		reason.Category(),
+		true,
+		odConsts.SitesPathDir,
+		metadata.PreviousPathFileName)
+	if err != nil {
+		return nil, err
+	}
+
+	dir, err := pth.Dir()
+	if err != nil {
+		return nil, clues.Wrap(err, "building metadata collection path")
+	}
+
+	dcs, err := r.ProduceRestoreCollections(
+		ctx,
+		string(manID),
+		[]path.RestorePaths{{StoragePath: pth, RestorePath: dir}},
+		nil,
+		errs)
+	if err != nil {
+		return nil, err
+	}
+
+	sites, err := deserializeSiteMetadata(ctx, dcs)
+	if err != nil {
+		return nil, err
+	}
+
+	filePaths := [][]string{}
+
+	for k := range sites {
+		for _, fn := range metadata.AllMetadataFileNames() {
+			filePaths = append(filePaths, []string{odConsts.SitesPathDir, k, fn})
+		}
+	}
+
+	return filePaths, nil
+}
+
+func deserializeSiteMetadata(
+	ctx context.Context,
+	cols []data.RestoreCollection,
+) (map[string]string, error) {
+	logger.Ctx(ctx).Infow(
+		"deserializing previous sites metadata",
+		"num_collections", len(cols))
+
+	var (
+		prevFolders = map[string]string{}
+		errs        = fault.New(true) // metadata item reads should not fail backup
+	)
+
+	for _, col := range cols {
+		if errs.Failure() != nil {
+			break
+		}
+
+		items := col.Items(ctx, errs)
+
+		for breakLoop := false; !breakLoop; {
+			select {
+			case <-ctx.Done():
+				return nil, clues.Wrap(
+					ctx.Err(),
+					"deserializing previous sites metadata").WithClues(ctx)
+
+			case item, ok := <-items:
+				if !ok {
+					breakLoop = true
+					break
+				}
+
+				var (
+					err  error
+					ictx = clues.Add(ctx, "item_uuid", item.ID())
+				)
+
+				switch item.ID() {
+				case metadata.PreviousPathFileName:
+					err = drive.DeserializeMap(item.ToReader(), prevFolders)
+
+				default:
+					logger.Ctx(ictx).Infow(
+						"skipping unknown metadata file",
+						"file_name", item.ID())
+
+					continue
+				}
+
+				if err == nil {
+					// Successful decode.
+					continue
+				}
+
+				if err != nil {
+					return nil, clues.Stack(err).WithClues(ictx)
+				}
+			}
+		}
+	}
+
+	// if reads from items failed, return empty but no error
+	if errs.Failure() != nil {
+		logger.CtxErr(ctx, errs.Failure()).Info("reading metadata collection items")
+
		return map[string]string{}, nil
+	}
+
+	return prevFolders, nil
+}
src/internal/m365/service/groups/backup_test.go (new file, 145 lines)
@@ -0,0 +1,145 @@
+package groups
+
+import (
+	"context"
+	"io"
+	"strings"
+	"testing"
+
+	"github.com/kopia/kopia/repo/manifest"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+	"github.com/stretchr/testify/suite"
+
+	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/kopia"
+	"github.com/alcionai/corso/src/internal/kopia/inject"
+	"github.com/alcionai/corso/src/internal/tester"
+	"github.com/alcionai/corso/src/pkg/backup/identity"
+	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/path"
+)
+
+type GroupsBackupUnitSuite struct {
+	tester.Suite
+}
+
+func TestGroupsBackupUnitSuite(t *testing.T) {
+	suite.Run(t, &GroupsBackupUnitSuite{Suite: tester.NewUnitSuite(t)})
+}
+
+type mockRestoreProducer struct {
+	rc  []data.RestoreCollection
+	err error
+}
+
+func (mr mockRestoreProducer) ProduceRestoreCollections(
+	ctx context.Context,
+	snapshotID string,
+	paths []path.RestorePaths,
+	bc kopia.ByteCounter,
+	errs *fault.Bus,
+) ([]data.RestoreCollection, error) {
+	return mr.rc, mr.err
+}
+
+type mockCollection struct {
+	items []mockItem
+}
+
+type mockItem struct {
+	name string
+	data string
+}
+
+func (mi mockItem) ToReader() io.ReadCloser { return io.NopCloser(strings.NewReader(mi.data)) }
+func (mi mockItem) ID() string              { return mi.name }
+func (mi mockItem) Deleted() bool           { return false }
+
+func (mc mockCollection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
+	ch := make(chan data.Item)
+
+	go func() {
+		defer close(ch)
+
+		for _, item := range mc.items {
+			ch <- item
+		}
+	}()
+
+	return ch
+}
+func (mc mockCollection) FullPath() path.Path { panic("unimplemented") }
+func (mc mockCollection) FetchItemByName(ctx context.Context, name string) (data.Item, error) {
+	panic("unimplemented")
+}
+
+func (suite *GroupsBackupUnitSuite) TestMetadataFiles() {
+	tests := []struct {
+		name      string
+		reason    identity.Reasoner
+		r         inject.RestoreProducer
+		manID     manifest.ID
+		result    [][]string
+		expectErr require.ErrorAssertionFunc
+	}{
+		{
+			name:      "error",
+			reason:    kopia.NewReason("tenant", "user", path.GroupsService, path.LibrariesCategory),
+			manID:     "manifestID",
+			r:         mockRestoreProducer{err: assert.AnError},
+			expectErr: require.Error,
+		},
+		{
+			name:   "single site",
+			reason: kopia.NewReason("tenant", "user", path.GroupsService, path.LibrariesCategory),
+			manID:  "manifestID",
+			r: mockRestoreProducer{
+				rc: []data.RestoreCollection{
+					mockCollection{
+						items: []mockItem{
+							{name: "previouspath", data: `{"id1": "path/to/id1"}`},
+						},
+					},
+				},
+			},
+			result:    [][]string{{"sites", "id1", "delta"}, {"sites", "id1", "previouspath"}},
+			expectErr: require.NoError,
+		},
+		{
+			name:   "multiple sites",
+			reason: kopia.NewReason("tenant", "user", path.GroupsService, path.LibrariesCategory),
+			manID:  "manifestID",
+			r: mockRestoreProducer{
+				rc: []data.RestoreCollection{
+					mockCollection{
+						items: []mockItem{
+							{name: "previouspath", data: `{"id1": "path/to/id1", "id2": "path/to/id2"}`},
+						},
+					},
+				},
+			},
+			result: [][]string{
+				{"sites", "id1", "delta"},
+				{"sites", "id1", "previouspath"},
+				{"sites", "id2", "delta"},
+				{"sites", "id2", "previouspath"},
+			},
+			expectErr: require.NoError,
+		},
+	}
+
+	for _, test := range tests {
+		suite.Run(test.name, func() {
+			t := suite.T()
+
+			ctx, flush := tester.NewContext(t)
+			defer flush()
+
+			res, err := MetadataFiles(ctx, test.reason, test.r, test.manID, fault.New(true))
+
+			test.expectErr(t, err)
+			assert.ElementsMatch(t, test.result, res)
+		})
+	}
+}
@@ -77,8 +77,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 		data.FetchRestoreCollection{
 			Collection: dataMock.Collection{
 				Path: p,
-				ItemData: []*dataMock.Item{
-					{
+				ItemData: []data.Item{
+					&dataMock.Item{
 						ItemID: itemID,
 						Reader: io.NopCloser(bytes.NewBufferString("body1")),
 						ItemInfo: dii,
@@ -62,8 +62,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			version: 1,
 			backingCollection: data.NoFetchRestoreCollection{
 				Collection: dataMock.Collection{
-					ItemData: []*dataMock.Item{
-						{
+					ItemData: []data.Item{
+						&dataMock.Item{
 							ItemID: "name1",
 							Reader: io.NopCloser(bytes.NewBufferString("body1")),
 						},
@@ -83,12 +83,12 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			version: 1,
 			backingCollection: data.NoFetchRestoreCollection{
 				Collection: dataMock.Collection{
-					ItemData: []*dataMock.Item{
-						{
+					ItemData: []data.Item{
+						&dataMock.Item{
 							ItemID: "name1",
 							Reader: io.NopCloser(bytes.NewBufferString("body1")),
 						},
-						{
+						&dataMock.Item{
 							ItemID: "name2",
 							Reader: io.NopCloser(bytes.NewBufferString("body2")),
 						},
@@ -113,8 +113,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			version: 2,
 			backingCollection: data.NoFetchRestoreCollection{
 				Collection: dataMock.Collection{
-					ItemData: []*dataMock.Item{
-						{
+					ItemData: []data.Item{
+						&dataMock.Item{
 							ItemID: "name1.data",
 							Reader: io.NopCloser(bytes.NewBufferString("body1")),
 						},
@@ -134,8 +134,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			version: version.Backup,
 			backingCollection: data.FetchRestoreCollection{
 				Collection: dataMock.Collection{
-					ItemData: []*dataMock.Item{
-						{
+					ItemData: []data.Item{
+						&dataMock.Item{
 							ItemID: "id1.data",
 							Reader: io.NopCloser(bytes.NewBufferString("body1")),
 						},
@@ -156,8 +156,8 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			version: version.Backup,
 			backingCollection: data.FetchRestoreCollection{
 				Collection: dataMock.Collection{
-					ItemData: []*dataMock.Item{
-						{ItemID: "id1.data"},
+					ItemData: []data.Item{
+						&dataMock.Item{ItemID: "id1.data"},
 					},
 				},
 				FetchItemByNamer: finD{err: assert.AnError},
@@ -174,11 +174,11 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			version: version.Backup,
 			backingCollection: data.FetchRestoreCollection{
 				Collection: dataMock.Collection{
-					ItemData: []*dataMock.Item{
-						{
+					ItemData: []data.Item{
+						&dataMock.Item{
 							ItemID: "missing.data",
 						},
-						{
+						&dataMock.Item{
 							ItemID: "id1.data",
 							Reader: io.NopCloser(bytes.NewBufferString("body1")),
 						},
@@ -203,16 +203,16 @@ func (suite *ExportUnitSuite) TestGetItems() {
 			version: version.OneDrive1DataAndMetaFiles,
 			backingCollection: data.FetchRestoreCollection{
 				Collection: dataMock.Collection{
-					ItemData: []*dataMock.Item{
-						{
+					ItemData: []data.Item{
+						&dataMock.Item{
 							ItemID: "name0",
 							Reader: io.NopCloser(bytes.NewBufferString("body0")),
 						},
-						{
+						&dataMock.Item{
 							ItemID: "name1",
 							ReadErr: assert.AnError,
 						},
-						{
+						&dataMock.Item{
 							ItemID: "name2",
 							Reader: io.NopCloser(bytes.NewBufferString("body2")),
 						},
@@ -300,8 +300,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 		data.FetchRestoreCollection{
 			Collection: dataMock.Collection{
 				Path: p,
-				ItemData: []*dataMock.Item{
-					{
+				ItemData: []data.Item{
+					&dataMock.Item{
 						ItemID: "id1.data",
 						Reader: io.NopCloser(bytes.NewBufferString("body1")),
 						ItemInfo: dii,
@@ -85,8 +85,8 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 		data.FetchRestoreCollection{
 			Collection: dataMock.Collection{
 				Path: p,
-				ItemData: []*dataMock.Item{
-					{
+				ItemData: []data.Item{
+					&dataMock.Item{
 						ItemID: "id1.data",
 						Reader: io.NopCloser(bytes.NewBufferString("body1")),
 						ItemInfo: dii,
@@ -9,7 +9,6 @@ import (
 	"github.com/alcionai/corso/src/internal/data"
 	dataMock "github.com/alcionai/corso/src/internal/data/mock"
 	"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
-	"github.com/alcionai/corso/src/internal/m365/mock"
 	exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/path"
@@ -137,7 +136,7 @@ func CollectionsForInfo(
 			totalItems++
 		}
 
-		c := mock.RestoreCollection{
+		c := dataMock.RestoreCollection{
 			Collection: mc,
 			AuxItems:   map[string]data.Item{},
 		}
@@ -356,6 +356,7 @@ func (op *BackupOperation) do(
 	mans, mdColls, canUseMetadata, err := produceManifestsAndMetadata(
 		ctx,
 		kbf,
+		op.bp,
 		op.kopia,
 		reasons, fallbackReasons,
 		op.account.ID(),
@@ -27,6 +27,7 @@ import (
 	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/internal/model"
 	"github.com/alcionai/corso/src/internal/operations/inject"
+	opMock "github.com/alcionai/corso/src/internal/operations/inject/mock"
 	"github.com/alcionai/corso/src/internal/streamstore"
 	ssmock "github.com/alcionai/corso/src/internal/streamstore/mock"
 	"github.com/alcionai/corso/src/internal/tester"
@@ -1553,38 +1554,6 @@ func (suite *AssistBackupIntegrationSuite) TearDownSuite() {
 	}
 }
 
-var _ inject.BackupProducer = &mockBackupProducer{}
-
-type mockBackupProducer struct {
-	colls                   []data.BackupCollection
-	dcs                     data.CollectionStats
-	injectNonRecoverableErr bool
-}
-
-func (mbp *mockBackupProducer) ProduceBackupCollections(
-	context.Context,
-	inject.BackupProducerConfig,
-	*fault.Bus,
-) ([]data.BackupCollection, prefixmatcher.StringSetReader, bool, error) {
-	if mbp.injectNonRecoverableErr {
-		return nil, nil, false, clues.New("non-recoverable error")
-	}
-
-	return mbp.colls, nil, true, nil
-}
-
-func (mbp *mockBackupProducer) IsServiceEnabled(
-	context.Context,
-	path.ServiceType,
-	string,
-) (bool, error) {
-	return true, nil
-}
-
-func (mbp *mockBackupProducer) Wait() *data.CollectionStats {
-	return &mbp.dcs
-}
-
 func makeBackupCollection(
 	p path.Path,
 	locPath *path.Builder,
@@ -1596,10 +1565,10 @@ func makeBackupCollection(
 		streams[i] = &items[i]
 	}
 
-	return &mock.BackupCollection{
+	return &dataMock.Collection{
 		Path: p,
 		Loc:  locPath,
-		Streams: streams,
+		ItemData: streams,
 	}
 }
 
@@ -1878,10 +1847,7 @@ func (suite *AssistBackupIntegrationSuite) TestBackupTypesForFailureModes() {
 			require.NoError(t, err, clues.ToCore(err))
 
 			cs = append(cs, mc)
-			bp := &mockBackupProducer{
-				colls:                   cs,
-				injectNonRecoverableErr: test.injectNonRecoverableErr,
-			}
+			bp := opMock.NewMockBackupProducer(cs, data.CollectionStats{}, test.injectNonRecoverableErr)
 
 			opts.FailureHandling = test.failurePolicy
 
@@ -1890,7 +1856,7 @@ func (suite *AssistBackupIntegrationSuite) TestBackupTypesForFailureModes() {
 				opts,
 				suite.kw,
 				suite.sw,
-				bp,
+				&bp,
 				acct,
 				osel.Selector,
 				selectors.Selector{DiscreteOwner: userID},
@@ -2196,9 +2162,7 @@ func (suite *AssistBackupIntegrationSuite) TestExtensionsIncrementals() {
 			require.NoError(t, err, clues.ToCore(err))
 
 			cs = append(cs, mc)
-			bp := &mockBackupProducer{
-				colls: cs,
-			}
+			bp := opMock.NewMockBackupProducer(cs, data.CollectionStats{}, false)
 
 			opts.FailureHandling = failurePolicy
 
@@ -2207,7 +2171,7 @@ func (suite *AssistBackupIntegrationSuite) TestExtensionsIncrementals() {
 				opts,
 				suite.kw,
 				suite.sw,
-				bp,
+				&bp,
 				acct,
 				osel.Selector,
 				selectors.Selector{DiscreteOwner: userID},
@@ -6,6 +6,8 @@ import (
 	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
 	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/kopia"
+	"github.com/alcionai/corso/src/internal/kopia/inject"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/control/repository"
@@ -26,6 +28,19 @@ type (
 
 		IsServiceEnableder
 
+		// GetMetadataPaths returns a list of paths that form metadata
+		// collections. In case of service that have just a single
+		// underlying service like OneDrive or SharePoint, it will mostly
+		// just have a single collection per manifest reason, but in the
+		// case of groups, it will contain a collection each for the
+		// underlying service, for example one per SharePoint site.
+		GetMetadataPaths(
+			ctx context.Context,
+			r inject.RestoreProducer,
+			man kopia.ManifestEntry,
+			errs *fault.Bus,
+		) ([]path.RestorePaths, error)
+
 		Wait() *data.CollectionStats
 	}
 
src/internal/operations/inject/mock/inject.go (new file, 70 lines)
@@ -0,0 +1,70 @@
+package mock
+
+import (
+	"context"
+
+	"github.com/alcionai/clues"
+
+	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
+	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/kopia"
+	kinject "github.com/alcionai/corso/src/internal/kopia/inject"
+	"github.com/alcionai/corso/src/internal/m365"
+	"github.com/alcionai/corso/src/internal/operations/inject"
+	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/path"
+)
+
+var _ inject.BackupProducer = &mockBackupProducer{}
+
+type mockBackupProducer struct {
+	colls                   []data.BackupCollection
+	dcs                     data.CollectionStats
+	injectNonRecoverableErr bool
+}
+
+func NewMockBackupProducer(
+	colls []data.BackupCollection,
+	dcs data.CollectionStats,
+	injectNonRecoverableErr bool,
+) mockBackupProducer {
+	return mockBackupProducer{
+		colls:                   colls,
+		dcs:                     dcs,
+		injectNonRecoverableErr: injectNonRecoverableErr,
+	}
+}
+
+func (mbp *mockBackupProducer) ProduceBackupCollections(
+	context.Context,
+	inject.BackupProducerConfig,
+	*fault.Bus,
+) ([]data.BackupCollection, prefixmatcher.StringSetReader, bool, error) {
+	if mbp.injectNonRecoverableErr {
+		return nil, nil, false, clues.New("non-recoverable error")
+	}
+
+	return mbp.colls, nil, true, nil
+}
+
+func (mbp *mockBackupProducer) IsServiceEnabled(
+	context.Context,
+	path.ServiceType,
+	string,
+) (bool, error) {
+	return true, nil
+}
+
+func (mbp *mockBackupProducer) Wait() *data.CollectionStats {
+	return &mbp.dcs
+}
+
+func (mbp mockBackupProducer) GetMetadataPaths(
+	ctx context.Context,
+	r kinject.RestoreProducer,
+	man kopia.ManifestEntry,
+	errs *fault.Bus,
+) ([]path.RestorePaths, error) {
+	ctrl := m365.Controller{}
+	return ctrl.GetMetadataPaths(ctx, r, man, errs)
+}
@@ -9,16 +9,16 @@ import (
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/kopia"
 	"github.com/alcionai/corso/src/internal/kopia/inject"
+	oinject "github.com/alcionai/corso/src/internal/operations/inject"
 	"github.com/alcionai/corso/src/pkg/backup/identity"
-	"github.com/alcionai/corso/src/pkg/backup/metadata"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/logger"
-	"github.com/alcionai/corso/src/pkg/path"
 )
 
 func produceManifestsAndMetadata(
 	ctx context.Context,
 	bf inject.BaseFinder,
+	bp oinject.BackupProducer,
 	rp inject.RestoreProducer,
 	reasons, fallbackReasons []identity.Reasoner,
 	tenantID string,
@@ -27,6 +27,7 @@ func produceManifestsAndMetadata(
 	bb, meta, useMergeBases, err := getManifestsAndMetadata(
 		ctx,
 		bf,
+		bp,
 		rp,
 		reasons,
 		fallbackReasons,
@@ -56,15 +57,15 @@ func produceManifestsAndMetadata(
 func getManifestsAndMetadata(
 	ctx context.Context,
 	bf inject.BaseFinder,
+	bp oinject.BackupProducer,
 	rp inject.RestoreProducer,
 	reasons, fallbackReasons []identity.Reasoner,
 	tenantID string,
 	getMetadata bool,
 ) (kopia.BackupBases, []data.RestoreCollection, bool, error) {
 	var (
 		tags = map[string]string{kopia.TagBackupCategory: ""}
-		metadataFiles = metadata.AllMetadataFileNames()
 		collections []data.RestoreCollection
 	)
 
 	bb := bf.FindBases(ctx, reasons, tags)
@@ -102,8 +103,19 @@ func getManifestsAndMetadata(
 		// spread around. Need to find more idiomatic handling.
 		fb := fault.New(true)
 
-		colls, err := collectMetadata(mctx, rp, man, metadataFiles, tenantID, fb)
-		LogFaultErrors(ctx, fb.Errors(), "collecting metadata")
+		paths, err := bp.GetMetadataPaths(mctx, rp, man, fb)
+		if err != nil {
+			LogFaultErrors(ctx, fb.Errors(), "collecting metadata paths")
+			return nil, nil, false, err
+		}
+
+		colls, err := rp.ProduceRestoreCollections(ctx, string(man.ID), paths, nil, fb)
+		if err != nil {
+			// Restore is best-effort and we want to keep it that way since we want to
+			// return as much metadata as we can to reduce the work we'll need to do.
+			// Just wrap the error here for better reporting/debugging.
+			LogFaultErrors(ctx, fb.Errors(), "collecting metadata")
+		}
 
 		// TODO(ashmrtn): It should be alright to relax this condition a little. We
 		// should be able to just remove the offending manifest and backup from the
@@ -127,51 +139,3 @@ func getManifestsAndMetadata(
 
 	return bb, collections, true, nil
 }
-
-// collectMetadata retrieves all metadata files associated with the manifest.
-func collectMetadata(
-	ctx context.Context,
-	r inject.RestoreProducer,
-	man kopia.ManifestEntry,
-	fileNames []string,
-	tenantID string,
-	errs *fault.Bus,
-) ([]data.RestoreCollection, error) {
-	paths := []path.RestorePaths{}
-
-	for _, fn := range fileNames {
-		for _, reason := range man.Reasons {
-			p, err := path.BuildMetadata(
-				tenantID,
-				reason.ProtectedResource(),
-				reason.Service(),
-				reason.Category(),
-				true,
-				fn)
-			if err != nil {
-				return nil, clues.
-					Wrap(err, "building metadata path").
-					With("metadata_file", fn, "category", reason.Category)
-			}
-
-			dir, err := p.Dir()
-			if err != nil {
-				return nil, clues.
-					Wrap(err, "building metadata collection path").
-					With("metadata_file", fn, "category", reason.Category)
-			}
-
-			paths = append(paths, path.RestorePaths{StoragePath: p, RestorePath: dir})
-		}
-	}
-
-	dcs, err := r.ProduceRestoreCollections(ctx, string(man.ID), paths, nil, errs)
-	if err != nil {
-		// Restore is best-effort and we want to keep it that way since we want to
-		// return as much metadata as we can to reduce the work we'll need to do.
-		// Just wrap the error here for better reporting/debugging.
-		return dcs, clues.Wrap(err, "collecting prior metadata")
-	}
-
-	return dcs, nil
-}
@ -12,7 +12,9 @@ import (
|
|||||||
|
|
||||||
"github.com/alcionai/corso/src/internal/data"
|
"github.com/alcionai/corso/src/internal/data"
|
||||||
"github.com/alcionai/corso/src/internal/kopia"
|
"github.com/alcionai/corso/src/internal/kopia"
|
||||||
|
"github.com/alcionai/corso/src/internal/m365"
|
||||||
"github.com/alcionai/corso/src/internal/model"
|
"github.com/alcionai/corso/src/internal/model"
|
||||||
|
"github.com/alcionai/corso/src/internal/operations/inject/mock"
|
||||||
"github.com/alcionai/corso/src/internal/tester"
|
"github.com/alcionai/corso/src/internal/tester"
|
||||||
"github.com/alcionai/corso/src/pkg/backup"
|
"github.com/alcionai/corso/src/pkg/backup"
|
||||||
"github.com/alcionai/corso/src/pkg/backup/identity"
|
"github.com/alcionai/corso/src/pkg/backup/identity"
|
||||||
@ -79,7 +81,7 @@ func TestOperationsManifestsUnitSuite(t *testing.T) {
|
|||||||
suite.Run(t, &OperationsManifestsUnitSuite{Suite: tester.NewUnitSuite(t)})
|
suite.Run(t, &OperationsManifestsUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() {
|
func (suite *OperationsManifestsUnitSuite) TestGetMetadataPaths() {
|
||||||
const (
|
const (
|
||||||
ro = "owner"
|
ro = "owner"
|
||||||
tid = "tenantid"
|
tid = "tenantid"
|
||||||
@ -104,13 +106,12 @@ func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() {
		name        string
		manID       string
		reasons     []identity.Reasoner
		fileNames   []string
		expectPaths func(*testing.T, []string) []path.Path
		expectErr   error
	}{
		{
			name:  "single reason, single file",
			name:  "single reason",
			manID: "single single",
			manID: "single",
			reasons: []identity.Reasoner{
				kopia.NewReason(tid, ro, path.ExchangeService, path.EmailCategory),
			},
@ -125,30 +126,10 @@ func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() {

				return ps
			},
			fileNames: []string{"a"},
		},
		{
			name:  "single reason, multiple files",
			name:  "multiple reasons",
			manID: "single multi",
			manID: "multi",
			reasons: []identity.Reasoner{
				kopia.NewReason(tid, ro, path.ExchangeService, path.EmailCategory),
			},
			expectPaths: func(t *testing.T, files []string) []path.Path {
				ps := make([]path.Path, 0, len(files))

				for _, f := range files {
					p, err := emailPath.AppendItem(f)
					assert.NoError(t, err, clues.ToCore(err))
					ps = append(ps, p)
				}

				return ps
			},
			fileNames: []string{"a", "b"},
		},
		{
			name:  "multiple reasons, single file",
			manID: "multi single",
			reasons: []identity.Reasoner{
				kopia.NewReason(tid, ro, path.ExchangeService, path.EmailCategory),
				kopia.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory),
@ -167,30 +148,6 @@ func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() {

				return ps
			},
			fileNames: []string{"a"},
		},
		{
			name:  "multiple reasons, multiple file",
			manID: "multi multi",
			reasons: []identity.Reasoner{
				kopia.NewReason(tid, ro, path.ExchangeService, path.EmailCategory),
				kopia.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory),
			},
			expectPaths: func(t *testing.T, files []string) []path.Path {
				ps := make([]path.Path, 0, len(files))

				for _, f := range files {
					p, err := emailPath.AppendItem(f)
					assert.NoError(t, err, clues.ToCore(err))
					ps = append(ps, p)
					p, err = contactPath.AppendItem(f)
					assert.NoError(t, err, clues.ToCore(err))
					ps = append(ps, p)
				}

				return ps
			},
			fileNames: []string{"a", "b"},
		},
	}

	for _, test := range table {
@ -200,7 +157,7 @@ func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() {
			ctx, flush := tester.NewContext(t)
			defer flush()

			paths := test.expectPaths(t, test.fileNames)
			paths := test.expectPaths(t, []string{"delta", "previouspath"})

			mr := mockRestoreProducer{err: test.expectErr}
			mr.buildRestoreFunc(t, test.manID, paths)
@ -210,13 +167,15 @@ func (suite *OperationsManifestsUnitSuite) TestCollectMetadata() {
				Reasons: test.reasons,
			}

			_, err := collectMetadata(ctx, &mr, man, test.fileNames, tid, fault.New(true))
			controller := m365.Controller{}
			_, err := controller.GetMetadataPaths(ctx, &mr, man, fault.New(true))
			assert.ErrorIs(t, err, test.expectErr, clues.ToCore(err))
		})
	}
}

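As a worked expectation for the fixtures above: two reasons (email, contacts) crossed with the two fixed file names yield four storage paths, each with its parent directory as the restore path. The exchangeMetadata segment is an assumption, inferred by analogy with the groupsMetadata layout tested further down.

	tenantid/exchangeMetadata/owner/email/delta
	tenantid/exchangeMetadata/owner/email/previouspath
	tenantid/exchangeMetadata/owner/contacts/delta
	tenantid/exchangeMetadata/owner/contacts/previouspath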
func buildReasons(
	tenant string,
	ro string,
	service path.ServiceType,
	cats ...path.CategoryType,
@ -226,7 +185,7 @@ func buildReasons(
	for _, cat := range cats {
		reasons = append(
			reasons,
			kopia.NewReason("", ro, service, cat))
			kopia.NewReason(tenant, ro, service, cat))
	}

	return reasons
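With the tenant now threaded through, a call like the sketch below produces fully qualified reasons; this mirrors how the updated tests invoke the helper.

	// Equivalent to two kopia.NewReason calls, one per category, each carrying
	// the same tenant/owner/service identity.
	reasons := buildReasons(
		"tenantid",
		"owner",
		path.ExchangeService,
		path.EmailCategory,
		path.ContactsCategory)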
@ -245,7 +204,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
				ID:               manifest.ID(id),
				IncompleteReason: incmpl,
			},
			Reasons: buildReasons(ro, path.ExchangeService, cats...),
			Reasons: buildReasons(tid, ro, path.ExchangeService, cats...),
		}
	}

@ -258,7 +217,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
				SnapshotID:    snapID,
				StreamStoreID: snapID + "store",
			},
			Reasons: buildReasons(ro, path.ExchangeService, cats...),
			Reasons: buildReasons(tid, ro, path.ExchangeService, cats...),
		}
	}

@ -477,9 +436,11 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
			ctx, flush := tester.NewContext(t)
			defer flush()

			emptyMockBackupProducer := mock.NewMockBackupProducer(nil, data.CollectionStats{}, false)
			mans, dcs, b, err := produceManifestsAndMetadata(
				ctx,
				test.bf,
				&emptyMockBackupProducer,
				&test.rp,
				test.reasons, nil,
				tid,
@ -545,7 +506,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
				IncompleteReason: incmpl,
				Tags:             map[string]string{"tag:" + kopia.TagBackupID: id + "bup"},
			},
			Reasons: buildReasons(ro, path.ExchangeService, cats...),
			Reasons: buildReasons(tid, ro, path.ExchangeService, cats...),
		}
	}

@ -558,7 +519,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
				SnapshotID:    snapID,
				StreamStoreID: snapID + "store",
			},
			Reasons: buildReasons(ro, path.ExchangeService, cats...),
			Reasons: buildReasons(tid, ro, path.ExchangeService, cats...),
		}
	}

@ -929,9 +890,11 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
			ctx, flush := tester.NewContext(t)
			defer flush()

			mbp := mock.NewMockBackupProducer(nil, data.CollectionStats{}, false)
			mans, dcs, b, err := produceManifestsAndMetadata(
				ctx,
				test.bf,
				&mbp,
				&test.rp,
				test.reasons, test.fallbackReasons,
				tid,
@ -489,3 +489,68 @@ func (suite *PathUnitSuite) TestBuildPrefix() {
		})
	}
}

func (suite *PathUnitSuite) TestBuildRestorePaths() {
	type args struct {
		tenantID          string
		protectedResource string
		service           ServiceType
		category          CategoryType
		fp                []string
	}

	tests := []struct {
		name        string
		args        args
		restorePath string
		storagePath string
		expectErr   require.ErrorAssertionFunc
	}{
		{
			name: "single",
			args: args{
				tenantID:          "tenant",
				protectedResource: "protectedResource",
				service:           GroupsService,
				category:          LibrariesCategory,
				fp:                []string{"a"},
			},
			restorePath: "tenant/groupsMetadata/protectedResource/libraries",
			storagePath: "tenant/groupsMetadata/protectedResource/libraries/a",
			expectErr:   require.NoError,
		},
		{
			name: "multi",
			args: args{
				tenantID:          "tenant",
				protectedResource: "protectedResource",
				service:           GroupsService,
				category:          LibrariesCategory,
				fp:                []string{"a", "b"},
			},
			restorePath: "tenant/groupsMetadata/protectedResource/libraries/a",
			storagePath: "tenant/groupsMetadata/protectedResource/libraries/a/b",
			expectErr:   require.NoError,
		},
	}

	for _, test := range tests {
		suite.Run(test.name, func() {
			t := suite.T()

			r, err := BuildMetadata(
				test.args.tenantID,
				test.args.protectedResource,
				test.args.service,
				test.args.category,
				true,
				test.args.fp...)
			test.expectErr(t, err, clues.ToCore(err))

			rdir, err := r.Dir()
			require.NoError(t, err, clues.ToCore(err))

			assert.Equal(t, test.restorePath, rdir.String(), "restore path")
			assert.Equal(t, test.storagePath, r.String(), "storage path")
		})
	}
}
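To make the two expectations concrete: BuildMetadata keeps every trailing element in the storage path, while Dir() strips the last element to form the restore path. A sketch mirroring the "multi" case, with the boolean flag passed exactly as the test passes it:

	p, err := BuildMetadata(
		"tenant",
		"protectedResource",
		GroupsService,
		LibrariesCategory,
		true,
		"a", "b")
	// err == nil
	// p.String() -> "tenant/groupsMetadata/protectedResource/libraries/a/b"

	dir, _ := p.Dir()
	// dir.String() -> "tenant/groupsMetadata/protectedResource/libraries/a"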