Exports SharePoint lists (#4959)

Adds provisioning to export SharePoint lists.

#### Does this PR need a docs update or release note?

- [x] ⛔ No

#### Type of change

- [x] 🌻 Feature

#### Issue(s)

#4752

#### Test Plan

- [x] 💪 Manual
- [x] ⚡ Unit test
- [x] 💚 E2E
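For orientation, a minimal sketch of how the new lists export is wired; only the `site.NewExportCollection` signature below comes from this change, while `dc`, `backupVersion`, and `stats` are assumed caller-side values. Each list in the backing collection is streamed out as `<listID>.json`:

```go
// Sketch only: wiring a backed-up lists collection into the new export path.
// dc, backupVersion, and stats are assumed to exist in the caller.
coll := site.NewExportCollection(
	"Lists/listid1",              // base directory within the export, as in this PR's tests
	[]data.RestoreCollection{dc}, // the restore collection holding the list items
	backupVersion,
	stats)
```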
This commit is contained in:

parent 9dbf9f3676
commit 87104ce404
src/internal/m365/collection/site/export.go (new file, 71 lines)
@@ -0,0 +1,71 @@
package site

import (
	"context"

	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/export"
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/metrics"
	"github.com/alcionai/corso/src/pkg/path"
)

func NewExportCollection(
	baseDir string,
	backingCollection []data.RestoreCollection,
	backupVersion int,
	stats *metrics.ExportStats,
) export.Collectioner {
	return export.BaseCollection{
		BaseDir: baseDir,
		BackingCollection: backingCollection,
		BackupVersion: backupVersion,
		Stream: streamItems,
		Stats: stats,
	}
}

func streamItems(
	ctx context.Context,
	drc []data.RestoreCollection,
	backupVersion int,
	config control.ExportConfig,
	ch chan<- export.Item,
	stats *metrics.ExportStats,
) {
	defer close(ch)

	errs := fault.New(false)

	for _, rc := range drc {
		for item := range rc.Items(ctx, errs) {
			stats.UpdateResourceCount(path.ListsCategory)
			body := metrics.ReaderWithStats(item.ToReader(), path.ListsCategory, stats)

			name := item.ID() + ".json"

			ch <- export.Item{
				ID: item.ID(),
				Name: name,
				Body: body,
			}
		}

		items, recovered := errs.ItemsAndRecovered()

		// Return all the items that we failed to source from the persistence layer
		for _, item := range items {
			ch <- export.Item{
				ID: item.ID,
				Error: &item,
			}
		}

		for _, err := range recovered {
			ch <- export.Item{
				Error: err,
			}
		}
	}
}
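As a usage note, a hedged sketch of how this stream might be drained from within the package; `consumeListExports` and `writeItem` are hypothetical names, while `streamItems`, `export.Item`, and `control.DefaultExportConfig` come from this change and its tests (imports mirror the file above):

```go
// consumeListExports is an illustrative caller sketch, not part of this PR.
func consumeListExports(
	ctx context.Context,
	rcs []data.RestoreCollection,
	backupVersion int,
	stats *metrics.ExportStats,
) error {
	ch := make(chan export.Item)

	// streamItems closes ch when it finishes, so the range below terminates.
	go streamItems(ctx, rcs, backupVersion, control.DefaultExportConfig(), ch, stats)

	for item := range ch {
		if item.Error != nil {
			// Failed items arrive on the same channel with only ID/Error set.
			return clues.Wrap(item.Error, "exporting list").With("item_id", item.ID)
		}

		// item.Name is "<listID>.json"; item.Body streams the serialized list.
		if err := writeItem(item.Name, item.Body); err != nil { // writeItem: hypothetical sink
			return err
		}
	}

	return nil
}
```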
src/internal/m365/collection/site/export_test.go (new file, 138 lines)

@@ -0,0 +1,138 @@
package site

import (
	"bytes"
	"io"
	"testing"

	"github.com/alcionai/clues"
	kjson "github.com/microsoft/kiota-serialization-json-go"
	"github.com/microsoftgraph/msgraph-sdk-go/models"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"github.com/stretchr/testify/suite"

	"github.com/alcionai/corso/src/internal/common/ptr"
	"github.com/alcionai/corso/src/internal/data"
	dataMock "github.com/alcionai/corso/src/internal/data/mock"
	"github.com/alcionai/corso/src/internal/tester"
	"github.com/alcionai/corso/src/internal/version"
	"github.com/alcionai/corso/src/pkg/control"
	"github.com/alcionai/corso/src/pkg/export"
	"github.com/alcionai/corso/src/pkg/metrics"
)

type ExportUnitSuite struct {
	tester.Suite
}

func TestExportUnitSuite(t *testing.T) {
	suite.Run(t, &ExportUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *ExportUnitSuite) TestStreamItems() {
	t := suite.T()

	table := []struct {
		name string
		backingColl dataMock.Collection
		expectName string
		expectErr assert.ErrorAssertionFunc
	}{
		{
			name: "no errors",
			backingColl: dataMock.Collection{
				ItemData: []data.Item{
					&dataMock.Item{
						ItemID: "list1",
						Reader: makeListJSONReader(t, "list1"),
					},
				},
			},
			expectName: "list1.json",
			expectErr: assert.NoError,
		},
		{
			name: "only recoverable errors",
			backingColl: dataMock.Collection{
				ItemsRecoverableErrs: []error{
					clues.New("some error"),
				},
			},
			expectErr: assert.Error,
		},
		{
			name: "items and recoverable errors",
			backingColl: dataMock.Collection{
				ItemData: []data.Item{
					&dataMock.Item{
						ItemID: "list2",
						Reader: makeListJSONReader(t, "list2"),
					},
				},
				ItemsRecoverableErrs: []error{
					clues.New("some error"),
				},
			},
			expectName: "list2.json",
			expectErr: assert.Error,
		},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()

			ctx, flush := tester.NewContext(t)
			defer flush()

			ch := make(chan export.Item)

			go streamItems(
				ctx,
				[]data.RestoreCollection{test.backingColl},
				version.NoBackup,
				control.DefaultExportConfig(),
				ch,
				&metrics.ExportStats{})

			var (
				itm export.Item
				err error
			)

			for i := range ch {
				if i.Error == nil {
					itm = i
				} else {
					err = i.Error
				}
			}

			test.expectErr(t, err, clues.ToCore(err))

			assert.Equal(t, test.expectName, itm.Name, "item name")
		})
	}
}

func makeListJSONReader(t *testing.T, listName string) io.ReadCloser {
	listBytes := getListBytes(t, listName)
	return io.NopCloser(bytes.NewReader(listBytes))
}

func getListBytes(t *testing.T, listName string) []byte {
	writer := kjson.NewJsonSerializationWriter()
	defer writer.Close()

	list := models.NewList()
	list.SetId(ptr.To(listName))

	err := writer.WriteObjectValue("", list)
	require.NoError(t, err)

	storedListBytes, err := writer.GetSerializedContent()
	require.NoError(t, err)

	return storedListBytes
}
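For completeness, a sketch of the inverse of `getListBytes`: deserializing exported `<listID>.json` bytes back into a `models.List` with the same kiota libraries the test imports. The helper is illustrative, not part of this PR:

```go
// parseListBytes is a hypothetical helper; it assumes the kjson and models
// imports from the test file above.
func parseListBytes(listBytes []byte) (models.Listable, error) {
	// Build a JSON parse node over the raw exported bytes.
	node, err := kjson.NewJsonParseNode(listBytes)
	if err != nil {
		return nil, err
	}

	// Reconstruct the Graph SDK list model from the parse node.
	parsable, err := node.GetObjectValue(models.CreateListFromDiscriminatorValue)
	if err != nil {
		return nil, err
	}

	return parsable.(models.Listable), nil
}
```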
@@ -8,6 +8,7 @@ import (
 	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/m365/collection/drive"
+	"github.com/alcionai/corso/src/internal/m365/collection/site"
 	"github.com/alcionai/corso/src/internal/m365/resource"
 	"github.com/alcionai/corso/src/internal/operations/inject"
 	"github.com/alcionai/corso/src/pkg/backup/details"
@@ -72,30 +73,51 @@ func (h *baseSharePointHandler) ProduceExportCollections(
 	)
 
 	for _, dc := range dcs {
-		drivePath, err := path.ToDrivePath(dc.FullPath())
-		if err != nil {
-			return nil, clues.WrapWC(ctx, err, "transforming path to drive path")
-		}
-
-		driveName, ok := h.backupDriveIDNames.NameOf(drivePath.DriveID)
-		if !ok {
-			// This should not happen, but just in case
-			logger.Ctx(ctx).With("drive_id", drivePath.DriveID).Info("drive name not found, using drive id")
-			driveName = drivePath.DriveID
-		}
-
-		baseDir := path.Builder{}.
-			Append(path.LibrariesCategory.HumanString()).
-			Append(driveName).
-			Append(drivePath.Folders...)
-
-		ec = append(
-			ec,
-			drive.NewExportCollection(
-				baseDir.String(),
-				[]data.RestoreCollection{dc},
-				backupVersion,
-				stats))
+		cat := dc.FullPath().Category()
+
+		ictx := clues.Add(ctx, "fullpath_category", cat)
+
+		switch cat {
+		case path.LibrariesCategory:
+			drivePath, err := path.ToDrivePath(dc.FullPath())
+			if err != nil {
+				return nil, clues.WrapWC(ictx, err, "transforming path to drive path")
+			}
+
+			driveName, ok := h.backupDriveIDNames.NameOf(drivePath.DriveID)
+			if !ok {
+				// This should not happen, but just in case
+				logger.Ctx(ictx).With("drive_id", drivePath.DriveID).Info("drive name not found, using drive id")
+				driveName = drivePath.DriveID
+			}
+
+			baseDir := path.Builder{}.
+				Append(path.LibrariesCategory.HumanString()).
+				Append(driveName).
+				Append(drivePath.Folders...)
+
+			coll := drive.NewExportCollection(
+				baseDir.String(),
+				[]data.RestoreCollection{dc},
+				backupVersion,
+				stats)
+
+			ec = append(ec, coll)
+		case path.ListsCategory:
+			folders := dc.FullPath().Folders()
+			pth := path.Builder{}.Append(path.ListsCategory.HumanString()).Append(folders...)
+
+			ec = append(
+				ec,
+				site.NewExportCollection(
+					pth.String(),
+					[]data.RestoreCollection{dc},
+					backupVersion,
+					stats))
+		default:
+			return nil, clues.NewWC(ctx, "data category not supported").
+				With("category", cat)
+		}
 	}
 
 	return ec, el.Failure()
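To make the dispatch above concrete, a sketch of the base directory each branch builds; the drive and folder names are illustrative, and the rendered strings assume `HumanString()` returns "Libraries" and "Lists" (the lists value matches `expectedPath` in the test diff below):

```go
// Illustrative only: base export directories produced by the two branches.
libDir := path.Builder{}.
	Append(path.LibrariesCategory.HumanString()). // assumed "Libraries"
	Append("driveName1").
	Append("folderA", "folderB")
// libDir.String() -> "Libraries/driveName1/folderA/folderB"

listDir := path.Builder{}.
	Append(path.ListsCategory.HumanString()). // assumed "Lists"
	Append("listid1")
// listDir.String() -> "Lists/listid1"
```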
@@ -60,51 +60,110 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 	defer flush()
 
 	var (
 		driveID = "driveID1"
 		driveName = "driveName1"
-		itemName = "name1"
 		exportCfg = control.ExportConfig{}
 		dpb = odConsts.DriveFolderPrefixBuilder(driveID)
-
-		expectedPath = path.LibrariesCategory.HumanString() + "/" + driveName
-		expectedItems = []export.Item{
-			{
-				ID: "id1.data",
-				Name: itemName,
-				Body: io.NopCloser((bytes.NewBufferString("body1"))),
-			},
-		}
 	)
 
-	p, err := dpb.ToDataLayerSharePointPath("t", "u", path.LibrariesCategory, false)
-	assert.NoError(t, err, "build path")
-
 	table := []struct {
-		name string
-		itemInfo details.ItemInfo
+		name string
+		itemName string
+		itemID string
+		itemInfo details.ItemInfo
+		getCollPath func(t *testing.T) path.Path
+		statsCat path.CategoryType
+		expectedItems []export.Item
+		expectedPath string
 	}{
 		{
 			name: "OneDriveLegacyItemInfo",
+			itemName: "name1",
+			itemID: "id1.data",
 			itemInfo: details.ItemInfo{
 				OneDrive: &details.OneDriveInfo{
 					ItemType: details.OneDriveItem,
-					ItemName: itemName,
+					ItemName: "name1",
 					Size: 1,
 					DriveName: driveName,
 					DriveID: driveID,
 				},
 			},
+			getCollPath: func(t *testing.T) path.Path {
+				p, err := dpb.ToDataLayerSharePointPath("t", "u", path.LibrariesCategory, false)
+				assert.NoError(t, err, "build path")
+
+				return p
+			},
+			statsCat: path.FilesCategory,
+			expectedPath: path.LibrariesCategory.HumanString() + "/" + driveName,
+			expectedItems: []export.Item{
+				{
+					ID: "id1.data",
+					Name: "name1",
+					Body: io.NopCloser((bytes.NewBufferString("body1"))),
+				},
+			},
 		},
 		{
-			name: "SharePointItemInfo",
+			name: "SharePointItemInfo, Libraries Category",
+			itemName: "name1",
+			itemID: "id1.data",
 			itemInfo: details.ItemInfo{
 				SharePoint: &details.SharePointInfo{
 					ItemType: details.SharePointLibrary,
-					ItemName: itemName,
+					ItemName: "name1",
 					Size: 1,
 					DriveName: driveName,
 					DriveID: driveID,
 				},
 			},
+			getCollPath: func(t *testing.T) path.Path {
+				p, err := dpb.ToDataLayerSharePointPath("t", "u", path.LibrariesCategory, false)
+				assert.NoError(t, err, "build path")
+
+				return p
+			},
+			statsCat: path.FilesCategory,
+			expectedPath: path.LibrariesCategory.HumanString() + "/" + driveName,
+			expectedItems: []export.Item{
+				{
+					ID: "id1.data",
+					Name: "name1",
+					Body: io.NopCloser((bytes.NewBufferString("body1"))),
+				},
+			},
+		},
+		{
+			name: "SharePointItemInfo, Lists Category",
+			itemName: "list1",
+			itemID: "listid1",
+			itemInfo: details.ItemInfo{
+				SharePoint: &details.SharePointInfo{
+					ItemType: details.SharePointList,
+					List: &details.ListInfo{
+						Name: "list1",
+						ItemCount: 10,
+					},
+				},
+			},
+			getCollPath: func(t *testing.T) path.Path {
+				p, err := path.Elements{"listid1"}.
+					Builder().
+					ToDataLayerSharePointListPath("t", "u", path.ListsCategory, false)
+				assert.NoError(t, err, "build path")
+
+				return p
+			},
+			statsCat: path.ListsCategory,
+			expectedPath: path.ListsCategory.HumanString() + "/listid1",
+			expectedItems: []export.Item{
+				{
+					ID: "listid1",
+					Name: "listid1.json",
+					Body: io.NopCloser((bytes.NewBufferString("body1"))),
+				},
+			},
 		},
 	}
 
@@ -115,15 +174,15 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 			dcs := []data.RestoreCollection{
 				data.FetchRestoreCollection{
 					Collection: dataMock.Collection{
-						Path: p,
+						Path: test.getCollPath(t),
 						ItemData: []data.Item{
 							&dataMock.Item{
-								ItemID: "id1.data",
+								ItemID: test.itemID,
 								Reader: io.NopCloser(bytes.NewBufferString("body1")),
 							},
 						},
 					},
-					FetchItemByNamer: finD{id: "id1.meta", name: itemName},
+					FetchItemByNamer: finD{id: "id1.meta", name: test.itemName},
 				},
 			}
 
@@ -142,7 +201,7 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 			assert.NoError(t, err, "export collections error")
 			assert.Len(t, ecs, 1, "num of collections")
 
-			assert.Equal(t, expectedPath, ecs[0].BasePath(), "base dir")
+			assert.Equal(t, test.expectedPath, ecs[0].BasePath(), "base dir")
 
 			fitems := []export.Item{}
 			size := 0
@@ -159,11 +218,11 @@ func (suite *ExportUnitSuite) TestExportRestoreCollections() {
 				fitems = append(fitems, item)
 			}
 
-			assert.Equal(t, expectedItems, fitems, "items")
+			assert.Equal(t, test.expectedItems, fitems, "items")
 
 			expectedStats := metrics.ExportStats{}
-			expectedStats.UpdateBytes(path.FilesCategory, int64(size))
-			expectedStats.UpdateResourceCount(path.FilesCategory)
+			expectedStats.UpdateBytes(test.statsCat, int64(size))
+			expectedStats.UpdateResourceCount(test.statsCat)
 			assert.Equal(t, expectedStats, stats, "stats")
 		})
 	}