Use Libraries prefix when exporting SharePoint files (#4153)

As of now we only export SP libraries, but in the future we will have to export lists and pages. This ensures that we can accommodate them later.

<!-- PR description-->

---

#### Does this PR need a docs update or release note?

- [x]  Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [ ]  No

#### Type of change

<!--- Please check the type of change your PR introduces: --->
- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

<!-- Can reference multiple issues. Use one of the following "magic words" - "closes, fixes" to auto-close the Github issue. -->
* #<issue>

#### Test Plan

<!-- How will this be tested prior to merging.-->
- [x] 💪 Manual
- [x]  Unit test
- [ ] 💚 E2E
This commit is contained in:
Abin Simon 2023-08-31 11:36:01 +05:30 committed by GitHub
parent 08ab7abb76
commit 2c4cd663d1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 158 additions and 0 deletions

View File

@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased] (beta) ## [Unreleased] (beta)
### Changed
- SharePoint exported libraries are now exported with a `Libraries` prefix.
## [v0.12.0] (beta) - 2023-08-29 ## [v0.12.0] (beta) - 2023-08-29
### Added ### Added

View File

@ -47,6 +47,7 @@ func ProduceExportCollections(
} }
baseDir := path.Builder{}. baseDir := path.Builder{}.
Append("Libraries").
Append(driveName). Append(driveName).
Append(drivePath.Folders...) Append(drivePath.Folders...)

View File

@ -0,0 +1,154 @@
package sharepoint
import (
"bytes"
"context"
"io"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/data"
dataMock "github.com/alcionai/corso/src/internal/data/mock"
odConsts "github.com/alcionai/corso/src/internal/m365/service/onedrive/consts"
odStub "github.com/alcionai/corso/src/internal/m365/service/onedrive/stub"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/export"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
)
// ExportUnitSuite bundles unit tests for SharePoint export collection
// production; it runs without any live M365 connection.
type ExportUnitSuite struct {
	tester.Suite
}
func TestExportUnitSuite(t *testing.T) {
suite.Run(t, &ExportUnitSuite{Suite: tester.NewUnitSuite(t)})
}
// finD is a test double for fetching items by name. It serves a single
// canned item keyed by id, whose body is a JSON blob carrying name, or
// fails with err when one is configured.
type finD struct {
	id string
	name string
	err error
}
// FetchItemByName returns a mock item whose body is a metadata JSON blob
// (`{"filename": ...}`) when name matches the configured id. A configured
// err takes precedence; any other name yields assert.AnError.
func (fd finD) FetchItemByName(ctx context.Context, name string) (data.Item, error) {
	switch {
	case fd.err != nil:
		return nil, fd.err
	case name != fd.id:
		return nil, assert.AnError
	}

	body := `{"filename": "` + fd.name + `"}`

	return &dataMock.Item{
		ItemID: fd.id,
		Reader: io.NopCloser(bytes.NewBufferString(body)),
	}, nil
}
// mockRestoreCollection is a minimal in-memory data.RestoreCollection stand-in:
// a fixed path plus a slice of mock items to stream from Items().
type mockRestoreCollection struct {
	path path.Path
	items []*dataMock.Item
}
// Items streams the collection's mock items over a channel. Items carrying a
// ReadErr are not emitted; their errors are recorded on the fault bus as
// recoverable instead. The channel is closed once all items are processed.
func (rc mockRestoreCollection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item {
	out := make(chan data.Item)

	go func() {
		defer close(out)

		bus := errs.Local()

		for _, itm := range rc.items {
			if itm.ReadErr == nil {
				out <- itm
				continue
			}

			bus.AddRecoverable(ctx, itm.ReadErr)
		}
	}()

	return out
}
// FullPath returns the storage path backing this mock collection.
func (rc mockRestoreCollection) FullPath() path.Path {
	return rc.path
}
// TestExportRestoreCollections verifies that a SharePoint restore collection
// is translated into a single export collection rooted under the
// "Libraries/<driveName>" prefix, and that item IDs, names, and bodies
// survive the translation.
//
// Fix: dropped the redundant extra parentheses around
// bytes.NewBufferString in the expected item's Body (non-gofmt idiom).
func (suite *ExportUnitSuite) TestExportRestoreCollections() {
	t := suite.T()

	ctx, flush := tester.NewContext(t)
	defer flush()

	var (
		driveID   = "driveID1"
		driveName = "driveName1"
		exportCfg = control.ExportConfig{}
		dpb       = odConsts.DriveFolderPrefixBuilder(driveID)
		cache     = idname.NewCache(
			// Cache check with lowercased ids
			map[string]string{strings.ToLower(driveID): driveName},
		)
		dii = odStub.DriveItemInfo()
		// Exported SharePoint libraries are placed under a "Libraries" prefix.
		expectedPath  = "Libraries/" + driveName
		expectedItems = []export.Item{
			{
				ID: "id1.data",
				Data: export.ItemData{
					Name: "name1",
					Body: io.NopCloser(bytes.NewBufferString("body1")),
				},
			},
		}
	)

	dii.OneDrive.ItemName = "name1"

	p, err := dpb.ToDataLayerOneDrivePath("t", "u", false)
	assert.NoError(t, err, "build path")

	dcs := []data.RestoreCollection{
		data.FetchRestoreCollection{
			Collection: mockRestoreCollection{
				path: p,
				items: []*dataMock.Item{
					{
						ItemID:   "id1.data",
						Reader:   io.NopCloser(bytes.NewBufferString("body1")),
						ItemInfo: dii,
					},
				},
			},
			// the .meta fetch supplies the display name for the .data item.
			FetchItemByNamer: finD{id: "id1.meta", name: "name1"},
		},
	}

	ecs, err := ProduceExportCollections(
		ctx,
		int(version.Backup),
		exportCfg,
		control.DefaultOptions(),
		dcs,
		cache,
		nil,
		fault.New(true))
	assert.NoError(t, err, "export collections error")
	assert.Len(t, ecs, 1, "num of collections")

	assert.Equal(t, expectedPath, ecs[0].BasePath(), "base dir")

	fitems := []export.Item{}
	for item := range ecs[0].Items(ctx) {
		fitems = append(fitems, item)
	}

	assert.Equal(t, expectedItems, fitems, "items")
}