GC: Backup: Sharepoint: Collection Implementation (#1477)
## Description

Creates a test package for the SharePoint List Collection. Verifies Item and ListInfo for the collection.

## Type of change

- [x] 🌻 Feature

## Issue(s)

* related to #1474

## Test Plan

- [x] ⚡ Unit test
- [x] E2E
Parent: 10b992224c
Commit: 765fd6222b
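Not part of the commit itself: the sketch below illustrates how the Collection API added in the diff that follows is meant to be driven end to end, assuming Items() returns the collection's data channel (its body is unchanged and elided in the hunks). The function name exampleCollectionUsage and the nil service/statusUpdater arguments are illustrative only, mirroring the shortcut taken in the new TestSharePointListCollection.

```go
// Hypothetical usage sketch, not part of this commit. It assumes Items()
// returns the collection's data channel.
package sharepoint

import (
	"context"
	"fmt"

	"github.com/alcionai/corso/src/pkg/path"
)

func exampleCollectionUsage(ctx context.Context, dir path.Path) {
	// nil service and statusUpdater mirror the shortcut in the new test.
	col := NewCollection(dir, nil, nil)

	// AddJob records the M365 object IDs that a later change will fetch and stream.
	col.AddJob("mockListID-1")
	col.AddJob("mockListID-2")

	// No producer exists yet in this PR, so the channel closes empty;
	// finishPopulation also logs CollectionMetrics for the run.
	col.finishPopulation(ctx, 0, 0, nil)

	// Consumers range over Items() until the channel is closed.
	read := 0
	for range col.Items() {
		read++
	}

	fmt.Println("items read:", read)
}
```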
src/internal/connector/sharepoint/collection.go
@@ -1,11 +1,14 @@
 package sharepoint

 import (
+    "context"
     "io"

     "github.com/alcionai/corso/src/internal/connector/graph"
     "github.com/alcionai/corso/src/internal/connector/support"
     "github.com/alcionai/corso/src/internal/data"
+    "github.com/alcionai/corso/src/pkg/backup/details"
+    "github.com/alcionai/corso/src/pkg/logger"
     "github.com/alcionai/corso/src/pkg/path"
 )

@@ -13,7 +16,8 @@ type DataCategory int

 //go:generate stringer -type=DataCategory
 const (
-    Unknown DataCategory = iota
+    collectionChannelBufferSize = 50
+    Unknown DataCategory = iota
     List
     Drive
 )
@@ -25,15 +29,37 @@ var (

 type Collection struct {
     data chan data.Stream
-    // folderPath indicates the hierarchy within the collection
-    folderPath path.Path
+    jobs []string
+    // fullPath indicates the hierarchy within the collection
+    fullPath path.Path
     // M365 IDs of the items of this collection
     service graph.Service
     statusUpdater support.StatusUpdater
 }

+func NewCollection(
+    folderPath path.Path,
+    service graph.Service,
+    statusUpdater support.StatusUpdater,
+) *Collection {
+    c := &Collection{
+        fullPath: folderPath,
+        jobs: make([]string, 0),
+        data: make(chan data.Stream, collectionChannelBufferSize),
+        service: service,
+        statusUpdater: statusUpdater,
+    }
+
+    return c
+}
+
+// AddJob appends additional objectID to job field
+func (sc *Collection) AddJob(objID string) {
+    sc.jobs = append(sc.jobs, objID)
+}
+
 func (sc *Collection) FullPath() path.Path {
-    return sc.FullPath()
+    return sc.fullPath
 }

 func (sc *Collection) Items() <-chan data.Stream {
@@ -43,6 +69,7 @@ func (sc *Collection) Items() <-chan data.Stream {
 type Item struct {
     id string
     data io.ReadCloser
+    info *details.SharepointInfo
 }

 func (sd *Item) UUID() string {
@@ -52,3 +79,24 @@ func (sd *Item) UUID() string {
 func (sd *Item) ToReader() io.ReadCloser {
     return sd.data
 }
+
+func (sd *Item) Info() details.ItemInfo {
+    return details.ItemInfo{Sharepoint: sd.info}
+}
+
+func (sc *Collection) finishPopulation(ctx context.Context, success int, totalBytes int64, errs error) {
+    close(sc.data)
+    attempted := len(sc.jobs)
+    status := support.CreateStatus(
+        ctx,
+        support.Backup,
+        1,
+        support.CollectionMetrics{
+            Objects: attempted,
+            Successes: success,
+            TotalBytes: totalBytes,
+        },
+        errs,
+        sc.fullPath.Folder())
+    logger.Ctx(ctx).Debug(status.String())
+}
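The change above wires up the buffered channel, the jobs slice, and status reporting, but defers actual item population to later work. Purely as a sketch (nothing below exists in this PR), a populate pass consistent with those fields could look like the following; populateSketch and fetchListBytes are invented placeholders, and error handling is simplified to keep the example short.

```go
// Sketch only: neither populateSketch nor fetchListBytes exists in this PR.
// It shows how jobs, the buffered data channel, and finishPopulation are
// expected to interact once list fetching is implemented.
package sharepoint

import (
	"bytes"
	"context"
	"io"

	"github.com/alcionai/corso/src/internal/connector/graph"
	"github.com/alcionai/corso/src/pkg/backup/details"
)

func (sc *Collection) populateSketch(ctx context.Context) {
	var (
		success    int
		totalBytes int64
		errs       error
	)

	for _, listID := range sc.jobs {
		bs, info, err := fetchListBytes(ctx, sc.service, listID)
		if err != nil {
			errs = err // real code would aggregate errors, not overwrite
			continue
		}

		// Send the serialized list into the buffered channel for consumers.
		sc.data <- &Item{
			id:   listID,
			data: io.NopCloser(bytes.NewReader(bs)),
			info: info,
		}

		success++
		totalBytes += int64(len(bs))
	}

	// Close the channel, build a support status, and log it.
	sc.finishPopulation(ctx, success, totalBytes, errs)
}

// fetchListBytes is an invented stand-in for the eventual Graph list
// download and serialization (the test below fakes this step with a mock
// list and the kiota JSON writer).
func fetchListBytes(
	_ context.Context,
	_ graph.Service,
	listID string,
) ([]byte, *details.SharepointInfo, error) {
	return []byte("placeholder"), &details.SharepointInfo{ItemName: listID}, nil
}
```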
src/internal/connector/sharepoint/collection_test.go (new file, 90 lines)
@@ -0,0 +1,90 @@
+package sharepoint
+
+import (
+    "bytes"
+    "io"
+    "testing"
+
+    kw "github.com/microsoft/kiota-serialization-json-go"
+    "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/require"
+    "github.com/stretchr/testify/suite"
+
+    "github.com/alcionai/corso/src/internal/connector/mockconnector"
+    "github.com/alcionai/corso/src/internal/data"
+    "github.com/alcionai/corso/src/internal/tester"
+    "github.com/alcionai/corso/src/pkg/path"
+)
+
+type SharePointCollectionSuite struct {
+    suite.Suite
+}
+
+func TestSharePointCollectionSuite(t *testing.T) {
+    suite.Run(t, new(SharePointCollectionSuite))
+}
+
+func (suite *SharePointCollectionSuite) TestSharePointDataReader_Valid() {
+    t := suite.T()
+    m := []byte("test message")
+    name := "aFile"
+    sc := &Item{
+        id: name,
+        data: io.NopCloser(bytes.NewReader(m)),
+    }
+    readData, err := io.ReadAll(sc.ToReader())
+    require.NoError(t, err)
+
+    assert.Equal(t, name, sc.id)
+    assert.Equal(t, readData, m)
+}
+
+// TestSharePointListCollection tests basic functionality to create
+// SharePoint collection and to use the data stream channel.
+func (suite *SharePointCollectionSuite) TestSharePointListCollection() {
+    t := suite.T()
+    ctx, flush := tester.NewContext()
+
+    defer flush()
+
+    ow := kw.NewJsonSerializationWriter()
+    listing := mockconnector.GetMockList("Mock List")
+    testName := "MockListing"
+    listing.SetDisplayName(&testName)
+
+    err := ow.WriteObjectValue("", listing)
+    require.NoError(t, err)
+
+    byteArray, err := ow.GetSerializedContent()
+    require.NoError(t, err)
+    // TODO: Replace with Sharepoint--> ToDataLayerSharePoint
+    // https://github.com/alcionai/corso/issues/1401
+    dir, err := path.Builder{}.Append("directory").
+        ToDataLayerExchangePathForCategory(
+            "some",
+            "user",
+            path.EmailCategory,
+            false)
+    require.NoError(t, err)
+
+    col := NewCollection(dir, nil, nil)
+    col.data <- &Item{
+        id: testName,
+        data: io.NopCloser(bytes.NewReader(byteArray)),
+        info: sharepointListInfo(listing),
+    }
+    col.finishPopulation(ctx, 0, 0, nil)
+
+    readItems := []data.Stream{}
+    for item := range col.Items() {
+        readItems = append(readItems, item)
+    }
+
+    require.Equal(t, len(readItems), 1)
+    item := readItems[0]
+    shareInfo, ok := item.(data.StreamInfo)
+    require.True(t, ok)
+    require.NotNil(t, shareInfo.Info())
+    require.NotNil(t, shareInfo.Info().Sharepoint)
+    assert.Equal(t, testName, shareInfo.Info().Sharepoint.ItemName)
+}
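A note on the consumer side of the new test: the final assertions depend on the stream element also satisfying data.StreamInfo, which is how item-level SharePoint details travel alongside the byte stream. The helper below is a sketch (not in the PR) of that same type-assertion pattern outside a test; itemNamesWithInfo is an invented name.

```go
// Sketch, not part of this commit: mirrors the data.StreamInfo assertion in
// TestSharePointListCollection for use by a hypothetical consumer.
package sharepoint

import "github.com/alcionai/corso/src/internal/data"

// itemNamesWithInfo collects the SharePoint item names of stream elements
// that expose backup details via the optional data.StreamInfo interface.
func itemNamesWithInfo(items <-chan data.Stream) []string {
	names := []string{}

	for item := range items {
		si, ok := item.(data.StreamInfo)
		if !ok || si.Info().Sharepoint == nil {
			continue
		}

		names = append(names, si.Info().Sharepoint.ItemName)
	}

	return names
}
```

In the test above, the single channel element is asserted the same way and its ItemName compared against the mock list's display name.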
src/internal/connector/sharepoint/datacategory_string.go
@@ -8,9 +8,9 @@ func _() {
     // An "invalid array index" compiler error signifies that the constant values have changed.
     // Re-run the stringer command to generate them again.
     var x [1]struct{}
-    _ = x[Unknown-0]
-    _ = x[List-1]
-    _ = x[Drive-2]
+    _ = x[Unknown-1]
+    _ = x[List-2]
+    _ = x[Drive-3]
 }

 const _DataCategory_name = "UnknownListDrive"
@@ -18,8 +18,9 @@ const _DataCategory_name = "UnknownListDrive"
 var _DataCategory_index = [...]uint8{0, 7, 11, 16}

 func (i DataCategory) String() string {
+    i -= 1
     if i < 0 || i >= DataCategory(len(_DataCategory_index)-1) {
-        return "DataCategory(" + strconv.FormatInt(int64(i), 10) + ")"
+        return "DataCategory(" + strconv.FormatInt(int64(i+1), 10) + ")"
     }
     return _DataCategory_name[_DataCategory_index[i]:_DataCategory_index[i+1]]
 }
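The regenerated stringer output above is a direct consequence of the const-block change in collection.go: collectionChannelBufferSize now occupies the first spec, so iota has already advanced to 1 when Unknown is declared, and the generated String() compensates by subtracting one. A standalone sketch (not from the PR) of the shifted values:

```go
// Standalone sketch, not from this commit: shows why DataCategory values
// start at 1 once another constant shares the const block.
package main

import "fmt"

type DataCategory int

const (
	collectionChannelBufferSize = 50 // first spec: iota == 0

	Unknown DataCategory = iota // second spec: iota == 1, so Unknown == 1
	List                        // 2
	Drive                       // 3
)

func main() {
	// The regenerated String() subtracts 1 before indexing
	// _DataCategory_name, matching these shifted values.
	fmt.Println(int(Unknown), int(List), int(Drive)) // prints: 1 2 3
}
```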