GC: Backup: Sharepoint: Collection Implementation (#1477)

## Description
Creates a test package for the SharePoint List Collection. Verifies Item and ListInfo handling for the collection.


## Type of change

- [x] 🌻 Feature

## Issue(s)

* related to #1474

## Test Plan

- [x] Unit test: E2E
Danny 2022-11-14 12:51:44 -05:00 committed by GitHub
parent 10b992224c
commit 765fd6222b
3 changed files with 147 additions and 8 deletions

View File

@@ -1,11 +1,14 @@
package sharepoint
import (
"context"
"io"
"github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/connector/support"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path"
)
@@ -13,7 +16,8 @@ type DataCategory int
//go:generate stringer -type=DataCategory
const (
Unknown DataCategory = iota
collectionChannelBufferSize = 50
Unknown DataCategory = iota
List
Drive
)
@@ -25,15 +29,37 @@ var (
type Collection struct {
data chan data.Stream
// folderPath indicates the hierarchy within the collection
folderPath path.Path
jobs []string
// fullPath indicates the hierarchy within the collection
fullPath path.Path
// M365 IDs of the items of this collection
service graph.Service
statusUpdater support.StatusUpdater
}
func NewCollection(
folderPath path.Path,
service graph.Service,
statusUpdater support.StatusUpdater,
) *Collection {
c := &Collection{
fullPath: folderPath,
jobs: make([]string, 0),
data: make(chan data.Stream, collectionChannelBufferSize),
service: service,
statusUpdater: statusUpdater,
}
return c
}
// AddJob appends additional objectID to job field
func (sc *Collection) AddJob(objID string) {
sc.jobs = append(sc.jobs, objID)
}
func (sc *Collection) FullPath() path.Path {
return sc.FullPath()
return sc.fullPath
}
func (sc *Collection) Items() <-chan data.Stream {
@@ -43,6 +69,7 @@ func (sc *Collection) Items() <-chan data.Stream {
type Item struct {
id string
data io.ReadCloser
info *details.SharepointInfo
}
func (sd *Item) UUID() string {
@@ -52,3 +79,24 @@ func (sd *Item) UUID() string {
func (sd *Item) ToReader() io.ReadCloser {
return sd.data
}
func (sd *Item) Info() details.ItemInfo {
return details.ItemInfo{Sharepoint: sd.info}
}
func (sc *Collection) finishPopulation(ctx context.Context, success int, totalBytes int64, errs error) {
close(sc.data)
attempted := len(sc.jobs)
status := support.CreateStatus(
ctx,
support.Backup,
1,
support.CollectionMetrics{
Objects: attempted,
Successes: success,
TotalBytes: totalBytes,
},
errs,
sc.fullPath.Folder())
logger.Ctx(ctx).Debug(status.String())
}
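
Below is a minimal usage sketch of the collection API added in this file (NewCollection, AddJob, the buffered data channel, finishPopulation, Items). It is illustrative only and not part of this change: the exampleUsage function, the producer goroutine, the list ID, and the payload are hypothetical placeholders.

```go
package sharepoint

import (
	"bytes"
	"context"
	"io"

	"github.com/alcionai/corso/src/internal/connector/graph"
	"github.com/alcionai/corso/src/internal/connector/support"
	"github.com/alcionai/corso/src/pkg/backup/details"
	"github.com/alcionai/corso/src/pkg/path"
)

// exampleUsage is a hypothetical caller; it only exercises the API shown above.
func exampleUsage(
	ctx context.Context,
	dir path.Path,
	svc graph.Service,
	su support.StatusUpdater,
) {
	col := NewCollection(dir, svc, su)

	// Queue the M365 ID of a list this collection should back up.
	col.AddJob("list-id-1")

	// A producer would serialize each list into an Item, send it on the
	// buffered channel, and then close the channel via finishPopulation.
	go func() {
		payload := []byte("serialized list")
		col.data <- &Item{
			id:   "list-id-1",
			data: io.NopCloser(bytes.NewReader(payload)),
			info: &details.SharepointInfo{ItemName: "Mock List"},
		}
		col.finishPopulation(ctx, 1, int64(len(payload)), nil)
	}()

	// Consumers range over Items() until finishPopulation closes the channel.
	for item := range col.Items() {
		_ = item.UUID()
	}
}
```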

View File

@@ -0,0 +1,90 @@
package sharepoint
import (
"bytes"
"io"
"testing"
kw "github.com/microsoft/kiota-serialization-json-go"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/connector/mockconnector"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path"
)
type SharePointCollectionSuite struct {
suite.Suite
}
func TestSharePointCollectionSuite(t *testing.T) {
suite.Run(t, new(SharePointCollectionSuite))
}
func (suite *SharePointCollectionSuite) TestSharePointDataReader_Valid() {
t := suite.T()
m := []byte("test message")
name := "aFile"
sc := &Item{
id: name,
data: io.NopCloser(bytes.NewReader(m)),
}
readData, err := io.ReadAll(sc.ToReader())
require.NoError(t, err)
assert.Equal(t, name, sc.id)
assert.Equal(t, readData, m)
}
// TestSharePointListCollection tests basic functionality to create
// SharePoint collection and to use the data stream channel.
func (suite *SharePointCollectionSuite) TestSharePointListCollection() {
t := suite.T()
ctx, flush := tester.NewContext()
defer flush()
ow := kw.NewJsonSerializationWriter()
listing := mockconnector.GetMockList("Mock List")
testName := "MockListing"
listing.SetDisplayName(&testName)
err := ow.WriteObjectValue("", listing)
require.NoError(t, err)
byteArray, err := ow.GetSerializedContent()
require.NoError(t, err)
// TODO: Replace with Sharepoint--> ToDataLayerSharePoint
// https://github.com/alcionai/corso/issues/1401
dir, err := path.Builder{}.Append("directory").
ToDataLayerExchangePathForCategory(
"some",
"user",
path.EmailCategory,
false)
require.NoError(t, err)
col := NewCollection(dir, nil, nil)
col.data <- &Item{
id: testName,
data: io.NopCloser(bytes.NewReader(byteArray)),
info: sharepointListInfo(listing),
}
col.finishPopulation(ctx, 0, 0, nil)
readItems := []data.Stream{}
for item := range col.Items() {
readItems = append(readItems, item)
}
require.Equal(t, len(readItems), 1)
item := readItems[0]
shareInfo, ok := item.(data.StreamInfo)
require.True(t, ok)
require.NotNil(t, shareInfo.Info())
require.NotNil(t, shareInfo.Info().Sharepoint)
assert.Equal(t, testName, shareInfo.Info().Sharepoint.ItemName)
}
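
sharepointListInfo is called in the test above but not defined in this diff. Based on the assertions (the display name set on the mock list surfaces as Info().Sharepoint.ItemName), here is a sketch of the behavior it is assumed to have; the models.Listable parameter type and the Graph SDK import path are assumptions, not taken from this PR.

```go
package sharepoint

import (
	"github.com/microsoftgraph/msgraph-sdk-go/models"

	"github.com/alcionai/corso/src/pkg/backup/details"
)

// sharepointListInfoSketch is illustrative only; the real sharepointListInfo
// lives elsewhere in the package. It is assumed to map the list's display
// name onto SharepointInfo.ItemName, which is what the test asserts.
func sharepointListInfoSketch(lst models.Listable) *details.SharepointInfo {
	var name string
	if dn := lst.GetDisplayName(); dn != nil {
		name = *dn
	}

	return &details.SharepointInfo{ItemName: name}
}
```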

View File

@@ -8,9 +8,9 @@ func _() {
// An "invalid array index" compiler error signifies that the constant values have changed.
// Re-run the stringer command to generate them again.
var x [1]struct{}
_ = x[Unknown-0]
_ = x[List-1]
_ = x[Drive-2]
_ = x[Unknown-1]
_ = x[List-2]
_ = x[Drive-3]
}
const _DataCategory_name = "UnknownListDrive"
@@ -18,8 +18,9 @@ const _DataCategory_name = "UnknownListDrive"
var _DataCategory_index = [...]uint8{0, 7, 11, 16}
func (i DataCategory) String() string {
i -= 1
if i < 0 || i >= DataCategory(len(_DataCategory_index)-1) {
return "DataCategory(" + strconv.FormatInt(int64(i), 10) + ")"
return "DataCategory(" + strconv.FormatInt(int64(i+1), 10) + ")"
}
return _DataCategory_name[_DataCategory_index[i]:_DataCategory_index[i+1]]
}
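
The regenerated stringer output reflects an iota shift introduced in the first file of this change: collectionChannelBufferSize now occupies the first slot of the const block (iota == 0), so Unknown, List, and Drive start at 1, and String() subtracts 1 before indexing. A standalone sketch of the shift:

```go
package main

import "fmt"

type DataCategory int

const (
	collectionChannelBufferSize              = 50   // occupies iota == 0
	Unknown                     DataCategory = iota // 1 (no longer 0)
	List                                            // 2
	Drive                                           // 3
)

func main() {
	// Prints "1 2 3": the stringer's "i -= 1" compensates for this offset.
	fmt.Println(int(Unknown), int(List), int(Drive))
}
```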