Persist metadata files for group mailbox posts (#5135)
* Each post now has a `.data` and a `.meta` file.
* To keep this PR short, I've only made changes to the groups lazy reader. Note that channels have no meta/data file concept, and we don't want to accidentally enable this for channels. Since channels don't use the lazy reader, this is safe in the short term.
* I'll be adding small follow-up PRs to 1) make sure channel files don't get assigned the `.data` suffix, and 2) add support for data and meta files in the prefetch conversations backup.

---

#### Does this PR need a docs update or release note?

- [ ] ✅ Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] ⛔ No

#### Type of change

- [x] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #<issue>

#### Test Plan

- [ ] 💪 Manual
- [x] ⚡ Unit test
- [ ] 💚 E2E
Parent: f1406a3334
Commit: 50ba30539a
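Reviewer note: a minimal, self-contained sketch of the naming convention this PR introduces. The helper below is hypothetical; the real suffixes come from `metadata.DataFileSuffix` and `metadata.MetaFileSuffix` in the diff.

```go
package main

import "fmt"

// storageNames is a hypothetical illustration of the file-naming rule in
// this PR: conversation posts are persisted as an <id>.data/<id>.meta pair,
// while channel messages keep their bare item ID.
func storageNames(itemID string, supportsItemMetadata bool) []string {
	if supportsItemMetadata {
		// Mirrors metadata.DataFileSuffix (".data") and
		// metadata.MetaFileSuffix (".meta") from the diff below.
		return []string{itemID + ".data", itemID + ".meta"}
	}

	return []string{itemID}
}

func main() {
	fmt.Println(storageNames("post123", true)) // [post123.data post123.meta]
	fmt.Println(storageNames("msg456", false)) // [msg456]
}
```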
```diff
@@ -2,6 +2,7 @@ package groups

 import (
 	"context"
+	"io"
 	"testing"
 	"time"

@@ -60,6 +61,11 @@ func (bh mockBackupHandler) augmentItemInfo(
 	// no-op
 }

+//lint:ignore U1000 false linter issue due to generics
+func (bh mockBackupHandler) supportsItemMetadata() bool {
+	return false
+}
+
 func (bh mockBackupHandler) canMakeDeltaQueries() bool {
 	return true
 }
@@ -136,6 +142,13 @@ func (bh mockBackupHandler) getItem(
 	return bh.messages[itemID], bh.info[itemID], bh.getMessageErr[itemID]
 }

+func (bh mockBackupHandler) getItemMetadata(
+	_ context.Context,
+	_ models.Channelable,
+) (io.ReadCloser, int, error) {
+	return nil, 0, errMetadataFilesNotSupported
+}
+
 // ---------------------------------------------------------------------------
 // Unit Suite
 // ---------------------------------------------------------------------------
```
```diff
@@ -2,6 +2,7 @@ package groups

 import (
 	"context"
+	"io"

 	"github.com/alcionai/clues"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
@@ -105,6 +106,17 @@ func (bh channelsBackupHandler) getItem(
 	return bh.ac.GetChannelMessage(ctx, groupID, containerIDs[0], messageID)
 }

+// Channel messages don't carry metadata files. Return unsupported error.
+// Adding this method for interface compliance.
+//
+//lint:ignore U1000 false linter issue due to generics
+func (bh channelsBackupHandler) getItemMetadata(
+	_ context.Context,
+	_ models.Channelable,
+) (io.ReadCloser, int, error) {
+	return nil, 0, errMetadataFilesNotSupported
+}
+
 //lint:ignore U1000 false linter issue due to generics
 func (bh channelsBackupHandler) augmentItemInfo(
 	dgi *details.GroupsInfo,
@@ -113,6 +125,12 @@ func (bh channelsBackupHandler) augmentItemInfo(
 	// no-op
 }

+//lint:ignore U1000 false linter issue due to generics
+func (bh channelsBackupHandler) supportsItemMetadata() bool {
+	// No .data and .meta files for channel messages
+	return false
+}
+
 func channelContainer(ch models.Channelable) container[models.Channelable] {
 	return container[models.Channelable]{
 		storageDirFolders: path.Elements{ptr.Val(ch.GetId())},
```
```diff
@@ -23,6 +23,7 @@ import (
 	"github.com/alcionai/corso/src/pkg/logger"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/services/m365/api/graph"
+	"github.com/alcionai/corso/src/pkg/services/m365/api/graph/metadata"
 )

 var (
@@ -30,6 +31,8 @@ var (
 	_ data.BackupCollection = &lazyFetchCollection[graph.GetIDer, groupsItemer]{}
 )

+var errMetadataFilesNotSupported = clues.New("metadata files not supported")
+
 const (
 	collectionChannelBufferSize = 1000
 	numberOfRetries             = 4
@@ -176,7 +179,15 @@ func (col *prefetchCollection[C, I]) streamItems(ctx context.Context, errs *fault.Bus) {
 			defer wg.Done()
 			defer func() { <-semaphoreCh }()

+			// This is a no-op for conversations, as there is no way to detect
+			// deleted items. It might be added in future if graph supports it,
+			// so make sure we put up both .data and .meta files for deletions.
+			if col.getAndAugment.supportsItemMetadata() {
+				col.stream <- data.NewDeletedItem(id + metadata.DataFileSuffix)
+				col.stream <- data.NewDeletedItem(id + metadata.MetaFileSuffix)
+			} else {
 				col.stream <- data.NewDeletedItem(id)
+			}

 			atomic.AddInt64(&streamedItems, 1)
 			col.Counter.Inc(count.StreamItemsRemoved)
@@ -200,6 +211,18 @@ func (col *prefetchCollection[C, I]) streamItems(ctx context.Context, errs *fault.Bus) {
 			defer wg.Done()
 			defer func() { <-semaphoreCh }()

+			ictx := clues.Add(
+				ctx,
+				"item_id", id,
+				"parent_path", path.LoggableDir(col.LocationPath().String()))
+
+			// Conversation posts carry a .data suffix, while channel messages
+			// don't have any suffix. Metadata files are only supported for conversations.
+			dataFile := id
+			if col.getAndAugment.supportsItemMetadata() {
+				dataFile += metadata.DataFileSuffix
+			}
+
 			writer := kjson.NewJsonSerializationWriter()
 			defer writer.Close()

@@ -234,24 +257,48 @@ func (col *prefetchCollection[C, I]) streamItems(ctx context.Context, errs *fault.Bus) {

 			info.ParentPath = col.LocationPath().String()

-			storeItem, err := data.NewPrefetchedItemWithInfo(
+			dataItem, err := data.NewPrefetchedItemWithInfo(
 				io.NopCloser(bytes.NewReader(itemData)),
-				id,
+				dataFile,
 				details.ItemInfo{Groups: info})
 			if err != nil {
-				err := clues.StackWC(ctx, err).Label(fault.LabelForceNoBackupCreation)
-				el.AddRecoverable(ctx, err)
+				err := clues.StackWC(ictx, err).Label(fault.LabelForceNoBackupCreation)
+				el.AddRecoverable(ictx, err)

 				return
 			}

-			col.stream <- storeItem
+			// Handle metadata before data so that if metadata file fails,
+			// we are not left with an orphaned data file. No op for channel
+			// messages.
+			if col.getAndAugment.supportsItemMetadata() {
+				metaFile := id + metadata.MetaFileSuffix
+
+				// Use modTime from item info as it's the latest.
+				metaItem, err := downloadItemMeta[C, I](
+					ictx,
+					col.getAndAugment,
+					col.contains,
+					metaFile,
+					info.Modified,
+					el)
+				if err != nil {
+					return
+				}
+
+				col.stream <- metaItem
+			}
+
+			// Add data file to collection only if the metadata file (if any) was
+			// successfully added. This also helps to maintain consistency between
+			// prefetch & lazy fetch collection behaviors.
+			col.stream <- dataItem

 			atomic.AddInt64(&streamedItems, 1)
 			atomic.AddInt64(&totalBytes, info.Size)

 			if col.Counter.Inc(count.StreamItemsAdded)%1000 == 0 {
-				logger.Ctx(ctx).Infow("item stream progress", "stats", col.Counter.Values())
+				logger.Ctx(ictx).Infow("item stream progress", "stats", col.Counter.Values())
 			}

 			col.Counter.Add(count.StreamBytesAdded, info.Size)
@@ -341,7 +388,16 @@ func (col *lazyFetchCollection[C, I]) streamItems(ctx context.Context, errs *fault.Bus) {
 			defer wg.Done()
 			defer func() { <-semaphoreCh }()

+			// This is a no-op for conversations, as there is no way to detect
+			// deleted items in a conversation. It might be added in the future
+			// if graph supports it, so make sure we put up both .data and .meta
+			// files for deletions.
+			if col.getAndAugment.supportsItemMetadata() {
+				col.stream <- data.NewDeletedItem(id + metadata.DataFileSuffix)
+				col.stream <- data.NewDeletedItem(id + metadata.MetaFileSuffix)
+			} else {
 				col.stream <- data.NewDeletedItem(id)
+			}

 			atomic.AddInt64(&streamedItems, 1)
 			col.Counter.Inc(count.StreamItemsRemoved)
@@ -370,6 +426,37 @@ func (col *lazyFetchCollection[C, I]) streamItems(ctx context.Context, errs *fault.Bus) {
 				"item_id", id,
 				"parent_path", path.LoggableDir(col.LocationPath().String()))

+			// Channel message files don't carry .data suffix, post files do.
+			dataFile := id
+
+			// Handle metadata before data so that if metadata file fails,
+			// we are not left with an orphaned data file.
+			//
+			// If the data download fails for some reason other than deleted in
+			// flight, we will still end up persisting a .meta file. This is
+			// fine since the next backup will overwrite it.
+			//
+			// If item is deleted in flight, we will end up with an orphaned
+			// .meta file. The only impact here is storage bloat, which
+			// is minimal.
+			if col.getAndAugment.supportsItemMetadata() {
+				dataFile += metadata.DataFileSuffix
+				metaFile := id + metadata.MetaFileSuffix
+
+				metaItem, err := downloadItemMeta[C, I](
+					ictx,
+					col.getAndAugment,
+					col.contains,
+					metaFile,
+					modTime,
+					el)
+				if err != nil {
+					return
+				}
+
+				col.stream <- metaItem
+			}
+
 			col.stream <- data.NewLazyItemWithInfo(
 				ictx,
 				&lazyItemGetter[C, I]{
@@ -381,7 +468,7 @@ func (col *lazyFetchCollection[C, I]) streamItems(ctx context.Context, errs *fault.Bus) {
 					contains:   col.contains,
 					parentPath: col.LocationPath().String(),
 				},
-				id,
+				dataFile,
 				modTime,
 				col.Counter,
 				el)
@@ -463,3 +550,36 @@ func (lig *lazyItemGetter[C, I]) GetData(
 		false,
 		nil
 }
+
+func downloadItemMeta[C graph.GetIDer, I groupsItemer](
+	ctx context.Context,
+	gim getItemMetadataer[C, I],
+	c container[C],
+	metaFile string,
+	modTime time.Time,
+	errs *fault.Bus,
+) (data.Item, error) {
+	itemMeta, _, err := gim.getItemMetadata(
+		ctx,
+		c.container)
+	if err != nil {
+		errs.AddRecoverable(ctx, clues.StackWC(ctx, err))
+
+		return nil, err
+	}
+
+	// Skip adding progress reader for metadata files. It doesn't add
+	// much value.
+	storeItem, err := data.NewPrefetchedItem(
+		itemMeta,
+		metaFile,
+		// Use the same last modified time as post's.
+		modTime)
+	if err != nil {
+		errs.AddRecoverable(ctx, clues.StackWC(ctx, err))
+
+		return nil, err
+	}
+
+	return storeItem, nil
+}
```
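Reviewer note: both collections above follow the same ordering contract when streaming a post. A simplified, runnable sketch with hypothetical types (the real code uses `data.Item` and reports errors through the fault bus):

```go
package main

import "fmt"

// item is a hypothetical stand-in for data.Item.
type item struct{ name string }

// emitPost sketches the ordering contract used by both collections: the
// .meta item is streamed first, and the .data item only if the metadata
// fetch succeeded, so a failed fetch never leaves an orphaned .data file.
func emitPost(stream chan<- item, fetchMeta func() (item, error), dataItem item) {
	metaItem, err := fetchMeta()
	if err != nil {
		// Skipping both files keeps the .data/.meta pair consistent; the
		// real code records the error on the fault bus before returning.
		return
	}

	stream <- metaItem
	stream <- dataItem
}

func main() {
	stream := make(chan item, 2)

	emitPost(stream, func() (item, error) { return item{"p1.meta"}, nil }, item{"p1.data"})
	close(stream)

	for it := range stream {
		fmt.Println(it.name) // prints p1.meta, then p1.data
	}
}
```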
```diff
@@ -5,6 +5,7 @@ import (
 	"context"
 	"io"
 	"slices"
+	"strings"
 	"testing"
 	"time"

@@ -13,6 +14,7 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"
+	"golang.org/x/exp/maps"

 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/common/readers"
@@ -165,12 +167,26 @@ func (m getAndAugmentChannelMessage) getItem(
 	return msg, &details.GroupsInfo{}, m.Err
 }

+//lint:ignore U1000 false linter issue due to generics
+func (getAndAugmentChannelMessage) getItemMetadata(
+	_ context.Context,
+	_ models.Channelable,
+) (io.ReadCloser, int, error) {
+	return nil, 0, errMetadataFilesNotSupported
+}
+
 //lint:ignore U1000 false linter issue due to generics
 func (getAndAugmentChannelMessage) augmentItemInfo(*details.GroupsInfo, models.Channelable) {
 	// no-op
 }

-func (suite *CollectionUnitSuite) TestPrefetchCollection_streamItems() {
+//lint:ignore U1000 false linter issue due to generics
+func (getAndAugmentChannelMessage) supportsItemMetadata() bool {
+	return false
+}
+
+// Test prefetch streamItems() for channel messages.
+func (suite *CollectionUnitSuite) TestPrefetchCollection_ChannelMessages() {
 	var (
 		t             = suite.T()
 		start         = time.Now().Add(-1 * time.Second)
@@ -249,6 +265,11 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_streamItems() {
 			go col.streamItems(ctx, errs)

 			for item := range col.stream {
+				// Make sure item IDs don't have .data and .meta suffixes.
+				// Those are only meant for conversations.
+				assert.False(t, strings.HasSuffix(item.ID(), ".data"))
+				assert.False(t, strings.HasSuffix(item.ID(), ".meta"))
+
 				itemCount++

 				_, aok := test.added[item.ID()]
@@ -277,8 +298,164 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_streamItems() {
 	}
 }

+// Test prefetch streamItems() for conversations.
+func (suite *CollectionUnitSuite) TestPrefetchCollection_Conversations() {
+	var (
+		t             = suite.T()
+		start         = time.Now().Add(-1 * time.Second)
+		statusUpdater = func(*support.ControllerOperationStatus) {}
+	)
+
+	fullPath, err := path.Build("t", "pr", path.GroupsService, path.ConversationPostsCategory, false, "f", "s")
+	require.NoError(t, err, clues.ToCore(err))
+
+	locPath, err := path.Build("t", "pr", path.GroupsService, path.ConversationPostsCategory, false, "f", "s")
+	require.NoError(t, err, clues.ToCore(err))
+
+	table := []struct {
+		name    string
+		added   map[string]time.Time
+		removed map[string]struct{}
+	}{
+		{
+			name: "no items",
+		},
+		{
+			name: "only added items",
+			added: map[string]time.Time{
+				"fisher":    {},
+				"flannigan": {},
+				"fitzbog":   {},
+			},
+		},
+		{
+			name: "only removed items",
+			removed: map[string]struct{}{
+				"princess": {},
+				"poppy":    {},
+				"petunia":  {},
+			},
+		},
+		{
+			name: "added and removed items",
+			added: map[string]time.Time{
+				"goblin": {},
+			},
+			removed: map[string]struct{}{
+				"general":  {},
+				"goose":    {},
+				"grumbles": {},
+			},
+		},
+	}
+
+	for _, test := range table {
+		suite.Run(test.name, func() {
+			var (
+				t         = suite.T()
+				errs      = fault.New(true)
+				itemCount int
+				itemMap   = map[string]data.Item{}
+			)
+
+			ctx, flush := tester.NewContext(t)
+			defer flush()
+
+			col := &prefetchCollection[models.Conversationable, models.Postable]{
+				BaseCollection: data.NewBaseCollection(
+					fullPath,
+					nil,
+					locPath.ToBuilder(),
+					control.DefaultOptions(),
+					false,
+					count.New()),
+				added:         test.added,
+				contains:      container[models.Conversationable]{},
+				removed:       test.removed,
+				getAndAugment: &getAndAugmentConversation{},
+				stream:        make(chan data.Item),
+				statusUpdater: statusUpdater,
+			}
+
+			go col.streamItems(ctx, errs)
+
+			for item := range col.stream {
+				var trimmedID string
+
+				switch {
+				case strings.HasSuffix(item.ID(), ".data"):
+					trimmedID = strings.TrimSuffix(item.ID(), ".data")
+				case strings.HasSuffix(item.ID(), ".meta"):
+					trimmedID = strings.TrimSuffix(item.ID(), ".meta")
+				default:
+					assert.Fail(t, "unexpected item suffix: %s", item.ID())
+				}
+
+				itemCount++
+				itemMap[item.ID()] = item
+
+				_, aok := test.added[trimmedID]
+				if aok {
+					assert.False(t, item.Deleted(), "additions should not be marked as deleted")
+				}
+
+				_, rok := test.removed[trimmedID]
+				if rok {
+					assert.True(t, item.Deleted(), "removals should be marked as deleted")
+					dimt, ok := item.(data.ItemModTime)
+					require.True(t, ok, "item implements data.ItemModTime")
+					assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
+				}
+
+				assert.True(t, aok || rok, "item must be either added or removed: %q", item.ID())
+			}
+
+			assert.NoError(t, errs.Failure())
+			assert.Equal(
+				t,
+				2*(len(test.added)+len(test.removed)),
+				itemCount,
+				"should see all expected items")
+
+			addedAndRemoved := append(maps.Keys(test.added), maps.Keys(test.removed)...)
+			for _, id := range addedAndRemoved {
+				// Should have a .data and a .meta file
+				d, ok := itemMap[id+".data"]
+				assert.True(t, ok, "should have data file for %q", id)
+
+				m, ok := itemMap[id+".meta"]
+				assert.True(t, ok, "should have meta file for %q", id)
+
+				// Meta files should not have item info.
+				assert.Implements(t, (*data.Item)(nil), m)
+
+				if slices.Contains(maps.Keys(test.removed), id) {
+					continue
+				}
+
+				// Mod times should match. Not doing this check for removed items
+				// since mod time is set to now() for them.
+				assert.Equal(
+					t,
+					d.(data.ItemModTime).ModTime(),
+					m.(data.ItemModTime).ModTime(),
+					"item mod time")
+
+				// Read meta file data. The data is of no significance, we just want
+				// to make sure the file is readable.
+				r := m.ToReader()
+
+				_, err := io.ReadAll(r)
+				assert.NoError(t, err, clues.ToCore(err))
+
+				r.Close()
+			}
+		})
+	}
+}
+
 type getAndAugmentConversation struct {
 	GetItemErr error
+	GetMetaErr error
 	CallIDs    []string
 }

@@ -297,12 +474,25 @@ func (m *getAndAugmentConversation) getItem(
 	return p, &details.GroupsInfo{}, m.GetItemErr
 }

+//lint:ignore U1000 false linter issue due to generics
+func (m *getAndAugmentConversation) getItemMetadata(
+	_ context.Context,
+	_ models.Conversationable,
+) (io.ReadCloser, int, error) {
+	return io.NopCloser(strings.NewReader("test")), 4, m.GetMetaErr
+}
+
 //
 //lint:ignore U1000 false linter issue due to generics
 func (m *getAndAugmentConversation) augmentItemInfo(*details.GroupsInfo, models.Conversationable) {
 	// no-op
 }

+//lint:ignore U1000 false linter issue due to generics
+func (m *getAndAugmentConversation) supportsItemMetadata() bool {
+	return true
+}
+
 func (m *getAndAugmentConversation) check(t *testing.T, expected []string) {
 	// Sort before comparing. We could use a set, but that would prevent us from
 	// detecting duplicates.
@@ -312,7 +502,8 @@ func (m *getAndAugmentConversation) check(t *testing.T, expected []string) {
 	assert.Equal(t, expected, m.CallIDs, "expected calls")
 }

-func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
+// Test lazy streamItems() for conversations.
+func (suite *CollectionUnitSuite) TestLazyFetchCollection_Conversations() {
 	var (
 		t     = suite.T()
 		start = time.Now().Add(-time.Second)
@@ -343,13 +534,11 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
 			added: map[string]time.Time{
 				"fisher":    start.Add(time.Minute),
 				"flannigan": start.Add(2 * time.Minute),
-				"fitzbog":   start.Add(3 * time.Minute),
 			},
-			expectItemCount: 3,
+			expectItemCount: 4,
 			expectReads: []string{
 				"fisher",
 				"flannigan",
-				"fitzbog",
 			},
 		},
 		{
@@ -359,7 +548,7 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
 				"poppy":   {},
 				"petunia": {},
 			},
-			expectItemCount: 3,
+			expectItemCount: 6,
 		},
 		{
 			// TODO(pandeyabs): Overlaps between added and removed are deleted
@@ -371,14 +560,14 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
 			// prefetch collections.
 			name: "added and removed items",
 			added: map[string]time.Time{
-				"goblin": {},
+				"goblin": start.Add(time.Minute),
 			},
 			removed: map[string]struct{}{
 				"general":  {},
 				"goose":    {},
 				"grumbles": {},
 			},
-			expectItemCount: 4,
+			expectItemCount: 8,
 		},
 	}

@@ -387,7 +576,7 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
 			var (
 				t    = suite.T()
 				errs = fault.New(true)
-				itemCount int
+				itemMap = map[string]data.Item{}
 			)

 			ctx, flush := tester.NewContext(t)
@@ -413,9 +602,20 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
 			}

 			for item := range col.Items(ctx, errs) {
-				itemCount++
+				var trimmedID string

-				_, rok := test.removed[item.ID()]
+				switch {
+				case strings.HasSuffix(item.ID(), ".data"):
+					trimmedID = strings.TrimSuffix(item.ID(), ".data")
+				case strings.HasSuffix(item.ID(), ".meta"):
+					trimmedID = strings.TrimSuffix(item.ID(), ".meta")
+				default:
+					assert.Fail(t, "unexpected item suffix: %s", item.ID())
+				}
+
+				itemMap[item.ID()] = item
+
+				_, rok := test.removed[trimmedID]
 				if rok {
 					assert.True(t, item.Deleted(), "removals should be marked as deleted")
 					dimt, ok := item.(data.ItemModTime)
@@ -423,7 +623,7 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
 					assert.True(t, dimt.ModTime().After(start), "deleted items should set mod time to now()")
 				}

-				modTime, aok := test.added[item.ID()]
+				modTime, aok := test.added[trimmedID]
 				if !rok && aok {
 					// Item's mod time should be what's passed into the collection
 					// initializer.
@@ -434,7 +634,10 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {

 				// Check if the test wants us to read the item's data so the lazy
 				// data fetch is executed.
-				if slices.Contains(test.expectReads, item.ID()) {
+				//
+				// Inspect with trimmedID since getItem() operates on ID, not the file
+				if strings.HasSuffix(item.ID(), ".data") &&
+					slices.Contains(test.expectReads, trimmedID) {
 					r := item.ToReader()

 					_, err := io.ReadAll(r)
@@ -458,9 +661,40 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
 			assert.NoError(t, errs.Failure())
 			assert.Equal(
 				t,
-				test.expectItemCount,
-				itemCount,
+				test.expectItemCount, // 2*(len(test.added)+len(test.removed)),
+				len(itemMap),
 				"should see all expected items")
+
+			addedAndRemoved := append(maps.Keys(test.added), maps.Keys(test.removed)...)
+			for _, id := range addedAndRemoved {
+				// Should have a .data and a .meta file
+				d, ok := itemMap[id+".data"]
+				assert.True(t, ok, "should have data file for %q", id)
+
+				m, ok := itemMap[id+".meta"]
+				assert.True(t, ok, "should have meta file for %q", id)
+
+				// Meta files should not have item info.
+				assert.Implements(t, (*data.Item)(nil), m)
+
+				if slices.Contains(maps.Keys(test.removed), id) {
+					continue
+				}
+
+				// Mod times should match. Not doing this check for removed items
+				// since mod time is set to now() for them.
+				assert.Equal(t, d.(data.ItemModTime).ModTime(), m.(data.ItemModTime).ModTime(), "item mod time")
+
+				// Read meta file data. The data is of no significance, we just want
+				// to make sure the file is readable.
+				r := m.ToReader()
+
+				_, err := io.ReadAll(r)
+				assert.NoError(t, err, clues.ToCore(err))
+
+				r.Close()
+			}
 		})
 	}
 }
```
```diff
@@ -1,12 +1,16 @@
 package groups

 import (
+	"bytes"
 	"context"
+	"encoding/json"
+	"io"

 	"github.com/alcionai/clues"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"

 	"github.com/alcionai/corso/src/internal/common/ptr"
+	"github.com/alcionai/corso/src/internal/m365/collection/groups/metadata"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/path"
 	"github.com/alcionai/corso/src/pkg/selectors"
@@ -132,6 +136,24 @@ func (bh conversationsBackupHandler) getItem(
 		postID)
 }

+//lint:ignore U1000 false linter issue due to generics
+func (bh conversationsBackupHandler) getItemMetadata(
+	ctx context.Context,
+	c models.Conversationable,
+) (io.ReadCloser, int, error) {
+	meta := metadata.ConversationPostMetadata{
+		Recipients: []string{bh.resourceEmail},
+		Topic:      ptr.Val(c.GetTopic()),
+	}
+
+	metaJSON, err := json.Marshal(meta)
+	if err != nil {
+		return nil, 0, clues.WrapWC(ctx, err, "serializing post metadata")
+	}
+
+	return io.NopCloser(bytes.NewReader(metaJSON)), len(metaJSON), nil
+}
+
 //lint:ignore U1000 false linter issue due to generics
 func (bh conversationsBackupHandler) augmentItemInfo(
 	dgi *details.GroupsInfo,
@@ -148,6 +170,11 @@ func (bh conversationsBackupHandler) augmentItemInfo(
 	dgi.Post.Topic = ptr.Val(c.GetTopic())
 }

+//lint:ignore U1000 false linter issue due to generics
+func (bh conversationsBackupHandler) supportsItemMetadata() bool {
+	return true
+}
+
 func conversationThreadContainer(
 	c models.Conversationable,
 	t models.ConversationThreadable,
```
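Reviewer note: the `.meta` payload produced by `getItemMetadata` is a small JSON document. A runnable sketch of its shape, using a local copy of the struct added below (the values are illustrative, not from a real backup):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// ConversationPostMetadata is a local copy of
// metadata.ConversationPostMetadata, with the same fields and JSON tags
// as the new file in this PR.
type ConversationPostMetadata struct {
	Recipients []string `json:"recipients,omitempty"`
	Topic      string   `json:"topic,omitempty"`
}

func main() {
	meta := ConversationPostMetadata{
		Recipients: []string{"group@example.com"}, // the group mailbox address
		Topic:      "Quarterly planning",          // illustrative topic
	}

	b, _ := json.Marshal(meta)
	fmt.Println(string(b))
	// {"recipients":["group@example.com"],"topic":"Quarterly planning"}
}
```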
```diff
@@ -0,0 +1,66 @@
+package groups
+
+import (
+	"encoding/json"
+	"io"
+	"testing"
+
+	"github.com/alcionai/clues"
+	"github.com/microsoftgraph/msgraph-sdk-go/models"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+	"github.com/stretchr/testify/suite"
+
+	"github.com/alcionai/corso/src/internal/common/ptr"
+	"github.com/alcionai/corso/src/internal/m365/collection/groups/metadata"
+	"github.com/alcionai/corso/src/internal/tester"
+)
+
+const (
+	resourceEmail = "test@example.com"
+)
+
+type ConversationHandlerUnitSuite struct {
+	tester.Suite
+}
+
+func TestConversationHandlerUnitSuite(t *testing.T) {
+	suite.Run(t, &ConversationHandlerUnitSuite{Suite: tester.NewUnitSuite(t)})
+}
+
+// Basic test to ensure metadata is serialized and deserialized correctly.
+func (suite *ConversationHandlerUnitSuite) TestGetItemMetadata() {
+	var (
+		t  = suite.T()
+		bh = conversationsBackupHandler{
+			resourceEmail: resourceEmail,
+		}
+
+		topic = "test topic"
+		conv  = models.NewConversation()
+	)
+
+	ctx, flush := tester.NewContext(t)
+	defer flush()
+
+	conv.SetTopic(&topic)
+
+	rc, size, err := bh.getItemMetadata(ctx, conv)
+	assert.NoError(t, err, clues.ToCore(err))
+
+	require.NotNil(t, rc, "nil read closer")
+	assert.Greater(t, size, 0, "incorrect size")
+
+	defer rc.Close()
+
+	m, err := io.ReadAll(rc)
+	assert.NoError(t, err, "reading metadata")
+
+	var meta metadata.ConversationPostMetadata
+
+	err = json.Unmarshal(m, &meta)
+	assert.NoError(t, err, "deserializing metadata")
+
+	assert.Equal(t, []string{resourceEmail}, meta.Recipients, "incorrect recipients")
+	assert.Equal(t, ptr.Val(conv.GetTopic()), meta.Topic, "incorrect topic")
+}
```
```diff
@@ -2,6 +2,7 @@ package groups

 import (
 	"context"
+	"io"

 	"github.com/microsoft/kiota-abstractions-go/serialization"

@@ -33,7 +34,9 @@ type backupHandler[C graph.GetIDer, I groupsItemer] interface {

 type getItemAndAugmentInfoer[C graph.GetIDer, I groupsItemer] interface {
 	getItemer[I]
+	getItemMetadataer[C, I]
 	augmentItemInfoer[C]
+	supportsItemMetadataer[C, I]
 }

 type augmentItemInfoer[C graph.GetIDer] interface {
@@ -51,6 +54,17 @@ type getItemer[I groupsItemer] interface {
 	) (I, *details.GroupsInfo, error)
 }

+type getItemMetadataer[C graph.GetIDer, I groupsItemer] interface {
+	getItemMetadata(
+		ctx context.Context,
+		c C,
+	) (io.ReadCloser, int, error)
+}
+
+type supportsItemMetadataer[C graph.GetIDer, I groupsItemer] interface {
+	supportsItemMetadata() bool
+}
+
 // gets all containers for the resource
 type getContainerser[C graph.GetIDer] interface {
 	getContainers(
```
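Reviewer note: a hypothetical, non-generic sketch of how a handler satisfies the two new interfaces. `channelsLike` mirrors what `channelsBackupHandler` does: it implements both methods for compliance while reporting no metadata support.

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"io"
)

// Simplified (non-generic) versions of the interfaces added above.
type getItemMetadataer interface {
	getItemMetadata(ctx context.Context) (io.ReadCloser, int, error)
}

type supportsItemMetadataer interface {
	supportsItemMetadata() bool
}

// channelsLike opts out of metadata files but still satisfies both
// interfaces, the same shape as channelsBackupHandler in the diff.
type channelsLike struct{}

func (channelsLike) getItemMetadata(context.Context) (io.ReadCloser, int, error) {
	return nil, 0, errors.New("metadata files not supported")
}

func (channelsLike) supportsItemMetadata() bool { return false }

func main() {
	var h supportsItemMetadataer = channelsLike{}
	fmt.Println(h.supportsItemMetadata()) // false: callers skip .meta handling
}
```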
```diff
@@ -0,0 +1,8 @@
+package metadata
+
+// ConversationPostMetadata stores metadata for a given conversation post,
+// stored as a .meta file in kopia.
+type ConversationPostMetadata struct {
+	Recipients []string `json:"recipients,omitempty"`
+	Topic      string   `json:"topic,omitempty"`
+}
```
```diff
@@ -1,6 +1,8 @@
 package metadata

-import "strings"
+import (
+	"strings"
+)

 const (
 	MetaFileSuffix = ".meta"
```
```diff
@@ -9,9 +9,12 @@ func IsMetadataFile(p path.Path) bool {
 	case path.OneDriveService:
 		return HasMetaSuffix(p.Item())

-	case path.SharePointService, path.GroupsService:
+	case path.SharePointService:
 		return p.Category() == path.LibrariesCategory && HasMetaSuffix(p.Item())

+	case path.GroupsService:
+		return p.Category() == path.LibrariesCategory && HasMetaSuffix(p.Item()) ||
+			p.Category() == path.ConversationPostsCategory && HasMetaSuffix(p.Item())
+
 	default:
 		return false
 	}
```
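Reviewer note: a simplified stand-alone mirror of the updated `IsMetadataFile` branch for groups. The string categories here are illustrative; the real function operates on `path.Path` and the corso category types.

```go
package main

import (
	"fmt"
	"strings"
)

// isGroupsMetadataFile mirrors the new GroupsService case above: groups now
// recognizes .meta files in two categories, SharePoint-style libraries and
// the new conversation posts. Channel messages never classify as metadata.
func isGroupsMetadataFile(category, itemName string) bool {
	hasMetaSuffix := strings.HasSuffix(itemName, ".meta")

	return (category == "libraries" || category == "conversationPosts") && hasMetaSuffix
}

func main() {
	fmt.Println(isGroupsMetadataFile("conversationPosts", "post1.meta")) // true
	fmt.Println(isGroupsMetadataFile("conversationPosts", "post1.data")) // false
	fmt.Println(isGroupsMetadataFile("channelMessages", "msg1.meta"))    // false
}
```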
```diff
@@ -152,3 +152,108 @@ func (suite *MetadataUnitSuite) TestIsMetadataFile_Directories() {
 		}
 	}
 }
+
+func (suite *MetadataUnitSuite) TestIsMetadataFile() {
+	table := []struct {
+		name       string
+		service    path.ServiceType
+		category   path.CategoryType
+		isMetaFile bool
+		expected   bool
+	}{
+		{
+			name:     "onedrive .data file",
+			service:  path.OneDriveService,
+			category: path.FilesCategory,
+		},
+		{
+			name:     "sharepoint library .data file",
+			service:  path.SharePointService,
+			category: path.LibrariesCategory,
+		},
+		{
+			name:     "group library .data file",
+			service:  path.GroupsService,
+			category: path.LibrariesCategory,
+		},
+		{
+			name:     "group conversations .data file",
+			service:  path.GroupsService,
+			category: path.ConversationPostsCategory,
+		},
+		{
+			name:       "onedrive .meta file",
+			service:    path.OneDriveService,
+			category:   path.FilesCategory,
+			isMetaFile: true,
+			expected:   true,
+		},
+		{
+			name:       "sharepoint library .meta file",
+			service:    path.SharePointService,
+			category:   path.LibrariesCategory,
+			isMetaFile: true,
+			expected:   true,
+		},
+		{
+			name:       "group library .meta file",
+			service:    path.GroupsService,
+			category:   path.LibrariesCategory,
+			isMetaFile: true,
+			expected:   true,
+		},
+		{
+			name:       "group conversations .meta file",
+			service:    path.GroupsService,
+			category:   path.ConversationPostsCategory,
+			isMetaFile: true,
+			expected:   true,
+		},
+		// For services which don't have metadata files, make sure the function
+		// returns false. We don't want .meta suffix (assuming it exists) in
+		// these cases to be interpreted as metadata files.
+		{
+			name:       "exchange service",
+			service:    path.ExchangeService,
+			category:   path.EmailCategory,
+			isMetaFile: true,
+		},
+		{
+			name:       "group channels",
+			service:    path.GroupsService,
+			category:   path.ChannelMessagesCategory,
+			isMetaFile: true,
+		},
+		{
+			name:       "lists",
+			service:    path.SharePointService,
+			category:   path.ListsCategory,
+			isMetaFile: true,
+		},
+	}
+
+	for _, test := range table {
+		suite.Run(test.name, func() {
+			t := suite.T()
+
+			fileName := "file"
+			if test.isMetaFile {
+				fileName += metadata.MetaFileSuffix
+			} else {
+				fileName += metadata.DataFileSuffix
+			}
+
+			p, err := path.Build(
+				"t",
+				"u",
+				test.service,
+				test.category,
+				true,
+				"some", "path", "for", fileName)
+			require.NoError(t, err, clues.ToCore(err))
+
+			actual := metadata.IsMetadataFile(p)
+			assert.Equal(t, test.expected, actual)
+		})
+	}
+}
```