Differentiate file suffixes between channel messages and conversations
parent 7477c58698
commit 5230e60b24

@@ -61,6 +61,11 @@ func (bh mockBackupHandler) augmentItemInfo(
 	// no-op
 }
 
+//lint:ignore U1000 false linter issue due to generics
+func (bh mockBackupHandler) supportsItemMetadata() bool {
+	return false
+}
+
 func (bh mockBackupHandler) canMakeDeltaQueries() bool {
 	return true
 }

@@ -125,6 +125,12 @@ func (bh channelsBackupHandler) augmentItemInfo(
 	// no-op
 }
 
+//lint:ignore U1000 false linter issue due to generics
+func (bh channelsBackupHandler) supportsItemMetadata() bool {
+	// No .data and .meta files for channel messages
+	return false
+}
+
 func channelContainer(ch models.Channelable) container[models.Channelable] {
 	return container[models.Channelable]{
 		storageDirFolders: path.Elements{ptr.Val(ch.GetId())},

@@ -348,8 +348,12 @@ func (col *lazyFetchCollection[C, I]) streamItems(ctx context.Context, errs *fau
 		// deleted items in a conversation. It might be added in the future
 		// if graph supports it, so make sure we put up both .data and .meta
 		// files for deletions.
-		col.stream <- data.NewDeletedItem(id + metadata.DataFileSuffix)
-		col.stream <- data.NewDeletedItem(id + metadata.MetaFileSuffix)
+		if col.getAndAugment.supportsItemMetadata() {
+			col.stream <- data.NewDeletedItem(id + metadata.DataFileSuffix)
+			col.stream <- data.NewDeletedItem(id + metadata.MetaFileSuffix)
+		} else {
+			col.stream <- data.NewDeletedItem(id)
+		}
 
 		atomic.AddInt64(&streamedItems, 1)
 		col.Counter.Inc(count.StreamItemsRemoved)

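The branch above is the heart of the change for deletions: handlers that support item metadata (conversations) keep the two-file tombstone naming, while channel messages fall back to the bare item ID. A minimal, self-contained sketch of that naming decision, assuming the metadata suffix constants resolve to ".data" and ".meta" (the real code streams data.NewDeletedItem values rather than returning strings):

package main

import "fmt"

// deletedItemNames mirrors the branch added above: handlers that support
// item metadata (conversations) emit a .data/.meta pair per deleted item,
// while handlers that don't (channel messages) emit just the bare ID.
func deletedItemNames(id string, supportsItemMetadata bool) []string {
	const (
		dataFileSuffix = ".data" // assumed value of metadata.DataFileSuffix
		metaFileSuffix = ".meta" // assumed value of metadata.MetaFileSuffix
	)

	if supportsItemMetadata {
		return []string{id + dataFileSuffix, id + metaFileSuffix}
	}

	return []string{id}
}

func main() {
	fmt.Println(deletedItemNames("post1", true)) // [post1.data post1.meta]
	fmt.Println(deletedItemNames("msg1", false)) // [msg1]
}
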
@@ -378,6 +382,10 @@ func (col *lazyFetchCollection[C, I]) streamItems(ctx context.Context, errs *fau
 			"item_id", id,
 			"parent_path", path.LoggableDir(col.LocationPath().String()))
 
+		// Conversation posts carry a .data suffix, while channel messages
+		// don't have any suffix. Metadata files are only supported for conversations.
+		dataFile := id
+
 		// Handle metadata before data so that if metadata file fails,
 		// we are not left with an orphaned data file.
 		//

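The hunk above only introduces dataFile := id together with the naming comment; the conditional suffixing itself falls outside the lines shown here. As a hedged illustration of the rule the comment describes (suffix values assumed, helper name hypothetical):

// itemFileNames sketches the naming rule described in the comment above:
// conversation posts (metadata supported) get ".data"/".meta" suffixes,
// channel messages keep the bare ID and have no metadata file at all.
// The suffix literals stand in for metadata.DataFileSuffix/MetaFileSuffix.
func itemFileNames(id string, supportsItemMetadata bool) (dataFile, metaFile string) {
	if !supportsItemMetadata {
		return id, ""
	}

	return id + ".data", id + ".meta"
}
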
@@ -396,15 +404,14 @@ func (col *lazyFetchCollection[C, I]) streamItems(ctx context.Context, errs *fau
 		if err != nil && !errors.Is(err, errMetadataFilesNotSupported) {
 			errs.AddRecoverable(ctx, clues.StackWC(ctx, err))
 
 			return
 		}
 
-		if err == nil {
 		// Skip adding progress reader for metadata files. It doesn't add
 		// much value.
 		storeItem, err := data.NewPrefetchedItem(
 			itemMeta,
-			id+metadata.MetaFileSuffix,
+			metaFile,
 			// Use the same last modified time as post's.
 			modTime)
 		if err != nil {

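The error check above deliberately tolerates errMetadataFilesNotSupported, so a handler without per-item metadata can opt out of the .meta file without raising a recoverable error. A rough sketch of how such a getter could signal that, assuming it lives in the same package as the sentinel and imports context and io (the handler type and parameter list are placeholders, not the repository's actual signature):

// channelMessageHandlerSketch is a hypothetical handler with no per-item
// metadata. Returning the errMetadataFilesNotSupported sentinel tells
// streamItems to skip the .meta file instead of recording a recoverable error.
type channelMessageHandlerSketch struct{}

func (channelMessageHandlerSketch) getItemMetadata(
	ctx context.Context,
) (io.ReadCloser, int, error) {
	return nil, 0, errMetadataFilesNotSupported
}
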
@@ -422,12 +429,12 @@ func (col *lazyFetchCollection[C, I]) streamItems(ctx context.Context, errs *fau
 				modTime: modTime,
 				getAndAugment: col.getAndAugment,
 				resourceID: col.protectedResource,
-				itemID: id,
+				itemID: dataFile,
 				containerIDs: col.FullPath().Folders(),
 				contains: col.contains,
 				parentPath: col.LocationPath().String(),
 			},
-			id+metadata.DataFileSuffix,
+			dataFile,
 			modTime,
 			col.Counter,
 			el)

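Combined with the dataFile change above, the names handed to the stored items now differ per resource type. A tiny illustrative check against the itemFileNames sketch from earlier, using the standard testing package (suffix values assumed; this is not part of the commit's own test changes):

func TestItemFileNamesSketch(t *testing.T) {
	// Conversation posts: metadata supported, so both files carry suffixes.
	dataFile, metaFile := itemFileNames("post1", true)
	if dataFile != "post1.data" || metaFile != "post1.meta" {
		t.Fatalf("unexpected conversation names: %q, %q", dataFile, metaFile)
	}

	// Channel messages: bare ID and no metadata file at all.
	dataFile, metaFile = itemFileNames("msg1", false)
	if dataFile != "msg1" || metaFile != "" {
		t.Fatalf("unexpected channel message names: %q, %q", dataFile, metaFile)
	}
}
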
@@ -180,6 +180,11 @@ func (getAndAugmentChannelMessage) augmentItemInfo(*details.GroupsInfo, models.C
 	// no-op
 }
 
+//lint:ignore U1000 false linter issue due to generics
+func (getAndAugmentChannelMessage) supportsItemMetadata() bool {
+	return false
+}
+
 func (suite *CollectionUnitSuite) TestPrefetchCollection_streamItems() {
 	var (
 		t = suite.T()

@@ -322,6 +327,11 @@ func (m *getAndAugmentConversation) augmentItemInfo(*details.GroupsInfo, models.
 	// no-op
 }
 
+//lint:ignore U1000 false linter issue due to generics
+func (m *getAndAugmentConversation) supportsItemMetadata() bool {
+	return true
+}
+
 func (m *getAndAugmentConversation) check(t *testing.T, expected []string) {
 	// Sort before comparing. We could use a set, but that would prevent us from
 	// detecting duplicates.

@@ -170,6 +170,11 @@ func (bh conversationsBackupHandler) augmentItemInfo(
 	dgi.Post.Topic = ptr.Val(c.GetTopic())
 }
 
+//lint:ignore U1000 false linter issue due to generics
+func (bh conversationsBackupHandler) supportsItemMetadata() bool {
+	return true
+}
+
 func conversationThreadContainer(
 	c models.Conversationable,
 	t models.ConversationThreadable,

@@ -36,6 +36,7 @@ type getItemAndAugmentInfoer[C graph.GetIDer, I groupsItemer] interface {
 	getItemer[I]
 	getItemMetadataer[C, I]
 	augmentItemInfoer[C]
+	supportsItemMetadataer[C, I]
 }
 
 type augmentItemInfoer[C graph.GetIDer] interface {

@@ -60,6 +61,10 @@ type getItemMetadataer[C graph.GetIDer, I groupsItemer] interface {
 	) (io.ReadCloser, int, error)
 }
 
+type supportsItemMetadataer[C graph.GetIDer, I groupsItemer] interface {
+	supportsItemMetadata() bool
+}
+
 // gets all containers for the resource
 type getContainerser[C graph.GetIDer] interface {
 	getContainers(

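The new supportsItemMetadataer capability is embedded into getItemAndAugmentInfoer, so every backup handler answers the question once and streamItems simply branches on the answer. A standalone sketch of the same composition pattern with simplified, non-generic interfaces (the repository's real interfaces are generic over the container and item model types):

package main

import "fmt"

// metadataSupporter mirrors supportsItemMetadataer: a one-method capability.
type metadataSupporter interface {
	supportsItemMetadata() bool
}

// backupHandler mirrors getItemAndAugmentInfoer: the capability is embedded
// alongside the rest of the handler behavior.
type backupHandler interface {
	metadataSupporter
	dataFileName(id string) string
}

type conversationHandler struct{}

func (conversationHandler) supportsItemMetadata() bool    { return true }
func (conversationHandler) dataFileName(id string) string { return id + ".data" }

type channelMessageHandler struct{}

func (channelMessageHandler) supportsItemMetadata() bool    { return false }
func (channelMessageHandler) dataFileName(id string) string { return id }

func main() {
	for _, h := range []backupHandler{conversationHandler{}, channelMessageHandler{}} {
		fmt.Println(h.dataFileName("item1"), h.supportsItemMetadata())
	}
}
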