Use generic unindexed item struct (#4366)
Switch all metadata files (of all types) to use the generic unindexed item type. Transitioned items include:

* previous paths and deltas for all services
* site list for groups
* drive .meta files

---

#### Does this PR need a docs update or release note?

- [ ] ✅ Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] ⛔ No

#### Type of change

- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #4191

#### Test Plan

- [ ] 💪 Manual
- [x] ⚡ Unit test
- [x] 💚 E2E
This commit is contained in:
parent
5521177aee
commit
a806ab59bf
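For readers outside the codebase, the gist of the change: metadata payloads (delta tokens, previous-path maps, `.meta` files) are now emitted as generic "unindexed" prefetched items instead of per-package item structs. The sketch below is a minimal, self-contained illustration of that pattern; the `Item` interface and constructor are simplified stand-ins inferred from the call sites in this diff, not corso's actual `data` package.

```go
package main

// Minimal sketch of the "unindexed prefetched item" pattern this PR converges
// on. The real types (data.Item, data.NewUnindexedPrefetchedItem) live in
// corso's internal packages; these are simplified stand-ins.

import (
	"bytes"
	"fmt"
	"io"
	"time"
)

// Item mirrors the read-only surface the metadata collections rely on.
type Item interface {
	ID() string
	ToReader() io.ReadCloser
	Deleted() bool
	ModTime() time.Time
}

// prefetchedItem holds already-materialized content; nothing is fetched lazily
// and nothing is recorded in backup details (hence "unindexed").
type prefetchedItem struct {
	id     string
	reader io.ReadCloser
	mod    time.Time
}

// newUnindexedPrefetchedItem mimics the constructor shape used throughout the
// diff: reader first, then item ID, then modification time.
func newUnindexedPrefetchedItem(r io.ReadCloser, id string, mod time.Time) Item {
	return prefetchedItem{id: id, reader: r, mod: mod}
}

func (p prefetchedItem) ID() string              { return p.id }
func (p prefetchedItem) ToReader() io.ReadCloser { return p.reader }
func (p prefetchedItem) Deleted() bool           { return false }
func (p prefetchedItem) ModTime() time.Time      { return p.mod }

func main() {
	payload := []byte(`{"deltaToken":"abc123"}`)
	item := newUnindexedPrefetchedItem(
		io.NopCloser(bytes.NewReader(payload)),
		"previous-delta.json",
		time.Now())

	body, _ := io.ReadAll(item.ToReader())
	fmt.Println(item.ID(), string(body))
}
```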
```diff
@@ -33,11 +33,7 @@ const (
 	MaxOneNoteFileSize = 2 * 1024 * 1024 * 1024
 )
 
-var (
-	_ data.BackupCollection = &Collection{}
-	_ data.Item             = &metadata.Item{}
-	_ data.ItemModTime      = &metadata.Item{}
-)
+var _ data.BackupCollection = &Collection{}
 
 // Collection represents a set of OneDrive objects retrieved from M365
 type Collection struct {
@@ -588,13 +584,15 @@ func (oc *Collection) streamDriveItem(
 			return progReader, nil
 		})
 
-	oc.data <- &metadata.Item{
-		ItemID: metaFileName + metaSuffix,
-		Data:   metaReader,
+	// We wrap the reader with a lazy reader so that the progress bar is only
+	// initialized if the file is read. Since we're not actually lazily reading
+	// data just use the eager item implementation.
+	oc.data <- data.NewUnindexedPrefetchedItem(
+		metaReader,
+		metaFileName+metaSuffix,
 		// Metadata file should always use the latest time as
 		// permissions change does not update mod time.
-		Mod: time.Now(),
-	}
+		time.Now())
 
 	// Item read successfully, add to collection
 	if isFile {
```
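The comment added above contrasts lazy items (whose reader, and therefore progress bar, is only initialized on first read) with the eager prefetched item used for `.meta` files. As a rough, hypothetical illustration of what a lazily-initialized reader looks like (not corso's implementation):

```go
package main

// Hypothetical sketch of a lazily-initialized reader: side effects such as
// creating a progress bar only happen if someone actually reads the item.

import (
	"fmt"
	"io"
	"strings"
	"sync"
)

type lazyReadCloser struct {
	open func() (io.ReadCloser, error) // invoked on first Read
	once sync.Once
	rc   io.ReadCloser
	err  error
}

func (l *lazyReadCloser) Read(p []byte) (int, error) {
	l.once.Do(func() { l.rc, l.err = l.open() })
	if l.err != nil {
		return 0, l.err
	}
	return l.rc.Read(p)
}

func (l *lazyReadCloser) Close() error {
	if l.rc == nil {
		return nil // never opened, nothing to clean up
	}
	return l.rc.Close()
}

func main() {
	lr := &lazyReadCloser{open: func() (io.ReadCloser, error) {
		fmt.Println("progress bar initialized here, only because a read happened")
		return io.NopCloser(strings.NewReader("file contents")), nil
	}}
	defer lr.Close()

	body, _ := io.ReadAll(lr)
	fmt.Println(string(body))
}
```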
```diff
@@ -1,7 +1,6 @@
 package metadata
 
 import (
-	"io"
 	"time"
 )
 
```
```diff
@@ -41,17 +40,3 @@ type Metadata struct {
 	Permissions []Permission `json:"permissions,omitempty"`
 	LinkShares  []LinkShare  `json:"linkShares,omitempty"`
 }
-
-type Item struct {
-	ItemID string
-	Data   io.ReadCloser
-	Mod    time.Time
-}
-
-// Deleted implements an interface function. However, OneDrive items are marked
-// as deleted by adding them to the exclude list so this can always return
-// false.
-func (i *Item) Deleted() bool           { return false }
-func (i *Item) ID() string              { return i.ItemID }
-func (i *Item) ToReader() io.ReadCloser { return i.Data }
-func (i *Item) ModTime() time.Time      { return i.Mod }
```
```diff
@@ -5,6 +5,7 @@ import (
 	"context"
 	"encoding/json"
 	"io"
+	"time"
 
 	"github.com/alcionai/clues"
 
```
```diff
@@ -16,7 +17,7 @@ import (
 
 var (
 	_ data.BackupCollection = &MetadataCollection{}
-	_ data.Item             = &MetadataItem{}
+	_ data.Item             = &metadataItem{}
 )
 
 // MetadataCollection in a simple collection that assumes all items to be
@@ -24,7 +25,7 @@ var (
 // created. This collection has no logic for lazily fetching item data.
 type MetadataCollection struct {
 	fullPath      path.Path
-	items         []MetadataItem
+	items         []metadataItem
 	statusUpdater support.StatusUpdater
 }
 
@@ -40,23 +41,29 @@ func NewMetadataEntry(fileName string, mData any) MetadataCollectionEntry {
 	return MetadataCollectionEntry{fileName, mData}
 }
 
-func (mce MetadataCollectionEntry) toMetadataItem() (MetadataItem, error) {
+func (mce MetadataCollectionEntry) toMetadataItem() (metadataItem, error) {
 	if len(mce.fileName) == 0 {
-		return MetadataItem{}, clues.New("missing metadata filename")
+		return metadataItem{}, clues.New("missing metadata filename")
 	}
 
 	if mce.data == nil {
-		return MetadataItem{}, clues.New("missing metadata")
+		return metadataItem{}, clues.New("missing metadata")
 	}
 
 	buf := &bytes.Buffer{}
 	encoder := json.NewEncoder(buf)
 
 	if err := encoder.Encode(mce.data); err != nil {
-		return MetadataItem{}, clues.Wrap(err, "serializing metadata")
+		return metadataItem{}, clues.Wrap(err, "serializing metadata")
 	}
 
-	return NewMetadataItem(mce.fileName, buf.Bytes()), nil
+	return metadataItem{
+		Item: data.NewUnindexedPrefetchedItem(
+			io.NopCloser(buf),
+			mce.fileName,
+			time.Now()),
+		size: int64(buf.Len()),
+	}, nil
 }
 
 // MakeMetadataCollection creates a metadata collection that has a file
```
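The new `toMetadataItem` encodes the value into a buffer, wraps the buffer as the item's reader, and records `buf.Len()` as a separate `size` field; the length has to be captured at construction time because the buffer reports zero once a reader drains it. A small stand-alone sketch of that step, using a stand-in item type rather than corso's constructor:

```go
package main

// Sketch of the serialize-then-wrap step in toMetadataItem above. The item
// type here is a stand-in; the point is that the byte count is captured from
// the buffer at construction time, before readers drain it.

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
)

type sizedItem struct {
	id   string
	body io.ReadCloser
	size int64
}

func toSizedItem(fileName string, payload any) (sizedItem, error) {
	buf := &bytes.Buffer{}

	if err := json.NewEncoder(buf).Encode(payload); err != nil {
		return sizedItem{}, fmt.Errorf("serializing metadata: %w", err)
	}

	return sizedItem{
		id:   fileName,
		body: io.NopCloser(buf),
		// bytes.Buffer reports 0 once drained, so record the length now.
		size: int64(buf.Len()),
	}, nil
}

func main() {
	item, err := toSizedItem("previous-paths.json", map[string]string{"folderID": "/drives/b/root"})
	if err != nil {
		panic(err)
	}

	content, _ := io.ReadAll(item.body)
	fmt.Printf("id=%s size=%d content=%s", item.id, item.size, content)
}
```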
```diff
@@ -71,7 +78,7 @@ func MakeMetadataCollection(
 		return nil, nil
 	}
 
-	items := make([]MetadataItem, 0, len(metadata))
+	items := make([]metadataItem, 0, len(metadata))
 
 	for _, md := range metadata {
 		item, err := md.toMetadataItem()
@@ -89,7 +96,7 @@ func MakeMetadataCollection(
 
 func NewMetadataCollection(
 	p path.Path,
-	items []MetadataItem,
+	items []metadataItem,
 	statusUpdater support.StatusUpdater,
 ) *MetadataCollection {
 	return &MetadataCollection{
@@ -148,7 +155,7 @@ func (md MetadataCollection) Items(
 		defer close(res)
 
 		for _, item := range md.items {
-			totalBytes += int64(len(item.data))
+			totalBytes += item.size
 			res <- item
 		}
 	}()
```
```diff
@@ -156,36 +163,7 @@ func (md MetadataCollection) Items(
 	return res
 }
 
-// MetadataItem is an in-memory data.Item implementation. MetadataItem does
-// not implement additional interfaces like data.ItemInfo, so it should only
-// be used for items with a small amount of content that don't need to be added
-// to backup details.
-//
-// Currently the expected use-case for this struct are storing metadata for a
-// backup like delta tokens or a mapping of container IDs to container paths.
-type MetadataItem struct {
-	// uuid is an ID that can be used to refer to the item.
-	uuid string
-	// data is a buffer of data that the item refers to.
-	data []byte
-}
-
-func NewMetadataItem(uuid string, itemData []byte) MetadataItem {
-	return MetadataItem{
-		uuid: uuid,
-		data: itemData,
-	}
-}
-
-func (mi MetadataItem) ID() string {
-	return mi.uuid
-}
-
-// TODO(ashmrtn): Fill in once we know how to handle this.
-func (mi MetadataItem) Deleted() bool {
-	return false
-}
-
-func (mi MetadataItem) ToReader() io.ReadCloser {
-	return io.NopCloser(bytes.NewReader(mi.data))
+type metadataItem struct {
+	data.Item
+	size int64
 }
```
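`metadataItem` embeds the `data.Item` interface, so `ID`, `ToReader`, `Deleted`, and `ModTime` are delegated to the wrapped item while the struct adds only the byte count the collection needs for its status totals. A generic sketch of that Go embedding pattern, using stand-in types:

```go
package main

// Sketch of the interface-embedding pattern behind metadataItem: a struct
// embeds an interface value, automatically delegating its methods, and layers
// on one extra field. Names here are stand-ins, not corso's types.

import (
	"bytes"
	"fmt"
	"io"
)

type Item interface {
	ID() string
	ToReader() io.ReadCloser
}

type memoryItem struct {
	id   string
	body []byte
}

func (m memoryItem) ID() string              { return m.id }
func (m memoryItem) ToReader() io.ReadCloser { return io.NopCloser(bytes.NewReader(m.body)) }

// sizedItem delegates Item's methods to the embedded value and adds a size,
// so callers can total bytes without re-reading the content.
type sizedItem struct {
	Item
	size int64
}

func main() {
	payload := []byte(`{"delta":"token"}`)
	it := sizedItem{Item: memoryItem{id: "delta.json", body: payload}, size: int64(len(payload))}

	// ID and ToReader come from the embedded Item; size is the added field.
	content, _ := io.ReadAll(it.ToReader())
	fmt.Println(it.ID(), it.size, string(content))
}
```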
```diff
@@ -1,9 +1,11 @@
 package graph
 
 import (
 	"bytes"
 	"encoding/json"
+	"io"
 	"testing"
+	"time"
 
 	"github.com/alcionai/clues"
 	"github.com/google/uuid"
```
```diff
@@ -11,6 +13,7 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"
 
+	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/m365/support"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/fault"
```
```diff
@@ -63,10 +66,18 @@ func (suite *MetadataCollectionUnitSuite) TestItems() {
 		len(itemData),
 		"Requires same number of items and data")
 
-	items := []MetadataItem{}
+	items := []metadataItem{}
 
 	for i := 0; i < len(itemNames); i++ {
-		items = append(items, NewMetadataItem(itemNames[i], itemData[i]))
+		items = append(
+			items,
+			metadataItem{
+				Item: data.NewUnindexedPrefetchedItem(
+					io.NopCloser(bytes.NewReader(itemData[i])),
+					itemNames[i],
+					time.Time{}),
+				size: int64(len(itemData[i])),
+			})
 	}
 
 	p, err := path.Build(
```
```diff
@@ -751,10 +751,6 @@ func compareDriveItem(
 	}
 
 	if isMeta {
-		var itemType *metadata.Item
-
-		assert.IsType(t, itemType, item)
-
 		var (
 			itemMeta     metadata.Metadata
 			expectedMeta metadata.Metadata
```
```diff
@@ -6,6 +6,7 @@ import (
 	"bytes"
 	"context"
 	"io"
+	"time"
 
 	"github.com/alcionai/clues"
 
```
```diff
@@ -128,7 +129,7 @@ type streamCollection struct {
 	// folderPath indicates what level in the hierarchy this collection
 	// represents
 	folderPath path.Path
-	item       *streamItem
+	item       data.Item
 }
 
 func (dc *streamCollection) FullPath() path.Path {
@@ -157,27 +158,6 @@ func (dc *streamCollection) Items(context.Context, *fault.Bus) <-chan data.Item
 	return items
 }
 
-// ---------------------------------------------------------------------------
-// item
-// ---------------------------------------------------------------------------
-
-type streamItem struct {
-	name string
-	data []byte
-}
-
-func (di *streamItem) ID() string {
-	return di.name
-}
-
-func (di *streamItem) ToReader() io.ReadCloser {
-	return io.NopCloser(bytes.NewReader(di.data))
-}
-
-func (di *streamItem) Deleted() bool {
-	return false
-}
-
 // ---------------------------------------------------------------------------
 // common reader/writer/deleter
 // ---------------------------------------------------------------------------
```
```diff
@@ -204,10 +184,10 @@ func collect(
 
 	dc := streamCollection{
 		folderPath: p,
-		item: &streamItem{
-			name: col.itemName,
-			data: bs,
-		},
+		item: data.NewUnindexedPrefetchedItem(
+			io.NopCloser(bytes.NewReader(bs)),
+			col.itemName,
+			time.Now()),
 	}
 
 	return &dc, nil
```
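With the bespoke `streamItem` gone, `collect` stores a ready-made item on the collection and `Items` only has to stream it. A loose sketch of a single-item collection in that style; the types and the `Items` signature are simplified stand-ins, not corso's API:

```go
package main

// Rough sketch of a single-item collection like streamCollection: it holds one
// pre-built item and Items() streams it over a channel before closing.

import (
	"bytes"
	"fmt"
	"io"
	"time"
)

type Item interface {
	ID() string
	ToReader() io.ReadCloser
	ModTime() time.Time
}

type prefetchedItem struct {
	id   string
	body io.ReadCloser
	mod  time.Time
}

func (p prefetchedItem) ID() string              { return p.id }
func (p prefetchedItem) ToReader() io.ReadCloser { return p.body }
func (p prefetchedItem) ModTime() time.Time      { return p.mod }

type singleItemCollection struct {
	folderPath string
	item       Item
}

// Items returns a channel that yields the collection's only item and then closes.
func (c singleItemCollection) Items() <-chan Item {
	out := make(chan Item, 1)
	out <- c.item
	close(out)
	return out
}

func main() {
	bs := []byte(`{"metadata":"payload"}`)
	col := singleItemCollection{
		folderPath: "tenant/service/user/metadata",
		item: prefetchedItem{
			id:   "fileName",
			body: io.NopCloser(bytes.NewReader(bs)),
			mod:  time.Now(),
		},
	}

	for it := range col.Items() {
		content, _ := io.ReadAll(it.ToReader())
		fmt.Println(it.ID(), string(content))
	}
}
```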