Rename generic item structs and functions (#4421)
- `unindexedPrefetchedItem` -> `prefetchedItem`
- `prefetchedItem` -> `prefetchedItemWithInfo`
- `unindexedLazyItem` -> `lazyItem`
- `lazyItem` -> `lazyItemWithInfo`

---

#### Does this PR need a docs update or release note?

- [ ] ✅ Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] ⛔ No

#### Type of change

- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #4328

#### Test Plan

- [ ] 💪 Manual
- [x] ⚡ Unit test
- [x] 💚 E2E
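The mapping above only renames types and constructors; behavior is unchanged. As a quick orientation for call sites, here is a minimal sketch of how the old constructor calls translate to the new names. It is illustrative only: the function name, local variables, and import paths are assumptions for the example, not code from this PR; the constructor signatures follow the diff below.

```go
// Hypothetical call-site sketch; import paths and local variables are assumed,
// only the constructor names and signatures come from this change.
package example

import (
    "context"
    "io"
    "time"

    "github.com/alcionai/corso/src/internal/data"
    "github.com/alcionai/corso/src/pkg/backup/details"
    "github.com/alcionai/corso/src/pkg/fault"
)

func renamedConstructors(
    ctx context.Context,
    metaReader, itemReader io.ReadCloser,
    getter data.ItemDataGetter,
    info details.ItemInfo,
    errs *fault.Bus,
) error {
    // Was data.NewUnindexedPrefetchedItem: builds an item without ItemInfo,
    // so it never shows up in backup details (e.g. metadata files).
    if _, err := data.NewPrefetchedItem(metaReader, "metadata-file", time.Now()); err != nil {
        return err
    }

    // Was data.NewPrefetchedItem: the WithInfo suffix now marks the variant
    // that implements ItemInfo and appears in backup details.
    if _, err := data.NewPrefetchedItemWithInfo(itemReader, "item-id", info); err != nil {
        return err
    }

    // Was data.NewUnindexedLazyItem / data.NewLazyItem respectively.
    _ = data.NewLazyItem(ctx, getter, "item-id", time.Now(), errs)
    _ = data.NewLazyItemWithInfo(ctx, getter, "item-id", time.Now(), errs)

    return nil
}
```

The split the names encode is the same as before: the plain constructors build items that do not implement ItemInfo (safe for metadata that must not be indexed in backup details), while the WithInfo variants wrap them and add the backup-details entry.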
This commit is contained in: parent fc508d7160, commit 6f25be4ad2
```diff
@@ -16,23 +16,23 @@ import (
 )

 var (
-    _ Item = &unindexedPrefetchedItem{}
-    _ ItemModTime = &unindexedPrefetchedItem{}
-
     _ Item = &prefetchedItem{}
-    _ ItemInfo = &prefetchedItem{}
     _ ItemModTime = &prefetchedItem{}

-    _ Item = &unindexedLazyItem{}
-    _ ItemModTime = &unindexedLazyItem{}
+    _ Item = &prefetchedItemWithInfo{}
+    _ ItemInfo = &prefetchedItemWithInfo{}
+    _ ItemModTime = &prefetchedItemWithInfo{}

     _ Item = &lazyItem{}
-    _ ItemInfo = &lazyItem{}
     _ ItemModTime = &lazyItem{}
+
+    _ Item = &lazyItemWithInfo{}
+    _ ItemInfo = &lazyItemWithInfo{}
+    _ ItemModTime = &lazyItemWithInfo{}
 )

 func NewDeletedItem(itemID string) Item {
-    return &unindexedPrefetchedItem{
+    return &prefetchedItem{
         id: itemID,
         deleted: true,
         // TODO(ashmrtn): This really doesn't need to be set since deleted items are
@@ -42,11 +42,11 @@ func NewDeletedItem(itemID string) Item {
     }
 }

-func NewUnindexedPrefetchedItem(
+func NewPrefetchedItem(
     reader io.ReadCloser,
     itemID string,
     modTime time.Time,
-) (*unindexedPrefetchedItem, error) {
+) (*prefetchedItem, error) {
     r, err := readers.NewVersionedBackupReader(
         readers.SerializationFormat{Version: readers.DefaultSerializationVersion},
         reader)
@@ -54,19 +54,18 @@ func NewUnindexedPrefetchedItem(
         return nil, clues.Stack(err)
     }

-    return &unindexedPrefetchedItem{
+    return &prefetchedItem{
         id: itemID,
         reader: r,
         modTime: modTime,
     }, nil
 }

-// unindexedPrefetchedItem represents a single item retrieved from the remote
-// service.
+// prefetchedItem represents a single item retrieved from the remote service.
 //
 // This item doesn't implement ItemInfo so it's safe to use for items like
 // metadata that shouldn't appear in backup details.
-type unindexedPrefetchedItem struct {
+type prefetchedItem struct {
     id string
     reader io.ReadCloser
     // modTime is the modified time of the item. It should match the modTime in
@@ -79,48 +78,49 @@ type unindexedPrefetchedItem struct {
     deleted bool
 }

-func (i unindexedPrefetchedItem) ID() string {
+func (i prefetchedItem) ID() string {
     return i.id
 }

-func (i *unindexedPrefetchedItem) ToReader() io.ReadCloser {
+func (i *prefetchedItem) ToReader() io.ReadCloser {
     return i.reader
 }

-func (i unindexedPrefetchedItem) Deleted() bool {
+func (i prefetchedItem) Deleted() bool {
     return i.deleted
 }

-func (i unindexedPrefetchedItem) ModTime() time.Time {
+func (i prefetchedItem) ModTime() time.Time {
     return i.modTime
 }

-func NewPrefetchedItem(
+func NewPrefetchedItemWithInfo(
     reader io.ReadCloser,
     itemID string,
     info details.ItemInfo,
-) (*prefetchedItem, error) {
-    inner, err := NewUnindexedPrefetchedItem(reader, itemID, info.Modified())
+) (*prefetchedItemWithInfo, error) {
+    inner, err := NewPrefetchedItem(reader, itemID, info.Modified())
     if err != nil {
         return nil, clues.Stack(err)
     }

-    return &prefetchedItem{
-        unindexedPrefetchedItem: inner,
+    return &prefetchedItemWithInfo{
+        prefetchedItem: inner,
         info: info,
     }, nil
 }

-// prefetchedItem represents a single item retrieved from the remote service.
+// prefetchedItemWithInfo represents a single item retrieved from the remote
+// service.
 //
 // This item implements ItemInfo so it should be used for things that need to
 // appear in backup details.
-type prefetchedItem struct {
-    *unindexedPrefetchedItem
+type prefetchedItemWithInfo struct {
+    *prefetchedItem
     info details.ItemInfo
 }

-func (i prefetchedItem) Info() (details.ItemInfo, error) {
+func (i prefetchedItemWithInfo) Info() (details.ItemInfo, error) {
     return i.info, nil
 }

@@ -131,14 +131,14 @@ type ItemDataGetter interface {
     ) (io.ReadCloser, *details.ItemInfo, bool, error)
 }

-func NewUnindexedLazyItem(
+func NewLazyItem(
     ctx context.Context,
     itemGetter ItemDataGetter,
     itemID string,
     modTime time.Time,
     errs *fault.Bus,
-) *unindexedLazyItem {
-    return &unindexedLazyItem{
+) *lazyItem {
+    return &lazyItem{
         ctx: ctx,
         id: itemID,
         itemGetter: itemGetter,
@@ -147,13 +147,13 @@ func NewUnindexedLazyItem(
     }
 }

-// unindexedLazyItem represents a single item retrieved from the remote service.
-// It lazily fetches the item's data when the first call to ToReader().Read() is
+// lazyItem represents a single item retrieved from the remote service. It
+// lazily fetches the item's data when the first call to ToReader().Read() is
 // made.
 //
 // This item doesn't implement ItemInfo so it's safe to use for items like
 // metadata that shouldn't appear in backup details.
-type unindexedLazyItem struct {
+type lazyItem struct {
     ctx context.Context
     mu sync.Mutex
     id string
@@ -165,19 +165,19 @@ type unindexedLazyItem struct {
     // struct so we can tell if it's been set already or not.
     //
     // This also helps with garbage collection because now the golang garbage
-    // collector can collect the lazyItem struct once the storage engine is done
-    // with it. The ItemInfo struct needs to stick around until the end of the
-    // backup though as backup details is written last.
+    // collector can collect the lazyItemWithInfo struct once the storage engine
+    // is done with it. The ItemInfo struct needs to stick around until the end of
+    // the backup though as backup details is written last.
     info *details.ItemInfo

     delInFlight bool
 }

-func (i *unindexedLazyItem) ID() string {
+func (i *lazyItem) ID() string {
     return i.id
 }

-func (i *unindexedLazyItem) ToReader() io.ReadCloser {
+func (i *lazyItem) ToReader() io.ReadCloser {
     return lazy.NewLazyReadCloser(func() (io.ReadCloser, error) {
         // Don't allow getting Item info while trying to initialize said info.
         // GetData could be a long running call, but in theory nothing should happen
@@ -219,23 +219,23 @@ func (i *unindexedLazyItem) ToReader() io.ReadCloser {
     })
 }

-func (i *unindexedLazyItem) Deleted() bool {
+func (i *lazyItem) Deleted() bool {
     return false
 }

-func (i *unindexedLazyItem) ModTime() time.Time {
+func (i *lazyItem) ModTime() time.Time {
     return i.modTime
 }

-func NewLazyItem(
+func NewLazyItemWithInfo(
     ctx context.Context,
     itemGetter ItemDataGetter,
     itemID string,
     modTime time.Time,
     errs *fault.Bus,
-) *lazyItem {
-    return &lazyItem{
-        unindexedLazyItem: NewUnindexedLazyItem(
+) *lazyItemWithInfo {
+    return &lazyItemWithInfo{
+        lazyItem: NewLazyItem(
             ctx,
             itemGetter,
             itemID,
@@ -244,17 +244,17 @@ func NewLazyItem(
     }
 }

-// lazyItem represents a single item retrieved from the remote service. It
-// lazily fetches the item's data when the first call to ToReader().Read() is
+// lazyItemWithInfo represents a single item retrieved from the remote service.
+// It lazily fetches the item's data when the first call to ToReader().Read() is
 // made.
 //
 // This item implements ItemInfo so it should be used for things that need to
 // appear in backup details.
-type lazyItem struct {
-    *unindexedLazyItem
+type lazyItemWithInfo struct {
+    *lazyItem
 }

-func (i *lazyItem) Info() (details.ItemInfo, error) {
+func (i *lazyItemWithInfo) Info() (details.ItemInfo, error) {
     i.mu.Lock()
     defer i.mu.Unlock()

```
```diff
@@ -51,7 +51,7 @@ func TestItemUnitSuite(t *testing.T) {
 }

 func (suite *ItemUnitSuite) TestUnindexedPrefetchedItem() {
-    prefetch, err := data.NewUnindexedPrefetchedItem(
+    prefetch, err := data.NewPrefetchedItem(
         io.NopCloser(bytes.NewReader([]byte{})),
         "foo",
         time.Time{})
@@ -69,7 +69,7 @@ func (suite *ItemUnitSuite) TestUnindexedLazyItem() {
     ctx, flush := tester.NewContext(t)
     defer flush()

-    lazy := data.NewUnindexedLazyItem(
+    lazy := data.NewLazyItem(
         ctx,
         nil,
         "foo",
@@ -148,7 +148,7 @@ func (suite *ItemUnitSuite) TestPrefetchedItem() {
         suite.Run(test.name, func() {
             t := suite.T()

-            item, err := data.NewPrefetchedItem(test.reader, id, test.info)
+            item, err := data.NewPrefetchedItemWithInfo(test.reader, id, test.info)
             require.NoError(t, err, clues.ToCore(err))

             assert.Equal(t, id, item.ID(), "ID")
@@ -291,7 +291,7 @@ func (suite *ItemUnitSuite) TestLazyItem() {

             defer test.mid.check(t, true)

-            item := data.NewLazyItem(
+            item := data.NewLazyItemWithInfo(
                 ctx,
                 test.mid,
                 id,
@@ -354,7 +354,7 @@ func (suite *ItemUnitSuite) TestLazyItem_DeletedInFlight() {
     mid := &mockItemDataGetter{delInFlight: true}
     defer mid.check(t, true)

-    item := data.NewLazyItem(ctx, mid, id, now, errs)
+    item := data.NewLazyItemWithInfo(ctx, mid, id, now, errs)

     assert.Equal(t, id, item.ID(), "ID")
     assert.False(t, item.Deleted(), "deleted")
@@ -400,7 +400,7 @@ func (suite *ItemUnitSuite) TestLazyItem_InfoBeforeReadErrors() {
     mid := &mockItemDataGetter{}
     defer mid.check(t, false)

-    item := data.NewLazyItem(ctx, mid, id, now, errs)
+    item := data.NewLazyItemWithInfo(ctx, mid, id, now, errs)

     assert.Equal(t, id, item.ID(), "ID")
     assert.False(t, item.Deleted(), "deleted")
```
```diff
@@ -575,7 +575,7 @@ func (oc *Collection) streamDriveItem(
         // This ensures that downloads won't be attempted unless that consumer
         // attempts to read bytes. Assumption is that kopia will check things
         // like file modtimes before attempting to read.
-        oc.data <- data.NewLazyItem(
+        oc.data <- data.NewLazyItemWithInfo(
             ctx,
             &lazyItemGetter{
                 info: &itemInfo,
@@ -600,7 +600,7 @@ func (oc *Collection) streamDriveItem(
             return progReader, nil
         })

-    storeItem, err := data.NewUnindexedPrefetchedItem(
+    storeItem, err := data.NewPrefetchedItem(
         metaReader,
         metaFileName+metaSuffix,
         // Metadata file should always use the latest time as
```
```diff
@@ -278,7 +278,7 @@ func (col *prefetchCollection) streamItems(
             return
         }

-        item, err := data.NewPrefetchedItem(
+        item, err := data.NewPrefetchedItemWithInfo(
             io.NopCloser(bytes.NewReader(itemData)),
             id,
             details.ItemInfo{Exchange: info})
@@ -403,7 +403,7 @@ func (col *lazyFetchCollection) streamItems(
             "service", path.ExchangeService.String(),
             "category", col.Category().String())

-        stream <- data.NewLazyItem(
+        stream <- data.NewLazyItemWithInfo(
             ictx,
             &lazyItemGetter{
                 userID: user,
```
```diff
@@ -56,7 +56,7 @@ func (suite *CollectionUnitSuite) TestPrefetchedItem_Reader() {
         suite.Run(test.name, func() {
             t := suite.T()

-            ed, err := data.NewPrefetchedItem(
+            ed, err := data.NewPrefetchedItemWithInfo(
                 io.NopCloser(bytes.NewReader(test.readData)),
                 "itemID",
                 details.ItemInfo{})
@@ -494,7 +494,7 @@ func (suite *CollectionUnitSuite) TestLazyItem_NoRead_GetInfo_Errors() {
     ctx, flush := tester.NewContext(t)
     defer flush()

-    li := data.NewLazyItem(
+    li := data.NewLazyItemWithInfo(
         ctx,
         nil,
         "itemID",
@@ -552,7 +552,7 @@ func (suite *CollectionUnitSuite) TestLazyItem_GetDataErrors() {
                 SerializeErr: test.serializeErr,
             }

-            li := data.NewLazyItem(
+            li := data.NewLazyItemWithInfo(
                 ctx,
                 &lazyItemGetter{
                     userID: "userID",
@@ -592,7 +592,7 @@ func (suite *CollectionUnitSuite) TestLazyItem_ReturnsEmptyReaderOnDeletedInFlig

     getter := &mock.ItemGetSerialize{GetErr: graph.ErrDeletedInFlight}

-    li := data.NewLazyItem(
+    li := data.NewLazyItemWithInfo(
         ctx,
         &lazyItemGetter{
             userID: "userID",
@@ -645,7 +645,7 @@ func (suite *CollectionUnitSuite) TestLazyItem() {

     getter := &mock.ItemGetSerialize{GetData: testData}

-    li := data.NewLazyItem(
+    li := data.NewLazyItemWithInfo(
         ctx,
         &lazyItemGetter{
             userID: "userID",
```
```diff
@@ -176,7 +176,7 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) {

         info.ParentPath = col.LocationPath().String()

-        storeItem, err := data.NewPrefetchedItem(
+        storeItem, err := data.NewPrefetchedItemWithInfo(
             io.NopCloser(bytes.NewReader(itemData)),
             id,
             details.ItemInfo{Groups: info})
```
```diff
@@ -49,7 +49,7 @@ func (suite *CollectionUnitSuite) TestPrefetchedItem_Reader() {
         suite.Run(test.name, func() {
             t := suite.T()

-            ed, err := data.NewPrefetchedItem(
+            ed, err := data.NewPrefetchedItemWithInfo(
                 io.NopCloser(bytes.NewReader(test.readData)),
                 "itemID",
                 details.ItemInfo{})
```
```diff
@@ -212,7 +212,7 @@ func (sc *Collection) retrieveLists(

         metrics.Successes++

-        item, err := data.NewPrefetchedItem(
+        item, err := data.NewPrefetchedItemWithInfo(
             io.NopCloser(bytes.NewReader(byteArray)),
             ptr.Val(lst.GetId()),
             details.ItemInfo{SharePoint: ListToSPInfo(lst, size)})
@@ -279,7 +279,7 @@ func (sc *Collection) retrievePages(
         metrics.Bytes += size
         metrics.Successes++

-        item, err := data.NewPrefetchedItem(
+        item, err := data.NewPrefetchedItemWithInfo(
             io.NopCloser(bytes.NewReader(byteArray)),
             ptr.Val(pg.GetId()),
             details.ItemInfo{SharePoint: pageToSPInfo(pg, root, size)})
```
```diff
@@ -103,7 +103,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
     byteArray, err := ow.GetSerializedContent()
     require.NoError(t, err, clues.ToCore(err))

-    data, err := data.NewPrefetchedItem(
+    data, err := data.NewPrefetchedItemWithInfo(
         io.NopCloser(bytes.NewReader(byteArray)),
         name,
         details.ItemInfo{SharePoint: ListToSPInfo(listing, int64(len(byteArray)))})
@@ -133,7 +133,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
     page, err := betaAPI.CreatePageFromBytes(byteArray)
     require.NoError(t, err, clues.ToCore(err))

-    data, err := data.NewPrefetchedItem(
+    data, err := data.NewPrefetchedItemWithInfo(
         io.NopCloser(bytes.NewReader(byteArray)),
         itemName,
         details.ItemInfo{SharePoint: betaAPI.PageInfo(page, int64(len(byteArray)))})
@@ -196,7 +196,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
     byteArray, err := service.Serialize(listing)
     require.NoError(t, err, clues.ToCore(err))

-    listData, err := data.NewPrefetchedItem(
+    listData, err := data.NewPrefetchedItemWithInfo(
         io.NopCloser(bytes.NewReader(byteArray)),
         testName,
         details.ItemInfo{SharePoint: ListToSPInfo(listing, int64(len(byteArray)))})
```
```diff
@@ -57,7 +57,7 @@ func (mce MetadataCollectionEntry) toMetadataItem() (metadataItem, error) {
         return metadataItem{}, clues.Wrap(err, "serializing metadata")
     }

-    item, err := data.NewUnindexedPrefetchedItem(
+    item, err := data.NewPrefetchedItem(
         io.NopCloser(buf),
         mce.fileName,
         time.Now())
```
```diff
@@ -70,7 +70,7 @@ func (suite *MetadataCollectionUnitSuite) TestItems() {
     items := []metadataItem{}

     for i := 0; i < len(itemNames); i++ {
-        item, err := data.NewUnindexedPrefetchedItem(
+        item, err := data.NewPrefetchedItem(
             io.NopCloser(bytes.NewReader(itemData[i])),
             itemNames[i],
             time.Time{})
```
```diff
@@ -109,7 +109,7 @@ func (suite *SharePointPageSuite) TestRestoreSinglePage() {
     //nolint:lll
     byteArray := spMock.Page("Byte Test")

-    pageData, err := data.NewUnindexedPrefetchedItem(
+    pageData, err := data.NewPrefetchedItem(
         io.NopCloser(bytes.NewReader(byteArray)),
         testName,
         time.Now())
```
```diff
@@ -182,7 +182,7 @@ func collect(
         return nil, clues.Wrap(err, "marshalling body").WithClues(ctx)
     }

-    item, err := data.NewUnindexedPrefetchedItem(
+    item, err := data.NewPrefetchedItem(
         io.NopCloser(bytes.NewReader(bs)),
         col.itemName,
         time.Now())
```