Rename generic item structs and functions (#4421)

The base (unindexed) types now take the plain names, and the variants that implement `ItemInfo` (and therefore appear in backup details) get a `WithInfo` suffix:

- `unindexedPrefetchedItem` -> `prefetchedItem`
- `prefetchedItem` -> `prefetchedItemWithInfo`
- `unindexedLazyItem` -> `lazyItem`
- `lazyItem` -> `lazyItemWithInfo`

Constructors are renamed to match, e.g. `NewUnindexedPrefetchedItem` -> `NewPrefetchedItem` and `NewLazyItem` -> `NewLazyItemWithInfo`.
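
A rough usage sketch of the renamed constructors follows; the import paths and item IDs are assumptions for illustration and are not part of this PR.

```go
// Sketch only: which constructor a producer reaches for after the rename.
// Import paths are guessed from the package names in the diff.
package example

import (
	"bytes"
	"context"
	"io"
	"time"

	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/pkg/backup/details"
	"github.com/alcionai/corso/src/pkg/fault"
)

func buildItems(
	ctx context.Context,
	body []byte,
	info details.ItemInfo,
	getter data.ItemDataGetter,
	errs *fault.Bus,
) ([]data.Item, error) {
	// Eager + indexed: carries details.ItemInfo, so it appears in backup details.
	withInfo, err := data.NewPrefetchedItemWithInfo(
		io.NopCloser(bytes.NewReader(body)),
		"itemID",
		info)
	if err != nil {
		return nil, err
	}

	// Eager + unindexed: no ItemInfo, e.g. metadata files that must never show
	// up in backup details.
	meta, err := data.NewPrefetchedItem(
		io.NopCloser(bytes.NewReader(body)),
		"metaID",
		time.Now())
	if err != nil {
		return nil, err
	}

	// Lazy variants: data is only fetched on the first ToReader().Read().
	lazyWithInfo := data.NewLazyItemWithInfo(ctx, getter, "lazyID", time.Now(), errs)
	lazyMeta := data.NewLazyItem(ctx, getter, "lazyMetaID", time.Now(), errs)

	return []data.Item{withInfo, meta, lazyWithInfo, lazyMeta}, nil
}
```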

---

#### Does this PR need a docs update or release note?

- [ ] Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] No

#### Type of change

- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #4328

#### Test Plan

- [ ] 💪 Manual
- [x] Unit test
- [x] 💚 E2E
ashmrtn, 2023-10-09 12:12:35 -07:00 (committed by GitHub)
commit 6f25be4ad2 (parent fc508d7160)
13 changed files with 76 additions and 76 deletions

View File

@@ -16,23 +16,23 @@ import (
)
var (
_ Item = &unindexedPrefetchedItem{}
_ ItemModTime = &unindexedPrefetchedItem{}
_ Item = &prefetchedItem{}
_ ItemInfo = &prefetchedItem{}
_ ItemModTime = &prefetchedItem{}
_ Item = &unindexedLazyItem{}
_ ItemModTime = &unindexedLazyItem{}
_ Item = &prefetchedItemWithInfo{}
_ ItemInfo = &prefetchedItemWithInfo{}
_ ItemModTime = &prefetchedItemWithInfo{}
_ Item = &lazyItem{}
_ ItemInfo = &lazyItem{}
_ ItemModTime = &lazyItem{}
_ Item = &lazyItemWithInfo{}
_ ItemInfo = &lazyItemWithInfo{}
_ ItemModTime = &lazyItemWithInfo{}
)
func NewDeletedItem(itemID string) Item {
return &unindexedPrefetchedItem{
return &prefetchedItem{
id: itemID,
deleted: true,
// TODO(ashmrtn): This really doesn't need to be set since deleted items are
@@ -42,11 +42,11 @@ func NewDeletedItem(itemID string) Item {
}
}
func NewUnindexedPrefetchedItem(
func NewPrefetchedItem(
reader io.ReadCloser,
itemID string,
modTime time.Time,
) (*unindexedPrefetchedItem, error) {
) (*prefetchedItem, error) {
r, err := readers.NewVersionedBackupReader(
readers.SerializationFormat{Version: readers.DefaultSerializationVersion},
reader)
@@ -54,19 +54,18 @@ func NewUnindexedPrefetchedItem(
return nil, clues.Stack(err)
}
return &unindexedPrefetchedItem{
return &prefetchedItem{
id: itemID,
reader: r,
modTime: modTime,
}, nil
}
// unindexedPrefetchedItem represents a single item retrieved from the remote
// service.
// prefetchedItem represents a single item retrieved from the remote service.
//
// This item doesn't implement ItemInfo so it's safe to use for items like
// metadata that shouldn't appear in backup details.
type unindexedPrefetchedItem struct {
type prefetchedItem struct {
id string
reader io.ReadCloser
// modTime is the modified time of the item. It should match the modTime in
@@ -79,48 +78,49 @@ type unindexedPrefetchedItem struct {
deleted bool
}
func (i unindexedPrefetchedItem) ID() string {
func (i prefetchedItem) ID() string {
return i.id
}
func (i *unindexedPrefetchedItem) ToReader() io.ReadCloser {
func (i *prefetchedItem) ToReader() io.ReadCloser {
return i.reader
}
func (i unindexedPrefetchedItem) Deleted() bool {
func (i prefetchedItem) Deleted() bool {
return i.deleted
}
func (i unindexedPrefetchedItem) ModTime() time.Time {
func (i prefetchedItem) ModTime() time.Time {
return i.modTime
}
func NewPrefetchedItem(
func NewPrefetchedItemWithInfo(
reader io.ReadCloser,
itemID string,
info details.ItemInfo,
) (*prefetchedItem, error) {
inner, err := NewUnindexedPrefetchedItem(reader, itemID, info.Modified())
) (*prefetchedItemWithInfo, error) {
inner, err := NewPrefetchedItem(reader, itemID, info.Modified())
if err != nil {
return nil, clues.Stack(err)
}
return &prefetchedItem{
unindexedPrefetchedItem: inner,
return &prefetchedItemWithInfo{
prefetchedItem: inner,
info: info,
}, nil
}
// prefetchedItem represents a single item retrieved from the remote service.
// prefetchedItemWithInfo represents a single item retrieved from the remote
// service.
//
// This item implements ItemInfo so it should be used for things that need to
// appear in backup details.
type prefetchedItem struct {
*unindexedPrefetchedItem
type prefetchedItemWithInfo struct {
*prefetchedItem
info details.ItemInfo
}
func (i prefetchedItem) Info() (details.ItemInfo, error) {
func (i prefetchedItemWithInfo) Info() (details.ItemInfo, error) {
return i.info, nil
}
@@ -131,14 +131,14 @@ type ItemDataGetter interface {
) (io.ReadCloser, *details.ItemInfo, bool, error)
}
func NewUnindexedLazyItem(
func NewLazyItem(
ctx context.Context,
itemGetter ItemDataGetter,
itemID string,
modTime time.Time,
errs *fault.Bus,
) *unindexedLazyItem {
return &unindexedLazyItem{
) *lazyItem {
return &lazyItem{
ctx: ctx,
id: itemID,
itemGetter: itemGetter,
@@ -147,13 +147,13 @@ func NewUnindexedLazyItem(
}
}
// unindexedLazyItem represents a single item retrieved from the remote service.
// It lazily fetches the item's data when the first call to ToReader().Read() is
// lazyItem represents a single item retrieved from the remote service. It
// lazily fetches the item's data when the first call to ToReader().Read() is
// made.
//
// This item doesn't implement ItemInfo so it's safe to use for items like
// metadata that shouldn't appear in backup details.
type unindexedLazyItem struct {
type lazyItem struct {
ctx context.Context
mu sync.Mutex
id string
@@ -165,19 +165,19 @@ type unindexedLazyItem struct {
// struct so we can tell if it's been set already or not.
//
// This also helps with garbage collection because now the golang garbage
// collector can collect the lazyItem struct once the storage engine is done
// with it. The ItemInfo struct needs to stick around until the end of the
// backup though as backup details is written last.
// collector can collect the lazyItemWithInfo struct once the storage engine
// is done with it. The ItemInfo struct needs to stick around until the end of
// the backup though as backup details is written last.
info *details.ItemInfo
delInFlight bool
}
func (i *unindexedLazyItem) ID() string {
func (i *lazyItem) ID() string {
return i.id
}
func (i *unindexedLazyItem) ToReader() io.ReadCloser {
func (i *lazyItem) ToReader() io.ReadCloser {
return lazy.NewLazyReadCloser(func() (io.ReadCloser, error) {
// Don't allow getting Item info while trying to initialize said info.
// GetData could be a long running call, but in theory nothing should happen
@@ -219,23 +219,23 @@ func (i *unindexedLazyItem) ToReader() io.ReadCloser {
})
}
func (i *unindexedLazyItem) Deleted() bool {
func (i *lazyItem) Deleted() bool {
return false
}
func (i *unindexedLazyItem) ModTime() time.Time {
func (i *lazyItem) ModTime() time.Time {
return i.modTime
}
func NewLazyItem(
func NewLazyItemWithInfo(
ctx context.Context,
itemGetter ItemDataGetter,
itemID string,
modTime time.Time,
errs *fault.Bus,
) *lazyItem {
return &lazyItem{
unindexedLazyItem: NewUnindexedLazyItem(
) *lazyItemWithInfo {
return &lazyItemWithInfo{
lazyItem: NewLazyItem(
ctx,
itemGetter,
itemID,
@@ -244,17 +244,17 @@ func NewLazyItem(
}
}
// lazyItem represents a single item retrieved from the remote service. It
// lazily fetches the item's data when the first call to ToReader().Read() is
// lazyItemWithInfo represents a single item retrieved from the remote service.
// It lazily fetches the item's data when the first call to ToReader().Read() is
// made.
//
// This item implements ItemInfo so it should be used for things that need to
// appear in backup details.
type lazyItem struct {
*unindexedLazyItem
type lazyItemWithInfo struct {
*lazyItem
}
func (i *lazyItem) Info() (details.ItemInfo, error) {
func (i *lazyItemWithInfo) Info() (details.ItemInfo, error) {
i.mu.Lock()
defer i.mu.Unlock()
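
Not in the diff, but for context on the lazy types above: `ToReader()` hands back a reader that defers the remote fetch until the first `Read` call. Below is a minimal standalone sketch of that pattern, assuming nothing about the real `lazy.NewLazyReadCloser` beyond what the surrounding code shows.

```go
// Standalone illustration of the lazy-read pattern behind lazyItem.ToReader():
// the fetch callback runs on the first Read, not when the item is constructed.
package example

import "io"

type lazyReadCloser struct {
	fetch func() (io.ReadCloser, error) // e.g. download the item from the remote service
	rc    io.ReadCloser
}

func (l *lazyReadCloser) Read(p []byte) (int, error) {
	if l.rc == nil {
		rc, err := l.fetch()
		if err != nil {
			return 0, err
		}
		l.rc = rc
	}
	return l.rc.Read(p)
}

func (l *lazyReadCloser) Close() error {
	if l.rc == nil {
		return nil
	}
	return l.rc.Close()
}
```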

View File

@@ -51,7 +51,7 @@ func TestItemUnitSuite(t *testing.T) {
}
func (suite *ItemUnitSuite) TestUnindexedPrefetchedItem() {
prefetch, err := data.NewUnindexedPrefetchedItem(
prefetch, err := data.NewPrefetchedItem(
io.NopCloser(bytes.NewReader([]byte{})),
"foo",
time.Time{})
@@ -69,7 +69,7 @@ func (suite *ItemUnitSuite) TestUnindexedLazyItem() {
ctx, flush := tester.NewContext(t)
defer flush()
lazy := data.NewUnindexedLazyItem(
lazy := data.NewLazyItem(
ctx,
nil,
"foo",
@@ -148,7 +148,7 @@ func (suite *ItemUnitSuite) TestPrefetchedItem() {
suite.Run(test.name, func() {
t := suite.T()
item, err := data.NewPrefetchedItem(test.reader, id, test.info)
item, err := data.NewPrefetchedItemWithInfo(test.reader, id, test.info)
require.NoError(t, err, clues.ToCore(err))
assert.Equal(t, id, item.ID(), "ID")
@@ -291,7 +291,7 @@ func (suite *ItemUnitSuite) TestLazyItem() {
defer test.mid.check(t, true)
item := data.NewLazyItem(
item := data.NewLazyItemWithInfo(
ctx,
test.mid,
id,
@@ -354,7 +354,7 @@ func (suite *ItemUnitSuite) TestLazyItem_DeletedInFlight() {
mid := &mockItemDataGetter{delInFlight: true}
defer mid.check(t, true)
item := data.NewLazyItem(ctx, mid, id, now, errs)
item := data.NewLazyItemWithInfo(ctx, mid, id, now, errs)
assert.Equal(t, id, item.ID(), "ID")
assert.False(t, item.Deleted(), "deleted")
@@ -400,7 +400,7 @@ func (suite *ItemUnitSuite) TestLazyItem_InfoBeforeReadErrors() {
mid := &mockItemDataGetter{}
defer mid.check(t, false)
item := data.NewLazyItem(ctx, mid, id, now, errs)
item := data.NewLazyItemWithInfo(ctx, mid, id, now, errs)
assert.Equal(t, id, item.ID(), "ID")
assert.False(t, item.Deleted(), "deleted")

View File

@@ -575,7 +575,7 @@ func (oc *Collection) streamDriveItem(
// This ensures that downloads won't be attempted unless that consumer
// attempts to read bytes. Assumption is that kopia will check things
// like file modtimes before attempting to read.
oc.data <- data.NewLazyItem(
oc.data <- data.NewLazyItemWithInfo(
ctx,
&lazyItemGetter{
info: &itemInfo,
@@ -600,7 +600,7 @@ func (oc *Collection) streamDriveItem(
return progReader, nil
})
storeItem, err := data.NewUnindexedPrefetchedItem(
storeItem, err := data.NewPrefetchedItem(
metaReader,
metaFileName+metaSuffix,
// Metadata file should always use the latest time as
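
For context on the call-site changes above: the drive producer sends the file body as a lazy, indexed item and its metadata companion as an eager, unindexed one. A hedged sketch of that shape follows; only the `data.New*` constructor calls come from this PR, the channel and parameter names are illustrative.

```go
// Sketch of the producer shape around streamDriveItem. Everything except the
// data.New* constructor calls is illustrative.
package example

import (
	"context"
	"io"
	"time"

	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/pkg/fault"
)

func streamDriveItemSketch(
	ctx context.Context,
	out chan<- data.Item,
	getter data.ItemDataGetter,
	metaReader io.ReadCloser,
	itemID, metaName string,
	modTime time.Time,
	errs *fault.Bus,
) error {
	// File content: fetched lazily, indexed in backup details via its ItemInfo.
	out <- data.NewLazyItemWithInfo(ctx, getter, itemID, modTime, errs)

	// Metadata companion: already in memory and never indexed in backup
	// details; time.Now() mirrors the call site above.
	meta, err := data.NewPrefetchedItem(metaReader, metaName, time.Now())
	if err != nil {
		return err
	}
	out <- meta

	return nil
}
```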

View File

@@ -278,7 +278,7 @@ func (col *prefetchCollection) streamItems(
return
}
item, err := data.NewPrefetchedItem(
item, err := data.NewPrefetchedItemWithInfo(
io.NopCloser(bytes.NewReader(itemData)),
id,
details.ItemInfo{Exchange: info})
@@ -403,7 +403,7 @@ func (col *lazyFetchCollection) streamItems(
"service", path.ExchangeService.String(),
"category", col.Category().String())
stream <- data.NewLazyItem(
stream <- data.NewLazyItemWithInfo(
ictx,
&lazyItemGetter{
userID: user,

View File

@@ -56,7 +56,7 @@ func (suite *CollectionUnitSuite) TestPrefetchedItem_Reader() {
suite.Run(test.name, func() {
t := suite.T()
ed, err := data.NewPrefetchedItem(
ed, err := data.NewPrefetchedItemWithInfo(
io.NopCloser(bytes.NewReader(test.readData)),
"itemID",
details.ItemInfo{})
@@ -494,7 +494,7 @@ func (suite *CollectionUnitSuite) TestLazyItem_NoRead_GetInfo_Errors() {
ctx, flush := tester.NewContext(t)
defer flush()
li := data.NewLazyItem(
li := data.NewLazyItemWithInfo(
ctx,
nil,
"itemID",
@@ -552,7 +552,7 @@ func (suite *CollectionUnitSuite) TestLazyItem_GetDataErrors() {
SerializeErr: test.serializeErr,
}
li := data.NewLazyItem(
li := data.NewLazyItemWithInfo(
ctx,
&lazyItemGetter{
userID: "userID",
@@ -592,7 +592,7 @@ func (suite *CollectionUnitSuite) TestLazyItem_ReturnsEmptyReaderOnDeletedInFlig
getter := &mock.ItemGetSerialize{GetErr: graph.ErrDeletedInFlight}
li := data.NewLazyItem(
li := data.NewLazyItemWithInfo(
ctx,
&lazyItemGetter{
userID: "userID",
@@ -645,7 +645,7 @@ func (suite *CollectionUnitSuite) TestLazyItem() {
getter := &mock.ItemGetSerialize{GetData: testData}
li := data.NewLazyItem(
li := data.NewLazyItemWithInfo(
ctx,
&lazyItemGetter{
userID: "userID",

View File

@@ -176,7 +176,7 @@ func (col *Collection) streamItems(ctx context.Context, errs *fault.Bus) {
info.ParentPath = col.LocationPath().String()
storeItem, err := data.NewPrefetchedItem(
storeItem, err := data.NewPrefetchedItemWithInfo(
io.NopCloser(bytes.NewReader(itemData)),
id,
details.ItemInfo{Groups: info})

View File

@@ -49,7 +49,7 @@ func (suite *CollectionUnitSuite) TestPrefetchedItem_Reader() {
suite.Run(test.name, func() {
t := suite.T()
ed, err := data.NewPrefetchedItem(
ed, err := data.NewPrefetchedItemWithInfo(
io.NopCloser(bytes.NewReader(test.readData)),
"itemID",
details.ItemInfo{})

View File

@@ -212,7 +212,7 @@ func (sc *Collection) retrieveLists(
metrics.Successes++
item, err := data.NewPrefetchedItem(
item, err := data.NewPrefetchedItemWithInfo(
io.NopCloser(bytes.NewReader(byteArray)),
ptr.Val(lst.GetId()),
details.ItemInfo{SharePoint: ListToSPInfo(lst, size)})
@@ -279,7 +279,7 @@ func (sc *Collection) retrievePages(
metrics.Bytes += size
metrics.Successes++
item, err := data.NewPrefetchedItem(
item, err := data.NewPrefetchedItemWithInfo(
io.NopCloser(bytes.NewReader(byteArray)),
ptr.Val(pg.GetId()),
details.ItemInfo{SharePoint: pageToSPInfo(pg, root, size)})

View File

@@ -103,7 +103,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
byteArray, err := ow.GetSerializedContent()
require.NoError(t, err, clues.ToCore(err))
data, err := data.NewPrefetchedItem(
data, err := data.NewPrefetchedItemWithInfo(
io.NopCloser(bytes.NewReader(byteArray)),
name,
details.ItemInfo{SharePoint: ListToSPInfo(listing, int64(len(byteArray)))})
@@ -133,7 +133,7 @@ func (suite *SharePointCollectionSuite) TestCollection_Items() {
page, err := betaAPI.CreatePageFromBytes(byteArray)
require.NoError(t, err, clues.ToCore(err))
data, err := data.NewPrefetchedItem(
data, err := data.NewPrefetchedItemWithInfo(
io.NopCloser(bytes.NewReader(byteArray)),
itemName,
details.ItemInfo{SharePoint: betaAPI.PageInfo(page, int64(len(byteArray)))})
@@ -196,7 +196,7 @@ func (suite *SharePointCollectionSuite) TestListCollection_Restore() {
byteArray, err := service.Serialize(listing)
require.NoError(t, err, clues.ToCore(err))
listData, err := data.NewPrefetchedItem(
listData, err := data.NewPrefetchedItemWithInfo(
io.NopCloser(bytes.NewReader(byteArray)),
testName,
details.ItemInfo{SharePoint: ListToSPInfo(listing, int64(len(byteArray)))})

View File

@@ -57,7 +57,7 @@ func (mce MetadataCollectionEntry) toMetadataItem() (metadataItem, error) {
return metadataItem{}, clues.Wrap(err, "serializing metadata")
}
item, err := data.NewUnindexedPrefetchedItem(
item, err := data.NewPrefetchedItem(
io.NopCloser(buf),
mce.fileName,
time.Now())

View File

@@ -70,7 +70,7 @@ func (suite *MetadataCollectionUnitSuite) TestItems() {
items := []metadataItem{}
for i := 0; i < len(itemNames); i++ {
item, err := data.NewUnindexedPrefetchedItem(
item, err := data.NewPrefetchedItem(
io.NopCloser(bytes.NewReader(itemData[i])),
itemNames[i],
time.Time{})

View File

@@ -109,7 +109,7 @@ func (suite *SharePointPageSuite) TestRestoreSinglePage() {
//nolint:lll
byteArray := spMock.Page("Byte Test")
pageData, err := data.NewUnindexedPrefetchedItem(
pageData, err := data.NewPrefetchedItem(
io.NopCloser(bytes.NewReader(byteArray)),
testName,
time.Now())

View File

@@ -182,7 +182,7 @@ func collect(
return nil, clues.Wrap(err, "marshalling body").WithClues(ctx)
}
item, err := data.NewUnindexedPrefetchedItem(
item, err := data.NewPrefetchedItem(
io.NopCloser(bytes.NewReader(bs)),
col.itemName,
time.Now())