Remove updated field from details (#4004)

No longer required by SDK users and not exposed directly to CLI users unless they're looking at the JSON output from details. This field is not documented anywhere, though, so there are no guarantees that we technically need to uphold for it.

Manually tested:
* restoring from a backup that had this field
* making a fresh backup without this field when the merge base did have this field

---

#### Does this PR need a docs update or release note?

- [ ]  Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x]  No

#### Type of change

- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

* closes #3986

#### Test Plan

- [x] 💪 Manual
- [x]  Unit test
- [x] 💚 E2E
This commit is contained in:
ashmrtn 2023-08-11 09:32:21 -07:00 committed by GitHub
parent 893598d8ba
commit eb95b43010
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 52 additions and 163 deletions

View File

@ -231,7 +231,6 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) {
err = cp.deets.Add( err = cp.deets.Add(
d.repoPath, d.repoPath,
d.locationPath, d.locationPath,
!d.cached,
*d.info) *d.info)
if err != nil { if err != nil {
cp.errs.AddRecoverable(cp.ctx, clues.New("adding item to details"). cp.errs.AddRecoverable(cp.ctx, clues.New("adding item to details").

View File

@ -569,8 +569,6 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFile() {
assert.Len(t, entries, test.expectedNumEntries) assert.Len(t, entries, test.expectedNumEntries)
for _, entry := range entries { for _, entry := range entries {
assert.Equal(t, !cachedTest.cached, entry.Updated)
foundItems[entry.ItemRef] = true foundItems[entry.ItemRef] = true
} }

View File

@ -924,10 +924,6 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
// 47 file and 2 folder entries. // 47 file and 2 folder entries.
test.expectedUploadedFiles+test.expectedCachedFiles+2, test.expectedUploadedFiles+test.expectedCachedFiles+2,
) )
for _, entry := range details {
test.deetsUpdated(t, entry.Updated)
}
} }
checkSnapshotTags( checkSnapshotTags(
@ -1207,8 +1203,6 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
) )
for _, entry := range details { for _, entry := range details {
assert.True(t, entry.Updated)
if test.hasMetaDeets { if test.hasMetaDeets {
continue continue
} }

View File

@ -153,13 +153,12 @@ func RestoreCollection(
ctx context.Context, ctx context.Context,
repoRef path.Path, repoRef path.Path,
locationRef *path.Builder, locationRef *path.Builder,
updated bool,
info details.ItemInfo, info details.ItemInfo,
) { ) {
deetsLock.Lock() deetsLock.Lock()
defer deetsLock.Unlock() defer deetsLock.Unlock()
err = deets.Add(repoRef, locationRef, updated, info) err = deets.Add(repoRef, locationRef, info)
if err != nil { if err != nil {
// Not critical enough to need to stop restore operation. // Not critical enough to need to stop restore operation.
logger.CtxErr(ctx, err).Infow("adding restored item to details") logger.CtxErr(ctx, err).Infow("adding restored item to details")
@ -231,7 +230,7 @@ func RestoreCollection(
} }
// TODO: implement locationRef // TODO: implement locationRef
updateDeets(ictx, itemPath, &path.Builder{}, true, itemInfo) updateDeets(ictx, itemPath, &path.Builder{}, itemInfo)
atomic.AddInt64(&metricsSuccess, 1) atomic.AddInt64(&metricsSuccess, 1)
}(ctx, itemData) }(ctx, itemData)

View File

@ -263,7 +263,6 @@ func RestoreListCollection(
err = deets.Add( err = deets.Add(
itemPath, itemPath,
&path.Builder{}, // TODO: implement locationRef &path.Builder{}, // TODO: implement locationRef
true,
itemInfo) itemInfo)
if err != nil { if err != nil {
// Not critical enough to need to stop restore operation. // Not critical enough to need to stop restore operation.
@ -343,7 +342,6 @@ func RestorePageCollection(
err = deets.Add( err = deets.Add(
itemPath, itemPath,
&path.Builder{}, // TODO: implement locationRef &path.Builder{}, // TODO: implement locationRef
true,
itemInfo) itemInfo)
if err != nil { if err != nil {
// Not critical enough to need to stop restore operation. // Not critical enough to need to stop restore operation.

View File

@ -215,7 +215,6 @@ func restoreCollection(
err = deets.Add( err = deets.Add(
itemPath, itemPath,
locationRef, locationRef,
true,
details.ItemInfo{ details.ItemInfo{
Exchange: info, Exchange: info,
}) })

View File

@ -580,7 +580,7 @@ func getNewPathRefs(
entry *details.Entry, entry *details.Entry,
repoRef path.Path, repoRef path.Path,
backupVersion int, backupVersion int,
) (path.Path, *path.Builder, bool, error) { ) (path.Path, *path.Builder, error) {
// Right now we can't guarantee that we have an old location in the // Right now we can't guarantee that we have an old location in the
// previous details entry so first try a lookup without a location to see // previous details entry so first try a lookup without a location to see
// if it matches so we don't need to try parsing from the old entry. // if it matches so we don't need to try parsing from the old entry.
@ -594,35 +594,25 @@ func getNewPathRefs(
entry.Modified(), entry.Modified(),
nil) nil)
if err != nil { if err != nil {
return nil, nil, false, clues.Wrap(err, "getting new paths") return nil, nil, clues.Wrap(err, "getting new paths")
} else if newPath == nil { } else if newPath == nil {
// This entry doesn't need merging. // This entry doesn't need merging.
return nil, nil, false, nil return nil, nil, nil
} else if newLoc == nil { } else if newLoc == nil {
return nil, nil, false, clues.New("unable to find new exchange location") return nil, nil, clues.New("unable to find new exchange location")
} }
// This is kind of jank cause we're in a transitionary period, but even if return newPath, newLoc, nil
// we're consesrvative here about marking something as updated the RepoRef
// comparison in the caller should catch the change. Calendars is the only
// exception, since it uses IDs for folders, but we should already be
// populating the LocationRef for them.
//
// Without this, all OneDrive items will be marked as updated the first time
// around because OneDrive hasn't been persisting LocationRef before now.
updated := len(entry.LocationRef) > 0 && newLoc.String() != entry.LocationRef
return newPath, newLoc, updated, nil
} }
// We didn't have an exact entry, so retry with a location. // We didn't have an exact entry, so retry with a location.
locRef, err := entry.ToLocationIDer(backupVersion) locRef, err := entry.ToLocationIDer(backupVersion)
if err != nil { if err != nil {
return nil, nil, false, clues.Wrap(err, "getting previous item location") return nil, nil, clues.Wrap(err, "getting previous item location")
} }
if locRef == nil { if locRef == nil {
return nil, nil, false, clues.New("entry with empty LocationRef") return nil, nil, clues.New("entry with empty LocationRef")
} }
newPath, newLoc, err := dataFromBackup.GetNewPathRefs( newPath, newLoc, err := dataFromBackup.GetNewPathRefs(
@ -630,16 +620,14 @@ func getNewPathRefs(
entry.Modified(), entry.Modified(),
locRef) locRef)
if err != nil { if err != nil {
return nil, nil, false, clues.Wrap(err, "getting new paths with old location") return nil, nil, clues.Wrap(err, "getting new paths with old location")
} else if newPath == nil { } else if newPath == nil {
return nil, nil, false, nil return nil, nil, nil
} else if newLoc == nil { } else if newLoc == nil {
return nil, nil, false, clues.New("unable to get new paths") return nil, nil, clues.New("unable to get new paths")
} }
updated := len(entry.LocationRef) > 0 && newLoc.String() != entry.LocationRef return newPath, newLoc, nil
return newPath, newLoc, updated, nil
} }
func mergeItemsFromBase( func mergeItemsFromBase(
@ -702,7 +690,7 @@ func mergeItemsFromBase(
ictx := clues.Add(ctx, "repo_ref", rr) ictx := clues.Add(ctx, "repo_ref", rr)
newPath, newLoc, locUpdated, err := getNewPathRefs( newPath, newLoc, err := getNewPathRefs(
dataFromBackup, dataFromBackup,
entry, entry,
rr, rr,
@ -721,13 +709,9 @@ func mergeItemsFromBase(
item := entry.ItemInfo item := entry.ItemInfo
details.UpdateItem(&item, newLoc) details.UpdateItem(&item, newLoc)
// TODO(ashmrtn): This can most likely be removed altogether.
itemUpdated := newPath.String() != rr.String() || locUpdated
err = deets.Add( err = deets.Add(
newPath, newPath,
newLoc, newLoc,
itemUpdated,
item) item)
if err != nil { if err != nil {
return manifestAddedEntries, return manifestAddedEntries,

View File

@ -285,7 +285,6 @@ func makeDetailsEntry(
ItemRef: p.Item(), ItemRef: p.Item(),
LocationRef: lr, LocationRef: lr,
ItemInfo: details.ItemInfo{}, ItemInfo: details.ItemInfo{},
Updated: updated,
} }
switch p.Service() { switch p.Service() {

View File

@ -92,7 +92,6 @@ func (suite *StreamStoreIntgSuite) TestStreamer() {
require.NoError(t, deetsBuilder.Add( require.NoError(t, deetsBuilder.Add(
deetsPath, deetsPath,
locPath, locPath,
true,
details.ItemInfo{ details.ItemInfo{
Exchange: &details.ExchangeInfo{ Exchange: &details.ExchangeInfo{
ItemType: details.ExchangeMail, ItemType: details.ExchangeMail,
@ -128,7 +127,6 @@ func (suite *StreamStoreIntgSuite) TestStreamer() {
require.NoError(t, deetsBuilder.Add( require.NoError(t, deetsBuilder.Add(
deetsPath, deetsPath,
locPath, locPath,
true,
details.ItemInfo{ details.ItemInfo{
Exchange: &details.ExchangeInfo{ Exchange: &details.ExchangeInfo{
ItemType: details.ExchangeMail, ItemType: details.ExchangeMail,
@ -201,7 +199,6 @@ func (suite *StreamStoreIntgSuite) TestStreamer() {
assert.Equal(t, deets.Entries[0].RepoRef, readDeets.Entries[0].RepoRef) assert.Equal(t, deets.Entries[0].RepoRef, readDeets.Entries[0].RepoRef)
assert.Equal(t, deets.Entries[0].LocationRef, readDeets.Entries[0].LocationRef) assert.Equal(t, deets.Entries[0].LocationRef, readDeets.Entries[0].LocationRef)
assert.Equal(t, deets.Entries[0].ItemRef, readDeets.Entries[0].ItemRef) assert.Equal(t, deets.Entries[0].ItemRef, readDeets.Entries[0].ItemRef)
assert.Equal(t, deets.Entries[0].Updated, readDeets.Entries[0].Updated)
assert.NotNil(t, readDeets.Entries[0].Exchange) assert.NotNil(t, readDeets.Entries[0].Exchange)
assert.Equal(t, *deets.Entries[0].Exchange, *readDeets.Entries[0].Exchange) assert.Equal(t, *deets.Entries[0].Exchange, *readDeets.Entries[0].Exchange)
} else { } else {

View File

@ -26,7 +26,6 @@ func (b *Builder) Empty() bool {
func (b *Builder) Add( func (b *Builder) Add(
repoRef path.Path, repoRef path.Path,
locationRef *path.Builder, locationRef *path.Builder,
updated bool,
info ItemInfo, info ItemInfo,
) error { ) error {
b.mu.Lock() b.mu.Lock()
@ -35,7 +34,6 @@ func (b *Builder) Add(
entry, err := b.d.add( entry, err := b.d.add(
repoRef, repoRef,
locationRef, locationRef,
updated,
info) info)
if err != nil { if err != nil {
return clues.Wrap(err, "adding entry to details") return clues.Wrap(err, "adding entry to details")
@ -117,7 +115,6 @@ func (b *Builder) addFolderEntries(
} }
folder.Folder.Size += entry.size() folder.Folder.Size += entry.size()
folder.Updated = folder.Updated || entry.Updated
itemModified := entry.Modified() itemModified := entry.Modified()
if folder.Folder.Modified.Before(itemModified) { if folder.Folder.Modified.Before(itemModified) {

View File

@ -29,7 +29,6 @@ type Details struct {
func (d *Details) add( func (d *Details) add(
repoRef path.Path, repoRef path.Path,
locationRef *path.Builder, locationRef *path.Builder,
updated bool,
info ItemInfo, info ItemInfo,
) (Entry, error) { ) (Entry, error) {
if locationRef == nil { if locationRef == nil {
@ -42,7 +41,6 @@ func (d *Details) add(
ParentRef: repoRef.ToBuilder().Dir().ShortRef(), ParentRef: repoRef.ToBuilder().Dir().ShortRef(),
LocationRef: locationRef.String(), LocationRef: locationRef.String(),
ItemRef: repoRef.Item(), ItemRef: repoRef.Item(),
Updated: updated,
ItemInfo: info, ItemInfo: info,
} }

View File

@ -305,8 +305,7 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_NoLocationFolders() {
} }
for _, test := range table { for _, test := range table {
for _, updated := range []bool{false, true} { suite.Run(test.name, func() {
suite.Run(fmt.Sprintf("%s Updated %v", test.name, updated), func() {
t := suite.T() t := suite.T()
rr, err := path.FromDataLayerPath(test.entry.RepoRef, true) rr, err := path.FromDataLayerPath(test.entry.RepoRef, true)
@ -317,13 +316,12 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_NoLocationFolders() {
// Make a local copy so we can modify it. // Make a local copy so we can modify it.
localItem := test.entry localItem := test.entry
err = db.Add(rr, &path.Builder{}, updated, localItem.ItemInfo) err = db.Add(rr, &path.Builder{}, localItem.ItemInfo)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
// Clear LocationRef that's automatically populated since we passed an // Clear LocationRef that's automatically populated since we passed an
// empty builder above. // empty builder above.
localItem.LocationRef = "" localItem.LocationRef = ""
localItem.Updated = updated
expectedShortRef := localItem.ShortRef expectedShortRef := localItem.ShortRef
localItem.ShortRef = "" localItem.ShortRef = ""
@ -339,7 +337,6 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_NoLocationFolders() {
test.shortRefEqual(t, expectedShortRef, gotShortRef, "ShortRef") test.shortRefEqual(t, expectedShortRef, gotShortRef, "ShortRef")
}) })
} }
}
} }
func (suite *DetailsUnitSuite) TestDetailsAdd_LocationFolders() { func (suite *DetailsUnitSuite) TestDetailsAdd_LocationFolders() {
@ -446,7 +443,7 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_LocationFolders() {
expectedDirs func() []Entry expectedDirs func() []Entry
}{ }{
{ {
name: "One Exchange Email None Updated", name: "One Exchange Email",
entries: func() []Entry { entries: func() []Entry {
e := exchangeMail1 e := exchangeMail1
ei := *exchangeMail1.Exchange ei := *exchangeMail1.Exchange
@ -472,35 +469,7 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_LocationFolders() {
}, },
}, },
{ {
name: "One Exchange Email Updated", name: "Two Exchange Emails",
entries: func() []Entry {
e := exchangeMail1
ei := *exchangeMail1.Exchange
e.Exchange = &ei
e.Updated = true
return []Entry{e}
},
expectedDirs: func() []Entry {
res := []Entry{}
for _, entry := range exchangeFolders {
e := entry
ei := *entry.Folder
e.Folder = &ei
e.Folder.Size = exchangeMail1.Exchange.Size
e.Folder.Modified = exchangeMail1.Exchange.Modified
e.Updated = true
res = append(res, e)
}
return res
},
},
{
name: "Two Exchange Emails None Updated",
entries: func() []Entry { entries: func() []Entry {
res := []Entry{} res := []Entry{}
@ -532,41 +501,7 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_LocationFolders() {
}, },
}, },
{ {
name: "Two Exchange Emails One Updated", name: "One Email And One Contact",
entries: func() []Entry {
res := []Entry{}
for i, entry := range []Entry{exchangeMail1, exchangeMail2} {
e := entry
ei := *entry.Exchange
e.Exchange = &ei
e.Updated = i == 1
res = append(res, e)
}
return res
},
expectedDirs: func() []Entry {
res := []Entry{}
for _, entry := range exchangeFolders {
e := entry
ei := *entry.Folder
e.Folder = &ei
e.Folder.Size = exchangeMail1.Exchange.Size + exchangeMail2.Exchange.Size
e.Folder.Modified = exchangeMail2.Exchange.Modified
e.Updated = true
res = append(res, e)
}
return res
},
},
{
name: "One Email And One Contact None Updated",
entries: func() []Entry { entries: func() []Entry {
res := []Entry{} res := []Entry{}
@ -609,7 +544,7 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_LocationFolders() {
}, },
}, },
{ {
name: "One OneDrive Item None Updated", name: "One OneDrive Item",
entries: func() []Entry { entries: func() []Entry {
e := oneDrive1 e := oneDrive1
ei := *oneDrive1.OneDrive ei := *oneDrive1.OneDrive
@ -636,7 +571,7 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_LocationFolders() {
}, },
}, },
{ {
name: "One SharePoint Item None Updated", name: "One SharePoint Item",
entries: func() []Entry { entries: func() []Entry {
e := sharePoint1 e := sharePoint1
ei := *sharePoint1.SharePoint ei := *sharePoint1.SharePoint
@ -663,7 +598,7 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_LocationFolders() {
}, },
}, },
{ {
name: "One SharePoint Legacy Item None Updated", name: "One SharePoint Legacy Item",
entries: func() []Entry { entries: func() []Entry {
e := sharePoint1 e := sharePoint1
ei := *sharePoint1.SharePoint ei := *sharePoint1.SharePoint
@ -703,7 +638,7 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_LocationFolders() {
loc, err := path.Builder{}.SplitUnescapeAppend(entry.LocationRef) loc, err := path.Builder{}.SplitUnescapeAppend(entry.LocationRef)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
err = db.Add(rr, loc, entry.Updated, entry.ItemInfo) err = db.Add(rr, loc, entry.ItemInfo)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
} }
@ -982,7 +917,6 @@ func (suite *DetailsUnitSuite) TestBuilder_Add_shortRefsUniqueFromFolder() {
itemPath, itemPath,
// Don't need to generate folders for this entry we just want the ShortRef. // Don't need to generate folders for this entry we just want the ShortRef.
&path.Builder{}, &path.Builder{},
false,
info) info)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
@ -1017,13 +951,13 @@ func (suite *DetailsUnitSuite) TestBuilder_Add_cleansFileIDSuffixes() {
// Don't need to generate folders for this entry, we just want the itemRef // Don't need to generate folders for this entry, we just want the itemRef
loc := &path.Builder{} loc := &path.Builder{}
err := b.Add(dataSfx, loc, false, info) err := b.Add(dataSfx, loc, info)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
err = b.Add(dirMetaSfx, loc, false, info) err = b.Add(dirMetaSfx, loc, info)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
err = b.Add(metaSfx, loc, false, info) err = b.Add(metaSfx, loc, info)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
for _, ent := range b.Details().Items() { for _, ent := range b.Details().Items() {
@ -1057,16 +991,16 @@ func (suite *DetailsUnitSuite) TestBuilder_DetailsNoDuplicate() {
// Don't need to generate folders for this entry, we just want the itemRef // Don't need to generate folders for this entry, we just want the itemRef
loc := &path.Builder{} loc := &path.Builder{}
err := b.Add(dataSfx, loc, false, info) err := b.Add(dataSfx, loc, info)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
err = b.Add(dataSfx2, loc, false, info) err = b.Add(dataSfx2, loc, info)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
err = b.Add(dirMetaSfx, loc, false, info) err = b.Add(dirMetaSfx, loc, info)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
err = b.Add(metaSfx, loc, false, info) err = b.Add(metaSfx, loc, info)
require.NoError(t, err, clues.ToCore(err)) require.NoError(t, err, clues.ToCore(err))
b.knownFolders = map[string]Entry{ b.knownFolders = map[string]Entry{
@ -1076,7 +1010,6 @@ func (suite *DetailsUnitSuite) TestBuilder_DetailsNoDuplicate() {
ParentRef: "1234", ParentRef: "1234",
LocationRef: "ab", LocationRef: "ab",
ItemRef: "cd", ItemRef: "cd",
Updated: false,
ItemInfo: info, ItemInfo: info,
}, },
"dummy2": { "dummy2": {
@ -1085,7 +1018,6 @@ func (suite *DetailsUnitSuite) TestBuilder_DetailsNoDuplicate() {
ParentRef: "12342", ParentRef: "12342",
LocationRef: "ab2", LocationRef: "ab2",
ItemRef: "cd2", ItemRef: "cd2",
Updated: false,
ItemInfo: info, ItemInfo: info,
}, },
"dummy3": { "dummy3": {
@ -1094,7 +1026,6 @@ func (suite *DetailsUnitSuite) TestBuilder_DetailsNoDuplicate() {
ParentRef: "12343", ParentRef: "12343",
LocationRef: "ab3", LocationRef: "ab3",
ItemRef: "cd3", ItemRef: "cd3",
Updated: false,
ItemInfo: info, ItemInfo: info,
}, },
} }

View File

@ -50,10 +50,6 @@ type Entry struct {
// are only as unique as m365 mail item IDs themselves. // are only as unique as m365 mail item IDs themselves.
ItemRef string `json:"itemRef,omitempty"` ItemRef string `json:"itemRef,omitempty"`
// Indicates the item was added or updated in this backup
// Always `true` for full backups
Updated bool `json:"updated"`
ItemInfo ItemInfo
} }

View File

@ -599,7 +599,7 @@ func (suite *RepositoryModelIntgSuite) TestGetBackupDetails() {
loc := path.Builder{}.Append(repoPath.Folders()...) loc := path.Builder{}.Append(repoPath.Folders()...)
builder := &details.Builder{} builder := &details.Builder{}
require.NoError(suite.T(), builder.Add(repoPath, loc, false, info)) require.NoError(suite.T(), builder.Add(repoPath, loc, info))
table := []struct { table := []struct {
name string name string
@ -680,7 +680,7 @@ func (suite *RepositoryModelIntgSuite) TestGetBackupErrors() {
loc := path.Builder{}.Append(repoPath.Folders()...) loc := path.Builder{}.Append(repoPath.Folders()...)
builder := &details.Builder{} builder := &details.Builder{}
require.NoError(suite.T(), builder.Add(repoPath, loc, false, info)) require.NoError(suite.T(), builder.Add(repoPath, loc, info))
table := []struct { table := []struct {
name string name string