From eb95b430100408d20c0b982b8357161fe800fbbb Mon Sep 17 00:00:00 2001
From: ashmrtn <3891298+ashmrtn@users.noreply.github.com>
Date: Fri, 11 Aug 2023 09:32:21 -0700
Subject: [PATCH] Remove updated field from details (#4004)

The updated field is no longer required by SDK users and is not exposed directly to CLI users unless they're looking at the JSON output from details. The field is not documented anywhere, so there are no guarantees that we technically need to uphold for it.

Manually tested:
* restoring from a backup that had this field
* making a fresh backup without this field when the merge base did have this field

---

#### Does this PR need a docs update or release note?

- [ ] :white_check_mark: Yes, it's included
- [ ] :clock1: Yes, but in a later PR
- [x] :no_entry: No

#### Type of change

- [ ] :sunflower: Feature
- [ ] :bug: Bugfix
- [ ] :world_map: Documentation
- [ ] :robot: Supportability/Tests
- [ ] :computer: CI/Deployment
- [x] :broom: Tech Debt/Cleanup

#### Issue(s)

* closes #3986

#### Test Plan

- [x] :muscle: Manual
- [x] :zap: Unit test
- [x] :green_heart: E2E
---
 src/internal/kopia/upload.go | 1 -
 src/internal/kopia/upload_test.go | 2 -
 src/internal/kopia/wrapper_test.go | 6 -
 src/internal/m365/collection/drive/restore.go | 5 +-
 src/internal/m365/collection/site/restore.go | 2 -
 src/internal/m365/service/exchange/restore.go | 1 -
 src/internal/operations/backup.go | 40 ++---
 src/internal/operations/backup_test.go | 1 -
 src/internal/streamstore/collectables_test.go | 3 -
 src/pkg/backup/details/builder.go | 3 -
 src/pkg/backup/details/details.go | 2 -
 src/pkg/backup/details/details_test.go | 141 +++++-------------
 src/pkg/backup/details/entry.go | 4 -
 .../repository/repository_unexported_test.go | 4 +-
 14 files changed, 52 insertions(+), 163 deletions(-)

diff --git a/src/internal/kopia/upload.go b/src/internal/kopia/upload.go index 522d3fad5..ebf72ae19 100644 --- a/src/internal/kopia/upload.go +++ b/src/internal/kopia/upload.go @@ -231,7 +231,6 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) { err = cp.deets.Add( d.repoPath, d.locationPath, - !d.cached, *d.info) if err != nil { cp.errs.AddRecoverable(cp.ctx, clues.New("adding item to details"). diff --git a/src/internal/kopia/upload_test.go b/src/internal/kopia/upload_test.go index 95c39c46d..6902a4af9 100644 --- a/src/internal/kopia/upload_test.go +++ b/src/internal/kopia/upload_test.go @@ -569,8 +569,6 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFile() { assert.Len(t, entries, test.expectedNumEntries) for _, entry := range entries { - assert.Equal(t, !cachedTest.cached, entry.Updated) - foundItems[entry.ItemRef] = true } diff --git a/src/internal/kopia/wrapper_test.go b/src/internal/kopia/wrapper_test.go index 582c3ff78..b24f2a9c1 100644 --- a/src/internal/kopia/wrapper_test.go +++ b/src/internal/kopia/wrapper_test.go @@ -924,10 +924,6 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() { // 47 file and 2 folder entries.
test.expectedUploadedFiles+test.expectedCachedFiles+2, ) - - for _, entry := range details { - test.deetsUpdated(t, entry.Updated) - } } checkSnapshotTags( @@ -1207,8 +1203,6 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() { ) for _, entry := range details { - assert.True(t, entry.Updated) - if test.hasMetaDeets { continue } diff --git a/src/internal/m365/collection/drive/restore.go b/src/internal/m365/collection/drive/restore.go index ad7cad33f..5b2e861cd 100644 --- a/src/internal/m365/collection/drive/restore.go +++ b/src/internal/m365/collection/drive/restore.go @@ -153,13 +153,12 @@ func RestoreCollection( ctx context.Context, repoRef path.Path, locationRef *path.Builder, - updated bool, info details.ItemInfo, ) { deetsLock.Lock() defer deetsLock.Unlock() - err = deets.Add(repoRef, locationRef, updated, info) + err = deets.Add(repoRef, locationRef, info) if err != nil { // Not critical enough to need to stop restore operation. logger.CtxErr(ctx, err).Infow("adding restored item to details") @@ -231,7 +230,7 @@ func RestoreCollection( } // TODO: implement locationRef - updateDeets(ictx, itemPath, &path.Builder{}, true, itemInfo) + updateDeets(ictx, itemPath, &path.Builder{}, itemInfo) atomic.AddInt64(&metricsSuccess, 1) }(ctx, itemData) diff --git a/src/internal/m365/collection/site/restore.go b/src/internal/m365/collection/site/restore.go index 875ac5115..97408c4f2 100644 --- a/src/internal/m365/collection/site/restore.go +++ b/src/internal/m365/collection/site/restore.go @@ -263,7 +263,6 @@ func RestoreListCollection( err = deets.Add( itemPath, &path.Builder{}, // TODO: implement locationRef - true, itemInfo) if err != nil { // Not critical enough to need to stop restore operation. @@ -343,7 +342,6 @@ func RestorePageCollection( err = deets.Add( itemPath, &path.Builder{}, // TODO: implement locationRef - true, itemInfo) if err != nil { // Not critical enough to need to stop restore operation. diff --git a/src/internal/m365/service/exchange/restore.go b/src/internal/m365/service/exchange/restore.go index 7871f68d4..53cb08639 100644 --- a/src/internal/m365/service/exchange/restore.go +++ b/src/internal/m365/service/exchange/restore.go @@ -215,7 +215,6 @@ func restoreCollection( err = deets.Add( itemPath, locationRef, - true, details.ItemInfo{ Exchange: info, }) diff --git a/src/internal/operations/backup.go b/src/internal/operations/backup.go index de196416b..0c9a468a9 100644 --- a/src/internal/operations/backup.go +++ b/src/internal/operations/backup.go @@ -580,7 +580,7 @@ func getNewPathRefs( entry *details.Entry, repoRef path.Path, backupVersion int, -) (path.Path, *path.Builder, bool, error) { +) (path.Path, *path.Builder, error) { // Right now we can't guarantee that we have an old location in the // previous details entry so first try a lookup without a location to see // if it matches so we don't need to try parsing from the old entry. @@ -594,35 +594,25 @@ func getNewPathRefs( entry.Modified(), nil) if err != nil { - return nil, nil, false, clues.Wrap(err, "getting new paths") + return nil, nil, clues.Wrap(err, "getting new paths") } else if newPath == nil { // This entry doesn't need merging. 
- return nil, nil, false, nil + return nil, nil, nil } else if newLoc == nil { - return nil, nil, false, clues.New("unable to find new exchange location") + return nil, nil, clues.New("unable to find new exchange location") } - // This is kind of jank cause we're in a transitionary period, but even if - // we're consesrvative here about marking something as updated the RepoRef - // comparison in the caller should catch the change. Calendars is the only - // exception, since it uses IDs for folders, but we should already be - // populating the LocationRef for them. - // - // Without this, all OneDrive items will be marked as updated the first time - // around because OneDrive hasn't been persisting LocationRef before now. - updated := len(entry.LocationRef) > 0 && newLoc.String() != entry.LocationRef - - return newPath, newLoc, updated, nil + return newPath, newLoc, nil } // We didn't have an exact entry, so retry with a location. locRef, err := entry.ToLocationIDer(backupVersion) if err != nil { - return nil, nil, false, clues.Wrap(err, "getting previous item location") + return nil, nil, clues.Wrap(err, "getting previous item location") } if locRef == nil { - return nil, nil, false, clues.New("entry with empty LocationRef") + return nil, nil, clues.New("entry with empty LocationRef") } newPath, newLoc, err := dataFromBackup.GetNewPathRefs( @@ -630,16 +620,14 @@ func getNewPathRefs( entry.Modified(), locRef) if err != nil { - return nil, nil, false, clues.Wrap(err, "getting new paths with old location") + return nil, nil, clues.Wrap(err, "getting new paths with old location") } else if newPath == nil { - return nil, nil, false, nil + return nil, nil, nil } else if newLoc == nil { - return nil, nil, false, clues.New("unable to get new paths") + return nil, nil, clues.New("unable to get new paths") } - updated := len(entry.LocationRef) > 0 && newLoc.String() != entry.LocationRef - - return newPath, newLoc, updated, nil + return newPath, newLoc, nil } func mergeItemsFromBase( @@ -702,7 +690,7 @@ func mergeItemsFromBase( ictx := clues.Add(ctx, "repo_ref", rr) - newPath, newLoc, locUpdated, err := getNewPathRefs( + newPath, newLoc, err := getNewPathRefs( dataFromBackup, entry, rr, @@ -721,13 +709,9 @@ func mergeItemsFromBase( item := entry.ItemInfo details.UpdateItem(&item, newLoc) - // TODO(ashmrtn): This can most likely be removed altogether. 
- itemUpdated := newPath.String() != rr.String() || locUpdated - err = deets.Add( newPath, newLoc, - itemUpdated, item) if err != nil { return manifestAddedEntries, diff --git a/src/internal/operations/backup_test.go b/src/internal/operations/backup_test.go index 360c37a66..3ae3125a0 100644 --- a/src/internal/operations/backup_test.go +++ b/src/internal/operations/backup_test.go @@ -285,7 +285,6 @@ func makeDetailsEntry( ItemRef: p.Item(), LocationRef: lr, ItemInfo: details.ItemInfo{}, - Updated: updated, } switch p.Service() { diff --git a/src/internal/streamstore/collectables_test.go b/src/internal/streamstore/collectables_test.go index ccbeab7c2..5257ee718 100644 --- a/src/internal/streamstore/collectables_test.go +++ b/src/internal/streamstore/collectables_test.go @@ -92,7 +92,6 @@ func (suite *StreamStoreIntgSuite) TestStreamer() { require.NoError(t, deetsBuilder.Add( deetsPath, locPath, - true, details.ItemInfo{ Exchange: &details.ExchangeInfo{ ItemType: details.ExchangeMail, @@ -128,7 +127,6 @@ func (suite *StreamStoreIntgSuite) TestStreamer() { require.NoError(t, deetsBuilder.Add( deetsPath, locPath, - true, details.ItemInfo{ Exchange: &details.ExchangeInfo{ ItemType: details.ExchangeMail, @@ -201,7 +199,6 @@ func (suite *StreamStoreIntgSuite) TestStreamer() { assert.Equal(t, deets.Entries[0].RepoRef, readDeets.Entries[0].RepoRef) assert.Equal(t, deets.Entries[0].LocationRef, readDeets.Entries[0].LocationRef) assert.Equal(t, deets.Entries[0].ItemRef, readDeets.Entries[0].ItemRef) - assert.Equal(t, deets.Entries[0].Updated, readDeets.Entries[0].Updated) assert.NotNil(t, readDeets.Entries[0].Exchange) assert.Equal(t, *deets.Entries[0].Exchange, *readDeets.Entries[0].Exchange) } else { diff --git a/src/pkg/backup/details/builder.go b/src/pkg/backup/details/builder.go index 41ce3c60f..825ccc513 100644 --- a/src/pkg/backup/details/builder.go +++ b/src/pkg/backup/details/builder.go @@ -26,7 +26,6 @@ func (b *Builder) Empty() bool { func (b *Builder) Add( repoRef path.Path, locationRef *path.Builder, - updated bool, info ItemInfo, ) error { b.mu.Lock() @@ -35,7 +34,6 @@ func (b *Builder) Add( entry, err := b.d.add( repoRef, locationRef, - updated, info) if err != nil { return clues.Wrap(err, "adding entry to details") @@ -117,7 +115,6 @@ func (b *Builder) addFolderEntries( } folder.Folder.Size += entry.size() - folder.Updated = folder.Updated || entry.Updated itemModified := entry.Modified() if folder.Folder.Modified.Before(itemModified) { diff --git a/src/pkg/backup/details/details.go b/src/pkg/backup/details/details.go index ec2fdfcd5..add52d809 100644 --- a/src/pkg/backup/details/details.go +++ b/src/pkg/backup/details/details.go @@ -29,7 +29,6 @@ type Details struct { func (d *Details) add( repoRef path.Path, locationRef *path.Builder, - updated bool, info ItemInfo, ) (Entry, error) { if locationRef == nil { @@ -42,7 +41,6 @@ func (d *Details) add( ParentRef: repoRef.ToBuilder().Dir().ShortRef(), LocationRef: locationRef.String(), ItemRef: repoRef.Item(), - Updated: updated, ItemInfo: info, } diff --git a/src/pkg/backup/details/details_test.go b/src/pkg/backup/details/details_test.go index b804c04cf..6c9434867 100644 --- a/src/pkg/backup/details/details_test.go +++ b/src/pkg/backup/details/details_test.go @@ -305,40 +305,37 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_NoLocationFolders() { } for _, test := range table { - for _, updated := range []bool{false, true} { - suite.Run(fmt.Sprintf("%s Updated %v", test.name, updated), func() { - t := suite.T() + suite.Run(test.name, 
func() { + t := suite.T() - rr, err := path.FromDataLayerPath(test.entry.RepoRef, true) - require.NoError(t, err, clues.ToCore(err)) + rr, err := path.FromDataLayerPath(test.entry.RepoRef, true) + require.NoError(t, err, clues.ToCore(err)) - db := &Builder{} + db := &Builder{} - // Make a local copy so we can modify it. - localItem := test.entry + // Make a local copy so we can modify it. + localItem := test.entry - err = db.Add(rr, &path.Builder{}, updated, localItem.ItemInfo) - require.NoError(t, err, clues.ToCore(err)) + err = db.Add(rr, &path.Builder{}, localItem.ItemInfo) + require.NoError(t, err, clues.ToCore(err)) - // Clear LocationRef that's automatically populated since we passed an - // empty builder above. - localItem.LocationRef = "" - localItem.Updated = updated + // Clear LocationRef that's automatically populated since we passed an + // empty builder above. + localItem.LocationRef = "" - expectedShortRef := localItem.ShortRef - localItem.ShortRef = "" + expectedShortRef := localItem.ShortRef + localItem.ShortRef = "" - deets := db.Details() - assert.Len(t, deets.Entries, 1) + deets := db.Details() + assert.Len(t, deets.Entries, 1) - got := deets.Entries[0] - gotShortRef := got.ShortRef - got.ShortRef = "" + got := deets.Entries[0] + gotShortRef := got.ShortRef + got.ShortRef = "" - assert.Equal(t, localItem, got, "DetailsEntry") - test.shortRefEqual(t, expectedShortRef, gotShortRef, "ShortRef") - }) - } + assert.Equal(t, localItem, got, "DetailsEntry") + test.shortRefEqual(t, expectedShortRef, gotShortRef, "ShortRef") + }) } } @@ -446,7 +443,7 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_LocationFolders() { expectedDirs func() []Entry }{ { - name: "One Exchange Email None Updated", + name: "One Exchange Email", entries: func() []Entry { e := exchangeMail1 ei := *exchangeMail1.Exchange @@ -472,35 +469,7 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_LocationFolders() { }, }, { - name: "One Exchange Email Updated", - entries: func() []Entry { - e := exchangeMail1 - ei := *exchangeMail1.Exchange - e.Exchange = &ei - e.Updated = true - - return []Entry{e} - }, - expectedDirs: func() []Entry { - res := []Entry{} - - for _, entry := range exchangeFolders { - e := entry - ei := *entry.Folder - - e.Folder = &ei - e.Folder.Size = exchangeMail1.Exchange.Size - e.Folder.Modified = exchangeMail1.Exchange.Modified - e.Updated = true - - res = append(res, e) - } - - return res - }, - }, - { - name: "Two Exchange Emails None Updated", + name: "Two Exchange Emails", entries: func() []Entry { res := []Entry{} @@ -532,41 +501,7 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_LocationFolders() { }, }, { - name: "Two Exchange Emails One Updated", - entries: func() []Entry { - res := []Entry{} - - for i, entry := range []Entry{exchangeMail1, exchangeMail2} { - e := entry - ei := *entry.Exchange - e.Exchange = &ei - e.Updated = i == 1 - - res = append(res, e) - } - - return res - }, - expectedDirs: func() []Entry { - res := []Entry{} - - for _, entry := range exchangeFolders { - e := entry - ei := *entry.Folder - - e.Folder = &ei - e.Folder.Size = exchangeMail1.Exchange.Size + exchangeMail2.Exchange.Size - e.Folder.Modified = exchangeMail2.Exchange.Modified - e.Updated = true - - res = append(res, e) - } - - return res - }, - }, - { - name: "One Email And One Contact None Updated", + name: "One Email And One Contact", entries: func() []Entry { res := []Entry{} @@ -609,7 +544,7 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_LocationFolders() { }, }, { - name: "One OneDrive Item None 
Updated", + name: "One OneDrive Item", entries: func() []Entry { e := oneDrive1 ei := *oneDrive1.OneDrive @@ -636,7 +571,7 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_LocationFolders() { }, }, { - name: "One SharePoint Item None Updated", + name: "One SharePoint Item", entries: func() []Entry { e := sharePoint1 ei := *sharePoint1.SharePoint @@ -663,7 +598,7 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_LocationFolders() { }, }, { - name: "One SharePoint Legacy Item None Updated", + name: "One SharePoint Legacy Item", entries: func() []Entry { e := sharePoint1 ei := *sharePoint1.SharePoint @@ -703,7 +638,7 @@ func (suite *DetailsUnitSuite) TestDetailsAdd_LocationFolders() { loc, err := path.Builder{}.SplitUnescapeAppend(entry.LocationRef) require.NoError(t, err, clues.ToCore(err)) - err = db.Add(rr, loc, entry.Updated, entry.ItemInfo) + err = db.Add(rr, loc, entry.ItemInfo) require.NoError(t, err, clues.ToCore(err)) } @@ -982,7 +917,6 @@ func (suite *DetailsUnitSuite) TestBuilder_Add_shortRefsUniqueFromFolder() { itemPath, // Don't need to generate folders for this entry we just want the ShortRef. &path.Builder{}, - false, info) require.NoError(t, err, clues.ToCore(err)) @@ -1017,13 +951,13 @@ func (suite *DetailsUnitSuite) TestBuilder_Add_cleansFileIDSuffixes() { // Don't need to generate folders for this entry, we just want the itemRef loc := &path.Builder{} - err := b.Add(dataSfx, loc, false, info) + err := b.Add(dataSfx, loc, info) require.NoError(t, err, clues.ToCore(err)) - err = b.Add(dirMetaSfx, loc, false, info) + err = b.Add(dirMetaSfx, loc, info) require.NoError(t, err, clues.ToCore(err)) - err = b.Add(metaSfx, loc, false, info) + err = b.Add(metaSfx, loc, info) require.NoError(t, err, clues.ToCore(err)) for _, ent := range b.Details().Items() { @@ -1057,16 +991,16 @@ func (suite *DetailsUnitSuite) TestBuilder_DetailsNoDuplicate() { // Don't need to generate folders for this entry, we just want the itemRef loc := &path.Builder{} - err := b.Add(dataSfx, loc, false, info) + err := b.Add(dataSfx, loc, info) require.NoError(t, err, clues.ToCore(err)) - err = b.Add(dataSfx2, loc, false, info) + err = b.Add(dataSfx2, loc, info) require.NoError(t, err, clues.ToCore(err)) - err = b.Add(dirMetaSfx, loc, false, info) + err = b.Add(dirMetaSfx, loc, info) require.NoError(t, err, clues.ToCore(err)) - err = b.Add(metaSfx, loc, false, info) + err = b.Add(metaSfx, loc, info) require.NoError(t, err, clues.ToCore(err)) b.knownFolders = map[string]Entry{ @@ -1076,7 +1010,6 @@ func (suite *DetailsUnitSuite) TestBuilder_DetailsNoDuplicate() { ParentRef: "1234", LocationRef: "ab", ItemRef: "cd", - Updated: false, ItemInfo: info, }, "dummy2": { @@ -1085,7 +1018,6 @@ func (suite *DetailsUnitSuite) TestBuilder_DetailsNoDuplicate() { ParentRef: "12342", LocationRef: "ab2", ItemRef: "cd2", - Updated: false, ItemInfo: info, }, "dummy3": { @@ -1094,7 +1026,6 @@ func (suite *DetailsUnitSuite) TestBuilder_DetailsNoDuplicate() { ParentRef: "12343", LocationRef: "ab3", ItemRef: "cd3", - Updated: false, ItemInfo: info, }, } diff --git a/src/pkg/backup/details/entry.go b/src/pkg/backup/details/entry.go index 83b9af133..47e2d5196 100644 --- a/src/pkg/backup/details/entry.go +++ b/src/pkg/backup/details/entry.go @@ -50,10 +50,6 @@ type Entry struct { // are only as unique as m365 mail item IDs themselves. 
ItemRef string `json:"itemRef,omitempty"` - // Indicates the item was added or updated in this backup - // Always `true` for full backups - Updated bool `json:"updated"` - ItemInfo } diff --git a/src/pkg/repository/repository_unexported_test.go b/src/pkg/repository/repository_unexported_test.go index c2915c19e..d5369b6e6 100644 --- a/src/pkg/repository/repository_unexported_test.go +++ b/src/pkg/repository/repository_unexported_test.go @@ -599,7 +599,7 @@ func (suite *RepositoryModelIntgSuite) TestGetBackupDetails() { loc := path.Builder{}.Append(repoPath.Folders()...) builder := &details.Builder{} - require.NoError(suite.T(), builder.Add(repoPath, loc, false, info)) + require.NoError(suite.T(), builder.Add(repoPath, loc, info)) table := []struct { name string @@ -680,7 +680,7 @@ func (suite *RepositoryModelIntgSuite) TestGetBackupErrors() { loc := path.Builder{}.Append(repoPath.Folders()...) builder := &details.Builder{} - require.NoError(suite.T(), builder.Add(repoPath, loc, false, info)) + require.NoError(suite.T(), builder.Add(repoPath, loc, info)) table := []struct { name string
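For quick reference, below is a minimal sketch of what a `details.Builder.Add` call looks like after this change, mirroring the test code in the diff above. The import paths, the example data-layer path string, and the placeholder tenant/user/item IDs are assumptions for illustration and are not taken from this PR.

```go
package main

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/backup/details"
	"github.com/alcionai/corso/src/pkg/path"
)

func main() {
	// Placeholder data-layer path: tenant/service/resourceOwner/category/folders.../item.
	rr, err := path.FromDataLayerPath("tenant-id/exchange/user-id/email/Inbox/item-id", true)
	if err != nil {
		panic(err)
	}

	// Build the LocationRef from the repo ref's folders, as the tests above do.
	loc := path.Builder{}.Append(rr.Folders()...)

	b := &details.Builder{}

	// Add no longer takes an updated flag: just repoRef, locationRef, and the item info.
	err = b.Add(rr, loc, details.ItemInfo{
		Exchange: &details.ExchangeInfo{ItemType: details.ExchangeMail},
	})
	if err != nil {
		panic(err)
	}

	for _, ent := range b.Details().Entries {
		fmt.Println(ent.RepoRef, ent.LocationRef, ent.ItemRef)
	}
}
```

Entries serialized by new builds simply omit the `updated` JSON key, while previously persisted details that still carry it continue to deserialize, since encoding/json drops unknown fields; that lines up with the manual restore test called out in the description.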