Set file mod time in KopiaWrapper (#1405)

## Description

Set the mod time of uploaded files to either the mod time of the item (if it provides one) or the current time (if it does not). Also add tests to check that file caching in kopia works properly.
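
In the kopia wrapper this boils down to an optional-interface check: use the item's own modified time when the item exposes one through the new `data.StreamModTime` interface, otherwise fall back to `time.Now()`. A condensed, self-contained sketch of that selection logic (the `modTimeFor` helper and `fixedTimeItem` type are illustrative names, not the wrapper's actual code):

```go
package main

import (
	"fmt"
	"time"
)

// StreamModTime mirrors the optional interface this PR adds to the data
// package: items that know their modified time implement it.
type StreamModTime interface {
	ModTime() time.Time
}

// modTimeFor uses the item's own mod time when available and falls back to
// the current time otherwise. (Illustrative helper, not the wrapper's code.)
func modTimeFor(item interface{}) time.Time {
	if smt, ok := item.(StreamModTime); ok {
		return smt.ModTime()
	}
	return time.Now()
}

// fixedTimeItem stands in for an item that carries its own mod time.
type fixedTimeItem struct{ t time.Time }

func (f fixedTimeItem) ModTime() time.Time { return f.t }

func main() {
	known := fixedTimeItem{t: time.Date(2022, 12, 1, 0, 0, 0, 0, time.UTC)}
	fmt.Println(modTimeFor(known))      // uses the item's own mod time
	fmt.Println(modTimeFor(struct{}{})) // no ModTime method: falls back to now
}
```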

## Type of change

<!--- Please check the type of change your PR introduces: --->
- [x] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Test
- [ ] 💻 CI/Deployment
- [ ] 🐹 Trivial/Minor

## Issue(s)

* closes #621 

part of:
* #547 

merge after:
* #1427 
* #1430

## Test Plan

<!-- How will this be tested prior to merging.-->
- [ ] 💪 Manual
- [x] Unit test
- [ ] 💚 E2E
ashmrtn 2022-12-02 08:49:27 -08:00 committed by GitHub
parent 9963e50b99
commit 6dcbc8f2a1
4 changed files with 80 additions and 27 deletions

View File

@@ -19,6 +19,7 @@ type MockExchangeDataCollection struct {
 	messageCount int
 	Data         [][]byte
 	Names        []string
+	ModTimes     []time.Time
 }

 var (
@@ -36,12 +37,15 @@ func NewMockExchangeCollection(pathRepresentation path.Path, numMessagesToReturn
 		messageCount: numMessagesToReturn,
 		Data:         [][]byte{},
 		Names:        []string{},
+		ModTimes:     []time.Time{},
 	}

+	baseTime := time.Now()
 	for i := 0; i < c.messageCount; i++ {
 		// We can plug in whatever data we want here (can be an io.Reader to a test data file if needed)
 		c.Data = append(c.Data, GetMockMessageBytes("From: NewMockExchangeCollection"))
 		c.Names = append(c.Names, uuid.NewString())
+		c.ModTimes = append(c.ModTimes, baseTime.Add(1*time.Hour))
 	}

 	return c
@@ -97,9 +101,10 @@ func (medc *MockExchangeDataCollection) Items() <-chan data.Stream {
 		for i := 0; i < medc.messageCount; i++ {
 			res <- &MockExchangeData{
 				ID:           medc.Names[i],
 				Reader:       io.NopCloser(bytes.NewReader(medc.Data[i])),
 				size:         int64(len(medc.Data[i])),
+				modifiedTime: medc.ModTimes[i],
 			}
 		}
 	}()
@@ -109,10 +114,11 @@ func (medc *MockExchangeDataCollection) Items() <-chan data.Stream {
 // ExchangeData represents a single item retrieved from exchange
 type MockExchangeData struct {
 	ID           string
 	Reader       io.ReadCloser
 	ReadErr      error
 	size         int64
+	modifiedTime time.Time
 }

 func (med *MockExchangeData) UUID() string {
@@ -141,6 +147,10 @@ func (med *MockExchangeData) Size() int64 {
 	return med.size
 }

+func (med *MockExchangeData) ModTime() time.Time {
+	return med.modifiedTime
+}
+
 type errReader struct {
 	readErr error
 }

View File

@@ -2,6 +2,7 @@ package data

 import (
 	"io"
+	"time"

 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/path"
@@ -47,6 +48,11 @@ type StreamSize interface {
 	Size() int64
 }

+// StreamModTime is used to provide the modified time of the stream's data.
+type StreamModTime interface {
+	ModTime() time.Time
+}
+
 // ------------------------------------------------------------------------------------------------
 // functionality
 // ------------------------------------------------------------------------------------------------

View File

@@ -7,6 +7,7 @@ import (
 	"runtime/trace"
 	"sync"
 	"sync/atomic"
+	"time"
 	"unsafe"

 	"github.com/hashicorp/go-multierror"
@@ -127,6 +128,8 @@ type BackupStats struct {
 	TotalUploadedBytes  int64
 	TotalFileCount      int
+	CachedFileCount     int
+	UncachedFileCount   int
 	TotalDirectoryCount int
 	IgnoredErrorCount   int
 	ErrorCount          int
@@ -147,6 +150,8 @@ func manifestToStats(
 		TotalUploadedBytes:  uploadCount.NumBytes,
 		TotalFileCount:      int(man.Stats.TotalFileCount),
+		CachedFileCount:     int(man.Stats.CachedFiles),
+		UncachedFileCount:   int(man.Stats.NonCachedFiles),
 		TotalDirectoryCount: int(man.Stats.TotalDirectoryCount),
 		IgnoredErrorCount:   int(man.Stats.IgnoredErrorCount),
 		ErrorCount:          int(man.Stats.ErrorCount),
@@ -340,8 +345,14 @@ func getStreamItemFunc(
 			d := &itemDetails{info: ei.Info(), repoPath: itemPath}
 			progress.put(encodeAsPath(itemPath.PopFront().Elements()...), d)

-			entry := virtualfs.StreamingFileFromReader(
+			modTime := time.Now()
+			if smt, ok := e.(data.StreamModTime); ok {
+				modTime = smt.ModTime()
+			}
+
+			entry := virtualfs.StreamingFileWithModTimeFromReader(
 				encodeAsPath(e.UUID()),
+				modTime,
 				&backupStreamReader{
 					version:    serializationVersion,
 					ReadCloser: e.ToReader(),
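
The new `CachedFileCount` and `UncachedFileCount` stats above are read straight from kopia's snapshot manifest, and the mod-time change is what makes the cached path reachable: roughly speaking, kopia reuses a previously uploaded file's content when the file's metadata matches the prior snapshot, so items need a stable mod time across backups. A simplified illustration of that idea (this is not kopia's actual upload code; `fileMeta` and `canUseCachedEntry` are hypothetical):

```go
package main

import (
	"fmt"
	"time"
)

// fileMeta is the metadata a snapshotter can compare without re-reading content.
// (Hypothetical type for illustration only.)
type fileMeta struct {
	size    int64
	modTime time.Time
}

// canUseCachedEntry sketches the decision: when size and mod time match the
// entry recorded in the previous snapshot, the file counts as cached and its
// content is reused instead of being re-read and re-hashed.
func canUseCachedEntry(prev, cur fileMeta) bool {
	return prev.size == cur.size && prev.modTime.Equal(cur.modTime)
}

func main() {
	stable := time.Date(2022, 12, 1, 0, 0, 0, 0, time.UTC)
	prev := fileMeta{size: 1024, modTime: stable}

	// Same size and mod time as the prior snapshot: cached.
	fmt.Println(canUseCachedEntry(prev, fileMeta{size: 1024, modTime: stable}))

	// A mod time that changes on every run (e.g. a time.Now fallback) defeats caching.
	fmt.Println(canUseCachedEntry(prev, fileMeta{size: 1024, modTime: time.Now()}))
}
```

This is also why the test below expects all 47 files to be reported as cached on the second backup of the same collections.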

View File

@@ -839,8 +839,6 @@ func (suite *KopiaIntegrationSuite) TearDownTest() {
 }

 func (suite *KopiaIntegrationSuite) TestBackupCollections() {
-	t := suite.T()
-
 	collections := []data.Collection{
 		mockconnector.NewMockExchangeCollection(
 			suite.testPath1,
@@ -865,24 +863,52 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
 		expectedTags[tk] = tv
 	}

-	stats, deets, err := suite.w.BackupCollections(suite.ctx, collections, path.ExchangeService)
-	assert.NoError(t, err)
-	assert.Equal(t, stats.TotalFileCount, 47)
-	assert.Equal(t, stats.TotalDirectoryCount, 6)
-	assert.Equal(t, stats.IgnoredErrorCount, 0)
-	assert.Equal(t, stats.ErrorCount, 0)
-	assert.False(t, stats.Incomplete)
-	assert.Equal(t, path.ExchangeService.String(), deets.Tags[model.ServiceTag])
-	// 47 file and 6 folder entries.
-	assert.Len(t, deets.Entries, 47+6)
+	table := []struct {
+		name                  string
+		expectedUploadedFiles int
+		expectedCachedFiles   int
+	}{
+		{
+			name:                  "Uncached",
+			expectedUploadedFiles: 47,
+			expectedCachedFiles:   0,
+		},
+		{
+			name:                  "Cached",
+			expectedUploadedFiles: 0,
+			expectedCachedFiles:   47,
+		},
+	}

-	checkSnapshotTags(
-		t,
-		suite.ctx,
-		suite.w.c,
-		expectedTags,
-		stats.SnapshotID,
-	)
+	for _, test := range table {
+		suite.T().Run(test.name, func(t *testing.T) {
+			stats, deets, err := suite.w.BackupCollections(suite.ctx, collections, path.ExchangeService)
+			assert.NoError(t, err)

+			assert.Equal(t, test.expectedUploadedFiles, stats.TotalFileCount, "total files")
+			assert.Equal(t, test.expectedUploadedFiles, stats.UncachedFileCount, "uncached files")
+			assert.Equal(t, test.expectedCachedFiles, stats.CachedFileCount, "cached files")
+			assert.Equal(t, 6, stats.TotalDirectoryCount)
+			assert.Equal(t, 0, stats.IgnoredErrorCount)
+			assert.Equal(t, 0, stats.ErrorCount)
+			assert.False(t, stats.Incomplete)
+			assert.Equal(t, path.ExchangeService.String(), deets.Tags[model.ServiceTag])
+			// 47 file and 6 folder entries.
+			assert.Len(
+				t,
+				deets.Entries,
+				test.expectedUploadedFiles+test.expectedCachedFiles+6,
+			)

+			checkSnapshotTags(
+				t,
+				suite.ctx,
+				suite.w.c,
+				expectedTags,
+				stats.SnapshotID,
+			)
+		})
+	}
 }

 func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {