Add more tests for url cache (#3593)
PR contents:

1. Address a corner case where the cache may be left half filled (e.g. a delta query failing after a few pages). Empty the cache on any delta failure (sketched below).
2. Add unit tests.

---

#### Does this PR need a docs update or release note?

- [ ] ✅ Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] ⛔ No

#### Type of change

- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [x] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [ ] 🧹 Tech Debt/Cleanup

#### Issue(s)

* #<issue>

#### Test Plan

- [ ] 💪 Manual
- [x] ⚡ Unit test
- [ ] 💚 E2E
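A minimal sketch of the corner case in item 1, using simplified stand-ins rather than the real implementation: the names (`urlCache`, `itemProps`, `refreshCache`, `deltaQuery`) mirror the diff below, but the bodies here only simulate a delta enumeration that fails after filling part of the cache.

```go
// Sketch only: demonstrates "empty the cache on any delta failure" so a
// failed refresh never leaves the id -> props map half filled.
package main

import (
	"context"
	"errors"
	"fmt"
	"time"
)

type itemProps struct {
	downloadURL string
	isDeleted   bool
}

type urlCache struct {
	idToProps       map[string]itemProps
	lastRefreshTime time.Time
}

// deltaQuery stands in for the paged enumeration; it populates part of the
// cache and then fails, simulating a delta query that dies after a few pages.
func (uc *urlCache) deltaQuery(ctx context.Context) error {
	uc.idToProps["1"] = itemProps{downloadURL: "https://dummy1.com"}
	return errors.New("delta query failed after first page")
}

// refreshCache resets any partially populated state when the delta query
// fails, so a later lazy refresh starts from a clean slate.
func (uc *urlCache) refreshCache(ctx context.Context) error {
	if err := uc.deltaQuery(ctx); err != nil {
		// clear cache instead of leaving it half filled
		uc.idToProps = make(map[string]itemProps)
		return err
	}

	uc.lastRefreshTime = time.Now()

	return nil
}

func main() {
	uc := &urlCache{idToProps: map[string]itemProps{}}

	err := uc.refreshCache(context.Background())

	// Prints the error and 0: nothing from the failed run is kept.
	fmt.Println(err, len(uc.idToProps))
}
```

Clearing on failure trades a little re-fetch work for an all-or-nothing refresh, which is what the "multi-page delta query error" case in the new unit tests asserts (empty map, zero delta query count, zero last refresh time).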
parent
0a17a72800
commit
dd19b484c8
@@ -2494,6 +2494,22 @@ func driveItem(
	return coreItem(id, name, parentPath, parentID, isFile, isFolder, isPackage)
}

func fileItem(
	id, name, parentPath, parentID, url string,
	deleted bool,
) models.DriveItemable {
	di := driveItem(id, name, parentPath, parentID, true, false, false)
	di.SetAdditionalData(map[string]interface{}{
		"@microsoft.graph.downloadUrl": url,
	})

	if deleted {
		di.SetDeleted(models.NewDeleted())
	}

	return di
}

func malwareItem(
	id string,
	name string,
@@ -9,6 +9,7 @@ import (
	"github.com/microsoftgraph/msgraph-sdk-go/models"

	"github.com/alcionai/corso/src/internal/common/ptr"
	"github.com/alcionai/corso/src/internal/common/str"
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/logger"
	"github.com/alcionai/corso/src/pkg/services/m365/api"
@@ -33,7 +34,7 @@ type urlCache struct {

	itemPager api.DriveItemEnumerator

	errors *fault.Bus
	errs   *fault.Bus
}

// newURLache creates a new URL cache for the specified drive ID
@@ -41,7 +42,7 @@ func newURLCache(
	driveID string,
	refreshInterval time.Duration,
	itemPager api.DriveItemEnumerator,
	errors *fault.Bus,
	errs *fault.Bus,
) (*urlCache, error) {
	err := validateCacheParams(
		driveID,
@@ -57,7 +58,7 @@ func newURLCache(
			driveID:         driveID,
			refreshInterval: refreshInterval,
			itemPager:       itemPager,
			errors:          errors,
			errs:            errs,
		},
		nil
}
@@ -72,7 +73,7 @@ func validateCacheParams(
		return clues.New("drive id is empty")
	}

	if refreshInterval <= 1*time.Second {
	if refreshInterval < 1*time.Second {
		return clues.New("invalid refresh interval")
	}

@@ -94,7 +95,6 @@ func (uc *urlCache) getItemProperties(

	ctx = clues.Add(ctx, "drive_id", uc.driveID)

	// Lazy refresh
	if uc.needsRefresh() {
		err := uc.refreshCache(ctx)
		if err != nil {
@@ -146,6 +146,9 @@ func (uc *urlCache) refreshCache(

	err := uc.deltaQuery(ctx)
	if err != nil {
		// clear cache
		uc.idToProps = make(map[string]itemProps)

		return err
	}

@@ -171,7 +174,7 @@ func (uc *urlCache) deltaQuery(
		uc.updateCache,
		map[string]string{},
		"",
		uc.errors)
		uc.errs)
	if err != nil {
		return clues.Wrap(err, "delta query")
	}
@@ -224,12 +227,14 @@ func (uc *urlCache) updateCache(
			continue
		}

		var url string
		var (
			url string
			ad  = item.GetAdditionalData()
		)

		for _, key := range downloadURLKeys {
			tmp, ok := item.GetAdditionalData()[key].(*string)
			if ok {
				url = ptr.Val(tmp)
			if v, err := str.AnyValueToString(key, ad); err == nil {
				url = v
				break
			}
		}

@@ -1,6 +1,8 @@
package onedrive

import (
	"errors"
	"math/rand"
	"net/http"
	"sync"
	"testing"
@@ -152,3 +154,375 @@ func (suite *URLCacheIntegrationSuite) TestURLCacheBasic() {
	// Validate that <= 1 delta queries were made
	require.LessOrEqual(t, cache.deltaQueryCount, 1)
}

type URLCacheUnitSuite struct {
	tester.Suite
}

func TestURLCacheUnitSuite(t *testing.T) {
	suite.Run(t, &URLCacheUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *URLCacheUnitSuite) TestGetItemProperties() {
	deltaString := "delta"
	next := "next"
	driveID := "drive1"

	table := []struct {
		name              string
		pagerResult       map[string][]deltaPagerResult
		expectedItemProps map[string]itemProps
		expectedErr       require.ErrorAssertionFunc
		cacheAssert       func(*urlCache, time.Time)
	}{
		{
			name: "single item in cache",
			pagerResult: map[string][]deltaPagerResult{
				driveID: {
					{
						items: []models.DriveItemable{
							fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
						},
						deltaLink: &deltaString,
					},
				},
			},
			expectedItemProps: map[string]itemProps{
				"1": {
					downloadURL: "https://dummy1.com",
					isDeleted:   false,
				},
			},
			expectedErr: require.NoError,
			cacheAssert: func(uc *urlCache, startTime time.Time) {
				require.Greater(suite.T(), uc.lastRefreshTime, startTime)
				require.Equal(suite.T(), 1, uc.deltaQueryCount)
				require.Equal(suite.T(), 1, len(uc.idToProps))
			},
		},
		{
			name: "multiple items in cache",
			pagerResult: map[string][]deltaPagerResult{
				driveID: {
					{
						items: []models.DriveItemable{
							fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
							fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
							fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
							fileItem("4", "file4", "root", "root", "https://dummy4.com", false),
							fileItem("5", "file5", "root", "root", "https://dummy5.com", false),
						},
						deltaLink: &deltaString,
					},
				},
			},
			expectedItemProps: map[string]itemProps{
				"1": {
					downloadURL: "https://dummy1.com",
					isDeleted:   false,
				},
				"2": {
					downloadURL: "https://dummy2.com",
					isDeleted:   false,
				},
				"3": {
					downloadURL: "https://dummy3.com",
					isDeleted:   false,
				},
				"4": {
					downloadURL: "https://dummy4.com",
					isDeleted:   false,
				},
				"5": {
					downloadURL: "https://dummy5.com",
					isDeleted:   false,
				},
			},
			expectedErr: require.NoError,
			cacheAssert: func(uc *urlCache, startTime time.Time) {
				require.Greater(suite.T(), uc.lastRefreshTime, startTime)
				require.Equal(suite.T(), 1, uc.deltaQueryCount)
				require.Equal(suite.T(), 5, len(uc.idToProps))
			},
		},
		{
			name: "duplicate items with potentially new urls",
			pagerResult: map[string][]deltaPagerResult{
				driveID: {
					{
						items: []models.DriveItemable{
							fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
							fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
							fileItem("3", "file3", "root", "root", "https://dummy3.com", false),
							fileItem("1", "file1", "root", "root", "https://test1.com", false),
							fileItem("2", "file2", "root", "root", "https://test2.com", false),
						},
						deltaLink: &deltaString,
					},
				},
			},
			expectedItemProps: map[string]itemProps{
				"1": {
					downloadURL: "https://test1.com",
					isDeleted:   false,
				},
				"2": {
					downloadURL: "https://test2.com",
					isDeleted:   false,
				},
				"3": {
					downloadURL: "https://dummy3.com",
					isDeleted:   false,
				},
			},
			expectedErr: require.NoError,
			cacheAssert: func(uc *urlCache, startTime time.Time) {
				require.Greater(suite.T(), uc.lastRefreshTime, startTime)
				require.Equal(suite.T(), 1, uc.deltaQueryCount)
				require.Equal(suite.T(), 3, len(uc.idToProps))
			},
		},
		{
			name: "deleted items",
			pagerResult: map[string][]deltaPagerResult{
				driveID: {
					{
						items: []models.DriveItemable{
							fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
							fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
							fileItem("1", "file1", "root", "root", "https://dummy1.com", true),
						},
						deltaLink: &deltaString,
					},
				},
			},
			expectedItemProps: map[string]itemProps{
				"1": {
					downloadURL: "",
					isDeleted:   true,
				},
				"2": {
					downloadURL: "https://dummy2.com",
					isDeleted:   false,
				},
			},
			expectedErr: require.NoError,
			cacheAssert: func(uc *urlCache, startTime time.Time) {
				require.Greater(suite.T(), uc.lastRefreshTime, startTime)
				require.Equal(suite.T(), 1, uc.deltaQueryCount)
				require.Equal(suite.T(), 2, len(uc.idToProps))
			},
		},
		{
			name: "item not found in cache",
			pagerResult: map[string][]deltaPagerResult{
				driveID: {
					{
						items: []models.DriveItemable{
							fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
						},
						deltaLink: &deltaString,
					},
				},
			},
			expectedItemProps: map[string]itemProps{
				"2": {},
			},
			expectedErr: require.Error,
			cacheAssert: func(uc *urlCache, startTime time.Time) {
				require.Greater(suite.T(), uc.lastRefreshTime, startTime)
				require.Equal(suite.T(), 1, uc.deltaQueryCount)
				require.Equal(suite.T(), 1, len(uc.idToProps))
			},
		},
		{
			name: "multi-page delta query error",
			pagerResult: map[string][]deltaPagerResult{
				driveID: {
					{
						items: []models.DriveItemable{
							fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
						},
						nextLink: &next,
					},
					{
						items: []models.DriveItemable{
							fileItem("2", "file2", "root", "root", "https://dummy2.com", false),
						},
						deltaLink: &deltaString,
						err:       errors.New("delta query error"),
					},
				},
			},
			expectedItemProps: map[string]itemProps{
				"1": {},
				"2": {},
			},
			expectedErr: require.Error,
			cacheAssert: func(uc *urlCache, _ time.Time) {
				require.Equal(suite.T(), time.Time{}, uc.lastRefreshTime)
				require.Equal(suite.T(), 0, uc.deltaQueryCount)
				require.Equal(suite.T(), 0, len(uc.idToProps))
			},
		},

		{
			name: "folder item",
			pagerResult: map[string][]deltaPagerResult{
				driveID: {
					{
						items: []models.DriveItemable{
							fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
							driveItem("2", "folder2", "root", "root", false, true, false),
						},
						deltaLink: &deltaString,
					},
				},
			},
			expectedItemProps: map[string]itemProps{
				"2": {},
			},
			expectedErr: require.Error,
			cacheAssert: func(uc *urlCache, startTime time.Time) {
				require.Greater(suite.T(), uc.lastRefreshTime, startTime)
				require.Equal(suite.T(), 1, uc.deltaQueryCount)
				require.Equal(suite.T(), 1, len(uc.idToProps))
			},
		},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()
			ctx, flush := tester.NewContext(t)
			defer flush()

			itemPager := &mockItemPager{
				toReturn: test.pagerResult[driveID],
			}

			cache, err := newURLCache(
				driveID,
				1*time.Hour,
				itemPager,
				fault.New(true))

			require.NoError(suite.T(), err, clues.ToCore(err))

			numConcurrentReq := 100
			var wg sync.WaitGroup
			wg.Add(numConcurrentReq)

			startTime := time.Now()

			for i := 0; i < numConcurrentReq; i++ {
				go func() {
					defer wg.Done()

					for id, expected := range test.expectedItemProps {
						time.Sleep(time.Duration(rand.Intn(100)) * time.Millisecond)

						props, err := cache.getItemProperties(ctx, id)

						test.expectedErr(suite.T(), err, clues.ToCore(err))
						require.Equal(suite.T(), expected, props)
					}
				}()
			}

			wg.Wait()

			test.cacheAssert(cache, startTime)
		})
	}
}

// Test needsRefresh
func (suite *URLCacheUnitSuite) TestNeedsRefresh() {
	driveID := "drive1"
	t := suite.T()
	refreshInterval := 1 * time.Second

	cache, err := newURLCache(
		driveID,
		refreshInterval,
		&mockItemPager{},
		fault.New(true))

	require.NoError(t, err, clues.ToCore(err))

	// cache is empty
	require.True(t, cache.needsRefresh())

	// cache is not empty, but refresh interval has passed
	cache.idToProps["1"] = itemProps{
		downloadURL: "https://dummy1.com",
		isDeleted:   false,
	}

	time.Sleep(refreshInterval)
	require.True(t, cache.needsRefresh())

	// none of the above
	cache.lastRefreshTime = time.Now()
	require.False(t, cache.needsRefresh())
}

// Test newURLCache
func (suite *URLCacheUnitSuite) TestNewURLCache() {
	// table driven tests
	table := []struct {
		name        string
		driveID     string
		refreshInt  time.Duration
		itemPager   api.DriveItemEnumerator
		errors      *fault.Bus
		expectedErr require.ErrorAssertionFunc
	}{
		{
			name:        "invalid driveID",
			driveID:     "",
			refreshInt:  1 * time.Hour,
			itemPager:   &mockItemPager{},
			errors:      fault.New(true),
			expectedErr: require.Error,
		},
		{
			name:        "invalid refresh interval",
			driveID:     "drive1",
			refreshInt:  100 * time.Millisecond,
			itemPager:   &mockItemPager{},
			errors:      fault.New(true),
			expectedErr: require.Error,
		},
		{
			name:        "invalid itemPager",
			driveID:     "drive1",
			refreshInt:  1 * time.Hour,
			itemPager:   nil,
			errors:      fault.New(true),
			expectedErr: require.Error,
		},
		{
			name:        "valid",
			driveID:     "drive1",
			refreshInt:  1 * time.Hour,
			itemPager:   &mockItemPager{},
			errors:      fault.New(true),
			expectedErr: require.NoError,
		},
	}

	for _, test := range table {
		suite.Run(test.name, func() {
			t := suite.T()
			_, err := newURLCache(
				test.driveID,
				test.refreshInt,
				test.itemPager,
				test.errors)

			test.expectedErr(t, err, clues.ToCore(err))
		})
	}
}