Misc changes - add urlCacher interface, add some tests for cache integration
parent c62bcc6d95
commit 01bb562bb5
@@ -85,7 +85,7 @@ type Collection struct {
     // should only be true if the old delta token expired
     doNotMergeItems bool
 
-    cache *urlCache
+    cache urlCacher
 }
 
 func pathToLocation(p path.Path) (*path.Builder, error) {
@@ -111,7 +111,7 @@ func NewCollection(
     ctrlOpts control.Options,
     colScope collectionScope,
     doNotMergeItems bool,
-    cache *urlCache,
+    cache urlCacher,
 ) (*Collection, error) {
     // TODO(ashmrtn): If OneDrive switches to using folder IDs then this will need
     // to be changed as we won't be able to extract path information from the
@@ -153,7 +153,7 @@ func newColl(
     ctrlOpts control.Options,
     colScope collectionScope,
     doNotMergeItems bool,
-    cache *urlCache,
+    cache urlCacher,
 ) *Collection {
     c := &Collection{
         handler: handler,
@@ -376,7 +376,7 @@ func (oc *Collection) readFromCache(
     itemID string,
 ) (io.ReadCloser, error) {
     if oc.cache == nil {
-        return nil, clues.New("nil url cache")
+        return nil, clues.New("nil cache")
     }
 
     props, err := oc.cache.getItemProperties(ctx, itemID)
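The urlCacher interface itself does not appear in these hunks. Below is a minimal sketch of what it presumably declares, inferred from the oc.cache.getItemProperties(ctx, itemID) call above and the test mock added later in this commit; the real definition (and the full field set of itemProps) lives in the url cache source and may include more than this.

// Sketch only: inferred from call sites and the test mock in this commit;
// the repository's actual urlCacher and itemProps definitions may differ.
package onedrive

import "context"

// itemProps mirrors the two fields exercised by the tests below
// (downloadURL, isDeleted); other fields may exist.
type itemProps struct {
    downloadURL string
    isDeleted   bool
}

// urlCacher abstracts the concrete *urlCache so Collection can accept any
// implementation, including the mockURLCache used by the tests.
type urlCacher interface {
    getItemProperties(ctx context.Context, itemID string) (itemProps, error)
}

Switching the struct field and constructor parameters from *urlCache to this interface is what lets the tests below inject a mock where production code passes the concrete cache.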
@@ -2,6 +2,7 @@ package onedrive
 
 import (
     "bytes"
+    "context"
     "encoding/json"
     "io"
     "net/http"
@@ -604,6 +605,18 @@ func (suite *GetDriveItemUnitTestSuite) TestGetDriveItem_error() {
     }
 }
 
+var _ urlCacher = &mockURLCache{}
+
+type mockURLCache struct {
+    Get func(ctx context.Context, itemID string) (itemProps, error)
+}
+
+func (muc *mockURLCache) getItemProperties(
+    ctx context.Context,
+    itemID string) (itemProps, error) {
+    return muc.Get(ctx, itemID)
+}
+
 func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
     var (
         driveID string
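The var _ urlCacher = &mockURLCache{} line above is the standard Go compile-time assertion that a type satisfies an interface. A tiny self-contained illustration of the idiom, using generic names that are not from this repository:

// Generic illustration of the compile-time interface-satisfaction assertion.
package main

import "fmt"

type greeter interface {
    greet() string
}

type mockGreeter struct{}

func (mockGreeter) greet() string { return "hi" }

// Compilation fails here if mockGreeter ever stops satisfying greeter,
// surfacing interface drift at build time rather than in a test run.
var _ greeter = mockGreeter{}

func main() {
    fmt.Println(mockGreeter{}.greet())
}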
@@ -615,6 +628,12 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 
     itemWID.SetId(ptr.To("brainhooldy"))
 
+    m := &mockURLCache{
+        Get: func(ctx context.Context, itemID string) (itemProps, error) {
+            return itemProps{}, clues.Stack(assert.AnError)
+        },
+    }
+
     table := []struct {
         name      string
         mgi       mock.GetsItem
@@ -623,6 +642,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
         getErr    []error
         expectErr require.ErrorAssertionFunc
         expect    require.ValueAssertionFunc
+        muc       *mockURLCache
     }{
         {
             name: "good",
@@ -631,6 +651,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
             getErr:    []error{nil},
             expectErr: require.NoError,
             expect:    require.NotNil,
+            muc:       m,
         },
         {
             name: "expired url redownloads",
@@ -640,6 +661,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
             getErr:    []error{errUnauth, nil},
             expectErr: require.NoError,
             expect:    require.NotNil,
+            muc:       m,
         },
         {
             name: "immediate error",
@@ -647,6 +669,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
             getErr:    []error{assert.AnError},
             expectErr: require.Error,
             expect:    require.Nil,
+            muc:       m,
         },
         {
             name: "re-fetching the item fails",
@@ -655,6 +678,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
             mgi:       mock.GetsItem{Item: nil, Err: assert.AnError},
             expectErr: require.Error,
             expect:    require.Nil,
+            muc:       m,
         },
         {
             name: "expired url fails redownload",
@@ -664,6 +688,43 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
             getErr:    []error{errUnauth, assert.AnError},
             expectErr: require.Error,
             expect:    require.Nil,
+            muc:       m,
         },
+        {
+            name:      "expired url fetched from cache",
+            mgi:       mock.GetsItem{Item: itemWID, Err: nil},
+            itemInfo:  details.ItemInfo{},
+            respBody:  []io.ReadCloser{nil, iorc},
+            getErr:    []error{errUnauth, nil},
+            expectErr: require.NoError,
+            expect:    require.NotNil,
+            muc: &mockURLCache{
+                Get: func(ctx context.Context, itemID string) (itemProps, error) {
+                    return itemProps{
+                            downloadURL: "http://example.com",
+                            isDeleted:   false,
+                        },
+                        nil
+                },
+            },
+        },
+        {
+            name:      "expired url fetched from cache but item deleted",
+            mgi:       mock.GetsItem{Item: itemWID, Err: assert.AnError},
+            itemInfo:  details.ItemInfo{},
+            respBody:  []io.ReadCloser{nil, nil},
+            getErr:    []error{errUnauth, assert.AnError},
+            expectErr: require.Error,
+            expect:    require.Nil,
+            muc: &mockURLCache{
+                Get: func(ctx context.Context, itemID string) (itemProps, error) {
+                    return itemProps{
+                            downloadURL: "http://example.com",
+                            isDeleted:   true,
+                        },
+                        nil
+                },
+            },
+        },
     }
     for _, test := range table {
@@ -698,7 +759,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
                 control.Options{ToggleFeatures: control.Toggles{}},
                 CollectionScopeFolder,
                 true,
-                nil)
+                test.muc)
             require.NoError(t, err, clues.ToCore(err))
 
             r, err := coll.downloadContent(ctx, mbh, item, driveID)
@@ -6,7 +6,6 @@ import (
     "fmt"
     "io"
     "strings"
-    "time"
 
     "github.com/alcionai/clues"
     "github.com/microsoftgraph/msgraph-sdk-go/models"
@@ -386,7 +385,7 @@ func (c *Collections) Get(
 
         // Only create a drive cache if there are less than 300k items in the drive.
         // TODO: Tune this number. Delta query for 300k items takes ~20 mins.
-        if numDriveItems < 300*1000 {
+        if numDriveItems < urlCacheDriveItemThreshold {
             logger.Ctx(ictx).Info("adding url cache for drive ", driveID)
 
             err = c.addURLCacheToDriveCollections(ictx, driveID, errs)
@@ -463,7 +462,7 @@ func (c *Collections) addURLCacheToDriveCollections(
 ) error {
     uc, err := newURLCache(
         driveID,
-        1*time.Hour, // TODO: Add const
+        urlCacheRefreshInterval,
         errs,
         c.handler.ItemPager(driveID, "", api.DriveItemSelectDefault()))
     if err != nil {
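Neither urlCacheDriveItemThreshold nor urlCacheRefreshInterval is defined in the visible hunks. Assuming they simply lift the literals they replace (the 300k-item cutoff and the 1-hour interval flagged by the removed TODO), the declarations would look roughly like this; the names match the diff, while the values and comments are inferred.

// Sketch only: values inferred from the literals these constants replace.
package onedrive

import "time"

const (
    // urlCacheDriveItemThreshold caps the drive size for which a URL cache is
    // built; a delta query over ~300k items takes roughly 20 minutes.
    urlCacheDriveItemThreshold = 300 * 1000

    // urlCacheRefreshInterval is how long cached download URLs are treated as
    // fresh before the cache refreshes them with another delta query.
    urlCacheRefreshInterval = 1 * time.Hour
)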
@@ -77,7 +77,7 @@ func downloadFile(
         return nil, clues.New("malware detected").Label(graph.LabelsMalware)
     }
 
-    if (resp.StatusCode / 100) != 2 {
+    if resp != nil && (resp.StatusCode/100) != 2 {
         // upstream error checks can compare the status with
         // clues.HasLabel(err, graph.LabelStatus(http.KnownStatusCode))
         return nil, clues.
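The downloadFile change adds a nil guard before the status check. A small standalone illustration of why that matters, not repository code: when an HTTP request fails, the returned *http.Response can be nil, and reading resp.StatusCode without the guard would panic instead of letting the caller handle the request error.

// Standalone illustration of the nil-response guard; not repository code.
package main

import (
    "fmt"
    "net/http"
)

// statusError reports non-2xx responses without panicking on a nil response.
func statusError(resp *http.Response) error {
    if resp != nil && (resp.StatusCode/100) != 2 {
        return fmt.Errorf("unexpected status code: %d", resp.StatusCode)
    }

    return nil
}

func main() {
    var failed *http.Response // what a failed request can leave behind
    fmt.Println(statusError(failed)) // <nil>: no panic; caller handles the request error

    ok := &http.Response{StatusCode: http.StatusOK}
    fmt.Println(statusError(ok)) // <nil>

    unauth := &http.Response{StatusCode: http.StatusUnauthorized}
    fmt.Println(statusError(unauth)) // unexpected status code: 401
}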