Misc changes - add urlCacher interface, add some tests for cache integration
parent c62bcc6d95
commit 01bb562bb5
@@ -85,7 +85,7 @@ type Collection struct {
 	// should only be true if the old delta token expired
 	doNotMergeItems bool

-	cache *urlCache
+	cache urlCacher
 }

 func pathToLocation(p path.Path) (*path.Builder, error) {
@@ -111,7 +111,7 @@ func NewCollection(
 	ctrlOpts control.Options,
 	colScope collectionScope,
 	doNotMergeItems bool,
-	cache *urlCache,
+	cache urlCacher,
 ) (*Collection, error) {
 	// TODO(ashmrtn): If OneDrive switches to using folder IDs then this will need
 	// to be changed as we won't be able to extract path information from the
@@ -153,7 +153,7 @@ func newColl(
 	ctrlOpts control.Options,
 	colScope collectionScope,
 	doNotMergeItems bool,
-	cache *urlCache,
+	cache urlCacher,
 ) *Collection {
 	c := &Collection{
 		handler: handler,
@@ -376,7 +376,7 @@ func (oc *Collection) readFromCache(
 	itemID string,
 ) (io.ReadCloser, error) {
 	if oc.cache == nil {
-		return nil, clues.New("nil url cache")
+		return nil, clues.New("nil cache")
 	}

 	props, err := oc.cache.getItemProperties(ctx, itemID)
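The hunks above swap the Collection's concrete *urlCache field (and the matching constructor parameters) for a urlCacher interface, so tests can substitute a mock. The interface definition itself is not visible in this commit; judging from the oc.cache.getItemProperties(ctx, itemID) call in readFromCache and from the mockURLCache added in the test file below, it presumably looks roughly like this sketch (the real definition may differ):

    package onedrive

    import "context"

    // itemProps already exists in the package; only the fields this commit's
    // tests touch (downloadURL, isDeleted) are repeated here for context.
    type itemProps struct {
        downloadURL string
        isDeleted   bool
    }

    // urlCacher: shape inferred from readFromCache and mockURLCache; the
    // actual definition in the package may differ.
    type urlCacher interface {
        getItemProperties(ctx context.Context, itemID string) (itemProps, error)
    }

Both the production *urlCache and the test mock can then satisfy the same contract, which is what lets NewCollection and newColl accept either one.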
@@ -2,6 +2,7 @@ package onedrive

 import (
 	"bytes"
+	"context"
 	"encoding/json"
 	"io"
 	"net/http"
@@ -604,6 +605,18 @@ func (suite *GetDriveItemUnitTestSuite) TestGetDriveItem_error() {
 	}
 }

+var _ urlCacher = &mockURLCache{}
+
+type mockURLCache struct {
+	Get func(ctx context.Context, itemID string) (itemProps, error)
+}
+
+func (muc *mockURLCache) getItemProperties(
+	ctx context.Context,
+	itemID string) (itemProps, error) {
+	return muc.Get(ctx, itemID)
+}
+
 func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 	var (
 		driveID string
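The var _ urlCacher = &mockURLCache{} line added above is a compile-time assertion that the mock keeps satisfying the interface, and the func-field Get lets each table-driven case below inject exactly the cache behavior it needs. A matching check on the production side would be a one-liner like the following sketch (assuming the concrete implementation is still the urlCache struct; that check is not part of this diff):

    // Sketch only: mirrors the mock's interface assertion for the concrete type.
    var _ urlCacher = &urlCache{}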
@@ -615,6 +628,12 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {

 	itemWID.SetId(ptr.To("brainhooldy"))

+	m := &mockURLCache{
+		Get: func(ctx context.Context, itemID string) (itemProps, error) {
+			return itemProps{}, clues.Stack(assert.AnError)
+		},
+	}
+
 	table := []struct {
 		name string
 		mgi mock.GetsItem
@@ -623,6 +642,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 		getErr []error
 		expectErr require.ErrorAssertionFunc
 		expect require.ValueAssertionFunc
+		muc *mockURLCache
 	}{
 		{
 			name: "good",
@@ -631,6 +651,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 			getErr: []error{nil},
 			expectErr: require.NoError,
 			expect: require.NotNil,
+			muc: m,
 		},
 		{
 			name: "expired url redownloads",
@@ -640,6 +661,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 			getErr: []error{errUnauth, nil},
 			expectErr: require.NoError,
 			expect: require.NotNil,
+			muc: m,
 		},
 		{
 			name: "immediate error",
@@ -647,6 +669,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 			getErr: []error{assert.AnError},
 			expectErr: require.Error,
 			expect: require.Nil,
+			muc: m,
 		},
 		{
 			name: "re-fetching the item fails",
@@ -655,6 +678,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 			mgi: mock.GetsItem{Item: nil, Err: assert.AnError},
 			expectErr: require.Error,
 			expect: require.Nil,
+			muc: m,
 		},
 		{
 			name: "expired url fails redownload",
@@ -664,6 +688,43 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 			getErr: []error{errUnauth, assert.AnError},
 			expectErr: require.Error,
 			expect: require.Nil,
+			muc: m,
+		},
+		{
+			name: "expired url fetched from cache",
+			mgi: mock.GetsItem{Item: itemWID, Err: nil},
+			itemInfo: details.ItemInfo{},
+			respBody: []io.ReadCloser{nil, iorc},
+			getErr: []error{errUnauth, nil},
+			expectErr: require.NoError,
+			expect: require.NotNil,
+			muc: &mockURLCache{
+				Get: func(ctx context.Context, itemID string) (itemProps, error) {
+					return itemProps{
+							downloadURL: "http://example.com",
+							isDeleted: false,
+						},
+						nil
+				},
+			},
+		},
+		{
+			name: "expired url fetched from cache but item deleted",
+			mgi: mock.GetsItem{Item: itemWID, Err: assert.AnError},
+			itemInfo: details.ItemInfo{},
+			respBody: []io.ReadCloser{nil, nil},
+			getErr: []error{errUnauth, assert.AnError},
+			expectErr: require.Error,
+			expect: require.Nil,
+			muc: &mockURLCache{
+				Get: func(ctx context.Context, itemID string) (itemProps, error) {
+					return itemProps{
+							downloadURL: "http://example.com",
+							isDeleted: true,
+						},
+						nil
+				},
+			},
 		},
 	}
 	for _, test := range table {
@@ -698,7 +759,7 @@ func (suite *GetDriveItemUnitTestSuite) TestDownloadContent() {
 				control.Options{ToggleFeatures: control.Toggles{}},
 				CollectionScopeFolder,
 				true,
-				nil)
+				test.muc)
 			require.NoError(t, err, clues.ToCore(err))

 			r, err := coll.downloadContent(ctx, mbh, item, driveID)
@@ -6,7 +6,6 @@ import (
 	"fmt"
 	"io"
 	"strings"
-	"time"

 	"github.com/alcionai/clues"
 	"github.com/microsoftgraph/msgraph-sdk-go/models"
@@ -386,7 +385,7 @@ func (c *Collections) Get(

 	// Only create a drive cache if there are less than 300k items in the drive.
 	// TODO: Tune this number. Delta query for 300k items takes ~20 mins.
-	if numDriveItems < 300*1000 {
+	if numDriveItems < urlCacheDriveItemThreshold {
 		logger.Ctx(ictx).Info("adding url cache for drive ", driveID)

 		err = c.addURLCacheToDriveCollections(ictx, driveID, errs)
@@ -463,7 +462,7 @@ func (c *Collections) addURLCacheToDriveCollections(
 ) error {
 	uc, err := newURLCache(
 		driveID,
-		1*time.Hour, // TODO: Add const
+		urlCacheRefreshInterval,
 		errs,
 		c.handler.ItemPager(driveID, "", api.DriveItemSelectDefault()))
 	if err != nil {
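The two magic values are replaced with named constants whose definitions are not shown in this commit. Going by the literals they replace (300*1000 items and 1*time.Hour), they presumably look something like this sketch (names taken from the hunks above, values inferred, actual declarations may differ):

    // Sketch only: constant values inferred from the replaced literals.
    const (
        // Skip building a per-drive URL cache above this item count.
        urlCacheDriveItemThreshold = 300 * 1000

        // How often the URL cache refreshes its cached download URLs.
        urlCacheRefreshInterval = 1 * time.Hour
    )

Dropping the "time" import from this file is consistent with the 1*time.Hour literal moving to wherever the constant is declared.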
@@ -77,7 +77,7 @@ func downloadFile(
 		return nil, clues.New("malware detected").Label(graph.LabelsMalware)
 	}

-	if (resp.StatusCode / 100) != 2 {
+	if resp != nil && (resp.StatusCode/100) != 2 {
 		// upstream error checks can compare the status with
 		// clues.HasLabel(err, graph.LabelStatus(http.KnownStatusCode))
 		return nil, clues.
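The added resp != nil guard prevents a nil-pointer dereference: when the HTTP client returns an error, the response can be nil, and the old check would panic on resp.StatusCode before the caller ever saw the error. A minimal standalone illustration of the failure mode and the guard (hypothetical example, not the repo's downloadFile):

    package main

    import (
        "fmt"
        "net/http"
    )

    func main() {
        // A failed request typically yields resp == nil together with a non-nil err.
        resp, err := http.Get("http://127.0.0.1:1") // connection refused on most hosts
        if err != nil {
            fmt.Println("request error:", err)
        }

        // Unguarded, resp.StatusCode would panic here with a nil pointer dereference.
        // Guarded, mirroring the change above:
        if resp != nil && (resp.StatusCode/100) != 2 {
            fmt.Println("non-2xx status:", resp.StatusCode)
        }
    }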