deduplicate cache folders (#1814)

## Description

The exchange-side `cacheFolder` and the graph package's `CacheFolder` are effectively identical.
This refactor defers to the graph version, since services may still wrap it for more specific behavior as needed.
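
For reference, a minimal sketch of the shared graph-side type this change standardizes on, reconstructed from the removed exchange copy and the `graph.NewCacheFolder` / `graph.CacheFolder` call sites in the diff below; the real definition in the graph package may differ in detail:

```go
// Sketch only, reconstructed from this diff; the actual code lives in
// internal/connector/graph and may differ.
package graph

import "github.com/alcionai/corso/src/pkg/path"

var _ CachedContainer = &CacheFolder{}

// CacheFolder couples a Container with the path resolved for that container.
type CacheFolder struct {
	Container
	p *path.Builder
}

// NewCacheFolder pairs a container with its path; callers pass nil when the
// path should be resolved later by the container resolver.
func NewCacheFolder(c Container, p *path.Builder) CacheFolder {
	return CacheFolder{Container: c, p: p}
}

// Path and SetPath satisfy the CachedContainer interface.
func (cf CacheFolder) Path() *path.Builder {
	return cf.p
}

func (cf *CacheFolder) SetPath(newPath *path.Builder) {
	cf.p = newPath
}
```

Callers either supply the path up front (`graph.NewCacheFolder(f, path.Builder{}.Append(...))`) or pass `nil` and let the resolver fill it in later via `SetPath`.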

## Does this PR need a docs update or release note?

- [x]  No 

## Type of change

- [x] 🐹 Trivial/Minor

## Issue(s)

* #1727

## Test Plan

- [x]  Unit test
- [x] 💚 E2E
Keepers authored 2022-12-16 16:43:00 -07:00 · committed by GitHub
commit 6c72eefdae (parent 8c15c3ce16)
11 changed files with 55 additions and 84 deletions

View File

@@ -551,9 +551,11 @@ func (suite *ConnectorCreateExchangeCollectionIntegrationSuite) TestEventsSerial
for stream := range streamChannel {
buf := &bytes.Buffer{}
read, err := buf.ReadFrom(stream.ToReader())
assert.NoError(t, err)
assert.NotZero(t, read)
event, err := support.CreateEventFromBytes(buf.Bytes())
assert.NotNil(t, event)
assert.NoError(t, err, "creating event from bytes: "+buf.String())

View File

@@ -5,7 +5,6 @@ import (
"github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/connector/graph"
- "github.com/alcionai/corso/src/pkg/path"
)
// checkIDAndName is a helper function to ensure that
@@ -39,29 +38,6 @@ func checkRequiredValues(c graph.Container) error {
return nil
}
- // =========================================
- // cachedContainer Implementations
- // =========================================
- var _ graph.CachedContainer = &cacheFolder{}
- type cacheFolder struct {
- graph.Container
- p *path.Builder
- }
- // =========================================
- // Required Functions to satisfy interfaces
- // =========================================
- func (cf cacheFolder) Path() *path.Builder {
- return cf.p
- }
- func (cf *cacheFolder) SetPath(newPath *path.Builder) {
- cf.p = newPath
- }
// CalendarDisplayable is a transformative struct that aligns
// models.Calendarable interface with the container interface.
// Calendars do not have a parentFolderID. Therefore,

View File

@@ -43,10 +43,7 @@ func (cfc *contactFolderCache) populateContactRoot(
"fetching root contact folder: "+support.ConnectorStackErrorTrace(err))
}
- temp := cacheFolder{
- Container: f,
- p: path.Builder{}.Append(baseContainerPath...),
- }
+ temp := graph.NewCacheFolder(f, path.Builder{}.Append(baseContainerPath...))
if err := cfc.addFolder(temp); err != nil {
return errors.Wrap(err, "adding cache root")
@@ -100,9 +97,7 @@ func (cfc *contactFolderCache) Populate(
continue
}
- temp := cacheFolder{
- Container: fold,
- }
+ temp := graph.NewCacheFolder(fold, nil)
err = cfc.addFolder(temp)
if err != nil {

View File

@@ -85,10 +85,10 @@ func (cr *containerResolver) PathInCache(pathString string) (string, bool) {
// addFolder adds a folder to the cache with the given ID. If the item is
// already in the cache does nothing. The path for the item is not modified.
- func (cr *containerResolver) addFolder(cf cacheFolder) error {
+ func (cr *containerResolver) addFolder(cf graph.CacheFolder) error {
// Only require a non-nil non-empty parent if the path isn't already
// populated.
- if cf.p != nil {
+ if cf.Path() != nil {
if err := checkIDAndName(cf.Container); err != nil {
return errors.Wrap(err, "adding item to cache")
}
@@ -120,7 +120,7 @@ func (cr *containerResolver) Items() []graph.CachedContainer {
// AddToCache adds container to map in field 'cache'
// @returns error iff the required values are not accessible.
func (cr *containerResolver) AddToCache(ctx context.Context, f graph.Container) error {
- temp := cacheFolder{
+ temp := graph.CacheFolder{
Container: f,
}

View File

@@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
+ "github.com/alcionai/corso/src/internal/connector/graph"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path"
)
@@ -144,67 +145,67 @@ func (suite *FolderCacheUnitSuite) TestCheckRequiredValues() {
func (suite *FolderCacheUnitSuite) TestAddFolder() {
table := []struct {
name string
- cf cacheFolder
+ cf graph.CacheFolder
check assert.ErrorAssertionFunc
}{
{
name: "NoParentNoPath",
- cf: cacheFolder{
- Container: &mockContainer{
+ cf: graph.NewCacheFolder(
+ &mockContainer{
id: &testID,
name: &testName,
parentID: nil,
},
- p: nil,
- },
+ nil,
+ ),
check: assert.Error,
},
{
name: "NoParentPath",
- cf: cacheFolder{
- Container: &mockContainer{
+ cf: graph.NewCacheFolder(
+ &mockContainer{
id: &testID,
name: &testName,
parentID: nil,
},
- p: path.Builder{}.Append("foo"),
- },
+ path.Builder{}.Append("foo"),
+ ),
check: assert.NoError,
},
{
name: "NoName",
- cf: cacheFolder{
- Container: &mockContainer{
+ cf: graph.NewCacheFolder(
+ &mockContainer{
id: &testID,
name: nil,
parentID: &testParentID,
},
- p: path.Builder{}.Append("foo"),
- },
+ path.Builder{}.Append("foo"),
+ ),
check: assert.Error,
},
{
name: "NoID",
- cf: cacheFolder{
- Container: &mockContainer{
+ cf: graph.NewCacheFolder(
+ &mockContainer{
id: nil,
name: &testName,
parentID: &testParentID,
},
- p: path.Builder{}.Append("foo"),
- },
+ path.Builder{}.Append("foo"),
+ ),
check: assert.Error,
},
{
name: "NoPath",
- cf: cacheFolder{
- Container: &mockContainer{
+ cf: graph.NewCacheFolder(
+ &mockContainer{
id: &testID,
name: &testName,
parentID: &testParentID,
},
- p: nil,
- },
+ nil,
+ ),
check: assert.NoError,
},
}
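
The table above pins down the rule that `addFolder` enforces (see the `containerResolver` hunk earlier): when a `CacheFolder` already carries a path, only the container's ID and name are required; without a path, a parent ID is needed as well. A minimal self-contained sketch of that rule, using stand-in types rather than the graph package's:

```go
package main

import (
	"errors"
	"fmt"
)

// Stand-ins for graph.Container and graph.CacheFolder; illustration only.
type container struct {
	id, name, parentID *string
}

type cacheFolder struct {
	container
	path []string // nil when the path has not been resolved yet
}

// addFolder mirrors the rule exercised by TestAddFolder: always require ID and
// name; require a parent ID only when no path was supplied.
func addFolder(cf cacheFolder) error {
	if cf.id == nil || cf.name == nil {
		return errors.New("adding item to cache: missing ID or name")
	}

	if cf.path == nil && cf.parentID == nil {
		return errors.New("adding item to cache: missing parent ID for folder without a path")
	}

	return nil
}

func main() {
	id, name := "id", "name"

	// NoParentPath: a supplied path stands in for the parent ID -> no error.
	fmt.Println(addFolder(cacheFolder{container{&id, &name, nil}, []string{"foo"}}))

	// NoParentNoPath: neither a path nor a parent ID -> error.
	fmt.Println(addFolder(cacheFolder{container{&id, &name, nil}, nil}))
}
```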

View File

@@ -16,7 +16,7 @@ import (
func MetadataFileNames(cat path.CategoryType) []string {
switch cat {
case path.EmailCategory, path.ContactsCategory:
- return []string{graph.DeltaTokenFileName, graph.PreviousPathFileName}
+ return []string{graph.DeltaURLsFileName, graph.PreviousPathFileName}
default:
return []string{graph.PreviousPathFileName}
}
@@ -82,7 +82,7 @@ func ParseMetadataCollections(
cdps.paths = m
- case graph.DeltaTokenFileName:
+ case graph.DeltaURLsFileName:
if len(cdps.deltas) > 0 {
return nil, errors.Errorf("multiple versions of %s delta metadata", category)
}

View File

@@ -43,7 +43,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{
name: "delta urls",
data: []fileValues{
- {graph.DeltaTokenFileName, "delta-link"},
+ {graph.DeltaURLsFileName, "delta-link"},
},
expectDeltas: map[string]string{
"key": "delta-link",
@@ -53,8 +53,8 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{
name: "multiple delta urls",
data: []fileValues{
- {graph.DeltaTokenFileName, "delta-link"},
- {graph.DeltaTokenFileName, "delta-link-2"},
+ {graph.DeltaURLsFileName, "delta-link"},
+ {graph.DeltaURLsFileName, "delta-link-2"},
},
expectError: assert.Error,
},
@@ -79,7 +79,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{
name: "delta urls and previous paths",
data: []fileValues{
- {graph.DeltaTokenFileName, "delta-link"},
+ {graph.DeltaURLsFileName, "delta-link"},
{graph.PreviousPathFileName, "prev-path"},
},
expectDeltas: map[string]string{
@@ -93,7 +93,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{
name: "delta urls with special chars",
data: []fileValues{
- {graph.DeltaTokenFileName, "`!@#$%^&*()_[]{}/\"\\"},
+ {graph.DeltaURLsFileName, "`!@#$%^&*()_[]{}/\"\\"},
},
expectDeltas: map[string]string{
"key": "`!@#$%^&*()_[]{}/\"\\",
@@ -103,7 +103,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
{
name: "delta urls with escaped chars",
data: []fileValues{
- {graph.DeltaTokenFileName, `\n\r\t\b\f\v\0\\`},
+ {graph.DeltaURLsFileName, `\n\r\t\b\f\v\0\\`},
},
expectDeltas: map[string]string{
"key": "\\n\\r\\t\\b\\f\\v\\0\\\\",
@@ -116,7 +116,7 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
// rune(92) = \, rune(110) = n. Ensuring it's not possible to
// error in serializing/deserializing and produce a single newline
// character from those two runes.
- {graph.DeltaTokenFileName, string([]rune{rune(92), rune(110)})},
+ {graph.DeltaURLsFileName, string([]rune{rune(92), rune(110)})},
},
expectDeltas: map[string]string{
"key": "\\n",
@@ -154,12 +154,20 @@ func (suite *DataCollectionsUnitSuite) TestParseMetadataCollections() {
emails := cdps[path.EmailCategory]
deltas, paths := emails.deltas, emails.paths
+ if len(test.expectDeltas) > 0 {
+ assert.NotEmpty(t, deltas, "deltas")
+ }
+ if len(test.expectPaths) > 0 {
+ assert.NotEmpty(t, paths, "paths")
+ }
for k, v := range test.expectDeltas {
assert.Equal(t, v, deltas[k], "deltas elements")
}
for k, v := range test.expectPaths {
- assert.Equal(t, v, paths[k], "deltas elements")
+ assert.Equal(t, v, paths[k], "paths elements")
}
})
}

View File

@@ -72,10 +72,7 @@ func (ecc *eventCalendarCache) Populate(
}
for _, container := range directories {
- temp := cacheFolder{
- Container: container,
- p: path.Builder{}.Append(*container.GetDisplayName()),
- }
+ temp := graph.NewCacheFolder(container, path.Builder{}.Append(*container.GetDisplayName()))
if err := ecc.addFolder(temp); err != nil {
errs = support.WrapAndAppend(
@@ -95,10 +92,7 @@ func (ecc *eventCalendarCache) AddToCache(ctx context.Context, f graph.Container
return errors.Wrap(err, "adding cache folder")
}
- temp := cacheFolder{
- Container: f,
- p: path.Builder{}.Append(*f.GetDisplayName()),
- }
+ temp := graph.NewCacheFolder(f, path.Builder{}.Append(*f.GetDisplayName()))
if err := ecc.addFolder(temp); err != nil {
return errors.Wrap(err, "adding cache folder")

View File

@@ -55,10 +55,7 @@ func (mc *mailFolderCache) populateMailRoot(
directory = DefaultMailFolder
}
- temp := cacheFolder{
- Container: f,
- p: path.Builder{}.Append(directory),
- }
+ temp := graph.NewCacheFolder(f, path.Builder{}.Append(directory))
if err := mc.addFolder(temp); err != nil {
return errors.Wrap(err, "initializing mail resolver")
@@ -98,9 +95,7 @@ func (mc *mailFolderCache) Populate(
}
for _, f := range resp.GetValue() {
- temp := cacheFolder{
- Container: f,
- }
+ temp := graph.NewCacheFolder(f, nil)
// Use addFolder instead of AddToCache to be conservative about path
// population. The fetch order of the folders could cause failures while
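
The comment above is the reason `Populate` passes a `nil` path here: folders can be fetched child-before-parent, so paths are filled in only after every container is cached. A toy, self-contained illustration of that deferred resolution (the IDs and names are made up, not Graph API values):

```go
package main

import "fmt"

// folder is a stand-in for a cached container: an ID, a parent ID, and a name.
type folder struct {
	id, parentID, name string
}

func main() {
	// Fetch order is not guaranteed: the child arrives before its parent.
	arrived := []folder{
		{id: "2", parentID: "1", name: "Receipts"},
		{id: "1", parentID: "", name: "Inbox"},
	}

	// First pass: cache everything without a path (addFolder with a nil path).
	cache := map[string]folder{}
	for _, f := range arrived {
		cache[f.id] = f
	}

	// Second pass: resolve each path by walking the now-complete parent chain.
	for _, f := range arrived {
		segs := []string{}
		for cur, ok := f, true; ok; cur, ok = cache[cur.parentID] {
			segs = append([]string{cur.name}, segs...)
		}

		fmt.Printf("%s -> %v\n", f.id, segs)
	}
}
```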

View File

@@ -113,7 +113,7 @@ func FilterContainersAndFillCollections(
}
if len(deltaURLs) > 0 {
- entries = append(entries, graph.NewMetadataEntry(graph.DeltaTokenFileName, deltaURLs))
+ entries = append(entries, graph.NewMetadataEntry(graph.DeltaURLsFileName, deltaURLs))
}
if col, err := graph.MakeMetadataCollection(

View File

@@ -10,9 +10,9 @@ import (
)
const (
- // DeltaTokenFileName is the name of the file containing delta token(s) for a
+ // DeltaURLsFileName is the name of the file containing delta token(s) for a
// given endpoint. The endpoint granularity varies by service.
- DeltaTokenFileName = "delta"
+ DeltaURLsFileName = "delta"
// PreviousPathFileName is the name of the file containing previous path(s) for a
// given endpoint.
PreviousPathFileName = "previouspath"
@@ -21,7 +21,7 @@ const (
// AllMetadataFileNames produces the standard set of filenames used to store graph
// metadata such as delta tokens and folderID->path references.
func AllMetadataFileNames() []string {
- return []string{DeltaTokenFileName, PreviousPathFileName}
+ return []string{DeltaURLsFileName, PreviousPathFileName}
}
type QueryParams struct {
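
The rename threads through both sides of the metadata flow shown above: `FilterContainersAndFillCollections` writes delta URLs under `graph.DeltaURLsFileName`, and `ParseMetadataCollections` switches on the same name when reading them back. A self-contained sketch of that round trip; the `metadataEntry` shape below is a stand-in for the package's real metadata types:

```go
package main

import "fmt"

const (
	// Same values as the graph package constants after the rename.
	DeltaURLsFileName    = "delta"
	PreviousPathFileName = "previouspath"
)

// metadataEntry is a stand-in for graph.NewMetadataEntry's result: a metadata
// file name plus the folderID->value map stored in that file.
type metadataEntry struct {
	fileName string
	data     map[string]string
}

func main() {
	// Write side (compare FilterContainersAndFillCollections): delta URLs and
	// previous paths land in separate metadata files.
	entries := []metadataEntry{
		{DeltaURLsFileName, map[string]string{"folder-id": "delta-link"}},
		{PreviousPathFileName, map[string]string{"folder-id": "prev/path"}},
	}

	// Read side (compare ParseMetadataCollections): dispatch on the file name.
	deltas := map[string]string{}
	paths := map[string]string{}

	for _, e := range entries {
		switch e.fileName {
		case DeltaURLsFileName:
			for k, v := range e.data {
				deltas[k] = v
			}
		case PreviousPathFileName:
			for k, v := range e.data {
				paths[k] = v
			}
		}
	}

	fmt.Println("deltas:", deltas, "paths:", paths)
}
```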