all non-resourcer compliance

Updates all path package uses that do not involve
a Resourcer.  This is the third-to-last step in wrapping up
compliance.  The second-to-last step is to update the Resourcer
interface to comply with ServiceResources.  The last step is to
clean up any lingering linter issues, bugs, tests, and other work
needed to make the build green.
ryanfkeepers 2023-08-11 16:16:40 -06:00
parent bf398f6c8d
commit 48aae5d485
18 changed files with 284 additions and 119 deletions
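For orientation before the file-by-file diff: the recurring change below swaps the single-service accessors (p.Service(), p.ResourceOwner()) for the ServiceResources() slice, treating its last entry as the data-owning service. A minimal sketch of that access pattern, assuming the path package types shown in the hunks below and that ProtectedResource is the resource ID string, as its uses suggest; lastServiceResource is an illustrative helper, not part of the package:

package example

import "github.com/alcionai/corso/src/pkg/path"

// lastServiceResource shows the pattern used throughout this commit: the
// final ServiceResource tuple in the path is treated as the protected
// resource whose data the path stores.
func lastServiceResource(p path.Path) (path.ServiceType, string) {
    srs := p.ServiceResources()
    last := srs[len(srs)-1]

    return last.Service, last.ProtectedResource
}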

View File

@@ -515,7 +515,7 @@ func streamBaseEntries(
 		// TODO(ashmrtn): We may eventually want to make this a function that is
 		// passed in so that we can more easily switch it between different external
 		// service provider implementations.
-		if !metadata.IsMetadataFile(itemPath) {
+		if !metadata.IsMetadataFilePath(itemPath) {
 			// All items have item info in the base backup. However, we need to make
 			// sure we have enough metadata to find those entries. To do that we add
 			// the item to progress and having progress aggregate everything for

View File

@@ -403,17 +403,19 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {
 	// No excludes yet as this isn't an incremental backup.
 	assert.True(t, excludes.Empty())

+	// assume the last service in the path is sharepoint.
+	srs := cols[0].FullPath().ServiceResources()
+	service := srs[len(srs)-1].Service
+
 	t.Logf("cols[0] Path: %s\n", cols[0].FullPath().String())
-	assert.Equal(
-		t,
-		path.SharePointMetadataService.String(),
-		cols[0].FullPath().Service().String())
+	assert.Equal(t, path.SharePointMetadataService, service)

+	// assume the last service in the path is sharepoint.
+	srs = cols[1].FullPath().ServiceResources()
+	service = srs[len(srs)-1].Service
+
 	t.Logf("cols[1] Path: %s\n", cols[1].FullPath().String())
-	assert.Equal(
-		t,
-		path.SharePointService.String(),
-		cols[1].FullPath().Service().String())
+	assert.Equal(t, path.SharePointService, service)
 }

 func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {

View File

@@ -218,12 +218,16 @@ func (sc *Collection) retrieveLists(
 	var (
 		metrics support.CollectionMetrics
 		el      = errs.Local()
+		// todo: pass in the resourceOwner as an idname.Provider
+		srs = sc.fullPath.ServiceResources()
+		// take the last resource in srs, since that should be the data owner
+		protectedResource = srs[len(srs)-1].ProtectedResource
 	)

 	lists, err := loadSiteLists(
 		ctx,
 		sc.client.Stable,
-		sc.fullPath.ResourceOwner(),
+		protectedResource,
 		sc.jobs,
 		errs)
 	if err != nil {
@@ -279,6 +283,10 @@ func (sc *Collection) retrievePages(
 	var (
 		metrics support.CollectionMetrics
 		el      = errs.Local()
+		// todo: pass in the resourceOwner as an idname.Provider
+		srs = sc.fullPath.ServiceResources()
+		// take the last resource in srs, since that should be the data owner
+		protectedResource = srs[len(srs)-1].ProtectedResource
 	)

 	betaService := sc.betaService
@@ -286,14 +294,14 @@ func (sc *Collection) retrievePages(
 		return metrics, clues.New("beta service required").WithClues(ctx)
 	}

-	parent, err := as.GetByID(ctx, sc.fullPath.ResourceOwner())
+	parent, err := as.GetByID(ctx, protectedResource)
 	if err != nil {
 		return metrics, err
 	}

 	root := ptr.Val(parent.GetWebUrl())

-	pages, err := betaAPI.GetSitePages(ctx, betaService, sc.fullPath.ResourceOwner(), sc.jobs, errs)
+	pages, err := betaAPI.GetSitePages(ctx, betaService, protectedResource, sc.jobs, errs)
 	if err != nil {
 		return metrics, err
 	}

View File

@@ -69,7 +69,9 @@ func ConsumeRestoreCollections(
 		ictx = clues.Add(ctx,
 			"category", category,
 			"restore_location", clues.Hide(rcc.RestoreConfig.Location),
-			"resource_owner", clues.Hide(dc.FullPath().ResourceOwner()),
+			"resource_owners", clues.Hide(
+				path.ServiceResourcesToResources(
+					dc.FullPath().ServiceResources())),
 			"full_path", dc.FullPath())
 	)
@@ -219,9 +221,12 @@ func RestoreListCollection(
 	var (
 		metrics   = support.CollectionMetrics{}
 		directory = dc.FullPath()
-		siteID    = directory.ResourceOwner()
 		items     = dc.Items(ctx, errs)
 		el        = errs.Local()
+		// todo: pass in the resourceOwner as an idname.Provider
+		srs = directory.ServiceResources()
+		// take the last resource in srs, since that should be the data owner
+		protectedResource = srs[len(srs)-1].ProtectedResource
 	)

 	trace.Log(ctx, "m365:sharepoint:restoreListCollection", directory.String())
@@ -245,7 +250,7 @@ func RestoreListCollection(
 			ctx,
 			service,
 			itemData,
-			siteID,
+			protectedResource,
 			restoreContainerName)
 		if err != nil {
 			el.AddRecoverable(ctx, err)
@@ -292,7 +297,10 @@ func RestorePageCollection(
 	var (
 		metrics   = support.CollectionMetrics{}
 		directory = dc.FullPath()
-		siteID    = directory.ResourceOwner()
+		// todo: pass in the resourceOwner as an idname.Provider
+		srs = directory.ServiceResources()
+		// take the last resource in srs, since that should be the data owner
+		protectedResource = srs[len(srs)-1].ProtectedResource
 	)

 	trace.Log(ctx, "m365:sharepoint:restorePageCollection", directory.String())
@@ -325,7 +333,7 @@ func RestorePageCollection(
 			ctx,
 			service,
 			itemData,
-			siteID,
+			protectedResource,
 			restoreContainerName)
 		if err != nil {
 			el.AddRecoverable(ctx, err)

View File

@@ -1249,11 +1249,11 @@ func (suite *ControllerIntegrationSuite) TestRestoreAndBackup_largeMailAttachmen
 func (suite *ControllerIntegrationSuite) TestBackup_CreatesPrefixCollections() {
 	table := []struct {
 		name             string
 		resourceCat      resource.Category
 		selectorFunc     func(t *testing.T) selectors.Selector
-		service          path.ServiceType
+		metadataServices []path.ServiceType
 		categories       []string
 	}{
 		{
 			name: "Exchange",
@@ -1267,7 +1267,7 @@ func (suite *ControllerIntegrationSuite) TestBackup_CreatesPrefixCollections() {
 				return sel.Selector
 			},
-			service: path.ExchangeService,
+			metadataServices: []path.ServiceType{path.ExchangeMetadataService},
 			categories: []string{
 				path.EmailCategory.String(),
 				path.ContactsCategory.String(),
@@ -1283,7 +1283,7 @@ func (suite *ControllerIntegrationSuite) TestBackup_CreatesPrefixCollections() {
 				return sel.Selector
 			},
-			service: path.OneDriveService,
+			metadataServices: []path.ServiceType{path.OneDriveMetadataService},
 			categories: []string{
 				path.FilesCategory.String(),
 			},
@@ -1302,7 +1302,7 @@ func (suite *ControllerIntegrationSuite) TestBackup_CreatesPrefixCollections() {
 				return sel.Selector
 			},
-			service: path.SharePointService,
+			metadataServices: []path.ServiceType{path.SharePointMetadataService},
 			categories: []string{
 				path.LibrariesCategory.String(),
 				// not yet in use
@@ -1365,7 +1365,7 @@ func (suite *ControllerIntegrationSuite) TestBackup_CreatesPrefixCollections() {
 			// Ignore metadata collections.
 			fullPath := col.FullPath()
-			if fullPath.Service() != test.service {
+			if path.ServiceResourcesMatchServices(fullPath.ServiceResources(), test.metadataServices) {
 				continue
 			}

View File

@@ -5,13 +5,30 @@ import (
 	"github.com/alcionai/corso/src/pkg/path"
 )

-func IsMetadataFile(p path.Path) bool {
-	switch p.Service() {
+// IsMetadataFilePath checks whether the LAST service in the path
+// supports metadata file types and, if so, whether the item has
+// a meta suffix.
+func IsMetadataFilePath(p path.Path) bool {
+	return IsMetadataFile(
+		p.ServiceResources(),
+		p.Category(),
+		p.Item())
+}
+
+// IsMetadataFile accepts the ServiceResources, cat, and Item values from
+// a path (or equivalent representation) and returns true if the item
+// is a Metadata entry.
+func IsMetadataFile(
+	srs []path.ServiceResource,
+	cat path.CategoryType,
+	itemID string,
+) bool {
+	switch srs[len(srs)-1].Service {
 	case path.OneDriveService:
-		return metadata.HasMetaSuffix(p.Item())
+		return metadata.HasMetaSuffix(itemID)
 	case path.SharePointService:
-		return p.Category() == path.LibrariesCategory && metadata.HasMetaSuffix(p.Item())
+		return cat == path.LibrariesCategory && metadata.HasMetaSuffix(itemID)
 	default:
 		return false
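A hedged sketch of how the two helpers above divide the work, assuming the graph metadata import path that appears later in this diff; describeItem is illustrative only:

package example

import (
    "fmt"

    "github.com/alcionai/corso/src/internal/m365/graph/metadata"
    "github.com/alcionai/corso/src/pkg/path"
)

// describeItem: the path-based wrapper derives the service chain, category,
// and item from p, while the decomposed form suits callers that only hold
// the raw parts of a path.
func describeItem(p path.Path) {
    fmt.Println(metadata.IsMetadataFilePath(p))

    fmt.Println(metadata.IsMetadataFile(
        p.ServiceResources(),
        p.Category(),
        p.Item()))
}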

View File

@@ -1,7 +1,7 @@
 package metadata_test

 import (
-	"fmt"
+	"strings"
 	"testing"

 	"github.com/alcionai/clues"
@@ -18,7 +18,7 @@ import (
 type boolfAssertionFunc func(assert.TestingT, bool, string, ...any) bool

 type testCase struct {
-	service  path.ServiceType
+	srs      []path.ServiceResource
 	category path.CategoryType
 	expected boolfAssertionFunc
 }
@@ -39,40 +39,89 @@ var (
 	cases = []testCase{
 		{
-			service:  path.ExchangeService,
+			srs: []path.ServiceResource{{
+				Service:           path.ExchangeService,
+				ProtectedResource: user,
+			}},
 			category: path.EmailCategory,
 			expected: assert.Falsef,
 		},
 		{
-			service:  path.ExchangeService,
+			srs: []path.ServiceResource{{
+				Service:           path.ExchangeService,
+				ProtectedResource: user,
+			}},
 			category: path.ContactsCategory,
 			expected: assert.Falsef,
 		},
 		{
-			service:  path.ExchangeService,
+			srs: []path.ServiceResource{{
+				Service:           path.ExchangeService,
+				ProtectedResource: user,
+			}},
 			category: path.EventsCategory,
 			expected: assert.Falsef,
 		},
 		{
-			service:  path.OneDriveService,
+			srs: []path.ServiceResource{{
+				Service:           path.OneDriveService,
+				ProtectedResource: user,
+			}},
 			category: path.FilesCategory,
 			expected: assert.Truef,
 		},
 		{
-			service:  path.SharePointService,
+			srs: []path.ServiceResource{{
+				Service:           path.SharePointService,
+				ProtectedResource: user,
+			}},
 			category: path.LibrariesCategory,
 			expected: assert.Truef,
 		},
 		{
-			service:  path.SharePointService,
+			srs: []path.ServiceResource{{
+				Service:           path.SharePointService,
+				ProtectedResource: user,
+			}},
 			category: path.ListsCategory,
 			expected: assert.Falsef,
 		},
 		{
-			service:  path.SharePointService,
+			srs: []path.ServiceResource{{
+				Service:           path.SharePointService,
+				ProtectedResource: user,
+			}},
 			category: path.PagesCategory,
 			expected: assert.Falsef,
 		},
+		{
+			srs: []path.ServiceResource{
+				{
+					Service:           path.OneDriveService,
+					ProtectedResource: user,
+				},
+				{
+					Service:           path.ExchangeService,
+					ProtectedResource: user,
+				},
+			},
+			category: path.EventsCategory,
+			expected: assert.Falsef,
+		},
+		{
+			srs: []path.ServiceResource{
+				{
+					Service:           path.ExchangeService,
+					ProtectedResource: user,
+				},
+				{
+					Service:           path.OneDriveService,
+					ProtectedResource: user,
+				},
+			},
+			category: path.FilesCategory,
+			expected: assert.Truef,
+		},
 	}
 )
@@ -87,21 +136,26 @@ func TestMetadataUnitSuite(t *testing.T) {
 func (suite *MetadataUnitSuite) TestIsMetadataFile_Files_MetaSuffixes() {
 	for _, test := range cases {
 		for _, ext := range metaSuffixes {
-			suite.Run(fmt.Sprintf("%s %s %s", test.service, test.category, ext), func() {
+			name := []string{}
+
+			for _, sr := range test.srs {
+				name = append(name, sr.Service.String())
+			}
+
+			name = append(name, test.category.String(), ext)
+
+			suite.Run(strings.Join(name, " "), func() {
 				t := suite.T()

 				p, err := path.Build(
 					tenant,
-					[]path.ServiceResource{{
-						Service:           test.service,
-						ProtectedResource: user,
-					}},
+					test.srs,
 					test.category,
 					true,
 					"file"+ext)
 				require.NoError(t, err, clues.ToCore(err))

-				test.expected(t, metadata.IsMetadataFile(p), "extension %s", ext)
+				test.expected(t, metadata.IsMetadataFilePath(p), "extension %s", ext)
 			})
 		}
 	}
@@ -110,21 +164,26 @@ func (suite *MetadataUnitSuite) TestIsMetadataFile_Files_MetaSuffixes() {
 func (suite *MetadataUnitSuite) TestIsMetadataFile_Files_NotMetaSuffixes() {
 	for _, test := range cases {
 		for _, ext := range notMetaSuffixes {
-			suite.Run(fmt.Sprintf("%s %s %s", test.service, test.category, ext), func() {
+			name := []string{}
+
+			for _, sr := range test.srs {
+				name = append(name, sr.Service.String())
+			}
+
+			name = append(name, test.category.String(), ext)
+
+			suite.Run(strings.Join(name, " "), func() {
 				t := suite.T()

 				p, err := path.Build(
 					tenant,
-					[]path.ServiceResource{{
-						Service:           test.service,
-						ProtectedResource: user,
-					}},
+					test.srs,
 					test.category,
 					true,
 					"file"+ext)
 				require.NoError(t, err, clues.ToCore(err))

-				assert.Falsef(t, metadata.IsMetadataFile(p), "extension %s", ext)
+				assert.Falsef(t, metadata.IsMetadataFilePath(p), "extension %s", ext)
 			})
 		}
 	}
@@ -135,21 +194,26 @@ func (suite *MetadataUnitSuite) TestIsMetadataFile_Directories() {
 	for _, test := range cases {
 		for _, ext := range suffixes {
-			suite.Run(fmt.Sprintf("%s %s %s", test.service, test.category, ext), func() {
+			name := []string{}
+
+			for _, sr := range test.srs {
+				name = append(name, sr.Service.String())
+			}
+
+			name = append(name, test.category.String(), ext)
+
+			suite.Run(strings.Join(name, " "), func() {
 				t := suite.T()

 				p, err := path.Build(
 					tenant,
-					[]path.ServiceResource{{
-						Service:           test.service,
-						ProtectedResource: user,
-					}},
+					test.srs,
 					test.category,
 					false,
 					"file"+ext)
 				require.NoError(t, err, clues.ToCore(err))

-				assert.Falsef(t, metadata.IsMetadataFile(p), "extension %s", ext)
+				assert.Falsef(t, metadata.IsMetadataFilePath(p), "extension %s", ext)
 			})
 		}
 	}

View File

@@ -865,7 +865,6 @@ func compareItem(
 	t *testing.T,
 	colPath path.Path,
 	expected map[string][]byte,
-	service path.ServiceType,
 	category path.CategoryType,
 	item data.Stream,
 	mci m365Stub.ConfigInfo,
@@ -875,7 +874,11 @@ func compareItem(
 		assert.NotZero(t, mt.ModTime())
 	}

-	switch service {
+	// assume the last service in the path is the data owner
+	srs := colPath.ServiceResources()
+	lastService := srs[len(srs)-1].Service
+
+	switch lastService {
 	case path.ExchangeService:
 		switch category {
 		case path.EmailCategory:
@@ -900,7 +903,7 @@
 		return compareDriveItem(t, expected, item, mci, rootDir)

 	default:
-		assert.FailNowf(t, "unexpected service: %s", service.String())
+		assert.FailNowf(t, "unexpected service: %s", lastService.String())
 	}

 	return true
@@ -929,9 +932,12 @@ func checkHasCollections(
 		fp := g.FullPath()
 		loc := g.(data.LocationPather).LocationPath()

-		if fp.Service() == path.OneDriveService ||
-			(fp.Service() == path.SharePointService && fp.Category() == path.LibrariesCategory) {
+		// take the last service, since it should be the one owning data
+		srs := fp.ServiceResources()
+		service := srs[len(srs)-1].Service
+
+		if service == path.OneDriveService ||
+			(service == path.SharePointService && fp.Category() == path.LibrariesCategory) {
 			dp, err := path.ToDrivePath(fp)
 			if !assert.NoError(t, err, clues.ToCore(err)) {
 				continue
@@ -971,11 +977,12 @@ func checkCollections(
 	for _, returned := range got {
 		var (
 			hasItems        bool
-			service         = returned.FullPath().Service()
 			category        = returned.FullPath().Category()
 			expectedColData = expected[returned.FullPath().String()]
 			folders         = returned.FullPath().Elements()
 			rootDir         = folders[len(folders)-1] == mci.RestoreCfg.Location
+			srs             = returned.FullPath().ServiceResources()
+			lastService     = srs[len(srs)-1].Service
 		)

 		// Need to iterate through all items even if we don't expect to find a match
@@ -987,9 +994,9 @@ func checkCollections(
 			// is for actual pull items.
 			// TODO(ashmrtn): Should probably eventually check some data in metadata
 			// collections.
-			if service == path.ExchangeMetadataService ||
-				service == path.OneDriveMetadataService ||
-				service == path.SharePointMetadataService {
+			if lastService == path.ExchangeMetadataService ||
+				lastService == path.OneDriveMetadataService ||
+				lastService == path.SharePointMetadataService {
 				skipped++
 				continue
 			}
@@ -1005,7 +1012,6 @@ func checkCollections(
 				t,
 				returned.FullPath(),
 				expectedColData,
-				service,
 				category,
 				item,
 				mci,

View File

@@ -491,7 +491,9 @@ func (suite *BackupIntgSuite) TestMailFetch() {
 			require.NoError(t, err, clues.ToCore(err))

 			for _, c := range collections {
-				if c.FullPath().Service() == path.ExchangeMetadataService {
+				if path.ServiceResourcesMatchServices(
+					c.FullPath().ServiceResources(),
+					[]path.ServiceType{path.ExchangeMetadataService}) {
 					continue
 				}
@@ -577,7 +579,9 @@ func (suite *BackupIntgSuite) TestDelta() {
 	var metadata data.BackupCollection

 	for _, coll := range collections {
-		if coll.FullPath().Service() == path.ExchangeMetadataService {
+		if path.ServiceResourcesMatchServices(
+			coll.FullPath().ServiceResources(),
+			[]path.ServiceType{path.ExchangeMetadataService}) {
 			metadata = coll
 		}
 	}
@@ -611,7 +615,9 @@ func (suite *BackupIntgSuite) TestDelta() {
 	// Delta usage is commented out at the moment, anyway. So this is currently
 	// a sanity check that the minimum behavior won't break.
 	for _, coll := range collections {
-		if coll.FullPath().Service() != path.ExchangeMetadataService {
+		if !path.ServiceResourcesMatchServices(
+			coll.FullPath().ServiceResources(),
+			[]path.ServiceType{path.ExchangeMetadataService}) {
 			ec, ok := coll.(*Collection)
 			require.True(t, ok, "collection is *Collection")
 			assert.NotNil(t, ec)
@@ -666,7 +672,9 @@ func (suite *BackupIntgSuite) TestMailSerializationRegression() {
 			ctx, flush := tester.NewContext(t)
 			defer flush()

-			isMetadata := edc.FullPath().Service() == path.ExchangeMetadataService
+			isMetadata := path.ServiceResourcesMatchServices(
+				edc.FullPath().ServiceResources(),
+				[]path.ServiceType{path.ExchangeMetadataService})
 			streamChannel := edc.Items(ctx, fault.New(true))

 			// Verify that each message can be restored
@@ -744,7 +752,9 @@ func (suite *BackupIntgSuite) TestContactSerializationRegression() {
 			require.GreaterOrEqual(t, 2, len(edcs), "expected 1 <= num collections <= 2")

 			for _, edc := range edcs {
-				isMetadata := edc.FullPath().Service() == path.ExchangeMetadataService
+				isMetadata := path.ServiceResourcesMatchServices(
+					edc.FullPath().ServiceResources(),
+					[]path.ServiceType{path.ExchangeMetadataService})
 				count := 0

 				for stream := range edc.Items(ctx, fault.New(true)) {
@@ -874,7 +884,10 @@ func (suite *BackupIntgSuite) TestEventsSerializationRegression() {
 			for _, edc := range collections {
 				var isMetadata bool

-				if edc.FullPath().Service() != path.ExchangeMetadataService {
+				// FIXME: this doesn't seem right, it's saying "if not metadata, isMetadata = true"
+				if !path.ServiceResourcesMatchServices(
+					edc.FullPath().ServiceResources(),
+					[]path.ServiceType{path.ExchangeMetadataService}) {
 					isMetadata = true
 					assert.Equal(t, test.expected, edc.FullPath().Folder(false))
 				} else {
@@ -1140,7 +1153,9 @@ func (suite *CollectionPopulationSuite) TestPopulateCollections() {
 			deleteds, news, metadatas, doNotMerges := 0, 0, 0, 0

 			for _, c := range collections {
-				if c.FullPath().Service() == path.ExchangeMetadataService {
+				if path.ServiceResourcesMatchServices(
+					c.FullPath().ServiceResources(),
+					[]path.ServiceType{path.ExchangeMetadataService}) {
 					metadatas++
 					continue
 				}
@@ -1491,7 +1506,9 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_D
 					continue
 				}

-				if c.FullPath().Service() == path.ExchangeMetadataService {
+				if path.ServiceResourcesMatchServices(
+					c.FullPath().ServiceResources(),
+					[]path.ServiceType{path.ExchangeMetadataService}) {
 					metadatas++
 					checkMetadata(t, ctx, qp.Category, test.expectMetadata(t, qp.Category), c)
 					continue
@@ -1650,7 +1667,9 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_r
 			deleteds, news, metadatas, doNotMerges := 0, 0, 0, 0

 			for _, c := range collections {
-				if c.FullPath().Service() == path.ExchangeMetadataService {
+				if path.ServiceResourcesMatchServices(
+					c.FullPath().ServiceResources(),
+					[]path.ServiceType{path.ExchangeMetadataService}) {
 					metadatas++
 					continue
 				}
@@ -2082,7 +2101,9 @@ func (suite *CollectionPopulationSuite) TestFilterContainersAndFillCollections_i
 				require.NotNil(t, p)

-				if p.Service() == path.ExchangeMetadataService {
+				if path.ServiceResourcesMatchServices(
+					c.FullPath().ServiceResources(),
+					[]path.ServiceType{path.ExchangeMetadataService}) {
 					metadatas++
 					continue
 				}

View File

@@ -57,7 +57,9 @@ func ConsumeRestoreCollections(
 		ictx = clues.Add(ctx,
 			"category", category,
 			"restore_location", clues.Hide(rcc.RestoreConfig.Location),
-			"protected_resource", clues.Hide(dc.FullPath().ResourceOwner()),
+			"resource_owners", clues.Hide(
+				path.ServiceResourcesToResources(
+					dc.FullPath().ServiceResources())),
 			"full_path", dc.FullPath())
 	)

View File

@@ -61,7 +61,9 @@ func ConsumeRestoreCollections(
 		ictx = clues.Add(ctx,
 			"category", category,
 			"restore_location", clues.Hide(rcc.RestoreConfig.Location),
-			"resource_owner", clues.Hide(dc.FullPath().ResourceOwner()),
+			"resource_owners", clues.Hide(
+				path.ServiceResourcesToResources(
+					dc.FullPath().ServiceResources())),
 			"full_path", dc.FullPath())
 	)

View File

@@ -7,7 +7,7 @@ import (
 	"golang.org/x/exp/maps"

 	"github.com/alcionai/corso/src/internal/data"
-	"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
+	"github.com/alcionai/corso/src/internal/m365/graph/metadata"
 	"github.com/alcionai/corso/src/internal/m365/mock"
 	"github.com/alcionai/corso/src/internal/m365/resource"
 	exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
@@ -63,9 +63,11 @@ func GetCollectionsAndExpected(
 	for _, owner := range config.ResourceOwners {
 		numItems, kopiaItems, ownerCollections, userExpectedData, err := CollectionsForInfo(
-			config.Service,
 			config.Tenant,
-			owner,
+			[]path.ServiceResource{{
+				Service:           config.Service,
+				ProtectedResource: owner,
+			}},
 			config.RestoreCfg,
 			testCollections,
 			backupVersion)
@@ -128,9 +130,7 @@ func CollectionsForInfo(
 			baseExpected[info.Items[i].LookupKey] = info.Items[i].Data

 			// We do not count metadata files against item count
-			if backupVersion > 0 &&
-				(service == path.OneDriveService || service == path.SharePointService) &&
-				metadata.HasMetaSuffix(info.Items[i].Name) {
+			if backupVersion > 0 && metadata.IsMetadataFile(srs, info.Category, info.Items[i].Name) {
 				continue
 			}
@@ -165,9 +165,13 @@ func backupOutputPathFromRestore(
 	inputPath path.Path,
 ) (path.Path, error) {
 	base := []string{restoreCfg.Location}

+	srs := inputPath.ServiceResources()
+	// only the last service is checked, because that should be the service
+	// whose data is stored.
+	lastService := srs[len(srs)-1].Service
+
 	// OneDrive has leading information like the drive ID.
-	if inputPath.Service() == path.OneDriveService || inputPath.Service() == path.SharePointService {
+	if lastService == path.OneDriveService || lastService == path.SharePointService {
 		folders := inputPath.Folders()
 		base = append(append([]string{}, folders[:3]...), restoreCfg.Location)
@@ -176,14 +180,13 @@ func backupOutputPathFromRestore(
 		}
 	}

-	if inputPath.Service() == path.ExchangeService && inputPath.Category() == path.EmailCategory {
+	if lastService == path.ExchangeService && inputPath.Category() == path.EmailCategory {
 		base = append(base, inputPath.Folders()...)
 	}

 	return path.Build(
 		inputPath.Tenant(),
-		inputPath.ResourceOwner(),
-		inputPath.Service(),
+		inputPath.ServiceResources(),
 		inputPath.Category(),
 		false,
 		base...)
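The path.Build call above reflects the updated signature: tenant, a ServiceResource slice, category, an isItem flag, then trailing elements. A minimal sketch under that assumption; the tenant, site, and folder values are placeholders:

package example

import "github.com/alcionai/corso/src/pkg/path"

func buildLibraryFolderPath() (path.Path, error) {
    srs := []path.ServiceResource{{
        Service:           path.SharePointService,
        ProtectedResource: "site-id", // placeholder
    }}

    // false: the trailing elements name a folder, not an item.
    return path.Build(
        "tenant-id", // placeholder
        srs,
        path.LibrariesCategory,
        false,
        "documents", "reports")
}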

View File

@@ -566,7 +566,10 @@ func getNewPathRefs(
 	// TODO(ashmrtn): In the future we can remove this first check as we'll be
 	// able to assume we always have the location in the previous entry. We'll end
 	// up doing some extra parsing, but it will simplify this code.
-	if repoRef.Service() == path.ExchangeService {
+	//
+	// Currently safe to check only the 0th SR, since exchange had no subservices
+	// at the time of the locations addition.
+	if repoRef.ServiceResources()[0].Service == path.ExchangeService {
 		newPath, newLoc, err := dataFromBackup.GetNewPathRefs(
 			repoRef.ToBuilder(),
 			entry.Modified(),

View File

@@ -266,6 +266,7 @@ func makePath(t *testing.T, elements []string, isItem bool) path.Path {
 	return p
 }

+// FIXME: out of date, does not contain sharepoint support
 func makeDetailsEntry(
 	t *testing.T,
 	p path.Path,
@@ -290,7 +291,10 @@ func makeDetailsEntry(
 		Updated: updated,
 	}

-	switch p.Service() {
+	srs := p.ServiceResources()
+	lastService := srs[len(srs)-1].Service
+
+	switch lastService {
 	case path.ExchangeService:
 		if p.Category() != path.EmailCategory {
 			assert.FailNowf(
@@ -321,7 +325,7 @@ func makeDetailsEntry(
 		assert.FailNowf(
 			t,
 			"service %s not supported in helper function",
-			p.Service().String())
+			lastService.String())
 	}

 	return res
@@ -529,6 +533,23 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_ConsumeBackupDataCollections
 		fault.New(true))
 }

+// makeElements allows creation of repoRefs that wouldn't
+// pass paths package validators.
+func makeElements(
+	tenant string,
+	srs []path.ServiceResource,
+	cat path.CategoryType,
+	suffix ...string,
+) path.Elements {
+	elems := append(
+		path.Elements{tenant},
+		path.ServiceResourcesToElements(srs)...)
+
+	elems = append(elems, cat.String())
+	elems = append(elems, suffix...)
+
+	return elems
+}
+
 func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems() {
 	var (
 		tenant = "a-tenant"
@@ -709,17 +730,11 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 				DetailsModel: details.DetailsModel{
 					Entries: []details.Entry{
 						{
-							RepoRef: stdpath.Join(
-								append(
-									[]string{
-										itemPath1.Tenant(),
-										itemPath1.Service().String(),
-										itemPath1.ResourceOwner(),
-										path.UnknownCategory.String(),
-									},
-									itemPath1.Folders()...,
-								)...,
-							),
+							RepoRef: stdpath.Join(makeElements(
+								itemPath1.Tenant(),
+								itemPath1.ServiceResources(),
+								path.UnknownCategory,
+								itemPath1.Folders()...)...),
 							ItemInfo: details.ItemInfo{
 								OneDrive: &details.OneDriveInfo{
 									ItemType: details.OneDriveItem,
@@ -740,16 +755,13 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
 	res := newMockDetailsMergeInfoer()
 	p := makePath(
 		suite.T(),
-		[]string{
+		makeElements(
 			itemPath1.Tenant(),
-			path.OneDriveService.String(),
-			itemPath1.ResourceOwner(),
-			path.FilesCategory.String(),
+			itemPath1.ServiceResources(),
+			path.FilesCategory,
 			"personal",
-			"item1",
-		},
-		true,
-	)
+			"item1"),
+		true)

 	res.add(itemPath1, p, nil)
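As a quick illustration of what the makeElements test helper above assembles, assuming ServiceResourcesToElements expands each tuple into a service element followed by a resource element, as the path package change later in this diff indicates; all values are placeholders:

package example

import "github.com/alcionai/corso/src/pkg/path"

// exampleRepoRefElements mirrors the test usage above: tenant, then the
// expanded service/resource pairs, then the category string, then suffixes.
// Result shape: [tenant, service, resource, category, folders..., item].
func exampleRepoRefElements() path.Elements {
    srs := []path.ServiceResource{{
        Service:           path.OneDriveService,
        ProtectedResource: "user-id", // placeholder
    }}

    elems := append(
        path.Elements{"tenant-id"}, // placeholder
        path.ServiceResourcesToElements(srs)...)

    elems = append(elems, path.FilesCategory.String())
    elems = append(elems, "personal", "item1")

    return elems
}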

View File

@@ -198,8 +198,13 @@ func collect(
 	service path.ServiceType,
 	col Collectable,
 ) (data.BackupCollection, error) {
+	srs := []path.ServiceResource{{
+		Service:           service,
+		ProtectedResource: col.purpose,
+	}}
+
 	// construct the path of the container
-	p, err := path.Builder{}.ToStreamStorePath(tenantID, col.purpose, service, false)
+	p, err := path.Builder{}.ToStreamStorePath(tenantID, srs, false)
 	if err != nil {
 		return nil, clues.Stack(err).WithClues(ctx)
 	}
@@ -257,10 +262,15 @@ func read(
 	rer inject.RestoreProducer,
 	errs *fault.Bus,
 ) error {
+	srs := []path.ServiceResource{{
+		Service:           service,
+		ProtectedResource: col.purpose,
+	}}
+
 	// construct the path of the container
 	p, err := path.Builder{}.
 		Append(col.itemName).
-		ToStreamStorePath(tenantID, col.purpose, service, true)
+		ToStreamStorePath(tenantID, srs, true)
 	if err != nil {
 		return clues.Stack(err).WithClues(ctx)
 	}
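The streamstore change above keeps the same inputs but packages them as a ServiceResource slice before calling ToStreamStorePath. A sketch of the updated call shape, assuming the builder returns a path and an error as the collect hunk suggests; tenant and purpose values are placeholders:

package example

import "github.com/alcionai/corso/src/pkg/path"

// streamStoreContainerPath wraps the service and the details "purpose" owner
// into a single-entry ServiceResource slice, matching the collect/read hunks.
func streamStoreContainerPath(
    tenantID, purpose string,
    service path.ServiceType,
) (path.Path, error) {
    srs := []path.ServiceResource{{
        Service:           service,
        ProtectedResource: purpose,
    }}

    // false: this names the container, not a single stored item.
    return path.Builder{}.ToStreamStorePath(tenantID, srs, false)
}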

View File

@@ -31,7 +31,7 @@ func newDataLayerResourcePath(
 	cat CategoryType,
 	isItem bool,
 ) dataLayerResourcePath {
-	pfx := append([]string{tenant}, serviceResourcesToElements(srs)...)
+	pfx := append([]string{tenant}, ServiceResourcesToElements(srs)...)
 	pfx = append(pfx, cat.String())

 	return dataLayerResourcePath{

View File

@@ -2,6 +2,7 @@ package path

 import (
 	"github.com/alcionai/clues"
+	"golang.org/x/exp/slices"

 	"github.com/alcionai/corso/src/internal/common/str"
 	"github.com/alcionai/corso/src/internal/common/tform"
@@ -84,11 +85,13 @@ func ServiceResourcesToResources(srs []ServiceResource) []string {
 	return prs
 }

-// ---------------------------------------------------------------------------
-// Unexported Helpers
-// ---------------------------------------------------------------------------
+func ServiceResourcesMatchServices(srs []ServiceResource, sts []ServiceType) bool {
+	return slices.EqualFunc(srs, sts, func(sr ServiceResource, st ServiceType) bool {
+		return sr.Service == st
+	})
+}

-func serviceResourcesToElements(srs []ServiceResource) Elements {
+func ServiceResourcesToElements(srs []ServiceResource) Elements {
 	es := make(Elements, 0, len(srs)*2)

 	for _, tuple := range srs {
@@ -99,6 +102,10 @@ func serviceResourcesToElements(srs []ServiceResource) Elements {
 	return es
 }

+// ---------------------------------------------------------------------------
+// Unexported Helpers
+// ---------------------------------------------------------------------------
+
 // elementsToServiceResources turns as many pairs of elems as possible
 // into ServiceResource tuples. Elems must begin with a service, but
 // may contain more entries than there are service/resource pairs.
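Many call sites in this commit use the new exported helper to detect metadata collections. A hedged usage sketch; note that slices.EqualFunc requires equal lengths, so a multi-service chain never matches a single-entry expectation:

package example

import "github.com/alcionai/corso/src/pkg/path"

// isExchangeMetadata mirrors the updated call sites: a path matches only
// when it holds exactly one service/resource pair and that pair's service
// is ExchangeMetadataService.
func isExchangeMetadata(fullPath path.Path) bool {
    return path.ServiceResourcesMatchServices(
        fullPath.ServiceResources(),
        []path.ServiceType{path.ExchangeMetadataService})
}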

View File

@@ -157,7 +157,7 @@ func (suite *ServiceResourceUnitSuite) TestServiceResourceToElements() {
 		suite.Run(test.name, func() {
 			t := suite.T()

-			result := serviceResourcesToElements(test.srs)
+			result := ServiceResourcesToElements(test.srs)

 			// not ElementsMatch, order matters
 			assert.Equal(t, test.expect, result)