migrate onedrive using prefix collection (#3122)

#### Does this PR need a docs update or release note?
- [x] 🕐 Yes, but in a later PR

#### Type of change
- [x] 🌻 Feature

#### Issue(s)
* #2825

#### Test Plan
- [x] 💪 Manual
- [x] ⚡ Unit test
- [x] 💚 E2E

This commit is contained in:
parent f5a4c3c0ba
commit 62daf10213
@@ -13,7 +13,7 @@ import (
 	"github.com/alcionai/corso/src/cli/options"
 	. "github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
-	"github.com/alcionai/corso/src/internal/common"
+	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/backup"

@@ -198,7 +198,7 @@ func runBackups(
 	r repository.Repository,
 	serviceName, resourceOwnerType string,
 	selectorSet []selectors.Selector,
-	ins common.IDNameSwapper,
+	ins idname.Cacher,
 ) error {
 	var (
 		bIDs []string
@@ -18,7 +18,7 @@ import (
 	"github.com/alcionai/corso/src/cli/config"
 	"github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
-	"github.com/alcionai/corso/src/internal/common"
+	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/connector/exchange"
 	"github.com/alcionai/corso/src/internal/operations"
 	"github.com/alcionai/corso/src/internal/tester"

@@ -257,12 +257,7 @@ func (suite *PreparedBackupExchangeE2ESuite) SetupSuite() {

 	var (
 		users = []string{suite.m365UserID}
-		idToName = map[string]string{suite.m365UserID: suite.m365UserID}
-		nameToID = map[string]string{suite.m365UserID: suite.m365UserID}
-		ins      = common.IDsNames{
-			IDToName: idToName,
-			NameToID: nameToID,
-		}
+		ins   = idname.NewCache(map[string]string{suite.m365UserID: suite.m365UserID})
 	)

 	for _, set := range []path.CategoryType{email, contacts, events} {
@@ -16,7 +16,7 @@ import (
 	"github.com/alcionai/corso/src/cli/config"
 	"github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
-	"github.com/alcionai/corso/src/internal/common"
+	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/operations"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/account"

@@ -171,12 +171,7 @@ func (suite *BackupDeleteOneDriveE2ESuite) SetupSuite() {
 	var (
 		m365UserID = tester.M365UserID(t)
 		users      = []string{m365UserID}
-		idToName   = map[string]string{m365UserID: m365UserID}
-		nameToID   = map[string]string{m365UserID: m365UserID}
-		ins        = common.IDsNames{
-			IDToName: idToName,
-			NameToID: nameToID,
-		}
+		ins        = idname.NewCache(map[string]string{m365UserID: m365UserID})
 	)

 	// some tests require an existing backup
@@ -12,7 +12,7 @@ import (
 	"github.com/alcionai/corso/src/cli/options"
 	. "github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
-	"github.com/alcionai/corso/src/internal/common"
+	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/pkg/backup/details"
 	"github.com/alcionai/corso/src/pkg/fault"

@@ -203,7 +203,7 @@ func validateSharePointBackupCreateFlags(sites, weburls, cats []string) error {
 // TODO: users might specify a data type, this only supports AllData().
 func sharePointBackupCreateSelectors(
 	ctx context.Context,
-	ins common.IDNameSwapper,
+	ins idname.Cacher,
 	sites, weburls, cats []string,
 ) (*selectors.SharePointBackup, error) {
 	if len(sites) == 0 && len(weburls) == 0 {

@@ -223,7 +223,7 @@ func sharePointBackupCreateSelectors(
 	return addCategories(sel, cats), nil
 }

-func includeAllSitesWithCategories(ins common.IDNameSwapper, categories []string) *selectors.SharePointBackup {
+func includeAllSitesWithCategories(ins idname.Cacher, categories []string) *selectors.SharePointBackup {
 	return addCategories(selectors.NewSharePointBackup(ins.IDs()), categories)
 }
@@ -16,7 +16,7 @@ import (
 	"github.com/alcionai/corso/src/cli/config"
 	"github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
-	"github.com/alcionai/corso/src/internal/common"
+	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/operations"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/account"

@@ -135,12 +135,7 @@ func (suite *BackupDeleteSharePointE2ESuite) SetupSuite() {
 	var (
 		m365SiteID = tester.M365SiteID(t)
 		sites      = []string{m365SiteID}
-		idToName   = map[string]string{m365SiteID: m365SiteID}
-		nameToID   = map[string]string{m365SiteID: m365SiteID}
-		ins        = common.IDsNames{
-			IDToName: idToName,
-			NameToID: nameToID,
-		}
+		ins        = idname.NewCache(map[string]string{m365SiteID: m365SiteID})
 	)

 	// some tests require an existing backup
@@ -12,7 +12,7 @@ import (
 	"github.com/alcionai/corso/src/cli/options"
 	"github.com/alcionai/corso/src/cli/utils"
 	"github.com/alcionai/corso/src/cli/utils/testdata"
-	"github.com/alcionai/corso/src/internal/common"
+	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/tester"
 	"github.com/alcionai/corso/src/pkg/selectors"
 )

@@ -156,10 +156,7 @@ func (suite *SharePointUnitSuite) TestSharePointBackupCreateSelectors() {
 	)

 	var (
-		ins = common.IDsNames{
-			IDToName: map[string]string{id1: url1, id2: url2},
-			NameToID: map[string]string{url1: id1, url2: id2},
-		}
+		ins     = idname.NewCache(map[string]string{id1: url1, id2: url2})
 		bothIDs = []string{id1, id2}
 	)
@@ -13,7 +13,7 @@ import (
 	"github.com/alcionai/corso/src/cli"
 	"github.com/alcionai/corso/src/cli/config"
 	"github.com/alcionai/corso/src/cli/utils"
-	"github.com/alcionai/corso/src/internal/common"
+	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/connector/exchange"
 	"github.com/alcionai/corso/src/internal/operations"
 	"github.com/alcionai/corso/src/internal/tester"

@@ -78,12 +78,7 @@ func (suite *RestoreExchangeE2ESuite) SetupSuite() {

 	var (
 		users = []string{suite.m365UserID}
-		idToName = map[string]string{suite.m365UserID: suite.m365UserID}
-		nameToID = map[string]string{suite.m365UserID: suite.m365UserID}
-		ins      = common.IDsNames{
-			IDToName: idToName,
-			NameToID: nameToID,
-		}
+		ins   = idname.NewCache(map[string]string{suite.m365UserID: suite.m365UserID})
 	)

 	// init the repo first
@@ -1,51 +0,0 @@ (file deleted)
-package common
-
-import (
-	"strings"
-
-	"golang.org/x/exp/maps"
-)
-
-type IDNamer interface {
-	// the canonical id of the thing, generated and usable
-	// by whichever system has ownership of it.
-	ID() string
-	// the human-readable name of the thing.
-	Name() string
-}
-
-type IDNameSwapper interface {
-	IDOf(name string) (string, bool)
-	NameOf(id string) (string, bool)
-	IDs() []string
-	Names() []string
-}
-
-var _ IDNameSwapper = &IDsNames{}
-
-type IDsNames struct {
-	IDToName map[string]string
-	NameToID map[string]string
-}
-
-// IDOf returns the id associated with the given name.
-func (in IDsNames) IDOf(name string) (string, bool) {
-	id, ok := in.NameToID[strings.ToLower(name)]
-	return id, ok
-}
-
-// NameOf returns the name associated with the given id.
-func (in IDsNames) NameOf(id string) (string, bool) {
-	name, ok := in.IDToName[strings.ToLower(id)]
-	return name, ok
-}
-
-// IDs returns all known ids.
-func (in IDsNames) IDs() []string {
-	return maps.Keys(in.IDToName)
-}
-
-// Names returns all known names.
-func (in IDsNames) Names() []string {
-	return maps.Keys(in.NameToID)
-}
src/internal/common/idname/idname.go (new file, 107 lines)

package idname

import (
	"strings"

	"golang.org/x/exp/maps"
)

// Provider is a tuple containing an ID and a Name. Names are
// assumed to be human-displayable versions of system IDs.
// Providers should always be populated, while a nil value is
// likely an error. Compliant structs should provide both a name
// and an ID, never just one. Values are not validated, so both
// values being empty is an allowed condition, but the assumption
// is that downstream consumers will have problems as a result.
type Provider interface {
	// ID returns the canonical id of the thing, generated and
	// usable by whichever system has ownership of it.
	ID() string
	// Name returns the human-readable name of the thing.
	Name() string
}

var _ Provider = &is{}

type is struct {
	id   string
	name string
}

func (is is) ID() string   { return is.id }
func (is is) Name() string { return is.name }

type Cacher interface {
	IDOf(name string) (string, bool)
	NameOf(id string) (string, bool)
	IDs() []string
	Names() []string
	ProviderForID(id string) Provider
	ProviderForName(id string) Provider
}

var _ Cacher = &cache{}

type cache struct {
	idToName map[string]string
	nameToID map[string]string
}

func NewCache(idToName map[string]string) cache {
	nti := make(map[string]string, len(idToName))

	for id, name := range idToName {
		nti[name] = id
	}

	return cache{
		idToName: idToName,
		nameToID: nti,
	}
}

// IDOf returns the id associated with the given name.
func (c cache) IDOf(name string) (string, bool) {
	id, ok := c.nameToID[strings.ToLower(name)]
	return id, ok
}

// NameOf returns the name associated with the given id.
func (c cache) NameOf(id string) (string, bool) {
	name, ok := c.idToName[strings.ToLower(id)]
	return name, ok
}

// IDs returns all known ids.
func (c cache) IDs() []string {
	return maps.Keys(c.idToName)
}

// Names returns all known names.
func (c cache) Names() []string {
	return maps.Keys(c.nameToID)
}

func (c cache) ProviderForID(id string) Provider {
	n, ok := c.NameOf(id)
	if !ok {
		return &is{}
	}

	return &is{
		id:   id,
		name: n,
	}
}

func (c cache) ProviderForName(name string) Provider {
	i, ok := c.IDOf(name)
	if !ok {
		return &is{}
	}

	return &is{
		id:   i,
		name: name,
	}
}
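For orientation, a minimal usage sketch of the new cache, based only on the API above (the owner ID and name values here are placeholders, not taken from the change):

```go
package main

import (
	"fmt"

	"github.com/alcionai/corso/src/internal/common/idname"
)

func main() {
	// NewCache takes an id->name map and derives the reverse name->id map.
	ins := idname.NewCache(map[string]string{"owner-id-123": "owner@example.com"})

	// Lookups resolve in either direction.
	if name, ok := ins.NameOf("owner-id-123"); ok {
		fmt.Println("name:", name)
	}

	// ProviderForID packages a single (ID, Name) pair, which is the shape
	// ProduceBackupCollections now takes for its resource owner argument.
	owner := ins.ProviderForID("owner-id-123")
	fmt.Println(owner.ID(), owner.Name())
}
```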
src/internal/common/idname/mock/mock.go (new file, 84 lines)

package mock

import (
	"strings"

	"golang.org/x/exp/maps"

	"github.com/alcionai/corso/src/internal/common/idname"
)

var _ idname.Provider = &in{}

func NewProvider(id, name string) *in {
	return &in{
		id:   id,
		name: name,
	}
}

type in struct {
	id   string
	name string
}

func (i in) ID() string   { return i.id }
func (i in) Name() string { return i.name }

type Cache struct {
	IDToName map[string]string
	NameToID map[string]string
}

func NewCache(itn, nti map[string]string) Cache {
	return Cache{
		IDToName: itn,
		NameToID: nti,
	}
}

// IDOf returns the id associated with the given name.
func (c Cache) IDOf(name string) (string, bool) {
	id, ok := c.NameToID[strings.ToLower(name)]
	return id, ok
}

// NameOf returns the name associated with the given id.
func (c Cache) NameOf(id string) (string, bool) {
	name, ok := c.IDToName[strings.ToLower(id)]
	return name, ok
}

// IDs returns all known ids.
func (c Cache) IDs() []string {
	return maps.Keys(c.IDToName)
}

// Names returns all known names.
func (c Cache) Names() []string {
	return maps.Keys(c.NameToID)
}

func (c Cache) ProviderForID(id string) idname.Provider {
	n, ok := c.NameOf(id)
	if !ok {
		return nil
	}

	return &in{
		id:   id,
		name: n,
	}
}

func (c Cache) ProviderForName(name string) idname.Provider {
	i, ok := c.IDOf(name)
	if !ok {
		return nil
	}

	return &in{
		id:   i,
		name: name,
	}
}
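In the updated tests these mocks stand in for the hand-built common.IDsNames literals; a small self-contained sketch under that assumption (the id/name strings and the test name are placeholders):

```go
package example_test

import (
	"testing"

	inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
)

func TestOwnerFixtures(t *testing.T) {
	// A single (id, name) pair, used wherever an idname.Provider is expected.
	owner := inMock.NewProvider("user-id", "user-name")
	if owner.ID() != "user-id" || owner.Name() != "user-name" {
		t.Fatal("unexpected provider values")
	}

	// A full lookup table (id->name, name->id), used wherever an
	// idname.Cacher is expected, e.g. GraphConnector.IDNameLookup.
	lookup := inMock.NewCache(
		map[string]string{"user-id": "user-name"},
		map[string]string{"user-name": "user-id"})

	if name, ok := lookup.NameOf("user-id"); !ok || name != "user-name" {
		t.Fatal("unexpected cache lookup")
	}
}
```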
@@ -6,7 +6,7 @@ import (

 	"github.com/alcionai/clues"

-	"github.com/alcionai/corso/src/internal/common"
+	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/connector/discovery"
 	"github.com/alcionai/corso/src/internal/connector/exchange"
 	"github.com/alcionai/corso/src/internal/connector/graph"

@@ -34,9 +34,10 @@ import (
 // prior history (ie, incrementals) and run a full backup.
 func (gc *GraphConnector) ProduceBackupCollections(
 	ctx context.Context,
-	owner common.IDNamer,
+	owner idname.Provider,
 	sels selectors.Selector,
 	metadata []data.RestoreCollection,
+	lastBackupVersion int,
 	ctrlOpts control.Options,
 	errs *fault.Bus,
 ) ([]data.BackupCollection, map[string]map[string]struct{}, error) {

@@ -103,6 +104,7 @@ func (gc *GraphConnector) ProduceBackupCollections(
 			sels,
 			sels,
 			metadata,
+			lastBackupVersion,
 			gc.credentials.AzureTenantID,
 			gc.itemClient,
 			gc.Service,
@@ -10,10 +10,12 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"

+	inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
 	"github.com/alcionai/corso/src/internal/connector/exchange"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/sharepoint"
 	"github.com/alcionai/corso/src/internal/tester"
+	"github.com/alcionai/corso/src/internal/version"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/fault"
 	"github.com/alcionai/corso/src/pkg/path"

@@ -208,6 +210,7 @@ func (suite *DataCollectionIntgSuite) TestDataCollections_invalidResourceOwner()
 				test.getSelector(t),
 				test.getSelector(t),
 				nil,
+				version.NoBackup,
 				control.Defaults(),
 				fault.New(true))
 			assert.Error(t, err, clues.ToCore(err))

@@ -342,9 +345,10 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Libraries() {

 	cols, excludes, err := gc.ProduceBackupCollections(
 		ctx,
-		sel.Selector,
+		inMock.NewProvider(id, name),
 		sel.Selector,
 		nil,
+		version.NoBackup,
 		control.Defaults(),
 		fault.New(true))
 	require.NoError(t, err, clues.ToCore(err))

@@ -386,9 +390,10 @@ func (suite *SPCollectionIntgSuite) TestCreateSharePointCollection_Lists() {

 	cols, excludes, err := gc.ProduceBackupCollections(
 		ctx,
-		sel.Selector,
+		inMock.NewProvider(id, name),
 		sel.Selector,
 		nil,
+		version.NoBackup,
 		control.Defaults(),
 		fault.New(true))
 	require.NoError(t, err, clues.ToCore(err))
@@ -6,7 +6,7 @@ import (

 	"github.com/alcionai/clues"

-	"github.com/alcionai/corso/src/internal/common"
+	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/connector/exchange/api"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/support"

@@ -163,7 +163,7 @@ func parseMetadataCollections(
 // Add iota to this call -> mail, contacts, calendar, etc.
 func DataCollections(
 	ctx context.Context,
-	user common.IDNamer,
+	user idname.Provider,
 	selector selectors.Selector,
 	metadata []data.RestoreCollection,
 	acct account.M365Config,

@@ -214,6 +214,7 @@ func DataCollections(
 	if len(collections) > 0 {
 		baseCols, err := graph.BaseCollections(
 			ctx,
+			collections,
 			acct.AzureTenantID,
 			user.ID(),
 			path.ExchangeService,

@@ -249,7 +250,7 @@ func getterByType(ac api.Client, category path.CategoryType) (addedAndRemovedIte
 func createCollections(
 	ctx context.Context,
 	creds account.M365Config,
-	user common.IDNamer,
+	user idname.Provider,
 	scope selectors.ExchangeScope,
 	dps DeltaPaths,
 	ctrlOpts control.Options,
@@ -10,6 +10,7 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"

+	inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/connector/exchange/api"
 	"github.com/alcionai/corso/src/internal/connector/graph"

@@ -239,7 +240,6 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
 		userID    = tester.M365UserID(suite.T())
 		users     = []string{userID}
 		acct, err = tester.NewM365Account(suite.T()).M365Config()
-		ss        = selectors.Selector{}.SetDiscreteOwnerIDName(userID, userID)
 	)

 	require.NoError(suite.T(), err, clues.ToCore(err))

@@ -268,7 +268,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailFetch() {
 			collections, err := createCollections(
 				ctx,
 				acct,
-				ss,
+				inMock.NewProvider(userID, userID),
 				test.scope,
 				DeltaPaths{},
 				control.Defaults(),

@@ -300,7 +300,6 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
 		userID    = tester.M365UserID(suite.T())
 		users     = []string{userID}
 		acct, err = tester.NewM365Account(suite.T()).M365Config()
-		ss        = selectors.Selector{}.SetDiscreteOwnerIDName(userID, userID)
 	)

 	require.NoError(suite.T(), err, clues.ToCore(err))

@@ -339,7 +338,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
 			collections, err := createCollections(
 				ctx,
 				acct,
-				ss,
+				inMock.NewProvider(userID, userID),
 				test.scope,
 				DeltaPaths{},
 				control.Defaults(),

@@ -370,7 +369,7 @@ func (suite *DataCollectionsIntegrationSuite) TestDelta() {
 			collections, err = createCollections(
 				ctx,
 				acct,
-				ss,
+				inMock.NewProvider(userID, userID),
 				test.scope,
 				dps,
 				control.Defaults(),

@@ -405,7 +404,6 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
 		t     = suite.T()
 		wg    sync.WaitGroup
 		users = []string{suite.user}
-		ss    = selectors.Selector{}.SetDiscreteOwnerIDName(suite.user, suite.user)
 	)

 	acct, err := tester.NewM365Account(t).M365Config()

@@ -417,7 +415,7 @@ func (suite *DataCollectionsIntegrationSuite) TestMailSerializationRegression()
 	collections, err := createCollections(
 		ctx,
 		acct,
-		ss,
+		inMock.NewProvider(suite.user, suite.user),
 		sel.Scopes()[0],
 		DeltaPaths{},
 		control.Defaults(),

@@ -467,7 +465,6 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
 	require.NoError(suite.T(), err, clues.ToCore(err))

 	users := []string{suite.user}
-	ss := selectors.Selector{}.SetDiscreteOwnerIDName(suite.user, suite.user)

 	tests := []struct {
 		name string

@@ -491,7 +488,7 @@ func (suite *DataCollectionsIntegrationSuite) TestContactSerializationRegression
 			edcs, err := createCollections(
 				ctx,
 				acct,
-				ss,
+				inMock.NewProvider(suite.user, suite.user),
 				test.scope,
 				DeltaPaths{},
 				control.Defaults(),

@@ -556,8 +553,6 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
 		bdayID string
 	)

-	ss := selectors.Selector{}.SetDiscreteOwnerIDName(suite.user, suite.user)
-
 	fn := func(gcf graph.CacheFolder) error {
 		if ptr.Val(gcf.GetDisplayName()) == DefaultCalendar {
 			calID = ptr.Val(gcf.GetId())

@@ -605,7 +600,7 @@ func (suite *DataCollectionsIntegrationSuite) TestEventsSerializationRegression(
 			collections, err := createCollections(
 				ctx,
 				acct,
-				ss,
+				inMock.NewProvider(suite.user, suite.user),
 				test.scope,
 				DeltaPaths{},
 				control.Defaults(),
@@ -9,6 +9,7 @@ import (
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"

+	inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
 	"github.com/alcionai/corso/src/internal/common/ptr"
 	"github.com/alcionai/corso/src/internal/connector/exchange/api"
 	"github.com/alcionai/corso/src/internal/connector/graph"

@@ -117,12 +118,10 @@ func (suite *ServiceIteratorsSuite) SetupSuite() {
 }

 func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections() {
-	ss := selectors.Selector{}.SetDiscreteOwnerIDName("user_id", "user_id")
-
 	var (
 		qp = graph.QueryParams{
 			Category:      path.EmailCategory, // doesn't matter which one we use.
-			ResourceOwner: ss,
+			ResourceOwner: inMock.NewProvider("user_id", "user_name"),
 			Credentials:   suite.creds,
 		}
 		statusUpdater = func(*support.ConnectorOperationStatus) {}

@@ -437,12 +436,10 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea
 	ctx, flush := tester.NewContext()
 	defer flush()

-	ss := selectors.Selector{}.SetDiscreteOwnerIDName("user_id", "user_id")
-
 	var (
 		qp = graph.QueryParams{
 			Category:      path.EmailCategory, // doesn't matter which one we use.
-			ResourceOwner: ss,
+			ResourceOwner: inMock.NewProvider("user_id", "user_name"),
 			Credentials:   suite.creds,
 		}
 		statusUpdater = func(*support.ConnectorOperationStatus) {}

@@ -458,7 +455,7 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea
 	)

 	require.Equal(t, "user_id", qp.ResourceOwner.ID(), qp.ResourceOwner)
-	require.Equal(t, "user_id", qp.ResourceOwner.Name(), qp.ResourceOwner)
+	require.Equal(t, "user_name", qp.ResourceOwner.Name(), qp.ResourceOwner)

 	collections := map[string]data.BackupCollection{}

@@ -520,15 +517,13 @@ func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_repea
 }

 func (suite *ServiceIteratorsSuite) TestFilterContainersAndFillCollections_incrementals() {
-	ss := selectors.Selector{}.SetDiscreteOwnerIDName("user_id", "user_id")
-
 	var (
 		userID   = "user_id"
 		tenantID = suite.creds.AzureTenantID
 		cat      = path.EmailCategory // doesn't matter which one we use,
 		qp       = graph.QueryParams{
 			Category:      cat,
-			ResourceOwner: ss,
+			ResourceOwner: inMock.NewProvider("user_id", "user_name"),
 			Credentials:   suite.creds,
 		}
 		statusUpdater = func(*support.ConnectorOperationStatus) {}
@@ -46,8 +46,13 @@ func (c emptyCollection) DoNotMergeItems() bool {
 	return false
 }

+// ---------------------------------------------------------------------------
+// base collections
+// ---------------------------------------------------------------------------
+
 func BaseCollections(
 	ctx context.Context,
+	colls []data.BackupCollection,
 	tenant, rOwner string,
 	service path.ServiceType,
 	categories map[path.CategoryType]struct{},

@@ -58,12 +63,20 @@ func BaseCollections(
 		res     = []data.BackupCollection{}
 		el      = errs.Local()
 		lastErr error
+		collKeys = map[string]struct{}{}
 	)

+	// won't catch deleted collections, since they have no FullPath
+	for _, c := range colls {
+		if c.FullPath() != nil {
+			collKeys[c.FullPath().String()] = struct{}{}
+		}
+	}
+
 	for cat := range categories {
 		ictx := clues.Add(ctx, "base_service", service, "base_category", cat)

-		p, err := path.Build(tenant, rOwner, service, cat, false, "tmp")
+		p, err := path.ServicePrefix(tenant, rOwner, service, cat)
 		if err != nil {
 			// Shouldn't happen.
 			err = clues.Wrap(err, "making path").WithClues(ictx)

@@ -73,19 +86,92 @@ func BaseCollections(
 			continue
 		}

-		// Pop off the last path element because we just want the prefix.
-		p, err = p.Dir()
-		if err != nil {
-			// Shouldn't happen.
-			err = clues.Wrap(err, "getting base prefix").WithClues(ictx)
-			el.AddRecoverable(err)
-			lastErr = err
-
-			continue
-		}
-
-		res = append(res, emptyCollection{p: p, su: su})
+		// only add this collection if it doesn't already exist in the set.
+		if _, ok := collKeys[p.String()]; !ok {
+			res = append(res, emptyCollection{p: p, su: su})
+		}
 	}

 	return res, lastErr
 }
+
+// ---------------------------------------------------------------------------
+// prefix migration
+// ---------------------------------------------------------------------------
+
+var _ data.BackupCollection = prefixCollection{}
+
+// TODO: move this out of graph. /data would be a much better owner
+// for a generic struct like this. However, support.StatusUpdater makes
+// it difficult to extract from this package in a generic way.
+type prefixCollection struct {
+	full, prev path.Path
+	su         support.StatusUpdater
+	state      data.CollectionState
+}
+
+func (c prefixCollection) Items(ctx context.Context, _ *fault.Bus) <-chan data.Stream {
+	res := make(chan data.Stream)
+	close(res)
+
+	s := support.CreateStatus(ctx, support.Backup, 0, support.CollectionMetrics{}, "")
+	c.su(s)
+
+	return res
+}
+
+func (c prefixCollection) FullPath() path.Path {
+	return c.full
+}
+
+func (c prefixCollection) PreviousPath() path.Path {
+	return c.prev
+}
+
+func (c prefixCollection) State() data.CollectionState {
+	return c.state
+}
+
+func (c prefixCollection) DoNotMergeItems() bool {
+	return false
+}
+
+// Creates a new collection that only handles prefix pathing.
+func NewPrefixCollection(prev, full path.Path, su support.StatusUpdater) (*prefixCollection, error) {
+	if prev != nil {
+		if len(prev.Item()) > 0 {
+			return nil, clues.New("prefix collection previous path contains an item")
+		}
+
+		if len(prev.Folders()) > 0 {
+			return nil, clues.New("prefix collection previous path contains folders")
+		}
+	}
+
+	if full != nil {
+		if len(full.Item()) > 0 {
+			return nil, clues.New("prefix collection full path contains an item")
+		}
+
+		if len(full.Folders()) > 0 {
+			return nil, clues.New("prefix collection full path contains folders")
+		}
+	}
+
+	pc := &prefixCollection{
+		prev:  prev,
+		full:  full,
+		su:    su,
+		state: data.StateOf(prev, full),
+	}
+
+	if pc.state == data.DeletedState {
+		return nil, clues.New("collection attempted to delete prefix")
+	}
+
+	if pc.state == data.NewState {
+		return nil, clues.New("collection attempted to create a new prefix")
+	}
+
+	return pc, nil
+}
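A hedged sketch of the intended call shape for the new constructor, consistent with the unit test that follows (the tenant and owner IDs, and the wrapper function itself, are placeholders for illustration):

```go
package example

import (
	"github.com/alcionai/corso/src/internal/connector/graph"
	"github.com/alcionai/corso/src/internal/connector/support"
	"github.com/alcionai/corso/src/internal/data"
	"github.com/alcionai/corso/src/pkg/path"
)

// prefixMigrationCollection pairs the previous and current service-prefix
// paths for a resource owner so the backup consumer records the move of the
// whole prefix without merging any items.
func prefixMigrationCollection(su support.StatusUpdater) (data.BackupCollection, error) {
	prev, err := path.ServicePrefix("tenant-id", "old-owner-id", path.OneDriveService, path.FilesCategory)
	if err != nil {
		return nil, err
	}

	full, err := path.ServicePrefix("tenant-id", "new-owner-id", path.OneDriveService, path.FilesCategory)
	if err != nil {
		return nil, err
	}

	// NewPrefixCollection rejects paths that carry folders or items, and
	// rejects new/deleted prefix states.
	pc, err := graph.NewPrefixCollection(prev, full, su)
	if err != nil {
		return nil, err
	}

	return pc, nil
}
```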
src/internal/connector/graph/collections_test.go (new file, 100 lines)

package graph

import (
	"testing"

	"github.com/alcionai/clues"
	"github.com/stretchr/testify/require"
	"github.com/stretchr/testify/suite"

	"github.com/alcionai/corso/src/internal/tester"
	"github.com/alcionai/corso/src/pkg/path"
)

type CollectionsUnitSuite struct {
	tester.Suite
}

func TestCollectionsUnitSuite(t *testing.T) {
	suite.Run(t, &CollectionsUnitSuite{Suite: tester.NewUnitSuite(t)})
}

func (suite *CollectionsUnitSuite) TestNewPrefixCollection() {
	t := suite.T()
	serv := path.OneDriveService
	cat := path.FilesCategory

	p1, err := path.ServicePrefix("t", "ro1", serv, cat)
	require.NoError(t, err, clues.ToCore(err))

	p2, err := path.ServicePrefix("t", "ro2", serv, cat)
	require.NoError(t, err, clues.ToCore(err))

	items, err := path.Build("t", "ro", serv, cat, true, "fld", "itm")
	require.NoError(t, err, clues.ToCore(err))

	folders, err := path.Build("t", "ro", serv, cat, false, "fld")
	require.NoError(t, err, clues.ToCore(err))

	table := []struct {
		name      string
		prev      path.Path
		full      path.Path
		expectErr require.ErrorAssertionFunc
	}{
		{
			name:      "not moved",
			prev:      p1,
			full:      p1,
			expectErr: require.NoError,
		},
		{
			name:      "moved",
			prev:      p1,
			full:      p2,
			expectErr: require.NoError,
		},
		{
			name:      "deleted",
			prev:      p1,
			full:      nil,
			expectErr: require.Error,
		},
		{
			name:      "new",
			prev:      nil,
			full:      p2,
			expectErr: require.Error,
		},
		{
			name:      "prev has items",
			prev:      items,
			full:      p1,
			expectErr: require.Error,
		},
		{
			name:      "prev has folders",
			prev:      folders,
			full:      p1,
			expectErr: require.Error,
		},
		{
			name:      "full has items",
			prev:      p1,
			full:      items,
			expectErr: require.Error,
		},
		{
			name:      "full has folders",
			prev:      p1,
			full:      folders,
			expectErr: require.Error,
		},
	}
	for _, test := range table {
		suite.Run(test.name, func() {
			_, err := NewPrefixCollection(test.prev, test.full, nil)
			test.expectErr(suite.T(), err, clues.ToCore(err))
		})
	}
}
@@ -12,7 +12,7 @@ import (
 	msgraphsdkgo "github.com/microsoftgraph/msgraph-sdk-go"
 	msgraphgocore "github.com/microsoftgraph/msgraph-sdk-go-core"

-	"github.com/alcionai/corso/src/internal/common"
+	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/path"
 )

@@ -39,7 +39,7 @@ func AllMetadataFileNames() []string {

 type QueryParams struct {
 	Category      path.CategoryType
-	ResourceOwner common.IDNamer
+	ResourceOwner idname.Provider
 	Credentials   account.M365Config
 }
@@ -9,7 +9,7 @@ import (

 	"github.com/alcionai/clues"

-	"github.com/alcionai/corso/src/internal/common"
+	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/connector/graph"
 	"github.com/alcionai/corso/src/internal/connector/support"
 	"github.com/alcionai/corso/src/internal/data"

@@ -43,7 +43,7 @@ type GraphConnector struct {
 	// maps of resource owner ids to names, and names to ids.
 	// not guaranteed to be populated, only here as a post-population
 	// reference for processes that choose to populate the values.
-	IDNameLookup common.IDNameSwapper
+	IDNameLookup idname.Cacher

 	// wg is used to track completion of GC tasks
 	wg *sync.WaitGroup

@@ -81,7 +81,7 @@ func NewGraphConnector(

 	gc := GraphConnector{
 		Discovery:    ac,
-		IDNameLookup: common.IDsNames{},
+		IDNameLookup: idname.NewCache(nil),
 		Service:      service,

 		credentials: creds,

@@ -215,7 +215,7 @@ type getOwnerIDAndNamer interface {
 		ctx context.Context,
 		discovery m365api.Client,
 		owner string,
-		ins common.IDNameSwapper,
+		ins idname.Cacher,
 	) (
 		ownerID string,
 		ownerName string,

@@ -233,7 +233,7 @@ func (r resourceClient) getOwnerIDAndNameFrom(
 	ctx context.Context,
 	discovery m365api.Client,
 	owner string,
-	ins common.IDNameSwapper,
+	ins idname.Cacher,
 ) (string, string, error) {
 	if ins != nil {
 		if n, ok := ins.NameOf(owner); ok {

@@ -277,7 +277,7 @@ func (r resourceClient) getOwnerIDAndNameFrom(
 func (gc *GraphConnector) PopulateOwnerIDAndNamesFrom(
 	ctx context.Context,
 	owner string, // input value, can be either id or name
-	ins common.IDNameSwapper,
+	ins idname.Cacher,
 ) (string, string, error) {
 	// move this to GC method
 	id, name, err := gc.ownerLookup.getOwnerIDAndNameFrom(ctx, gc.Discovery, owner, ins)

@@ -285,10 +285,7 @@ func (gc *GraphConnector) PopulateOwnerIDAndNamesFrom(
 		return "", "", clues.Wrap(err, "identifying resource owner")
 	}

-	gc.IDNameLookup = common.IDsNames{
-		IDToName: map[string]string{id: name},
-		NameToID: map[string]string{name: id},
-	}
+	gc.IDNameLookup = idname.NewCache(map[string]string{id: name})

 	return id, name, nil
 }
@ -13,7 +13,7 @@ import (
|
|||||||
"github.com/stretchr/testify/suite"
|
"github.com/stretchr/testify/suite"
|
||||||
"golang.org/x/exp/maps"
|
"golang.org/x/exp/maps"
|
||||||
|
|
||||||
"github.com/alcionai/corso/src/internal/common"
|
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
|
||||||
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
|
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
|
||||||
"github.com/alcionai/corso/src/internal/connector/mock"
|
"github.com/alcionai/corso/src/internal/connector/mock"
|
||||||
"github.com/alcionai/corso/src/internal/connector/support"
|
"github.com/alcionai/corso/src/internal/connector/support"
|
||||||
@ -58,7 +58,7 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
|
|||||||
table := []struct {
|
table := []struct {
|
||||||
name string
|
name string
|
||||||
owner string
|
owner string
|
||||||
ins common.IDsNames
|
ins inMock.Cache
|
||||||
rc *resourceClient
|
rc *resourceClient
|
||||||
expectID string
|
expectID string
|
||||||
expectName string
|
expectName string
|
||||||
@ -83,10 +83,7 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
|
|||||||
{
|
{
|
||||||
name: "only id map with owner id",
|
name: "only id map with owner id",
|
||||||
owner: id,
|
owner: id,
|
||||||
ins: common.IDsNames{
|
ins: inMock.NewCache(itn, nil),
|
||||||
IDToName: itn,
|
|
||||||
NameToID: nil,
|
|
||||||
},
|
|
||||||
rc: noLookup,
|
rc: noLookup,
|
||||||
expectID: id,
|
expectID: id,
|
||||||
expectName: name,
|
expectName: name,
|
||||||
@ -95,10 +92,7 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
|
|||||||
{
|
{
|
||||||
name: "only name map with owner id",
|
name: "only name map with owner id",
|
||||||
owner: id,
|
owner: id,
|
||||||
ins: common.IDsNames{
|
ins: inMock.NewCache(nil, nti),
|
||||||
IDToName: nil,
|
|
||||||
NameToID: nti,
|
|
||||||
},
|
|
||||||
rc: noLookup,
|
rc: noLookup,
|
||||||
expectID: "",
|
expectID: "",
|
||||||
expectName: "",
|
expectName: "",
|
||||||
@ -107,10 +101,7 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
|
|||||||
{
|
{
|
||||||
name: "only name map with owner id and lookup",
|
name: "only name map with owner id and lookup",
|
||||||
owner: id,
|
owner: id,
|
||||||
ins: common.IDsNames{
|
ins: inMock.NewCache(nil, nti),
|
||||||
IDToName: nil,
|
|
||||||
NameToID: nti,
|
|
||||||
},
|
|
||||||
rc: lookup,
|
rc: lookup,
|
||||||
expectID: id,
|
expectID: id,
|
||||||
expectName: name,
|
expectName: name,
|
||||||
@ -119,10 +110,7 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
|
|||||||
{
|
{
|
||||||
name: "only id map with owner name",
|
name: "only id map with owner name",
|
||||||
owner: name,
|
owner: name,
|
||||||
ins: common.IDsNames{
|
ins: inMock.NewCache(itn, nil),
|
||||||
IDToName: itn,
|
|
||||||
NameToID: nil,
|
|
||||||
},
|
|
||||||
rc: lookup,
|
rc: lookup,
|
||||||
expectID: id,
|
expectID: id,
|
||||||
expectName: name,
|
expectName: name,
|
||||||
@ -131,10 +119,7 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
|
|||||||
{
|
{
|
||||||
name: "only name map with owner name",
|
name: "only name map with owner name",
|
||||||
owner: name,
|
owner: name,
|
||||||
ins: common.IDsNames{
|
ins: inMock.NewCache(nil, nti),
|
||||||
IDToName: nil,
|
|
||||||
NameToID: nti,
|
|
||||||
},
|
|
||||||
rc: noLookup,
|
rc: noLookup,
|
||||||
expectID: id,
|
expectID: id,
|
||||||
expectName: name,
|
expectName: name,
|
||||||
@ -143,10 +128,7 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
|
|||||||
{
|
{
|
||||||
name: "only id map with owner name",
|
name: "only id map with owner name",
|
||||||
owner: name,
|
owner: name,
|
||||||
ins: common.IDsNames{
|
ins: inMock.NewCache(itn, nil),
|
||||||
IDToName: itn,
|
|
||||||
NameToID: nil,
|
|
||||||
},
|
|
||||||
rc: noLookup,
|
rc: noLookup,
|
||||||
expectID: "",
|
expectID: "",
|
||||||
expectName: "",
|
expectName: "",
|
||||||
@ -155,10 +137,7 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
|
|||||||
{
|
{
|
||||||
name: "only id map with owner name and lookup",
|
name: "only id map with owner name and lookup",
|
||||||
owner: name,
|
owner: name,
|
||||||
ins: common.IDsNames{
|
ins: inMock.NewCache(itn, nil),
|
||||||
IDToName: itn,
|
|
||||||
NameToID: nil,
|
|
||||||
},
|
|
||||||
rc: lookup,
|
rc: lookup,
|
||||||
expectID: id,
|
expectID: id,
|
||||||
expectName: name,
|
expectName: name,
|
||||||
@ -167,10 +146,7 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
|
|||||||
{
|
{
|
||||||
name: "both maps with owner id",
|
name: "both maps with owner id",
|
||||||
owner: id,
|
owner: id,
|
||||||
ins: common.IDsNames{
|
ins: inMock.NewCache(itn, nti),
|
||||||
IDToName: itn,
|
|
||||||
NameToID: nti,
|
|
||||||
},
|
|
||||||
rc: noLookup,
|
rc: noLookup,
|
||||||
expectID: id,
|
expectID: id,
|
||||||
expectName: name,
|
expectName: name,
|
||||||
@ -179,10 +155,7 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
|
|||||||
{
|
{
|
||||||
name: "both maps with owner name",
|
name: "both maps with owner name",
|
||||||
owner: name,
|
owner: name,
|
||||||
ins: common.IDsNames{
|
ins: inMock.NewCache(itn, nti),
|
||||||
IDToName: itn,
|
|
||||||
NameToID: nti,
|
|
||||||
},
|
|
||||||
rc: noLookup,
|
rc: noLookup,
|
||||||
expectID: id,
|
expectID: id,
|
||||||
expectName: name,
|
expectName: name,
|
||||||
@ -191,10 +164,9 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
|
|||||||
{
|
{
|
||||||
name: "non-matching maps with owner id",
|
name: "non-matching maps with owner id",
|
||||||
owner: id,
|
owner: id,
|
||||||
ins: common.IDsNames{
|
ins: inMock.NewCache(
|
||||||
IDToName: map[string]string{"foo": "bar"},
|
map[string]string{"foo": "bar"},
|
||||||
NameToID: map[string]string{"fnords": "smarf"},
|
map[string]string{"fnords": "smarf"}),
|
||||||
},
|
|
||||||
rc: noLookup,
|
rc: noLookup,
|
||||||
expectID: "",
|
expectID: "",
|
||||||
expectName: "",
|
expectName: "",
|
||||||
@ -203,10 +175,9 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
|
|||||||
{
|
{
|
||||||
name: "non-matching with owner name",
|
name: "non-matching with owner name",
|
||||||
owner: name,
|
owner: name,
|
||||||
ins: common.IDsNames{
|
ins: inMock.NewCache(
|
||||||
IDToName: map[string]string{"foo": "bar"},
|
map[string]string{"foo": "bar"},
|
||||||
NameToID: map[string]string{"fnords": "smarf"},
|
map[string]string{"fnords": "smarf"}),
|
||||||
},
|
|
||||||
rc: noLookup,
|
rc: noLookup,
|
||||||
expectID: "",
|
expectID: "",
|
||||||
expectName: "",
|
expectName: "",
|
||||||
@ -215,10 +186,9 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
|
|||||||
{
|
{
|
||||||
name: "non-matching maps with owner id and lookup",
|
name: "non-matching maps with owner id and lookup",
|
||||||
owner: id,
|
owner: id,
|
||||||
ins: common.IDsNames{
|
ins: inMock.NewCache(
|
||||||
IDToName: map[string]string{"foo": "bar"},
|
map[string]string{"foo": "bar"},
|
||||||
NameToID: map[string]string{"fnords": "smarf"},
|
map[string]string{"fnords": "smarf"}),
|
||||||
},
|
|
||||||
rc: lookup,
|
rc: lookup,
|
||||||
expectID: id,
|
expectID: id,
|
||||||
expectName: name,
|
expectName: name,
|
||||||
@ -227,10 +197,9 @@ func (suite *GraphConnectorUnitSuite) TestPopulateOwnerIDAndNamesFrom() {
|
|||||||
{
|
{
|
||||||
name: "non-matching with owner name and lookup",
|
name: "non-matching with owner name and lookup",
|
||||||
owner: name,
|
owner: name,
|
||||||
ins: common.IDsNames{
|
ins: inMock.NewCache(
|
||||||
IDToName: map[string]string{"foo": "bar"},
|
map[string]string{"foo": "bar"},
|
||||||
NameToID: map[string]string{"fnords": "smarf"},
|
map[string]string{"fnords": "smarf"}),
|
||||||
},
|
|
||||||
rc: lookup,
|
rc: lookup,
|
||||||
expectID: id,
|
expectID: id,
|
||||||
expectName: name,
|
expectName: name,
|
||||||
@@ -553,7 +522,7 @@ func runBackupAndCompare(
     }

     backupGC := loadConnector(ctx, t, config.resource)
-    backupGC.IDNameLookup = common.IDsNames{IDToName: idToName, NameToID: nameToID}
+    backupGC.IDNameLookup = inMock.NewCache(idToName, nameToID)

     backupSel := backupSelectorForExpected(t, config.service, expectedDests)
     t.Logf("Selective backup of %s\n", backupSel)
@@ -564,6 +533,7 @@ func runBackupAndCompare(
         backupSel,
         backupSel,
         nil,
+        version.NoBackup,
         config.opts,
         fault.New(true))
     require.NoError(t, err, clues.ToCore(err))
@@ -1106,6 +1076,7 @@ func (suite *GraphConnectorIntegrationSuite) TestMultiFolderBackupDifferentNames
         backupSel,
         backupSel,
         nil,
+        version.NoBackup,
         control.Options{
             RestorePermissions: true,
             ToggleFeatures: control.Toggles{},
@@ -1261,9 +1232,10 @@ func (suite *GraphConnectorIntegrationSuite) TestBackup_CreatesPrefixCollections

     dcs, excludes, err := backupGC.ProduceBackupCollections(
         ctx,
-        backupSel,
+        inMock.NewProvider(id, name),
         backupSel,
         nil,
+        version.NoBackup,
         control.Options{
             RestorePermissions: false,
             ToggleFeatures: control.Toggles{},
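Note: these tests replace the old `common.IDsNames` struct with the `idname` mock helpers (`inMock.NewProvider`, `inMock.NewCache`). The mock package itself is not shown in this diff; based only on how it is called above and on the `ID()`/`Name()` accessors used elsewhere in this change, a minimal stand-in would look roughly like the sketch below. All names and method signatures here are illustrative assumptions, not the actual package contents.

```go
package mock // hypothetical stand-in for internal/common/idname/mock

// provider pairs a canonical ID with a display name, mirroring calls such as
// NewProvider(id, name) followed by reads of .ID() and .Name().
type provider struct{ id, name string }

func (p provider) ID() string   { return p.id }
func (p provider) Name() string { return p.name }

// NewProvider builds a fixed id/name pair for a single resource owner.
func NewProvider(id, name string) provider { return provider{id, name} }

// cache keeps both lookup directions, matching NewCache(idToName, nameToID)
// call sites in the tests above; the lookup method names are assumptions.
type cache struct {
	idToName map[string]string
	nameToID map[string]string
}

func NewCache(idToName, nameToID map[string]string) cache {
	return cache{idToName: idToName, nameToID: nameToID}
}

func (c cache) IDOf(name string) (string, bool) { id, ok := c.nameToID[name]; return id, ok }
func (c cache) NameOf(id string) (string, bool) { n, ok := c.idToName[id]; return n, ok }
```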
@@ -3,7 +3,7 @@ package mock
 import (
     "context"

-    "github.com/alcionai/corso/src/internal/common"
+    "github.com/alcionai/corso/src/internal/common/idname"
     "github.com/alcionai/corso/src/internal/data"
     "github.com/alcionai/corso/src/pkg/account"
     "github.com/alcionai/corso/src/pkg/backup/details"
@@ -25,9 +25,10 @@ type GraphConnector struct {

 func (gc GraphConnector) ProduceBackupCollections(
     _ context.Context,
-    _ common.IDNamer,
+    _ idname.Provider,
     _ selectors.Selector,
     _ []data.RestoreCollection,
+    _ int,
     _ control.Options,
     _ *fault.Bus,
 ) (
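Note: the extra `int` argument is the last backup version that the producer receives. A compile-time guard along these lines would flag any future drift between this mock and the `inject.BackupProducer` interface defined later in the diff; it is a hypothetical addition (and assumes the mock implements whatever other methods the interface requires), not part of this change.

```go
// Hypothetical compile-time check: fails to build if the mock's
// ProduceBackupCollections signature stops matching inject.BackupProducer.
var _ inject.BackupProducer = GraphConnector{}
```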
@ -162,12 +162,40 @@ func NewCollection(
|
|||||||
return nil, clues.Wrap(err, "getting previous location").With("prev_path", prevPath.String())
|
return nil, clues.Wrap(err, "getting previous location").With("prev_path", prevPath.String())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
c := newColl(
|
||||||
|
itemClient,
|
||||||
|
folderPath,
|
||||||
|
prevPath,
|
||||||
|
driveID,
|
||||||
|
service,
|
||||||
|
statusUpdater,
|
||||||
|
source,
|
||||||
|
ctrlOpts,
|
||||||
|
colScope,
|
||||||
|
doNotMergeItems)
|
||||||
|
|
||||||
|
c.locPath = locPath
|
||||||
|
c.prevLocPath = prevLocPath
|
||||||
|
|
||||||
|
return c, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func newColl(
|
||||||
|
gr graph.Requester,
|
||||||
|
folderPath path.Path,
|
||||||
|
prevPath path.Path,
|
||||||
|
driveID string,
|
||||||
|
service graph.Servicer,
|
||||||
|
statusUpdater support.StatusUpdater,
|
||||||
|
source driveSource,
|
||||||
|
ctrlOpts control.Options,
|
||||||
|
colScope collectionScope,
|
||||||
|
doNotMergeItems bool,
|
||||||
|
) *Collection {
|
||||||
c := &Collection{
|
c := &Collection{
|
||||||
itemClient: itemClient,
|
itemClient: gr,
|
||||||
folderPath: folderPath,
|
folderPath: folderPath,
|
||||||
prevPath: prevPath,
|
prevPath: prevPath,
|
||||||
locPath: locPath,
|
|
||||||
prevLocPath: prevLocPath,
|
|
||||||
driveItems: map[string]models.DriveItemable{},
|
driveItems: map[string]models.DriveItemable{},
|
||||||
driveID: driveID,
|
driveID: driveID,
|
||||||
source: source,
|
source: source,
|
||||||
@ -192,7 +220,7 @@ func NewCollection(
|
|||||||
c.itemMetaReader = oneDriveItemMetaReader
|
c.itemMetaReader = oneDriveItemMetaReader
|
||||||
}
|
}
|
||||||
|
|
||||||
return c, nil
|
return c
|
||||||
}
|
}
|
||||||
|
|
||||||
// Adds an itemID to the collection. This will make it eligible to be
|
// Adds an itemID to the collection. This will make it eligible to be
|
||||||
@ -254,17 +282,21 @@ func (oc Collection) PreviousLocationPath() details.LocationIDer {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var ider details.LocationIDer
|
||||||
|
|
||||||
switch oc.source {
|
switch oc.source {
|
||||||
case OneDriveSource:
|
case OneDriveSource:
|
||||||
return details.NewOneDriveLocationIDer(
|
ider = details.NewOneDriveLocationIDer(
|
||||||
oc.driveID,
|
oc.driveID,
|
||||||
oc.prevLocPath.Elements()...)
|
oc.prevLocPath.Elements()...)
|
||||||
|
|
||||||
default:
|
default:
|
||||||
return details.NewSharePointLocationIDer(
|
ider = details.NewSharePointLocationIDer(
|
||||||
oc.driveID,
|
oc.driveID,
|
||||||
oc.prevLocPath.Elements()...)
|
oc.prevLocPath.Elements()...)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return ider
|
||||||
}
|
}
|
||||||
|
|
||||||
func (oc Collection) State() data.CollectionState {
|
func (oc Collection) State() data.CollectionState {
|
||||||
|
|||||||
@@ -6,10 +6,11 @@ import (
     "github.com/alcionai/clues"
     "golang.org/x/exp/maps"

-    "github.com/alcionai/corso/src/internal/common"
+    "github.com/alcionai/corso/src/internal/common/idname"
     "github.com/alcionai/corso/src/internal/connector/graph"
     "github.com/alcionai/corso/src/internal/connector/support"
     "github.com/alcionai/corso/src/internal/data"
+    "github.com/alcionai/corso/src/internal/version"
     "github.com/alcionai/corso/src/pkg/control"
     "github.com/alcionai/corso/src/pkg/fault"
     "github.com/alcionai/corso/src/pkg/logger"
@@ -34,8 +35,9 @@ func (fm odFolderMatcher) Matches(dir string) bool {
 func DataCollections(
     ctx context.Context,
     selector selectors.Selector,
-    user common.IDNamer,
+    user idname.Provider,
     metadata []data.RestoreCollection,
+    lastBackupVersion int,
     tenant string,
     itemClient graph.Requester,
     service graph.Servicer,
@@ -91,9 +93,23 @@ func DataCollections(
         }
     }
+
+    mcs, err := migrationCollections(
+        service,
+        lastBackupVersion,
+        tenant,
+        user,
+        su,
+        ctrlOpts)
+    if err != nil {
+        return nil, nil, err
+    }
+
+    collections = append(collections, mcs...)

     if len(collections) > 0 {
         baseCols, err := graph.BaseCollections(
             ctx,
+            collections,
             tenant,
             user.ID(),
             path.OneDriveService,
@@ -109,3 +125,53 @@ func DataCollections(

     return collections, allExcludes, el.Failure()
 }
+
+// adds data migrations to the collection set.
+func migrationCollections(
+    svc graph.Servicer,
+    lastBackupVersion int,
+    tenant string,
+    user idname.Provider,
+    su support.StatusUpdater,
+    ctrlOpts control.Options,
+) ([]data.BackupCollection, error) {
+    if !ctrlOpts.ToggleFeatures.RunMigrations {
+        return nil, nil
+    }
+
+    // assume a version < 0 implies no prior backup, thus nothing to migrate.
+    if version.IsNoBackup(lastBackupVersion) {
+        return nil, nil
+    }
+
+    if lastBackupVersion >= version.AllXMigrateUserPNToID {
+        return nil, nil
+    }
+
+    // unlike exchange, which enumerates all folders on every
+    // backup, onedrive needs to force the owner PN -> ID migration
+    mc, err := path.ServicePrefix(
+        tenant,
+        user.ID(),
+        path.OneDriveService,
+        path.FilesCategory)
+    if err != nil {
+        return nil, clues.Wrap(err, "creating user id migration path")
+    }
+
+    mpc, err := path.ServicePrefix(
+        tenant,
+        user.Name(),
+        path.OneDriveService,
+        path.FilesCategory)
+    if err != nil {
+        return nil, clues.Wrap(err, "creating user name migration path")
+    }
+
+    mgn, err := graph.NewPrefixCollection(mpc, mc, su)
+    if err != nil {
+        return nil, clues.Wrap(err, "creating migration collection")
+    }
+
+    return []data.BackupCollection{mgn}, nil
+}
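Note: the gate above reduces to three checks before any prefix collection is built. A standalone sketch of that decision, assuming the version constants introduced later in this diff (Backup = 7, so AllXMigrateUserPNToID = Backup + 1); the helper name is hypothetical and the real code inlines these checks:

```go
// needsPNToIDMigration mirrors the early returns in migrationCollections.
func needsPNToIDMigration(lastBackupVersion int, runMigrations bool) bool {
	if !runMigrations {
		return false // feature toggle is off
	}
	if version.IsNoBackup(lastBackupVersion) {
		return false // no prior backup, so nothing to migrate
	}
	// only backups written before the PN -> ID cutover need the prefix swap
	return lastBackupVersion < version.AllXMigrateUserPNToID
}
```

When all three checks pass, the prefix collection pairs the old PrincipalName-based service prefix with the new ID-based one, which is what lets the rest of this change (the StateOf and inflateBaseTree updates below) carry the owner rename over as a move of the existing subtree rather than a fresh upload.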
123
src/internal/connector/onedrive/data_collections_test.go
Normal file
123
src/internal/connector/onedrive/data_collections_test.go
Normal file
@ -0,0 +1,123 @@
|
|||||||
|
package onedrive
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/alcionai/clues"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
"github.com/stretchr/testify/suite"
|
||||||
|
|
||||||
|
"github.com/alcionai/corso/src/internal/tester"
|
||||||
|
"github.com/alcionai/corso/src/internal/version"
|
||||||
|
"github.com/alcionai/corso/src/pkg/control"
|
||||||
|
"github.com/alcionai/corso/src/pkg/path"
|
||||||
|
"github.com/alcionai/corso/src/pkg/selectors"
|
||||||
|
)
|
||||||
|
|
||||||
|
type DataCollectionsUnitSuite struct {
|
||||||
|
tester.Suite
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestDataCollectionsUnitSuite(t *testing.T) {
|
||||||
|
suite.Run(t, &DataCollectionsUnitSuite{Suite: tester.NewUnitSuite(t)})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (suite *DataCollectionsUnitSuite) TestMigrationCollections() {
|
||||||
|
u := selectors.Selector{}
|
||||||
|
u = u.SetDiscreteOwnerIDName("i", "n")
|
||||||
|
|
||||||
|
od := path.OneDriveService.String()
|
||||||
|
fc := path.FilesCategory.String()
|
||||||
|
|
||||||
|
type migr struct {
|
||||||
|
full string
|
||||||
|
prev string
|
||||||
|
}
|
||||||
|
|
||||||
|
table := []struct {
|
||||||
|
name string
|
||||||
|
version int
|
||||||
|
forceSkip bool
|
||||||
|
expectLen int
|
||||||
|
expectMigration []migr
|
||||||
|
}{
|
||||||
|
{
|
||||||
|
name: "no backup version",
|
||||||
|
version: version.NoBackup,
|
||||||
|
forceSkip: false,
|
||||||
|
expectLen: 0,
|
||||||
|
expectMigration: []migr{},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "above current version",
|
||||||
|
version: version.Backup + 5,
|
||||||
|
forceSkip: false,
|
||||||
|
expectLen: 0,
|
||||||
|
expectMigration: []migr{},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "user pn to id",
|
||||||
|
version: version.AllXMigrateUserPNToID - 1,
|
||||||
|
forceSkip: false,
|
||||||
|
expectLen: 1,
|
||||||
|
expectMigration: []migr{
|
||||||
|
{
|
||||||
|
full: strings.Join([]string{"t", od, "i", fc}, "/"),
|
||||||
|
prev: strings.Join([]string{"t", od, "n", fc}, "/"),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "skipped",
|
||||||
|
version: version.Backup + 5,
|
||||||
|
forceSkip: true,
|
||||||
|
expectLen: 0,
|
||||||
|
expectMigration: []migr{},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, test := range table {
|
||||||
|
suite.Run(test.name, func() {
|
||||||
|
t := suite.T()
|
||||||
|
|
||||||
|
opts := control.Options{
|
||||||
|
ToggleFeatures: control.Toggles{
|
||||||
|
RunMigrations: !test.forceSkip,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
mc, err := migrationCollections(nil, test.version, "t", u, nil, opts)
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
if test.expectLen == 0 {
|
||||||
|
assert.Nil(t, mc)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.Len(t, mc, test.expectLen)
|
||||||
|
|
||||||
|
migrs := []migr{}
|
||||||
|
|
||||||
|
for _, col := range mc {
|
||||||
|
var fp, pp string
|
||||||
|
|
||||||
|
if col.FullPath() != nil {
|
||||||
|
fp = col.FullPath().String()
|
||||||
|
}
|
||||||
|
|
||||||
|
if col.PreviousPath() != nil {
|
||||||
|
pp = col.PreviousPath().String()
|
||||||
|
}
|
||||||
|
|
||||||
|
t.Logf("Found migration collection:\n* full: %s\n* prev: %s\n", fp, pp)
|
||||||
|
|
||||||
|
migrs = append(migrs, test.expectMigration...)
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, m := range migrs {
|
||||||
|
assert.Contains(t, migrs, m, "expected to find migration: %+v", test.expectMigration[i])
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -116,6 +116,7 @@ func DataCollections(
     if len(collections) > 0 {
         baseCols, err := graph.BaseCollections(
             ctx,
+            collections,
             creds.AzureTenantID,
             site,
             path.SharePointService,

@@ -143,7 +143,7 @@ func StateOf(prev, curr path.Path) CollectionState {
         return NewState
     }

-    if curr.Folder(false) != prev.Folder(false) {
+    if curr.String() != prev.String() {
         return MovedState
     }

@@ -25,6 +25,8 @@ func (suite *DataCollectionSuite) TestStateOf() {
     require.NoError(suite.T(), err, clues.ToCore(err))
     barP, err := path.Build("t", "u", path.ExchangeService, path.EmailCategory, false, "bar")
     require.NoError(suite.T(), err, clues.ToCore(err))
+    preP, err := path.Build("_t", "_u", path.ExchangeService, path.EmailCategory, false, "foo")
+    require.NoError(suite.T(), err, clues.ToCore(err))

     table := []struct {
         name string
@@ -49,6 +51,12 @@ func (suite *DataCollectionSuite) TestStateOf() {
             curr: barP,
             expect: MovedState,
         },
+        {
+            name: "moved if prefix changes",
+            prev: fooP,
+            curr: preP,
+            expect: MovedState,
+        },
         {
             name: "deleted",
             prev: fooP,
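Note: comparing the full `String()` instead of only `Folder(false)` is what lets an owner-prefix change register as a move. A quick illustration using the same `path.Build` helper as the test above (values are placeholders, errors elided for brevity):

```go
// Before this change, two paths differing only in the resource owner had the
// same folder segment, so StateOf could not see the rename.
prev, _ := path.Build("t", "user-pn", path.ExchangeService, path.EmailCategory, false, "foo")
curr, _ := path.Build("t", "user-id", path.ExchangeService, path.EmailCategory, false, "foo")

fmt.Println(StateOf(prev, curr)) // MovedState with the String() comparison
```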
@ -130,7 +130,7 @@ func putInner(
|
|||||||
base.ID = model.StableID(uuid.NewString())
|
base.ID = model.StableID(uuid.NewString())
|
||||||
}
|
}
|
||||||
|
|
||||||
tmpTags, err := tagsForModelWithID(s, base.ID, base.Version, base.Tags)
|
tmpTags, err := tagsForModelWithID(s, base.ID, base.ModelVersion, base.Tags)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// Will be wrapped at a higher layer.
|
// Will be wrapped at a higher layer.
|
||||||
return clues.Stack(err).WithClues(ctx)
|
return clues.Stack(err).WithClues(ctx)
|
||||||
@ -158,7 +158,7 @@ func (ms *ModelStore) Put(
|
|||||||
return clues.Stack(errUnrecognizedSchema)
|
return clues.Stack(errUnrecognizedSchema)
|
||||||
}
|
}
|
||||||
|
|
||||||
m.Base().Version = ms.modelVersion
|
m.Base().ModelVersion = ms.modelVersion
|
||||||
|
|
||||||
err := repo.WriteSession(
|
err := repo.WriteSession(
|
||||||
ctx,
|
ctx,
|
||||||
@ -205,7 +205,7 @@ func (ms ModelStore) populateBaseModelFromMetadata(
|
|||||||
|
|
||||||
base.ModelStoreID = m.ID
|
base.ModelStoreID = m.ID
|
||||||
base.ID = model.StableID(id)
|
base.ID = model.StableID(id)
|
||||||
base.Version = v
|
base.ModelVersion = v
|
||||||
base.Tags = m.Labels
|
base.Tags = m.Labels
|
||||||
|
|
||||||
stripHiddenTags(base.Tags)
|
stripHiddenTags(base.Tags)
|
||||||
@ -424,7 +424,7 @@ func (ms *ModelStore) Update(
|
|||||||
return clues.Stack(errNoModelStoreID).WithClues(ctx)
|
return clues.Stack(errNoModelStoreID).WithClues(ctx)
|
||||||
}
|
}
|
||||||
|
|
||||||
base.Version = ms.modelVersion
|
base.ModelVersion = ms.modelVersion
|
||||||
|
|
||||||
// TODO(ashmrtnz): Can remove if bottleneck.
|
// TODO(ashmrtnz): Can remove if bottleneck.
|
||||||
if err := ms.checkPrevModelVersion(ctx, s, base); err != nil {
|
if err := ms.checkPrevModelVersion(ctx, s, base); err != nil {
|
||||||
|
|||||||
@ -264,7 +264,7 @@ func (suite *ModelStoreIntegrationSuite) TestPutGet() {
|
|||||||
|
|
||||||
require.NotEmpty(t, foo.ModelStoreID)
|
require.NotEmpty(t, foo.ModelStoreID)
|
||||||
require.NotEmpty(t, foo.ID)
|
require.NotEmpty(t, foo.ID)
|
||||||
require.Equal(t, globalModelVersion, foo.Version)
|
require.Equal(t, globalModelVersion, foo.ModelVersion)
|
||||||
|
|
||||||
returned := &fooModel{}
|
returned := &fooModel{}
|
||||||
err = suite.m.Get(suite.ctx, test.s, foo.ID, returned)
|
err = suite.m.Get(suite.ctx, test.s, foo.ID, returned)
|
||||||
@ -569,14 +569,14 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate() {
|
|||||||
name: "NoTags",
|
name: "NoTags",
|
||||||
mutator: func(m *fooModel) {
|
mutator: func(m *fooModel) {
|
||||||
m.Bar = "baz"
|
m.Bar = "baz"
|
||||||
m.Version = 42
|
m.ModelVersion = 42
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "WithTags",
|
name: "WithTags",
|
||||||
mutator: func(m *fooModel) {
|
mutator: func(m *fooModel) {
|
||||||
m.Bar = "baz"
|
m.Bar = "baz"
|
||||||
m.Version = 42
|
m.ModelVersion = 42
|
||||||
m.Tags = map[string]string{
|
m.Tags = map[string]string{
|
||||||
"a": "42",
|
"a": "42",
|
||||||
}
|
}
|
||||||
@ -607,7 +607,7 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate() {
|
|||||||
|
|
||||||
oldModelID := foo.ModelStoreID
|
oldModelID := foo.ModelStoreID
|
||||||
oldStableID := foo.ID
|
oldStableID := foo.ID
|
||||||
oldVersion := foo.Version
|
oldVersion := foo.ModelVersion
|
||||||
|
|
||||||
test.mutator(foo)
|
test.mutator(foo)
|
||||||
|
|
||||||
@ -616,7 +616,7 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate() {
|
|||||||
assert.Equal(t, oldStableID, foo.ID)
|
assert.Equal(t, oldStableID, foo.ID)
|
||||||
// The version in the model store has not changed so we get the old
|
// The version in the model store has not changed so we get the old
|
||||||
// version back.
|
// version back.
|
||||||
assert.Equal(t, oldVersion, foo.Version)
|
assert.Equal(t, oldVersion, foo.ModelVersion)
|
||||||
|
|
||||||
returned := &fooModel{}
|
returned := &fooModel{}
|
||||||
|
|
||||||
@ -627,7 +627,7 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate() {
|
|||||||
ids, err := m.GetIDsForType(ctx, theModelType, nil)
|
ids, err := m.GetIDsForType(ctx, theModelType, nil)
|
||||||
require.NoError(t, err, clues.ToCore(err))
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
require.Len(t, ids, 1)
|
require.Len(t, ids, 1)
|
||||||
assert.Equal(t, globalModelVersion, ids[0].Version)
|
assert.Equal(t, globalModelVersion, ids[0].ModelVersion)
|
||||||
|
|
||||||
if oldModelID == foo.ModelStoreID {
|
if oldModelID == foo.ModelStoreID {
|
||||||
// Unlikely, but we don't control ModelStoreID generation and can't
|
// Unlikely, but we don't control ModelStoreID generation and can't
|
||||||
|
|||||||
@@ -3,6 +3,8 @@ package kopia
 import (
     "encoding/base64"
     "path"
+
+    "github.com/alcionai/clues"
 )

 var encoder = base64.URLEncoding
@@ -20,6 +22,21 @@ func encodeElements(elements ...string) []string {
     return encoded
 }

+func decodeElements(elements ...string) ([]string, error) {
+    decoded := make([]string, 0, len(elements))
+
+    for _, e := range elements {
+        bs, err := encoder.DecodeString(e)
+        if err != nil {
+            return nil, clues.Wrap(err, "decoding element").With("element", e)
+        }
+
+        decoded = append(decoded, string(bs))
+    }
+
+    return decoded, nil
+}
+
 // encodeAsPath takes a set of elements and returns the concatenated elements as
 // if they were a path. The elements are joined with the separator in the golang
 // path package.
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Key is the concatenation of the ResourceOwner, Service, and Category.
|
||||||
|
func (r Reason) Key() string {
|
||||||
|
return r.ResourceOwner + r.Service.String() + r.Category.String()
|
||||||
|
}
|
||||||
|
|
||||||
type ManifestEntry struct {
|
type ManifestEntry struct {
|
||||||
*snapshot.Manifest
|
*snapshot.Manifest
|
||||||
// Reason contains the ResourceOwners and Service/Categories that caused this
|
// Reason contains the ResourceOwners and Service/Categories that caused this
|
||||||
|
|||||||
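Note: `Key` gives `lastCompleteBackups` (further down in this diff) a stable map key per owner/service/category tuple. Purely illustrative, with placeholder values; the exact `String()` spellings of the enums are not shown in this diff:

```go
r := Reason{
	ResourceOwner: "user-id",
	Service:       path.ExchangeService,
	Category:      path.EmailCategory,
}

// Key concatenates the three parts: "user-id" + Service.String() + Category.String().
k := r.Key()
```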
@@ -1011,15 +1011,20 @@ func inflateBaseTree(
         return clues.Wrap(err, "subtree root is not directory").WithClues(ictx)
     }

-    // We're assuming here that the prefix for the path has not changed (i.e.
-    // all of tenant, service, resource owner, and category are the same in the
-    // old snapshot (snap) and the snapshot we're currently trying to make.
+    // This ensures that a migration on the directory prefix can complete.
+    // The prefix is the tenant/service/owner/category set, which remains
+    // otherwise unchecked in tree inflation below this point.
+    newSubtreePath := subtreePath
+    if p, ok := updatedPaths[subtreePath.String()]; ok {
+        newSubtreePath = p.ToBuilder()
+    }
+
     if err = traverseBaseDir(
         ictx,
         0,
         updatedPaths,
         subtreePath.Dir(),
-        subtreePath.Dir(),
+        newSubtreePath.Dir(),
         subtreeDir,
         roots,
     ); err != nil {
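Note: `updatedPaths` is keyed by the old subtree prefix rendered as a string; when the prefix has moved (for example the OneDrive owner PN -> ID migration above), the base tree is grafted under the remapped prefix instead of the stale one. A trimmed illustration of the lookup, with hypothetical string values standing in for the real path types:

```go
// updatedPaths maps old path -> new path; values here are placeholders.
oldPrefix := "tenant/onedrive/user-pn/files"
updated := map[string]string{
	oldPrefix: "tenant/onedrive/user-id/files",
}

newPrefix := oldPrefix
if p, ok := updated[oldPrefix]; ok {
	newPrefix = p // graft the base snapshot under the migrated prefix
}
_ = newPrefix
```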
@ -183,16 +183,22 @@ func expectDirs(
|
|||||||
) {
|
) {
|
||||||
t.Helper()
|
t.Helper()
|
||||||
|
|
||||||
if exactly {
|
ents := make([]string, 0, len(entries))
|
||||||
require.Len(t, entries, len(dirs))
|
|
||||||
}
|
|
||||||
|
|
||||||
names := make([]string, 0, len(entries))
|
|
||||||
for _, e := range entries {
|
for _, e := range entries {
|
||||||
names = append(names, e.Name())
|
ents = append(ents, e.Name())
|
||||||
}
|
}
|
||||||
|
|
||||||
assert.Subset(t, names, dirs)
|
dd, err := decodeElements(dirs...)
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
de, err := decodeElements(ents...)
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
if exactly {
|
||||||
|
require.Lenf(t, entries, len(dirs), "expected exactly %+v\ngot %+v", dd, de)
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.Subsetf(t, dirs, ents, "expected at least %+v\ngot %+v", dd, de)
|
||||||
}
|
}
|
||||||
|
|
||||||
func getDirEntriesForEntry(
|
func getDirEntriesForEntry(
|
||||||
@ -922,15 +928,18 @@ func (msw *mockSnapshotWalker) SnapshotRoot(*snapshot.Manifest) (fs.Entry, error
|
|||||||
func mockIncrementalBase(
|
func mockIncrementalBase(
|
||||||
id, tenant, resourceOwner string,
|
id, tenant, resourceOwner string,
|
||||||
service path.ServiceType,
|
service path.ServiceType,
|
||||||
category path.CategoryType,
|
categories ...path.CategoryType,
|
||||||
) IncrementalBase {
|
) IncrementalBase {
|
||||||
|
stps := []*path.Builder{}
|
||||||
|
for _, c := range categories {
|
||||||
|
stps = append(stps, path.Builder{}.Append(tenant, service.String(), resourceOwner, c.String()))
|
||||||
|
}
|
||||||
|
|
||||||
return IncrementalBase{
|
return IncrementalBase{
|
||||||
Manifest: &snapshot.Manifest{
|
Manifest: &snapshot.Manifest{
|
||||||
ID: manifest.ID(id),
|
ID: manifest.ID(id),
|
||||||
},
|
},
|
||||||
SubtreePaths: []*path.Builder{
|
SubtreePaths: stps,
|
||||||
path.Builder{}.Append(tenant, service.String(), resourceOwner, category.String()),
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -2754,3 +2763,167 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt
|
|||||||
|
|
||||||
expectTree(t, ctx, expected, dirTree)
|
expectTree(t, ctx, expected, dirTree)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsMigrateSubtrees() {
|
||||||
|
tester.LogTimeOfTest(suite.T())
|
||||||
|
t := suite.T()
|
||||||
|
|
||||||
|
ctx, flush := tester.NewContext()
|
||||||
|
defer flush()
|
||||||
|
|
||||||
|
const (
|
||||||
|
contactsDir = "contacts"
|
||||||
|
migratedUser = "user_migrate"
|
||||||
|
)
|
||||||
|
|
||||||
|
oldPrefixPathEmail, err := path.ServicePrefix(testTenant, testUser, path.ExchangeService, path.EmailCategory)
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
newPrefixPathEmail, err := path.ServicePrefix(testTenant, migratedUser, path.ExchangeService, path.EmailCategory)
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
oldPrefixPathCont, err := path.ServicePrefix(testTenant, testUser, path.ExchangeService, path.ContactsCategory)
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
newPrefixPathCont, err := path.ServicePrefix(testTenant, migratedUser, path.ExchangeService, path.ContactsCategory)
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
var (
|
||||||
|
inboxFileName1 = testFileName
|
||||||
|
|
||||||
|
inboxFileData1 = testFileData
|
||||||
|
// inboxFileData1v2 = testFileData5
|
||||||
|
|
||||||
|
contactsFileName1 = testFileName3
|
||||||
|
contactsFileData1 = testFileData3
|
||||||
|
)
|
||||||
|
|
||||||
|
// Must be a function that returns a new instance each time as StreamingFile
|
||||||
|
// can only return its Reader once.
|
||||||
|
// baseSnapshot with the following layout:
|
||||||
|
// - a-tenant
|
||||||
|
// - exchange
|
||||||
|
// - user1
|
||||||
|
// - email
|
||||||
|
// - Inbox
|
||||||
|
// - file1
|
||||||
|
// - contacts
|
||||||
|
// - contacts
|
||||||
|
// - file2
|
||||||
|
getBaseSnapshot1 := func() fs.Entry {
|
||||||
|
return baseWithChildren(
|
||||||
|
[]string{testTenant, service, testUser},
|
||||||
|
[]fs.Entry{
|
||||||
|
virtualfs.NewStaticDirectory(
|
||||||
|
encodeElements(category)[0],
|
||||||
|
[]fs.Entry{
|
||||||
|
virtualfs.NewStaticDirectory(
|
||||||
|
encodeElements(testInboxID)[0],
|
||||||
|
[]fs.Entry{
|
||||||
|
virtualfs.StreamingFileWithModTimeFromReader(
|
||||||
|
encodeElements(inboxFileName1)[0],
|
||||||
|
time.Time{},
|
||||||
|
newBackupStreamReader(
|
||||||
|
serializationVersion,
|
||||||
|
io.NopCloser(bytes.NewReader(inboxFileData1)))),
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
virtualfs.NewStaticDirectory(
|
||||||
|
encodeElements(path.ContactsCategory.String())[0],
|
||||||
|
[]fs.Entry{
|
||||||
|
virtualfs.NewStaticDirectory(
|
||||||
|
encodeElements(contactsDir)[0],
|
||||||
|
[]fs.Entry{
|
||||||
|
virtualfs.StreamingFileWithModTimeFromReader(
|
||||||
|
encodeElements(contactsFileName1)[0],
|
||||||
|
time.Time{},
|
||||||
|
newBackupStreamReader(
|
||||||
|
serializationVersion,
|
||||||
|
io.NopCloser(bytes.NewReader(contactsFileData1)))),
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check the following:
|
||||||
|
// * contacts pulled from base1 unchanged even if no collections reference
|
||||||
|
// it
|
||||||
|
// * email pulled from base2
|
||||||
|
//
|
||||||
|
// Expected output:
|
||||||
|
// - a-tenant
|
||||||
|
// - exchange
|
||||||
|
// - user1new
|
||||||
|
// - email
|
||||||
|
// - Inbox
|
||||||
|
// - file1
|
||||||
|
// - contacts
|
||||||
|
// - contacts
|
||||||
|
// - file1
|
||||||
|
expected := expectedTreeWithChildren(
|
||||||
|
[]string{testTenant, service, migratedUser},
|
||||||
|
[]*expectedNode{
|
||||||
|
{
|
||||||
|
name: category,
|
||||||
|
children: []*expectedNode{
|
||||||
|
{
|
||||||
|
name: testInboxID,
|
||||||
|
children: []*expectedNode{
|
||||||
|
{
|
||||||
|
name: inboxFileName1,
|
||||||
|
children: []*expectedNode{},
|
||||||
|
data: inboxFileData1,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: path.ContactsCategory.String(),
|
||||||
|
children: []*expectedNode{
|
||||||
|
{
|
||||||
|
name: contactsDir,
|
||||||
|
children: []*expectedNode{
|
||||||
|
{
|
||||||
|
name: contactsFileName1,
|
||||||
|
children: []*expectedNode{},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
progress := &corsoProgress{
|
||||||
|
pending: map[string]*itemDetails{},
|
||||||
|
toMerge: newMergeDetails(),
|
||||||
|
errs: fault.New(true),
|
||||||
|
}
|
||||||
|
|
||||||
|
mce := exchMock.NewCollection(newPrefixPathEmail, nil, 0)
|
||||||
|
mce.PrevPath = oldPrefixPathEmail
|
||||||
|
mce.ColState = data.MovedState
|
||||||
|
|
||||||
|
mcc := exchMock.NewCollection(newPrefixPathCont, nil, 0)
|
||||||
|
mcc.PrevPath = oldPrefixPathCont
|
||||||
|
mcc.ColState = data.MovedState
|
||||||
|
|
||||||
|
msw := &mockMultiSnapshotWalker{
|
||||||
|
snaps: map[string]fs.Entry{"id1": getBaseSnapshot1()},
|
||||||
|
}
|
||||||
|
|
||||||
|
dirTree, err := inflateDirTree(
|
||||||
|
ctx,
|
||||||
|
msw,
|
||||||
|
[]IncrementalBase{
|
||||||
|
mockIncrementalBase("id1", testTenant, testUser, path.ExchangeService, path.EmailCategory, path.ContactsCategory),
|
||||||
|
},
|
||||||
|
[]data.BackupCollection{mce, mcc},
|
||||||
|
nil,
|
||||||
|
progress)
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
expectTree(t, ctx, expected, dirTree)
|
||||||
|
}
|
||||||
|
|||||||
@ -59,9 +59,9 @@ type BaseModel struct {
|
|||||||
// to refer to this one. This field may change if the model is updated. This
|
// to refer to this one. This field may change if the model is updated. This
|
||||||
// field should be treated as read-only by users.
|
// field should be treated as read-only by users.
|
||||||
ModelStoreID manifest.ID `json:"-"`
|
ModelStoreID manifest.ID `json:"-"`
|
||||||
// Version is a version number that can help track changes across models.
|
// ModelVersion is a version number that can help track changes across models.
|
||||||
// TODO(ashmrtn): Reference version control documentation.
|
// TODO(ashmrtn): Reference version control documentation.
|
||||||
Version int `json:"-"`
|
ModelVersion int `json:"-"`
|
||||||
// Tags associated with this model in the store to facilitate lookup. Tags in
|
// Tags associated with this model in the store to facilitate lookup. Tags in
|
||||||
// the struct are not serialized directly into the stored model, but are part
|
// the struct are not serialized directly into the stored model, but are part
|
||||||
// of the metadata for the model.
|
// of the metadata for the model.
|
||||||
|
|||||||
@ -9,6 +9,7 @@ import (
|
|||||||
|
|
||||||
"github.com/alcionai/corso/src/internal/common"
|
"github.com/alcionai/corso/src/internal/common"
|
||||||
"github.com/alcionai/corso/src/internal/common/crash"
|
"github.com/alcionai/corso/src/internal/common/crash"
|
||||||
|
"github.com/alcionai/corso/src/internal/common/idname"
|
||||||
"github.com/alcionai/corso/src/internal/data"
|
"github.com/alcionai/corso/src/internal/data"
|
||||||
"github.com/alcionai/corso/src/internal/diagnostics"
|
"github.com/alcionai/corso/src/internal/diagnostics"
|
||||||
"github.com/alcionai/corso/src/internal/events"
|
"github.com/alcionai/corso/src/internal/events"
|
||||||
@ -18,6 +19,7 @@ import (
|
|||||||
"github.com/alcionai/corso/src/internal/operations/inject"
|
"github.com/alcionai/corso/src/internal/operations/inject"
|
||||||
"github.com/alcionai/corso/src/internal/stats"
|
"github.com/alcionai/corso/src/internal/stats"
|
||||||
"github.com/alcionai/corso/src/internal/streamstore"
|
"github.com/alcionai/corso/src/internal/streamstore"
|
||||||
|
"github.com/alcionai/corso/src/internal/version"
|
||||||
"github.com/alcionai/corso/src/pkg/account"
|
"github.com/alcionai/corso/src/pkg/account"
|
||||||
"github.com/alcionai/corso/src/pkg/backup"
|
"github.com/alcionai/corso/src/pkg/backup"
|
||||||
"github.com/alcionai/corso/src/pkg/backup/details"
|
"github.com/alcionai/corso/src/pkg/backup/details"
|
||||||
@ -33,12 +35,19 @@ import (
|
|||||||
type BackupOperation struct {
|
type BackupOperation struct {
|
||||||
operation
|
operation
|
||||||
|
|
||||||
ResourceOwner common.IDNamer
|
ResourceOwner idname.Provider
|
||||||
|
|
||||||
Results BackupResults `json:"results"`
|
Results BackupResults `json:"results"`
|
||||||
Selectors selectors.Selector `json:"selectors"`
|
Selectors selectors.Selector `json:"selectors"`
|
||||||
Version string `json:"version"`
|
Version string `json:"version"`
|
||||||
|
|
||||||
|
// backupVersion ONLY controls the value that gets persisted to the
|
||||||
|
// backup model after operation. It does NOT modify the operation behavior
|
||||||
|
// to match the version. Its inclusion here is, unfortunately, purely to
|
||||||
|
// facilitate integration testing that requires a certain backup version, and
|
||||||
|
// should be removed when we have a more controlled workaround.
|
||||||
|
backupVersion int
|
||||||
|
|
||||||
account account.Account
|
account account.Account
|
||||||
bp inject.BackupProducer
|
bp inject.BackupProducer
|
||||||
|
|
||||||
@ -62,7 +71,7 @@ func NewBackupOperation(
|
|||||||
bp inject.BackupProducer,
|
bp inject.BackupProducer,
|
||||||
acct account.Account,
|
acct account.Account,
|
||||||
selector selectors.Selector,
|
selector selectors.Selector,
|
||||||
owner common.IDNamer,
|
owner idname.Provider,
|
||||||
bus events.Eventer,
|
bus events.Eventer,
|
||||||
) (BackupOperation, error) {
|
) (BackupOperation, error) {
|
||||||
op := BackupOperation{
|
op := BackupOperation{
|
||||||
@ -70,6 +79,7 @@ func NewBackupOperation(
|
|||||||
ResourceOwner: owner,
|
ResourceOwner: owner,
|
||||||
Selectors: selector,
|
Selectors: selector,
|
||||||
Version: "v0",
|
Version: "v0",
|
||||||
|
backupVersion: version.Backup,
|
||||||
account: acct,
|
account: acct,
|
||||||
incremental: useIncrementalBackup(selector, opts),
|
incremental: useIncrementalBackup(selector, opts),
|
||||||
bp: bp,
|
bp: bp,
|
||||||
@ -210,6 +220,7 @@ func (op *BackupOperation) Run(ctx context.Context) (err error) {
|
|||||||
sstore,
|
sstore,
|
||||||
opStats.k.SnapshotID,
|
opStats.k.SnapshotID,
|
||||||
op.Results.BackupID,
|
op.Results.BackupID,
|
||||||
|
op.backupVersion,
|
||||||
deets.Details())
|
deets.Details())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
op.Errors.Fail(clues.Wrap(err, "persisting backup"))
|
op.Errors.Fail(clues.Wrap(err, "persisting backup"))
|
||||||
@ -253,12 +264,18 @@ func (op *BackupOperation) do(
|
|||||||
return nil, clues.Wrap(err, "producing manifests and metadata")
|
return nil, clues.Wrap(err, "producing manifests and metadata")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
_, lastBackupVersion, err := lastCompleteBackups(ctx, op.store, mans)
|
||||||
|
if err != nil {
|
||||||
|
return nil, clues.Wrap(err, "retrieving prior backups")
|
||||||
|
}
|
||||||
|
|
||||||
cs, excludes, err := produceBackupDataCollections(
|
cs, excludes, err := produceBackupDataCollections(
|
||||||
ctx,
|
ctx,
|
||||||
op.bp,
|
op.bp,
|
||||||
op.ResourceOwner,
|
op.ResourceOwner,
|
||||||
op.Selectors,
|
op.Selectors,
|
||||||
mdColls,
|
mdColls,
|
||||||
|
lastBackupVersion,
|
||||||
op.Options,
|
op.Options,
|
||||||
op.Errors)
|
op.Errors)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -333,9 +350,10 @@ func useIncrementalBackup(sel selectors.Selector, opts control.Options) bool {
|
|||||||
func produceBackupDataCollections(
|
func produceBackupDataCollections(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
bp inject.BackupProducer,
|
bp inject.BackupProducer,
|
||||||
resourceOwner common.IDNamer,
|
resourceOwner idname.Provider,
|
||||||
sel selectors.Selector,
|
sel selectors.Selector,
|
||||||
metadata []data.RestoreCollection,
|
metadata []data.RestoreCollection,
|
||||||
|
lastBackupVersion int,
|
||||||
ctrlOpts control.Options,
|
ctrlOpts control.Options,
|
||||||
errs *fault.Bus,
|
errs *fault.Bus,
|
||||||
) ([]data.BackupCollection, map[string]map[string]struct{}, error) {
|
) ([]data.BackupCollection, map[string]map[string]struct{}, error) {
|
||||||
@ -346,7 +364,7 @@ func produceBackupDataCollections(
|
|||||||
closer()
|
closer()
|
||||||
}()
|
}()
|
||||||
|
|
||||||
return bp.ProduceBackupCollections(ctx, resourceOwner, sel, metadata, ctrlOpts, errs)
|
return bp.ProduceBackupCollections(ctx, resourceOwner, sel, metadata, lastBackupVersion, ctrlOpts, errs)
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
@ -585,6 +603,56 @@ func getNewPathRefs(
|
|||||||
return newPath, newLoc, updated, nil
|
return newPath, newLoc, updated, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func lastCompleteBackups(
|
||||||
|
ctx context.Context,
|
||||||
|
ms *store.Wrapper,
|
||||||
|
mans []*kopia.ManifestEntry,
|
||||||
|
) (map[string]*backup.Backup, int, error) {
|
||||||
|
var (
|
||||||
|
oldestVersion = version.NoBackup
|
||||||
|
result = map[string]*backup.Backup{}
|
||||||
|
)
|
||||||
|
|
||||||
|
if len(mans) == 0 {
|
||||||
|
return result, -1, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, man := range mans {
|
||||||
|
// For now skip snapshots that aren't complete. We will need to revisit this
|
||||||
|
// when we tackle restartability.
|
||||||
|
if len(man.IncompleteReason) > 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
mctx = clues.Add(ctx, "base_manifest_id", man.ID)
|
||||||
|
reasons = man.Reasons
|
||||||
|
)
|
||||||
|
|
||||||
|
bID, ok := man.GetTag(kopia.TagBackupID)
|
||||||
|
if !ok {
|
||||||
|
return result, oldestVersion, clues.New("no backup ID in snapshot manifest").WithClues(mctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
mctx = clues.Add(mctx, "base_manifest_backup_id", bID)
|
||||||
|
|
||||||
|
bup, err := getBackupFromID(mctx, model.StableID(bID), ms)
|
||||||
|
if err != nil {
|
||||||
|
return result, oldestVersion, err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, r := range reasons {
|
||||||
|
result[r.Key()] = bup
|
||||||
|
}
|
||||||
|
|
||||||
|
if oldestVersion == -1 || bup.Version < oldestVersion {
|
||||||
|
oldestVersion = bup.Version
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result, oldestVersion, nil
|
||||||
|
}
|
||||||
|
|
||||||
func mergeDetails(
|
func mergeDetails(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
ms *store.Wrapper,
|
ms *store.Wrapper,
|
||||||
@ -627,7 +695,7 @@ func mergeDetails(
|
|||||||
detailsStore,
|
detailsStore,
|
||||||
errs)
|
errs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return clues.New("fetching base details for backup").WithClues(mctx)
|
return clues.New("fetching base details for backup")
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, entry := range baseDeets.Items() {
|
for _, entry := range baseDeets.Items() {
|
||||||
@ -749,6 +817,7 @@ func (op *BackupOperation) createBackupModels(
|
|||||||
sscw streamstore.CollectorWriter,
|
sscw streamstore.CollectorWriter,
|
||||||
snapID string,
|
snapID string,
|
||||||
backupID model.StableID,
|
backupID model.StableID,
|
||||||
|
backupVersion int,
|
||||||
deets *details.Details,
|
deets *details.Details,
|
||||||
) error {
|
) error {
|
||||||
ctx = clues.Add(ctx, "snapshot_id", snapID, "backup_id", backupID)
|
ctx = clues.Add(ctx, "snapshot_id", snapID, "backup_id", backupID)
|
||||||
@ -783,6 +852,7 @@ func (op *BackupOperation) createBackupModels(
|
|||||||
b := backup.New(
|
b := backup.New(
|
||||||
snapID, ssid,
|
snapID, ssid,
|
||||||
op.Status.String(),
|
op.Status.String(),
|
||||||
|
backupVersion,
|
||||||
backupID,
|
backupID,
|
||||||
op.Selectors,
|
op.Selectors,
|
||||||
op.ResourceOwner.ID(),
|
op.ResourceOwner.ID(),
|
||||||
|
|||||||
@ -17,6 +17,7 @@ import (
|
|||||||
"golang.org/x/exp/maps"
|
"golang.org/x/exp/maps"
|
||||||
|
|
||||||
"github.com/alcionai/corso/src/internal/common"
|
"github.com/alcionai/corso/src/internal/common"
|
||||||
|
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
|
||||||
"github.com/alcionai/corso/src/internal/common/ptr"
|
"github.com/alcionai/corso/src/internal/common/ptr"
|
||||||
"github.com/alcionai/corso/src/internal/connector"
|
"github.com/alcionai/corso/src/internal/connector"
|
||||||
"github.com/alcionai/corso/src/internal/connector/exchange"
|
"github.com/alcionai/corso/src/internal/connector/exchange"
|
||||||
@ -32,6 +33,7 @@ import (
|
|||||||
"github.com/alcionai/corso/src/internal/kopia"
|
"github.com/alcionai/corso/src/internal/kopia"
|
||||||
"github.com/alcionai/corso/src/internal/model"
|
"github.com/alcionai/corso/src/internal/model"
|
||||||
"github.com/alcionai/corso/src/internal/operations/inject"
|
"github.com/alcionai/corso/src/internal/operations/inject"
|
||||||
|
"github.com/alcionai/corso/src/internal/streamstore"
|
||||||
"github.com/alcionai/corso/src/internal/tester"
|
"github.com/alcionai/corso/src/internal/tester"
|
||||||
"github.com/alcionai/corso/src/internal/version"
|
"github.com/alcionai/corso/src/internal/version"
|
||||||
"github.com/alcionai/corso/src/pkg/account"
|
"github.com/alcionai/corso/src/pkg/account"
|
||||||
@ -62,6 +64,7 @@ func prepNewTestBackupOp(
|
|||||||
bus events.Eventer,
|
bus events.Eventer,
|
||||||
sel selectors.Selector,
|
sel selectors.Selector,
|
||||||
featureToggles control.Toggles,
|
featureToggles control.Toggles,
|
||||||
|
backupVersion int,
|
||||||
) (
|
) (
|
||||||
BackupOperation,
|
BackupOperation,
|
||||||
account.Account,
|
account.Account,
|
||||||
@ -643,7 +646,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchange() {
|
|||||||
ffs = control.Toggles{}
|
ffs = control.Toggles{}
|
||||||
)
|
)
|
||||||
|
|
||||||
bo, acct, kw, ms, gc, closer := prepNewTestBackupOp(t, ctx, mb, sel, ffs)
|
bo, acct, kw, ms, gc, closer := prepNewTestBackupOp(t, ctx, mb, sel, ffs, version.Backup)
|
||||||
defer closer()
|
defer closer()
|
||||||
|
|
||||||
m365, err := acct.M365Config()
|
m365, err := acct.M365Config()
|
||||||
@ -836,11 +839,9 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
|
|||||||
|
|
||||||
// verify test data was populated, and track it for comparisons
|
// verify test data was populated, and track it for comparisons
|
||||||
for category, gen := range dataset {
|
for category, gen := range dataset {
|
||||||
ss := selectors.Selector{}.SetDiscreteOwnerIDName(suite.user, suite.user)
|
|
||||||
|
|
||||||
qp := graph.QueryParams{
|
qp := graph.QueryParams{
|
||||||
Category: category,
|
Category: category,
|
||||||
ResourceOwner: ss,
|
ResourceOwner: inMock.NewProvider(suite.user, suite.user),
|
||||||
Credentials: m365,
|
Credentials: m365,
|
||||||
}
|
}
|
||||||
cr, err := exchange.PopulateExchangeContainerResolver(ctx, qp, fault.New(true))
|
cr, err := exchange.PopulateExchangeContainerResolver(ctx, qp, fault.New(true))
|
||||||
@ -859,7 +860,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
bo, _, kw, ms, gc, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, ffs)
|
bo, _, kw, ms, gc, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, ffs, version.Backup)
|
||||||
defer closer()
|
defer closer()
|
||||||
|
|
||||||
// run the initial backup
|
// run the initial backup
|
||||||
@ -942,11 +943,9 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_exchangeIncrementals() {
|
|||||||
version.Backup,
|
version.Backup,
|
||||||
gen.dbf)
|
gen.dbf)
|
||||||
|
|
||||||
ss := selectors.Selector{}.SetDiscreteOwnerIDName(suite.user, suite.user)
|
|
||||||
|
|
||||||
qp := graph.QueryParams{
|
qp := graph.QueryParams{
|
||||||
Category: category,
|
Category: category,
|
||||||
ResourceOwner: ss,
|
ResourceOwner: inMock.NewProvider(suite.user, suite.user),
|
||||||
Credentials: m365,
|
Credentials: m365,
|
||||||
}
|
}
|
||||||
cr, err := exchange.PopulateExchangeContainerResolver(ctx, qp, fault.New(true))
|
cr, err := exchange.PopulateExchangeContainerResolver(ctx, qp, fault.New(true))
|
||||||
@ -1146,7 +1145,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDrive() {
|
|||||||
|
|
||||||
sel.Include(sel.AllData())
|
sel.Include(sel.AllData())
|
||||||
|
|
||||||
bo, _, _, _, _, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, control.Toggles{})
|
bo, _, _, _, _, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, control.Toggles{}, version.Backup)
|
||||||
defer closer()
|
defer closer()
|
||||||
|
|
||||||
runAndCheckBackup(t, ctx, &bo, mb, false)
|
runAndCheckBackup(t, ctx, &bo, mb, false)
|
||||||
@ -1236,7 +1235,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
|
|||||||
containerIDs[destName] = ptr.Val(resp.GetId())
|
containerIDs[destName] = ptr.Val(resp.GetId())
|
||||||
}
|
}
|
||||||
|
|
||||||
bo, _, kw, ms, gc, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, ffs)
|
bo, _, kw, ms, gc, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, ffs, version.Backup)
|
||||||
defer closer()
|
defer closer()
|
||||||
|
|
||||||
// run the initial backup
|
// run the initial backup
|
||||||
@ -1580,6 +1579,127 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveIncrementals() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (suite *BackupOpIntegrationSuite) TestBackup_Run_oneDriveOwnerMigration() {
|
||||||
|
ctx, flush := tester.NewContext()
|
||||||
|
defer flush()
|
||||||
|
|
||||||
|
var (
|
||||||
|
t = suite.T()
|
||||||
|
acct = tester.NewM365Account(t)
|
||||||
|
ffs = control.Toggles{}
|
||||||
|
mb = evmock.NewBus()
|
||||||
|
|
||||||
|
categories = map[path.CategoryType][]string{
|
||||||
|
path.FilesCategory: {graph.DeltaURLsFileName, graph.PreviousPathFileName},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
creds, err := acct.M365Config()
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
gc, err := connector.NewGraphConnector(
|
||||||
|
ctx,
|
||||||
|
acct,
|
||||||
|
connector.Users)
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
userable, err := gc.Discovery.Users().GetByID(ctx, suite.user)
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
uid := ptr.Val(userable.GetId())
|
||||||
|
uname := ptr.Val(userable.GetUserPrincipalName())
|
||||||
|
|
||||||
|
oldsel := selectors.NewOneDriveBackup([]string{uname})
|
||||||
|
oldsel.Include(oldsel.Folders([]string{"test"}, selectors.ExactMatch()))
|
||||||
|
|
||||||
|
bo, _, kw, ms, gc, closer := prepNewTestBackupOp(t, ctx, mb, oldsel.Selector, ffs, 0)
|
||||||
|
defer closer()
|
||||||
|
|
||||||
|
// ensure the initial owner uses name in both cases
|
||||||
|
bo.ResourceOwner = oldsel.SetDiscreteOwnerIDName(uname, uname)
|
||||||
|
// required, otherwise we don't run the migration
|
||||||
|
bo.backupVersion = version.AllXMigrateUserPNToID - 1
|
||||||
|
bo.Options.ToggleFeatures.RunMigrations = false
|
||||||
|
|
||||||
|
require.Equalf(
|
||||||
|
t,
|
||||||
|
bo.ResourceOwner.Name(),
|
||||||
|
bo.ResourceOwner.ID(),
|
||||||
|
"historical representation of user id [%s] should match pn [%s]",
|
||||||
|
bo.ResourceOwner.ID(),
|
||||||
|
bo.ResourceOwner.Name())
|
||||||
|
|
||||||
|
// run the initial backup
|
||||||
|
runAndCheckBackup(t, ctx, &bo, mb, false)
|
||||||
|
|
||||||
|
newsel := selectors.NewOneDriveBackup([]string{uid})
|
||||||
|
newsel.Include(newsel.Folders([]string{"test"}, selectors.ExactMatch()))
|
||||||
|
sel := newsel.SetDiscreteOwnerIDName(uid, uname)
|
||||||
|
|
||||||
|
var (
|
||||||
|
incMB = evmock.NewBus()
|
||||||
|
// the incremental backup op should have a proper user ID for the id.
|
||||||
|
incBO = newTestBackupOp(t, ctx, kw, ms, gc, acct, sel, incMB, ffs, closer)
|
||||||
|
)
|
||||||
|
|
||||||
|
incBO.Options.ToggleFeatures.RunMigrations = true
|
||||||
|
|
||||||
|
require.NotEqualf(
|
||||||
|
t,
|
||||||
|
incBO.ResourceOwner.Name(),
|
||||||
|
incBO.ResourceOwner.ID(),
|
||||||
|
"current representation of user: id [%s] should differ from PN [%s]",
|
||||||
|
incBO.ResourceOwner.ID(),
|
||||||
|
incBO.ResourceOwner.Name())
|
||||||
|
|
||||||
|
err = incBO.Run(ctx)
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
checkBackupIsInManifests(t, ctx, kw, &incBO, sel, uid, maps.Keys(categories)...)
|
||||||
|
checkMetadataFilesExist(
|
||||||
|
t,
|
||||||
|
ctx,
|
||||||
|
incBO.Results.BackupID,
|
||||||
|
kw,
|
||||||
|
ms,
|
||||||
|
creds.AzureTenantID,
|
||||||
|
uid,
|
||||||
|
path.OneDriveService,
|
||||||
|
categories)
|
||||||
|
|
||||||
|
// 2 on read/writes to account for metadata: 1 delta and 1 path.
|
||||||
|
assert.LessOrEqual(t, 2, incBO.Results.ItemsWritten, "items written")
|
||||||
|
assert.LessOrEqual(t, 2, incBO.Results.ItemsRead, "items read")
|
||||||
|
assert.NoError(t, incBO.Errors.Failure(), "non-recoverable error", clues.ToCore(incBO.Errors.Failure()))
|
||||||
|
assert.Empty(t, incBO.Errors.Recovered(), "recoverable/iteration errors")
|
||||||
|
assert.Equal(t, 1, incMB.TimesCalled[events.BackupStart], "backup-start events")
|
||||||
|
assert.Equal(t, 1, incMB.TimesCalled[events.BackupEnd], "backup-end events")
|
||||||
|
assert.Equal(t,
|
||||||
|
incMB.CalledWith[events.BackupStart][0][events.BackupID],
|
||||||
|
incBO.Results.BackupID, "backupID pre-declaration")
|
||||||
|
|
||||||
|
bid := incBO.Results.BackupID
|
||||||
|
bup := &backup.Backup{}
|
||||||
|
|
||||||
|
err = ms.Get(ctx, model.BackupSchema, bid, bup)
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
var (
|
||||||
|
ssid = bup.StreamStoreID
|
||||||
|
deets details.Details
|
||||||
|
ss = streamstore.NewStreamer(kw, creds.AzureTenantID, path.OneDriveService)
|
||||||
|
)
|
||||||
|
|
||||||
|
err = ss.Read(ctx, ssid, streamstore.DetailsReader(details.UnmarshalTo(&deets)), fault.New(true))
|
||||||
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
for _, ent := range deets.Entries {
|
||||||
|
// 46 is the tenant uuid + "onedrive" + two slashes
|
||||||
|
if len(ent.RepoRef) > 46 {
|
||||||
|
assert.Contains(t, ent.RepoRef, uid)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
// SharePoint
|
// SharePoint
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
@ -1596,7 +1716,7 @@ func (suite *BackupOpIntegrationSuite) TestBackup_Run_sharePoint() {
|
|||||||
|
|
||||||
sel.Include(selTD.SharePointBackupFolderScope(sel))
|
sel.Include(selTD.SharePointBackupFolderScope(sel))
|
||||||
|
|
||||||
bo, _, kw, _, _, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, control.Toggles{})
|
bo, _, kw, _, _, closer := prepNewTestBackupOp(t, ctx, mb, sel.Selector, control.Toggles{}, version.Backup)
|
||||||
defer closer()
|
defer closer()
|
||||||
|
|
||||||
runAndCheckBackup(t, ctx, &bo, mb, false)
|
runAndCheckBackup(t, ctx, &bo, mb, false)
|
||||||
|
|||||||
@ -13,6 +13,19 @@ import (
|
|||||||
"github.com/alcionai/corso/src/pkg/store"
|
"github.com/alcionai/corso/src/pkg/store"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
func getBackupFromID(
|
||||||
|
ctx context.Context,
|
||||||
|
backupID model.StableID,
|
||||||
|
ms *store.Wrapper,
|
||||||
|
) (*backup.Backup, error) {
|
||||||
|
bup, err := ms.GetBackup(ctx, backupID)
|
||||||
|
if err != nil {
|
||||||
|
return nil, clues.Wrap(err, "getting backup")
|
||||||
|
}
|
||||||
|
|
||||||
|
return bup, nil
|
||||||
|
}
|
||||||
|
|
||||||
func getBackupAndDetailsFromID(
|
func getBackupAndDetailsFromID(
|
||||||
ctx context.Context,
|
ctx context.Context,
|
||||||
backupID model.StableID,
|
backupID model.StableID,
|
||||||
@ -22,7 +35,7 @@ func getBackupAndDetailsFromID(
|
|||||||
) (*backup.Backup, *details.Details, error) {
|
) (*backup.Backup, *details.Details, error) {
|
||||||
bup, err := ms.GetBackup(ctx, backupID)
|
bup, err := ms.GetBackup(ctx, backupID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, clues.Wrap(err, "getting backup details ID")
|
return nil, nil, clues.Wrap(err, "getting backup")
|
||||||
}
|
}
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
|||||||
@ -7,7 +7,7 @@ import (
|
|||||||
"github.com/alcionai/clues"
|
"github.com/alcionai/clues"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
|
|
||||||
"github.com/alcionai/corso/src/internal/common"
|
"github.com/alcionai/corso/src/internal/common/idname"
|
||||||
"github.com/alcionai/corso/src/internal/connector"
|
"github.com/alcionai/corso/src/internal/connector"
|
||||||
"github.com/alcionai/corso/src/pkg/account"
|
"github.com/alcionai/corso/src/pkg/account"
|
||||||
"github.com/alcionai/corso/src/pkg/selectors"
|
"github.com/alcionai/corso/src/pkg/selectors"
|
||||||
@ -22,7 +22,7 @@ func GCWithSelector(
|
|||||||
acct account.Account,
|
acct account.Account,
|
||||||
cr connector.Resource,
|
cr connector.Resource,
|
||||||
sel selectors.Selector,
|
sel selectors.Selector,
|
||||||
ins common.IDNameSwapper,
|
ins idname.Cacher,
|
||||||
onFail func(),
|
onFail func(),
|
||||||
) *connector.GraphConnector {
|
) *connector.GraphConnector {
|
||||||
gc, err := connector.NewGraphConnector(ctx, acct, cr)
|
gc, err := connector.NewGraphConnector(ctx, acct, cr)
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ package inject
 import (
     "context"

-    "github.com/alcionai/corso/src/internal/common"
+    "github.com/alcionai/corso/src/internal/common/idname"
     "github.com/alcionai/corso/src/internal/data"
     "github.com/alcionai/corso/src/internal/kopia"
     "github.com/alcionai/corso/src/pkg/account"
@@ -18,9 +18,10 @@ type (
     BackupProducer interface {
         ProduceBackupCollections(
             ctx context.Context,
-            resourceOwner common.IDNamer,
+            resourceOwner idname.Provider,
             sels selectors.Selector,
             metadata []data.RestoreCollection,
+            lastBackupVersion int,
             ctrlOpts control.Options,
             errs *fault.Bus,
         ) ([]data.BackupCollection, map[string]map[string]struct{}, error)
|
|||||||
"github.com/stretchr/testify/suite"
|
"github.com/stretchr/testify/suite"
|
||||||
|
|
||||||
"github.com/alcionai/corso/src/internal/common"
|
"github.com/alcionai/corso/src/internal/common"
|
||||||
|
inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
|
||||||
"github.com/alcionai/corso/src/internal/connector"
|
"github.com/alcionai/corso/src/internal/connector"
|
||||||
"github.com/alcionai/corso/src/internal/connector/exchange"
|
"github.com/alcionai/corso/src/internal/connector/exchange"
|
||||||
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
|
exchMock "github.com/alcionai/corso/src/internal/connector/exchange/mock"
|
||||||
@ -288,7 +289,7 @@ func setupExchangeBackup(
|
|||||||
gc,
|
gc,
|
||||||
acct,
|
acct,
|
||||||
sel.Selector,
|
sel.Selector,
|
||||||
sel.Selector,
|
inMock.NewProvider(owner, owner),
|
||||||
evmock.NewBus())
|
evmock.NewBus())
|
||||||
require.NoError(t, err, clues.ToCore(err))
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
@ -339,7 +340,7 @@ func setupSharePointBackup(
|
|||||||
gc,
|
gc,
|
||||||
acct,
|
acct,
|
||||||
sel.Selector,
|
sel.Selector,
|
||||||
sel.Selector,
|
inMock.NewProvider(owner, owner),
|
||||||
evmock.NewBus())
|
evmock.NewBus())
|
||||||
require.NoError(t, err, clues.ToCore(err))
|
require.NoError(t, err, clues.ToCore(err))
|
||||||
|
|
||||||
|
|||||||
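For context, a minimal sketch (not part of this diff) of building the id/name mock used in the setup helpers above; the argument order (id first, then name) is an assumption based on the calls shown:

```go
package example

import (
	inMock "github.com/alcionai/corso/src/internal/common/idname/mock"
)

// newTestOwner mirrors the setup helpers above: tests that don't care about
// the distinction pass the same value for both the id and the name.
func newTestOwner(owner string) any {
	return inMock.NewProvider(owner, owner)
}
```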
@@ -9,6 +9,9 @@ const Backup = 7
 // Labels should state their application, the backup version number,
 // and the colloquial purpose of the label.
 const (
+	// NoBackup should be used when we cannot find, or do not supply, prior backup metadata.
+	NoBackup = -1
+
	// OneDrive1DataAndMetaFiles is the corso backup format version
	// in which we split from storing just the data to storing both
	// the data and metadata in two files.
@@ -39,4 +42,13 @@ const (
	// OneDriveXLocationRef provides LocationRef information for Exchange,
	// OneDrive, and SharePoint libraries.
	OneDrive7LocationRef = 7
+
+	// AllXMigrateUserPNToID marks when we migrated repo refs from the user's
+	// PrincipalName to their ID for stability.
+	AllXMigrateUserPNToID = Backup + 1
 )
+
+// IsNoBackup returns true if the version implies that no prior backup exists.
+func IsNoBackup(version int) bool {
+	return version <= NoBackup
+}
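A minimal sketch (not part of this diff) of how a caller might use the new sentinel and helper, for example when deciding whether a prior backup's version is available before producing collections; the helper names here are hypothetical:

```go
package example

import "github.com/alcionai/corso/src/internal/version"

// lastBackupVersionOr returns the stored backup's version, or the NoBackup
// sentinel when no prior backup metadata was found.
func lastBackupVersionOr(stored int, found bool) int {
	if !found {
		return version.NoBackup
	}

	return stored
}

// needsFullEnumeration reports whether no usable prior backup exists.
func needsFullEnumeration(lastBackupVersion int) bool {
	return version.IsNoBackup(lastBackupVersion)
}
```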
@@ -13,7 +13,6 @@ import (
	"github.com/alcionai/corso/src/internal/common"
	"github.com/alcionai/corso/src/internal/model"
	"github.com/alcionai/corso/src/internal/stats"
-	"github.com/alcionai/corso/src/internal/version"
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/selectors"
 )
@@ -67,6 +66,7 @@ var _ print.Printable = &Backup{}

 func New(
	snapshotID, streamStoreID, status string,
+	version int,
	id model.StableID,
	selector selectors.Selector,
	ownerID, ownerName string,
@@ -116,7 +116,7 @@ func New(
		ResourceOwnerID: ownerID,
		ResourceOwnerName: ownerName,

-		Version: version.Backup,
+		Version: version,
		SnapshotID: snapshotID,
		StreamStoreID: streamStoreID,

src/pkg/backup/details/mock/location_ider.go (new file, 16 lines)
@@ -0,0 +1,16 @@
+package mock
+
+import "github.com/alcionai/corso/src/pkg/path"
+
+type LocationIDer struct {
+	Unique *path.Builder
+	Details *path.Builder
+}
+
+func (li LocationIDer) ID() *path.Builder {
+	return li.Unique
+}
+
+func (li LocationIDer) InDetails() *path.Builder {
+	return li.Details
+}
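A minimal usage sketch (not part of this diff) for the new mock; the builders here are empty placeholders:

```go
package example

import (
	"github.com/alcionai/corso/src/pkg/backup/details/mock"
	"github.com/alcionai/corso/src/pkg/path"
)

func demo() {
	li := mock.LocationIDer{
		Unique:  &path.Builder{},
		Details: &path.Builder{},
	}

	_ = li.ID()        // returns the Unique builder
	_ = li.InDetails() // returns the Details builder
}
```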
@@ -103,4 +103,6 @@ type Toggles struct {
	// immutable Exchange IDs. This is only safe to set if the previous backup for
	// incremental backups used immutable IDs or if a full backup is being done.
	ExchangeImmutableIDs bool `json:"exchangeImmutableIDs,omitempty"`
+
+	RunMigrations bool `json:"runMigrations"`
 }
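A minimal sketch (not part of this diff) of enabling the new toggle; the import path for the control package is an assumption, as it isn't shown in this hunk:

```go
package example

import "github.com/alcionai/corso/src/pkg/control"

// withMigrations returns a Toggles value with the new migration flag set,
// leaving all other feature toggles at their defaults.
func withMigrations() control.Toggles {
	return control.Toggles{RunMigrations: true}
}
```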
@@ -299,18 +299,27 @@ func (pb Builder) Elements() Elements {
	return append(Elements{}, pb.elements...)
 }

-// verifyPrefix ensures that the tenant and resourceOwner are valid
-// values, and that the builder has some directory structure.
-func (pb Builder) verifyPrefix(tenant, resourceOwner string) error {
+func ServicePrefix(
+	tenant, resourceOwner string,
+	s ServiceType,
+	c CategoryType,
+) (Path, error) {
+	pb := Builder{}
+
+	if err := ValidateServiceAndCategory(s, c); err != nil {
+		return nil, err
+	}
+
	if err := verifyInputValues(tenant, resourceOwner); err != nil {
-		return err
+		return nil, err
	}

-	if len(pb.elements) == 0 {
-		return clues.New("missing path beyond prefix")
-	}
-
-	return nil
+	return &dataLayerResourcePath{
+		Builder: *pb.withPrefix(tenant, s.String(), resourceOwner, c.String()),
+		service: s,
+		category: c,
+		hasItem: false,
+	}, nil
 }

 // withPrefix creates a Builder prefixed with the parameter values, and
@@ -740,3 +749,17 @@ func join(elements []string) string {
 // '\' according to the escaping rules.
	return strings.Join(elements, string(PathSeparator))
 }
+
+// verifyPrefix ensures that the tenant and resourceOwner are valid
+// values, and that the builder has some directory structure.
+func (pb Builder) verifyPrefix(tenant, resourceOwner string) error {
+	if err := verifyInputValues(tenant, resourceOwner); err != nil {
+		return err
+	}
+
+	if len(pb.elements) == 0 {
+		return clues.New("missing path beyond prefix")
+	}
+
+	return nil
+}
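A minimal sketch (not part of this diff) of calling the new ServicePrefix constructor from outside the path package; tenant and owner values are placeholders, and the rendered string depends on the String() values of the service and category (see the unit test below):

```go
package example

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/path"
)

// exchangeContactsPrefix builds the tenant/service/owner/category prefix
// for an Exchange contacts backup.
func exchangeContactsPrefix(tenant, owner string) (path.Path, error) {
	p, err := path.ServicePrefix(tenant, owner, path.ExchangeService, path.ContactsCategory)
	if err != nil {
		return nil, err
	}

	fmt.Println(p.String()) // e.g. "<tenant>/<service>/<owner>/<category>"

	return p, nil
}
```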
@@ -749,3 +749,67 @@ func (suite *PathUnitSuite) TestPath_piiHandling() {
		})
	}
 }
+
+func (suite *PathUnitSuite) TestToServicePrefix() {
+	table := []struct {
+		name string
+		service ServiceType
+		category CategoryType
+		tenant string
+		owner string
+		expect string
+		expectErr require.ErrorAssertionFunc
+	}{
+		{
+			name: "ok",
+			service: ExchangeService,
+			category: ContactsCategory,
+			tenant: "t",
+			owner: "ro",
+			expect: join([]string{"t", ExchangeService.String(), "ro", ContactsCategory.String()}),
+			expectErr: require.NoError,
+		},
+		{
+			name: "bad category",
+			service: ExchangeService,
+			category: FilesCategory,
+			tenant: "t",
+			owner: "ro",
+			expectErr: require.Error,
+		},
+		{
+			name: "bad tenant",
+			service: ExchangeService,
+			category: ContactsCategory,
+			tenant: "",
+			owner: "ro",
+			expectErr: require.Error,
+		},
+		{
+			name: "bad owner",
+			service: ExchangeService,
+			category: ContactsCategory,
+			tenant: "t",
+			owner: "",
+			expectErr: require.Error,
+		},
+	}
+	for _, test := range table {
+		suite.Run(test.name, func() {
+			t := suite.T()
+
+			r, err := ServicePrefix(test.tenant, test.owner, test.service, test.category)
+			test.expectErr(t, err, clues.ToCore(err))
+
+			if r == nil {
+				return
+			}
+
+			assert.Equal(t, test.expect, r.String())
+			assert.NotPanics(t, func() {
+				r.Folders()
+				r.Item()
+			}, "runs Folders() and Item()")
+		})
+	}
+}
@@ -8,8 +8,8 @@ import (
	"github.com/google/uuid"
	"github.com/pkg/errors"

-	"github.com/alcionai/corso/src/internal/common"
	"github.com/alcionai/corso/src/internal/common/crash"
+	"github.com/alcionai/corso/src/internal/common/idname"
	"github.com/alcionai/corso/src/internal/connector"
	"github.com/alcionai/corso/src/internal/connector/onedrive/metadata"
	"github.com/alcionai/corso/src/internal/data"
@@ -63,7 +63,7 @@ type Repository interface {
	NewBackupWithLookup(
		ctx context.Context,
		self selectors.Selector,
-		ins common.IDNameSwapper,
+		ins idname.Cacher,
	) (operations.BackupOperation, error)
	NewRestore(
		ctx context.Context,
@@ -306,7 +306,7 @@ func (r repository) NewBackup(
 func (r repository) NewBackupWithLookup(
	ctx context.Context,
	sel selectors.Selector,
-	ins common.IDNameSwapper,
+	ins idname.Cacher,
 ) (operations.BackupOperation, error) {
	gc, err := connectToM365(ctx, sel, r.Account)
	if err != nil {
@@ -334,7 +334,7 @@ func (r repository) NewBackupWithLookup(
		gc,
		r.Account,
		sel,
-		sel,
+		sel, // the selector acts as an IDNamer for its discrete resource owner.
		r.Bus)
 }

@@ -17,6 +17,7 @@ import (
	"github.com/alcionai/corso/src/internal/stats"
	"github.com/alcionai/corso/src/internal/streamstore"
	"github.com/alcionai/corso/src/internal/tester"
+	"github.com/alcionai/corso/src/internal/version"
	"github.com/alcionai/corso/src/pkg/backup"
	"github.com/alcionai/corso/src/pkg/backup/details"
	"github.com/alcionai/corso/src/pkg/fault"
@@ -316,6 +317,7 @@ func writeBackup(
	b := backup.New(
		snapID, ssid,
		operations.Completed.String(),
+		version.Backup,
		model.StableID(backupID),
		sel,
		ownerID, ownerName,
@@ -8,6 +8,7 @@ import (
	"github.com/alcionai/clues"
	"golang.org/x/exp/maps"

+	"github.com/alcionai/corso/src/internal/common/idname"
	"github.com/alcionai/corso/src/pkg/backup/details"
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/filters"
@@ -89,6 +90,8 @@ type pathCategorier interface {
 // Selector
 // ---------------------------------------------------------------------------

+var _ idname.Provider = &Selector{}
+
 // The core selector. Has no api for setting or retrieving data.
 // Is only used to pass along more specific selector instances.
 type Selector struct {
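A minimal sketch (not part of this diff) of what the new compile-time assertion enables: a *Selector can be passed to any API that only needs an idname.Provider, such as ProduceBackupCollections above; the consumer function here is hypothetical:

```go
package example

import (
	"github.com/alcionai/corso/src/internal/common/idname"
	"github.com/alcionai/corso/src/pkg/selectors"
)

// usesProvider stands in for any API that only needs the resource owner's
// id/name provider, not the full selector.
func usesProvider(ro idname.Provider) {}

func demo(sel selectors.Selector) {
	// &sel satisfies idname.Provider per the var _ assertion above.
	usesProvider(&sel)
}
```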
@@ -7,7 +7,7 @@ import (
	"github.com/alcionai/clues"
	"github.com/microsoftgraph/msgraph-sdk-go/models"

-	"github.com/alcionai/corso/src/internal/common"
+	"github.com/alcionai/corso/src/internal/common/idname"
	"github.com/alcionai/corso/src/internal/common/ptr"
	"github.com/alcionai/corso/src/internal/connector/discovery"
	"github.com/alcionai/corso/src/pkg/account"
@@ -108,10 +108,10 @@ func UsersMap(
	ctx context.Context,
	acct account.Account,
	errs *fault.Bus,
-) (common.IDsNames, error) {
+) (idname.Cacher, error) {
	users, err := Users(ctx, acct, errs)
	if err != nil {
-		return common.IDsNames{}, err
+		return idname.NewCache(nil), err
	}

	var (
@@ -125,10 +125,7 @@ func UsersMap(
		nameToID[name] = id
	}

-	ins := common.IDsNames{
-		IDToName: idToName,
-		NameToID: nameToID,
-	}
+	ins := idname.NewCache(idToName)

	return ins, nil
 }
@@ -215,23 +212,19 @@ func SitesMap(
	ctx context.Context,
	acct account.Account,
	errs *fault.Bus,
-) (common.IDsNames, error) {
+) (idname.Cacher, error) {
	sites, err := Sites(ctx, acct, errs)
	if err != nil {
-		return common.IDsNames{}, err
+		return idname.NewCache(nil), err
	}

-	ins := common.IDsNames{
-		IDToName: make(map[string]string, len(sites)),
-		NameToID: make(map[string]string, len(sites)),
-	}
+	itn := make(map[string]string, len(sites))

	for _, s := range sites {
-		ins.IDToName[s.ID] = s.WebURL
-		ins.NameToID[s.WebURL] = s.ID
+		itn[s.ID] = s.WebURL
	}

-	return ins, nil
+	return idname.NewCache(itn), nil
 }

 // ---------------------------------------------------------------------------
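A minimal sketch (not part of this diff) of constructing the cache directly when an id-to-name mapping is already in hand, matching how UsersMap and SitesMap now produce their results; the map contents are placeholders:

```go
package example

import "github.com/alcionai/corso/src/internal/common/idname"

// cacheFor wraps a known id -> name mapping in an idname.Cacher.
func cacheFor() idname.Cacher {
	idToName := map[string]string{
		"user-id-123": "user@example.com", // placeholder values
	}

	return idname.NewCache(idToName)
}
```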