add basic groups boilerplate (#3971)

Adds some basic boilerplate for the groups service.
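
As a rough sketch of how this entry point would be wired from the controller (hypothetical call site; `bpc`, `ctrl`, and `errs` are assumed to already exist, the same way they do for the other services):

```go
// Hypothetical wiring sketch only; the groups dispatch in the controller is not
// part of this PR. The argument order follows the new groups.ProduceBackupCollections.
colls, ssmb, canUsePreviousBackup, err := groups.ProduceBackupCollections(
	ctx,
	bpc,               // inject.BackupProducerConfig for the protected group
	ctrl.AC,           // api.Client
	ctrl.credentials,  // account.M365Config
	ctrl.UpdateStatus, // support.StatusUpdater, passed as a method value
	errs)              // *fault.Bus
if err != nil {
	return nil, nil, false, err
}
```

The same `ctrl.UpdateStatus` method-value pattern shows up in the controller and SharePoint diffs below.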

---

#### Does this PR need a docs update or release note?

- [x]  No

#### Type of change

- [x] 🌻 Feature
Keepers 2023-08-07 11:57:23 -06:00 committed by GitHub
parent 2cc380b9b9
commit e3c51b7dc9
19 changed files with 1732 additions and 923 deletions

View File

@@ -110,7 +110,7 @@ func (ctrl *Controller) ProduceBackupCollections(
bpc,
ctrl.AC,
ctrl.credentials,
ctrl,
ctrl.UpdateStatus,
errs)
if err != nil {
return nil, nil, false, err

View File

@@ -307,7 +307,7 @@ func (suite *DataCollectionIntgSuite) TestSharePointDataCollection() {
bpc,
suite.ac,
ctrl.credentials,
ctrl,
ctrl.UpdateStatus,
fault.New(true))
require.NoError(t, err, clues.ToCore(err))
assert.True(t, canUsePreviousBackup, "can use previous backup")

View File

@@ -0,0 +1,85 @@
package groups
import (
"context"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
func ProduceBackupCollections(
ctx context.Context,
bpc inject.BackupProducerConfig,
ac api.Client,
creds account.M365Config,
su support.StatusUpdater,
errs *fault.Bus,
) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, bool, error) {
b, err := bpc.Selector.ToGroupsBackup()
if err != nil {
return nil, nil, false, clues.Wrap(err, "groupsDataCollection: parsing selector")
}
var (
el = errs.Local()
collections = []data.BackupCollection{}
categories = map[path.CategoryType]struct{}{}
ssmb = prefixmatcher.NewStringSetBuilder()
canUsePreviousBackup bool
)
ctx = clues.Add(
ctx,
"group_id", clues.Hide(bpc.ProtectedResource.ID()),
"group_name", clues.Hide(bpc.ProtectedResource.Name()))
for _, scope := range b.Scopes() {
if el.Failure() != nil {
break
}
progressBar := observe.MessageWithCompletion(
ctx,
observe.Bulletf("%s", scope.Category().PathType()))
defer close(progressBar)
var dbcs []data.BackupCollection
switch scope.Category().PathType() {
case path.LibrariesCategory: // TODO
}
collections = append(collections, dbcs...)
categories[scope.Category().PathType()] = struct{}{}
}
if len(collections) > 0 {
baseCols, err := graph.BaseCollections(
ctx,
collections,
creds.AzureTenantID,
bpc.ProtectedResource.ID(),
path.UnknownService, // path.GroupsService
categories,
su,
errs)
if err != nil {
return nil, nil, false, err
}
collections = append(collections, baseCols...)
}
return collections, ssmb.ToReader(), canUsePreviousBackup, el.Failure()
}
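
The category switch above is intentionally empty for now. Purely as an illustration of where per-category work would land, a hypothetical fill-in for the libraries case might delegate to a collector analogous to SharePoint's `collectLibraries` further down in this diff; `collectGroupLibraries` is a made-up name and does not exist in this PR:

```go
// Hypothetical sketch only; collectGroupLibraries is a placeholder name.
switch scope.Category().PathType() {
case path.LibrariesCategory:
	var err error

	dbcs, canUsePreviousBackup, err = collectGroupLibraries(
		ctx, bpc, ac, creds.AzureTenantID, ssmb, scope, su, errs)
	if err != nil {
		el.AddRecoverable(ctx, err)
		continue
	}
}
```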

View File

@@ -0,0 +1,93 @@
package groups
import (
"context"
"errors"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/support"
"github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
// ConsumeRestoreCollections restores the specified data collections into the groups service
func ConsumeRestoreCollections(
ctx context.Context,
rcc inject.RestoreConsumerConfig,
ac api.Client,
backupDriveIDNames idname.Cacher,
dcs []data.RestoreCollection,
deets *details.Builder,
errs *fault.Bus,
ctr *count.Bus,
) (*support.ControllerOperationStatus, error) {
var (
restoreMetrics support.CollectionMetrics
// caches = onedrive.NewRestoreCaches(backupDriveIDNames)
el = errs.Local()
)
// TODO: uncomment when a handler is available
// err := caches.Populate(ctx, lrh, rcc.ProtectedResource.ID())
// if err != nil {
// return nil, clues.Wrap(err, "initializing restore caches")
// }
// Reorder collections so that the parent directories are created
// before the child directories; a requirement for permissions.
data.SortRestoreCollections(dcs)
// Iterate through the data collections and restore the contents of each
for _, dc := range dcs {
if el.Failure() != nil {
break
}
var (
err error
category = dc.FullPath().Category()
metrics support.CollectionMetrics
ictx = clues.Add(ctx,
"category", category,
"restore_location", clues.Hide(rcc.RestoreConfig.Location),
"protected_resource", clues.Hide(dc.FullPath().ResourceOwner()),
"full_path", dc.FullPath())
)
switch dc.FullPath().Category() {
case path.LibrariesCategory:
// TODO
default:
return nil, clues.New("data category not supported").
With("category", category).
WithClues(ictx)
}
restoreMetrics = support.CombineMetrics(restoreMetrics, metrics)
if err != nil {
el.AddRecoverable(ctx, err)
}
if errors.Is(err, context.Canceled) {
break
}
}
status := support.CreateStatus(
ctx,
support.Restore,
len(dcs),
restoreMetrics,
rcc.RestoreConfig.Location)
return status, el.Failure()
}
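
The loop above follows the repo's recoverable-error convention: per-item failures are recorded on a local fault bus instead of returning immediately, and only context cancellation breaks out early. A compact sketch of that pattern, using only calls that appear in this diff (`errs.Local`, `AddRecoverable`, `Failure`); `items` and `restoreOne` are placeholders:

```go
// Sketch of the fault.Bus error-handling pattern used above.
el := errs.Local()

for _, item := range items {
	if el.Failure() != nil {
		break // an earlier error already marked the run as failed
	}

	if err := restoreOne(ctx, item); err != nil {
		el.AddRecoverable(ctx, err) // record the failure, keep processing other items
	}
}

return el.Failure() // nil unless the bus promoted an error to a run failure
```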

View File

@@ -21,18 +21,12 @@ import (
"github.com/alcionai/corso/src/pkg/services/m365/api"
)
type statusUpdater interface {
UpdateStatus(status *support.ControllerOperationStatus)
}
// ProduceBackupCollections returns a set of DataCollection which represents the SharePoint data
// for the specified site
func ProduceBackupCollections(
ctx context.Context,
bpc inject.BackupProducerConfig,
ac api.Client,
creds account.M365Config,
su statusUpdater,
su support.StatusUpdater,
errs *fault.Bus,
) ([]data.BackupCollection, *prefixmatcher.StringSetMatcher, bool, error) {
b, err := bpc.Selector.ToSharePointBackup()
@@ -129,7 +123,7 @@ func ProduceBackupCollections(
bpc.ProtectedResource.ID(),
path.SharePointService,
categories,
su.UpdateStatus,
su,
errs)
if err != nil {
return nil, nil, false, err
@@ -146,7 +140,7 @@ func collectLists(
bpc inject.BackupProducerConfig,
ac api.Client,
tenantID string,
updater statusUpdater,
su support.StatusUpdater,
errs *fault.Bus,
) ([]data.BackupCollection, error) {
logger.Ctx(ctx).Debug("Creating SharePoint List Collections")
@@ -181,7 +175,7 @@ func collectLists(
dir,
ac,
List,
updater.UpdateStatus,
su,
bpc.Options)
collection.AddJob(tuple.id)
@@ -200,7 +194,7 @@ func collectLibraries(
tenantID string,
ssmb *prefixmatcher.StringSetMatchBuilder,
scope selectors.SharePointScope,
updater statusUpdater,
su support.StatusUpdater,
errs *fault.Bus,
) ([]data.BackupCollection, bool, error) {
logger.Ctx(ctx).Debug("creating SharePoint Library collections")
@@ -211,7 +205,7 @@ func collectLibraries(
&libraryBackupHandler{ad, scope},
tenantID,
bpc.ProtectedResource.ID(),
updater.UpdateStatus,
su,
bpc.Options)
)
@@ -230,7 +224,7 @@ func collectPages(
bpc inject.BackupProducerConfig,
creds account.M365Config,
ac api.Client,
updater statusUpdater,
su support.StatusUpdater,
errs *fault.Bus,
) ([]data.BackupCollection, error) {
logger.Ctx(ctx).Debug("creating SharePoint Pages collections")
@@ -277,7 +271,7 @@ func collectPages(
dir,
ac,
Pages,
updater.UpdateStatus,
su,
bpc.Options)
collection.betaService = betaService
collection.AddJob(tuple.ID)

View File

@@ -217,7 +217,7 @@ func (suite *SharePointPagesSuite) TestCollectPages() {
bpc,
creds,
ac,
&MockGraphService{},
(&MockGraphService{}).UpdateStatus,
fault.New(true))
assert.NoError(t, err, clues.ToCore(err))
assert.NotEmpty(t, col)
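
Both call sites above now pass a method value (`ctrl.UpdateStatus`, `(&MockGraphService{}).UpdateStatus`) where the removed `statusUpdater` interface used to be accepted. That only compiles if `support.StatusUpdater` is a function type with a matching signature; its definition isn't included in this diff, so the shape below is an inference, not a quote:

```go
// Assumed shape of support.StatusUpdater (definition not shown in this diff),
// inferred from the old statusUpdater interface and the call sites above.
type StatusUpdater func(status *ControllerOperationStatus)

// Any value with a matching UpdateStatus method can then be passed directly:
//   sharepoint.ProduceBackupCollections(ctx, bpc, ac, creds, ctrl.UpdateStatus, errs)
```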

View File

@@ -0,0 +1,143 @@
package details
import (
"sync"
"github.com/alcionai/clues"
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/pkg/path"
)
// Builder should be used to create a details model.
type Builder struct {
d Details
mu sync.Mutex `json:"-"`
knownFolders map[string]Entry `json:"-"`
}
func (b *Builder) Add(
repoRef path.Path,
locationRef *path.Builder,
updated bool,
info ItemInfo,
) error {
b.mu.Lock()
defer b.mu.Unlock()
entry, err := b.d.add(
repoRef,
locationRef,
updated,
info)
if err != nil {
return clues.Wrap(err, "adding entry to details")
}
if err := b.addFolderEntries(
repoRef.ToBuilder().Dir(),
locationRef,
entry,
); err != nil {
return clues.Wrap(err, "adding folder entries")
}
return nil
}
func (b *Builder) addFolderEntries(
repoRef, locationRef *path.Builder,
entry Entry,
) error {
if len(repoRef.Elements()) < len(locationRef.Elements()) {
return clues.New("RepoRef shorter than LocationRef").
With("repo_ref", repoRef, "location_ref", locationRef)
}
if b.knownFolders == nil {
b.knownFolders = map[string]Entry{}
}
// Need a unique location because we want to have separate folders for
// different drives and categories even if there are duplicate folder names in
// them.
uniqueLoc, err := entry.uniqueLocation(locationRef)
if err != nil {
return clues.Wrap(err, "getting LocationIDer")
}
for uniqueLoc.elementCount() > 0 {
mapKey := uniqueLoc.ID().ShortRef()
name := uniqueLoc.lastElem()
if len(name) == 0 {
return clues.New("folder with no display name").
With("repo_ref", repoRef, "location_ref", uniqueLoc.InDetails())
}
shortRef := repoRef.ShortRef()
rr := repoRef.String()
// Get the parent of this entry to add as the LocationRef for the folder.
uniqueLoc.dir()
repoRef = repoRef.Dir()
parentRef := repoRef.ShortRef()
folder, ok := b.knownFolders[mapKey]
if !ok {
loc := uniqueLoc.InDetails().String()
folder = Entry{
RepoRef: rr,
ShortRef: shortRef,
ParentRef: parentRef,
LocationRef: loc,
ItemInfo: ItemInfo{
Folder: &FolderInfo{
ItemType: FolderItem,
// TODO(ashmrtn): Use the item type returned by the entry once
// SharePoint properly sets it.
DisplayName: name,
},
},
}
if err := entry.updateFolder(folder.Folder); err != nil {
return clues.Wrap(err, "adding folder").
With("parent_repo_ref", repoRef, "location_ref", loc)
}
}
folder.Folder.Size += entry.size()
folder.Updated = folder.Updated || entry.Updated
itemModified := entry.Modified()
if folder.Folder.Modified.Before(itemModified) {
folder.Folder.Modified = itemModified
}
// Always update the map because we're storing structs not pointers to
// structs.
b.knownFolders[mapKey] = folder
}
return nil
}
func (b *Builder) Details() *Details {
b.mu.Lock()
defer b.mu.Unlock()
ents := make([]Entry, len(b.d.Entries))
copy(ents, b.d.Entries)
// Write the cached folder entries to details
details := &Details{
DetailsModel{
Entries: append(ents, maps.Values(b.knownFolders)...),
},
}
return details
}
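
Taken together, `Add` records the item entry and synthesizes one folder `Entry` per ancestor location, and `Details` flushes both. A rough usage sketch from outside the package; the repo-ref string layout below is an assumption made only for illustration (tenant/service/owner/category/… as suggested by the `path.FromDataLayerPath` usage elsewhere in this diff):

```go
b := &details.Builder{}

// Assumed repo-ref layout; illustration only.
repoRef, err := path.FromDataLayerPath(
	"tenant-id/onedrive/user-id/files/drives/drive-id/root:/Folder/item-id",
	true)
if err != nil {
	log.Fatal(err)
}

err = b.Add(
	repoRef,
	path.Builder{}.Append("root:", "Folder"), // human-readable location
	true,                                     // item added/updated in this backup
	details.ItemInfo{
		OneDrive: &details.OneDriveInfo{
			ItemType: details.OneDriveItem,
			DriveID:  "drive-id",
			ItemName: "item",
			Size:     42,
		},
	})
if err != nil {
	log.Fatal(err)
}

// The returned Details contains the item entry plus synthesized folder entries
// for "Folder" and the drive root, with sizes rolled up onto the folders.
d := b.Details()
fmt.Println(len(d.Entries))
```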

View File

@@ -1,22 +1,13 @@
package details
import (
"context"
"encoding/json"
"io"
"strconv"
"strings"
"sync"
"time"
"github.com/alcionai/clues"
"github.com/dustin/go-humanize"
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/m365/onedrive/metadata"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/path"
)
@@ -24,383 +15,6 @@ import (
// more than this, then we just show a summary.
const maxPrintLimit = 50
// LocationIDer provides access to location information but guarantees that it
// can also generate a unique location (among items in the same service but
// possibly across data types within the service) that can be used as a key in
// maps and other structures. The unique location may be different than
// InDetails, the location used in backup details.
type LocationIDer interface {
ID() *path.Builder
InDetails() *path.Builder
}
type uniqueLoc struct {
pb *path.Builder
prefixElems int
}
func (ul uniqueLoc) ID() *path.Builder {
return ul.pb
}
func (ul uniqueLoc) InDetails() *path.Builder {
return path.Builder{}.Append(ul.pb.Elements()[ul.prefixElems:]...)
}
// elementCount returns the number of non-prefix elements in the LocationIDer
// (i.e. the number of elements in the InDetails path.Builder).
func (ul uniqueLoc) elementCount() int {
res := len(ul.pb.Elements()) - ul.prefixElems
if res < 0 {
res = 0
}
return res
}
func (ul *uniqueLoc) dir() {
if ul.elementCount() == 0 {
return
}
ul.pb = ul.pb.Dir()
}
// lastElem returns the unescaped last element in the location. If the location
// is empty returns an empty string.
func (ul uniqueLoc) lastElem() string {
if ul.elementCount() == 0 {
return ""
}
return ul.pb.LastElem()
}
// Having service-specific constructors can be kind of clunky, but in this case
// I think they'd be useful to ensure the proper args are used since this
// path.Builder is used as a key in some maps.
// NewExchangeLocationIDer builds a LocationIDer for the given category and
// folder path. The path denoted by the folders should be unique within the
// category.
func NewExchangeLocationIDer(
category path.CategoryType,
escapedFolders ...string,
) (uniqueLoc, error) {
if err := path.ValidateServiceAndCategory(path.ExchangeService, category); err != nil {
return uniqueLoc{}, clues.Wrap(err, "making exchange LocationIDer")
}
pb := path.Builder{}.Append(category.String()).Append(escapedFolders...)
return uniqueLoc{
pb: pb,
prefixElems: 1,
}, nil
}
// NewOneDriveLocationIDer builds a LocationIDer for the drive and folder path.
// The path denoted by the folders should be unique within the drive.
func NewOneDriveLocationIDer(
driveID string,
escapedFolders ...string,
) uniqueLoc {
pb := path.Builder{}.
Append(path.FilesCategory.String(), driveID).
Append(escapedFolders...)
return uniqueLoc{
pb: pb,
prefixElems: 2,
}
}
// NewSharePointLocationIDer builds a LocationIDer for the drive and folder
// path. The path denoted by the folders should be unique within the drive.
func NewSharePointLocationIDer(
driveID string,
escapedFolders ...string,
) uniqueLoc {
pb := path.Builder{}.
Append(path.LibrariesCategory.String(), driveID).
Append(escapedFolders...)
return uniqueLoc{
pb: pb,
prefixElems: 2,
}
}
// --------------------------------------------------------------------------------
// Model
// --------------------------------------------------------------------------------
// DetailsModel describes what was stored in a Backup
type DetailsModel struct {
Entries []Entry `json:"entries"`
}
// PrintEntries writes the DetailsModel's Entries to stdout, in the format
// requested by the caller.
func (dm DetailsModel) PrintEntries(ctx context.Context) {
printEntries(ctx, dm.Entries)
}
type infoer interface {
Entry | *Entry
// Need this here so we can access the infoType function without a type
// assertion. See https://stackoverflow.com/a/71378366 for more details.
infoType() ItemType
}
func printEntries[T infoer](ctx context.Context, entries []T) {
if print.DisplayJSONFormat() {
printJSON(ctx, entries)
} else {
printTable(ctx, entries)
}
}
func printTable[T infoer](ctx context.Context, entries []T) {
perType := map[ItemType][]print.Printable{}
for _, ent := range entries {
it := ent.infoType()
ps, ok := perType[it]
if !ok {
ps = []print.Printable{}
}
perType[it] = append(ps, print.Printable(ent))
}
for _, ps := range perType {
print.All(ctx, ps...)
}
}
func printJSON[T infoer](ctx context.Context, entries []T) {
ents := []print.Printable{}
for _, ent := range entries {
ents = append(ents, print.Printable(ent))
}
print.All(ctx, ents...)
}
// Paths returns the list of Paths for non-folder and non-meta items extracted
// from the Entries slice.
func (dm DetailsModel) Paths() []string {
r := make([]string, 0, len(dm.Entries))
for _, ent := range dm.Entries {
if ent.Folder != nil || ent.isMetaFile() {
continue
}
r = append(r, ent.RepoRef)
}
return r
}
// Items returns a slice of *ItemInfo that does not contain any FolderInfo
// entries. Required because not all folders in the details are valid resource
// paths, and we want to slice out metadata.
func (dm DetailsModel) Items() entrySet {
res := make([]*Entry, 0, len(dm.Entries))
for i := 0; i < len(dm.Entries); i++ {
ent := dm.Entries[i]
if ent.Folder != nil || ent.isMetaFile() {
continue
}
res = append(res, &ent)
}
return res
}
// FilterMetaFiles returns a copy of the Details with all of the
// .meta files removed from the entries.
func (dm DetailsModel) FilterMetaFiles() DetailsModel {
d2 := DetailsModel{
Entries: []Entry{},
}
for _, ent := range dm.Entries {
if !ent.isMetaFile() {
d2.Entries = append(d2.Entries, ent)
}
}
return d2
}
// SumNonMetaFileSizes returns the total size of items excluding all the
// .meta files from the items.
func (dm DetailsModel) SumNonMetaFileSizes() int64 {
var size int64
// Items will provide only files and filter out folders
for _, ent := range dm.FilterMetaFiles().Items() {
size += ent.size()
}
return size
}
// Check if a file is a metadata file. These are used to store
// additional data like permissions (in case of Drive items) and are
// not to be treated as regular files.
func (de Entry) isMetaFile() bool {
// sharepoint types not needed, since sharepoint permissions were
// added after IsMeta was deprecated.
// Earlier onedrive backups used to store both metafiles and files in details.
// So filter out just the onedrive items and check for metafiles
return de.ItemInfo.OneDrive != nil && de.ItemInfo.OneDrive.IsMeta
}
// ---------------------------------------------------------------------------
// Builder
// ---------------------------------------------------------------------------
// Builder should be used to create a details model.
type Builder struct {
d Details
mu sync.Mutex `json:"-"`
knownFolders map[string]Entry `json:"-"`
}
func (b *Builder) Add(
repoRef path.Path,
locationRef *path.Builder,
updated bool,
info ItemInfo,
) error {
b.mu.Lock()
defer b.mu.Unlock()
entry, err := b.d.add(
repoRef,
locationRef,
updated,
info)
if err != nil {
return clues.Wrap(err, "adding entry to details")
}
if err := b.addFolderEntries(
repoRef.ToBuilder().Dir(),
locationRef,
entry,
); err != nil {
return clues.Wrap(err, "adding folder entries")
}
return nil
}
func (b *Builder) addFolderEntries(
repoRef, locationRef *path.Builder,
entry Entry,
) error {
if len(repoRef.Elements()) < len(locationRef.Elements()) {
return clues.New("RepoRef shorter than LocationRef").
With("repo_ref", repoRef, "location_ref", locationRef)
}
if b.knownFolders == nil {
b.knownFolders = map[string]Entry{}
}
// Need a unique location because we want to have separate folders for
// different drives and categories even if there are duplicate folder names in
// them.
uniqueLoc, err := entry.uniqueLocation(locationRef)
if err != nil {
return clues.Wrap(err, "getting LocationIDer")
}
for uniqueLoc.elementCount() > 0 {
mapKey := uniqueLoc.ID().ShortRef()
name := uniqueLoc.lastElem()
if len(name) == 0 {
return clues.New("folder with no display name").
With("repo_ref", repoRef, "location_ref", uniqueLoc.InDetails())
}
shortRef := repoRef.ShortRef()
rr := repoRef.String()
// Get the parent of this entry to add as the LocationRef for the folder.
uniqueLoc.dir()
repoRef = repoRef.Dir()
parentRef := repoRef.ShortRef()
folder, ok := b.knownFolders[mapKey]
if !ok {
loc := uniqueLoc.InDetails().String()
folder = Entry{
RepoRef: rr,
ShortRef: shortRef,
ParentRef: parentRef,
LocationRef: loc,
ItemInfo: ItemInfo{
Folder: &FolderInfo{
ItemType: FolderItem,
// TODO(ashmrtn): Use the item type returned by the entry once
// SharePoint properly sets it.
DisplayName: name,
},
},
}
if err := entry.updateFolder(folder.Folder); err != nil {
return clues.Wrap(err, "adding folder").
With("parent_repo_ref", repoRef, "location_ref", loc)
}
}
folder.Folder.Size += entry.size()
folder.Updated = folder.Updated || entry.Updated
itemModified := entry.Modified()
if folder.Folder.Modified.Before(itemModified) {
folder.Folder.Modified = itemModified
}
// Always update the map because we're storing structs not pointers to
// structs.
b.knownFolders[mapKey] = folder
}
return nil
}
func (b *Builder) Details() *Details {
b.mu.Lock()
defer b.mu.Unlock()
ents := make([]Entry, len(b.d.Entries))
copy(ents, b.d.Entries)
// Write the cached folder entries to details
details := &Details{
DetailsModel{
Entries: append(ents, maps.Values(b.knownFolders)...),
},
}
return details
}
// --------------------------------------------------------------------------------
// Details
// --------------------------------------------------------------------------------
@@ -490,540 +104,65 @@ func withoutMetadataSuffix(id string) string {
return id
}
// --------------------------------------------------------------------------------
// Entry
// --------------------------------------------------------------------------------
// ---------------------------------------------------------------------------
// LocationIDer
// ---------------------------------------------------------------------------
// Add a new type so we can transparently use PrintAll in different situations.
type entrySet []*Entry
func (ents entrySet) PrintEntries(ctx context.Context) {
printEntries(ctx, ents)
// LocationIDer provides access to location information but guarantees that it
// can also generate a unique location (among items in the same service but
// possibly across data types within the service) that can be used as a key in
// maps and other structures. The unique location may be different than
// InDetails, the location used in backup details.
type LocationIDer interface {
ID() *path.Builder
InDetails() *path.Builder
}
// MaybePrintEntries is the same as PrintEntries, but only prints if there are
// at most maxPrintLimit items, or if JSON or verbose output is requested.
func (ents entrySet) MaybePrintEntries(ctx context.Context) {
if len(ents) <= maxPrintLimit ||
print.DisplayJSONFormat() ||
print.DisplayVerbose() {
printEntries(ctx, ents)
}
type uniqueLoc struct {
pb *path.Builder
prefixElems int
}
// Entry describes a single item stored in a Backup
type Entry struct {
// RepoRef is the full storage path of the item in Kopia
RepoRef string `json:"repoRef"`
ShortRef string `json:"shortRef"`
ParentRef string `json:"parentRef,omitempty"`
// LocationRef contains the logical path structure by its human-readable
// display names. IE: If an item is located at "/Inbox/Important", we
// hold that string in the LocationRef, while the actual IDs of each
// container are used for the RepoRef.
// LocationRef only holds the container values, and does not include
// the metadata prefixes (tenant, service, owner, etc) found in the
// repoRef.
// Currently only implemented for Exchange Calendars.
LocationRef string `json:"locationRef,omitempty"`
// ItemRef contains the stable id of the item itself. ItemRef is not
// guaranteed to be unique within a repository. Uniqueness guarantees
// maximally inherit from the source item. Eg: Entries for m365 mail items
// are only as unique as m365 mail item IDs themselves.
ItemRef string `json:"itemRef,omitempty"`
// Indicates the item was added or updated in this backup
// Always `true` for full backups
Updated bool `json:"updated"`
ItemInfo
func (ul uniqueLoc) ID() *path.Builder {
return ul.pb
}
// ToLocationIDer takes a backup version and produces the unique location for
// this entry if possible. Reasons it may not be possible to produce the unique
// location include an unsupported backup version or missing information.
func (de Entry) ToLocationIDer(backupVersion int) (LocationIDer, error) {
if len(de.LocationRef) > 0 {
baseLoc, err := path.Builder{}.SplitUnescapeAppend(de.LocationRef)
if err != nil {
return nil, clues.Wrap(err, "parsing base location info").
With("location_ref", de.LocationRef)
func (ul uniqueLoc) InDetails() *path.Builder {
return path.Builder{}.Append(ul.pb.Elements()[ul.prefixElems:]...)
}
// Individual services may add additional info to the base and return that.
return de.ItemInfo.uniqueLocation(baseLoc)
// elementCount returns the number of non-prefix elements in the LocationIDer
// (i.e. the number of elements in the InDetails path.Builder).
func (ul uniqueLoc) elementCount() int {
res := len(ul.pb.Elements()) - ul.prefixElems
if res < 0 {
res = 0
}
if backupVersion >= version.OneDrive7LocationRef ||
(de.ItemInfo.infoType() != OneDriveItem &&
de.ItemInfo.infoType() != SharePointLibrary) {
return nil, clues.New("no previous location for entry")
return res
}
// This is a little hacky, but we only want to try to extract the old
// location if it's OneDrive or SharePoint libraries and it's known to
// be an older backup version.
//
// TODO(ashmrtn): Remove this code once OneDrive/SharePoint libraries
// LocationRef code has been out long enough that all delta tokens for
// previous backup versions will have expired. At that point, either
// we'll do a full backup (token expired, no newer backups) or have a
// backup of a higher version with the information we need.
rr, err := path.FromDataLayerPath(de.RepoRef, true)
if err != nil {
return nil, clues.Wrap(err, "getting item RepoRef")
func (ul *uniqueLoc) dir() {
if ul.elementCount() == 0 {
return
}
p, err := path.ToDrivePath(rr)
if err != nil {
return nil, clues.New("converting RepoRef to drive path")
ul.pb = ul.pb.Dir()
}
baseLoc := path.Builder{}.Append(p.Root).Append(p.Folders...)
// Individual services may add additional info to the base and return that.
return de.ItemInfo.uniqueLocation(baseLoc)
// lastElem returns the unescaped last element in the location. If the location
// is empty returns an empty string.
func (ul uniqueLoc) lastElem() string {
if ul.elementCount() == 0 {
return ""
}
// --------------------------------------------------------------------------------
// CLI Output
// --------------------------------------------------------------------------------
// interface compliance checks
var _ print.Printable = &Entry{}
// MinimumPrintable DetailsEntries is a passthrough func, because no
// reduction is needed for the json output.
func (de Entry) MinimumPrintable() any {
return de
return ul.pb.LastElem()
}
// Headers returns the human-readable names of properties in a DetailsEntry
// for printing out to a terminal in a columnar display.
func (de Entry) Headers() []string {
hs := []string{"ID"}
if de.ItemInfo.Folder != nil {
hs = append(hs, de.ItemInfo.Folder.Headers()...)
}
if de.ItemInfo.Exchange != nil {
hs = append(hs, de.ItemInfo.Exchange.Headers()...)
}
if de.ItemInfo.SharePoint != nil {
hs = append(hs, de.ItemInfo.SharePoint.Headers()...)
}
if de.ItemInfo.OneDrive != nil {
hs = append(hs, de.ItemInfo.OneDrive.Headers()...)
}
return hs
}
// Values returns the values matching the Headers list.
func (de Entry) Values() []string {
vs := []string{de.ShortRef}
if de.ItemInfo.Folder != nil {
vs = append(vs, de.ItemInfo.Folder.Values()...)
}
if de.ItemInfo.Exchange != nil {
vs = append(vs, de.ItemInfo.Exchange.Values()...)
}
if de.ItemInfo.SharePoint != nil {
vs = append(vs, de.ItemInfo.SharePoint.Values()...)
}
if de.ItemInfo.OneDrive != nil {
vs = append(vs, de.ItemInfo.OneDrive.Values()...)
}
return vs
}
type ItemType int
// ItemTypes are enumerated by service (hundreds digit) and data type (ones digit).
// Ex: Exchange is 00x where x is the data type, SharePoint is 10x, etc.
// Every item info struct should get its own hundredth enumeration entry.
// Every item category for that service should get its own entry (even if differences
// between types aren't apparent on initial implementation, this future-proofs
// against breaking changes).
// Entries should not be rearranged.
// Additionally, any itemType directly assigned a number should not be altered.
// This applies to OneDriveItem and FolderItem
const (
UnknownType ItemType = iota // 0, global unknown value
// Exchange (00x)
ExchangeContact
ExchangeEvent
ExchangeMail
// SharePoint (10x)
SharePointLibrary ItemType = iota + 97 // 100
SharePointList // 101...
SharePointPage
// OneDrive (20x)
OneDriveItem ItemType = 205
// Folder Management(30x)
FolderItem ItemType = 306
)
func UpdateItem(item *ItemInfo, newLocPath *path.Builder) {
// Only OneDrive and SharePoint have information about parent folders
// contained in them.
// Can't switch based on infoType because that's been unstable.
if item.Exchange != nil {
item.Exchange.UpdateParentPath(newLocPath)
} else if item.SharePoint != nil {
// SharePoint used to store library items with the OneDriveItem ItemType.
// Start switching them over as we see them since there's no point in
// keeping the old format.
if item.SharePoint.ItemType == OneDriveItem {
item.SharePoint.ItemType = SharePointLibrary
}
item.SharePoint.UpdateParentPath(newLocPath)
} else if item.OneDrive != nil {
item.OneDrive.UpdateParentPath(newLocPath)
}
}
// ItemInfo is a oneOf that contains service specific
// information about the item it tracks
type ItemInfo struct {
Folder *FolderInfo `json:"folder,omitempty"`
Exchange *ExchangeInfo `json:"exchange,omitempty"`
SharePoint *SharePointInfo `json:"sharePoint,omitempty"`
OneDrive *OneDriveInfo `json:"oneDrive,omitempty"`
// Optional item extension data
Extension *ExtensionData `json:"extension,omitempty"`
}
// typedInfo should get embedded in each service type to track
// the type of item it stores for multi-item service support.
// infoType provides internal categorization for collecting like-typed ItemInfos.
// It should return the most granular value type (ex: "event" for an exchange
// calendar event).
func (i ItemInfo) infoType() ItemType {
switch {
case i.Folder != nil:
return i.Folder.ItemType
case i.Exchange != nil:
return i.Exchange.ItemType
case i.SharePoint != nil:
return i.SharePoint.ItemType
case i.OneDrive != nil:
return i.OneDrive.ItemType
}
return UnknownType
}
func (i ItemInfo) size() int64 {
switch {
case i.Exchange != nil:
return i.Exchange.Size
case i.OneDrive != nil:
return i.OneDrive.Size
case i.SharePoint != nil:
return i.SharePoint.Size
case i.Folder != nil:
return i.Folder.Size
}
return 0
}
func (i ItemInfo) Modified() time.Time {
switch {
case i.Exchange != nil:
return i.Exchange.Modified
case i.OneDrive != nil:
return i.OneDrive.Modified
case i.SharePoint != nil:
return i.SharePoint.Modified
case i.Folder != nil:
return i.Folder.Modified
}
return time.Time{}
}
func (i ItemInfo) uniqueLocation(baseLoc *path.Builder) (*uniqueLoc, error) {
switch {
case i.Exchange != nil:
return i.Exchange.uniqueLocation(baseLoc)
case i.OneDrive != nil:
return i.OneDrive.uniqueLocation(baseLoc)
case i.SharePoint != nil:
return i.SharePoint.uniqueLocation(baseLoc)
default:
return nil, clues.New("unsupported type")
}
}
func (i ItemInfo) updateFolder(f *FolderInfo) error {
switch {
case i.Exchange != nil:
return i.Exchange.updateFolder(f)
case i.OneDrive != nil:
return i.OneDrive.updateFolder(f)
case i.SharePoint != nil:
return i.SharePoint.updateFolder(f)
default:
return clues.New("unsupported type")
}
}
type FolderInfo struct {
ItemType ItemType `json:"itemType,omitempty"`
DisplayName string `json:"displayName"`
Modified time.Time `json:"modified,omitempty"`
Size int64 `json:"size,omitempty"`
DataType ItemType `json:"dataType,omitempty"`
DriveName string `json:"driveName,omitempty"`
DriveID string `json:"driveID,omitempty"`
}
func (i FolderInfo) Headers() []string {
return []string{"Display Name"}
}
func (i FolderInfo) Values() []string {
return []string{i.DisplayName}
}
// ExchangeInfo describes an exchange item
type ExchangeInfo struct {
ItemType ItemType `json:"itemType,omitempty"`
Sender string `json:"sender,omitempty"`
Subject string `json:"subject,omitempty"`
Recipient []string `json:"recipient,omitempty"`
ParentPath string `json:"parentPath,omitempty"`
Received time.Time `json:"received,omitempty"`
EventStart time.Time `json:"eventStart,omitempty"`
EventEnd time.Time `json:"eventEnd,omitempty"`
Organizer string `json:"organizer,omitempty"`
ContactName string `json:"contactName,omitempty"`
EventRecurs bool `json:"eventRecurs,omitempty"`
Created time.Time `json:"created,omitempty"`
Modified time.Time `json:"modified,omitempty"`
Size int64 `json:"size,omitempty"`
}
// Headers returns the human-readable names of properties in an ExchangeInfo
// for printing out to a terminal in a columnar display.
func (i ExchangeInfo) Headers() []string {
switch i.ItemType {
case ExchangeEvent:
return []string{"Organizer", "Subject", "Starts", "Ends", "Recurring"}
case ExchangeContact:
return []string{"Contact Name"}
case ExchangeMail:
return []string{"Sender", "Folder", "Subject", "Received"}
}
return []string{}
}
// Values returns the values matching the Headers list for printing
// out to a terminal in a columnar display.
func (i ExchangeInfo) Values() []string {
switch i.ItemType {
case ExchangeEvent:
return []string{
i.Organizer,
i.Subject,
dttm.FormatToTabularDisplay(i.EventStart),
dttm.FormatToTabularDisplay(i.EventEnd),
strconv.FormatBool(i.EventRecurs),
}
case ExchangeContact:
return []string{i.ContactName}
case ExchangeMail:
return []string{
i.Sender, i.ParentPath, i.Subject,
dttm.FormatToTabularDisplay(i.Received),
}
}
return []string{}
}
func (i *ExchangeInfo) UpdateParentPath(newLocPath *path.Builder) {
i.ParentPath = newLocPath.String()
}
func (i *ExchangeInfo) uniqueLocation(baseLoc *path.Builder) (*uniqueLoc, error) {
var category path.CategoryType
switch i.ItemType {
case ExchangeEvent:
category = path.EventsCategory
case ExchangeContact:
category = path.ContactsCategory
case ExchangeMail:
category = path.EmailCategory
}
loc, err := NewExchangeLocationIDer(category, baseLoc.Elements()...)
return &loc, err
}
func (i *ExchangeInfo) updateFolder(f *FolderInfo) error {
// Use a switch instead of a rather large if-statement. Just make sure it's an
// Exchange type. If it's not return an error.
switch i.ItemType {
case ExchangeContact, ExchangeEvent, ExchangeMail:
default:
return clues.New("unsupported non-Exchange ItemType").
With("item_type", i.ItemType)
}
f.DataType = i.ItemType
return nil
}
// SharePointInfo describes a sharepoint item
type SharePointInfo struct {
Created time.Time `json:"created,omitempty"`
DriveName string `json:"driveName,omitempty"`
DriveID string `json:"driveID,omitempty"`
ItemName string `json:"itemName,omitempty"`
ItemType ItemType `json:"itemType,omitempty"`
Modified time.Time `json:"modified,omitempty"`
Owner string `json:"owner,omitempty"`
ParentPath string `json:"parentPath,omitempty"`
Size int64 `json:"size,omitempty"`
WebURL string `json:"webUrl,omitempty"`
SiteID string `json:"siteID,omitempty"`
}
// Headers returns the human-readable names of properties in a SharePointInfo
// for printing out to a terminal in a columnar display.
func (i SharePointInfo) Headers() []string {
return []string{"ItemName", "Library", "ParentPath", "Size", "Owner", "Created", "Modified"}
}
// Values returns the values matching the Headers list for printing
// out to a terminal in a columnar display.
func (i SharePointInfo) Values() []string {
return []string{
i.ItemName,
i.DriveName,
i.ParentPath,
humanize.Bytes(uint64(i.Size)),
i.Owner,
dttm.FormatToTabularDisplay(i.Created),
dttm.FormatToTabularDisplay(i.Modified),
}
}
func (i *SharePointInfo) UpdateParentPath(newLocPath *path.Builder) {
i.ParentPath = newLocPath.PopFront().String()
}
func (i *SharePointInfo) uniqueLocation(baseLoc *path.Builder) (*uniqueLoc, error) {
if len(i.DriveID) == 0 {
return nil, clues.New("empty drive ID")
}
loc := NewSharePointLocationIDer(i.DriveID, baseLoc.Elements()...)
return &loc, nil
}
func (i *SharePointInfo) updateFolder(f *FolderInfo) error {
// TODO(ashmrtn): Change to just SharePointLibrary when the code that
// generates the item type is fixed.
if i.ItemType == OneDriveItem || i.ItemType == SharePointLibrary {
return updateFolderWithinDrive(SharePointLibrary, i.DriveName, i.DriveID, f)
}
return clues.New("unsupported non-SharePoint ItemType").With("item_type", i.ItemType)
}
// OneDriveInfo describes a oneDrive item
type OneDriveInfo struct {
Created time.Time `json:"created,omitempty"`
DriveID string `json:"driveID,omitempty"`
DriveName string `json:"driveName,omitempty"`
IsMeta bool `json:"isMeta,omitempty"`
ItemName string `json:"itemName,omitempty"`
ItemType ItemType `json:"itemType,omitempty"`
Modified time.Time `json:"modified,omitempty"`
Owner string `json:"owner,omitempty"`
ParentPath string `json:"parentPath"`
Size int64 `json:"size,omitempty"`
}
// Headers returns the human-readable names of properties in a OneDriveInfo
// for printing out to a terminal in a columnar display.
func (i OneDriveInfo) Headers() []string {
return []string{"ItemName", "ParentPath", "Size", "Owner", "Created", "Modified"}
}
// Values returns the values matching the Headers list for printing
// out to a terminal in a columnar display.
func (i OneDriveInfo) Values() []string {
return []string{
i.ItemName,
i.ParentPath,
humanize.Bytes(uint64(i.Size)),
i.Owner,
dttm.FormatToTabularDisplay(i.Created),
dttm.FormatToTabularDisplay(i.Modified),
}
}
func (i *OneDriveInfo) UpdateParentPath(newLocPath *path.Builder) {
i.ParentPath = newLocPath.PopFront().String()
}
func (i *OneDriveInfo) uniqueLocation(baseLoc *path.Builder) (*uniqueLoc, error) {
if len(i.DriveID) == 0 {
return nil, clues.New("empty drive ID")
}
loc := NewOneDriveLocationIDer(i.DriveID, baseLoc.Elements()...)
return &loc, nil
}
func (i *OneDriveInfo) updateFolder(f *FolderInfo) error {
return updateFolderWithinDrive(OneDriveItem, i.DriveName, i.DriveID, f)
}
// ---------------------------------------------------------------------------
// helpers
// ---------------------------------------------------------------------------
func updateFolderWithinDrive(
t ItemType,

View File

@@ -0,0 +1,175 @@
package details
import (
"context"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/path"
)
// Add a new type so we can transparently use PrintAll in different situations.
type entrySet []*Entry
func (ents entrySet) PrintEntries(ctx context.Context) {
printEntries(ctx, ents)
}
// MaybePrintEntries is the same as PrintEntries, but only prints if there are
// at most maxPrintLimit items, or if JSON or verbose output is requested.
func (ents entrySet) MaybePrintEntries(ctx context.Context) {
if len(ents) <= maxPrintLimit ||
print.DisplayJSONFormat() ||
print.DisplayVerbose() {
printEntries(ctx, ents)
}
}
// Entry describes a single item stored in a Backup
type Entry struct {
// RepoRef is the full storage path of the item in Kopia
RepoRef string `json:"repoRef"`
ShortRef string `json:"shortRef"`
ParentRef string `json:"parentRef,omitempty"`
// LocationRef contains the logical path structure by its human-readable
// display names. IE: If an item is located at "/Inbox/Important", we
// hold that string in the LocationRef, while the actual IDs of each
// container are used for the RepoRef.
// LocationRef only holds the container values, and does not include
// the metadata prefixes (tenant, service, owner, etc) found in the
// repoRef.
// Currently only implemented for Exchange Calendars.
LocationRef string `json:"locationRef,omitempty"`
// ItemRef contains the stable id of the item itself. ItemRef is not
// guaranteed to be unique within a repository. Uniqueness guarantees
// maximally inherit from the source item. Eg: Entries for m365 mail items
// are only as unique as m365 mail item IDs themselves.
ItemRef string `json:"itemRef,omitempty"`
// Indicates the item was added or updated in this backup
// Always `true` for full backups
Updated bool `json:"updated"`
ItemInfo
}
// ToLocationIDer takes a backup version and produces the unique location for
// this entry if possible. Reasons it may not be possible to produce the unique
// location include an unsupported backup version or missing information.
func (de Entry) ToLocationIDer(backupVersion int) (LocationIDer, error) {
if len(de.LocationRef) > 0 {
baseLoc, err := path.Builder{}.SplitUnescapeAppend(de.LocationRef)
if err != nil {
return nil, clues.Wrap(err, "parsing base location info").
With("location_ref", de.LocationRef)
}
// Individual services may add additional info to the base and return that.
return de.ItemInfo.uniqueLocation(baseLoc)
}
if backupVersion >= version.OneDrive7LocationRef ||
(de.ItemInfo.infoType() != OneDriveItem &&
de.ItemInfo.infoType() != SharePointLibrary) {
return nil, clues.New("no previous location for entry")
}
// This is a little hacky, but we only want to try to extract the old
// location if it's OneDrive or SharePoint libraries and it's known to
// be an older backup version.
//
// TODO(ashmrtn): Remove this code once OneDrive/SharePoint libraries
// LocationRef code has been out long enough that all delta tokens for
// previous backup versions will have expired. At that point, either
// we'll do a full backup (token expired, no newer backups) or have a
// backup of a higher version with the information we need.
rr, err := path.FromDataLayerPath(de.RepoRef, true)
if err != nil {
return nil, clues.Wrap(err, "getting item RepoRef")
}
p, err := path.ToDrivePath(rr)
if err != nil {
return nil, clues.New("converting RepoRef to drive path")
}
baseLoc := path.Builder{}.Append(p.Root).Append(p.Folders...)
// Individual services may add additional info to the base and return that.
return de.ItemInfo.uniqueLocation(baseLoc)
}
// Check if a file is a metadata file. These are used to store
// additional data like permissions (in case of Drive items) and are
// not to be treated as regular files.
func (de Entry) isMetaFile() bool {
// sharepoint types not needed, since sharepoint permissions were
// added after IsMeta was deprecated.
// Earlier onedrive backups used to store both metafiles and files in details.
// So filter out just the onedrive items and check for metafiles
return de.ItemInfo.OneDrive != nil && de.ItemInfo.OneDrive.IsMeta
}
// --------------------------------------------------------------------------------
// CLI Output
// --------------------------------------------------------------------------------
// interface compliance checks
var _ print.Printable = &Entry{}
// MinimumPrintable DetailsEntries is a passthrough func, because no
// reduction is needed for the json output.
func (de Entry) MinimumPrintable() any {
return de
}
// Headers returns the human-readable names of properties in a DetailsEntry
// for printing out to a terminal in a columnar display.
func (de Entry) Headers() []string {
hs := []string{"ID"}
if de.ItemInfo.Folder != nil {
hs = append(hs, de.ItemInfo.Folder.Headers()...)
}
if de.ItemInfo.Exchange != nil {
hs = append(hs, de.ItemInfo.Exchange.Headers()...)
}
if de.ItemInfo.SharePoint != nil {
hs = append(hs, de.ItemInfo.SharePoint.Headers()...)
}
if de.ItemInfo.OneDrive != nil {
hs = append(hs, de.ItemInfo.OneDrive.Headers()...)
}
return hs
}
// Values returns the values matching the Headers list.
func (de Entry) Values() []string {
vs := []string{de.ShortRef}
if de.ItemInfo.Folder != nil {
vs = append(vs, de.ItemInfo.Folder.Values()...)
}
if de.ItemInfo.Exchange != nil {
vs = append(vs, de.ItemInfo.Exchange.Values()...)
}
if de.ItemInfo.SharePoint != nil {
vs = append(vs, de.ItemInfo.SharePoint.Values()...)
}
if de.ItemInfo.OneDrive != nil {
vs = append(vs, de.ItemInfo.OneDrive.Values()...)
}
return vs
}
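
Because the entry below carries a `LocationRef`, `ToLocationIDer` only needs the service info to pick the right `LocationIDer`; the backup version argument matters only for the legacy OneDrive/SharePoint fallback. A small sketch:

```go
ent := details.Entry{
	RepoRef:     "…", // not consulted when LocationRef is populated
	LocationRef: "Folder/Sub",
	ItemInfo: details.ItemInfo{
		OneDrive: &details.OneDriveInfo{
			ItemType: details.OneDriveItem,
			DriveID:  "drive-id",
		},
	},
}

loc, err := ent.ToLocationIDer(version.OneDrive7LocationRef)
if err != nil {
	log.Fatal(err)
}

// ID() includes the files-category/drive prefix used as a map key;
// InDetails() is just the human-readable portion ("Folder/Sub").
fmt.Println(loc.ID().String())
fmt.Println(loc.InDetails().String())
```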

View File

@@ -0,0 +1,127 @@
package details
import (
"strconv"
"time"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/pkg/path"
)
// NewExchangeLocationIDer builds a LocationIDer for the given category and
// folder path. The path denoted by the folders should be unique within the
// category.
func NewExchangeLocationIDer(
category path.CategoryType,
escapedFolders ...string,
) (uniqueLoc, error) {
if err := path.ValidateServiceAndCategory(path.ExchangeService, category); err != nil {
return uniqueLoc{}, clues.Wrap(err, "making exchange LocationIDer")
}
pb := path.Builder{}.Append(category.String()).Append(escapedFolders...)
return uniqueLoc{
pb: pb,
prefixElems: 1,
}, nil
}
// ExchangeInfo describes an exchange item
type ExchangeInfo struct {
ItemType ItemType `json:"itemType,omitempty"`
Sender string `json:"sender,omitempty"`
Subject string `json:"subject,omitempty"`
Recipient []string `json:"recipient,omitempty"`
ParentPath string `json:"parentPath,omitempty"`
Received time.Time `json:"received,omitempty"`
EventStart time.Time `json:"eventStart,omitempty"`
EventEnd time.Time `json:"eventEnd,omitempty"`
Organizer string `json:"organizer,omitempty"`
ContactName string `json:"contactName,omitempty"`
EventRecurs bool `json:"eventRecurs,omitempty"`
Created time.Time `json:"created,omitempty"`
Modified time.Time `json:"modified,omitempty"`
Size int64 `json:"size,omitempty"`
}
// Headers returns the human-readable names of properties in an ExchangeInfo
// for printing out to a terminal in a columnar display.
func (i ExchangeInfo) Headers() []string {
switch i.ItemType {
case ExchangeEvent:
return []string{"Organizer", "Subject", "Starts", "Ends", "Recurring"}
case ExchangeContact:
return []string{"Contact Name"}
case ExchangeMail:
return []string{"Sender", "Folder", "Subject", "Received"}
}
return []string{}
}
// Values returns the values matching the Headers list for printing
// out to a terminal in a columnar display.
func (i ExchangeInfo) Values() []string {
switch i.ItemType {
case ExchangeEvent:
return []string{
i.Organizer,
i.Subject,
dttm.FormatToTabularDisplay(i.EventStart),
dttm.FormatToTabularDisplay(i.EventEnd),
strconv.FormatBool(i.EventRecurs),
}
case ExchangeContact:
return []string{i.ContactName}
case ExchangeMail:
return []string{
i.Sender, i.ParentPath, i.Subject,
dttm.FormatToTabularDisplay(i.Received),
}
}
return []string{}
}
func (i *ExchangeInfo) UpdateParentPath(newLocPath *path.Builder) {
i.ParentPath = newLocPath.String()
}
func (i *ExchangeInfo) uniqueLocation(baseLoc *path.Builder) (*uniqueLoc, error) {
var category path.CategoryType
switch i.ItemType {
case ExchangeEvent:
category = path.EventsCategory
case ExchangeContact:
category = path.ContactsCategory
case ExchangeMail:
category = path.EmailCategory
}
loc, err := NewExchangeLocationIDer(category, baseLoc.Elements()...)
return &loc, err
}
func (i *ExchangeInfo) updateFolder(f *FolderInfo) error {
// Use a switch instead of a rather large if-statement. Just make sure it's an
// Exchange type. If it's not return an error.
switch i.ItemType {
case ExchangeContact, ExchangeEvent, ExchangeMail:
default:
return clues.New("unsupported non-Exchange ItemType").
With("item_type", i.ItemType)
}
f.DataType = i.ItemType
return nil
}
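
`NewExchangeLocationIDer` validates the category against the Exchange service and prefixes it onto the folder path; a quick sketch (the exact rendered strings depend on `CategoryType.String()`, so they are only indicated in comments):

```go
loc, err := details.NewExchangeLocationIDer(path.EmailCategory, "Inbox", "Important")
if err != nil {
	log.Fatal(err)
}

// ID() keeps the category prefix (used as a map key);
// InDetails() drops it, leaving "Inbox/Important".
fmt.Println(loc.ID().String())
fmt.Println(loc.InDetails().String())

// A non-Exchange category is rejected.
_, err = details.NewExchangeLocationIDer(path.LibrariesCategory, "docs")
fmt.Println(err != nil) // true
```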

View File

@@ -0,0 +1,21 @@
package details
import "time"
type FolderInfo struct {
ItemType ItemType `json:"itemType,omitempty"`
DisplayName string `json:"displayName"`
Modified time.Time `json:"modified,omitempty"`
Size int64 `json:"size,omitempty"`
DataType ItemType `json:"dataType,omitempty"`
DriveName string `json:"driveName,omitempty"`
DriveID string `json:"driveID,omitempty"`
}
func (i FolderInfo) Headers() []string {
return []string{"Display Name"}
}
func (i FolderInfo) Values() []string {
return []string{i.DisplayName}
}

View File

@@ -0,0 +1,59 @@
package details
import (
"time"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/pkg/path"
)
// NewGroupsLocationIDer builds a LocationIDer for the groups service.
func NewGroupsLocationIDer(
driveID string,
escapedFolders ...string,
) uniqueLoc {
// TODO: implement
return uniqueLoc{}
}
// GroupsInfo describes a groups item
type GroupsInfo struct {
Created time.Time `json:"created,omitempty"`
DriveName string `json:"driveName,omitempty"`
DriveID string `json:"driveID,omitempty"`
ItemName string `json:"itemName,omitempty"`
ItemType ItemType `json:"itemType,omitempty"`
Modified time.Time `json:"modified,omitempty"`
Owner string `json:"owner,omitempty"`
ParentPath string `json:"parentPath,omitempty"`
Size int64 `json:"size,omitempty"`
}
// Headers returns the human-readable names of properties in a GroupsInfo
// for printing out to a terminal in a columnar display.
func (i GroupsInfo) Headers() []string {
return []string{"Created", "Modified"}
}
// Values returns the values matching the Headers list for printing
// out to a terminal in a columnar display.
func (i GroupsInfo) Values() []string {
return []string{
dttm.FormatToTabularDisplay(i.Created),
dttm.FormatToTabularDisplay(i.Modified),
}
}
func (i *GroupsInfo) UpdateParentPath(newLocPath *path.Builder) {
i.ParentPath = newLocPath.PopFront().String()
}
func (i *GroupsInfo) uniqueLocation(baseLoc *path.Builder) (*uniqueLoc, error) {
return nil, clues.New("not yet implemented")
}
func (i *GroupsInfo) updateFolder(f *FolderInfo) error {
return clues.New("not yet implemented")
}
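
`NewGroupsLocationIDer` is an empty stub for now. Purely as a hypothetical, a Groups implementation would likely mirror the OneDrive/SharePoint constructors once the Groups path category is settled; nothing below is part of this PR, and the category used is only a placeholder:

```go
// Hypothetical sketch only; the category for groups data is not decided in this PR.
func NewGroupsLocationIDer(
	driveID string,
	escapedFolders ...string,
) uniqueLoc {
	pb := path.Builder{}.
		Append(path.LibrariesCategory.String(), driveID). // placeholder category
		Append(escapedFolders...)

	return uniqueLoc{
		pb:          pb,
		prefixElems: 2,
	}
}
```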

View File

@@ -0,0 +1,169 @@
package details
import (
"time"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/path"
)
type ItemType int
// ItemTypes are enumerated by service (hundreds digit) and data type (ones digit).
// Ex: Exchange is 00x where x is the data type, SharePoint is 10x, etc.
// Every item info struct should get its own hundredth enumeration entry.
// Every item category for that service should get its own entry (even if differences
// between types aren't apparent on initial implementation, this future-proofs
// against breaking changes).
// Entries should not be rearranged.
// Additionally, any itemType directly assigned a number should not be altered.
// This applies to OneDriveItem and FolderItem
const (
UnknownType ItemType = iota // 0, global unknown value
// Exchange (00x)
ExchangeContact
ExchangeEvent
ExchangeMail
// SharePoint (10x)
SharePointLibrary ItemType = iota + 97 // 100
SharePointList // 101...
SharePointPage
// OneDrive (20x)
OneDriveItem ItemType = 205
// Folder Management(30x)
FolderItem ItemType = 306
)
func UpdateItem(item *ItemInfo, newLocPath *path.Builder) {
// Only OneDrive and SharePoint have information about parent folders
// contained in them.
// Can't switch based on infoType because that's been unstable.
if item.Exchange != nil {
item.Exchange.UpdateParentPath(newLocPath)
} else if item.SharePoint != nil {
// SharePoint used to store library items with the OneDriveItem ItemType.
// Start switching them over as we see them since there's no point in
// keeping the old format.
if item.SharePoint.ItemType == OneDriveItem {
item.SharePoint.ItemType = SharePointLibrary
}
item.SharePoint.UpdateParentPath(newLocPath)
} else if item.OneDrive != nil {
item.OneDrive.UpdateParentPath(newLocPath)
}
}
// ItemInfo is a oneOf that contains service specific
// information about the item it tracks
type ItemInfo struct {
Folder *FolderInfo `json:"folder,omitempty"`
Exchange *ExchangeInfo `json:"exchange,omitempty"`
SharePoint *SharePointInfo `json:"sharePoint,omitempty"`
OneDrive *OneDriveInfo `json:"oneDrive,omitempty"`
Groups *GroupsInfo `json:"groups,omitempty"`
// Optional item extension data
Extension *ExtensionData `json:"extension,omitempty"`
}
// typedInfo should get embedded in each service type to track
// the type of item it stores for multi-item service support.
// infoType provides internal categorization for collecting like-typed ItemInfos.
// It should return the most granular value type (ex: "event" for an exchange
// calendar event).
func (i ItemInfo) infoType() ItemType {
switch {
case i.Folder != nil:
return i.Folder.ItemType
case i.Exchange != nil:
return i.Exchange.ItemType
case i.SharePoint != nil:
return i.SharePoint.ItemType
case i.OneDrive != nil:
return i.OneDrive.ItemType
}
return UnknownType
}
func (i ItemInfo) size() int64 {
switch {
case i.Exchange != nil:
return i.Exchange.Size
case i.OneDrive != nil:
return i.OneDrive.Size
case i.SharePoint != nil:
return i.SharePoint.Size
case i.Folder != nil:
return i.Folder.Size
}
return 0
}
func (i ItemInfo) Modified() time.Time {
switch {
case i.Exchange != nil:
return i.Exchange.Modified
case i.OneDrive != nil:
return i.OneDrive.Modified
case i.SharePoint != nil:
return i.SharePoint.Modified
case i.Folder != nil:
return i.Folder.Modified
}
return time.Time{}
}
func (i ItemInfo) uniqueLocation(baseLoc *path.Builder) (*uniqueLoc, error) {
switch {
case i.Exchange != nil:
return i.Exchange.uniqueLocation(baseLoc)
case i.OneDrive != nil:
return i.OneDrive.uniqueLocation(baseLoc)
case i.SharePoint != nil:
return i.SharePoint.uniqueLocation(baseLoc)
case i.Groups != nil:
return i.Groups.uniqueLocation(baseLoc)
default:
return nil, clues.New("unsupported type")
}
}
func (i ItemInfo) updateFolder(f *FolderInfo) error {
switch {
case i.Exchange != nil:
return i.Exchange.updateFolder(f)
case i.OneDrive != nil:
return i.OneDrive.updateFolder(f)
case i.SharePoint != nil:
return i.SharePoint.updateFolder(f)
case i.Groups != nil:
return i.Groups.updateFolder(f)
default:
return clues.New("unsupported type")
}
}
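
For a concrete feel of `UpdateItem`, the OneDrive and SharePoint branches rewrite the item's parent path from a new location builder, dropping the leading drive element via `PopFront`. A small sketch:

```go
info := details.ItemInfo{
	OneDrive: &details.OneDriveInfo{
		ItemType:   details.OneDriveItem,
		ParentPath: "old/location",
	},
}

// The leading element (the drive portion) is dropped by PopFront before storing.
details.UpdateItem(&info, path.Builder{}.Append("My Drive", "new", "location"))

fmt.Println(info.OneDrive.ParentPath) // new/location
```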

View File

@@ -0,0 +1,125 @@
package details
import (
"context"
"github.com/alcionai/corso/src/cli/print"
)
// DetailsModel describes what was stored in a Backup
type DetailsModel struct {
Entries []Entry `json:"entries"`
}
// PrintEntries writes the DetailsModel's Entries to stdout, in the format
// requested by the caller.
func (dm DetailsModel) PrintEntries(ctx context.Context) {
printEntries(ctx, dm.Entries)
}
type infoer interface {
Entry | *Entry
// Need this here so we can access the infoType function without a type
// assertion. See https://stackoverflow.com/a/71378366 for more details.
infoType() ItemType
}
func printEntries[T infoer](ctx context.Context, entries []T) {
if print.DisplayJSONFormat() {
printJSON(ctx, entries)
} else {
printTable(ctx, entries)
}
}
func printTable[T infoer](ctx context.Context, entries []T) {
perType := map[ItemType][]print.Printable{}
for _, ent := range entries {
it := ent.infoType()
ps, ok := perType[it]
if !ok {
ps = []print.Printable{}
}
perType[it] = append(ps, print.Printable(ent))
}
for _, ps := range perType {
print.All(ctx, ps...)
}
}
func printJSON[T infoer](ctx context.Context, entries []T) {
ents := []print.Printable{}
for _, ent := range entries {
ents = append(ents, print.Printable(ent))
}
print.All(ctx, ents...)
}
// Paths returns the list of Paths for non-folder and non-meta items extracted
// from the Entries slice.
func (dm DetailsModel) Paths() []string {
r := make([]string, 0, len(dm.Entries))
for _, ent := range dm.Entries {
if ent.Folder != nil || ent.isMetaFile() {
continue
}
r = append(r, ent.RepoRef)
}
return r
}
// Items returns a slice of *ItemInfo that does not contain any FolderInfo
// entries. Required because not all folders in the details are valid resource
// paths, and we want to slice out metadata.
func (dm DetailsModel) Items() entrySet {
res := make([]*Entry, 0, len(dm.Entries))
for i := 0; i < len(dm.Entries); i++ {
ent := dm.Entries[i]
if ent.Folder != nil || ent.isMetaFile() {
continue
}
res = append(res, &ent)
}
return res
}
// FilterMetaFiles returns a copy of the Details with all of the
// .meta files removed from the entries.
func (dm DetailsModel) FilterMetaFiles() DetailsModel {
d2 := DetailsModel{
Entries: []Entry{},
}
for _, ent := range dm.Entries {
if !ent.isMetaFile() {
d2.Entries = append(d2.Entries, ent)
}
}
return d2
}
// SumNonMetaFileSizes returns the total size of items excluding all the
// .meta files from the items.
func (dm DetailsModel) SumNonMetaFileSizes() int64 {
var size int64
// Items will provide only files and filter out folders
for _, ent := range dm.FilterMetaFiles().Items() {
size += ent.size()
}
return size
}
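
To make the filtering rules above concrete: folder entries and OneDrive `.meta` entries are excluded by `Items` and `Paths`, and `SumNonMetaFileSizes` only counts what survives. A small sketch:

```go
dm := details.DetailsModel{
	Entries: []details.Entry{
		{RepoRef: "r1", ItemInfo: details.ItemInfo{
			OneDrive: &details.OneDriveInfo{ItemType: details.OneDriveItem, Size: 10},
		}},
		{RepoRef: "r2", ItemInfo: details.ItemInfo{
			OneDrive: &details.OneDriveInfo{ItemType: details.OneDriveItem, IsMeta: true, Size: 5},
		}},
		{RepoRef: "r3", ItemInfo: details.ItemInfo{
			Folder: &details.FolderInfo{ItemType: details.FolderItem, Size: 7},
		}},
	},
}

fmt.Println(dm.Paths())               // [r1] — folders and meta files excluded
fmt.Println(len(dm.Items()))          // 1
fmt.Println(dm.SumNonMetaFileSizes()) // 10
```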

View File

@@ -0,0 +1,78 @@
package details
import (
"time"
"github.com/alcionai/clues"
"github.com/dustin/go-humanize"
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/pkg/path"
)
// NewOneDriveLocationIDer builds a LocationIDer for the drive and folder path.
// The path denoted by the folders should be unique within the drive.
func NewOneDriveLocationIDer(
driveID string,
escapedFolders ...string,
) uniqueLoc {
pb := path.Builder{}.
Append(path.FilesCategory.String(), driveID).
Append(escapedFolders...)
return uniqueLoc{
pb: pb,
prefixElems: 2,
}
}
// OneDriveInfo describes a oneDrive item
type OneDriveInfo struct {
Created time.Time `json:"created,omitempty"`
DriveID string `json:"driveID,omitempty"`
DriveName string `json:"driveName,omitempty"`
IsMeta bool `json:"isMeta,omitempty"`
ItemName string `json:"itemName,omitempty"`
ItemType ItemType `json:"itemType,omitempty"`
Modified time.Time `json:"modified,omitempty"`
Owner string `json:"owner,omitempty"`
ParentPath string `json:"parentPath"`
Size int64 `json:"size,omitempty"`
}
// Headers returns the human-readable names of properties in a OneDriveInfo
// for printing out to a terminal in a columnar display.
func (i OneDriveInfo) Headers() []string {
return []string{"ItemName", "ParentPath", "Size", "Owner", "Created", "Modified"}
}
// Values returns the values matching the Headers list for printing
// out to a terminal in a columnar display.
func (i OneDriveInfo) Values() []string {
return []string{
i.ItemName,
i.ParentPath,
humanize.Bytes(uint64(i.Size)),
i.Owner,
dttm.FormatToTabularDisplay(i.Created),
dttm.FormatToTabularDisplay(i.Modified),
}
}
func (i *OneDriveInfo) UpdateParentPath(newLocPath *path.Builder) {
i.ParentPath = newLocPath.PopFront().String()
}
func (i *OneDriveInfo) uniqueLocation(baseLoc *path.Builder) (*uniqueLoc, error) {
if len(i.DriveID) == 0 {
return nil, clues.New("empty drive ID")
}
loc := NewOneDriveLocationIDer(i.DriveID, baseLoc.Elements()...)
return &loc, nil
}
func (i *OneDriveInfo) updateFolder(f *FolderInfo) error {
return updateFolderWithinDrive(OneDriveItem, i.DriveName, i.DriveID, f)
}
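For context, a small sketch (not part of this diff; the sample field values are placeholders) showing how Headers and Values pair up for columnar output:
package main

import (
	"fmt"
	"strings"
	"time"

	"github.com/alcionai/corso/src/pkg/backup/details"
)

func main() {
	info := details.OneDriveInfo{
		ItemName:   "report.docx",
		ParentPath: "Documents/Q3",
		Size:       1 << 20,
		Owner:      "user@example.com",
		Created:    time.Now(),
		Modified:   time.Now(),
	}

	// Headers and Values are index-aligned, so one row prints as two
	// tab-joined lines.
	fmt.Println(strings.Join(info.Headers(), "\t"))
	fmt.Println(strings.Join(info.Values(), "\t"))
}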

View File

@ -0,0 +1,86 @@
package details
import (
"time"
"github.com/alcionai/clues"
"github.com/dustin/go-humanize"
"github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/pkg/path"
)
// NewSharePointLocationIDer builds a LocationIDer for the drive and folder
// path. The path denoted by the folders should be unique within the drive.
func NewSharePointLocationIDer(
driveID string,
escapedFolders ...string,
) uniqueLoc {
pb := path.Builder{}.
Append(path.LibrariesCategory.String(), driveID).
Append(escapedFolders...)
return uniqueLoc{
pb: pb,
prefixElems: 2,
}
}
// SharePointInfo describes a SharePoint item
type SharePointInfo struct {
Created time.Time `json:"created,omitempty"`
DriveName string `json:"driveName,omitempty"`
DriveID string `json:"driveID,omitempty"`
ItemName string `json:"itemName,omitempty"`
ItemType ItemType `json:"itemType,omitempty"`
Modified time.Time `json:"modified,omitempty"`
Owner string `json:"owner,omitempty"`
ParentPath string `json:"parentPath,omitempty"`
Size int64 `json:"size,omitempty"`
WebURL string `json:"webUrl,omitempty"`
SiteID string `json:"siteID,omitempty"`
}
// Headers returns the human-readable names of properties in a SharePointInfo
// for printing out to a terminal in a columnar display.
func (i SharePointInfo) Headers() []string {
return []string{"ItemName", "Library", "ParentPath", "Size", "Owner", "Created", "Modified"}
}
// Values returns the values matching the Headers list for printing
// out to a terminal in a columnar display.
func (i SharePointInfo) Values() []string {
return []string{
i.ItemName,
i.DriveName,
i.ParentPath,
humanize.Bytes(uint64(i.Size)),
i.Owner,
dttm.FormatToTabularDisplay(i.Created),
dttm.FormatToTabularDisplay(i.Modified),
}
}
func (i *SharePointInfo) UpdateParentPath(newLocPath *path.Builder) {
i.ParentPath = newLocPath.PopFront().String()
}
func (i *SharePointInfo) uniqueLocation(baseLoc *path.Builder) (*uniqueLoc, error) {
if len(i.DriveID) == 0 {
return nil, clues.New("empty drive ID")
}
loc := NewSharePointLocationIDer(i.DriveID, baseLoc.Elements()...)
return &loc, nil
}
func (i *SharePointInfo) updateFolder(f *FolderInfo) error {
// TODO(ashmrtn): Change to just SharePointLibrary when the code that
// generates the item type is fixed.
if i.ItemType == OneDriveItem || i.ItemType == SharePointLibrary {
return updateFolderWithinDrive(SharePointLibrary, i.DriveName, i.DriveID, f)
}
return clues.New("unsupported non-SharePoint ItemType").With("item_type", i.ItemType)
}
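A similar sketch for SharePointInfo (illustrative only; the builder elements are placeholder values), showing how UpdateParentPath drops the leading location element:
package main

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/backup/details"
	"github.com/alcionai/corso/src/pkg/path"
)

func main() {
	info := details.SharePointInfo{ItemName: "budget.xlsx"}

	// The first element (here a drive identifier) is popped off; the rest
	// becomes the item's parent path.
	loc := path.Builder{}.Append("drive-id", "Shared Documents", "Finance")
	info.UpdateParentPath(loc)

	fmt.Println("parent path:", info.ParentPath) // the "drive-id" prefix is dropped
}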

513
src/pkg/selectors/groups.go Normal file
View File

@ -0,0 +1,513 @@
package selectors
import (
"context"
"fmt"
"github.com/alcionai/clues"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
)
// ---------------------------------------------------------------------------
// Selectors
// ---------------------------------------------------------------------------
type (
// groups provides an api for selecting
// data scopes applicable to the groups service.
groups struct {
Selector
}
// GroupsBackup provides an api for selecting
// data scopes applicable to the Groups service,
// plus backup-specific methods.
GroupsBackup struct {
groups
}
// GroupsRestore provides an api for selecting
// data scopes applicable to the Groups service,
// plus restore-specific methods.
GroupsRestore struct {
groups
}
)
var (
_ Reducer = &GroupsRestore{}
_ pathCategorier = &GroupsRestore{}
)
// NewGroupsBackup produces a new Selector with the service set to ServiceGroups.
func NewGroupsBackup(resources []string) *GroupsBackup {
src := GroupsBackup{
groups{
newSelector(ServiceGroups, resources),
},
}
return &src
}
// ToGroupsBackup transforms the generic selector into a GroupsBackup.
// Errors if the service defined by the selector is not ServiceGroups.
func (s Selector) ToGroupsBackup() (*GroupsBackup, error) {
if s.Service != ServiceGroups {
return nil, badCastErr(ServiceGroups, s.Service)
}
src := GroupsBackup{groups{s}}
return &src, nil
}
func (s GroupsBackup) SplitByResourceOwner(resources []string) []GroupsBackup {
sels := splitByResourceOwner[GroupsScope](s.Selector, resources, GroupsGroup)
ss := make([]GroupsBackup, 0, len(sels))
for _, sel := range sels {
ss = append(ss, GroupsBackup{groups{sel}})
}
return ss
}
// NewGroupsRestore produces a new Selector with the service set to ServiceGroups.
func NewGroupsRestore(resources []string) *GroupsRestore {
src := GroupsRestore{
groups{
newSelector(ServiceGroups, resources),
},
}
return &src
}
// ToGroupsRestore transforms the generic selector into a GroupsRestore.
// Errors if the service defined by the selector is not ServiceGroups.
func (s Selector) ToGroupsRestore() (*GroupsRestore, error) {
if s.Service != ServiceGroups {
return nil, badCastErr(ServiceGroups, s.Service)
}
src := GroupsRestore{groups{s}}
return &src, nil
}
func (s GroupsRestore) SplitByResourceOwner(resources []string) []GroupsRestore {
sels := splitByResourceOwner[GroupsScope](s.Selector, resources, GroupsGroup)
ss := make([]GroupsRestore, 0, len(sels))
for _, sel := range sels {
ss = append(ss, GroupsRestore{groups{sel}})
}
return ss
}
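For reference, a brief sketch of the cast round trip and per-resource split described above (illustrative only; the group IDs are placeholders):
package main

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/selectors"
)

func main() {
	sel := selectors.NewGroupsBackup([]string{"group-1", "group-2"})

	// The embedded generic Selector travels through the backup pipeline;
	// callers cast it back, and the cast errors for any non-Groups service.
	gb, err := sel.Selector.ToGroupsBackup()
	if err != nil {
		fmt.Println("cast failed:", err)
		return
	}

	// SplitByResourceOwner yields one selector per protected resource.
	fmt.Println("split count:", len(gb.SplitByResourceOwner([]string{"group-1", "group-2"})))
}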
// PathCategories produces the aggregation of discrete resources described by each type of scope.
func (s groups) PathCategories() selectorPathCategories {
return selectorPathCategories{
Excludes: pathCategoriesIn[GroupsScope, groupsCategory](s.Excludes),
Filters: pathCategoriesIn[GroupsScope, groupsCategory](s.Filters),
Includes: pathCategoriesIn[GroupsScope, groupsCategory](s.Includes),
}
}
// ---------------------------------------------------------------------------
// Stringers and Concealers
// ---------------------------------------------------------------------------
func (s GroupsScope) Conceal() string { return conceal(s) }
func (s GroupsScope) Format(fs fmt.State, r rune) { format(s, fs, r) }
func (s GroupsScope) String() string { return conceal(s) }
func (s GroupsScope) PlainString() string { return plainString(s) }
// -------------------
// Scope Factories
// Include appends the provided scopes to the selector's inclusion set.
// Data is included if it matches ANY inclusion.
// The inclusion set is later filtered (all included data must pass ALL
// filters) and excluded (all included data must not match ANY exclusion).
// Data is included if it matches ANY inclusion (of the same data category).
//
// All parts of the scope must match for data to be included.
// Ex: File(s1, f1, i1) => only includes an item if it is owned by site s1,
// located in folder f1, and ID'd as i1. Use selectors.Any() to wildcard
// a scope value. No value will match if selectors.None() is provided.
//
// Group-level scopes will automatically apply the Any() wildcard to
// child properties.
// Ex: Site(s1) automatically cascades to all folders and files owned
// by s1.
func (s *groups) Include(scopes ...[]GroupsScope) {
s.Includes = appendScopes(s.Includes, scopes...)
}
// Exclude appends the provided scopes to the selector's exclusion set.
// Every Exclusion scope applies globally, affecting all inclusion scopes.
// Data is excluded if it matches ANY exclusion.
//
// All parts of the scope must match for data to be excluded.
// Ex: File(s1, f1, i1) => only excludes an item if it is owned by site s1,
// located in folder f1, and ID'd as i1. Use selectors.Any() to wildcard
// a scope value. No value will match if selectors.None() is provided.
//
// Group-level scopes will automatically apply the Any() wildcard to
// child properties.
// Ex: Site(s1) automatically cascades to all folders and files owned
// by s1.
func (s *groups) Exclude(scopes ...[]GroupsScope) {
s.Excludes = appendScopes(s.Excludes, scopes...)
}
// Filter appends the provided scopes to the selector's filters set.
// A selector with >0 filters and 0 inclusions will include any data
// that passes all filters.
// A selector with >0 filters and >0 inclusions will reduce the
// inclusion set to only the data that passes all filters.
// Data is retained if it passes ALL filters.
//
// All parts of the scope must match for data to pass the filter.
// Ex: File(s1, f1, i1) => only passes an item if it is owned by site s1,
// located in folder f1, and ID'd as i1. Use selectors.Any() to wildcard
// a scope value. No value will match if selectors.None() is provided.
//
// Group-level scopes will automatically apply the Any() wildcard to
// child properties.
// Ex: Site(s1) automatically cascades to all folders and files owned
// by s1.
func (s *groups) Filter(scopes ...[]GroupsScope) {
s.Filters = appendScopes(s.Filters, scopes...)
}
// Scopes retrieves the list of GroupsScopes in the selector.
func (s *groups) Scopes() []GroupsScope {
return scopes[GroupsScope](s.Selector)
}
// -------------------
// Scope Factories
// AllData produces one or more Groups data scopes.
// One scope is created per data category.
// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
// If any slice contains selectors.None, that slice is reduced to [selectors.None]
// If any slice is empty, it defaults to [selectors.None]
func (s *groups) AllData() []GroupsScope {
scopes := []GroupsScope{}
scopes = append(
scopes,
makeScope[GroupsScope](GroupsTODOContainer, Any()))
return scopes
}
// TODO produces one or more Groups TODO scopes.
// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
// If any slice contains selectors.None, that slice is reduced to [selectors.None]
// Any empty slice defaults to [selectors.None]
func (s *groups) TODO(lists []string, opts ...option) []GroupsScope {
var (
scopes = []GroupsScope{}
os = append([]option{pathComparator()}, opts...)
)
scopes = append(scopes, makeScope[GroupsScope](GroupsTODOContainer, lists, os...))
return scopes
}
// TODOItems produces one or more Groups TODO item scopes.
// If any slice contains selectors.Any, that slice is reduced to [selectors.Any]
// If any slice contains selectors.None, that slice is reduced to [selectors.None]
// If any slice is empty, it defaults to [selectors.None]
// options are only applied to the list scopes.
func (s *groups) TODOItems(lists, items []string, opts ...option) []GroupsScope {
scopes := []GroupsScope{}
scopes = append(
scopes,
makeScope[GroupsScope](GroupsTODOItem, items, defaultItemOptions(s.Cfg)...).
set(GroupsTODOContainer, lists, opts...))
return scopes
}
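A short sketch of how these factories combine with the Include/Exclude appenders above (illustrative only; names like "container-1" are placeholders mirroring the TODO naming):
package main

import "github.com/alcionai/corso/src/pkg/selectors"

func main() {
	sel := selectors.NewGroupsBackup([]string{"group-1"})

	// Include everything owned by the selected group...
	sel.Include(sel.AllData())

	// ...but exclude a single item within one container.
	sel.Exclude(sel.TODOItems([]string{"container-1"}, []string{"item-1"}))

	// Scopes returns the selector's scopes as typed GroupsScope values.
	for _, sc := range sel.Scopes() {
		_ = sc.Category() // each scope reports its data category
	}
}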
// -------------------
// ItemInfo Factories
// TODO
// ---------------------------------------------------------------------------
// Categories
// ---------------------------------------------------------------------------
// groupsCategory enumerates the type of the lowest level
// of data in a scope.
type groupsCategory string
// interface compliance checks
var _ categorizer = GroupsCategoryUnknown
const (
GroupsCategoryUnknown groupsCategory = ""
// types of data in Groups
GroupsGroup groupsCategory = "GroupsGroup"
GroupsTODOContainer groupsCategory = "GroupsTODOContainer"
GroupsTODOItem groupsCategory = "GroupsTODOItem"
// details.itemInfo comparables
// library drive selection
GroupsInfoSiteLibraryDrive groupsCategory = "GroupsInfoSiteLibraryDrive"
)
// groupsLeafProperties describes common metadata of the leaf categories
var groupsLeafProperties = map[categorizer]leafProperty{
GroupsTODOItem: {
pathKeys: []categorizer{GroupsTODOContainer, GroupsTODOItem},
pathType: path.UnknownCategory,
},
GroupsGroup: { // the root category must be represented, even though it isn't a leaf
pathKeys: []categorizer{GroupsGroup},
pathType: path.UnknownCategory,
},
}
func (c groupsCategory) String() string {
return string(c)
}
// leafCat returns the leaf category of the receiver.
// If the receiver category has multiple leaves (ex: GroupsGroup) or no
// leaves (ex: Unknown), the receiver itself is returned.
// Ex: GroupsTODOContainer.leafCat() => GroupsTODOItem
// Ex: GroupsGroup.leafCat() => GroupsGroup
func (c groupsCategory) leafCat() categorizer {
switch c {
case GroupsTODOContainer, GroupsInfoSiteLibraryDrive:
return GroupsTODOItem
}
return c
}
// rootCat returns the root category type.
func (c groupsCategory) rootCat() categorizer {
return GroupsGroup
}
// unknownCat returns the unknown category type.
func (c groupsCategory) unknownCat() categorizer {
return GroupsCategoryUnknown
}
// isUnion returns true if the category is a group or a webURL, which
// can act as an alternative identifier to the group ID across all group types.
func (c groupsCategory) isUnion() bool {
return c == c.rootCat()
}
// isLeaf is true if the category is a GroupsTODOItem category.
func (c groupsCategory) isLeaf() bool {
return c == c.leafCat()
}
// pathValues transforms a repo path and details entry into a map of
// category-identified properties.
//
// Example:
// [tenantID, service, groupID, category, folder, itemID]
// => {GroupsTODOContainer: folder, GroupsTODOItem: itemID}
func (c groupsCategory) pathValues(
repo path.Path,
ent details.Entry,
cfg Config,
) (map[categorizer][]string, error) {
var (
folderCat, itemCat categorizer
itemID string
rFld string
)
switch c {
case GroupsTODOContainer, GroupsTODOItem:
if ent.Groups == nil {
return nil, clues.New("no Groups ItemInfo in details")
}
folderCat, itemCat = GroupsTODOContainer, GroupsTODOItem
rFld = ent.Groups.ParentPath
default:
return nil, clues.New("unrecognized groupsCategory").With("category", c)
}
item := ent.ItemRef
if len(item) == 0 {
item = repo.Item()
}
if cfg.OnlyMatchItemNames {
item = ent.ItemInfo.Groups.ItemName
}
result := map[categorizer][]string{
folderCat: {rFld},
itemCat: {item, ent.ShortRef},
}
if len(itemID) > 0 {
result[itemCat] = append(result[itemCat], itemID)
}
return result, nil
}
// pathKeys returns the path keys recognized by the receiver's leaf type.
func (c groupsCategory) pathKeys() []categorizer {
return groupsLeafProperties[c.leafCat()].pathKeys
}
// PathType converts the category's leaf type into the matching path.CategoryType.
func (c groupsCategory) PathType() path.CategoryType {
return groupsLeafProperties[c.leafCat()].pathType
}
// ---------------------------------------------------------------------------
// Scopes
// ---------------------------------------------------------------------------
// GroupsScope specifies the data available
// when interfacing with the Groups service.
type GroupsScope scope
// interface compliance checks
var _ scoper = &GroupsScope{}
// Category describes the type of the data in scope.
func (s GroupsScope) Category() groupsCategory {
return groupsCategory(getCategory(s))
}
// categorizer type is a generic wrapper around Category.
// Primarily used by scopes.go for abstract comparisons.
func (s GroupsScope) categorizer() categorizer {
return s.Category()
}
// Matches returns true if the category is included in the scope's
// data type, and the target string matches that category's comparator.
func (s GroupsScope) Matches(cat groupsCategory, target string) bool {
return matches(s, cat, target)
}
// InfoCategory returns the category enum of the scope info.
// If the scope is not an info type, returns GroupsCategoryUnknown.
func (s GroupsScope) InfoCategory() groupsCategory {
return groupsCategory(getInfoCategory(s))
}
// IncludesCategory checks whether the scope includes a
// certain category of data.
// Ex: to check if the scope includes TODO item data:
// s.IncludesCategory(selectors.GroupsTODOItem)
func (s GroupsScope) IncludesCategory(cat groupsCategory) bool {
return categoryMatches(s.Category(), cat)
}
// returns true if the category is included in the scope's data type,
// and the value is set to Any().
func (s GroupsScope) IsAny(cat groupsCategory) bool {
return isAnyTarget(s, cat)
}
// Get returns the values of the given data category in the scope.
// If the scope contains all data types for a group, it'll return the
// GroupsGroup category's values.
func (s GroupsScope) Get(cat groupsCategory) []string {
return getCatValue(s, cat)
}
// sets a value by category to the scope. Only intended for internal use.
func (s GroupsScope) set(cat groupsCategory, v []string, opts ...option) GroupsScope {
os := []option{}
switch cat {
case GroupsTODOContainer:
os = append(os, pathComparator())
}
return set(s, cat, v, append(os, opts...)...)
}
// setDefaults ensures that group scopes express `AnyTgt` for their child category types.
func (s GroupsScope) setDefaults() {
switch s.Category() {
case GroupsGroup:
s[GroupsTODOContainer.String()] = passAny
s[GroupsTODOItem.String()] = passAny
case GroupsTODOContainer:
s[GroupsTODOItem.String()] = passAny
}
}
// ---------------------------------------------------------------------------
// Backup Details Filtering
// ---------------------------------------------------------------------------
// Reduce filters the entries in a details struct to only those that match the
// inclusions, filters, and exclusions in the selector.
func (s groups) Reduce(
ctx context.Context,
deets *details.Details,
errs *fault.Bus,
) *details.Details {
return reduce[GroupsScope](
ctx,
deets,
s.Selector,
map[path.CategoryType]groupsCategory{
path.UnknownCategory: GroupsTODOItem,
},
errs)
}
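For illustration, a sketch of reducing backup details through a Groups restore selector (not part of this diff; fault.New and the empty Details value stand in for context supplied by the wider codebase):
package main

import (
	"context"

	"github.com/alcionai/corso/src/pkg/backup/details"
	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/selectors"
)

// reduceToSelection keeps only the details entries matched by the selector's
// includes, filters, and excludes.
func reduceToSelection(ctx context.Context, deets *details.Details) *details.Details {
	sel := selectors.NewGroupsRestore([]string{"group-1"})
	sel.Include(sel.AllData())

	return sel.Reduce(ctx, deets, fault.New(true))
}

func main() {
	_ = reduceToSelection(context.Background(), &details.Details{})
}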
// matchesInfo handles the standard behavior when comparing a scope and a GroupsInfo;
// returns true if the scope and info match for the provided category.
func (s GroupsScope) matchesInfo(dii details.ItemInfo) bool {
var (
infoCat = s.InfoCategory()
i = ""
info = dii.Groups
)
if info == nil {
return false
}
switch infoCat {
case GroupsInfoSiteLibraryDrive:
ds := []string{}
if len(info.DriveName) > 0 {
ds = append(ds, info.DriveName)
}
if len(info.DriveID) > 0 {
ds = append(ds, info.DriveID)
}
return matchesAny(s, GroupsInfoSiteLibraryDrive, ds)
}
return s.Matches(infoCat, i)
}

View File

@ -23,6 +23,7 @@ const (
ServiceExchange // Exchange
ServiceOneDrive // OneDrive
ServiceSharePoint // SharePoint
ServiceGroups // Groups
)
var serviceToPathType = map[service]path.ServiceType{

View File

@ -12,11 +12,12 @@ func _() {
_ = x[ServiceExchange-1]
_ = x[ServiceOneDrive-2]
_ = x[ServiceSharePoint-3]
_ = x[ServiceGroups-4]
}
const _service_name = "Unknown ServiceExchangeOneDriveSharePoint"
const _service_name = "Unknown ServiceExchangeOneDriveSharePointGroups"
var _service_index = [...]uint8{0, 15, 23, 31, 41}
var _service_index = [...]uint8{0, 15, 23, 31, 41, 47}
func (i service) String() string {
if i < 0 || i >= service(len(_service_index)-1) {