support debug reading out delta files (#4629)
#### Does this PR need a docs update or release note?

- [x] ⛔ No

#### Type of change

- [x] 🌻 Feature

#### Test Plan

- [x] 💪 Manual
This commit is contained in: parent d173551d25, commit 1f756ce34f
@@ -12,6 +12,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Fixed
 
 - Handle OneDrive folders being deleted and recreated midway through a backup
+- Automatically re-run a full delta query on incremental backups if the prior backup is found to have malformed prior-state information.
 
 ## [v0.15.0] (beta) - 2023-10-31

@@ -125,6 +125,9 @@ issues:
       linters:
         - forbidigo
       text: "context.(Background|TODO)"
+    - path: internal/m365/collection/drive/collections_test.go
+      linters:
+        - lll
     - path: internal/m365/graph/betasdk
       linters:
         - wsl

@@ -11,6 +11,7 @@ import (
 	"github.com/alcionai/corso/src/cli/backup"
 	"github.com/alcionai/corso/src/cli/config"
+	"github.com/alcionai/corso/src/cli/debug"
 	"github.com/alcionai/corso/src/cli/export"
 	"github.com/alcionai/corso/src/cli/flags"
 	"github.com/alcionai/corso/src/cli/help"
@@ -125,6 +126,7 @@ func BuildCommandTree(cmd *cobra.Command) {
 	backup.AddCommands(cmd)
 	restore.AddCommands(cmd)
 	export.AddCommands(cmd)
+	debug.AddCommands(cmd)
 	help.AddCommands(cmd)
 }

@@ -3,11 +3,13 @@ package debug
 import (
 	"context"
 
 	"github.com/alcionai/clues"
 	"github.com/spf13/cobra"
 
 	"github.com/alcionai/corso/src/cli/flags"
 	. "github.com/alcionai/corso/src/cli/print"
 	"github.com/alcionai/corso/src/cli/utils"
+	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/selectors"
 )
 
@@ -31,9 +33,9 @@ func AddCommands(cmd *cobra.Command) {
 		utils.AddCommand(debugC, subCommand, utils.MarkDebugCommand())
 
 		for _, addTo := range debugCommands {
-			addTo(subCommand)
-			flags.AddAllProviderFlags(subCommand)
-			flags.AddAllStorageFlags(subCommand)
+			servCmd := addTo(subCommand)
+			flags.AddAllProviderFlags(servCmd)
+			flags.AddAllStorageFlags(servCmd)
 		}
 	}
 }

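A note on the loop fix above: the provider and storage flags were being attached to the intermediate subCommand rather than to the per-service command returned by addTo. A minimal cobra sketch of the corrected wiring, with illustrative command and flag names (not the repo's actual constructors):

```go
package main

import "github.com/spf13/cobra"

func main() {
	subCommand := &cobra.Command{Use: "metadata-files"}

	// stand-in for an entry of debugCommands: builds the per-service
	// command, registers it under the parent, and returns it.
	addTo := func(parent *cobra.Command) *cobra.Command {
		servCmd := &cobra.Command{Use: "exchange"}
		parent.AddCommand(servCmd)

		return servCmd
	}

	// corrected pattern: attach flags to the returned child command,
	// so they're parsed on the command the user actually runs.
	servCmd := addTo(subCommand)
	servCmd.Flags().String("backup", "", "ID of the backup to inspect")

	_ = subCommand.Execute()
}
```
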
@@ -85,13 +87,15 @@ func handleMetadataFilesCmd(cmd *cobra.Command, args []string) error {
 // runners
 // ---------------------------------------------------------------------------
 
-func runMetadataFiles(
+func genericMetadataFiles(
 	ctx context.Context,
 	cmd *cobra.Command,
 	args []string,
 	sel selectors.Selector,
-	debugID, serviceName string,
+	backupID string,
 ) error {
+	ctx = clues.Add(ctx, "backup_id", backupID)
+
 	r, _, err := utils.GetAccountAndConnect(ctx, cmd, sel.PathService())
 	if err != nil {
 		return Only(ctx, err)
@@ -99,7 +103,18 @@ func runMetadataFiles(
 
 	defer utils.CloseRepo(ctx, r)
 
-	// TODO: read and print out all metadata files in the debug
+	// read metadata
+	files, err := r.GetBackupMetadata(ctx, sel, backupID, fault.New(true))
+	if err != nil {
+		return Only(ctx, clues.Wrap(err, "retrieving metadata files"))
+	}
+
+	for _, file := range files {
+		Infof(ctx, "\n------------------------------")
+		Info(ctx, file.Name)
+		Info(ctx, file.Path)
+		Pretty(ctx, file.Data)
+	}
 
 	return nil
 }

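With that wiring, each per-service command below builds a broad selector and hands off to genericMetadataFiles, which connects to the repository, fetches the backup's metadata files, and pretty-prints each one. Judging by the handler names (metadataFilesExchangeCmd and friends), the user-facing invocation is roughly `corso debug metadata-files <service> --backup <id>`, though the exact command spelling isn't visible in this diff.
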
@@ -59,13 +59,17 @@ func metadataFilesExchangeCmd(cmd *cobra.Command, args []string) error {
 		return nil
 	}
 
 	// opts := utils.MakeExchangeOpts(cmd)
 
 	if flags.RunModeFV == flags.RunModeFlagTest {
 		return nil
 	}
 
 	sel := selectors.NewExchangeBackup([]string{"unused-placeholder"})
 	sel.Include(sel.AllData())
 
-	return runMetadataFiles(ctx, cmd, args, sel.Selector, flags.BackupIDFV, "Exchange")
+	return genericMetadataFiles(
+		ctx,
+		cmd,
+		args,
+		sel.Selector,
+		flags.BackupIDFV)
 }

@@ -60,13 +60,17 @@ func metadataFilesGroupsCmd(cmd *cobra.Command, args []string) error {
 		return nil
 	}
 
 	// opts := utils.MakeGroupsOpts(cmd)
 
 	if flags.RunModeFV == flags.RunModeFlagTest {
 		return nil
 	}
 
 	sel := selectors.NewGroupsBackup([]string{"unused-placeholder"})
 	sel.Include(sel.AllData())
 
-	return runMetadataFiles(ctx, cmd, args, sel.Selector, flags.BackupIDFV, "Groups")
+	return genericMetadataFiles(
+		ctx,
+		cmd,
+		args,
+		sel.Selector,
+		flags.BackupIDFV)
 }

@@ -59,13 +59,17 @@ func metadataFilesOneDriveCmd(cmd *cobra.Command, args []string) error {
 		return nil
 	}
 
 	// opts := utils.MakeOneDriveOpts(cmd)
 
 	if flags.RunModeFV == flags.RunModeFlagTest {
 		return nil
 	}
 
 	sel := selectors.NewOneDriveBackup([]string{"unused-placeholder"})
 	sel.Include(sel.AllData())
 
-	return runMetadataFiles(ctx, cmd, args, sel.Selector, flags.BackupIDFV, "OneDrive")
+	return genericMetadataFiles(
+		ctx,
+		cmd,
+		args,
+		sel.Selector,
+		flags.BackupIDFV)
 }

@@ -59,13 +59,17 @@ func metadataFilesSharePointCmd(cmd *cobra.Command, args []string) error {
 		return nil
 	}
 
 	// opts := utils.MakeSharePointOpts(cmd)
 
 	if flags.RunModeFV == flags.RunModeFlagTest {
 		return nil
 	}
 
 	sel := selectors.NewSharePointBackup([]string{"unused-placeholder"})
 	sel.Include(sel.LibraryFolders(selectors.Any()))
 
-	return runMetadataFiles(ctx, cmd, args, sel.Selector, flags.BackupIDFV, "SharePoint")
+	return genericMetadataFiles(
+		ctx,
+		cmd,
+		args,
+		sel.Selector,
+		flags.BackupIDFV)
 }

@@ -119,10 +119,21 @@ func Infof(ctx context.Context, t string, s ...any) {
 	outf(ctx, getRootCmd(ctx).ErrOrStderr(), t, s...)
 }
 
+// Pretty prettifies and prints the value.
+func Pretty(ctx context.Context, a any) {
+	if a == nil {
+		Err(ctx, "<nil>")
+		return
+	}
+
+	printPrettyJSON(getRootCmd(ctx).ErrOrStderr(), a)
+}
+
 // PrettyJSON prettifies and prints the value.
 func PrettyJSON(ctx context.Context, p minimumPrintabler) {
 	if p == nil {
 		Err(ctx, "<nil>")
 		return
 	}
 
 	outputJSON(getRootCmd(ctx).ErrOrStderr(), p, outputAsJSONDebug)
@@ -281,3 +292,14 @@ func printJSON(w io.Writer, a any) {
 
 	fmt.Fprintln(w, string(pretty.Pretty(bs)))
 }
+
+// writes the value to w as prettified json.
+func printPrettyJSON(w io.Writer, a any) {
+	bs, err := json.MarshalIndent(a, "", "  ")
+	if err != nil {
+		fmt.Fprintf(w, "error formatting results to json: %v\n", err)
+		return
+	}
+
+	fmt.Fprintln(w, string(pretty.Pretty(bs)))
+}

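The new Pretty helper boils down to marshal-then-prettify. A standalone sketch of the same pipeline; the `pretty` package used by this file appears to be github.com/tidwall/pretty:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/tidwall/pretty"
)

func main() {
	// any value works; Pretty receives `any`, and nil is special-cased.
	v := map[string]any{
		"drive-1": map[string]string{"folder-a": "/root/docs"},
	}

	bs, err := json.MarshalIndent(v, "", "  ")
	if err != nil {
		fmt.Printf("error formatting results to json: %v\n", err)
		return
	}

	// pretty.Pretty normalizes the indented output into compact,
	// readable json, mirroring printPrettyJSON above.
	fmt.Println(string(pretty.Pretty(bs)))
}
```
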
@@ -55,6 +55,14 @@ type BackupBase struct {
 	Reasons []identity.Reasoner
 }
 
+func (bb BackupBase) GetReasons() []identity.Reasoner {
+	return bb.Reasons
+}
+
+func (bb BackupBase) GetSnapshotID() manifest.ID {
+	return bb.ItemDataSnapshot.ID
+}
+
 func (bb BackupBase) GetSnapshotTag(key string) (string, bool) {
 	k, _ := makeTagKV(key)
 	v, ok := bb.ItemDataSnapshot.Tags[k]

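These two getters exist so kopia.BackupBase satisfies the ReasonAndSnapshotIDer interface added to operations/inject later in this diff. A hypothetical compile-time assertion, not part of the diff, makes the relationship explicit:

```go
// hypothetical check; it would live in code that imports both
// internal/kopia and internal/operations/inject.
var _ inject.ReasonAndSnapshotIDer = kopia.BackupBase{}
```
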
@@ -376,11 +376,15 @@ func getDir(
 		return nil, clues.Wrap(ErrNoRestorePath, "getting directory").WithClues(ctx)
 	}
 
+	toGet := dirPath.PopFront()
+
+	ctx = clues.Add(ctx, "entry_path", toGet)
+
 	// GetNestedEntry handles nil properly.
 	e, err := snapshotfs.GetNestedEntry(
 		ctx,
 		snapshotRoot,
-		encodeElements(dirPath.PopFront().Elements()...))
+		encodeElements(toGet.Elements()...))
 	if err != nil {
 		if isErrEntryNotFound(err) {
 			err = clues.Stack(data.ErrNotFound, err).WithClues(ctx)

@@ -8,7 +8,6 @@ import (
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/diagnostics"
-	"github.com/alcionai/corso/src/internal/kopia"
 	kinject "github.com/alcionai/corso/src/internal/kopia/inject"
 	"github.com/alcionai/corso/src/internal/m365/graph"
 	"github.com/alcionai/corso/src/internal/m365/service/exchange"
@@ -179,7 +178,7 @@ func verifyBackupInputs(sels selectors.Selector, cachedIDs []string) error {
 func (ctrl *Controller) GetMetadataPaths(
 	ctx context.Context,
 	r kinject.RestoreProducer,
-	base kopia.BackupBase,
+	base inject.ReasonAndSnapshotIDer,
 	errs *fault.Bus,
 ) ([]path.RestorePaths, error) {
 	var (
@@ -187,12 +186,12 @@ func (ctrl *Controller) GetMetadataPaths(
 		err error
 	)
 
-	for _, reason := range base.Reasons {
+	for _, reason := range base.GetReasons() {
 		filePaths := [][]string{}
 
 		switch true {
 		case reason.Service() == path.GroupsService && reason.Category() == path.LibrariesCategory:
-			filePaths, err = groups.MetadataFiles(ctx, reason, r, base.ItemDataSnapshot.ID, errs)
+			filePaths, err = groups.MetadataFiles(ctx, reason, r, base.GetSnapshotID(), errs)
 			if err != nil {
 				return nil, err
 			}

@@ -72,7 +72,88 @@ func NewCollections(
 	}
 }
 
-func deserializeMetadata(
+func deserializeAndValidateMetadata(
 	ctx context.Context,
 	cols []data.RestoreCollection,
+	fb *fault.Bus,
 ) (map[string]string, map[string]map[string]string, bool, error) {
+	deltas, prevs, canUse, err := DeserializeMetadata(ctx, cols)
+	if err != nil || !canUse {
+		return deltas, prevs, false, clues.Stack(err).OrNil()
+	}
+
+	// Go through and remove delta tokens if we didn't have any paths for them
+	// or one or more paths are empty (incorrect somehow). This will ensure we
+	// don't accidentally try to pull in delta results when we should have
+	// enumerated everything instead.
+	//
+	// Loop over the set of previous deltas because it's alright to have paths
+	// without a delta but not to have a delta without paths. This way ensures
+	// we check at least all the path sets for the deltas we have.
+	for drive := range deltas {
+		ictx := clues.Add(ctx, "drive_id", drive)
+
+		paths := prevs[drive]
+		if len(paths) == 0 {
+			logger.Ctx(ictx).Info("dropping drive delta due to 0 prev paths")
+			delete(deltas, drive)
+		}
+
+		// Drives have only a single delta token. If we find any folder that
+		// seems like the path is bad we need to drop the entire token and start
+		// fresh. Since we know the token will be gone we can also stop checking
+		// for other possibly incorrect folder paths.
+		for _, prevPath := range paths {
+			if len(prevPath) == 0 {
+				logger.Ctx(ictx).Info("dropping drive delta due to 0 len path")
+				delete(deltas, drive)
+
+				break
+			}
+		}
+	}
+
+	alertIfPrevPathsHaveCollisions(ctx, prevs, fb)
+
+	return deltas, prevs, canUse, nil
+}

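The extracted validation is easiest to see on toy data. A self-contained sketch of the rule, with illustrative inputs and the logging and alerting omitted:

```go
package main

import "fmt"

func main() {
	deltas := map[string]string{"drive-1": "token-1", "drive-2": "token-2"}
	prevs := map[string]map[string]string{
		"drive-1": {"folder-a": "/root/docs"},
		"drive-2": {"folder-b": ""}, // malformed: empty previous path
	}

	// a delta token survives only if the drive has previous paths and
	// every one of them is non-empty; otherwise the next incremental
	// falls back to a full delta enumeration for that drive.
	for drive := range deltas {
		paths := prevs[drive]
		if len(paths) == 0 {
			delete(deltas, drive)
			continue
		}

		for _, p := range paths {
			if len(p) == 0 {
				delete(deltas, drive)
				break
			}
		}
	}

	fmt.Println(deltas) // map[drive-1:token-1]
}
```
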
+func alertIfPrevPathsHaveCollisions(
+	ctx context.Context,
+	prevs map[string]map[string]string,
+	fb *fault.Bus,
+) {
+	for driveID, folders := range prevs {
+		prevPathCollisions := map[string]string{}
+
+		for fid, prev := range folders {
+			if otherID, collision := prevPathCollisions[prev]; collision {
+				ctx = clues.Add(
+					ctx,
+					"collision_folder_id_1", fid,
+					"collision_folder_id_2", otherID,
+					"collision_drive_id", driveID,
+					"collision_prev_path", path.LoggableDir(prev))
+
+				fb.AddAlert(ctx, fault.NewAlert(
+					fault.AlertPreviousPathCollision,
+					"", // no namespace
+					"", // no item id
+					"previousPaths",
+					map[string]any{
+						"collision_folder_id_1": fid,
+						"collision_folder_id_2": otherID,
+						"collision_drive_id":    driveID,
+						"collision_prev_path":   prev,
+					}))
+			}
+
+			prevPathCollisions[prev] = fid
+		}
+	}
+}
+
+func DeserializeMetadata(
 	ctx context.Context,
 	cols []data.RestoreCollection,
 ) (map[string]string, map[string]map[string]string, bool, error) {

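Collision detection inverts each drive's folder-id-to-path map: if two folder IDs claim the same previous path, an alert fires. A toy illustration of the invariant being checked (illustrative data; map iteration order varies, but the duplicate is always caught):

```go
package main

import "fmt"

func main() {
	folders := map[string]string{
		"folder-a": "/root/docs",
		"folder-b": "/root/docs", // same prev path as folder-a: collision
		"folder-c": "/root/pics",
	}

	seen := map[string]string{} // prev path -> first folder id seen
	for fid, prev := range folders {
		if otherID, ok := seen[prev]; ok {
			fmt.Printf("collision on %s between %s and %s\n", prev, fid, otherID)
			continue
		}

		seen[prev] = fid
	}
}
```
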
@@ -96,7 +177,7 @@ func deserializeMetadata(
 	for breakLoop := false; !breakLoop; {
 		select {
 		case <-ctx.Done():
-			return nil, nil, false, clues.Wrap(ctx.Err(), "deserialzing previous backup metadata").WithClues(ctx)
+			return nil, nil, false, clues.Wrap(ctx.Err(), "deserializing previous backup metadata").WithClues(ctx)
 
 		case item, ok := <-items:
 			if !ok {
@@ -137,32 +218,6 @@ func deserializeMetadata(
 			}
 		}
 	}
 
-	// Go through and remove delta tokens if we didn't have any paths for them
-	// or one or more paths are empty (incorrect somehow). This will ensure we
-	// don't accidentally try to pull in delta results when we should have
-	// enumerated everything instead.
-	//
-	// Loop over the set of previous deltas because it's alright to have paths
-	// without a delta but not to have a delta without paths. This way ensures
-	// we check at least all the path sets for the deltas we have.
-	for drive := range prevDeltas {
-		paths := prevFolders[drive]
-		if len(paths) == 0 {
-			delete(prevDeltas, drive)
-		}
-
-		// Drives have only a single delta token. If we find any folder that
-		// seems like the path is bad we need to drop the entire token and start
-		// fresh. Since we know the token will be gone we can also stop checking
-		// for other possibly incorrect folder paths.
-		for _, prevPath := range paths {
-			if len(prevPath) == 0 {
-				delete(prevDeltas, drive)
-				break
-			}
-		}
-	}
-
 	// if reads from items failed, return empty but no error
@@ -215,7 +270,7 @@ func (c *Collections) Get(
 	ssmb *prefixmatcher.StringSetMatchBuilder,
 	errs *fault.Bus,
 ) ([]data.BackupCollection, bool, error) {
-	prevDriveIDToDelta, oldPrevPathsByDriveID, canUsePrevBackup, err := deserializeMetadata(ctx, prevMetadata)
+	deltasByDriveID, prevPathsByDriveID, canUsePrevBackup, err := deserializeAndValidateMetadata(ctx, prevMetadata, errs)
 	if err != nil {
 		return nil, false, err
 	}
@@ -224,7 +279,7 @@ func (c *Collections) Get(
 
 	driveTombstones := map[string]struct{}{}
 
-	for driveID := range oldPrevPathsByDriveID {
+	for driveID := range prevPathsByDriveID {
 		driveTombstones[driveID] = struct{}{}
 	}
 
@@ -257,8 +312,8 @@ func (c *Collections) Get(
 			"drive_name", clues.Hide(driveName))
 
 		excludedItemIDs = map[string]struct{}{}
-		oldPrevPaths = oldPrevPathsByDriveID[driveID]
-		prevDeltaLink = prevDriveIDToDelta[driveID]
+		oldPrevPaths = prevPathsByDriveID[driveID]
+		prevDeltaLink = deltasByDriveID[driveID]
 
 		// packagePaths is keyed by folder paths to a parent directory
 		// which is marked as a package by its driveItem GetPackage
@@ -437,6 +492,8 @@ func (c *Collections) Get(
 		collections = append(collections, coll)
 	}
 
+	alertIfPrevPathsHaveCollisions(ctx, driveIDToPrevPaths, errs)
+
 	// add metadata collections
 	pathPrefix, err := c.handler.MetadataPathPrefix(c.tenantID)
 	if err != nil {
@@ -996,13 +1053,13 @@ func includePath(ctx context.Context, dsc dirScopeChecker, folderPath path.Path)
 }
 
 func updatePath(paths map[string]string, id, newPath string) {
-	oldPath := paths[id]
-	if len(oldPath) == 0 {
+	currPath := paths[id]
+	if len(currPath) == 0 {
 		paths[id] = newPath
 		return
 	}
 
-	if oldPath == newPath {
+	if currPath == newPath {
 		return
 	}
 
@@ -1011,10 +1068,10 @@ func updatePath(paths map[string]string, id, newPath string) {
 	// other components should take care of that. We do need to ensure that the
 	// resulting map contains all folders though so we know the next time around.
 	for folderID, p := range paths {
-		if !strings.HasPrefix(p, oldPath) {
+		if !strings.HasPrefix(p, currPath) {
			continue
 		}
 
-		paths[folderID] = strings.Replace(p, oldPath, newPath, 1)
+		paths[folderID] = strings.Replace(p, currPath, newPath, 1)
 	}
 }

(File diff suppressed because it is too large; not reproduced here.)

src/internal/m365/collection/drive/debug.go (new file, 32 lines)
@@ -0,0 +1,32 @@
+package drive
+
+import (
+	"context"
+
+	"github.com/alcionai/clues"
+
+	"github.com/alcionai/corso/src/internal/data"
+	bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
+	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/store"
+)
+
+func DeserializeMetadataFiles(
+	ctx context.Context,
+	colls []data.RestoreCollection,
+) ([]store.MetadataFile, error) {
+	deltas, prevs, _, err := deserializeAndValidateMetadata(ctx, colls, fault.New(true))
+
+	files := []store.MetadataFile{
+		{
+			Name: bupMD.PreviousPathFileName,
+			Data: prevs,
+		},
+		{
+			Name: bupMD.DeltaURLsFileName,
+			Data: deltas,
+		},
+	}
+
+	return files, clues.Stack(err).OrNil()
+}

@@ -531,7 +531,7 @@ func (suite *URLCacheUnitSuite) TestGetItemProperties() {
 			pages: []mock.NextPage{
 				{Items: []models.DriveItemable{
 					fileItem("1", "file1", "root", "root", "https://dummy1.com", false),
-					driveItem("2", "folder2", "root", "root", false, true, false),
+					driveItem("2", "folder2", "root", "root", isFolder),
 				}},
 			},
 			expectedItemProps: map[string]itemProps{

src/internal/m365/collection/exchange/debug.go (new file, 17 lines)
@@ -0,0 +1,17 @@
+package exchange
+
+import (
+	"context"
+
+	"github.com/alcionai/clues"
+
+	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/pkg/store"
+)
+
+func DeserializeMetadataFiles(
+	ctx context.Context,
+	colls []data.RestoreCollection,
+) ([]store.MetadataFile, error) {
+	return nil, clues.New("TODO: needs implementation")
+}

src/internal/m365/collection/groups/debug.go (new file, 17 lines)
@@ -0,0 +1,17 @@
+package groups
+
+import (
+	"context"
+
+	"github.com/alcionai/clues"
+
+	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/pkg/store"
+)
+
+func DeserializeMetadataFiles(
+	ctx context.Context,
+	colls []data.RestoreCollection,
+) ([]store.MetadataFile, error) {
+	return nil, clues.New("TODO: needs implementation")
+}

src/internal/m365/debug.go (new file, 39 lines)
@@ -0,0 +1,39 @@
+package m365
+
+import (
+	"context"
+
+	"github.com/alcionai/clues"
+
+	"github.com/alcionai/corso/src/internal/data"
+	"github.com/alcionai/corso/src/internal/m365/collection/drive"
+	"github.com/alcionai/corso/src/internal/m365/collection/exchange"
+	"github.com/alcionai/corso/src/internal/m365/collection/groups"
+	"github.com/alcionai/corso/src/pkg/path"
+	"github.com/alcionai/corso/src/pkg/store"
+)
+
+func (ctrl *Controller) DeserializeMetadataFiles(
+	ctx context.Context,
+	colls []data.RestoreCollection,
+) ([]store.MetadataFile, error) {
+	if len(colls) == 0 {
+		return []store.MetadataFile{}, nil
+	}
+
+	// assume all collections refer to the same service
+	service := colls[0].FullPath().Service()
+
+	switch service {
+	case path.ExchangeService, path.ExchangeMetadataService:
+		return exchange.DeserializeMetadataFiles(ctx, colls)
+	case path.OneDriveService, path.OneDriveMetadataService:
+		return drive.DeserializeMetadataFiles(ctx, colls)
+	case path.SharePointService, path.SharePointMetadataService:
+		return drive.DeserializeMetadataFiles(ctx, colls)
+	case path.GroupsService, path.GroupsMetadataService:
+		return groups.DeserializeMetadataFiles(ctx, colls)
+	default:
+		return nil, clues.New("unrecognized service").With("service", service).WithClues(ctx)
+	}
+}

@@ -8,7 +8,6 @@ import (
 	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
 	"github.com/alcionai/corso/src/internal/data"
-	"github.com/alcionai/corso/src/internal/kopia"
 	kinject "github.com/alcionai/corso/src/internal/kopia/inject"
 	"github.com/alcionai/corso/src/internal/operations/inject"
 	"github.com/alcionai/corso/src/pkg/backup/details"
@@ -53,7 +52,7 @@ func (ctrl Controller) ProduceBackupCollections(
 func (ctrl *Controller) GetMetadataPaths(
 	ctx context.Context,
 	r kinject.RestoreProducer,
-	base kopia.BackupBase,
+	base inject.ReasonAndSnapshotIDer,
 	errs *fault.Bus,
 ) ([]path.RestorePaths, error) {
 	return nil, clues.New("not implemented")

@@ -3,12 +3,14 @@ package inject
 import (
 	"context"
 
+	"github.com/kopia/kopia/repo/manifest"
+
 	"github.com/alcionai/corso/src/internal/common/idname"
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
 	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/kopia"
 	"github.com/alcionai/corso/src/internal/kopia/inject"
 	"github.com/alcionai/corso/src/pkg/backup/details"
+	"github.com/alcionai/corso/src/pkg/backup/identity"
 	"github.com/alcionai/corso/src/pkg/control"
 	"github.com/alcionai/corso/src/pkg/control/repository"
 	"github.com/alcionai/corso/src/pkg/count"
@@ -37,7 +39,7 @@ type (
 	GetMetadataPaths(
 		ctx context.Context,
 		r inject.RestoreProducer,
-		base kopia.BackupBase,
+		base ReasonAndSnapshotIDer,
 		errs *fault.Bus,
 	) ([]path.RestorePaths, error)
 
@@ -125,4 +127,9 @@ type (
 		service path.ServiceType,
 	) (ServiceHandler, error)
 	}
+
+	ReasonAndSnapshotIDer interface {
+		GetReasons() []identity.Reasoner
+		GetSnapshotID() manifest.ID
+	}
 )

@@ -7,7 +7,6 @@ import (
 	"github.com/alcionai/corso/src/internal/common/prefixmatcher"
 	"github.com/alcionai/corso/src/internal/data"
-	"github.com/alcionai/corso/src/internal/kopia"
 	kinject "github.com/alcionai/corso/src/internal/kopia/inject"
 	"github.com/alcionai/corso/src/internal/m365"
 	"github.com/alcionai/corso/src/internal/operations/inject"
@@ -64,7 +63,7 @@ func (mbp *mockBackupProducer) Wait() *data.CollectionStats {
 func (mbp mockBackupProducer) GetMetadataPaths(
 	ctx context.Context,
 	r kinject.RestoreProducer,
-	base kopia.BackupBase,
+	base inject.ReasonAndSnapshotIDer,
 	errs *fault.Bus,
 ) ([]path.RestorePaths, error) {
 	ctrl := m365.Controller{}

@@ -53,7 +53,7 @@ func produceManifestsAndMetadata(
 }
 
 // getManifestsAndMetadata calls kopia to retrieve prior backup manifests and
-// metadata collections to supply backup heuristics.
+// metadata collections to supply backup information.
 func getManifestsAndMetadata(
 	ctx context.Context,
 	bf inject.BaseFinder,

@@ -4,6 +4,10 @@ import (
 	"github.com/alcionai/corso/src/cli/print"
 )
 
+const (
+	AlertPreviousPathCollision = "previous_path_collision"
+)
+
 var _ print.Printable = &Alert{}
 
 // Alerts are informational-only notifications. The purpose of alerts is to

@@ -208,7 +208,6 @@ func (e *Bus) AddAlert(ctx context.Context, a *Alert) {
 	e.logAndAddAlert(ctx, a, 1)
 }
 
 // logs the error and adds an alert.
 func (e *Bus) logAndAddAlert(ctx context.Context, a *Alert, trace int) {
 	logger.CtxStack(ctx, trace+1).
 		With("alert", a).

@@ -6,11 +6,13 @@ import (
 	"github.com/alcionai/clues"
 
+	"github.com/alcionai/corso/src/internal/data"
 	"github.com/alcionai/corso/src/internal/m365"
 	"github.com/alcionai/corso/src/internal/observe"
 	"github.com/alcionai/corso/src/internal/operations/inject"
 	"github.com/alcionai/corso/src/pkg/account"
 	"github.com/alcionai/corso/src/pkg/path"
+	"github.com/alcionai/corso/src/pkg/store"
 )
 
 type DataProvider interface {
@@ -20,6 +22,10 @@ type DataProvider interface {
 	inject.ToServiceHandler
 
 	VerifyAccess(ctx context.Context) error
+	DeserializeMetadataFiles(
+		ctx context.Context,
+		colls []data.RestoreCollection,
+	) ([]store.MetadataFile, error)
 }
 
 type DataProviderConnector interface {
@@ -30,6 +36,12 @@ type DataProviderConnector interface {
 		ctx context.Context,
 		pst path.ServiceType,
 	) error
+	// DataProvider retrieves the data provider.
+	DataProvider() DataProvider
 }
+
+func (r *repository) DataProvider() DataProvider {
+	return r.Provider
+}
 
 func (r *repository) ConnectDataProvider(

src/pkg/repository/debug.go (new file, 86 lines)
@@ -0,0 +1,86 @@
+package repository
+
+import (
+	"context"
+
+	"github.com/alcionai/clues"
+	"github.com/kopia/kopia/repo/manifest"
+
+	"github.com/alcionai/corso/src/pkg/backup/identity"
+	"github.com/alcionai/corso/src/pkg/fault"
+	"github.com/alcionai/corso/src/pkg/selectors"
+	"github.com/alcionai/corso/src/pkg/store"
+)
+
+type base struct {
+	snapshotID manifest.ID
+	reasons    []identity.Reasoner
+}
+
+func (b base) GetReasons() []identity.Reasoner {
+	return b.reasons
+}
+
+func (b base) GetSnapshotID() manifest.ID {
+	return b.snapshotID
+}
+
+// should probably turn into a NewDebug interface like we're
+// doing with the other interfaces
+type Debugger interface {
+	GetBackupMetadata(
+		ctx context.Context,
+		sel selectors.Selector,
+		backupID string,
+		errs *fault.Bus,
+	) ([]store.MetadataFile, error)
+}
+
+// GetBackupMetadata looks up the backup, resolves the metadata file paths
+// for its snapshot, and returns the deserialized metadata files.
+func (r repository) GetBackupMetadata(
+	ctx context.Context,
+	sel selectors.Selector,
+	backupID string,
+	errs *fault.Bus,
+) ([]store.MetadataFile, error) {
+	bup, err := r.Backup(ctx, backupID)
+	if err != nil {
+		return nil, clues.Wrap(err, "looking up backup")
+	}
+
+	sel = sel.SetDiscreteOwnerIDName(bup.ResourceOwnerID, bup.ResourceOwnerName)
+
+	reasons, err := sel.Reasons(r.Account.ID(), false)
+	if err != nil {
+		return nil, clues.Wrap(err, "constructing lookup parameters")
+	}
+
+	var (
+		rp = r.dataLayer
+		dp = r.DataProvider()
+	)
+
+	paths, err := dp.GetMetadataPaths(
+		ctx,
+		rp,
+		&base{manifest.ID(bup.SnapshotID), reasons},
+		fault.New(true))
+	if err != nil {
+		return nil, clues.Wrap(err, "retrieving metadata files")
+	}
+
+	colls, err := rp.ProduceRestoreCollections(
+		ctx,
+		bup.SnapshotID,
+		paths,
+		nil,
+		fault.New(true))
+	if err != nil {
+		return nil, clues.Wrap(err, "looking up metadata file content")
+	}
+
+	files, err := dp.DeserializeMetadataFiles(ctx, colls)
+
+	return files, clues.Wrap(err, "deserializing metadata file content").OrNil()
+}

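End to end, the new path runs CLI runner -> repository.GetBackupMetadata -> DataProvider.GetMetadataPaths -> ProduceRestoreCollections -> DeserializeMetadataFiles. A hedged sketch of a caller, assuming an already-connected repository (connection setup omitted):

```go
package debugexample

import (
	"context"
	"fmt"

	"github.com/alcionai/clues"

	"github.com/alcionai/corso/src/pkg/fault"
	"github.com/alcionai/corso/src/pkg/repository"
	"github.com/alcionai/corso/src/pkg/selectors"
)

// printBackupMetadata is a sketch only: r must already be connected, and
// sel scoped to the backup's service (e.g. selectors.NewOneDriveBackup).
func printBackupMetadata(
	ctx context.Context,
	r repository.Repositoryer,
	sel selectors.Selector,
	backupID string,
) error {
	files, err := r.GetBackupMetadata(ctx, sel, backupID, fault.New(true))
	if err != nil {
		return clues.Wrap(err, "retrieving metadata files")
	}

	for _, f := range files {
		fmt.Printf("%s %s\n%v\n", f.Name, f.Path, f.Data)
	}

	return nil
}
```
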
@@ -37,6 +37,7 @@ type Repositoryer interface {
 	BackupGetter
 	Restorer
 	Exporter
+	Debugger
 	DataProviderConnector
 
 	Initialize(

src/pkg/store/metadata.go (new file, 12 lines)
@@ -0,0 +1,12 @@
+package store
+
+// MetadataFile holds a standard representation of a
+// metadata file. Primarily used for debugging purposes.
+type MetadataFile struct {
+	Name string `json:"name"`
+	Path string `json:"path"`
+	Data any    `json:"data"`
+}
+
+// TODO: printable support
+// var _ print.Printable = &MetadataFile{}

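For reference, the JSON shape those struct tags produce; a standalone sketch with a local mirror of the type (field values illustrative):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// local mirror of store.MetadataFile, for illustration only.
type MetadataFile struct {
	Name string `json:"name"`
	Path string `json:"path"`
	Data any    `json:"data"`
}

func main() {
	f := MetadataFile{
		Name: "previouspaths", // illustrative; real names come from bupMD constants
		Data: map[string]string{"folder-a": "/root/docs"},
	}

	bs, _ := json.MarshalIndent(f, "", "  ")
	fmt.Println(string(bs))
	// {
	//   "name": "previouspaths",
	//   "path": "",
	//   "data": {
	//     "folder-a": "/root/docs"
	//   }
	// }
}
```
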