Compare commits

...

30 Commits

Author SHA1 Message Date
Ashlie Martinez
addb979541 Create a repo lineage command
Starts a web server and serves an SVG with the graph of lineage info.
2023-10-06 11:41:20 -07:00
Ashlie Martinez
e513c0b144 Helper functions to build dot graph
Take the set of connected components from the lineage function and build
a set of dot nodes/edges from them.
2023-10-06 11:40:30 -07:00
Ashlie Martinez
d648f68f3a Repo level functions to get lineage
Create a set of functions (not very well thought out) that get backups
from the repo and then create connected components from the backups
based on lineage info.

Fill in info for deleted backups on a best-effort basis.
2023-10-06 11:37:44 -07:00
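The repo-level helpers themselves aren't shown in this compare. As a rough illustration of the grouping step described above, connected components over an undirected lineage adjacency map could be computed with a BFS like the sketch below (names and the adjacency representation are assumptions, not the actual implementation):

func connectedComponents(adj map[string][]string) [][]string {
	// Track which backup IDs have already been assigned to a component.
	seen := map[string]bool{}

	var comps [][]string

	for start := range adj {
		if seen[start] {
			continue
		}

		seen[start] = true
		queue := []string{start}

		var comp []string

		// BFS over undirected lineage edges; everything reachable from
		// start belongs to the same component.
		for len(queue) > 0 {
			cur := queue[0]
			queue = queue[1:]
			comp = append(comp, cur)

			for _, next := range adj[cur] {
				if !seen[next] {
					seen[next] = true
					queue = append(queue, next)
				}
			}
		}

		comps = append(comps, comp)
	}

	return comps
}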
Ashlie Martinez
6be14da682 Additional filters for selecting backups 2023-10-06 11:36:00 -07:00
Ashlie Martinez
1867a13f4d Export some consts/functions
Needed to allow backup filtering. Would be better to have a function
that returns the values based on a Reason or something.
2023-10-06 11:35:03 -07:00
Ashlie Martinez
dab4d7f0c7 Fixup other tests 2023-10-05 13:07:36 -07:00
Ashlie Martinez
2e69a97e86 E2E test for backup reason tags 2023-10-05 13:07:23 -07:00
Ashlie Martinez
5c6d46a581 Populate reason info in tags 2023-10-05 13:07:09 -07:00
Ashlie Martinez
9b8ee4f3f6 Add way to extract reason from tags
Helper functions to store reason information as tags in a backup model
and extract them. Also add tests for this.
2023-10-05 13:06:05 -07:00
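The helper signatures aren't visible in this compare, but together with the later "Refine serialized service/category" commit the goal is a clean round trip through string tags. A minimal sketch, assuming a "/" separator like the one the lineage command later uses for display:

import (
	"fmt"
	"strings"
)

// Hypothetical: store a Reason's service/category as one tag value and split
// it back apart during deserialization. The key layout and separator are
// assumptions, not the actual tag format.
func encodeReasonTag(service, category string) string {
	return service + "/" + category
}

func decodeReasonTag(v string) (service, category string, err error) {
	parts := strings.SplitN(v, "/", 2)
	if len(parts) != 2 {
		return "", "", fmt.Errorf("malformed reason tag %q", v)
	}

	return parts[0], parts[1], nil
}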
Ashlie Martinez
0ebd6c8eec Minor refactor of tag keys and error generation 2023-10-05 12:36:54 -07:00
Ashlie Martinez
0e5f5bde49 More Exchange tests for lineage
Check that the lineage for the base is empty.

This can be squashed.
2023-10-05 12:35:33 -07:00
Ashlie Martinez
1ad135bd93 Fix shadowing lint errors
Rename errs -> bus when referring to the fault.Bus.
2023-10-05 12:34:40 -07:00
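For context on the lint errors: once the new common errs package (added below) is imported into a file, a local variable named errs shadows the package in that scope, and selectors like errs.NotFound stop resolving. Renaming the fault.Bus values avoids the collision; a minimal sketch:

import (
	"github.com/alcionai/clues"

	"github.com/alcionai/corso/src/internal/common/errs"
	"github.com/alcionai/corso/src/pkg/fault"
)

// With a parameter named errs, the selector errs.NotFound inside the body
// would resolve to the *fault.Bus value and fail to compile; naming it bus
// keeps the errs package reachable.
func fetch(bus *fault.Bus) error {
	return clues.Stack(errs.NotFound)
}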
Ashlie Martinez
0ce35f4f62 Fixup references to sentinel error 2023-10-05 12:21:57 -07:00
Ashlie Martinez
d2253145bf Move ErrNotFound to common package
Will help avoid future import cycles.
2023-10-05 10:51:33 -07:00
Ashlie Martinez
bb5700917b Basic check that lineage works
Add a basic test for lineage info on some Exchange integration tests.
2023-10-04 15:57:31 -07:00
Ashlie Martinez
8131b6d388 Uncomment tests
Unclear why they were disabled in the first place.
2023-10-04 15:57:16 -07:00
Ashlie Martinez
59b0fb3ebc Tests for deserializing lineage info 2023-10-04 15:56:53 -07:00
Ashlie Martinez
08a7d435e3 Switch package for tests
Will allow access to the Reason serializer.
2023-10-04 15:56:23 -07:00
Ashlie Martinez
2295c5cc20 Add a way to get lineage info from a backup
Add a helper function that deserializes persisted lineage information
and returns it in a nicer form.
2023-10-04 15:55:18 -07:00
Ashlie Martinez
d1fba10aeb Refine serialized service/category
Refine the persisted service/category format slightly so that we can
break them apart again during deserialization.

Should be squashed into a previous commit.
2023-10-04 15:54:09 -07:00
Ashlie Martinez
e38ecc38fe Tests for serializing service/category
Should be squashed into a prior commit.
2023-10-04 15:52:58 -07:00
Ashlie Martinez
405ce1548d Fix references to NewReason
Update package path for calls.
2023-10-04 15:43:09 -07:00
Ashlie Martinez
72269441ea Move reason struct to avoid cycles 2023-10-04 15:41:44 -07:00
Ashlie Martinez
ffcc4e1bb8 Wire up setting lineage on backup models
Also minor test fixups.
2023-10-04 13:46:52 -07:00
Ashlie Martinez
ad5d77f6f8 Add backup lineage info to model
When making a backup model, add the set of merge and assist backups that
were used. Each merge or assist base also records the reason it was
selected.
2023-10-04 13:46:52 -07:00
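The new model fields themselves aren't visible in this compare; a hypothetical shape for the lineage info described above, with names invented for illustration:

// Sketch only; the real field and type names aren't shown in these diffs.
type baseRef struct {
	BackupID string   `json:"backupID"`
	Reasons  []string `json:"reasons"` // why this base was selected
}

type lineage struct {
	MergeBases  []baseRef `json:"mergeBases"`
	AssistBases []baseRef `json:"assistBases"`
}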
Ashlie Martinez
44c2b7fb68 Minor naming adjustment for backup op
Update the name of backup bases from "mans" to "bases" so it's more
descriptive.
2023-10-04 13:46:11 -07:00
Ashlie Martinez
cd4905c0ad Update kopia tests for new model struct 2023-10-04 13:38:50 -07:00
Ashlie Martinez
7f438533f8 Rename variable holding protected resource info
Rename the backup model variables that hold the protected resource ID and
name. The variable names have changed from ResourceOwner[ID|Name] to
ProtectedResource[ID|Name].

The previous variable of the form ProtectedResourceX in the model wasn't
getting populated and never had any data persisted for it, even though
other code sometimes relied on it having a value.

The current update ** DOES NOT ** change the JSON tag associated with
the renamed variable. This means that Go's stdlib JSON serialization
will continue to use the old name. Thus there's no need for additional
code to handle previously serialized backup models.
2023-10-04 13:30:53 -07:00
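A minimal sketch of the tag-preserving rename described above (the exact tag string is an assumption):

// The Go field gets the new name while the JSON tag keeps the legacy key, so
// encoding/json continues to read and write previously persisted models.
type backupModel struct { // illustrative stand-in for the real model type
	ProtectedResourceID string `json:"resourceOwnerID,omitempty"` // was ResourceOwnerID
}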
Ashlie Martinez
65af82f0f5 Fix function, variable, and type references
Fix the package specifier for function, variable, and type references
since the package path changed for them.
2023-10-04 13:24:38 -07:00
Ashlie Martinez
216f29888f Move BackupBases interface and structs definitions
Move the interface and struct definitions from the kopia package to the
backup package so that we can initialize the backup model with lineage
information sourced from BackupBases in a later PR.
2023-10-04 13:19:15 -07:00
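The mechanics of the move show up in the last two diffs below: the interface declaration now lives in the backup package, and kopia keeps the unexported implementation plus a compile-time conformance check (var _ backup.BackupBases = &backupBases{}). Schematically, with the method set trimmed:

// package backup — declares the interface where it's consumed.
type BackupBases interface {
	Backups() []BackupEntry
	MergeBases() []ManifestEntry
}

// package kopia — imports backup (never the reverse), so the cycle is broken:
//
//	var _ backup.BackupBases = &backupBases{}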
51 changed files with 2012 additions and 667 deletions

View File

@ -12,8 +12,8 @@ import (
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/graph" "github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
@ -174,7 +174,7 @@ func genericCreateCommand(
) error { ) error {
var ( var (
bIDs []string bIDs []string
errs = []error{} bus = []error{}
) )
for _, discSel := range selectorSet { for _, discSel := range selectorSet {
@ -187,7 +187,7 @@ func genericCreateCommand(
bo, err := r.NewBackupWithLookup(ictx, discSel, ins) bo, err := r.NewBackupWithLookup(ictx, discSel, ins)
if err != nil { if err != nil {
errs = append(errs, clues.Wrap(err, owner).WithClues(ictx)) bus = append(bus, clues.Wrap(err, owner).WithClues(ictx))
Errf(ictx, "%v\n", err) Errf(ictx, "%v\n", err)
continue continue
@ -208,7 +208,7 @@ func genericCreateCommand(
continue continue
} }
errs = append(errs, clues.Wrap(err, owner).WithClues(ictx)) bus = append(bus, clues.Wrap(err, owner).WithClues(ictx))
Errf(ictx, "%v\n", err) Errf(ictx, "%v\n", err)
continue continue
@ -235,10 +235,10 @@ func genericCreateCommand(
backup.PrintAll(ctx, bups) backup.PrintAll(ctx, bups)
if len(errs) > 0 { if len(bus) > 0 {
sb := fmt.Sprintf("%d of %d backups failed:\n", len(errs), len(selectorSet)) sb := fmt.Sprintf("%d of %d backups failed:\n", len(bus), len(selectorSet))
for i, e := range errs { for i, e := range bus {
logger.CtxErr(ctx, e).Errorf("Backup %d of %d failed", i+1, len(selectorSet)) logger.CtxErr(ctx, e).Errorf("Backup %d of %d failed", i+1, len(selectorSet))
sb += "∙ " + e.Error() + "\n" sb += "∙ " + e.Error() + "\n"
} }
@ -305,13 +305,13 @@ func genericListCommand(
defer utils.CloseRepo(ctx, r) defer utils.CloseRepo(ctx, r)
if len(bID) > 0 { if len(bID) > 0 {
fe, b, errs := r.GetBackupErrors(ctx, bID) fe, b, bus := r.GetBackupErrors(ctx, bID)
if errs.Failure() != nil { if bus.Failure() != nil {
if errors.Is(errs.Failure(), data.ErrNotFound) { if errors.Is(bus.Failure(), errs.NotFound) {
return Only(ctx, clues.New("No backup exists with the id "+bID)) return Only(ctx, clues.New("No backup exists with the id "+bID))
} }
return Only(ctx, clues.Wrap(errs.Failure(), "Failed to list backup id "+bID)) return Only(ctx, clues.Wrap(bus.Failure(), "Failed to list backup id "+bID))
} }
b.Print(ctx) b.Print(ctx)
@ -367,21 +367,21 @@ func genericDetailsCore(
sel.Configure(selectors.Config{OnlyMatchItemNames: true}) sel.Configure(selectors.Config{OnlyMatchItemNames: true})
d, _, errs := bg.GetBackupDetails(ctx, backupID) d, _, bus := bg.GetBackupDetails(ctx, backupID)
// TODO: log/track recoverable errors // TODO: log/track recoverable errors
if errs.Failure() != nil { if bus.Failure() != nil {
if errors.Is(errs.Failure(), data.ErrNotFound) { if errors.Is(bus.Failure(), errs.NotFound) {
return nil, clues.New("no backup exists with the id " + backupID) return nil, clues.New("no backup exists with the id " + backupID)
} }
return nil, clues.Wrap(errs.Failure(), "Failed to get backup details in the repository") return nil, clues.Wrap(bus.Failure(), "Failed to get backup details in the repository")
} }
if opts.SkipReduce { if opts.SkipReduce {
return d, nil return d, nil
} }
d, err := sel.Reduce(ctx, d, errs) d, err := sel.Reduce(ctx, d, bus)
if err != nil { if err != nil {
return nil, clues.Wrap(err, "filtering backup details to selection") return nil, clues.Wrap(err, "filtering backup details to selection")
} }

View File

@ -11,7 +11,7 @@ import (
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/export" "github.com/alcionai/corso/src/pkg/export"
@ -93,7 +93,7 @@ func runExport(
expColl, err := eo.Run(ctx) expColl, err := eo.Run(ctx)
if err != nil { if err != nil {
if errors.Is(err, data.ErrNotFound) { if errors.Is(err, errs.NotFound) {
return Only(ctx, clues.New("Backup or backup details missing for id "+backupID)) return Only(ctx, clues.New("Backup or backup details missing for id "+backupID))
} }

View File

@ -1,9 +1,14 @@
package repo package repo
import ( import (
"context"
"fmt"
"net/http"
"strings" "strings"
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/goccy/go-graphviz"
"github.com/goccy/go-graphviz/cgraph"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
@ -11,13 +16,16 @@ import (
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/pkg/control/repository" "github.com/alcionai/corso/src/pkg/control/repository"
"github.com/alcionai/corso/src/pkg/logger"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
repo "github.com/alcionai/corso/src/pkg/repository"
) )
const ( const (
initCommand = "init" initCommand = "init"
connectCommand = "connect" connectCommand = "connect"
maintenanceCommand = "maintenance" maintenanceCommand = "maintenance"
lineageCommand = "lineage"
) )
var ( var (
@ -39,12 +47,14 @@ func AddCommands(cmd *cobra.Command) {
initCmd = initCmd() initCmd = initCmd()
connectCmd = connectCmd() connectCmd = connectCmd()
maintenanceCmd = maintenanceCmd() maintenanceCmd = maintenanceCmd()
lineageCmd = lineageCmd()
) )
cmd.AddCommand(repoCmd) cmd.AddCommand(repoCmd)
repoCmd.AddCommand(initCmd) repoCmd.AddCommand(initCmd)
repoCmd.AddCommand(connectCmd) repoCmd.AddCommand(connectCmd)
repoCmd.AddCommand(maintenanceCmd) repoCmd.AddCommand(maintenanceCmd)
repoCmd.AddCommand(lineageCmd)
flags.AddMaintenanceModeFlag(maintenanceCmd) flags.AddMaintenanceModeFlag(maintenanceCmd)
flags.AddForceMaintenanceFlag(maintenanceCmd) flags.AddForceMaintenanceFlag(maintenanceCmd)
@ -175,3 +185,242 @@ func getMaintenanceType(t string) (repository.MaintenanceType, error) {
return res, nil return res, nil
} }
func lineageCmd() *cobra.Command {
return &cobra.Command{
Use: lineageCommand,
Short: "Run maintenance on an existing repository",
Long: `Run maintenance on an existing repository to optimize performance and storage use`,
RunE: handleLineageCmd,
Args: cobra.NoArgs,
}
}
func handleLineageCmd(cmd *cobra.Command, args []string) error {
ctx := cmd.Context()
r, acct, err := utils.GetAccountAndConnect(ctx, cmd, path.UnknownService)
if err != nil {
return print.Only(ctx, err)
}
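// NOTE: the protected resource ID below is hardcoded; presumably a
// placeholder while this command is experimental.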
roots, err := r.BackupLineage(
ctx,
acct.Repo.Account.ID(),
"c8006e42-5b84-4d37-a027-14f3d09fe6c7",
path.UnknownService,
path.UnknownCategory,
)
if err != nil {
return print.Only(ctx, err)
}
for _, root := range roots {
fmt.Printf("Tree rooted with %v\n", root.Label)
printTree(root, 0)
}
if err := drawTree(ctx, roots); err != nil {
return print.Only(ctx, err)
}
return nil
}
func printTree(root *repo.BackupNode, indent int) {
if root == nil {
return
}
fmt.Printf(strings.Repeat("\t", indent)+"%+v\n", root)
for _, child := range root.Children {
printTree(child.BackupNode, indent+1)
}
}
func drawTree(ctx context.Context, roots []*repo.BackupNode) error {
const port = ":6060"
g := graphviz.New()
graph, err := g.Graph()
if err != nil {
return clues.Wrap(err, "getting graph")
}
defer func() {
graph.Close()
g.Close()
}()
graph.SetRankDir(cgraph.LRRank)
for _, root := range roots {
if err := buildGraph(ctx, graph, root); err != nil {
return clues.Wrap(err, "building graph")
}
}
fmt.Printf("starting http server on port %s", port)
// Start an http server that serves the rendered image.
http.HandleFunc(
"/",
func(w http.ResponseWriter, _ *http.Request) {
w.Header().Set("Content-Type", "image/svg+xml")
if err := g.Render(graph, graphviz.SVG, w); err != nil {
logger.CtxErr(ctx, err).Info("sending svg to server")
}
},
)
return clues.Stack(http.ListenAndServe(port, nil)).OrNil()
}
func buildGraph(
ctx context.Context,
graph *cgraph.Graph,
root *repo.BackupNode,
) error {
// Add all nodes to the map and track them by ID. The root is just mapped as
// "root" and has the resource ID.
allNodes := map[string]*cgraph.Node{}
if err := addNodes(graph, root, allNodes); err != nil {
return clues.Stack(err)
}
// Go through and add edges between all nodes. The edge info is based on the
// Reason contained in the edge struct. To keep from adding edges multiple
// times, track which nodes we've already processed. This is required because
// there can be multiple paths to a node.
visitedNodes := map[string]struct{}{}
if err := addEdges(graph, root, allNodes, visitedNodes); err != nil {
return clues.Stack(err)
}
return nil
}
func addNodes(
graph *cgraph.Graph,
node *repo.BackupNode,
allNodes map[string]*cgraph.Node,
) error {
if node == nil {
return nil
}
if _, ok := allNodes[node.Label]; ok {
return nil
}
// Need unique keys for nodes so use the backupID.
n, err := graph.CreateNode(node.Label)
if err != nil {
return clues.Wrap(err, "creating node").With("backup_id", node.Label)
}
// Set the tooltip to show the backup's Reasons and its base type.
var toolTip string
if node.Deleted {
toolTip += "This backup was deleted, Reasons are a best guess!\n"
}
toolTip += "BackupID: " + node.Label + "\n"
switch node.Type {
case repo.MergeNode:
toolTip += "Base Type: merge\n"
case repo.AssistNode:
toolTip += "Base Type: assist\n"
}
toolTip += fmt.Sprintf("Created At: %v\n", node.Created)
var reasonStrings []string
for _, reason := range node.Reasons {
reasonStrings = append(
reasonStrings,
fmt.Sprintf("%s/%s", reason.Service(), reason.Category()),
)
}
n.
SetLabel(strings.Join(reasonStrings, "\n")).
SetTooltip(toolTip).
SetStyle(cgraph.FilledNodeStyle).
SetFillColor("white")
if node.Deleted {
n.SetFillColor("indianred")
}
if node.Type == repo.AssistNode {
n.SetFillColor("grey")
}
allNodes[node.Label] = n
for _, child := range node.Children {
if err := addNodes(graph, child.BackupNode, allNodes); err != nil {
return clues.Stack(err)
}
}
return nil
}
func addEdges(
graph *cgraph.Graph,
node *repo.BackupNode,
allNodes map[string]*cgraph.Node,
visitedNodes map[string]struct{},
) error {
if node == nil {
return nil
}
if _, ok := visitedNodes[node.Label]; ok {
return nil
}
visitedNodes[node.Label] = struct{}{}
n := allNodes[node.Label]
for _, child := range node.Children {
var edgeReasons []string
for _, reason := range child.Reasons {
edgeReasons = append(
edgeReasons,
fmt.Sprintf("%s/%s", reason.Service(), reason.Category()),
)
}
edgeLabel := strings.Join(edgeReasons, ",\n")
e, err := graph.CreateEdge(edgeLabel, n, allNodes[child.Label])
if err != nil {
return clues.Wrap(err, "adding edge").With(
"parent", node.Label,
"child", child.Label,
)
}
e.SetDir(cgraph.ForwardDir).SetLabel(edgeLabel).SetTooltip(" ")
if err := addEdges(graph, child.BackupNode, allNodes, visitedNodes); err != nil {
return clues.Stack(err)
}
}
return nil
}
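With the command running, the rendered lineage graph is reachable in a browser at http://localhost:6060/ (per the port constant and the "/" handler above).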

View File

@ -10,7 +10,7 @@ import (
"github.com/alcionai/corso/src/cli/flags" "github.com/alcionai/corso/src/cli/flags"
. "github.com/alcionai/corso/src/cli/print" . "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/cli/utils" "github.com/alcionai/corso/src/cli/utils"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/pkg/count" "github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@ -114,7 +114,7 @@ func runRestore(
ds, err := ro.Run(ctx) ds, err := ro.Run(ctx)
if err != nil { if err != nil {
if errors.Is(err, data.ErrNotFound) { if errors.Is(err, errs.NotFound) {
return Only(ctx, clues.New("Backup or backup details missing for id "+flags.BackupIDFV)) return Only(ctx, clues.New("Backup or backup details missing for id "+flags.BackupIDFV))
} }

View File

@ -10,6 +10,7 @@ require (
github.com/armon/go-metrics v0.4.1 github.com/armon/go-metrics v0.4.1
github.com/aws/aws-xray-sdk-go v1.8.2 github.com/aws/aws-xray-sdk-go v1.8.2
github.com/cenkalti/backoff/v4 v4.2.1 github.com/cenkalti/backoff/v4 v4.2.1
github.com/goccy/go-graphviz v0.1.1
github.com/google/uuid v1.3.1 github.com/google/uuid v1.3.1
github.com/h2non/gock v1.2.0 github.com/h2non/gock v1.2.0
github.com/kopia/kopia v0.13.0 github.com/kopia/kopia v0.13.0
@ -44,9 +45,11 @@ require (
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect
github.com/andybalholm/brotli v1.0.5 // indirect github.com/andybalholm/brotli v1.0.5 // indirect
github.com/aws/aws-sdk-go v1.45.0 // indirect github.com/aws/aws-sdk-go v1.45.0 // indirect
github.com/fogleman/gg v1.3.0 // indirect
github.com/fsnotify/fsnotify v1.6.0 // indirect github.com/fsnotify/fsnotify v1.6.0 // indirect
github.com/gofrs/flock v0.8.1 // indirect github.com/gofrs/flock v0.8.1 // indirect
github.com/golang-jwt/jwt/v5 v5.0.0 // indirect github.com/golang-jwt/jwt/v5 v5.0.0 // indirect
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect
github.com/google/go-cmp v0.5.9 // indirect github.com/google/go-cmp v0.5.9 // indirect
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect
github.com/hashicorp/cronexpr v1.1.2 // indirect github.com/hashicorp/cronexpr v1.1.2 // indirect
@ -62,6 +65,7 @@ require (
github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/valyala/fasthttp v1.48.0 // indirect github.com/valyala/fasthttp v1.48.0 // indirect
go.opentelemetry.io/otel/metric v1.18.0 // indirect go.opentelemetry.io/otel/metric v1.18.0 // indirect
golang.org/x/image v0.6.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20230807174057-1744710a1577 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20230807174057-1744710a1577 // indirect
) )

View File

@ -98,6 +98,8 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/corona10/goimagehash v1.0.2 h1:pUfB0LnsJASMPGEZLj7tGY251vF+qLGqOgEP4rUs6kA=
github.com/corona10/goimagehash v1.0.2/go.mod h1:/l9umBhvcHQXVtQO1V6Gp1yD20STawkhRnnX0D1bvVI=
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE= github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE=
github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec= github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec=
@ -116,6 +118,8 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m
github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po=
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/fogleman/gg v1.3.0 h1:/7zJX8F6AaYQc57WQCyN9cAIz+4bCJGO9B+dyW29am8=
github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY= github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY=
github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=
@ -133,6 +137,8 @@ github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbV
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/goccy/go-graphviz v0.1.1 h1:MGrsnzBxTyt7KG8FhHsFPDTGvF7UaQMmSa6A610DqPg=
github.com/goccy/go-graphviz v0.1.1/go.mod h1:lpnwvVDjskayq84ZxG8tGCPeZX/WxP88W+OJajh+gFk=
github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk= github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw=
@ -142,6 +148,8 @@ github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOW
github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/golang-jwt/jwt/v5 v5.0.0 h1:1n1XNM9hk7O9mnQoNBGolZvzebBQ7p93ULHRc28XJUE= github.com/golang-jwt/jwt/v5 v5.0.0 h1:1n1XNM9hk7O9mnQoNBGolZvzebBQ7p93ULHRc28XJUE=
github.com/golang-jwt/jwt/v5 v5.0.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang-jwt/jwt/v5 v5.0.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g=
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
@ -325,6 +333,8 @@ github.com/natefinch/atomic v1.0.1 h1:ZPYKxkqQOx3KZ+RsbnP/YsgvxWQPGxjC0oBt2AhwV0
github.com/natefinch/atomic v1.0.1/go.mod h1:N/D/ELrljoqDyT3rZrsUmtsuzvHkeB/wWjHV22AZRbM= github.com/natefinch/atomic v1.0.1/go.mod h1:N/D/ELrljoqDyT3rZrsUmtsuzvHkeB/wWjHV22AZRbM=
github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4= github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4=
github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32/go.mod h1:9wM+0iRr9ahx58uYLpLIr5fm8diHn0JbqRycJi6w0Ms= github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32/go.mod h1:9wM+0iRr9ahx58uYLpLIr5fm8diHn0JbqRycJi6w0Ms=
github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5 h1:BvoENQQU+fZ9uukda/RzCAL/191HHwJA5b13R6diVlY=
github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY= github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY=
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pelletier/go-toml/v2 v2.0.9 h1:uH2qQXheeefCCkuBBSLi7jCiSmj3VRh2+Goq2N7Xxu0= github.com/pelletier/go-toml/v2 v2.0.9 h1:uH2qQXheeefCCkuBBSLi7jCiSmj3VRh2+Goq2N7Xxu0=
@ -486,6 +496,8 @@ golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjs
golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k= golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.6.0 h1:bR8b5okrPI3g/gyZakLZHeWxAR8Dn5CyxXv1hLH5g/4=
golang.org/x/image v0.6.0/go.mod h1:MXLdDR43H7cDJq5GEGXEVeeNhPgi+YYEQ2pC1byI1x0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@ -508,6 +520,7 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -546,6 +559,7 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8= golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@ -568,6 +582,7 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -624,6 +639,7 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -633,6 +649,8 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@ -688,6 +706,7 @@ golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4f
golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ= golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

View File

@ -0,0 +1,5 @@
package errs
import "github.com/alcionai/clues"
var NotFound = clues.New("not found")
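As the surrounding diffs show, callers match the sentinel with errors.Is even after wrapping. A self-contained sketch of the pattern:

package main

import (
	"errors"
	"fmt"

	"github.com/alcionai/clues"
)

var NotFound = clues.New("not found")

func lookup(id string) error {
	// Wrapping with clues keeps the sentinel matchable via errors.Is.
	return clues.Stack(NotFound).With("backup_id", id)
}

func main() {
	err := lookup("deadbeef")
	fmt.Println(errors.Is(err, NotFound)) // true
}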

View File

@ -3,15 +3,12 @@ package data
import ( import (
"context" "context"
"github.com/alcionai/clues" "github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
var ErrNotFound = clues.New("not found")
type CollectionState int type CollectionState int
const ( const (
@ -27,13 +24,13 @@ type FetchRestoreCollection struct {
} }
// NoFetchRestoreCollection is a wrapper for a Collection that returns // NoFetchRestoreCollection is a wrapper for a Collection that returns
// ErrNotFound for all Fetch calls. // errs.NotFound for all Fetch calls.
type NoFetchRestoreCollection struct { type NoFetchRestoreCollection struct {
Collection Collection
} }
func (c NoFetchRestoreCollection) FetchItemByName(context.Context, string) (Item, error) { func (c NoFetchRestoreCollection) FetchItemByName(context.Context, string) (Item, error) {
return nil, ErrNotFound return nil, errs.NotFound
} }
// StateOf lets us figure out the state of the collection from the // StateOf lets us figure out the state of the collection from the

View File

@ -9,6 +9,7 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/spatialcurrent/go-lazy/pkg/lazy" "github.com/spatialcurrent/go-lazy/pkg/lazy"
"github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/common/readers" "github.com/alcionai/corso/src/internal/common/readers"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -136,14 +137,14 @@ func NewUnindexedLazyItem(
itemGetter ItemDataGetter, itemGetter ItemDataGetter,
itemID string, itemID string,
modTime time.Time, modTime time.Time,
errs *fault.Bus, bus *fault.Bus,
) *unindexedLazyItem { ) *unindexedLazyItem {
return &unindexedLazyItem{ return &unindexedLazyItem{
ctx: ctx, ctx: ctx,
id: itemID, id: itemID,
itemGetter: itemGetter, itemGetter: itemGetter,
modTime: modTime, modTime: modTime,
errs: errs, bus: bus,
} }
} }
@ -157,7 +158,7 @@ type unindexedLazyItem struct {
ctx context.Context ctx context.Context
mu sync.Mutex mu sync.Mutex
id string id string
errs *fault.Bus bus *fault.Bus
itemGetter ItemDataGetter itemGetter ItemDataGetter
modTime time.Time modTime time.Time
@ -185,7 +186,7 @@ func (i *unindexedLazyItem) ToReader() io.ReadCloser {
i.mu.Lock() i.mu.Lock()
defer i.mu.Unlock() defer i.mu.Unlock()
reader, info, delInFlight, err := i.itemGetter.GetData(i.ctx, i.errs) reader, info, delInFlight, err := i.itemGetter.GetData(i.ctx, i.bus)
if err != nil { if err != nil {
return nil, clues.Stack(err) return nil, clues.Stack(err)
} }
@ -232,7 +233,7 @@ func NewLazyItem(
itemGetter ItemDataGetter, itemGetter ItemDataGetter,
itemID string, itemID string,
modTime time.Time, modTime time.Time,
errs *fault.Bus, bus *fault.Bus,
) *lazyItem { ) *lazyItem {
return &lazyItem{ return &lazyItem{
unindexedLazyItem: NewUnindexedLazyItem( unindexedLazyItem: NewUnindexedLazyItem(
@ -240,7 +241,7 @@ func NewLazyItem(
itemGetter, itemGetter,
itemID, itemID,
modTime, modTime,
errs), bus),
} }
} }
@ -259,7 +260,7 @@ func (i *lazyItem) Info() (details.ItemInfo, error) {
defer i.mu.Unlock() defer i.mu.Unlock()
if i.delInFlight { if i.delInFlight {
return details.ItemInfo{}, clues.Stack(ErrNotFound).WithClues(i.ctx) return details.ItemInfo{}, clues.Stack(errs.NotFound).WithClues(i.ctx)
} else if i.info == nil { } else if i.info == nil {
return details.ItemInfo{}, clues.New("requesting ItemInfo before data retrieval"). return details.ItemInfo{}, clues.New("requesting ItemInfo before data retrieval").
WithClues(i.ctx) WithClues(i.ctx)

View File

@ -12,6 +12,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/common/readers" "github.com/alcionai/corso/src/internal/common/readers"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
@ -191,12 +192,12 @@ func (mid *mockItemDataGetter) check(t *testing.T, expectCalled bool) {
func (mid *mockItemDataGetter) GetData( func (mid *mockItemDataGetter) GetData(
ctx context.Context, ctx context.Context,
errs *fault.Bus, bus *fault.Bus,
) (io.ReadCloser, *details.ItemInfo, bool, error) { ) (io.ReadCloser, *details.ItemInfo, bool, error) {
mid.getCalled = true mid.getCalled = true
if mid.err != nil { if mid.err != nil {
errs.AddRecoverable(ctx, mid.err) bus.AddRecoverable(ctx, mid.err)
} }
return mid.reader, mid.info, mid.delInFlight, mid.err return mid.reader, mid.info, mid.delInFlight, mid.err
@ -287,7 +288,7 @@ func (suite *ItemUnitSuite) TestLazyItem() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
errs := fault.New(true) bus := fault.New(true)
defer test.mid.check(t, true) defer test.mid.check(t, true)
@ -296,7 +297,7 @@ func (suite *ItemUnitSuite) TestLazyItem() {
test.mid, test.mid,
id, id,
now, now,
errs) bus)
assert.Equal(t, id, item.ID(), "ID") assert.Equal(t, id, item.ID(), "ID")
assert.False(t, item.Deleted(), "deleted") assert.False(t, item.Deleted(), "deleted")
@ -324,7 +325,7 @@ func (suite *ItemUnitSuite) TestLazyItem() {
_, err = item.Info() _, err = item.Info()
test.infoErr(t, err, "Info(): %v", clues.ToCore(err)) test.infoErr(t, err, "Info(): %v", clues.ToCore(err))
e := errs.Errors() e := bus.Errors()
if !test.expectBusErr { if !test.expectBusErr {
assert.Nil(t, e.Failure, "hard failure") assert.Nil(t, e.Failure, "hard failure")
@ -349,12 +350,12 @@ func (suite *ItemUnitSuite) TestLazyItem_DeletedInFlight() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
errs := fault.New(true) bus := fault.New(true)
mid := &mockItemDataGetter{delInFlight: true} mid := &mockItemDataGetter{delInFlight: true}
defer mid.check(t, true) defer mid.check(t, true)
item := data.NewLazyItem(ctx, mid, id, now, errs) item := data.NewLazyItem(ctx, mid, id, now, bus)
assert.Equal(t, id, item.ID(), "ID") assert.Equal(t, id, item.ID(), "ID")
assert.False(t, item.Deleted(), "deleted") assert.False(t, item.Deleted(), "deleted")
@ -376,9 +377,9 @@ func (suite *ItemUnitSuite) TestLazyItem_DeletedInFlight() {
assert.Empty(t, readData, "read data") assert.Empty(t, readData, "read data")
_, err = item.Info() _, err = item.Info()
assert.ErrorIs(t, err, data.ErrNotFound, "Info() error") assert.ErrorIs(t, err, errs.NotFound, "Info() error")
e := errs.Errors() e := bus.Errors()
assert.Nil(t, e.Failure, "hard failure") assert.Nil(t, e.Failure, "hard failure")
assert.Empty(t, e.Recovered, "recovered") assert.Empty(t, e.Recovered, "recovered")
@ -395,12 +396,12 @@ func (suite *ItemUnitSuite) TestLazyItem_InfoBeforeReadErrors() {
ctx, flush := tester.NewContext(t) ctx, flush := tester.NewContext(t)
defer flush() defer flush()
errs := fault.New(true) bus := fault.New(true)
mid := &mockItemDataGetter{} mid := &mockItemDataGetter{}
defer mid.check(t, false) defer mid.check(t, false)
item := data.NewLazyItem(ctx, mid, id, now, errs) item := data.NewLazyItem(ctx, mid, id, now, bus)
assert.Equal(t, id, item.ID(), "ID") assert.Equal(t, id, item.ID(), "ID")
assert.False(t, item.Deleted(), "deleted") assert.False(t, item.Deleted(), "deleted")

View File

@ -9,6 +9,7 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/common/readers" "github.com/alcionai/corso/src/internal/common/readers"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
@ -92,13 +93,13 @@ type Collection struct {
AuxItems map[string]data.Item AuxItems map[string]data.Item
} }
func (c Collection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item { func (c Collection) Items(ctx context.Context, bus *fault.Bus) <-chan data.Item {
ch := make(chan data.Item) ch := make(chan data.Item)
go func() { go func() {
defer close(ch) defer close(ch)
el := errs.Local() el := bus.Local()
for _, item := range c.ItemData { for _, item := range c.ItemData {
it, ok := item.(*Item) it, ok := item.(*Item)
@ -112,7 +113,7 @@ func (c Collection) Items(ctx context.Context, errs *fault.Bus) <-chan data.Item
}() }()
for _, err := range c.ItemsRecoverableErrs { for _, err := range c.ItemsRecoverableErrs {
errs.AddRecoverable(ctx, err) bus.AddRecoverable(ctx, err)
} }
return ch return ch
@ -144,7 +145,7 @@ func (c Collection) FetchItemByName(
) (data.Item, error) { ) (data.Item, error) {
res := c.AuxItems[name] res := c.AuxItems[name]
if res == nil { if res == nil {
return nil, data.ErrNotFound return nil, errs.NotFound
} }
return res, nil return res, nil
@ -163,7 +164,7 @@ func (rc RestoreCollection) FetchItemByName(
) (data.Item, error) { ) (data.Item, error) {
res := rc.AuxItems[name] res := rc.AuxItems[name]
if res == nil { if res == nil {
return nil, data.ErrNotFound return nil, errs.NotFound
} }
return res, nil return res, nil
@ -206,13 +207,13 @@ type unversionedRestoreCollection struct {
func (c *unversionedRestoreCollection) Items( func (c *unversionedRestoreCollection) Items(
ctx context.Context, ctx context.Context,
errs *fault.Bus, bus *fault.Bus,
) <-chan data.Item { ) <-chan data.Item {
res := make(chan data.Item) res := make(chan data.Item)
go func() { go func() {
defer close(res) defer close(res)
for item := range c.RestoreCollection.Items(ctx, errs) { for item := range c.RestoreCollection.Items(ctx, bus) {
r, err := readers.NewVersionedRestoreReader(item.ToReader()) r, err := readers.NewVersionedRestoreReader(item.ToReader())
require.NoError(c.t, err, clues.ToCore(err)) require.NoError(c.t, err, clues.ToCore(err))
@ -248,13 +249,13 @@ type versionedBackupCollection struct {
func (c *versionedBackupCollection) Items( func (c *versionedBackupCollection) Items(
ctx context.Context, ctx context.Context,
errs *fault.Bus, bus *fault.Bus,
) <-chan data.Item { ) <-chan data.Item {
res := make(chan data.Item) res := make(chan data.Item)
go func() { go func() {
defer close(res) defer close(res)
for item := range c.BackupCollection.Items(ctx, errs) { for item := range c.BackupCollection.Items(ctx, bus) {
r, err := readers.NewVersionedBackupReader( r, err := readers.NewVersionedBackupReader(
readers.SerializationFormat{ readers.SerializationFormat{
Version: readers.DefaultSerializationVersion, Version: readers.DefaultSerializationVersion,

View File

@ -8,49 +8,27 @@ import (
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
"github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/identity" "github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
) )
// TODO(ashmrtn): Move this into some inject package. Here to avoid import var _ backup.BackupBases = &backupBases{}
// cycles.
type BackupBases interface {
// ConvertToAssistBase converts the base with the given item data snapshot ID
// from a merge base to an assist base.
ConvertToAssistBase(manifestID manifest.ID)
Backups() []BackupEntry
UniqueAssistBackups() []BackupEntry
MinBackupVersion() int
MergeBases() []ManifestEntry
DisableMergeBases()
UniqueAssistBases() []ManifestEntry
DisableAssistBases()
MergeBackupBases(
ctx context.Context,
other BackupBases,
reasonToKey func(identity.Reasoner) string,
) BackupBases
// SnapshotAssistBases returns the set of bases to use for kopia assisted
// incremental snapshot operations. It consists of the union of merge bases
// and assist bases. If DisableAssistBases has been called then it returns
// nil.
SnapshotAssistBases() []ManifestEntry
}
type backupBases struct { type backupBases struct {
// backups and mergeBases should be modified together as they relate similar // backups and mergeBases should be modified together as they relate similar
// data. // data.
backups []BackupEntry backups []backup.BackupEntry
mergeBases []ManifestEntry mergeBases []backup.ManifestEntry
assistBackups []BackupEntry assistBackups []backup.BackupEntry
assistBases []ManifestEntry assistBases []backup.ManifestEntry
// disableAssistBases denote whether any assist bases should be returned to // disableAssistBases denote whether any assist bases should be returned to
// kopia during snapshot operation. // kopia during snapshot operation.
disableAssistBases bool disableAssistBases bool
} }
func (bb *backupBases) SnapshotAssistBases() []ManifestEntry { func (bb *backupBases) SnapshotAssistBases() []backup.ManifestEntry {
if bb.disableAssistBases { if bb.disableAssistBases {
return nil return nil
} }
@ -62,14 +40,14 @@ func (bb *backupBases) SnapshotAssistBases() []ManifestEntry {
func (bb *backupBases) ConvertToAssistBase(manifestID manifest.ID) { func (bb *backupBases) ConvertToAssistBase(manifestID manifest.ID) {
var ( var (
snapshotMan ManifestEntry snapshotMan backup.ManifestEntry
base BackupEntry base backup.BackupEntry
snapFound bool snapFound bool
) )
idx := slices.IndexFunc( idx := slices.IndexFunc(
bb.mergeBases, bb.mergeBases,
func(man ManifestEntry) bool { func(man backup.ManifestEntry) bool {
return man.ID == manifestID return man.ID == manifestID
}) })
if idx >= 0 { if idx >= 0 {
@ -80,7 +58,7 @@ func (bb *backupBases) ConvertToAssistBase(manifestID manifest.ID) {
idx = slices.IndexFunc( idx = slices.IndexFunc(
bb.backups, bb.backups,
func(bup BackupEntry) bool { func(bup backup.BackupEntry) bool {
return bup.SnapshotID == string(manifestID) return bup.SnapshotID == string(manifestID)
}) })
if idx >= 0 { if idx >= 0 {
@ -95,11 +73,11 @@ func (bb *backupBases) ConvertToAssistBase(manifestID manifest.ID) {
} }
} }
func (bb backupBases) Backups() []BackupEntry { func (bb backupBases) Backups() []backup.BackupEntry {
return slices.Clone(bb.backups) return slices.Clone(bb.backups)
} }
func (bb backupBases) UniqueAssistBackups() []BackupEntry { func (bb backupBases) UniqueAssistBackups() []backup.BackupEntry {
if bb.disableAssistBases { if bb.disableAssistBases {
return nil return nil
} }
@ -123,7 +101,7 @@ func (bb *backupBases) MinBackupVersion() int {
return min return min
} }
func (bb backupBases) MergeBases() []ManifestEntry { func (bb backupBases) MergeBases() []backup.ManifestEntry {
return slices.Clone(bb.mergeBases) return slices.Clone(bb.mergeBases)
} }
@ -140,7 +118,7 @@ func (bb *backupBases) DisableMergeBases() {
bb.backups = nil bb.backups = nil
} }
func (bb backupBases) UniqueAssistBases() []ManifestEntry { func (bb backupBases) UniqueAssistBases() []backup.ManifestEntry {
if bb.disableAssistBases { if bb.disableAssistBases {
return nil return nil
} }
@ -175,9 +153,9 @@ func (bb *backupBases) DisableAssistBases() {
// MergeBase in the other BackupBases. // MergeBase in the other BackupBases.
func (bb *backupBases) MergeBackupBases( func (bb *backupBases) MergeBackupBases(
ctx context.Context, ctx context.Context,
other BackupBases, other backup.BackupBases,
reasonToKey func(reason identity.Reasoner) string, reasonToKey func(reason identity.Reasoner) string,
) BackupBases { ) backup.BackupBases {
if other == nil || (len(other.MergeBases()) == 0 && len(other.UniqueAssistBases()) == 0) { if other == nil || (len(other.MergeBases()) == 0 && len(other.UniqueAssistBases()) == 0) {
return bb return bb
} }
@ -206,7 +184,7 @@ func (bb *backupBases) MergeBackupBases(
} }
} }
var toAdd []ManifestEntry var toAdd []backup.ManifestEntry
// Calculate the set of mergeBases to pull from other into this one. // Calculate the set of mergeBases to pull from other into this one.
for _, m := range other.MergeBases() { for _, m := range other.MergeBases() {
@ -267,10 +245,10 @@ func (bb *backupBases) MergeBackupBases(
func findNonUniqueManifests( func findNonUniqueManifests(
ctx context.Context, ctx context.Context,
manifests []ManifestEntry, manifests []backup.ManifestEntry,
) map[manifest.ID]struct{} { ) map[manifest.ID]struct{} {
// ReasonKey -> manifests with that reason. // ReasonKey -> manifests with that reason.
reasons := map[string][]ManifestEntry{} reasons := map[string][]backup.ManifestEntry{}
toDrop := map[manifest.ID]struct{}{} toDrop := map[manifest.ID]struct{}{}
for _, man := range manifests { for _, man := range manifests {
@ -323,17 +301,20 @@ func findNonUniqueManifests(
return toDrop return toDrop
} }
func getBackupByID(backups []BackupEntry, bID string) (BackupEntry, bool) { func getBackupByID(
backups []backup.BackupEntry,
bID string,
) (backup.BackupEntry, bool) {
if len(bID) == 0 { if len(bID) == 0 {
return BackupEntry{}, false return backup.BackupEntry{}, false
} }
idx := slices.IndexFunc(backups, func(b BackupEntry) bool { idx := slices.IndexFunc(backups, func(b backup.BackupEntry) bool {
return string(b.ID) == bID return string(b.ID) == bID
}) })
if idx < 0 || idx >= len(backups) { if idx < 0 || idx >= len(backups) {
return BackupEntry{}, false return backup.BackupEntry{}, false
} }
return backups[idx], true return backups[idx], true
@ -356,10 +337,10 @@ func (bb *backupBases) fixupAndVerify(ctx context.Context) {
toDrop := findNonUniqueManifests(ctx, bb.mergeBases) toDrop := findNonUniqueManifests(ctx, bb.mergeBases)
var ( var (
backupsToKeep []BackupEntry backupsToKeep []backup.BackupEntry
assistBackupsToKeep []BackupEntry assistBackupsToKeep []backup.BackupEntry
mergeToKeep []ManifestEntry mergeToKeep []backup.ManifestEntry
assistToKeep []ManifestEntry assistToKeep []backup.ManifestEntry
) )
for _, man := range bb.mergeBases { for _, man := range bb.mergeBases {

View File

@ -18,10 +18,15 @@ import (
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
func makeManifest(id, incmpl, bID string, reasons ...identity.Reasoner) ManifestEntry { func makeManifest(
bIDKey, _ := makeTagKV(TagBackupID) id,
incmpl,
bID string,
reasons ...identity.Reasoner,
) backup.ManifestEntry {
bIDKey, _ := backup.MakeTagKV(TagBackupID)
return ManifestEntry{ return backup.ManifestEntry{
Manifest: &snapshot.Manifest{ Manifest: &snapshot.Manifest{
ID: manifest.ID(id), ID: manifest.ID(id),
IncompleteReason: incmpl, IncompleteReason: incmpl,
@ -57,7 +62,7 @@ func (suite *BackupBasesUnitSuite) TestMinBackupVersion() {
{ {
name: "Unsorted Backups", name: "Unsorted Backups",
bb: &backupBases{ bb: &backupBases{
backups: []BackupEntry{ backups: []backup.BackupEntry{
{ {
Backup: &backup.Backup{ Backup: &backup.Backup{
Version: 4, Version: 4,
@ -86,13 +91,13 @@ func (suite *BackupBasesUnitSuite) TestMinBackupVersion() {
} }
func (suite *BackupBasesUnitSuite) TestConvertToAssistBase() { func (suite *BackupBasesUnitSuite) TestConvertToAssistBase() {
backups := []BackupEntry{ backups := []backup.BackupEntry{
{Backup: &backup.Backup{SnapshotID: "1"}}, {Backup: &backup.Backup{SnapshotID: "1"}},
{Backup: &backup.Backup{SnapshotID: "2"}}, {Backup: &backup.Backup{SnapshotID: "2"}},
{Backup: &backup.Backup{SnapshotID: "3"}}, {Backup: &backup.Backup{SnapshotID: "3"}},
} }
merges := []ManifestEntry{ merges := []backup.ManifestEntry{
makeManifest("1", "", ""), makeManifest("1", "", ""),
makeManifest("2", "", ""), makeManifest("2", "", ""),
makeManifest("3", "", ""), makeManifest("3", "", ""),
@ -185,8 +190,8 @@ func (suite *BackupBasesUnitSuite) TestConvertToAssistBase() {
} }
expected := &backupBases{ expected := &backupBases{
backups: []BackupEntry{backups[0], backups[1]}, backups: []backup.BackupEntry{backups[0], backups[1]},
mergeBases: []ManifestEntry{merges[0], merges[1]}, mergeBases: []backup.ManifestEntry{merges[0], merges[1]},
} }
for _, i := range test.expectAssist { for _, i := range test.expectAssist {
@ -203,20 +208,20 @@ func (suite *BackupBasesUnitSuite) TestConvertToAssistBase() {
func (suite *BackupBasesUnitSuite) TestDisableMergeBases() { func (suite *BackupBasesUnitSuite) TestDisableMergeBases() {
t := suite.T() t := suite.T()
merge := []BackupEntry{ merge := []backup.BackupEntry{
{Backup: &backup.Backup{BaseModel: model.BaseModel{ID: "m1"}}}, {Backup: &backup.Backup{BaseModel: model.BaseModel{ID: "m1"}}},
{Backup: &backup.Backup{BaseModel: model.BaseModel{ID: "m2"}}}, {Backup: &backup.Backup{BaseModel: model.BaseModel{ID: "m2"}}},
} }
assist := []BackupEntry{ assist := []backup.BackupEntry{
{Backup: &backup.Backup{BaseModel: model.BaseModel{ID: "a1"}}}, {Backup: &backup.Backup{BaseModel: model.BaseModel{ID: "a1"}}},
{Backup: &backup.Backup{BaseModel: model.BaseModel{ID: "a2"}}}, {Backup: &backup.Backup{BaseModel: model.BaseModel{ID: "a2"}}},
} }
bb := &backupBases{ bb := &backupBases{
backups: slices.Clone(merge), backups: slices.Clone(merge),
mergeBases: make([]ManifestEntry, 2), mergeBases: make([]backup.ManifestEntry, 2),
assistBackups: slices.Clone(assist), assistBackups: slices.Clone(assist),
assistBases: make([]ManifestEntry, 2), assistBases: make([]backup.ManifestEntry, 2),
} }
bb.DisableMergeBases() bb.DisableMergeBases()
@ -237,10 +242,10 @@ func (suite *BackupBasesUnitSuite) TestDisableMergeBases() {
func (suite *BackupBasesUnitSuite) TestDisableAssistBases() { func (suite *BackupBasesUnitSuite) TestDisableAssistBases() {
t := suite.T() t := suite.T()
bb := &backupBases{ bb := &backupBases{
backups: make([]BackupEntry, 2), backups: make([]backup.BackupEntry, 2),
mergeBases: make([]ManifestEntry, 2), mergeBases: make([]backup.ManifestEntry, 2),
assistBases: make([]ManifestEntry, 2), assistBases: make([]backup.ManifestEntry, 2),
assistBackups: make([]BackupEntry, 2), assistBackups: make([]backup.BackupEntry, 2),
} }
bb.DisableAssistBases() bb.DisableAssistBases()
@ -270,12 +275,12 @@ func (suite *BackupBasesUnitSuite) TestMergeBackupBases() {
reasons := make([]identity.Reasoner, 0, len(i.cat)) reasons := make([]identity.Reasoner, 0, len(i.cat))
for _, c := range i.cat { for _, c := range i.cat {
reasons = append(reasons, NewReason("", ro, path.ExchangeService, c)) reasons = append(reasons, identity.NewReason("", ro, path.ExchangeService, c))
} }
m := makeManifest(baseID, "", "b"+baseID, reasons...) m := makeManifest(baseID, "", "b"+baseID, reasons...)
b := BackupEntry{ b := backup.BackupEntry{
Backup: &backup.Backup{ Backup: &backup.Backup{
BaseModel: model.BaseModel{ID: model.StableID("b" + baseID)}, BaseModel: model.BaseModel{ID: model.StableID("b" + baseID)},
SnapshotID: baseID, SnapshotID: baseID,
@ -294,12 +299,12 @@ func (suite *BackupBasesUnitSuite) TestMergeBackupBases() {
reasons := make([]identity.Reasoner, 0, len(i.cat)) reasons := make([]identity.Reasoner, 0, len(i.cat))
for _, c := range i.cat { for _, c := range i.cat {
reasons = append(reasons, NewReason("", ro, path.ExchangeService, c)) reasons = append(reasons, identity.NewReason("", ro, path.ExchangeService, c))
} }
m := makeManifest(baseID, "", "a"+baseID, reasons...) m := makeManifest(baseID, "", "a"+baseID, reasons...)
b := BackupEntry{ b := backup.BackupEntry{
Backup: &backup.Backup{ Backup: &backup.Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
ID: model.StableID("a" + baseID), ID: model.StableID("a" + baseID),
@ -528,15 +533,19 @@ func (suite *BackupBasesUnitSuite) TestMergeBackupBases() {
func (suite *BackupBasesUnitSuite) TestFixupAndVerify() { func (suite *BackupBasesUnitSuite) TestFixupAndVerify() {
ro := "resource_owner" ro := "resource_owner"
makeMan := func(pct path.CategoryType, id, incmpl, bID string) ManifestEntry { makeMan := func(
r := NewReason("", ro, path.ExchangeService, pct) pct path.CategoryType,
id, incmpl,
bID string,
) backup.ManifestEntry {
r := identity.NewReason("", ro, path.ExchangeService, pct)
return makeManifest(id, incmpl, bID, r) return makeManifest(id, incmpl, bID, r)
} }
// Make a function so tests can modify things without messing with each other. // Make a function so tests can modify things without messing with each other.
validMail1 := func() *backupBases { validMail1 := func() *backupBases {
return &backupBases{ return &backupBases{
backups: []BackupEntry{ backups: []backup.BackupEntry{
{ {
Backup: &backup.Backup{ Backup: &backup.Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
@ -547,10 +556,10 @@ func (suite *BackupBasesUnitSuite) TestFixupAndVerify() {
}, },
}, },
}, },
mergeBases: []ManifestEntry{ mergeBases: []backup.ManifestEntry{
makeMan(path.EmailCategory, "id1", "", "bid1"), makeMan(path.EmailCategory, "id1", "", "bid1"),
}, },
assistBackups: []BackupEntry{ assistBackups: []backup.BackupEntry{
{ {
Backup: &backup.Backup{ Backup: &backup.Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
@ -562,7 +571,7 @@ func (suite *BackupBasesUnitSuite) TestFixupAndVerify() {
}, },
}, },
}, },
assistBases: []ManifestEntry{ assistBases: []backup.ManifestEntry{
makeMan(path.EmailCategory, "id2", "", "bid2"), makeMan(path.EmailCategory, "id2", "", "bid2"),
}, },
} }
@ -571,7 +580,7 @@ func (suite *BackupBasesUnitSuite) TestFixupAndVerify() {
table := []struct { table := []struct {
name string name string
bb *backupBases bb *backupBases
expect BackupBases expect backup.BackupBases
}{ }{
{ {
name: "empty BaseBackups", name: "empty BaseBackups",
@ -727,11 +736,11 @@ func (suite *BackupBasesUnitSuite) TestFixupAndVerify() {
res := validMail1() res := validMail1()
res.mergeBases[0].Reasons = append( res.mergeBases[0].Reasons = append(
res.mergeBases[0].Reasons, res.mergeBases[0].Reasons,
NewReason("", ro, path.ExchangeService, path.ContactsCategory)) identity.NewReason("", ro, path.ExchangeService, path.ContactsCategory))
res.assistBases[0].Reasons = append( res.assistBases[0].Reasons = append(
res.assistBases[0].Reasons, res.assistBases[0].Reasons,
NewReason("", ro, path.ExchangeService, path.ContactsCategory)) identity.NewReason("", ro, path.ExchangeService, path.ContactsCategory))
return res return res
}(), }(),
@ -739,11 +748,11 @@ func (suite *BackupBasesUnitSuite) TestFixupAndVerify() {
res := validMail1() res := validMail1()
res.mergeBases[0].Reasons = append( res.mergeBases[0].Reasons = append(
res.mergeBases[0].Reasons, res.mergeBases[0].Reasons,
NewReason("", ro, path.ExchangeService, path.ContactsCategory)) identity.NewReason("", ro, path.ExchangeService, path.ContactsCategory))
res.assistBases[0].Reasons = append( res.assistBases[0].Reasons = append(
res.assistBases[0].Reasons, res.assistBases[0].Reasons,
NewReason("", ro, path.ExchangeService, path.ContactsCategory)) identity.NewReason("", ro, path.ExchangeService, path.ContactsCategory))
return res return res
}(), }(),
@ -769,14 +778,14 @@ func (suite *BackupBasesUnitSuite) TestFixupAndVerify() {
res := validMail1() res := validMail1()
res.backups = append( res.backups = append(
res.backups, res.backups,
BackupEntry{ backup.BackupEntry{
Backup: &backup.Backup{ Backup: &backup.Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
ID: "bid3", ID: "bid3",
}, },
}, },
}, },
BackupEntry{ backup.BackupEntry{
Backup: &backup.Backup{ Backup: &backup.Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
ID: "bid4", ID: "bid4",
@ -796,7 +805,7 @@ func (suite *BackupBasesUnitSuite) TestFixupAndVerify() {
res := validMail1() res := validMail1()
res.backups = append( res.backups = append(
res.backups, res.backups,
BackupEntry{ backup.BackupEntry{
Backup: &backup.Backup{ Backup: &backup.Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
ID: "bid4", ID: "bid4",
@ -818,7 +827,7 @@ func (suite *BackupBasesUnitSuite) TestFixupAndVerify() {
res := validMail1() res := validMail1()
res.assistBackups = append( res.assistBackups = append(
res.assistBackups, res.assistBackups,
BackupEntry{ backup.BackupEntry{
Backup: &backup.Backup{ Backup: &backup.Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
ID: "bid3", ID: "bid3",
@ -826,7 +835,7 @@ func (suite *BackupBasesUnitSuite) TestFixupAndVerify() {
}, },
}, },
}, },
BackupEntry{ backup.BackupEntry{
Backup: &backup.Backup{ Backup: &backup.Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
ID: "bid4", ID: "bid4",
@ -847,7 +856,7 @@ func (suite *BackupBasesUnitSuite) TestFixupAndVerify() {
res := validMail1() res := validMail1()
res.assistBackups = append( res.assistBackups = append(
res.assistBackups, res.assistBackups,
BackupEntry{ backup.BackupEntry{
Backup: &backup.Backup{ Backup: &backup.Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
ID: "bid4", ID: "bid4",

View File

@ -17,68 +17,6 @@ import (
"github.com/alcionai/corso/src/pkg/store" "github.com/alcionai/corso/src/pkg/store"
) )
const (
// Kopia does not do comparisons properly for empty tags right now so add some
// placeholder value to them.
defaultTagValue = "0"
// Kopia CLI prefixes all user tags with "tag:"[1]. Maintaining this will
// ensure we don't accidentally take reserved tags and that tags can be
// displayed with kopia CLI.
// (permalinks)
// [1] https://github.com/kopia/kopia/blob/05e729a7858a6e86cb48ba29fb53cb6045efce2b/cli/command_snapshot_create.go#L169
userTagPrefix = "tag:"
)
func NewReason(
tenant, resource string,
service path.ServiceType,
category path.CategoryType,
) identity.Reasoner {
return reason{
tenant: tenant,
resource: resource,
service: service,
category: category,
}
}
type reason struct {
// tenant appears here so that when this is moved to an inject package nothing
// needs to change. However, kopia itself is blind to the fields in the reason
// struct and relies on helper functions to get the information it needs.
tenant string
resource string
service path.ServiceType
category path.CategoryType
}
func (r reason) Tenant() string {
return r.tenant
}
func (r reason) ProtectedResource() string {
return r.resource
}
func (r reason) Service() path.ServiceType {
return r.service
}
func (r reason) Category() path.CategoryType {
return r.category
}
func (r reason) SubtreePath() (path.Path, error) {
p, err := path.BuildPrefix(
r.Tenant(),
r.ProtectedResource(),
r.Service(),
r.Category())
return p, clues.Wrap(err, "building path").OrNil()
}
func tagKeys(r identity.Reasoner) []string { func tagKeys(r identity.Reasoner) []string {
return []string{ return []string{
r.ProtectedResource(), r.ProtectedResource(),
@ -91,49 +29,15 @@ func reasonKey(r identity.Reasoner) string {
return r.ProtectedResource() + r.Service().String() + r.Category().String() return r.ProtectedResource() + r.Service().String() + r.Category().String()
} }
type BackupEntry struct {
*backup.Backup
Reasons []identity.Reasoner
}
type ManifestEntry struct {
*snapshot.Manifest
// Reasons contains the ResourceOwners and Service/Categories that caused this
// snapshot to be selected as a base. We can't reuse OwnersCats here because
// it's possible some ResourceOwners will have a subset of the Categories as
// the reason for selecting a snapshot. For example:
// 1. backup user1 email,contacts -> B1
// 2. backup user1 contacts -> B2 (uses B1 as base)
// 3. backup user1 email,contacts,events (uses B1 for email, B2 for contacts)
Reasons []identity.Reasoner
}
func (me ManifestEntry) GetTag(key string) (string, bool) {
k, _ := makeTagKV(key)
v, ok := me.Tags[k]
return v, ok
}
func serviceCatString(s path.ServiceType, c path.CategoryType) string { func serviceCatString(s path.ServiceType, c path.CategoryType) string {
return s.String() + c.String() return s.String() + c.String()
} }
// MakeTagKV normalizes the provided key to protect it from clobbering
// similarly named tags from non-user input (user inputs are still open
// to collisions among each other).
// Returns the normalized key plus a default value. If you're embedding a
// key-only tag, the returned default value must be used instead of an
// empty string.
func makeTagKV(k string) (string, string) {
return userTagPrefix + k, defaultTagValue
}
func normalizeTagKVs(tags map[string]string) map[string]string { func normalizeTagKVs(tags map[string]string) map[string]string {
t2 := make(map[string]string, len(tags)) t2 := make(map[string]string, len(tags))
for k, v := range tags { for k, v := range tags {
mk, mv := makeTagKV(k) mk, mv := backup.MakeTagKV(k)
if len(v) == 0 { if len(v) == 0 {
v = mv v = mv
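For context on the tag helpers being moved out of this file: assuming the exported backup.MakeTagKV keeps the behavior of the removed makeTagKV, a minimal self-contained sketch of the normalization, including the empty-value handling from normalizeTagKVs, looks like this (local copies of the constants, not the real package):

package main

import "fmt"

// Mirrors the constants being moved; backup.MakeTagKV is assumed to
// behave the same way as the removed makeTagKV.
const (
	defaultTagValue = "0"
	userTagPrefix   = "tag:"
)

func makeTagKV(k string) (string, string) {
	return userTagPrefix + k, defaultTagValue
}

func main() {
	k, v := makeTagKV("backup-id")
	fmt.Println(k, v) // tag:backup-id 0

	// Key-only tags must use the returned default value rather than "",
	// since kopia mishandles comparisons on empty tag values.
	tags := map[string]string{"merge": ""}
	normalized := make(map[string]string, len(tags))

	for key, val := range tags {
		nk, nv := makeTagKV(key)
		if len(val) == 0 {
			val = nv
		}

		normalized[nk] = val
	}

	fmt.Println(normalized) // map[tag:merge:0]
}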
@ -172,7 +76,7 @@ func (b *baseFinder) getBackupModel(
ctx context.Context, ctx context.Context,
man *snapshot.Manifest, man *snapshot.Manifest,
) (*backup.Backup, error) { ) (*backup.Backup, error) {
k, _ := makeTagKV(TagBackupID) k, _ := backup.MakeTagKV(TagBackupID)
bID := man.Tags[k] bID := man.Tags[k]
ctx = clues.Add(ctx, "search_backup_id", bID) ctx = clues.Add(ctx, "search_backup_id", bID)
@ -380,7 +284,7 @@ func (b *baseFinder) FindBases(
ctx context.Context, ctx context.Context,
reasons []identity.Reasoner, reasons []identity.Reasoner,
tags map[string]string, tags map[string]string,
) BackupBases { ) backup.BackupBases {
var ( var (
// Backup models and item data snapshot manifests are 1:1 for bases so just // Backup models and item data snapshot manifests are 1:1 for bases so just
// track things by the backup ID. We need to track by ID so we can coalesce // track things by the backup ID. We need to track by ID so we can coalesce
@ -431,16 +335,16 @@ func (b *baseFinder) FindBases(
// Convert what we got to the format that backupBases takes right now. // Convert what we got to the format that backupBases takes right now.
// TODO(ashmrtn): Remove when backupBases has consolidated fields. // TODO(ashmrtn): Remove when backupBases has consolidated fields.
res := &backupBases{} res := &backupBases{}
bups := make([]BackupEntry, 0, len(mergeBases)) bups := make([]backup.BackupEntry, 0, len(mergeBases))
snaps := make([]ManifestEntry, 0, len(mergeBases)) snaps := make([]backup.ManifestEntry, 0, len(mergeBases))
for _, base := range mergeBases { for _, base := range mergeBases {
bups = append(bups, BackupEntry{ bups = append(bups, backup.BackupEntry{
Backup: base.Backup, Backup: base.Backup,
Reasons: base.Reasons, Reasons: base.Reasons,
}) })
snaps = append(snaps, ManifestEntry{ snaps = append(snaps, backup.ManifestEntry{
Manifest: base.ItemDataSnapshot, Manifest: base.ItemDataSnapshot,
Reasons: base.Reasons, Reasons: base.Reasons,
}) })
@ -449,16 +353,16 @@ func (b *baseFinder) FindBases(
res.backups = bups res.backups = bups
res.mergeBases = snaps res.mergeBases = snaps
bups = make([]BackupEntry, 0, len(assistBases)) bups = make([]backup.BackupEntry, 0, len(assistBases))
snaps = make([]ManifestEntry, 0, len(assistBases)) snaps = make([]backup.ManifestEntry, 0, len(assistBases))
for _, base := range assistBases { for _, base := range assistBases {
bups = append(bups, BackupEntry{ bups = append(bups, backup.BackupEntry{
Backup: base.Backup, Backup: base.Backup,
Reasons: base.Reasons, Reasons: base.Reasons,
}) })
snaps = append(snaps, ManifestEntry{ snaps = append(snaps, backup.ManifestEntry{
Manifest: base.ItemDataSnapshot, Manifest: base.ItemDataSnapshot,
Reasons: base.Reasons, Reasons: base.Reasons,
}) })
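A rough illustration of the coalescing described by the comment in the earlier FindBases hunk: because backup models and item-data snapshots are 1:1 per base, indexing by backup ID lets reasons found by separate searches merge onto a single entry. Stand-in types below, not the real corso API:

package main

import "fmt"

// Simplified stand-in: the real base pairs a backup model with its
// item-data snapshot; Reasons accumulate as more searches hit the base.
type base struct {
	BackupID string
	Reasons  []string
}

// coalesce indexes by backup ID so reasons from separate searches
// merge onto one entry instead of producing duplicate bases.
func coalesce(found []base) map[string]*base {
	byID := map[string]*base{}

	for _, f := range found {
		if cur, ok := byID[f.BackupID]; ok {
			cur.Reasons = append(cur.Reasons, f.Reasons...)
			continue
		}

		cp := f
		byID[f.BackupID] = &cp
	}

	return byID
}

func main() {
	res := coalesce([]base{
		{BackupID: "b1", Reasons: []string{"user1-email"}},
		{BackupID: "b1", Reasons: []string{"user1-events"}},
	})
	fmt.Println(res["b1"].Reasons) // [user1-email user1-events]
}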

View File

@ -10,7 +10,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup"
@ -47,22 +47,22 @@ var (
testAllUsersAllCats = []identity.Reasoner{ testAllUsersAllCats = []identity.Reasoner{
// User1 email and events. // User1 email and events.
NewReason("", testUser1, path.ExchangeService, path.EmailCategory), identity.NewReason("", testUser1, path.ExchangeService, path.EmailCategory),
NewReason("", testUser1, path.ExchangeService, path.EventsCategory), identity.NewReason("", testUser1, path.ExchangeService, path.EventsCategory),
// User2 email and events. // User2 email and events.
NewReason("", testUser2, path.ExchangeService, path.EmailCategory), identity.NewReason("", testUser2, path.ExchangeService, path.EmailCategory),
NewReason("", testUser2, path.ExchangeService, path.EventsCategory), identity.NewReason("", testUser2, path.ExchangeService, path.EventsCategory),
// User3 email and events. // User3 email and events.
NewReason("", testUser3, path.ExchangeService, path.EmailCategory), identity.NewReason("", testUser3, path.ExchangeService, path.EmailCategory),
NewReason("", testUser3, path.ExchangeService, path.EventsCategory), identity.NewReason("", testUser3, path.ExchangeService, path.EventsCategory),
} }
testAllUsersMail = []identity.Reasoner{ testAllUsersMail = []identity.Reasoner{
NewReason("", testUser1, path.ExchangeService, path.EmailCategory), identity.NewReason("", testUser1, path.ExchangeService, path.EmailCategory),
NewReason("", testUser2, path.ExchangeService, path.EmailCategory), identity.NewReason("", testUser2, path.ExchangeService, path.EmailCategory),
NewReason("", testUser3, path.ExchangeService, path.EmailCategory), identity.NewReason("", testUser3, path.ExchangeService, path.EmailCategory),
} }
testUser1Mail = []identity.Reasoner{ testUser1Mail = []identity.Reasoner{
NewReason("", testUser1, path.ExchangeService, path.EmailCategory), identity.NewReason("", testUser1, path.ExchangeService, path.EmailCategory),
} }
) )
@ -91,7 +91,7 @@ func (mg mockEmptyModelGetter) GetBackup(
context.Context, context.Context,
model.StableID, model.StableID,
) (*backup.Backup, error) { ) (*backup.Backup, error) {
return nil, data.ErrNotFound return nil, errs.NotFound
} }
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
@ -121,7 +121,7 @@ func newManifestInfo(
structTags := make(map[string]string, len(tags)) structTags := make(map[string]string, len(tags))
for _, t := range tags { for _, t := range tags {
tk, _ := makeTagKV(t) tk, _ := backup.MakeTagKV(t)
structTags[tk] = "" structTags[tk] = ""
} }
@ -141,7 +141,7 @@ func newManifestInfo(
} }
if len(backupID) > 0 { if len(backupID) > 0 {
k, _ := makeTagKV(TagBackupID) k, _ := backup.MakeTagKV(TagBackupID)
res.metadata.Labels[k] = backupID res.metadata.Labels[k] = backupID
res.man.Tags[k] = backupID res.man.Tags[k] = backupID
} }
@ -269,7 +269,7 @@ func (mg mockModelGetter) GetBackup(
return &res, nil return &res, nil
} }
return nil, data.ErrNotFound return nil, errs.NotFound
} }
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
@ -294,7 +294,7 @@ func (suite *BaseFinderUnitSuite) TestNoResult_NoBackupsOrSnapshots() {
bg: mockEmptyModelGetter{}, bg: mockEmptyModelGetter{},
} }
reasons := []identity.Reasoner{ reasons := []identity.Reasoner{
NewReason("", "a-user", path.ExchangeService, path.EmailCategory), identity.NewReason("", "a-user", path.ExchangeService, path.EmailCategory),
} }
bb := bf.FindBases(ctx, reasons, nil) bb := bf.FindBases(ctx, reasons, nil)
@ -314,7 +314,7 @@ func (suite *BaseFinderUnitSuite) TestNoResult_ErrorListingSnapshots() {
bg: mockEmptyModelGetter{}, bg: mockEmptyModelGetter{},
} }
reasons := []identity.Reasoner{ reasons := []identity.Reasoner{
NewReason("", "a-user", path.ExchangeService, path.EmailCategory), identity.NewReason("", "a-user", path.ExchangeService, path.EmailCategory),
} }
bb := bf.FindBases(ctx, reasons, nil) bb := bf.FindBases(ctx, reasons, nil)
@ -561,14 +561,14 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
}, },
expectedBaseReasons: map[int][]identity.Reasoner{ expectedBaseReasons: map[int][]identity.Reasoner{
0: { 0: {
NewReason("", testUser1, path.ExchangeService, path.EmailCategory), identity.NewReason("", testUser1, path.ExchangeService, path.EmailCategory),
NewReason("", testUser2, path.ExchangeService, path.EmailCategory), identity.NewReason("", testUser2, path.ExchangeService, path.EmailCategory),
NewReason("", testUser3, path.ExchangeService, path.EmailCategory), identity.NewReason("", testUser3, path.ExchangeService, path.EmailCategory),
}, },
1: { 1: {
NewReason("", testUser1, path.ExchangeService, path.EventsCategory), identity.NewReason("", testUser1, path.ExchangeService, path.EventsCategory),
NewReason("", testUser2, path.ExchangeService, path.EventsCategory), identity.NewReason("", testUser2, path.ExchangeService, path.EventsCategory),
NewReason("", testUser3, path.ExchangeService, path.EventsCategory), identity.NewReason("", testUser3, path.ExchangeService, path.EventsCategory),
}, },
}, },
backupData: []backupInfo{ backupData: []backupInfo{
@ -611,20 +611,20 @@ func (suite *BaseFinderUnitSuite) TestGetBases() {
}, },
expectedBaseReasons: map[int][]identity.Reasoner{ expectedBaseReasons: map[int][]identity.Reasoner{
2: { 2: {
NewReason("", testUser1, path.ExchangeService, path.EmailCategory), identity.NewReason("", testUser1, path.ExchangeService, path.EmailCategory),
NewReason("", testUser2, path.ExchangeService, path.EmailCategory), identity.NewReason("", testUser2, path.ExchangeService, path.EmailCategory),
NewReason("", testUser1, path.ExchangeService, path.EventsCategory), identity.NewReason("", testUser1, path.ExchangeService, path.EventsCategory),
NewReason("", testUser2, path.ExchangeService, path.EventsCategory), identity.NewReason("", testUser2, path.ExchangeService, path.EventsCategory),
}, },
}, },
expectedAssistReasons: map[int][]identity.Reasoner{ expectedAssistReasons: map[int][]identity.Reasoner{
0: { 0: {
NewReason("", testUser1, path.ExchangeService, path.EventsCategory), identity.NewReason("", testUser1, path.ExchangeService, path.EventsCategory),
NewReason("", testUser2, path.ExchangeService, path.EventsCategory), identity.NewReason("", testUser2, path.ExchangeService, path.EventsCategory),
}, },
1: { 1: {
NewReason("", testUser1, path.ExchangeService, path.EmailCategory), identity.NewReason("", testUser1, path.ExchangeService, path.EmailCategory),
NewReason("", testUser2, path.ExchangeService, path.EmailCategory), identity.NewReason("", testUser2, path.ExchangeService, path.EmailCategory),
}, },
}, },
backupData: []backupInfo{ backupData: []backupInfo{
@ -1078,7 +1078,7 @@ func (suite *BaseFinderUnitSuite) TestFindBases_CustomTags() {
func checkManifestEntriesMatch( func checkManifestEntriesMatch(
t *testing.T, t *testing.T,
retSnaps []ManifestEntry, retSnaps []backup.ManifestEntry,
allExpected []manifestInfo, allExpected []manifestInfo,
expectedIdxsAndReasons map[int][]identity.Reasoner, expectedIdxsAndReasons map[int][]identity.Reasoner,
) { ) {
@ -1119,7 +1119,7 @@ func checkManifestEntriesMatch(
func checkBackupEntriesMatch( func checkBackupEntriesMatch(
t *testing.T, t *testing.T,
retBups []BackupEntry, retBups []backup.BackupEntry,
allExpected []backupInfo, allExpected []backupInfo,
expectedIdxsAndReasons map[int][]identity.Reasoner, expectedIdxsAndReasons map[int][]identity.Reasoner,
) { ) {

View File

@ -12,7 +12,7 @@ import (
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
@ -99,7 +99,7 @@ func cleanupOrphanedData(
toDelete[snap.ID] = struct{}{} toDelete[snap.ID] = struct{}{}
k, _ := makeTagKV(TagBackupCategory) k, _ := backup.MakeTagKV(TagBackupCategory)
if _, ok := snap.Labels[k]; ok { if _, ok := snap.Labels[k]; ok {
dataSnaps[snap.ID] = snap dataSnaps[snap.ID] = snap
continue continue
@ -160,7 +160,7 @@ func cleanupOrphanedData(
model.BackupSchema, model.BackupSchema,
bup.ModelStoreID, bup.ModelStoreID,
&bm); err != nil { &bm); err != nil {
if !errors.Is(err, data.ErrNotFound) { if !errors.Is(err, errs.NotFound) {
return clues.Wrap(err, "getting backup model"). return clues.Wrap(err, "getting backup model").
With("search_backup_id", bup.ID) With("search_backup_id", bup.ID)
} }
@ -315,7 +315,7 @@ func transferTags(snap *manifest.EntryMetadata, bup *backup.Backup) error {
skipTags := map[string]struct{}{} skipTags := map[string]struct{}{}
for _, k := range skipKeys { for _, k := range skipKeys {
key, _ := makeTagKV(k) key, _ := backup.MakeTagKV(k)
skipTags[key] = struct{}{} skipTags[key] = struct{}{}
} }
@ -324,7 +324,7 @@ func transferTags(snap *manifest.EntryMetadata, bup *backup.Backup) error {
// backups. // backups.
roid := bup.ProtectedResourceID roid := bup.ProtectedResourceID
roidK, _ := makeTagKV(roid) roidK, _ := backup.MakeTagKV(roid)
skipTags[roidK] = struct{}{} skipTags[roidK] = struct{}{}
// This is hacky, but right now we don't have a good way to get only the // This is hacky, but right now we don't have a good way to get only the
@ -336,11 +336,11 @@ func transferTags(snap *manifest.EntryMetadata, bup *backup.Backup) error {
// Convert them to the newer format that we'd like to have where the // Convert them to the newer format that we'd like to have where the
// service/category tags have the form "sc-<service><category>". // service/category tags have the form "sc-<service><category>".
for tag := range snap.Labels { for tag := range snap.Labels {
if _, ok := skipTags[tag]; ok || !strings.HasPrefix(tag, userTagPrefix) { if _, ok := skipTags[tag]; ok || !strings.HasPrefix(tag, backup.LegacyUserTagPrefix) {
continue continue
} }
bup.Tags[strings.Replace(tag, userTagPrefix, serviceCatTagPrefix, 1)] = "0" bup.Tags[strings.Replace(tag, backup.LegacyUserTagPrefix, serviceCatTagPrefix, 1)] = "0"
} }
return nil return nil
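A small sketch of the tag rewrite above, assuming serviceCatTagPrefix is "sc-" as the nearby comment suggests; the constants here are local stand-ins for backup.LegacyUserTagPrefix and friends:

package main

import (
	"fmt"
	"strings"
)

const (
	legacyUserTagPrefix = "tag:" // stand-in for backup.LegacyUserTagPrefix
	serviceCatTagPrefix = "sc-"  // assumed value, per the comment above
)

func main() {
	labels := map[string]string{
		"tag:ExchangeServiceemail": "0", // legacy service/category tag
		"tag:backup-id":            "b1",
	}
	skip := map[string]struct{}{"tag:backup-id": {}}

	converted := map[string]string{}
	for tag := range labels {
		// Only legacy-prefixed, non-skipped tags are rewritten.
		if _, ok := skip[tag]; ok || !strings.HasPrefix(tag, legacyUserTagPrefix) {
			continue
		}

		converted[strings.Replace(tag, legacyUserTagPrefix, serviceCatTagPrefix, 1)] = "0"
	}

	fmt.Println(converted) // map[sc-ExchangeServiceemail:0]
}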

View File

@ -11,7 +11,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup"
@ -116,7 +116,7 @@ func (ms mockStorer) GetWithModelStoreID(
} }
} }
return clues.Stack(data.ErrNotFound) return clues.Stack(errs.NotFound)
} }
func (ms mockStorer) DeleteWithModelStoreIDs( func (ms mockStorer) DeleteWithModelStoreIDs(
@ -136,7 +136,7 @@ type backupRes struct {
} }
func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() { func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
backupTag, _ := makeTagKV(TagBackupCategory) backupTag, _ := backup.MakeTagKV(TagBackupCategory)
// Current backup and snapshots. // Current backup and snapshots.
bupCurrent := func() *backup.Backup { bupCurrent := func() *backup.Backup {
@ -331,13 +331,13 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
// Add the given reasons. // Add the given reasons.
for _, r := range reasons { for _, r := range reasons {
for _, k := range tagKeys(r) { for _, k := range tagKeys(r) {
key, _ := makeTagKV(k) key, _ := backup.MakeTagKV(k)
res.Labels[key] = "0" res.Labels[key] = "0"
} }
} }
// Also add other common reasons on item data snapshots. // Also add other common reasons on item data snapshots.
k, _ := makeTagKV(TagBackupCategory) k, _ := backup.MakeTagKV(TagBackupCategory)
res.Labels[k] = "0" res.Labels[k] = "0"
return &res return &res
@ -369,13 +369,6 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
return &res return &res
} }
backupWithLegacyResource := func(protectedResource string, b *backup.Backup) *backup.Backup {
res := *b
res.ResourceOwnerID = protectedResource
return &res
}
table := []struct { table := []struct {
name string name string
snapshots []*manifest.EntryMetadata snapshots []*manifest.EntryMetadata
@ -525,11 +518,11 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
{bup: bupCurrent()}, {bup: bupCurrent()},
{ {
bup: bupLegacy(), bup: bupLegacy(),
err: data.ErrNotFound, err: errs.NotFound,
}, },
{ {
bup: bupNoDetails(), bup: bupNoDetails(),
err: data.ErrNotFound, err: errs.NotFound,
}, },
}, },
// Backup IDs are still included in here because they're added to the // Backup IDs are still included in here because they're added to the
@ -630,7 +623,7 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
backups: []backupRes{ backups: []backupRes{
{ {
bup: backupWithTime(baseTime, bupCurrent()), bup: backupWithTime(baseTime, bupCurrent()),
err: data.ErrNotFound, err: errs.NotFound,
}, },
}, },
time: baseTime, time: baseTime,
@ -648,13 +641,13 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime, snapCurrent()), manifestWithTime(baseTime, snapCurrent()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime, deetsCurrent()), manifestWithTime(baseTime, deetsCurrent()),
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime.Add(time.Second), snapCurrent2()), manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()), manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
}, },
backups: []backupRes{ backups: []backupRes{
@ -675,19 +668,19 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime, snapCurrent()), manifestWithTime(baseTime, snapCurrent()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime, deetsCurrent()), manifestWithTime(baseTime, deetsCurrent()),
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime.Add(time.Second), snapCurrent2()), manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()), manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime.Add(time.Minute), snapCurrent3()), manifestWithTime(baseTime.Add(time.Minute), snapCurrent3()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Minute), deetsCurrent3()), manifestWithTime(baseTime.Add(time.Minute), deetsCurrent3()),
}, },
backups: []backupRes{ backups: []backupRes{
@ -711,6 +704,9 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
// not ideal, but some older versions of corso didn't even populate the // not ideal, but some older versions of corso didn't even populate the
// resource owner ID. // resource owner ID.
// //
// The old version of corso did not populate the ProtectedResourceID
// field in the backup model.
//
// Worst case, the assist base will be cleaned up when the user upgrades // Worst case, the assist base will be cleaned up when the user upgrades
// corso and generates either a new assist base or merge base with the // corso and generates either a new assist base or merge base with the
// same reason. // same reason.
@ -719,18 +715,18 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime, snapCurrent()), manifestWithTime(baseTime, snapCurrent()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime, deetsCurrent()), manifestWithTime(baseTime, deetsCurrent()),
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime.Add(time.Second), snapCurrent2()), manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()), manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
}, },
backups: []backupRes{ backups: []backupRes{
{bup: backupWithResource("ro", true, backupWithTime(baseTime, bupCurrent()))}, {bup: backupWithResource("ro", true, backupWithTime(baseTime, bupCurrent()))},
{bup: backupWithLegacyResource("ro", backupWithTime(baseTime.Add(time.Second), bupCurrent2()))}, {bup: backupWithTime(baseTime.Add(time.Second), bupCurrent2())},
}, },
time: baseTime.Add(48 * time.Hour), time: baseTime.Add(48 * time.Hour),
buffer: 24 * time.Hour, buffer: 24 * time.Hour,
@ -742,6 +738,9 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
// reason and an even newer merge base from a current version of corso // reason and an even newer merge base from a current version of corso
// causes the assist base to be garbage collected. // causes the assist base to be garbage collected.
// //
// The old version of corso did not populate the ProtectedResourceID
// field in the backup model.
//
// This also tests that bases without a merge or assist tag are not // This also tests that bases without a merge or assist tag are not
// garbage collected as an assist base. // garbage collected as an assist base.
name: "AssistAndLegacyAndCurrentMergeBases NotYoungest CausesCleanup", name: "AssistAndLegacyAndCurrentMergeBases NotYoungest CausesCleanup",
@ -749,24 +748,24 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime, snapCurrent()), manifestWithTime(baseTime, snapCurrent()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime, deetsCurrent()), manifestWithTime(baseTime, deetsCurrent()),
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime.Add(time.Second), snapCurrent2()), manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()), manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime.Add(time.Minute), snapCurrent3()), manifestWithTime(baseTime.Add(time.Minute), snapCurrent3()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Minute), deetsCurrent3()), manifestWithTime(baseTime.Add(time.Minute), deetsCurrent3()),
}, },
backups: []backupRes{ backups: []backupRes{
{bup: backupWithResource("ro", true, backupWithTime(baseTime, bupCurrent()))}, {bup: backupWithResource("ro", true, backupWithTime(baseTime, bupCurrent()))},
{bup: backupWithLegacyResource("ro", backupWithTime(baseTime.Add(time.Second), bupCurrent2()))}, {bup: backupWithTime(baseTime.Add(time.Second), bupCurrent2())},
{bup: backupWithResource("ro", false, backupWithTime(baseTime.Add(time.Minute), bupCurrent3()))}, {bup: backupWithResource("ro", false, backupWithTime(baseTime.Add(time.Minute), bupCurrent3()))},
}, },
time: baseTime.Add(48 * time.Hour), time: baseTime.Add(48 * time.Hour),
@ -786,19 +785,19 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime, snapCurrent()), manifestWithTime(baseTime, snapCurrent()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime, deetsCurrent()), manifestWithTime(baseTime, deetsCurrent()),
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime.Add(time.Minute), snapCurrent2()), manifestWithTime(baseTime.Add(time.Minute), snapCurrent2()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Minute), deetsCurrent2()), manifestWithTime(baseTime.Add(time.Minute), deetsCurrent2()),
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime.Add(time.Second), snapCurrent3()), manifestWithTime(baseTime.Add(time.Second), snapCurrent3()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Second), deetsCurrent3()), manifestWithTime(baseTime.Add(time.Second), deetsCurrent3()),
}, },
backups: []backupRes{ backups: []backupRes{
@ -823,14 +822,14 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime, snapCurrent()), manifestWithTime(baseTime, snapCurrent()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory),
NewReason("", "ro", path.ExchangeService, path.ContactsCategory)), identity.NewReason("", "ro", path.ExchangeService, path.ContactsCategory)),
manifestWithTime(baseTime, deetsCurrent()), manifestWithTime(baseTime, deetsCurrent()),
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime.Add(time.Second), snapCurrent2()), manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()), manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
}, },
backups: []backupRes{ backups: []backupRes{
@ -851,13 +850,13 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime, snapCurrent()), manifestWithTime(baseTime, snapCurrent()),
"tenant1", "tenant1",
NewReason("", "ro1", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro1", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime, deetsCurrent()), manifestWithTime(baseTime, deetsCurrent()),
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime.Add(time.Second), snapCurrent2()), manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
"tenant1", "tenant1",
NewReason("", "ro2", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro2", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()), manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
}, },
backups: []backupRes{ backups: []backupRes{
@ -878,13 +877,13 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime, snapCurrent()), manifestWithTime(baseTime, snapCurrent()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime, deetsCurrent()), manifestWithTime(baseTime, deetsCurrent()),
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime.Add(time.Second), snapCurrent2()), manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
"tenant2", "tenant2",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()), manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
}, },
backups: []backupRes{ backups: []backupRes{
@ -905,19 +904,19 @@ func (suite *BackupCleanupUnitSuite) TestCleanupOrphanedData() {
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime, snapCurrent()), manifestWithTime(baseTime, snapCurrent()),
"", "",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime, deetsCurrent()), manifestWithTime(baseTime, deetsCurrent()),
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime.Add(time.Second), snapCurrent2()), manifestWithTime(baseTime.Add(time.Second), snapCurrent2()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()), manifestWithTime(baseTime.Add(time.Second), deetsCurrent2()),
manifestWithReasons( manifestWithReasons(
manifestWithTime(baseTime.Add(time.Minute), snapCurrent3()), manifestWithTime(baseTime.Add(time.Minute), snapCurrent3()),
"tenant1", "tenant1",
NewReason("", "ro", path.ExchangeService, path.EmailCategory)), identity.NewReason("", "ro", path.ExchangeService, path.EmailCategory)),
manifestWithTime(baseTime.Add(time.Minute), deetsCurrent3()), manifestWithTime(baseTime.Add(time.Minute), deetsCurrent3()),
}, },
backups: []backupRes{ backups: []backupRes{

View File

@ -7,6 +7,7 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/kopia/kopia/fs" "github.com/kopia/kopia/fs"
"github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/common/readers" "github.com/alcionai/corso/src/internal/common/readers"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -30,11 +31,11 @@ type kopiaDataCollection struct {
func (kdc *kopiaDataCollection) Items( func (kdc *kopiaDataCollection) Items(
ctx context.Context, ctx context.Context,
errs *fault.Bus, bus *fault.Bus,
) <-chan data.Item { ) <-chan data.Item {
var ( var (
res = make(chan data.Item) res = make(chan data.Item)
el = errs.Local() el = bus.Local()
loadCount = 0 loadCount = 0
) )
@ -74,7 +75,7 @@ func (kdc kopiaDataCollection) FullPath() path.Path {
} }
// Fetch returns the file with the given name from the collection as a // Fetch returns the file with the given name from the collection as a
// data.Item. Returns a data.ErrNotFound error if the file isn't in the // data.Item. Returns an errs.NotFound error if the file isn't in the
// collection. // collection.
func (kdc kopiaDataCollection) FetchItemByName( func (kdc kopiaDataCollection) FetchItemByName(
ctx context.Context, ctx context.Context,
@ -93,7 +94,7 @@ func (kdc kopiaDataCollection) FetchItemByName(
e, err := kdc.dir.Child(ctx, encodeAsPath(name)) e, err := kdc.dir.Child(ctx, encodeAsPath(name))
if err != nil { if err != nil {
if isErrEntryNotFound(err) { if isErrEntryNotFound(err) {
err = clues.Stack(data.ErrNotFound, err) err = clues.Stack(errs.NotFound, err)
} }
return nil, clues.Wrap(err, "getting item").WithClues(ctx) return nil, clues.Wrap(err, "getting item").WithClues(ctx)

View File

@ -13,6 +13,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/common/readers" "github.com/alcionai/corso/src/internal/common/readers"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
dataMock "github.com/alcionai/corso/src/internal/data/mock" dataMock "github.com/alcionai/corso/src/internal/data/mock"
@ -410,7 +411,7 @@ func (suite *KopiaDataCollectionUnitSuite) TestFetchItemByName() {
if err != nil { if err != nil {
if test.notFoundErr { if test.notFoundErr {
assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err)) assert.ErrorIs(t, err, errs.NotFound, clues.ToCore(err))
} }
return return

View File

@ -6,6 +6,7 @@ import (
"github.com/alcionai/corso/src/internal/common/prefixmatcher" "github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/kopia" "github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/backup/identity" "github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
@ -17,7 +18,7 @@ type (
ConsumeBackupCollections( ConsumeBackupCollections(
ctx context.Context, ctx context.Context,
backupReasons []identity.Reasoner, backupReasons []identity.Reasoner,
bases kopia.BackupBases, bases backup.BackupBases,
cs []data.BackupCollection, cs []data.BackupCollection,
pmr prefixmatcher.StringSetReader, pmr prefixmatcher.StringSetReader,
tags map[string]string, tags map[string]string,
@ -41,6 +42,6 @@ type (
ctx context.Context, ctx context.Context,
reasons []identity.Reasoner, reasons []identity.Reasoner,
tags map[string]string, tags map[string]string,
) kopia.BackupBases ) backup.BackupBases
} }
) )

View File

@ -7,6 +7,7 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
"github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
@ -63,7 +64,7 @@ func (mc mergeCollection) FullPath() path.Path {
func (mc *mergeCollection) Items( func (mc *mergeCollection) Items(
ctx context.Context, ctx context.Context,
errs *fault.Bus, bus *fault.Bus,
) <-chan data.Item { ) <-chan data.Item {
res := make(chan data.Item) res := make(chan data.Item)
@ -82,7 +83,7 @@ func (mc *mergeCollection) Items(
"merged_collection_storage_path", path.LoggableDir(c.storagePath)) "merged_collection_storage_path", path.LoggableDir(c.storagePath))
logger.Ctx(ictx).Debug("sending items from merged collection") logger.Ctx(ictx).Debug("sending items from merged collection")
for item := range c.Items(ictx, errs) { for item := range c.Items(ictx, bus) {
res <- item res <- item
} }
} }
@ -92,7 +93,7 @@ func (mc *mergeCollection) Items(
} }
// Fetch goes through all the collections in this one and returns the first // Fetch goes through all the collections in this one and returns the first
// match found or the first error that is not data.ErrNotFound. If multiple // match found or the first error that is not errs.NotFound. If multiple
// collections have the requested item, the instance in the collection with the // collections have the requested item, the instance in the collection with the
// lexicographically smallest storage path is returned. // lexicographically smallest storage path is returned.
func (mc *mergeCollection) FetchItemByName( func (mc *mergeCollection) FetchItemByName(
@ -113,11 +114,11 @@ func (mc *mergeCollection) FetchItemByName(
s, err := c.FetchItemByName(ictx, name) s, err := c.FetchItemByName(ictx, name)
if err == nil { if err == nil {
return s, nil return s, nil
} else if err != nil && !errors.Is(err, data.ErrNotFound) { } else if err != nil && !errors.Is(err, errs.NotFound) {
return nil, clues.Wrap(err, "fetching from merged collection"). return nil, clues.Wrap(err, "fetching from merged collection").
WithClues(ictx) WithClues(ictx)
} }
} }
return nil, clues.Wrap(data.ErrNotFound, "merged collection fetch") return nil, clues.Wrap(errs.NotFound, "merged collection fetch")
} }
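A minimal sketch of the merged-fetch contract described above, with local stand-ins: try collections in lexicographic storage-path order, return the first hit, and only surface errors that are not the NotFound sentinel:

package main

import (
	"errors"
	"fmt"
	"sort"
)

var errNotFound = errors.New("not found")

type col struct {
	storagePath string
	items       map[string]string
}

func (c col) fetch(name string) (string, error) {
	v, ok := c.items[name]
	if !ok {
		return "", errNotFound
	}

	return v, nil
}

// fetchMerged returns the instance from the collection with the
// lexicographically smallest storage path, skipping NotFound misses.
func fetchMerged(cols []col, name string) (string, error) {
	sort.Slice(cols, func(i, j int) bool {
		return cols[i].storagePath < cols[j].storagePath
	})

	for _, c := range cols {
		v, err := c.fetch(name)
		if err == nil {
			return v, nil
		}

		if !errors.Is(err, errNotFound) {
			return "", err
		}
	}

	return "", errNotFound
}

func main() {
	cols := []col{
		{storagePath: "b", items: map[string]string{"x": "from-b"}},
		{storagePath: "a", items: map[string]string{"x": "from-a"}},
	}

	v, _ := fetchMerged(cols, "x")
	fmt.Println(v) // from-a: smallest storage path wins
}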

View File

@ -12,6 +12,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/common/readers" "github.com/alcionai/corso/src/internal/common/readers"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/m365/service/exchange/mock" "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
@ -292,7 +293,7 @@ func (suite *MergeCollectionUnitSuite) TestFetchItemByName() {
if err != nil { if err != nil {
if test.notFoundErr { if test.notFoundErr {
assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err)) assert.ErrorIs(t, err, errs.NotFound, clues.ToCore(err))
} }
return return

View File

@ -4,9 +4,13 @@ import (
"testing" "testing"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/alcionai/corso/src/pkg/backup"
) )
func AssertBackupBasesEqual(t *testing.T, expect, got BackupBases) { var _ backup.BackupBases = &MockBackupBases{}
func AssertBackupBasesEqual(t *testing.T, expect, got backup.BackupBases) {
if expect == nil && got == nil { if expect == nil && got == nil {
return return
} }
@ -48,22 +52,22 @@ type MockBackupBases struct {
*backupBases *backupBases
} }
func (bb *MockBackupBases) WithBackups(b ...BackupEntry) *MockBackupBases { func (bb *MockBackupBases) WithBackups(b ...backup.BackupEntry) *MockBackupBases {
bb.backupBases.backups = append(bb.Backups(), b...) bb.backupBases.backups = append(bb.Backups(), b...)
return bb return bb
} }
func (bb *MockBackupBases) WithMergeBases(m ...ManifestEntry) *MockBackupBases { func (bb *MockBackupBases) WithMergeBases(m ...backup.ManifestEntry) *MockBackupBases {
bb.backupBases.mergeBases = append(bb.MergeBases(), m...) bb.backupBases.mergeBases = append(bb.MergeBases(), m...)
return bb return bb
} }
func (bb *MockBackupBases) WithAssistBackups(b ...BackupEntry) *MockBackupBases { func (bb *MockBackupBases) WithAssistBackups(b ...backup.BackupEntry) *MockBackupBases {
bb.backupBases.assistBackups = append(bb.UniqueAssistBackups(), b...) bb.backupBases.assistBackups = append(bb.UniqueAssistBackups(), b...)
return bb return bb
} }
func (bb *MockBackupBases) WithAssistBases(m ...ManifestEntry) *MockBackupBases { func (bb *MockBackupBases) WithAssistBases(m ...backup.ManifestEntry) *MockBackupBases {
bb.backupBases.assistBases = append(bb.UniqueAssistBases(), m...) bb.backupBases.assistBases = append(bb.UniqueAssistBases(), m...)
return bb return bb
} }
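The added var _ backup.BackupBases = &MockBackupBases{} line is the usual compile-time assertion that the mock still satisfies the interface after the move, and the With* methods are chainable builders. A generic sketch of both idioms, with hypothetical types rather than the corso ones:

package main

import "fmt"

type Greeter interface{ Greet() string }

type mockGreeter struct{ msg string }

func (m *mockGreeter) Greet() string { return m.msg }

// Compile-time proof that *mockGreeter satisfies Greeter; the build
// breaks here, not at a distant call site, if the interface drifts.
var _ Greeter = &mockGreeter{}

// Chainable with-style builder, like MockBackupBases.WithBackups et al.
func (m *mockGreeter) WithMsg(s string) *mockGreeter {
	m.msg = s
	return m
}

func main() {
	g := (&mockGreeter{}).WithMsg("hi")
	fmt.Println(g.Greet()) // hi
}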

View File

@ -11,7 +11,7 @@ import (
"github.com/pkg/errors" "github.com/pkg/errors"
"golang.org/x/exp/maps" "golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/pkg/store" "github.com/alcionai/corso/src/pkg/store"
) )
@ -292,7 +292,7 @@ func (ms *ModelStore) getModelStoreID(
} }
if len(metadata) == 0 { if len(metadata) == 0 {
return "", clues.Wrap(data.ErrNotFound, "getting ModelStoreID").WithClues(ctx) return "", clues.Wrap(errs.NotFound, "getting ModelStoreID").WithClues(ctx)
} }
if len(metadata) != 1 { if len(metadata) != 1 {
@ -347,7 +347,7 @@ func (ms *ModelStore) GetWithModelStoreID(
metadata, err := ms.c.GetManifest(ctx, id, m) metadata, err := ms.c.GetManifest(ctx, id, m)
if err != nil { if err != nil {
if errors.Is(err, manifest.ErrNotFound) { if errors.Is(err, manifest.ErrNotFound) {
err = data.ErrNotFound err = errs.NotFound
} }
return clues.Wrap(err, "getting model data").WithClues(ctx) return clues.Wrap(err, "getting model data").WithClues(ctx)
@ -490,7 +490,7 @@ func (ms *ModelStore) Delete(ctx context.Context, s model.Schema, id model.Stabl
latest, err := ms.getModelStoreID(ctx, s, id) latest, err := ms.getModelStoreID(ctx, s, id)
if err != nil { if err != nil {
if errors.Is(err, data.ErrNotFound) { if errors.Is(err, errs.NotFound) {
return nil return nil
} }
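The pattern in this file, translating the dependency's sentinel (manifest.ErrNotFound) into the shared errs.NotFound and treating NotFound as success in Delete, sketched with local stand-ins:

package main

import (
	"errors"
	"fmt"
)

// Stand-ins for manifest.ErrNotFound (kopia) and errs.NotFound (corso).
var (
	errKopiaNotFound = errors.New("manifest not found")
	errNotFound      = errors.New("not found")
)

func get(id string) error {
	// Pretend the underlying store reported a miss.
	err := errKopiaNotFound

	if errors.Is(err, errKopiaNotFound) {
		// Translate the dependency's sentinel into the shared one so
		// callers only ever test against errNotFound.
		err = errNotFound
	}

	return fmt.Errorf("getting model data: %w", err)
}

func del(id string) error {
	if err := get(id); err != nil {
		if errors.Is(err, errNotFound) {
			return nil // deleting something already gone is a no-op
		}

		return err
	}

	return nil
}

func main() {
	fmt.Println(del("baz")) // <nil>
}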

View File

@ -14,7 +14,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup"
@ -381,10 +381,10 @@ func (suite *ModelStoreIntegrationSuite) TestGet_NotFoundErrors() {
t := suite.T() t := suite.T()
err := suite.m.Get(suite.ctx, model.BackupOpSchema, "baz", nil) err := suite.m.Get(suite.ctx, model.BackupOpSchema, "baz", nil)
assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err)) assert.ErrorIs(t, err, errs.NotFound, clues.ToCore(err))
err = suite.m.GetWithModelStoreID(suite.ctx, model.BackupOpSchema, "baz", nil) err = suite.m.GetWithModelStoreID(suite.ctx, model.BackupOpSchema, "baz", nil)
assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err)) assert.ErrorIs(t, err, errs.NotFound, clues.ToCore(err))
} }
func (suite *ModelStoreIntegrationSuite) TestPutGetOfTypeBadVersion() { func (suite *ModelStoreIntegrationSuite) TestPutGetOfTypeBadVersion() {
@ -670,7 +670,7 @@ func (suite *ModelStoreIntegrationSuite) TestPutUpdate() {
} }
err = m.GetWithModelStoreID(ctx, theModelType, oldModelID, nil) err = m.GetWithModelStoreID(ctx, theModelType, oldModelID, nil)
assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err)) assert.ErrorIs(t, err, errs.NotFound, clues.ToCore(err))
}) })
} }
} }
@ -737,7 +737,7 @@ func (suite *ModelStoreIntegrationSuite) TestPutDelete() {
returned := &fooModel{} returned := &fooModel{}
err = suite.m.GetWithModelStoreID(suite.ctx, theModelType, foo.ModelStoreID, returned) err = suite.m.GetWithModelStoreID(suite.ctx, theModelType, foo.ModelStoreID, returned)
assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err)) assert.ErrorIs(t, err, errs.NotFound, clues.ToCore(err))
} }
func (suite *ModelStoreIntegrationSuite) TestPutDeleteBatch() { func (suite *ModelStoreIntegrationSuite) TestPutDeleteBatch() {
@ -760,7 +760,7 @@ func (suite *ModelStoreIntegrationSuite) TestPutDeleteBatch() {
for _, id := range ids { for _, id := range ids {
returned := &fooModel{} returned := &fooModel{}
err := suite.m.GetWithModelStoreID(suite.ctx, theModelType, id, returned) err := suite.m.GetWithModelStoreID(suite.ctx, theModelType, id, returned)
assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err)) assert.ErrorIs(t, err, errs.NotFound, clues.ToCore(err))
} }
} }
@ -843,7 +843,7 @@ func (suite *ModelStoreRegressionSuite) TestFailDuringWriteSessionHasNoVisibleEf
assert.ErrorIs(t, err, assert.AnError, clues.ToCore(err)) assert.ErrorIs(t, err, assert.AnError, clues.ToCore(err))
err = m.GetWithModelStoreID(ctx, theModelType, newID, nil) err = m.GetWithModelStoreID(ctx, theModelType, newID, nil)
assert.ErrorIs(t, err, data.ErrNotFound, clues.ToCore(err)) assert.ErrorIs(t, err, errs.NotFound, clues.ToCore(err))
returned := &fooModel{} returned := &fooModel{}

View File

@@ -17,12 +17,14 @@ import (
"github.com/kopia/kopia/snapshot/snapshotfs"
"golang.org/x/exp/maps"
+ "github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/graph/metadata"
+ "github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/count"
"github.com/alcionai/corso/src/pkg/fault"
@@ -58,7 +60,7 @@ type corsoProgress struct {
toMerge *mergeDetails
mu sync.RWMutex
totalBytes int64
- errs *fault.Bus
+ bus *fault.Bus
// expectedIgnoredErrors is a count of error cases caught in the Error wrapper
// which are well known and actually ignorable. At the end of a run, if the
// manifest ignored error count is equal to this count, then everything is good.
@@ -107,7 +109,7 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) {
// never had to materialize their details in-memory.
if d.infoer == nil || d.cached {
if d.prevPath == nil {
- cp.errs.AddRecoverable(ctx, clues.New("finished file sourced from previous backup with no previous path").
+ cp.bus.AddRecoverable(ctx, clues.New("finished file sourced from previous backup with no previous path").
WithClues(ctx).
Label(fault.LabelForceNoBackupCreation))
@@ -123,7 +125,7 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) {
d.repoPath,
d.locationPath)
if err != nil {
- cp.errs.AddRecoverable(ctx, clues.Wrap(err, "adding finished file to merge list").
+ cp.bus.AddRecoverable(ctx, clues.Wrap(err, "adding finished file to merge list").
WithClues(ctx).
Label(fault.LabelForceNoBackupCreation))
}
@@ -132,18 +134,18 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) {
}
info, err := d.infoer.Info()
- if errors.Is(err, data.ErrNotFound) {
+ if errors.Is(err, errs.NotFound) {
// The item was deleted between enumeration and trying to get data. Skip
// adding it to details since there's no data for it.
return
} else if err != nil {
- cp.errs.AddRecoverable(ctx, clues.Wrap(err, "getting ItemInfo").
+ cp.bus.AddRecoverable(ctx, clues.Wrap(err, "getting ItemInfo").
WithClues(ctx).
Label(fault.LabelForceNoBackupCreation))
return
} else if !ptr.Val(d.modTime).Equal(info.Modified()) {
- cp.errs.AddRecoverable(ctx, clues.New("item modTime mismatch").
+ cp.bus.AddRecoverable(ctx, clues.New("item modTime mismatch").
WithClues(ctx).
Label(fault.LabelForceNoBackupCreation))
@@ -152,7 +154,7 @@ func (cp *corsoProgress) FinishedFile(relativePath string, err error) {
err = cp.deets.Add(d.repoPath, d.locationPath, info)
if err != nil {
- cp.errs.AddRecoverable(ctx, clues.Wrap(err, "adding finished file to details").
+ cp.bus.AddRecoverable(ctx, clues.Wrap(err, "adding finished file to details").
WithClues(ctx).
Label(fault.LabelForceNoBackupCreation))
@@ -216,7 +218,7 @@ func (cp *corsoProgress) Error(relpath string, err error, isIgnored bool) {
defer cp.UploadProgress.Error(relpath, err, isIgnored)
- cp.errs.AddRecoverable(cp.ctx, clues.Wrap(err, "kopia reported error").
+ cp.bus.AddRecoverable(cp.ctx, clues.Wrap(err, "kopia reported error").
With("is_ignored", isIgnored, "relative_path", relpath).
Label(fault.LabelForceNoBackupCreation))
}
@@ -250,7 +252,7 @@ func collectionEntries(
// Track which items have already been seen so we can skip them if we see
// them again in the data from the base snapshot.
seen = map[string]struct{}{}
- items = streamedEnts.Items(ctx, progress.errs)
+ items = streamedEnts.Items(ctx, progress.bus)
)
if lp, ok := streamedEnts.(data.LocationPather); ok {
@@ -288,7 +290,7 @@ func collectionEntries(
itemPath, err := streamedEnts.FullPath().AppendItem(e.ID())
if err != nil {
err = clues.Wrap(err, "getting full item path")
- progress.errs.AddRecoverable(ctx, err)
+ progress.bus.AddRecoverable(ctx, err)
logger.CtxErr(ctx, err).Error("getting full item path")
@@ -1039,7 +1041,7 @@ func traverseBaseDir(
return nil
}
- func logBaseInfo(ctx context.Context, m ManifestEntry) {
+ func logBaseInfo(ctx context.Context, m backup.ManifestEntry) {
svcs := map[string]struct{}{}
cats := map[string]struct{}{}
@@ -1085,7 +1087,7 @@ const (
func inflateBaseTree(
ctx context.Context,
loader snapshotLoader,
- snap ManifestEntry,
+ snap backup.ManifestEntry,
updatedPaths map[string]path.Path,
roots map[string]*treeMap,
) error {
@@ -1196,7 +1198,7 @@ func inflateBaseTree(
func inflateDirTree(
ctx context.Context,
loader snapshotLoader,
- baseSnaps []ManifestEntry,
+ baseSnaps []backup.ManifestEntry,
collections []data.BackupCollection,
globalExcludeSet prefixmatcher.StringSetReader,
progress *corsoProgress,
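Throughout this file the corsoProgress field is renamed from errs to bus. The rename clears shadowing lint errors and frees the errs identifier for the newly imported common/errs package; behavior is unchanged. A short runnable sketch of the renamed flow, using only the fault.Bus calls that appear in this diff:

    package main

    import (
    	"context"
    	"errors"
    	"fmt"

    	"github.com/alcionai/corso/src/pkg/fault"
    )

    func main() {
    	ctx := context.Background()

    	// fault.New(true) mirrors the fail-fast buses these tests construct.
    	bus := fault.New(true)

    	// corsoProgress now reports recoverable issues via cp.bus.AddRecoverable
    	// rather than cp.errs.AddRecoverable.
    	bus.AddRecoverable(ctx, errors.New("item modTime mismatch"))

    	// The same accessors the tests assert against.
    	fmt.Println(bus.Failure())
    	fmt.Println(len(bus.Errors().Recovered))
    }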


@@ -22,6 +22,7 @@ import (
"github.com/alcionai/corso/src/internal/data"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
+ "github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/fault"
@@ -377,7 +378,7 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFile() {
deets: bd,
toMerge: newMergeDetails(),
pending: map[string]*itemDetails{},
- errs: fault.New(true),
+ bus: fault.New(true),
}
ci := test.cachedItems(suite.targetFileName, suite.targetFilePath)
@@ -475,7 +476,7 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFileCachedNoPrevPathErrors() {
UploadProgress: &snapshotfs.NullUploadProgress{},
deets: bd,
pending: map[string]*itemDetails{},
- errs: fault.New(true),
+ bus: fault.New(true),
}
for k, v := range cachedItems {
@@ -491,7 +492,7 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFileCachedNoPrevPathErrors() {
assert.Empty(t, cp.pending)
assert.Empty(t, bd.Details().Entries)
- assert.Error(t, cp.errs.Failure(), clues.ToCore(cp.errs.Failure()))
+ assert.Error(t, cp.bus.Failure(), clues.ToCore(cp.bus.Failure()))
}
func (suite *CorsoProgressUnitSuite) TestFinishedFileBaseItemDoesntBuildHierarchy() {
@@ -526,7 +527,7 @@ func (suite *CorsoProgressUnitSuite) TestFinishedFileBaseItemDoesntBuildHierarch
deets: db,
pending: map[string]*itemDetails{},
toMerge: newMergeDetails(),
- errs: fault.New(true),
+ bus: fault.New(true),
}
deets := &itemDetails{
@@ -568,7 +569,7 @@ func (suite *CorsoProgressUnitSuite) TestFinishedHashingFile() {
UploadProgress: &snapshotfs.NullUploadProgress{},
deets: bd,
pending: map[string]*itemDetails{},
- errs: fault.New(true),
+ bus: fault.New(true),
}
ci := test.cachedItems(suite.targetFileName, suite.targetFilePath)
@@ -631,7 +632,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree() {
ctx: ctx,
pending: map[string]*itemDetails{},
toMerge: newMergeDetails(),
- errs: fault.New(true),
+ bus: fault.New(true),
}
collections := []data.BackupCollection{
@@ -751,7 +752,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_MixedDirectory()
ctx: ctx,
pending: map[string]*itemDetails{},
toMerge: newMergeDetails(),
- errs: fault.New(true),
+ bus: fault.New(true),
}
dirTree, err := inflateDirTree(ctx, nil, nil, test.layout, pmMock.NewPrefixMap(nil), progress)
@@ -858,7 +859,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_Fails() {
progress := &corsoProgress{
ctx: ctx,
toMerge: newMergeDetails(),
- errs: fault.New(true),
+ bus: fault.New(true),
}
_, err := inflateDirTree(ctx, nil, nil, test.layout, pmMock.NewPrefixMap(nil), progress)
@@ -879,14 +880,14 @@ func makeManifestEntry(
id, tenant, resourceOwner string,
service path.ServiceType,
categories ...path.CategoryType,
- ) ManifestEntry {
+ ) backup.ManifestEntry {
var reasons []identity.Reasoner
for _, c := range categories {
- reasons = append(reasons, NewReason(tenant, resourceOwner, service, c))
+ reasons = append(reasons, identity.NewReason(tenant, resourceOwner, service, c))
}
- return ManifestEntry{
+ return backup.ManifestEntry{
Manifest: &snapshot.Manifest{
ID: manifest.ID(id),
},
@@ -957,7 +958,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeErrors() {
ctx: ctx,
pending: map[string]*itemDetails{},
toMerge: newMergeDetails(),
- errs: fault.New(true),
+ bus: fault.New(true),
}
cols := []data.BackupCollection{}
@@ -1189,7 +1190,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
ctx: ctx,
pending: map[string]*itemDetails{},
toMerge: newMergeDetails(),
- errs: fault.New(true),
+ bus: fault.New(true),
}
msw := &mockSnapshotWalker{
snapshotRoot: getBaseSnapshot(),
@@ -1198,7 +1199,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSingleSubtree() {
dirTree, err := inflateDirTree(
ctx,
msw,
- []ManifestEntry{
+ []backup.ManifestEntry{
makeManifestEntry("", testTenant, testUser, path.ExchangeService, path.EmailCategory),
},
test.inputCollections(),
@@ -1899,7 +1900,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
ctx: ctx,
pending: map[string]*itemDetails{},
toMerge: newMergeDetails(),
- errs: fault.New(true),
+ bus: fault.New(true),
}
msw := &mockSnapshotWalker{
snapshotRoot: getBaseSnapshot(),
@@ -1913,7 +1914,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeMultipleSubdirecto
dirTree, err := inflateDirTree(
ctx,
msw,
- []ManifestEntry{
+ []backup.ManifestEntry{
makeManifestEntry("", testTenant, testUser, path.ExchangeService, path.EmailCategory),
},
test.inputCollections(t),
@@ -2033,7 +2034,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre
ctx: ctx,
pending: map[string]*itemDetails{},
toMerge: newMergeDetails(),
- errs: fault.New(true),
+ bus: fault.New(true),
}
mc := exchMock.NewCollection(suite.testStoragePath, suite.testStoragePath, 1)
mc.PrevPath = mc.FullPath()
@@ -2057,7 +2058,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSkipsDeletedSubtre
dirTree, err := inflateDirTree(
ctx,
msw,
- []ManifestEntry{
+ []backup.ManifestEntry{
makeManifestEntry("", testTenant, testUser, path.ExchangeService, path.EmailCategory),
},
collections,
@@ -2130,7 +2131,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_HandleEmptyBase()
ctx: ctx,
pending: map[string]*itemDetails{},
toMerge: newMergeDetails(),
- errs: fault.New(true),
+ bus: fault.New(true),
}
mc := exchMock.NewCollection(archiveStorePath, archiveLocPath, 1)
mc.ColState = data.NewState
@@ -2157,7 +2158,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_HandleEmptyBase()
dirTree, err := inflateDirTree(
ctx,
msw,
- []ManifestEntry{
+ []backup.ManifestEntry{
makeManifestEntry("", testTenant, testUser, path.ExchangeService, path.EmailCategory),
},
collections,
@@ -2352,7 +2353,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt
ctx: ctx,
pending: map[string]*itemDetails{},
toMerge: newMergeDetails(),
- errs: fault.New(true),
+ bus: fault.New(true),
}
mc := exchMock.NewCollection(inboxPath, inboxPath, 1)
@@ -2373,7 +2374,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsCorrectSubt
dirTree, err := inflateDirTree(
ctx,
msw,
- []ManifestEntry{
+ []backup.ManifestEntry{
makeManifestEntry("id1", testTenant, testUser, path.ExchangeService, path.ContactsCategory),
makeManifestEntry("id2", testTenant, testUser, path.ExchangeService, path.EmailCategory),
},
@@ -2508,7 +2509,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsMigrateSubt
ctx: ctx,
pending: map[string]*itemDetails{},
toMerge: newMergeDetails(),
- errs: fault.New(true),
+ bus: fault.New(true),
}
mce := exchMock.NewCollection(newPrefixPathEmail, nil, 0)
@@ -2526,7 +2527,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTreeSelectsMigrateSubt
dirTree, err := inflateDirTree(
ctx,
msw,
- []ManifestEntry{
+ []backup.ManifestEntry{
makeManifestEntry("id1", testTenant, testUser, path.ExchangeService, path.EmailCategory, path.ContactsCategory),
},
[]data.BackupCollection{mce, mcc},
@@ -3436,7 +3437,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_SelectiveSubtreeP
ctx: ctx,
pending: map[string]*itemDetails{},
toMerge: newMergeDetails(),
- errs: fault.New(true),
+ bus: fault.New(true),
}
snapshotRoot, counters := getBaseSnapshot()
msw := &mockSnapshotWalker{
@@ -3451,7 +3452,7 @@ func (suite *HierarchyBuilderUnitSuite) TestBuildDirectoryTree_SelectiveSubtreeP
dirTree, err := inflateDirTree(
ctx,
msw,
- []ManifestEntry{
+ []backup.ManifestEntry{
makeManifestEntry("", tenant, user, path.OneDriveService, path.FilesCategory),
},
test.inputCollections(t),
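The makeManifestEntry helper above pulls together both relocations in this change set: NewReason now lives in pkg/backup/identity and ManifestEntry in pkg/backup, so the kopia package no longer owns either type. A condensed sketch of constructing an entry, assuming only the field shapes visible in this diff:

    package example

    import (
    	"github.com/kopia/kopia/repo/manifest"
    	"github.com/kopia/kopia/snapshot"

    	"github.com/alcionai/corso/src/pkg/backup"
    	"github.com/alcionai/corso/src/pkg/backup/identity"
    	"github.com/alcionai/corso/src/pkg/path"
    )

    // newEntry pairs a kopia snapshot manifest with the Reasoners
    // (tenant/owner/service/category) that scope it, as in the test helper.
    func newEntry(id, tenant, owner string) backup.ManifestEntry {
    	return backup.ManifestEntry{
    		Manifest: &snapshot.Manifest{ID: manifest.ID(id)},
    		Reasons: []identity.Reasoner{
    			identity.NewReason(tenant, owner, path.ExchangeService, path.EmailCategory),
    		},
    	}
    }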


@@ -17,12 +17,14 @@ import (
"github.com/kopia/kopia/snapshot/snapshotmaintenance"
"golang.org/x/exp/maps"
+ "github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/common/readers"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
"github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/stats"
+ "github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/control/repository"
@@ -139,12 +141,12 @@ func (w *Wrapper) Close(ctx context.Context) error {
func (w Wrapper) ConsumeBackupCollections(
ctx context.Context,
backupReasons []identity.Reasoner,
- bases BackupBases,
+ bases backup.BackupBases,
collections []data.BackupCollection,
globalExcludeSet prefixmatcher.StringSetReader,
additionalTags map[string]string,
buildTreeWithBase bool,
- errs *fault.Bus,
+ bus *fault.Bus,
) (*BackupStats, *details.Builder, DetailsMergeInfoer, error) {
if w.c == nil {
return nil, nil, nil, clues.Stack(errNotConnected).WithClues(ctx)
@@ -162,15 +164,15 @@ func (w Wrapper) ConsumeBackupCollections(
pending: map[string]*itemDetails{},
deets: &details.Builder{},
toMerge: newMergeDetails(),
- errs: errs,
+ bus: bus,
}
// When running an incremental backup, we need to pass the prior
// snapshot bases into inflateDirTree so that the new snapshot
// includes historical data.
var (
- mergeBase []ManifestEntry
- assistBase []ManifestEntry
+ mergeBase []backup.ManifestEntry
+ assistBase []backup.ManifestEntry
)
if bases != nil {
@@ -215,12 +217,12 @@ func (w Wrapper) ConsumeBackupCollections(
return nil, nil, nil, err
}
- return s, progress.deets, progress.toMerge, progress.errs.Failure()
+ return s, progress.deets, progress.toMerge, progress.bus.Failure()
}
func (w Wrapper) makeSnapshotWithRoot(
ctx context.Context,
- prevSnapEntries []ManifestEntry,
+ prevSnapEntries []backup.ManifestEntry,
root fs.Directory,
addlTags map[string]string,
progress *corsoProgress,
@@ -253,7 +255,7 @@ func (w Wrapper) makeSnapshotWithRoot(
tags := map[string]string{}
for k, v := range addlTags {
- mk, mv := makeTagKV(k)
+ mk, mv := backup.MakeTagKV(k)
if len(v) == 0 {
v = mv
@@ -369,7 +371,7 @@ func getDir(
encodeElements(dirPath.PopFront().Elements()...))
if err != nil {
if isErrEntryNotFound(err) {
- err = clues.Stack(data.ErrNotFound, err).WithClues(ctx)
+ err = clues.Stack(errs.NotFound, err).WithClues(ctx)
}
return nil, clues.Wrap(err, "getting nested object handle").WithClues(ctx)
@@ -474,7 +476,7 @@ func (w Wrapper) ProduceRestoreCollections(
snapshotID string,
paths []path.RestorePaths,
bcounter ByteCounter,
- errs *fault.Bus,
+ bus *fault.Bus,
) ([]data.RestoreCollection, error) {
ctx, end := diagnostics.Span(ctx, "kopia:produceRestoreCollections")
defer end()
@@ -495,7 +497,7 @@ func (w Wrapper) ProduceRestoreCollections(
// RestorePath -> []StoragePath directory -> set of items to load from the
// directory.
dirsToItems = map[string]*restoreCollection{}
- el = errs.Local()
+ el = bus.Local()
)
for _, itemPaths := range paths {
@@ -550,7 +552,7 @@ func (w Wrapper) ProduceRestoreCollections(
// Now that we've grouped everything, go through and load each directory and
// then load the items from the directory.
- res, err := loadDirsAndItems(ctx, snapshotRoot, bcounter, dirsToItems, errs)
+ res, err := loadDirsAndItems(ctx, snapshotRoot, bcounter, dirsToItems, bus)
if err != nil {
return nil, clues.Wrap(err, "loading items").WithClues(ctx)
}
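makeSnapshotWithRoot now calls the exported backup.MakeTagKV instead of a package-private helper, so code outside the kopia package can produce the same normalized tag keys. MakeTagKV's implementation is not shown here; the sketch below assumes only the (key, defaultValue) return shape that the surrounding loop depends on:

    package main

    import "fmt"

    // makeTagKV is a hypothetical stand-in for backup.MakeTagKV; the real
    // normalization rules are not visible in this diff.
    func makeTagKV(k string) (string, string) {
    	return "tag:" + k, "0"
    }

    func main() {
    	addlTags := map[string]string{"backup-id": "abc123", "is-canon": ""}
    	tags := map[string]string{}

    	for k, v := range addlTags {
    		mk, mv := makeTagKV(k)

    		// As in the diff: empty values fall back to the default that
    		// MakeTagKV returns alongside the normalized key.
    		if len(v) == 0 {
    			v = mv
    		}

    		tags[mk] = v
    	}

    	fmt.Println(tags)
    }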


@@ -21,6 +21,7 @@ import (
"github.com/stretchr/testify/suite"
"golang.org/x/exp/maps"
+ "github.com/alcionai/corso/src/internal/common/errs"
pmMock "github.com/alcionai/corso/src/internal/common/prefixmatcher/mock"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/data"
@@ -28,6 +29,7 @@ import (
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
exchMock "github.com/alcionai/corso/src/internal/m365/service/exchange/mock"
"github.com/alcionai/corso/src/internal/tester"
+ "github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/control/repository"
@@ -811,12 +813,12 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
}
reasons := []identity.Reasoner{
- NewReason(
+ identity.NewReason(
testTenant,
suite.storePath1.ProtectedResource(),
suite.storePath1.Service(),
suite.storePath1.Category()),
- NewReason(
+ identity.NewReason(
testTenant,
suite.storePath2.ProtectedResource(),
suite.storePath2.Service(),
@@ -837,7 +839,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
type testCase struct {
name string
- baseBackups func(base ManifestEntry) BackupBases
+ baseBackups func(base backup.ManifestEntry) backup.BackupBases
collections []data.BackupCollection
expectedUploadedFiles int
expectedCachedFiles int
@@ -862,7 +864,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
// Initial backup. All files should be considered new by kopia.
baseBackupCase := testCase{
name: "Uncached",
- baseBackups: func(ManifestEntry) BackupBases {
+ baseBackups: func(backup.ManifestEntry) backup.BackupBases {
return NewMockBackupBases()
},
collections: collections,
@@ -873,8 +875,8 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
uploadedBytes: []int64{8000, 10000},
}
- runAndTestBackup := func(test testCase, base ManifestEntry) ManifestEntry {
- var res ManifestEntry
+ runAndTestBackup := func(test testCase, base backup.ManifestEntry) backup.ManifestEntry {
+ var res backup.ManifestEntry
suite.Run(test.name, func() {
t := suite.T()
@@ -945,7 +947,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
manifest.ID(stats.SnapshotID))
require.NoError(t, err, clues.ToCore(err))
- res = ManifestEntry{
+ res = backup.ManifestEntry{
Manifest: snap,
Reasons: reasons,
}
@@ -954,12 +956,12 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
return res
}
- base := runAndTestBackup(baseBackupCase, ManifestEntry{})
+ base := runAndTestBackup(baseBackupCase, backup.ManifestEntry{})
table := []testCase{
{
name: "Kopia Assist And Merge All Files Changed",
- baseBackups: func(base ManifestEntry) BackupBases {
+ baseBackups: func(base backup.ManifestEntry) backup.BackupBases {
return NewMockBackupBases().WithMergeBases(base)
},
collections: collections,
@@ -973,7 +975,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
},
{
name: "Kopia Assist And Merge No Files Changed",
- baseBackups: func(base ManifestEntry) BackupBases {
+ baseBackups: func(base backup.ManifestEntry) backup.BackupBases {
return NewMockBackupBases().WithMergeBases(base)
},
// Pass in empty collections to force a backup. Otherwise we'll skip
@@ -995,7 +997,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
},
{
name: "Kopia Assist Only",
- baseBackups: func(base ManifestEntry) BackupBases {
+ baseBackups: func(base backup.ManifestEntry) backup.BackupBases {
return NewMockBackupBases().WithAssistBases(base)
},
collections: collections,
@@ -1008,7 +1010,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
},
{
name: "Merge Only",
- baseBackups: func(base ManifestEntry) BackupBases {
+ baseBackups: func(base backup.ManifestEntry) backup.BackupBases {
return NewMockBackupBases().WithMergeBases(base).MockDisableAssistBases()
},
// Pass in empty collections to force a backup. Otherwise we'll skip
@@ -1028,7 +1030,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections() {
},
{
name: "Content Hash Only",
- baseBackups: func(base ManifestEntry) BackupBases {
+ baseBackups: func(base backup.ManifestEntry) backup.BackupBases {
return NewMockBackupBases()
},
collections: collections,
@@ -1077,7 +1079,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
}
reasons := []identity.Reasoner{
- NewReason(
+ identity.NewReason(
testTenant,
storePath.ProtectedResource(),
storePath.Service(),
@@ -1231,7 +1233,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_NoDetailsForMeta() {
require.NoError(t, err, clues.ToCore(err))
prevSnaps.WithMergeBases(
- ManifestEntry{
+ backup.ManifestEntry{
Manifest: snap,
Reasons: reasons,
})
@@ -1253,7 +1255,7 @@ func (suite *KopiaIntegrationSuite) TestRestoreAfterCompressionChange() {
w := &Wrapper{k}
- r := NewReason(testTenant, testUser, path.ExchangeService, path.EmailCategory)
+ r := identity.NewReason(testTenant, testUser, path.ExchangeService, path.EmailCategory)
dc1 := exchMock.NewCollection(suite.storePath1, suite.locPath1, 1)
dc2 := exchMock.NewCollection(suite.storePath2, suite.locPath2, 1)
@@ -1303,7 +1305,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
loc1 := path.Builder{}.Append(suite.storePath1.Folders()...)
loc2 := path.Builder{}.Append(suite.storePath2.Folders()...)
- r := NewReason(testTenant, testUser, path.ExchangeService, path.EmailCategory)
+ r := identity.NewReason(testTenant, testUser, path.ExchangeService, path.EmailCategory)
collections := []data.BackupCollection{
&dataMock.Collection{
@@ -1350,7 +1352,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
},
}
- errs := fault.New(true)
+ bus := fault.New(true)
stats, deets, _, err := suite.w.ConsumeBackupCollections(
suite.ctx,
@@ -1360,13 +1362,13 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
nil,
nil,
true,
- errs)
+ bus)
require.Error(t, err, clues.ToCore(err))
assert.Equal(t, 0, stats.ErrorCount, "error count")
assert.Equal(t, 5, stats.TotalFileCount, "total files")
assert.Equal(t, 6, stats.TotalDirectoryCount, "total directories")
assert.Equal(t, 0, stats.IgnoredErrorCount, "ignored errors")
- assert.Equal(t, 1, len(errs.Errors().Recovered), "recovered errors")
+ assert.Equal(t, 1, len(bus.Errors().Recovered), "recovered errors")
assert.False(t, stats.Incomplete, "incomplete")
// 5 file and 2 folder entries.
assert.Len(t, deets.Details().Entries, 5+2)
@@ -1386,8 +1388,8 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
require.Len(t, dcs, 1, "number of restore collections")
- errs = fault.New(true)
- items := dcs[0].Items(suite.ctx, errs)
+ bus = fault.New(true)
+ items := dcs[0].Items(suite.ctx, bus)
// Get all the items from channel
//nolint:revive
@@ -1397,7 +1399,7 @@ func (suite *KopiaIntegrationSuite) TestBackupCollections_ReaderError() {
// Files that had an error shouldn't make a dir entry in kopia. If they do we
// may run into kopia-assisted incrementals issues because only mod time and
// not file size is checked for StreamingFiles.
- assert.ErrorIs(t, errs.Failure(), data.ErrNotFound, "errored file is restorable", clues.ToCore(err))
+ assert.ErrorIs(t, bus.Failure(), errs.NotFound, "errored file is restorable", clues.ToCore(err))
}
type backedupFile struct {
@@ -1585,7 +1587,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) SetupTest() {
dataMock.NewVersionedBackupCollection(t, collection))
}
- r := NewReason(testTenant, testUser, path.ExchangeService, path.EmailCategory)
+ r := identity.NewReason(testTenant, testUser, path.ExchangeService, path.EmailCategory)
// Other tests check basic things about deets so not doing that again here.
stats, _, _, err := suite.w.ConsumeBackupCollections(
@@ -1622,7 +1624,7 @@ func (c *i64counter) Count(i int64) {
}
func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
- r := NewReason(testTenant, testUser, path.ExchangeService, path.EmailCategory)
+ r := identity.NewReason(testTenant, testUser, path.ExchangeService, path.EmailCategory)
man, err := suite.w.c.LoadSnapshot(suite.ctx, suite.snapshotID)
require.NoError(suite.T(), err, "getting base snapshot: %v", clues.ToCore(err))
@@ -1719,7 +1721,7 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
suite.ctx,
[]identity.Reasoner{r},
NewMockBackupBases().WithMergeBases(
- ManifestEntry{
+ backup.ManifestEntry{
Manifest: man,
Reasons: []identity.Reasoner{r},
}),
@@ -1750,15 +1752,15 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestBackupExcludeItem() {
assert.NoError(t, err, "errors producing collection", clues.ToCore(err))
require.Len(t, dcs, 1, "unexpected number of restore collections")
- errs := fault.New(true)
- items := dcs[0].Items(suite.ctx, errs)
+ bus := fault.New(true)
+ items := dcs[0].Items(suite.ctx, bus)
// Get all the items from channel
//nolint:revive
for range items {
}
- test.restoreCheck(t, errs.Failure(), errs)
+ test.restoreCheck(t, bus.Failure(), bus)
})
}
}
@@ -1871,19 +1873,19 @@ func (suite *KopiaSimpleRepoIntegrationSuite) TestProduceRestoreCollections() {
return
}
- errs := fault.New(true)
+ bus := fault.New(true)
for _, dc := range result {
// Get all the items from channel
- items := dc.Items(suite.ctx, errs)
+ items := dc.Items(suite.ctx, bus)
//nolint:revive
for range items {
}
}
- test.expectedErr(t, errs.Failure(), errs.Failure(), "getting items")
- if errs.Failure() != nil {
+ test.expectedErr(t, bus.Failure(), bus.Failure(), "getting items")
+ if bus.Failure() != nil {
return
}
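The TestBackupCollections table exercises four incremental modes by varying which bases kopia may use. Inside the kopia package tests, the cases reduce to these builder calls, all of which appear verbatim in this diff (base is a backup.ManifestEntry captured from the prior run; the comments quote the table's own case names):

    // "Kopia Assist And Merge ..."
    NewMockBackupBases().WithMergeBases(base)

    // "Kopia Assist Only"
    NewMockBackupBases().WithAssistBases(base)

    // "Merge Only"
    NewMockBackupBases().WithMergeBases(base).MockDisableAssistBases()

    // "Uncached" / "Content Hash Only"
    NewMockBackupBases()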


@@ -8,7 +8,6 @@ import (
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/diagnostics"
- "github.com/alcionai/corso/src/internal/kopia"
kinject "github.com/alcionai/corso/src/internal/kopia/inject"
"github.com/alcionai/corso/src/internal/m365/graph"
"github.com/alcionai/corso/src/internal/m365/service/exchange"
@@ -16,6 +15,7 @@ import (
"github.com/alcionai/corso/src/internal/m365/service/onedrive"
"github.com/alcionai/corso/src/internal/m365/service/sharepoint"
"github.com/alcionai/corso/src/internal/operations/inject"
+ "github.com/alcionai/corso/src/pkg/backup"
bupMD "github.com/alcionai/corso/src/pkg/backup/metadata"
"github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/filters"
@@ -176,7 +176,7 @@ func verifyBackupInputs(sels selectors.Selector, cachedIDs []string) error {
func (ctrl *Controller) GetMetadataPaths(
ctx context.Context,
r kinject.RestoreProducer,
- man kopia.ManifestEntry,
+ man backup.ManifestEntry,
errs *fault.Bus,
) ([]path.RestorePaths, error) {
var (
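This controller and the groups stub below make the same swap: man's type changes from kopia.ManifestEntry to backup.ManifestEntry, which is why the internal/kopia import disappears. The payoff of the move is that consumers can now name the type without importing internal/kopia itself. A hedged sketch of a consumer-side declaration (the interface name is invented; the method signature is copied from this diff):

    package example

    import (
    	"context"

    	kinject "github.com/alcionai/corso/src/internal/kopia/inject"
    	"github.com/alcionai/corso/src/pkg/backup"
    	"github.com/alcionai/corso/src/pkg/fault"
    	"github.com/alcionai/corso/src/pkg/path"
    )

    // MetadataPather is a hypothetical interface matching
    // Controller.GetMetadataPaths as changed in this diff.
    type MetadataPather interface {
    	GetMetadataPaths(
    		ctx context.Context,
    		r kinject.RestoreProducer,
    		man backup.ManifestEntry,
    		errs *fault.Bus,
    	) ([]path.RestorePaths, error)
    }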


@@ -16,6 +16,7 @@ import (
"golang.org/x/exp/maps"
"golang.org/x/exp/slices"
+ "github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/common/ptr"
"github.com/alcionai/corso/src/internal/common/readers"
"github.com/alcionai/corso/src/internal/data"
@@ -276,7 +277,7 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items() {
suite.Run(test.name, func() {
var (
t = suite.T()
- errs = fault.New(true)
+ bus = fault.New(true)
itemCount int
)
@@ -297,7 +298,7 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items() {
false,
statusUpdater)
- for item := range col.Items(ctx, errs) {
+ for item := range col.Items(ctx, bus) {
itemCount++
_, rok := test.removed[item.ID()]
@@ -316,7 +317,7 @@ func (suite *CollectionUnitSuite) TestPrefetchCollection_Items() {
assert.True(t, aok || rok, "item must be either added or removed: %q", item.ID())
}
- assert.NoError(t, errs.Failure())
+ assert.NoError(t, bus.Failure())
assert.Equal(
t,
test.expectItemCount,
@@ -336,10 +337,10 @@ func (mlg *mockLazyItemGetterSerializer) GetItem(
user string,
itemID string,
immutableIDs bool,
- errs *fault.Bus,
+ bus *fault.Bus,
) (serialization.Parsable, *details.ExchangeInfo, error) {
mlg.callIDs = append(mlg.callIDs, itemID)
- return mlg.ItemGetSerialize.GetItem(ctx, user, itemID, immutableIDs, errs)
+ return mlg.ItemGetSerialize.GetItem(ctx, user, itemID, immutableIDs, bus)
}
func (mlg *mockLazyItemGetterSerializer) check(t *testing.T, expectIDs []string) {
@@ -409,7 +410,7 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
suite.Run(test.name, func() {
var (
t = suite.T()
- errs = fault.New(true)
+ bus = fault.New(true)
itemCount int
)
@@ -435,7 +436,7 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
true,
statusUpdater)
- for item := range col.Items(ctx, errs) {
+ for item := range col.Items(ctx, bus) {
itemCount++
_, rok := test.removed[item.ID()]
@@ -478,7 +479,7 @@ func (suite *CollectionUnitSuite) TestLazyFetchCollection_Items_LazyFetch() {
assert.True(t, aok || rok, "item must be either added or removed: %q", item.ID())
}
- assert.NoError(t, errs.Failure())
+ assert.NoError(t, bus.Failure())
assert.Equal(
t,
test.expectItemCount,
@@ -625,7 +626,7 @@ func (suite *CollectionUnitSuite) TestLazyItem_ReturnsEmptyReaderOnDeletedInFlig
assert.Empty(t, readData, "read item data")
_, err = li.Info()
- assert.ErrorIs(t, err, data.ErrNotFound, "Info() error")
+ assert.ErrorIs(t, err, errs.NotFound, "Info() error")
}
func (suite *CollectionUnitSuite) TestLazyItem() {
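The deleted-in-flight test above pins the contract that pairs with the FinishedFile change earlier in this diff: when an item vanishes between enumeration and download, Info() reports errs.NotFound and consumers drop the item rather than failing the backup. A minimal sketch of that contract (the lazyItem type here is invented; only the sentinel check mirrors the real code):

    package main

    import (
    	"errors"
    	"fmt"
    )

    // NotFound stands in for internal/common/errs.NotFound.
    var NotFound = errors.New("not found")

    // lazyItem is a hypothetical stand-in for the collection's lazy item.
    type lazyItem struct{ deletedInFlight bool }

    func (li lazyItem) Info() (string, error) {
    	if li.deletedInFlight {
    		return "", fmt.Errorf("item deleted in flight: %w", NotFound)
    	}

    	return "item info", nil
    }

    func main() {
    	if _, err := (lazyItem{deletedInFlight: true}).Info(); errors.Is(err, NotFound) {
    		// Mirrors FinishedFile: NotFound means "skip the item's details",
    		// not "fail the backup".
    		fmt.Println("skipping details for deleted item")
    	}
    }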


@@ -8,9 +8,9 @@ import (
"github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/data"
- "github.com/alcionai/corso/src/internal/kopia"
kinject "github.com/alcionai/corso/src/internal/kopia/inject"
"github.com/alcionai/corso/src/internal/operations/inject"
+ "github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/count"
@@ -53,7 +53,7 @@ func (ctrl Controller) ProduceBackupCollections(
func (ctrl *Controller) GetMetadataPaths(
ctx context.Context,
r kinject.RestoreProducer,
- man kopia.ManifestEntry,
+ man backup.ManifestEntry,
errs *fault.Bus,
) ([]path.RestorePaths, error) {
return nil, clues.New("not implemented")


@@ -85,14 +85,14 @@ func (suite *GroupsBackupUnitSuite) TestMetadataFiles() {
}{
{
name: "error",
- reason: kopia.NewReason("tenant", "user", path.GroupsService, path.LibrariesCategory),
+ reason: identity.NewReason("tenant", "user", path.GroupsService, path.LibrariesCategory),
manID: "manifestID",
r: mockRestoreProducer{err: assert.AnError},
expectErr: require.Error,
},
{
name: "single site",
- reason: kopia.NewReason("tenant", "user", path.GroupsService, path.LibrariesCategory),
+ reason: identity.NewReason("tenant", "user", path.GroupsService, path.LibrariesCategory),
manID: "manifestID",
r: mockRestoreProducer{
rc: []data.RestoreCollection{
@@ -108,7 +108,7 @@ func (suite *GroupsBackupUnitSuite) TestMetadataFiles() {
},
{
name: "multiple sites",
- reason: kopia.NewReason("tenant", "user", path.GroupsService, path.LibrariesCategory),
+ reason: identity.NewReason("tenant", "user", path.GroupsService, path.LibrariesCategory),
manID: "manifestID",
r: mockRestoreProducer{
rc: []data.RestoreCollection{


@@ -278,7 +278,7 @@ func (op *BackupOperation) Run(ctx context.Context) (err error) {
observe.Message(ctx, "Backing Up", observe.Bullet, clues.Hide(op.ResourceOwner.Name()))
- deets, err := op.do(
+ reasons, bases, deets, err := op.do(
ctx,
&opStats,
sstore,
@@ -304,6 +304,8 @@ func (op *BackupOperation) Run(ctx context.Context) (err error) {
err = op.createBackupModels(
ctx,
sstore,
+ reasons,
+ bases,
opStats,
op.Results.BackupID,
op.BackupVersion,
@@ -329,17 +331,17 @@ func (op *BackupOperation) do(
opStats *backupStats,
detailsStore streamstore.Streamer,
backupID model.StableID,
- ) (*details.Builder, error) {
+ ) ([]identity.Reasoner, backup.BackupBases, *details.Builder, error) {
lastBackupVersion := version.NoBackup
reasons, err := op.Selectors.Reasons(op.account.ID(), false)
if err != nil {
- return nil, clues.Wrap(err, "getting reasons")
+ return nil, nil, nil, clues.Wrap(err, "getting reasons")
}
fallbackReasons, err := makeFallbackReasons(op.account.ID(), op.Selectors)
if err != nil {
- return nil, clues.Wrap(err, "getting fallback reasons")
+ return nil, nil, nil, clues.Wrap(err, "getting fallback reasons")
}
logger.Ctx(ctx).With(
@@ -352,10 +354,10 @@ func (op *BackupOperation) do(
kbf, err := op.kopia.NewBaseFinder(op.store)
if err != nil {
- return nil, clues.Stack(err)
+ return nil, nil, nil, clues.Stack(err)
}
- mans, mdColls, canUseMetadata, err := produceManifestsAndMetadata(
+ bases, mdColls, canUseMetadata, err := produceManifestsAndMetadata(
ctx,
kbf,
op.bp,
@@ -365,17 +367,17 @@ func (op *BackupOperation) do(
op.incremental,
op.disableAssistBackup)
if err != nil {
- return nil, clues.Wrap(err, "producing manifests and metadata")
+ return nil, nil, nil, clues.Wrap(err, "producing manifests and metadata")
}
ctx = clues.Add(
ctx,
"can_use_metadata", canUseMetadata,
- "assist_bases", len(mans.UniqueAssistBases()),
- "merge_bases", len(mans.MergeBases()))
+ "assist_bases", len(bases.UniqueAssistBases()),
+ "merge_bases", len(bases.MergeBases()))
if canUseMetadata {
- lastBackupVersion = mans.MinBackupVersion()
+ lastBackupVersion = bases.MinBackupVersion()
}
// TODO(ashmrtn): This should probably just return a collection that deletes
@@ -392,7 +394,7 @@ func (op *BackupOperation) do(
op.Options,
op.Errors)
if err != nil {
- return nil, clues.Wrap(err, "producing backup data collections")
+ return nil, nil, nil, clues.Wrap(err, "producing backup data collections")
}
ctx = clues.Add(
@@ -405,14 +407,14 @@ func (op *BackupOperation) do(
op.kopia,
op.account.ID(),
reasons,
- mans,
+ bases,
cs,
ssmb,
backupID,
op.incremental && canUseMetadata && canUsePreviousBackup,
op.Errors)
if err != nil {
- return nil, clues.Wrap(err, "persisting collection backups")
+ return nil, nil, nil, clues.Wrap(err, "persisting collection backups")
}
opStats.hasNewDetailEntries = (deets != nil && !deets.Empty()) ||
@@ -422,21 +424,21 @@ func (op *BackupOperation) do(
err = mergeDetails(
ctx,
detailsStore,
- mans,
+ bases,
toMerge,
deets,
writeStats,
op.Selectors.PathService(),
op.Errors)
if err != nil {
- return nil, clues.Wrap(err, "merging details")
+ return nil, nil, nil, clues.Wrap(err, "merging details")
}
opStats.ctrl = op.bp.Wait()
logger.Ctx(ctx).Debug(opStats.ctrl)
- return deets, nil
+ return reasons, bases, deets, nil
}
func makeFallbackReasons(tenant string, sel selectors.Selector) ([]identity.Reasoner, error) {
@@ -494,7 +496,7 @@ func consumeBackupCollections(
bc kinject.BackupConsumer,
tenantID string,
reasons []identity.Reasoner,
- bbs kopia.BackupBases,
+ bbs backup.BackupBases,
cs []data.BackupCollection,
pmr prefixmatcher.StringSetReader,
backupID model.StableID,
@@ -598,7 +600,7 @@ func getNewPathRefs(
func mergeItemsFromBase(
ctx context.Context,
checkReason bool,
- baseBackup kopia.BackupEntry,
+ baseBackup backup.BackupEntry,
detailsStore streamstore.Streamer,
dataFromBackup kopia.DetailsMergeInfoer,
deets *details.Builder,
@@ -702,7 +704,7 @@ func mergeItemsFromBase(
func mergeDetails(
ctx context.Context,
detailsStore streamstore.Streamer,
- bases kopia.BackupBases,
+ bases backup.BackupBases,
dataFromBackup kopia.DetailsMergeInfoer,
deets *details.Builder,
writeStats *kopia.BackupStats,
@@ -843,6 +845,8 @@ func (op *BackupOperation) persistResults(
func (op *BackupOperation) createBackupModels(
ctx context.Context,
sscw streamstore.CollectorWriter,
+ reasons []identity.Reasoner,
+ bases backup.BackupBases,
opStats backupStats,
backupID model.StableID,
backupVersion int,
@@ -928,6 +932,8 @@ func (op *BackupOperation) createBackupModels(
op.ResourceOwner.Name(),
op.Results.ReadWrites,
op.Results.StartAndEndTime,
+ reasons,
+ bases,
op.Errors.Errors(),
tags)
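The widened return from do() is the load-bearing change here: Run now threads the reasons that scoped the backup and the merge/assist bases that fed it into createBackupModels, so that information can be persisted with the backup model. A shape-only sketch of the plumbing, with stand-in types (only the data-flow pattern comes from this diff):

    package main

    import "fmt"

    // reasoner and backupBases are stand-ins for identity.Reasoner and
    // backup.BackupBases.
    type reasoner string

    type backupBases []string

    func do() ([]reasoner, backupBases, string, error) {
    	// ...select bases, produce and persist collections, merge details...
    	return []reasoner{"tenant/user/exchange/email"}, backupBases{"merge:snap1"}, "details", nil
    }

    func createBackupModels(reasons []reasoner, bases backupBases, deets string) {
    	// Lineage inputs (reasons + bases) now ride along with the usual
    	// stats and details references when the model is written.
    	fmt.Println(reasons, bases, deets)
    }

    func main() {
    	reasons, bases, deets, err := do()
    	if err != nil {
    		return
    	}

    	createBackupModels(reasons, bases, deets)
    }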


@ -121,7 +121,7 @@ func checkPaths(t *testing.T, expected, got []path.Path) {
type mockBackupConsumer struct { type mockBackupConsumer struct {
checkFunc func( checkFunc func(
backupReasons []identity.Reasoner, backupReasons []identity.Reasoner,
bases kopia.BackupBases, bases backup.BackupBases,
cs []data.BackupCollection, cs []data.BackupCollection,
tags map[string]string, tags map[string]string,
buildTreeWithBase bool) buildTreeWithBase bool)
@ -130,7 +130,7 @@ type mockBackupConsumer struct {
func (mbu mockBackupConsumer) ConsumeBackupCollections( func (mbu mockBackupConsumer) ConsumeBackupCollections(
ctx context.Context, ctx context.Context,
backupReasons []identity.Reasoner, backupReasons []identity.Reasoner,
bases kopia.BackupBases, bases backup.BackupBases,
cs []data.BackupCollection, cs []data.BackupCollection,
excluded prefixmatcher.StringSetReader, excluded prefixmatcher.StringSetReader,
tags map[string]string, tags map[string]string,
@ -453,12 +453,12 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_ConsumeBackupDataCollections
tenant = "a-tenant" tenant = "a-tenant"
resourceOwner = "a-user" resourceOwner = "a-user"
emailReason = kopia.NewReason( emailReason = identity.NewReason(
tenant, tenant,
resourceOwner, resourceOwner,
path.ExchangeService, path.ExchangeService,
path.EmailCategory) path.EmailCategory)
contactsReason = kopia.NewReason( contactsReason = identity.NewReason(
tenant, tenant,
resourceOwner, resourceOwner,
path.ExchangeService, path.ExchangeService,
@ -477,13 +477,13 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_ConsumeBackupDataCollections
} }
bases = kopia.NewMockBackupBases().WithMergeBases( bases = kopia.NewMockBackupBases().WithMergeBases(
kopia.ManifestEntry{ backup.ManifestEntry{
Manifest: manifest1, Manifest: manifest1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
emailReason, emailReason,
}, },
}).WithAssistBases( }).WithAssistBases(
kopia.ManifestEntry{ backup.ManifestEntry{
Manifest: manifest2, Manifest: manifest2,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
contactsReason, contactsReason,
@ -500,7 +500,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_ConsumeBackupDataCollections
mbu := &mockBackupConsumer{ mbu := &mockBackupConsumer{
checkFunc: func( checkFunc: func(
backupReasons []identity.Reasoner, backupReasons []identity.Reasoner,
gotBases kopia.BackupBases, gotBases backup.BackupBases,
cs []data.BackupCollection, cs []data.BackupCollection,
gotTags map[string]string, gotTags map[string]string,
buildTreeWithBase bool, buildTreeWithBase bool,
@ -590,12 +590,12 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
DetailsID: "did2", DetailsID: "did2",
} }
pathReason1 = kopia.NewReason( pathReason1 = identity.NewReason(
"", "",
itemPath1.ProtectedResource(), itemPath1.ProtectedResource(),
itemPath1.Service(), itemPath1.Service(),
itemPath1.Category()) itemPath1.Category())
pathReason3 = kopia.NewReason( pathReason3 = identity.NewReason(
"", "",
itemPath3.ProtectedResource(), itemPath3.ProtectedResource(),
itemPath3.Service(), itemPath3.Service(),
@ -616,7 +616,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
}, },
true) true)
exchangeLocationPath1 = path.Builder{}.Append("work-display-name") exchangeLocationPath1 = path.Builder{}.Append("work-display-name")
exchangePathReason1 = kopia.NewReason( exchangePathReason1 = identity.NewReason(
"", "",
exchangeItemPath1.ProtectedResource(), exchangeItemPath1.ProtectedResource(),
exchangeItemPath1.Service(), exchangeItemPath1.Service(),
@ -631,8 +631,8 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
table := []struct { table := []struct {
name string name string
populatedDetails map[string]*details.Details populatedDetails map[string]*details.Details
inputBackups []kopia.BackupEntry inputBackups []backup.BackupEntry
inputAssistBackups []kopia.BackupEntry inputAssistBackups []backup.BackupEntry
mdm *mockDetailsMergeInfoer mdm *mockDetailsMergeInfoer
errCheck assert.ErrorAssertionFunc errCheck assert.ErrorAssertionFunc
@@ -659,7 +659,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []backup.BackupEntry{
{ {
Backup: &backup.Backup{ Backup: &backup.Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
@@ -683,7 +683,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []backup.BackupEntry{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@@ -710,7 +710,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []backup.BackupEntry{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@@ -766,7 +766,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []backup.BackupEntry{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@@ -793,7 +793,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []backup.BackupEntry{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@@ -823,7 +823,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []backup.BackupEntry{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@@ -853,7 +853,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []backup.BackupEntry{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@@ -883,7 +883,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []backup.BackupEntry{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@@ -914,7 +914,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []backup.BackupEntry{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@@ -945,7 +945,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []backup.BackupEntry{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@@ -993,7 +993,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []backup.BackupEntry{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@@ -1002,7 +1002,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
}, },
}, },
}, },
inputAssistBackups: []kopia.BackupEntry{ inputAssistBackups: []backup.BackupEntry{
{Backup: &backup2}, {Backup: &backup2},
}, },
populatedDetails: map[string]*details.Details{ populatedDetails: map[string]*details.Details{
@@ -1037,7 +1037,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputBackups: []kopia.BackupEntry{ inputBackups: []backup.BackupEntry{
{ {
Backup: &backup1, Backup: &backup1,
Reasons: []identity.Reasoner{ Reasons: []identity.Reasoner{
@@ -1045,7 +1045,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
}, },
}, },
}, },
inputAssistBackups: []kopia.BackupEntry{ inputAssistBackups: []backup.BackupEntry{
{Backup: &backup2}, {Backup: &backup2},
}, },
populatedDetails: map[string]*details.Details{ populatedDetails: map[string]*details.Details{
@@ -1077,7 +1077,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputAssistBackups: []kopia.BackupEntry{ inputAssistBackups: []backup.BackupEntry{
{Backup: &backup1}, {Backup: &backup1},
{Backup: &backup2}, {Backup: &backup2},
}, },
@@ -1110,7 +1110,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputAssistBackups: []kopia.BackupEntry{ inputAssistBackups: []backup.BackupEntry{
{Backup: &backup1}, {Backup: &backup1},
{Backup: &backup2}, {Backup: &backup2},
}, },
@@ -1143,7 +1143,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
return res return res
}(), }(),
inputAssistBackups: []kopia.BackupEntry{ inputAssistBackups: []backup.BackupEntry{
{Backup: &backup1}, {Backup: &backup1},
{Backup: &backup2}, {Backup: &backup2},
}, },
@@ -1173,7 +1173,7 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsItems
mdm: func() *mockDetailsMergeInfoer { mdm: func() *mockDetailsMergeInfoer {
return newMockDetailsMergeInfoer() return newMockDetailsMergeInfoer()
}(), }(),
inputAssistBackups: []kopia.BackupEntry{ inputAssistBackups: []backup.BackupEntry{
{Backup: &backup1}, {Backup: &backup1},
}, },
populatedDetails: map[string]*details.Details{ populatedDetails: map[string]*details.Details{
@@ -1267,13 +1267,13 @@ func (suite *BackupOpUnitSuite) TestBackupOperation_MergeBackupDetails_AddsFolde
locPath1 = path.Builder{}.Append(itemPath1.Folders()...) locPath1 = path.Builder{}.Append(itemPath1.Folders()...)
pathReason1 = kopia.NewReason( pathReason1 = identity.NewReason(
"", "",
itemPath1.ProtectedResource(), itemPath1.ProtectedResource(),
itemPath1.Service(), itemPath1.Service(),
itemPath1.Category()) itemPath1.Category())
backup1 = kopia.BackupEntry{ backup1 = backup.BackupEntry{
Backup: &backup.Backup{ Backup: &backup.Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
ID: "bid1", ID: "bid1",

View File

@@ -6,8 +6,8 @@ import (
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/common/prefixmatcher" "github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/kopia/inject" "github.com/alcionai/corso/src/internal/kopia/inject"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
"github.com/alcionai/corso/src/pkg/control/repository" "github.com/alcionai/corso/src/pkg/control/repository"
@@ -37,7 +37,7 @@ type (
GetMetadataPaths( GetMetadataPaths(
ctx context.Context, ctx context.Context,
r inject.RestoreProducer, r inject.RestoreProducer,
man kopia.ManifestEntry, man backup.ManifestEntry,
errs *fault.Bus, errs *fault.Bus,
) ([]path.RestorePaths, error) ) ([]path.RestorePaths, error)

View File

@@ -7,10 +7,10 @@ import (
"github.com/alcionai/corso/src/internal/common/prefixmatcher" "github.com/alcionai/corso/src/internal/common/prefixmatcher"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/kopia"
kinject "github.com/alcionai/corso/src/internal/kopia/inject" kinject "github.com/alcionai/corso/src/internal/kopia/inject"
"github.com/alcionai/corso/src/internal/m365" "github.com/alcionai/corso/src/internal/m365"
"github.com/alcionai/corso/src/internal/operations/inject" "github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path" "github.com/alcionai/corso/src/pkg/path"
) )
@@ -62,7 +62,7 @@ func (mbp *mockBackupProducer) Wait() *data.CollectionStats {
func (mbp mockBackupProducer) GetMetadataPaths( func (mbp mockBackupProducer) GetMetadataPaths(
ctx context.Context, ctx context.Context,
r kinject.RestoreProducer, r kinject.RestoreProducer,
man kopia.ManifestEntry, man backup.ManifestEntry,
errs *fault.Bus, errs *fault.Bus,
) ([]path.RestorePaths, error) { ) ([]path.RestorePaths, error) {
ctrl := m365.Controller{} ctrl := m365.Controller{}

View File

@@ -6,10 +6,12 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/kopia" "github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/kopia/inject" "github.com/alcionai/corso/src/internal/kopia/inject"
oinject "github.com/alcionai/corso/src/internal/operations/inject" oinject "github.com/alcionai/corso/src/internal/operations/inject"
"github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/identity" "github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
@@ -23,7 +25,7 @@ func produceManifestsAndMetadata(
reasons, fallbackReasons []identity.Reasoner, reasons, fallbackReasons []identity.Reasoner,
tenantID string, tenantID string,
getMetadata, dropAssistBases bool, getMetadata, dropAssistBases bool,
) (kopia.BackupBases, []data.RestoreCollection, bool, error) { ) (backup.BackupBases, []data.RestoreCollection, bool, error) {
bb, meta, useMergeBases, err := getManifestsAndMetadata( bb, meta, useMergeBases, err := getManifestsAndMetadata(
ctx, ctx,
bf, bf,
@@ -62,7 +64,7 @@ func getManifestsAndMetadata(
reasons, fallbackReasons []identity.Reasoner, reasons, fallbackReasons []identity.Reasoner,
tenantID string, tenantID string,
getMetadata bool, getMetadata bool,
) (kopia.BackupBases, []data.RestoreCollection, bool, error) { ) (backup.BackupBases, []data.RestoreCollection, bool, error) {
var ( var (
tags = map[string]string{kopia.TagBackupCategory: ""} tags = map[string]string{kopia.TagBackupCategory: ""}
collections []data.RestoreCollection collections []data.RestoreCollection
@@ -127,7 +129,7 @@ func getManifestsAndMetadata(
// should be safe to leave this manifest in the AssistBases set, though we // should be safe to leave this manifest in the AssistBases set, though we
// could remove it there too if we want to be conservative. That can be done // could remove it there too if we want to be conservative. That can be done
// by finding the manifest ID. // by finding the manifest ID.
if err != nil && !errors.Is(err, data.ErrNotFound) { if err != nil && !errors.Is(err, errs.NotFound) {
// prior metadata isn't guaranteed to exist. // prior metadata isn't guaranteed to exist.
// if it doesn't, we'll just have to do a // if it doesn't, we'll just have to do a
// full backup for that data. // full backup for that data.
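The check above now targets the relocated sentinel. A minimal sketch of the pattern, with notFound and fetchMetadata as hypothetical stand-ins for errs.NotFound and the real metadata lookup:

package main

import (
	"errors"
	"fmt"
)

// notFound stands in for the sentinel that moved to internal/common/errs.
var notFound = errors.New("not found")

// fetchMetadata is a hypothetical stand-in for the real metadata fetch.
func fetchMetadata(backupID string) error {
	return fmt.Errorf("looking up %s: %w", backupID, notFound)
}

func main() {
	// Prior metadata isn't guaranteed to exist, so a missing-entry error is
	// tolerated; any other error aborts the incremental path.
	if err := fetchMetadata("base-1"); err != nil && !errors.Is(err, notFound) {
		fmt.Println("fatal:", err)
		return
	}

	fmt.Println("fall back to a full backup for that data")
}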

View File

@@ -50,14 +50,14 @@ type mockBackupFinder struct {
// beyond that and results are returned for the union of the reasons anyway. // beyond that and results are returned for the union of the reasons anyway.
// This does assume that the return data is properly constructed to return a // This does assume that the return data is properly constructed to return a
// union of the reasons etc. // union of the reasons etc.
data map[string]kopia.BackupBases data map[string]backup.BackupBases
} }
func (bf *mockBackupFinder) FindBases( func (bf *mockBackupFinder) FindBases(
_ context.Context, _ context.Context,
reasons []identity.Reasoner, reasons []identity.Reasoner,
_ map[string]string, _ map[string]string,
) kopia.BackupBases { ) backup.BackupBases {
if len(reasons) == 0 { if len(reasons) == 0 {
return kopia.NewMockBackupBases() return kopia.NewMockBackupBases()
} }
@@ -163,7 +163,7 @@ func (suite *OperationsManifestsUnitSuite) TestGetMetadataPaths() {
name: "single reason", name: "single reason",
manID: "single", manID: "single",
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
kopia.NewReason(tid, ro, path.ExchangeService, path.EmailCategory), identity.NewReason(tid, ro, path.ExchangeService, path.EmailCategory),
}, },
preFetchPaths: []string{}, preFetchPaths: []string{},
expectPaths: func(t *testing.T, files []string) []path.Path { expectPaths: func(t *testing.T, files []string) []path.Path {
@@ -183,8 +183,8 @@ func (suite *OperationsManifestsUnitSuite) TestGetMetadataPaths() {
name: "multiple reasons", name: "multiple reasons",
manID: "multi", manID: "multi",
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
kopia.NewReason(tid, ro, path.ExchangeService, path.EmailCategory), identity.NewReason(tid, ro, path.ExchangeService, path.EmailCategory),
kopia.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory), identity.NewReason(tid, ro, path.ExchangeService, path.ContactsCategory),
}, },
preFetchPaths: []string{}, preFetchPaths: []string{},
expectPaths: func(t *testing.T, files []string) []path.Path { expectPaths: func(t *testing.T, files []string) []path.Path {
@@ -209,7 +209,7 @@ func (suite *OperationsManifestsUnitSuite) TestGetMetadataPaths() {
name: "single reason sp libraries", name: "single reason sp libraries",
manID: "single-sp-libraries", manID: "single-sp-libraries",
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
kopia.NewReason(tid, ro, path.SharePointService, path.LibrariesCategory), identity.NewReason(tid, ro, path.SharePointService, path.LibrariesCategory),
}, },
preFetchPaths: []string{}, preFetchPaths: []string{},
expectPaths: func(t *testing.T, files []string) []path.Path { expectPaths: func(t *testing.T, files []string) []path.Path {
@@ -229,7 +229,7 @@ func (suite *OperationsManifestsUnitSuite) TestGetMetadataPaths() {
name: "single reason groups messages", name: "single reason groups messages",
manID: "single-groups-messages", manID: "single-groups-messages",
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
kopia.NewReason(tid, ro, path.GroupsService, path.ChannelMessagesCategory), identity.NewReason(tid, ro, path.GroupsService, path.ChannelMessagesCategory),
}, },
preFetchPaths: []string{}, preFetchPaths: []string{},
expectPaths: func(t *testing.T, files []string) []path.Path { expectPaths: func(t *testing.T, files []string) []path.Path {
@@ -249,7 +249,7 @@ func (suite *OperationsManifestsUnitSuite) TestGetMetadataPaths() {
name: "single reason groups libraries", name: "single reason groups libraries",
manID: "single-groups-libraries", manID: "single-groups-libraries",
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
kopia.NewReason(tid, ro, path.GroupsService, path.LibrariesCategory), identity.NewReason(tid, ro, path.GroupsService, path.LibrariesCategory),
}, },
preFetchPaths: []string{"previouspath"}, preFetchPaths: []string{"previouspath"},
expectPaths: func(t *testing.T, files []string) []path.Path { expectPaths: func(t *testing.T, files []string) []path.Path {
@@ -290,7 +290,7 @@ func (suite *OperationsManifestsUnitSuite) TestGetMetadataPaths() {
mr := mockRestoreProducer{err: test.expectErr, colls: test.preFetchCollection} mr := mockRestoreProducer{err: test.expectErr, colls: test.preFetchCollection}
mr.buildRestoreFunc(t, test.manID, paths) mr.buildRestoreFunc(t, test.manID, paths)
man := kopia.ManifestEntry{ man := backup.ManifestEntry{
Manifest: &snapshot.Manifest{ID: manifest.ID(test.manID)}, Manifest: &snapshot.Manifest{ID: manifest.ID(test.manID)},
Reasons: test.reasons, Reasons: test.reasons,
} }
@@ -314,7 +314,7 @@ func buildReasons(
for _, cat := range cats { for _, cat := range cats {
reasons = append( reasons = append(
reasons, reasons,
kopia.NewReason(tenant, ro, service, cat)) identity.NewReason(tenant, ro, service, cat))
} }
return reasons return reasons
@@ -327,8 +327,8 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
did = "detailsid" did = "detailsid"
) )
makeMan := func(id, incmpl string, cats ...path.CategoryType) kopia.ManifestEntry { makeMan := func(id, incmpl string, cats ...path.CategoryType) backup.ManifestEntry {
return kopia.ManifestEntry{ return backup.ManifestEntry{
Manifest: &snapshot.Manifest{ Manifest: &snapshot.Manifest{
ID: manifest.ID(id), ID: manifest.ID(id),
IncompleteReason: incmpl, IncompleteReason: incmpl,
@@ -337,8 +337,8 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
} }
} }
makeBackup := func(snapID string, cats ...path.CategoryType) kopia.BackupEntry { makeBackup := func(snapID string, cats ...path.CategoryType) backup.BackupEntry {
return kopia.BackupEntry{ return backup.BackupEntry{
Backup: &backup.Backup{ Backup: &backup.Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
ID: model.StableID(snapID + "bup"), ID: model.StableID(snapID + "bup"),
@@ -361,7 +361,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
assertB assert.BoolAssertionFunc assertB assert.BoolAssertionFunc
expectDCS []mockColl expectDCS []mockColl
expectPaths func(t *testing.T, gotPaths []path.Path) expectPaths func(t *testing.T, gotPaths []path.Path)
expectMans kopia.BackupBases expectMans backup.BackupBases
}{ }{
{ {
name: "don't get metadata, no mans", name: "don't get metadata, no mans",
@@ -376,7 +376,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
{ {
name: "don't get metadata", name: "don't get metadata",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
ro: kopia.NewMockBackupBases(). ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)). WithMergeBases(makeMan("id1", "", path.EmailCategory)).
WithBackups(makeBackup("id1", path.EmailCategory)), WithBackups(makeBackup("id1", path.EmailCategory)),
@@ -384,7 +384,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
}, },
rp: mockRestoreProducer{}, rp: mockRestoreProducer{},
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
kopia.NewReason("", ro, path.ExchangeService, path.EmailCategory), identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
}, },
getMeta: false, getMeta: false,
assertErr: assert.NoError, assertErr: assert.NoError,
@@ -398,14 +398,14 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
{ {
name: "don't get metadata, incomplete manifest", name: "don't get metadata, incomplete manifest",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
ro: kopia.NewMockBackupBases().WithAssistBases( ro: kopia.NewMockBackupBases().WithAssistBases(
makeMan("id1", "checkpoint", path.EmailCategory)), makeMan("id1", "checkpoint", path.EmailCategory)),
}, },
}, },
rp: mockRestoreProducer{}, rp: mockRestoreProducer{},
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
kopia.NewReason("", ro, path.ExchangeService, path.EmailCategory), identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
}, },
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
@@ -420,7 +420,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
{ {
name: "one valid man, multiple reasons", name: "one valid man, multiple reasons",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases( ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory, path.ContactsCategory)), makeMan("id1", "", path.EmailCategory, path.ContactsCategory)),
}, },
@@ -431,8 +431,8 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
}, },
}, },
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
kopia.NewReason("", ro, path.ExchangeService, path.EmailCategory), identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
kopia.NewReason("", ro, path.ExchangeService, path.ContactsCategory), identity.NewReason("", ro, path.ExchangeService, path.ContactsCategory),
}, },
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
@@ -462,7 +462,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
{ {
name: "one valid man, extra incomplete man", name: "one valid man, extra incomplete man",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
ro: kopia.NewMockBackupBases(). ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)). WithMergeBases(makeMan("id1", "", path.EmailCategory)).
WithAssistBases(makeMan("id2", "checkpoint", path.EmailCategory)), WithAssistBases(makeMan("id2", "checkpoint", path.EmailCategory)),
@@ -475,7 +475,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
}, },
}, },
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
kopia.NewReason("", ro, path.ExchangeService, path.EmailCategory), identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
}, },
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
@@ -488,7 +488,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
{ {
name: "one valid man, extra incomplete man, drop assist bases", name: "one valid man, extra incomplete man, drop assist bases",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
ro: kopia.NewMockBackupBases(). ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)). WithMergeBases(makeMan("id1", "", path.EmailCategory)).
WithAssistBases(makeMan("id2", "checkpoint", path.EmailCategory)), WithAssistBases(makeMan("id2", "checkpoint", path.EmailCategory)),
@@ -501,7 +501,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
}, },
}, },
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
kopia.NewReason("", ro, path.ExchangeService, path.EmailCategory), identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
}, },
getMeta: true, getMeta: true,
dropAssist: true, dropAssist: true,
@@ -515,7 +515,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
{ {
name: "multiple valid mans", name: "multiple valid mans",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases( ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan("id1", "", path.EmailCategory), makeMan("id1", "", path.EmailCategory),
makeMan("id2", "", path.EmailCategory)), makeMan("id2", "", path.EmailCategory)),
@@ -528,7 +528,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
}, },
}, },
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
kopia.NewReason("", ro, path.ExchangeService, path.EmailCategory), identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
}, },
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
@@ -541,14 +541,14 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata() {
{ {
name: "error collecting metadata", name: "error collecting metadata",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
ro: kopia.NewMockBackupBases(). ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan("id1", "", path.EmailCategory)), WithMergeBases(makeMan("id1", "", path.EmailCategory)),
}, },
}, },
rp: mockRestoreProducer{err: assert.AnError}, rp: mockRestoreProducer{err: assert.AnError},
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
kopia.NewReason("", ro, path.ExchangeService, path.EmailCategory), identity.NewReason("", ro, path.ExchangeService, path.EmailCategory),
}, },
getMeta: true, getMeta: true,
assertErr: assert.Error, assertErr: assert.Error,
@@ -628,8 +628,8 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
did = "detailsid" did = "detailsid"
) )
makeMan := func(ro, id, incmpl string, cats ...path.CategoryType) kopia.ManifestEntry { makeMan := func(ro, id, incmpl string, cats ...path.CategoryType) backup.ManifestEntry {
return kopia.ManifestEntry{ return backup.ManifestEntry{
Manifest: &snapshot.Manifest{ Manifest: &snapshot.Manifest{
ID: manifest.ID(id), ID: manifest.ID(id),
IncompleteReason: incmpl, IncompleteReason: incmpl,
@@ -639,8 +639,8 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
} }
} }
makeBackup := func(ro, snapID string, cats ...path.CategoryType) kopia.BackupEntry { makeBackup := func(ro, snapID string, cats ...path.CategoryType) backup.BackupEntry {
return kopia.BackupEntry{ return backup.BackupEntry{
Backup: &backup.Backup{ Backup: &backup.Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
ID: model.StableID(snapID + "bup"), ID: model.StableID(snapID + "bup"),
@@ -652,13 +652,13 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
} }
} }
emailReason := kopia.NewReason( emailReason := identity.NewReason(
"", "",
ro, ro,
path.ExchangeService, path.ExchangeService,
path.EmailCategory) path.EmailCategory)
fbEmailReason := kopia.NewReason( fbEmailReason := identity.NewReason(
"", "",
fbro, fbro,
path.ExchangeService, path.ExchangeService,
@@ -675,12 +675,12 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
assertErr assert.ErrorAssertionFunc assertErr assert.ErrorAssertionFunc
assertB assert.BoolAssertionFunc assertB assert.BoolAssertionFunc
expectDCS []mockColl expectDCS []mockColl
expectMans kopia.BackupBases expectMans backup.BackupBases
}{ }{
{ {
name: "don't get metadata, only fallbacks", name: "don't get metadata, only fallbacks",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().
WithMergeBases(makeMan(fbro, "fb_id1", "", path.EmailCategory)). WithMergeBases(makeMan(fbro, "fb_id1", "", path.EmailCategory)).
WithBackups(makeBackup(fbro, "fb_id1", path.EmailCategory)), WithBackups(makeBackup(fbro, "fb_id1", path.EmailCategory)),
@@ -700,7 +700,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
{ {
name: "only fallbacks", name: "only fallbacks",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
fbro: kopia.NewMockBackupBases().WithMergeBases( fbro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(fbro, "fb_id1", "", path.EmailCategory)).WithBackups( makeMan(fbro, "fb_id1", "", path.EmailCategory)).WithBackups(
makeBackup(fbro, "fb_id1", path.EmailCategory)), makeBackup(fbro, "fb_id1", path.EmailCategory)),
@@ -723,7 +723,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
{ {
name: "only fallbacks, drop assist", name: "only fallbacks, drop assist",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().
WithMergeBases(makeMan(fbro, "fb_id1", "", path.EmailCategory)). WithMergeBases(makeMan(fbro, "fb_id1", "", path.EmailCategory)).
WithBackups(makeBackup(fbro, "fb_id1", path.EmailCategory)), WithBackups(makeBackup(fbro, "fb_id1", path.EmailCategory)),
@@ -748,7 +748,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
{ {
name: "complete mans and fallbacks", name: "complete mans and fallbacks",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
ro: kopia.NewMockBackupBases(). ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan(ro, "id1", "", path.EmailCategory)), WithMergeBases(makeMan(ro, "id1", "", path.EmailCategory)),
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().
@@ -774,7 +774,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
{ {
name: "incomplete mans and fallbacks", name: "incomplete mans and fallbacks",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
ro: kopia.NewMockBackupBases().WithAssistBases( ro: kopia.NewMockBackupBases().WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory)), makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
fbro: kopia.NewMockBackupBases().WithAssistBases( fbro: kopia.NewMockBackupBases().WithAssistBases(
@@ -799,7 +799,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
{ {
name: "complete and incomplete mans and fallbacks", name: "complete and incomplete mans and fallbacks",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
ro: kopia.NewMockBackupBases(). ro: kopia.NewMockBackupBases().
WithMergeBases(makeMan(ro, "id1", "", path.EmailCategory)). WithMergeBases(makeMan(ro, "id1", "", path.EmailCategory)).
WithAssistBases(makeMan(ro, "id2", "checkpoint", path.EmailCategory)), WithAssistBases(makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
@@ -830,7 +830,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
{ {
name: "incomplete mans and complete fallbacks", name: "incomplete mans and complete fallbacks",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
ro: kopia.NewMockBackupBases().WithAssistBases( ro: kopia.NewMockBackupBases().WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory)), makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().
@@ -858,7 +858,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
{ {
name: "incomplete mans and complete fallbacks, no assist bases", name: "incomplete mans and complete fallbacks, no assist bases",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
ro: kopia.NewMockBackupBases().WithAssistBases( ro: kopia.NewMockBackupBases().WithAssistBases(
makeMan(ro, "id2", "checkpoint", path.EmailCategory)), makeMan(ro, "id2", "checkpoint", path.EmailCategory)),
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().
@@ -887,7 +887,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
{ {
name: "complete mans and incomplete fallbacks", name: "complete mans and incomplete fallbacks",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases( ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory)), makeMan(ro, "id1", "", path.EmailCategory)),
fbro: kopia.NewMockBackupBases().WithAssistBases( fbro: kopia.NewMockBackupBases().WithAssistBases(
@@ -912,7 +912,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
{ {
name: "complete mans and complete fallbacks, multiple reasons", name: "complete mans and complete fallbacks, multiple reasons",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases( ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory, path.ContactsCategory)), makeMan(ro, "id1", "", path.EmailCategory, path.ContactsCategory)),
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().
@@ -928,11 +928,11 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
}, },
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
emailReason, emailReason,
kopia.NewReason("", ro, path.ExchangeService, path.ContactsCategory), identity.NewReason("", ro, path.ExchangeService, path.ContactsCategory),
}, },
fallbackReasons: []identity.Reasoner{ fallbackReasons: []identity.Reasoner{
fbEmailReason, fbEmailReason,
kopia.NewReason("", fbro, path.ExchangeService, path.ContactsCategory), identity.NewReason("", fbro, path.ExchangeService, path.ContactsCategory),
}, },
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
@@ -944,7 +944,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
{ {
name: "complete mans and complete fallbacks, distinct reasons", name: "complete mans and complete fallbacks, distinct reasons",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases( ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory)), makeMan(ro, "id1", "", path.EmailCategory)),
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().
@@ -960,7 +960,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
}, },
reasons: []identity.Reasoner{emailReason}, reasons: []identity.Reasoner{emailReason},
fallbackReasons: []identity.Reasoner{ fallbackReasons: []identity.Reasoner{
kopia.NewReason("", fbro, path.ExchangeService, path.ContactsCategory), identity.NewReason("", fbro, path.ExchangeService, path.ContactsCategory),
}, },
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,
@@ -975,7 +975,7 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
{ {
name: "complete mans and complete fallbacks, fallback has superset of reasons", name: "complete mans and complete fallbacks, fallback has superset of reasons",
bf: &mockBackupFinder{ bf: &mockBackupFinder{
data: map[string]kopia.BackupBases{ data: map[string]backup.BackupBases{
ro: kopia.NewMockBackupBases().WithMergeBases( ro: kopia.NewMockBackupBases().WithMergeBases(
makeMan(ro, "id1", "", path.EmailCategory)), makeMan(ro, "id1", "", path.EmailCategory)),
fbro: kopia.NewMockBackupBases(). fbro: kopia.NewMockBackupBases().
@@ -993,11 +993,11 @@ func (suite *OperationsManifestsUnitSuite) TestProduceManifestsAndMetadata_Fallb
}, },
reasons: []identity.Reasoner{ reasons: []identity.Reasoner{
emailReason, emailReason,
kopia.NewReason("", ro, path.ExchangeService, path.ContactsCategory), identity.NewReason("", ro, path.ExchangeService, path.ContactsCategory),
}, },
fallbackReasons: []identity.Reasoner{ fallbackReasons: []identity.Reasoner{
fbEmailReason, fbEmailReason,
kopia.NewReason("", fbro, path.ExchangeService, path.ContactsCategory), identity.NewReason("", fbro, path.ExchangeService, path.ContactsCategory),
}, },
getMeta: true, getMeta: true,
assertErr: assert.NoError, assertErr: assert.NoError,

View File

@@ -76,28 +76,28 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
category path.CategoryType category path.CategoryType
metadataFiles [][]string metadataFiles [][]string
}{ }{
// { {
// name: "Mail", name: "Mail",
// selector: func() *selectors.ExchangeBackup { selector: func() *selectors.ExchangeBackup {
// sel := selectors.NewExchangeBackup([]string{suite.its.user.ID}) sel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
// sel.Include(sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch())) sel.Include(sel.MailFolders([]string{api.MailInbox}, selectors.PrefixMatch()))
// sel.DiscreteOwner = suite.its.user.ID sel.DiscreteOwner = suite.its.user.ID
// return sel return sel
// }, },
// category: path.EmailCategory, category: path.EmailCategory,
// metadataFiles: exchange.MetadataFileNames(path.EmailCategory), metadataFiles: MetadataFileNames(path.EmailCategory),
// }, },
// { {
// name: "Contacts", name: "Contacts",
// selector: func() *selectors.ExchangeBackup { selector: func() *selectors.ExchangeBackup {
// sel := selectors.NewExchangeBackup([]string{suite.its.user.ID}) sel := selectors.NewExchangeBackup([]string{suite.its.user.ID})
// sel.Include(sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch())) sel.Include(sel.ContactFolders([]string{api.DefaultContacts}, selectors.PrefixMatch()))
// return sel return sel
// }, },
// category: path.ContactsCategory, category: path.ContactsCategory,
// metadataFiles: exchange.MetadataFileNames(path.ContactsCategory), metadataFiles: MetadataFileNames(path.ContactsCategory),
// }, },
{ {
name: "Calendar Events", name: "Calendar Events",
selector: func() *selectors.ExchangeBackup { selector: func() *selectors.ExchangeBackup {
@@ -175,6 +175,20 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
expectDeets, expectDeets,
false) false)
// Lineage for the initial backup should be empty.
bup, err := bod.sw.GetBackup(ctx, bo.Results.BackupID)
require.NoError(t, err, clues.ToCore(err))
assert.Empty(t, bup.MergeBases, "base backup merge base lineage")
assert.Empty(t, bup.AssistBases, "base backup assist base lineage")
// The backup's Reason should have Exchange and the category.
reasons, err := bup.Reasons()
require.NoError(t, err, clues.ToCore(err))
require.Len(t, reasons, 1, "number of reasons for backup")
assert.Equal(t, path.ExchangeService, reasons[0].Service())
assert.Equal(t, test.category, reasons[0].Category())
// Basic, happy path incremental test. No changes are dictated or expected. // Basic, happy path incremental test. No changes are dictated or expected.
// This only tests that an incremental backup is runnable at all, and that it // This only tests that an incremental backup is runnable at all, and that it
// produces fewer results than the last backup. // produces fewer results than the last backup.
@@ -218,6 +232,36 @@ func (suite *ExchangeBackupIntgSuite) TestBackup_Run_exchange() {
expectDeets, expectDeets,
false) false)
// Incremental backup should have the initial backup as a merge base in
// the lineage information.
bup, err = bod.sw.GetBackup(ctx, incBO.Results.BackupID)
require.NoError(t, err, clues.ToCore(err))
lineage, err := bup.Bases()
require.NoError(t, err, clues.ToCore(err))
// No assist bases.
assert.Empty(t, lineage.Assist)
// Expect one merge base with the Reason we're testing. Right now the tenant
// isn't populated and the protected resource ID may be incorrect due to the
// test inputs, so just compare service/category.
require.Len(t, lineage.Merge[bo.Results.BackupID], 1)
assert.Equal(
t,
path.ExchangeService,
lineage.Merge[bo.Results.BackupID][0].Service())
assert.Equal(
t,
test.category,
lineage.Merge[bo.Results.BackupID][0].Category())
// The backup's Reason should have Exchange and the category.
reasons, err = bup.Reasons()
require.NoError(t, err, clues.ToCore(err))
require.Len(t, reasons, 1, "number of reasons for backup")
assert.Equal(t, path.ExchangeService, reasons[0].Service())
assert.Equal(t, test.category, reasons[0].Category())
// do some additional checks to ensure the incremental dealt with fewer items. // do some additional checks to ensure the incremental dealt with fewer items.
assert.Greater(t, bo.Results.ItemsWritten, incBO.Results.ItemsWritten, "incremental items written") assert.Greater(t, bo.Results.ItemsWritten, incBO.Results.ItemsWritten, "incremental items written")
assert.Greater(t, bo.Results.ItemsRead, incBO.Results.ItemsRead, "incremental items read") assert.Greater(t, bo.Results.ItemsRead, incBO.Results.ItemsRead, "incremental items read")

View File

@@ -244,7 +244,7 @@ func checkBackupIsInManifests(
for _, category := range categories { for _, category := range categories {
t.Run(category.String(), func(t *testing.T) { t.Run(category.String(), func(t *testing.T) {
var ( var (
r = kopia.NewReason("", resourceOwner, sel.PathService(), category) r = identity.NewReason("", resourceOwner, sel.PathService(), category)
tags = map[string]string{kopia.TagBackupCategory: ""} tags = map[string]string{kopia.TagBackupCategory: ""}
found bool found bool
) )

View File

@@ -9,7 +9,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/kopia" "github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/tester" "github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
@@ -76,13 +76,13 @@ func (suite *StreamStoreIntgSuite) TestStreamer() {
table := []struct { table := []struct {
name string name string
deets func(*testing.T) *details.Details deets func(*testing.T) *details.Details
errs func(context.Context) *fault.Errors bus func(context.Context) *fault.Errors
hasSnapID assert.ValueAssertionFunc hasSnapID assert.ValueAssertionFunc
}{ }{
{ {
name: "none", name: "none",
deets: func(*testing.T) *details.Details { return nil }, deets: func(*testing.T) *details.Details { return nil },
errs: func(context.Context) *fault.Errors { return nil }, bus: func(context.Context) *fault.Errors { return nil },
hasSnapID: assert.Empty, hasSnapID: assert.Empty,
}, },
{ {
@@ -100,13 +100,13 @@ func (suite *StreamStoreIntgSuite) TestStreamer() {
})) }))
return deetsBuilder.Details() return deetsBuilder.Details()
}, },
errs: func(context.Context) *fault.Errors { return nil }, bus: func(context.Context) *fault.Errors { return nil },
hasSnapID: assert.NotEmpty, hasSnapID: assert.NotEmpty,
}, },
{ {
name: "errors", name: "errors",
deets: func(*testing.T) *details.Details { return nil }, deets: func(*testing.T) *details.Details { return nil },
errs: func(ctx context.Context) *fault.Errors { bus: func(ctx context.Context) *fault.Errors {
bus := fault.New(false) bus := fault.New(false)
bus.Fail(clues.New("foo")) bus.Fail(clues.New("foo"))
bus.AddRecoverable(ctx, clues.New("bar")) bus.AddRecoverable(ctx, clues.New("bar"))
@@ -136,7 +136,7 @@ func (suite *StreamStoreIntgSuite) TestStreamer() {
return deetsBuilder.Details() return deetsBuilder.Details()
}, },
errs: func(ctx context.Context) *fault.Errors { bus: func(ctx context.Context) *fault.Errors {
bus := fault.New(false) bus := fault.New(false)
bus.Fail(clues.New("foo")) bus.Fail(clues.New("foo"))
bus.AddRecoverable(ctx, clues.New("bar")) bus.AddRecoverable(ctx, clues.New("bar"))
@@ -169,9 +169,9 @@ func (suite *StreamStoreIntgSuite) TestStreamer() {
require.NoError(t, err) require.NoError(t, err)
} }
errs := test.errs(ctx) bus := test.bus(ctx)
if errs != nil { if bus != nil {
err = ss.Collect(ctx, FaultErrorsCollector(errs)) err = ss.Collect(ctx, FaultErrorsCollector(bus))
require.NoError(t, err) require.NoError(t, err)
} }
@@ -207,12 +207,12 @@ func (suite *StreamStoreIntgSuite) TestStreamer() {
snapid, snapid,
DetailsReader(details.UnmarshalTo(&readDeets)), DetailsReader(details.UnmarshalTo(&readDeets)),
fault.New(true)) fault.New(true))
assert.ErrorIs(t, err, data.ErrNotFound) assert.ErrorIs(t, err, errs.NotFound)
assert.Empty(t, readDeets) assert.Empty(t, readDeets)
} }
var readErrs fault.Errors var readErrs fault.Errors
if errs != nil { if bus != nil {
err = ss.Read( err = ss.Read(
ctx, ctx,
snapid, snapid,
@@ -221,15 +221,15 @@ func (suite *StreamStoreIntgSuite) TestStreamer() {
require.NoError(t, err) require.NoError(t, err)
require.NotEmpty(t, readErrs) require.NotEmpty(t, readErrs)
assert.ElementsMatch(t, errs.Skipped, readErrs.Skipped) assert.ElementsMatch(t, bus.Skipped, readErrs.Skipped)
assert.ElementsMatch(t, errs.Recovered, readErrs.Recovered) assert.ElementsMatch(t, bus.Recovered, readErrs.Recovered)
} else { } else {
err := ss.Read( err := ss.Read(
ctx, ctx,
snapid, snapid,
FaultErrorsReader(fault.UnmarshalErrorsTo(&readErrs)), FaultErrorsReader(fault.UnmarshalErrorsTo(&readErrs)),
fault.New(true)) fault.New(true))
assert.ErrorIs(t, err, data.ErrNotFound) assert.ErrorIs(t, err, errs.NotFound)
assert.Empty(t, readErrs) assert.Empty(t, readErrs)
} }
}) })

View File

@@ -2,18 +2,23 @@ package backup
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"strconv" "strconv"
"strings" "strings"
"time" "time"
"github.com/alcionai/clues"
"github.com/dustin/go-humanize" "github.com/dustin/go-humanize"
"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/cli/print" "github.com/alcionai/corso/src/cli/print"
"github.com/alcionai/corso/src/internal/common/dttm" "github.com/alcionai/corso/src/internal/common/dttm"
"github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/common/str" "github.com/alcionai/corso/src/internal/common/str"
"github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/stats" "github.com/alcionai/corso/src/internal/stats"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
) )
@@ -36,10 +41,12 @@ type Backup struct {
// Selector used in this operation // Selector used in this operation
Selector selectors.Selector `json:"selectors"` Selector selectors.Selector `json:"selectors"`
// TODO: in process of gaining support, most cases will still use // ** DO NOT CHANGE JSON TAG NAMES **
// ResourceOwner and ResourceOwnerName. // These are in-memory only variable renames of previously persisted fields.
ProtectedResourceID string `json:"protectedResourceID,omitempty"` // ** CHANGING THE JSON TAGS WILL BREAK THINGS BECAUSE THE MODEL WON'T **
ProtectedResourceName string `json:"protectedResourceName,omitempty"` // ** DESERIALIZE PROPERLY **
ProtectedResourceID string `json:"resourceOwnerID,omitempty"`
ProtectedResourceName string `json:"resourceOwnerName,omitempty"`
// Version represents the version of the backup format // Version represents the version of the backup format
Version int `json:"version"` Version int `json:"version"`
@@ -57,14 +64,19 @@ type Backup struct {
stats.StartAndEndTime stats.StartAndEndTime
stats.SkippedCounts stats.SkippedCounts
// MergeBases records the set of merge bases used for this backup and the
// Reason(s) each merge base was selected. Reasons are serialized the same
// way that Reason tags are serialized.
MergeBases map[model.StableID][]string `json:"mergeBases,omitempty"`
// AssistBases records the set of assist bases used for this backup and the
// Reason(s) each assist base was selected. Reasons are serialized the same
// way that Reason tags are serialized.
AssistBases map[model.StableID][]string `json:"assistBases,omitempty"`
// **Deprecated** // **Deprecated**
// Reference to the backup details storage location. // Reference to the backup details storage location.
// Used to read backup.Details from the streamstore. // Used to read backup.Details from the streamstore.
DetailsID string `json:"detailsID"` DetailsID string `json:"detailsID"`
// prefer protectedResource
ResourceOwnerID string `json:"resourceOwnerID,omitempty"`
ResourceOwnerName string `json:"resourceOwnerName,omitempty"`
} }
// interface compliance checks // interface compliance checks
@@ -78,6 +90,8 @@ func New(
ownerID, ownerName string, ownerID, ownerName string,
rw stats.ReadWrites, rw stats.ReadWrites,
se stats.StartAndEndTime, se stats.StartAndEndTime,
reasons []identity.Reasoner,
bases BackupBases,
fe *fault.Errors, fe *fault.Errors,
tags map[string]string, tags map[string]string,
) *Backup { ) *Backup {
@@ -109,14 +123,19 @@ func New(
} }
} }
return &Backup{ // maps.Clone panics if passed nil on Mac for some reason.
if tags == nil {
tags = map[string]string{}
}
b := &Backup{
BaseModel: model.BaseModel{ BaseModel: model.BaseModel{
ID: id, ID: id,
Tags: tags, Tags: maps.Clone(tags),
}, },
ResourceOwnerID: ownerID, ProtectedResourceID: ownerID,
ResourceOwnerName: ownerName, ProtectedResourceName: ownerName,
Version: version, Version: version,
SnapshotID: snapshotID, SnapshotID: snapshotID,
@@ -139,6 +158,145 @@ func New(
SkippedInvalidOneNoteFile: invalidONFile, SkippedInvalidOneNoteFile: invalidONFile,
}, },
} }
if bases != nil {
mergeBases := map[model.StableID][]string{}
assistBases := map[model.StableID][]string{}
for _, backup := range bases.Backups() {
for _, reason := range backup.Reasons {
mergeBases[backup.ID] = append(
mergeBases[backup.ID],
ServiceCatString(reason.Service(), reason.Category()))
}
}
for _, backup := range bases.UniqueAssistBackups() {
for _, reason := range backup.Reasons {
assistBases[backup.ID] = append(
assistBases[backup.ID],
ServiceCatString(reason.Service(), reason.Category()))
}
}
if len(mergeBases) > 0 {
b.MergeBases = mergeBases
}
if len(assistBases) > 0 {
b.AssistBases = assistBases
}
}
for _, reason := range reasons {
for k, v := range reasonTags(reason) {
b.Tags[k] = v
}
}
return b
}
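As a rough illustration of what the loop above persists: a backup built from one Exchange email merge base and one contacts assist base would record lineage maps shaped like the following. The IDs are hypothetical, and the sc- strings assume "exchange"/"email"/"contacts" are the String() values from the path package.

package main

import "fmt"

// StableID stands in for model.StableID.
type StableID string

func main() {
	// Hypothetical serialized lineage for one merge and one assist base.
	mergeBases := map[StableID][]string{
		"merge-base-backup-id": {"sc-exchange_email"},
	}
	assistBases := map[StableID][]string{
		"assist-base-backup-id": {"sc-exchange_contacts"},
	}

	fmt.Println(mergeBases, assistBases)
}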
// PersistedBaseSet contains information extracted from the backup model
// relating to its lineage. It only contains the backup ID and Reasons each
// base was selected instead of the full set of information contained in other
// structs like BackupBases.
type PersistedBaseSet struct {
Merge map[model.StableID][]identity.Reasoner
Assist map[model.StableID][]identity.Reasoner
}
func (b Backup) Bases() (PersistedBaseSet, error) {
res := PersistedBaseSet{
Merge: map[model.StableID][]identity.Reasoner{},
Assist: map[model.StableID][]identity.Reasoner{},
}
for id, reasons := range b.MergeBases {
for _, reason := range reasons {
service, cat, err := serviceCatStringToTypes(reason)
if err != nil {
return res, clues.Wrap(err, "getting Reason info").With(
"base_type", "merge",
"base_backup_id", id,
"input_string", reason)
}
res.Merge[id] = append(res.Merge[id], identity.NewReason(
// Tenant ID not currently stored in backup model.
"",
str.First(
b.ProtectedResourceID,
b.Selector.DiscreteOwner),
service,
cat))
}
}
for id, reasons := range b.AssistBases {
for _, reason := range reasons {
service, cat, err := serviceCatStringToTypes(reason)
if err != nil {
return res, clues.Wrap(err, "getting Reason info").With(
"base_type", "assist",
"base_backup_id", id,
"input_string", reason)
}
res.Assist[id] = append(res.Assist[id], identity.NewReason(
// Tenant ID not currently stored in backup model.
"",
str.First(
b.ProtectedResourceID,
b.Selector.DiscreteOwner),
service,
cat))
}
}
return res, nil
}
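A short sketch of consuming the deserialized lineage, assuming it runs inside the corso module where these import paths resolve. printLineage is a hypothetical helper; only service/category are reliable since the tenant isn't persisted yet.

package main

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/backup"
)

// printLineage walks the PersistedBaseSet returned by Bases.
func printLineage(b backup.Backup) error {
	bases, err := b.Bases()
	if err != nil {
		return err
	}

	for id, reasons := range bases.Merge {
		for _, r := range reasons {
			fmt.Printf("merge base %s: %s/%s\n", id, r.Service(), r.Category())
		}
	}

	for id, reasons := range bases.Assist {
		for _, r := range reasons {
			fmt.Printf("assist base %s: %s/%s\n", id, r.Service(), r.Category())
		}
	}

	return nil
}

func main() {
	// A zero-value backup has no recorded bases and prints nothing.
	if err := printLineage(backup.Backup{}); err != nil {
		fmt.Println(err)
	}
}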
func (b Backup) Tenant() (string, error) {
t := b.Tags[TenantIDKey]
if len(t) == 0 {
return "", clues.Wrap(errs.NotFound, "getting tenant")
}
return t, nil
}
// Reasons returns the set of services and categories this backup encompassed
// for the tenant and protected resource.
func (b Backup) Reasons() ([]identity.Reasoner, error) {
tenant, err := b.Tenant()
if err != nil {
return nil, clues.Stack(err)
}
var res []identity.Reasoner
for tag := range b.Tags {
service, cat, err := serviceCatStringToTypes(tag)
if err != nil {
// Assume it's just not one of the Reason tags.
if errors.Is(err, errMissingPrefix) {
continue
}
return nil, clues.Wrap(err, "parsing reasons")
}
res = append(
res,
identity.NewReason(
tenant,
str.First(b.ProtectedResourceID, b.Selector.DiscreteOwner),
service,
cat))
}
return res, nil
} }
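And a sketch of the tag-to-Reason round trip, written as if inside this package and leaning on the file's existing model/path imports; the tenant/resource values are hypothetical, and Tenant() fails with errs.NotFound when the tenant tag is absent.

func exampleReasonsRoundTrip() ([]identity.Reasoner, error) {
	b := Backup{
		BaseModel: model.BaseModel{
			Tags: map[string]string{
				TenantIDKey: "t1",
				ServiceCatString(path.ExchangeService, path.EmailCategory): DefaultTagValue,
			},
		},
		ProtectedResourceID: "u1",
	}

	// Expect one Reason carrying tenant "t1", resource "u1", and
	// Exchange/email; non-Reason tags are skipped via errMissingPrefix.
	return b.Reasons()
}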
// -------------------------------------------------------------------------------- // --------------------------------------------------------------------------------
@@ -253,9 +411,7 @@ func (b Backup) Values() []string {
name := str.First( name := str.First(
b.ProtectedResourceName, b.ProtectedResourceName,
b.ResourceOwnerName,
b.ProtectedResourceID, b.ProtectedResourceID,
b.ResourceOwnerID,
b.Selector.Name()) b.Selector.Name())
bs := b.toStats() bs := b.toStats()

View File

@@ -0,0 +1,152 @@
package backup
import (
"context"
"strings"
"github.com/alcionai/clues"
"github.com/kopia/kopia/repo/manifest"
"github.com/kopia/kopia/snapshot"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/path"
)
const (
// Kopia does not do comparisons properly for empty tags right now so add some
// placeholder value to them.
legacyDefaultTagValue = "0"
// Kopia CLI prefixes all user tags with "tag:"[1]. Maintaining this will
// ensure we don't accidentally take reserved tags and that tags can be
// displayed with kopia CLI.
// (permalinks)
// [1] https://github.com/kopia/kopia/blob/05e729a7858a6e86cb48ba29fb53cb6045efce2b/cli/command_snapshot_create.go#L169
LegacyUserTagPrefix = "tag:"
TenantIDKey = "tenant"
ResourceIDKey = "protectedResource"
serviceCatPrefix = "sc-"
separator = "_"
// Sentinel value for tags. Could technically be empty but we'll store
// something for now.
//nolint
DefaultTagValue = "1"
)
var errMissingPrefix = clues.New("missing tag prefix")
// ServiceCatString encodes a service/category pair into a single prefixed
// tag string that can be persisted and later decoded.
func ServiceCatString(
service path.ServiceType,
category path.CategoryType,
) string {
return serviceCatPrefix + service.String() + separator + category.String()
}
// serviceCatStringToTypes decodes a string produced by ServiceCatString back
// into its service and category types.
func serviceCatStringToTypes(
input string,
) (path.ServiceType, path.CategoryType, error) {
trimmed := strings.TrimPrefix(input, serviceCatPrefix)
// No prefix found -> unexpected format.
if trimmed == input {
return path.UnknownService,
path.UnknownCategory,
clues.Stack(errMissingPrefix).With(
"expected_prefix", serviceCatPrefix,
"input", input)
}
parts := strings.Split(trimmed, separator)
if len(parts) != 2 {
return path.UnknownService,
path.UnknownCategory,
clues.New("missing tag separator")
}
cat := path.ToCategoryType(parts[1])
if cat == path.UnknownCategory {
return path.UnknownService,
path.UnknownCategory,
clues.New("parsing category").With("input_category", parts[1])
}
service := path.ToServiceType(parts[0])
if service == path.UnknownService {
return path.UnknownService,
path.UnknownCategory,
clues.New("parsing service").With("input_service", parts[0])
}
return service, cat, nil
}
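A quick round-trip sketch of the encoding above, written as if inside this package (the decoder is unexported) and assuming an extra fmt import:

func exampleServiceCatRoundTrip() {
	tag := ServiceCatString(path.ExchangeService, path.EmailCategory)

	service, cat, err := serviceCatStringToTypes(tag)
	if err != nil {
		panic(err)
	}

	// Assuming "exchange"/"email" String() values, this prints:
	//   sc-exchange_email exchange email
	fmt.Println(tag, service, cat)
}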
// reasonTags returns the set of key-value pairs that can be used as tags to
// represent this Reason.
// nolint
func reasonTags(r identity.Reasoner) map[string]string {
return map[string]string{
TenantIDKey: r.Tenant(),
ResourceIDKey: r.ProtectedResource(),
ServiceCatString(r.Service(), r.Category()): DefaultTagValue,
}
}
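For a single Reason, the resulting tag map looks roughly like this; the tenant/resource values are hypothetical, and the sketch is written as if inside this package since reasonTags is unexported.

func exampleReasonTags() map[string]string {
	r := identity.NewReason("t1", "u1", path.ExchangeService, path.EmailCategory)

	// Expected shape, assuming "exchange"/"email" String() values:
	//   "tenant":            "t1"
	//   "protectedResource": "u1"
	//   "sc-exchange_email": "1"
	return reasonTags(r)
}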
// nolint
type BackupEntry struct {
*Backup
Reasons []identity.Reasoner
}
type ManifestEntry struct {
*snapshot.Manifest
// Reasons contains the ResourceOwners and Service/Categories that caused this
// snapshot to be selected as a base. We can't reuse OwnersCats here because
// it's possible some ResourceOwners will have a subset of the Categories as
// the reason for selecting a snapshot. For example:
// 1. backup user1 email,contacts -> B1
// 2. backup user1 contacts -> B2 (uses B1 as base)
// 3. backup user1 email,contacts,events (uses B1 for email, B2 for contacts)
Reasons []identity.Reasoner
}
// MakeTagKV normalizes the provided key to protect it from clobbering
// similarly named tags from non-user input (user inputs are still open
// to collisions amongst each other).
// Returns the normalized Key plus a default value. If you're embedding a
// key-only tag, the returned default value must be used instead of an
// empty string.
func MakeTagKV(k string) (string, string) {
return LegacyUserTagPrefix + k, legacyDefaultTagValue
}
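A small usage sketch, assuming it runs inside the corso module:

package main

import (
	"fmt"

	"github.com/alcionai/corso/src/pkg/backup"
)

func main() {
	// Normalize a key-only tag; the returned default value must be used in
	// place of an empty string.
	k, v := backup.MakeTagKV("backup-category")

	tags := map[string]string{k: v}
	fmt.Println(tags) // map[tag:backup-category:0]
}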
func (me ManifestEntry) GetTag(key string) (string, bool) {
k, _ := MakeTagKV(key)
v, ok := me.Tags[k]
return v, ok
}
// nolint
type BackupBases interface {
// ConvertToAssistBase converts the base with the given item data snapshot ID
// from a merge base to an assist base.
ConvertToAssistBase(manifestID manifest.ID)
Backups() []BackupEntry
UniqueAssistBackups() []BackupEntry
MinBackupVersion() int
MergeBases() []ManifestEntry
DisableMergeBases()
UniqueAssistBases() []ManifestEntry
DisableAssistBases()
MergeBackupBases(
ctx context.Context,
other BackupBases,
reasonToKey func(identity.Reasoner) string,
) BackupBases
// SnapshotAssistBases returns the set of bases to use for kopia assisted
// incremental snapshot operations. It consists of the union of merge bases
// and assist bases. If DisableAssistBases has been called then it returns
// nil.
SnapshotAssistBases() []ManifestEntry
}
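A toy illustration of the SnapshotAssistBases contract described above, not the real implementation: merge and assist bases are unioned unless assist bases were disabled.

func snapshotAssistBases(
	merge, assist []ManifestEntry,
	assistDisabled bool,
) []ManifestEntry {
	// DisableAssistBases turns assisted incrementals off entirely.
	if assistDisabled {
		return nil
	}

	// Union of merge bases and assist bases.
	res := make([]ManifestEntry, 0, len(merge)+len(assist))
	res = append(res, merge...)

	return append(res, assist...)
}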

View File

@@ -0,0 +1,70 @@
package backup
import (
"strings"
"testing"
"github.com/alcionai/clues"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/tester"
"github.com/alcionai/corso/src/pkg/path"
)
type BackupBasesUnitSuite struct {
tester.Suite
}
func TestBackupBasesUnitSuite(t *testing.T) {
suite.Run(t, &BackupBasesUnitSuite{Suite: tester.NewUnitSuite(t)})
}
func (suite *BackupBasesUnitSuite) TestServiceCategorySerialization() {
table := []struct {
name string
input string
expectErr assert.ErrorAssertionFunc
expectService path.ServiceType
expectCategory path.CategoryType
}{
{
name: "ProperFormat",
input: ServiceCatString(path.ExchangeService, path.EmailCategory),
expectErr: assert.NoError,
expectService: path.ExchangeService,
expectCategory: path.EmailCategory,
},
{
name: "MissingPrefix",
input: strings.TrimPrefix(
ServiceCatString(path.ExchangeService, path.EmailCategory),
serviceCatPrefix),
expectErr: assert.Error,
},
{
name: "MissingSeparator",
input: strings.ReplaceAll(
ServiceCatString(path.ExchangeService, path.EmailCategory),
separator,
""),
expectErr: assert.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
service, cat, err := serviceCatStringToTypes(test.input)
test.expectErr(t, err, clues.ToCore(err))
if err != nil {
return
}
assert.Equal(t, test.expectService, service)
assert.Equal(t, test.expectCategory, cat)
})
}
}

View File

@ -1,20 +1,24 @@
-package backup_test
+package backup
import (
"strconv"
"testing"
"time"
+"github.com/alcionai/clues"
"github.com/dustin/go-humanize"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
+"golang.org/x/exp/maps"
"github.com/alcionai/corso/src/internal/common/dttm"
+"github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/stats"
"github.com/alcionai/corso/src/internal/tester"
-"github.com/alcionai/corso/src/pkg/backup"
+"github.com/alcionai/corso/src/pkg/backup/identity"
+"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors"
)
@ -26,11 +30,11 @@ func TestBackupUnitSuite(t *testing.T) {
suite.Run(t, &BackupUnitSuite{Suite: tester.NewUnitSuite(t)})
}
-func stubBackup(t time.Time, ownerID, ownerName string) backup.Backup {
+func stubBackup(t time.Time, ownerID, ownerName string) Backup {
sel := selectors.NewExchangeBackup([]string{"test"})
sel.Include(sel.AllData())
-return backup.Backup{
+return Backup{
BaseModel: model.BaseModel{
ID: model.StableID("id"),
Tags: map[string]string{
@ -40,10 +44,8 @@ func stubBackup(t time.Time, ownerID, ownerName string) backup.Backup {
CreationTime: t,
SnapshotID: "snapshot",
DetailsID: "details",
-ProtectedResourceID: ownerID + "-pr",
-ProtectedResourceName: ownerName + "-pr",
-ResourceOwnerID: ownerID + "-ro",
-ResourceOwnerName: ownerName + "-ro",
+ProtectedResourceID: ownerID + "-ro",
+ProtectedResourceName: ownerName + "-ro",
Status: "status",
Selector: sel.Selector,
ErrorCount: 2,
@ -67,6 +69,371 @@ func stubBackup(t time.Time, ownerID, ownerName string) backup.Backup {
}
}
func (suite *BackupUnitSuite) TestBackup_Bases() {
const (
mergeID model.StableID = "merge-backup-id"
assistID model.StableID = "assist-backup-id"
userID = "user-id"
)
stub := stubBackup(time.Now(), userID, "user-name")
defaultEmailReason := identity.NewReason(
"",
stub.ProtectedResourceID,
path.ExchangeService,
path.EmailCategory)
defaultContactsReason := identity.NewReason(
"",
stub.ProtectedResourceID,
path.ExchangeService,
path.ContactsCategory)
table := []struct {
name string
getBackup func() *Backup
expectErr assert.ErrorAssertionFunc
expectMerge map[model.StableID][]identity.Reasoner
expectAssist map[model.StableID][]identity.Reasoner
}{
{
name: "MergeAndAssist SameReasonEach",
getBackup: func() *Backup {
res := stub
res.MergeBases = map[model.StableID][]string{}
res.AssistBases = map[model.StableID][]string{}
res.MergeBases[mergeID] = []string{
ServiceCatString(
defaultEmailReason.Service(),
defaultEmailReason.Category()),
}
res.AssistBases[assistID] = []string{
ServiceCatString(
defaultEmailReason.Service(),
defaultEmailReason.Category()),
}
return &res
},
expectErr: assert.NoError,
expectMerge: map[model.StableID][]identity.Reasoner{
mergeID: {defaultEmailReason},
},
expectAssist: map[model.StableID][]identity.Reasoner{
assistID: {defaultEmailReason},
},
},
{
name: "MergeAndAssist DifferentReasonEach",
getBackup: func() *Backup {
res := stub
res.MergeBases = map[model.StableID][]string{}
res.AssistBases = map[model.StableID][]string{}
res.MergeBases[mergeID] = []string{
ServiceCatString(
defaultEmailReason.Service(),
defaultEmailReason.Category()),
}
res.AssistBases[assistID] = []string{
ServiceCatString(
defaultContactsReason.Service(),
defaultContactsReason.Category()),
}
return &res
},
expectErr: assert.NoError,
expectMerge: map[model.StableID][]identity.Reasoner{
mergeID: {defaultEmailReason},
},
expectAssist: map[model.StableID][]identity.Reasoner{
assistID: {defaultContactsReason},
},
},
{
name: "MergeAndAssist MultipleReasonsEach",
getBackup: func() *Backup {
res := stub
res.MergeBases = map[model.StableID][]string{}
res.AssistBases = map[model.StableID][]string{}
res.MergeBases[mergeID] = []string{
ServiceCatString(
defaultEmailReason.Service(),
defaultEmailReason.Category()),
ServiceCatString(
defaultContactsReason.Service(),
defaultContactsReason.Category()),
}
res.AssistBases[assistID] = []string{
ServiceCatString(
defaultEmailReason.Service(),
defaultEmailReason.Category()),
ServiceCatString(
defaultContactsReason.Service(),
defaultContactsReason.Category()),
}
return &res
},
expectErr: assert.NoError,
expectMerge: map[model.StableID][]identity.Reasoner{
mergeID: {
defaultEmailReason,
defaultContactsReason,
},
},
expectAssist: map[model.StableID][]identity.Reasoner{
assistID: {
defaultEmailReason,
defaultContactsReason,
},
},
},
{
name: "OnlyMerge SingleReason",
getBackup: func() *Backup {
res := stub
res.MergeBases = map[model.StableID][]string{}
res.MergeBases[mergeID] = []string{
ServiceCatString(
defaultEmailReason.Service(),
defaultEmailReason.Category()),
}
return &res
},
expectErr: assert.NoError,
expectMerge: map[model.StableID][]identity.Reasoner{
mergeID: {defaultEmailReason},
},
},
{
name: "OnlyAssist SingleReason",
getBackup: func() *Backup {
res := stub
res.AssistBases = map[model.StableID][]string{}
res.AssistBases[mergeID] = []string{
ServiceCatString(
defaultEmailReason.Service(),
defaultEmailReason.Category()),
}
return &res
},
expectErr: assert.NoError,
expectAssist: map[model.StableID][]identity.Reasoner{
mergeID: {defaultEmailReason},
},
},
{
name: "BadReasonFormat",
getBackup: func() *Backup {
res := stub
res.AssistBases = map[model.StableID][]string{}
res.AssistBases[mergeID] = []string{"foo"}
return &res
},
expectErr: assert.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
bup := test.getBackup()
got, err := bup.Bases()
test.expectErr(t, err, clues.ToCore(err))
if err != nil {
return
}
// Since the result contains slices of Reasons, directly calling Equal
// would fail; we want ElementsMatch on the internal slices.
assert.ElementsMatch(
t,
maps.Keys(test.expectMerge),
maps.Keys(got.Merge),
"merge base keys")
assert.ElementsMatch(
t,
maps.Keys(test.expectAssist),
maps.Keys(got.Assist),
"assist base keys")
for id, e := range test.expectMerge {
assert.ElementsMatch(t, e, got.Merge[id], "merge bases")
}
for id, e := range test.expectAssist {
assert.ElementsMatch(t, e, got.Assist[id], "assist bases")
}
})
}
}
func (suite *BackupUnitSuite) TestBackup_Tenant() {
const tenant = "tenant-id"
stub := stubBackup(time.Now(), "user-id", "user-name")
table := []struct {
name string
inputKey string
inputValue string
expectErr assert.ErrorAssertionFunc
expect string
}{
{
name: "ProperlyFormatted",
inputKey: TenantIDKey,
inputValue: tenant,
expectErr: assert.NoError,
expect: tenant,
},
{
name: "WrongKey",
inputKey: "foo",
inputValue: tenant,
expectErr: assert.Error,
},
{
name: "EmptyValue",
inputKey: TenantIDKey,
inputValue: "",
expectErr: assert.Error,
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
b := stub
b.Tags = map[string]string{test.inputKey: test.inputValue}
gotTenant, err := b.Tenant()
test.expectErr(t, err, clues.ToCore(err))
if err != nil {
assert.ErrorIs(t, err, errs.NotFound)
return
}
assert.Equal(t, test.expect, gotTenant)
})
}
}
func (suite *BackupUnitSuite) TestBackup_Reasons() {
const (
tenantID = "tenant-id"
userID = "user-id"
)
stub := stubBackup(time.Now(), userID, "user-name")
defaultEmailReason := identity.NewReason(
tenantID,
stub.ProtectedResourceID,
path.ExchangeService,
path.EmailCategory)
defaultContactsReason := identity.NewReason(
tenantID,
stub.ProtectedResourceID,
path.ExchangeService,
path.ContactsCategory)
table := []struct {
name string
getBackup func() *Backup
expectErr assert.ErrorAssertionFunc
expect []identity.Reasoner
}{
{
name: "SingleReason",
getBackup: func() *Backup {
res := stub
res.Tags = map[string]string{}
for k, v := range reasonTags(defaultEmailReason) {
res.Tags[k] = v
}
return &res
},
expectErr: assert.NoError,
expect: []identity.Reasoner{defaultEmailReason},
},
{
name: "MultipleReasons",
getBackup: func() *Backup {
res := stub
res.Tags = map[string]string{}
for _, reason := range []identity.Reasoner{defaultEmailReason, defaultContactsReason} {
for k, v := range reasonTags(reason) {
res.Tags[k] = v
}
}
return &res
},
expectErr: assert.NoError,
expect: []identity.Reasoner{
defaultEmailReason,
defaultContactsReason,
},
},
{
name: "SingleReason OtherTags",
getBackup: func() *Backup {
res := stub
res.Tags = map[string]string{}
for k, v := range reasonTags(defaultEmailReason) {
res.Tags[k] = v
}
res.Tags["foo"] = "bar"
return &res
},
expectErr: assert.NoError,
expect: []identity.Reasoner{defaultEmailReason},
},
}
for _, test := range table {
suite.Run(test.name, func() {
t := suite.T()
bup := test.getBackup()
got, err := bup.Reasons()
test.expectErr(t, err, clues.ToCore(err))
if err != nil {
return
}
assert.ElementsMatch(t, test.expect, got)
})
}
}
func (suite *BackupUnitSuite) TestBackup_HeadersValues() {
var (
t = suite.T()
@ -86,7 +453,7 @@ func (suite *BackupUnitSuite) TestBackup_HeadersValues() {
nowFmt,
"1m0s",
"status (2 errors, 1 skipped: 1 malware)",
-"name-pr",
+"name-ro",
}
)
@ -123,9 +490,6 @@ func (suite *BackupUnitSuite) TestBackup_HeadersValues_onlyResourceOwners() {
}
)
-b.ProtectedResourceID = ""
-b.ProtectedResourceName = ""
b.StartAndEndTime.CompletedAt = later
// single skipped malware
@ -139,17 +503,17 @@ func (suite *BackupUnitSuite) TestBackup_HeadersValues_onlyResourceOwners() {
func (suite *BackupUnitSuite) TestBackup_Values_statusVariations() {
table := []struct {
name string
-bup backup.Backup
+bup Backup
expect string
}{
{
name: "no extras",
-bup: backup.Backup{Status: "test"},
+bup: Backup{Status: "test"},
expect: "test",
},
{
name: "errors",
-bup: backup.Backup{
+bup: Backup{
Status: "test",
ErrorCount: 42,
},
@ -157,7 +521,7 @@ func (suite *BackupUnitSuite) TestBackup_Values_statusVariations() {
},
{
name: "malware",
-bup: backup.Backup{
+bup: Backup{
Status: "test",
SkippedCounts: stats.SkippedCounts{
TotalSkippedItems: 2,
@ -168,7 +532,7 @@ func (suite *BackupUnitSuite) TestBackup_Values_statusVariations() {
},
{
name: "errors and malware",
-bup: backup.Backup{
+bup: Backup{
Status: "test",
ErrorCount: 42,
SkippedCounts: stats.SkippedCounts{
@ -180,7 +544,7 @@ func (suite *BackupUnitSuite) TestBackup_Values_statusVariations() {
},
{
name: "errors and skipped",
-bup: backup.Backup{
+bup: Backup{
Status: "test",
ErrorCount: 42,
SkippedCounts: stats.SkippedCounts{
@ -191,7 +555,7 @@ func (suite *BackupUnitSuite) TestBackup_Values_statusVariations() {
},
{
name: "errors and invalid OneNote",
-bup: backup.Backup{
+bup: Backup{
Status: "test",
ErrorCount: 42,
SkippedCounts: stats.SkippedCounts{
@ -203,7 +567,7 @@ func (suite *BackupUnitSuite) TestBackup_Values_statusVariations() {
},
{
name: "errors, malware, notFound, invalid OneNote",
-bup: backup.Backup{
+bup: Backup{
Status: "test",
ErrorCount: 42,
SkippedCounts: stats.SkippedCounts{
@ -229,7 +593,7 @@ func (suite *BackupUnitSuite) TestBackup_MinimumPrintable() {
b := stubBackup(now, "id", "name")
resultIface := b.MinimumPrintable()
-result, ok := resultIface.(backup.Printable)
+result, ok := resultIface.(Printable)
require.True(t, ok)
assert.Equal(t, b.ID, result.ID, "id")

View File

@ -1,6 +1,10 @@
package identity
-import "github.com/alcionai/corso/src/pkg/path"
+import (
+"github.com/alcionai/clues"
+"github.com/alcionai/corso/src/pkg/path"
+)
// Reasoner describes the parts of the backup that make up its
// data identity: the tenant, protected resources, services, and
@ -14,3 +18,52 @@ type Reasoner interface {
// parameters (tenant, protected resource, etc.) that match this Reasoner.
SubtreePath() (path.Path, error)
}
func NewReason(
tenant, resource string,
service path.ServiceType,
category path.CategoryType,
) Reasoner {
return reason{
tenant: tenant,
resource: resource,
service: service,
category: category,
}
}
type reason struct {
// tenant appears here so that when this struct moves to an inject package
// nothing needs to change. However, kopia itself is blind to the fields in
// the reason struct and relies on helper functions to get the information
// it needs.
tenant string
resource string
service path.ServiceType
category path.CategoryType
}
func (r reason) Tenant() string {
return r.tenant
}
func (r reason) ProtectedResource() string {
return r.resource
}
func (r reason) Service() path.ServiceType {
return r.service
}
func (r reason) Category() path.CategoryType {
return r.category
}
func (r reason) SubtreePath() (path.Path, error) {
p, err := path.BuildPrefix(
r.Tenant(),
r.ProtectedResource(),
r.Service(),
r.Category())
return p, clues.Wrap(err, "building path").OrNil()
}
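Taken together: a Reason pins one (tenant, protected resource, service, category) tuple, and SubtreePath resolves that tuple to a repo path prefix. A short usage sketch with illustrative IDs:
r := identity.NewReason("tenant-id", "user-id", path.ExchangeService, path.EmailCategory)
p, err := r.SubtreePath()
// On success, p is the prefix for user-id's Exchange email subtree under
// tenant-id.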

View File

@ -6,9 +6,10 @@ import (
"github.com/alcionai/clues" "github.com/alcionai/clues"
"github.com/kopia/kopia/repo/manifest" "github.com/kopia/kopia/repo/manifest"
"github.com/pkg/errors" "github.com/pkg/errors"
"golang.org/x/exp/slices"
"github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/common/idname" "github.com/alcionai/corso/src/internal/common/idname"
"github.com/alcionai/corso/src/internal/data"
"github.com/alcionai/corso/src/internal/kopia" "github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/m365/collection/drive/metadata" "github.com/alcionai/corso/src/internal/m365/collection/drive/metadata"
"github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/model"
@ -17,7 +18,9 @@ import (
"github.com/alcionai/corso/src/internal/version" "github.com/alcionai/corso/src/internal/version"
"github.com/alcionai/corso/src/pkg/backup" "github.com/alcionai/corso/src/pkg/backup"
"github.com/alcionai/corso/src/pkg/backup/details" "github.com/alcionai/corso/src/pkg/backup/details"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/fault" "github.com/alcionai/corso/src/pkg/fault"
"github.com/alcionai/corso/src/pkg/path"
"github.com/alcionai/corso/src/pkg/selectors" "github.com/alcionai/corso/src/pkg/selectors"
"github.com/alcionai/corso/src/pkg/store" "github.com/alcionai/corso/src/pkg/store"
) )
@ -120,7 +123,7 @@ func getBackup(
func (r repository) Backups(ctx context.Context, ids []string) ([]*backup.Backup, *fault.Bus) { func (r repository) Backups(ctx context.Context, ids []string) ([]*backup.Backup, *fault.Bus) {
var ( var (
bups []*backup.Backup bups []*backup.Backup
errs = fault.New(false) bus = fault.New(false)
sw = store.NewWrapper(r.modelStore) sw = store.NewWrapper(r.modelStore)
) )
@ -129,13 +132,260 @@ func (r repository) Backups(ctx context.Context, ids []string) ([]*backup.Backup
b, err := sw.GetBackup(ictx, model.StableID(id)) b, err := sw.GetBackup(ictx, model.StableID(id))
if err != nil { if err != nil {
errs.AddRecoverable(ctx, errWrapper(err)) bus.AddRecoverable(ctx, errWrapper(err))
} }
bups = append(bups, b) bups = append(bups, b)
} }
return bups, errs return bups, bus
}
func addBackup(
bup *lineageNode,
seen map[model.StableID]*lineageNode,
allNodes map[model.StableID]*lineageNode,
) {
if bup == nil {
return
}
if _, ok := seen[bup.ID]; ok {
// We've already traversed this node.
return
}
for baseID := range bup.MergeBases {
addBackup(allNodes[baseID], seen, allNodes)
}
for baseID := range bup.AssistBases {
addBackup(allNodes[baseID], seen, allNodes)
}
seen[bup.ID] = bup
for _, descendant := range bup.children {
addBackup(allNodes[descendant.ID], seen, allNodes)
}
}
func filterLineages(
bups map[model.StableID]*lineageNode,
backupIDs ...string,
) map[model.StableID]*lineageNode {
if len(backupIDs) == 0 {
return bups
}
res := map[model.StableID]*lineageNode{}
// For each backup we're interested in, traverse up and down the hierarchy.
// Going down the hierarchy is more difficult because backups only have
// backpointers to their ancestors.
for _, id := range backupIDs {
addBackup(bups[model.StableID(id)], res, bups)
}
return res
}
func addBase(
baseID model.StableID,
baseReasons []identity.Reasoner,
current *BackupNode,
allNodes map[model.StableID]*BackupNode,
bups map[model.StableID]*lineageNode,
) {
parent, parentOK := allNodes[baseID]
if !parentOK {
parent = &BackupNode{}
allNodes[baseID] = parent
}
parent.Label = string(baseID)
// If the parent isn't in the set of backups passed in, it must have been
// deleted.
if p, ok := bups[baseID]; !ok || p.deleted {
parent.Deleted = true
// If the backup was deleted, we also attempt to recreate the set of
// Reasons it encompassed. We can recover partial info by collecting all
// the Reasons for which it served as a base.
for _, reason := range baseReasons {
if !slices.ContainsFunc(
parent.Reasons,
func(other identity.Reasoner) bool {
return other.Service() == reason.Service() &&
other.Category() == reason.Category()
}) {
parent.Reasons = append(parent.Reasons, reason)
}
}
}
parent.Children = append(
parent.Children,
&BackupEdge{
BackupNode: current,
Reasons: baseReasons,
})
}
func buildOutput(bups map[model.StableID]*lineageNode) ([]*BackupNode, error) {
var roots []*BackupNode
allNodes := map[model.StableID]*BackupNode{}
for _, bup := range bups {
node := allNodes[bup.ID]
if node == nil {
node = &BackupNode{}
allNodes[bup.ID] = node
}
node.Label = string(bup.ID)
node.Type = MergeNode
node.Created = bup.CreationTime
if bup.Tags[model.BackupTypeTag] == model.AssistBackup {
node.Type = AssistNode
}
topLevel := true
if !bup.deleted {
reasons, err := bup.Reasons()
if err != nil {
return nil, clues.Wrap(err, "getting reasons").With("backup_id", bup.ID)
}
node.Reasons = reasons
bases, err := bup.Bases()
if err != nil {
return nil, clues.Wrap(err, "getting bases").With("backup_id", bup.ID)
}
for baseID, baseReasons := range bases.Merge {
topLevel = false
addBase(baseID, baseReasons, node, allNodes, bups)
}
for baseID, baseReasons := range bases.Assist {
topLevel = false
addBase(baseID, baseReasons, node, allNodes, bups)
}
}
// If this node has no ancestors then add it directly to the root.
if bup.deleted || topLevel {
roots = append(roots, node)
}
}
return roots, nil
}
// lineageNode is a small in-memory wrapper around *backup.Backup that provides
// information about children. This just makes it easier to traverse lineages
// during filtering.
type lineageNode struct {
*backup.Backup
children []*lineageNode
deleted bool
}
func (r repository) BackupLineage(
ctx context.Context,
tenantID string,
protectedResourceID string,
service path.ServiceType,
category path.CategoryType,
backupIDs ...string,
) ([]*BackupNode, error) {
sw := store.NewWrapper(r.modelStore)
fs := []store.FilterOption{
store.Tenant(tenantID),
//store.ProtectedResource(protectedResourceID),
//store.Reason(service, category),
}
bs, err := sw.GetBackups(ctx, fs...)
if err != nil {
return nil, clues.Stack(err)
}
if len(bs) == 0 {
return nil, clues.Stack(errs.NotFound)
}
// Put all the backups in a map so we can access them more easily when
// building the graph.
bups := make(map[model.StableID]*lineageNode, len(bs))
for _, b := range bs {
current := bups[b.ID]
if current == nil {
current = &lineageNode{}
}
current.Backup = b
current.deleted = false
bups[b.ID] = current
for id := range b.MergeBases {
parent := bups[id]
if parent == nil {
// Populate the ID so we don't hit a nil pointer when building the tree
// if this base was deleted.
parent = &lineageNode{
Backup: &backup.Backup{
BaseModel: model.BaseModel{
ID: id,
},
},
deleted: true,
}
}
parent.children = append(parent.children, current)
bups[id] = parent
}
for id := range b.AssistBases {
parent := bups[id]
if parent == nil {
// Populate the ID so we don't hit a nil pointer when building the tree
// if this base was deleted.
parent = &lineageNode{
Backup: &backup.Backup{
BaseModel: model.BaseModel{
ID: id,
},
},
deleted: true,
}
}
parent.children = append(parent.children, current)
bups[id] = parent
}
}
// Filter the map of backups down to just those in the lineages we're
// interested in.
filtered := filterLineages(bups, backupIDs...)
// Build the output graph.
res, err := buildOutput(filtered)
return res, clues.Stack(err).OrNil()
}
// BackupsByTag lists all backups in a repository that contain all the tags
@ -177,7 +427,7 @@ func (r repository) GetBackupDetails(
ctx context.Context,
backupID string,
) (*details.Details, *backup.Backup, *fault.Bus) {
-errs := fault.New(false)
+bus := fault.New(false)
deets, bup, err := getBackupDetails(
ctx,
@ -185,9 +435,9 @@ func (r repository) GetBackupDetails(
r.Account.ID(),
r.dataLayer,
store.NewWrapper(r.modelStore),
-errs)
+bus)
-return deets, bup, errs.Fail(err)
+return deets, bup, bus.Fail(err)
}
// getBackupDetails handles the processing for GetBackupDetails.
@ -196,7 +446,7 @@ func getBackupDetails(
backupID, tenantID string,
kw *kopia.Wrapper,
sw store.BackupGetter,
-errs *fault.Bus,
+bus *fault.Bus,
) (*details.Details, *backup.Backup, error) {
b, err := sw.GetBackup(ctx, model.StableID(backupID))
if err != nil {
@ -221,7 +471,7 @@ func getBackupDetails(
ctx,
ssid,
streamstore.DetailsReader(details.UnmarshalTo(&deets)),
-errs)
+bus)
if err != nil {
return nil, nil, err
}
@ -247,7 +497,7 @@ func (r repository) GetBackupErrors(
ctx context.Context,
backupID string,
) (*fault.Errors, *backup.Backup, *fault.Bus) {
-errs := fault.New(false)
+bus := fault.New(false)
fe, bup, err := getBackupErrors(
ctx,
@ -255,9 +505,9 @@ func (r repository) GetBackupErrors(
r.Account.ID(),
r.dataLayer,
store.NewWrapper(r.modelStore),
-errs)
+bus)
-return fe, bup, errs.Fail(err)
+return fe, bup, bus.Fail(err)
}
// getBackupErrors handles the processing for GetBackupErrors.
@ -266,7 +516,7 @@ func getBackupErrors(
backupID, tenantID string,
kw *kopia.Wrapper,
sw store.BackupGetter,
-errs *fault.Bus,
+bus *fault.Bus,
) (*fault.Errors, *backup.Backup, error) {
b, err := sw.GetBackup(ctx, model.StableID(backupID))
if err != nil {
@ -287,7 +537,7 @@ func getBackupErrors(
ctx,
ssid,
streamstore.FaultErrorsReader(fault.UnmarshalErrorsTo(&fe)),
-errs)
+bus)
if err != nil {
return nil, nil, err
}
@ -330,7 +580,7 @@ func deleteBackups(
for _, id := range ids {
b, err := sw.GetBackup(ctx, model.StableID(id))
if err != nil {
-if !failOnMissing && errors.Is(err, data.ErrNotFound) {
+if !failOnMissing && errors.Is(err, errs.NotFound) {
continue
}

View File

@ -9,13 +9,14 @@ import (
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/alcionai/corso/src/internal/common/crash" "github.com/alcionai/corso/src/internal/common/crash"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/events" "github.com/alcionai/corso/src/internal/events"
"github.com/alcionai/corso/src/internal/kopia" "github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/observe" "github.com/alcionai/corso/src/internal/observe"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
"github.com/alcionai/corso/src/pkg/account" "github.com/alcionai/corso/src/pkg/account"
"github.com/alcionai/corso/src/pkg/backup/identity"
"github.com/alcionai/corso/src/pkg/control" "github.com/alcionai/corso/src/pkg/control"
ctrlRepo "github.com/alcionai/corso/src/pkg/control/repository" ctrlRepo "github.com/alcionai/corso/src/pkg/control/repository"
"github.com/alcionai/corso/src/pkg/logger" "github.com/alcionai/corso/src/pkg/logger"
@ -31,6 +32,29 @@ var (
ErrorBackupNotFound = clues.New("no backup exists with that id") ErrorBackupNotFound = clues.New("no backup exists with that id")
) )
type NodeType int
const (
UnknownNode NodeType = iota
RootNode
MergeNode
AssistNode
)
type BackupEdge struct {
Reasons []identity.Reasoner
*BackupNode
}
type BackupNode struct {
Type NodeType
Deleted bool
Reasons []identity.Reasoner
Created time.Time
Label string
Children []*BackupEdge
}
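Callers receive the roots of this graph from BackupLineage (declared below) and can walk the edges depth-first. A minimal sketch; printLineage is hypothetical, and fmt/strings imports are assumed:
func printLineage(node *BackupNode, depth int) {
	if node == nil {
		return
	}
	suffix := ""
	if node.Deleted {
		suffix = " (deleted)"
	}
	fmt.Printf("%s%s%s\n", strings.Repeat("  ", depth), node.Label, suffix)
	// A node reachable through two parents prints once per path; track
	// visited labels if deduplication matters.
	for _, edge := range node.Children {
		printLineage(edge.BackupNode, depth+1)
	}
}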
type Repositoryer interface {
Backuper
BackupGetter
@ -57,6 +81,23 @@ type Repositoryer interface {
ctx context.Context,
rcOpts ctrlRepo.Retention,
) (operations.RetentionConfigOperation, error)
// BackupLineage returns all backups for the given tenant and
// protectedResourceID. If one or more backupIDs are given, the lineage is
// filtered down to include only those backups that either match the given
// IDs or are reachable from the backups with the given IDs.
//
// The root nodes of the discovered lineages are returned. All backups in a
// lineage are reachable by traversing the tree starting at one of the
// returned nodes.
BackupLineage(
ctx context.Context,
tenantID string,
protectedResourceID string,
service path.ServiceType,
category path.CategoryType,
backupIDs ...string,
) ([]*BackupNode, error)
}
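A hypothetical invocation against a constructed Repositoryer r, with illustrative IDs; the trailing backup ID narrows the result to lineages touching that backup:
roots, err := r.BackupLineage(
	ctx,
	"tenant-id",
	"user-id",
	path.ExchangeService,
	path.EmailCategory,
	"some-backup-id") // optional; omit to include every lineage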
// Repository contains storage provider information.
@ -385,7 +426,7 @@ func newRepoID(s storage.Storage) string {
// ---------------------------------------------------------------------------
func errWrapper(err error) error {
-if errors.Is(err, data.ErrNotFound) {
+if errors.Is(err, errs.NotFound) {
return clues.Stack(ErrorBackupNotFound, err)
}

View File

@ -11,7 +11,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"github.com/alcionai/corso/src/internal/data" "github.com/alcionai/corso/src/internal/common/errs"
"github.com/alcionai/corso/src/internal/kopia" "github.com/alcionai/corso/src/internal/kopia"
"github.com/alcionai/corso/src/internal/model" "github.com/alcionai/corso/src/internal/model"
"github.com/alcionai/corso/src/internal/operations" "github.com/alcionai/corso/src/internal/operations"
@ -107,11 +107,11 @@ func (suite *RepositoryBackupsUnitSuite) TestGetBackup() {
name: "get error", name: "get error",
sw: mock.BackupWrapper{ sw: mock.BackupWrapper{
Backup: bup, Backup: bup,
GetErr: data.ErrNotFound, GetErr: errs.NotFound,
DeleteErr: nil, DeleteErr: nil,
}, },
expectErr: func(t *testing.T, result error) { expectErr: func(t *testing.T, result error) {
assert.ErrorIs(t, result, data.ErrNotFound, clues.ToCore(result)) assert.ErrorIs(t, result, errs.NotFound, clues.ToCore(result))
assert.ErrorIs(t, result, ErrorBackupNotFound, clues.ToCore(result)) assert.ErrorIs(t, result, ErrorBackupNotFound, clues.ToCore(result))
}, },
expectID: bup.ID, expectID: bup.ID,
@ -446,14 +446,14 @@ func (suite *RepositoryBackupsUnitSuite) TestDeleteBackups() {
bup.ID, bup.ID,
}, },
gets: []getRes{ gets: []getRes{
{err: data.ErrNotFound}, {err: errs.NotFound},
}, },
expectGets: []model.StableID{ expectGets: []model.StableID{
bup.ID, bup.ID,
}, },
failOnMissing: true, failOnMissing: true,
expectErr: func(t *testing.T, result error) { expectErr: func(t *testing.T, result error) {
assert.ErrorIs(t, result, data.ErrNotFound, clues.ToCore(result)) assert.ErrorIs(t, result, errs.NotFound, clues.ToCore(result))
assert.ErrorIs(t, result, ErrorBackupNotFound, clues.ToCore(result)) assert.ErrorIs(t, result, ErrorBackupNotFound, clues.ToCore(result))
}, },
}, },
@ -598,7 +598,7 @@ func (suite *RepositoryBackupsUnitSuite) TestDeleteBackups() {
{bup: bup}, {bup: bup},
{bup: bupLegacy}, {bup: bupLegacy},
{bup: bupNoSnapshot}, {bup: bupNoSnapshot},
{err: data.ErrNotFound}, {err: errs.NotFound},
}, },
expectGets: []model.StableID{ expectGets: []model.StableID{
bup.ID, bup.ID,
@ -608,7 +608,7 @@ func (suite *RepositoryBackupsUnitSuite) TestDeleteBackups() {
}, },
failOnMissing: true, failOnMissing: true,
expectErr: func(t *testing.T, result error) { expectErr: func(t *testing.T, result error) {
assert.ErrorIs(t, result, data.ErrNotFound, clues.ToCore(result)) assert.ErrorIs(t, result, errs.NotFound, clues.ToCore(result))
assert.ErrorIs(t, result, ErrorBackupNotFound, clues.ToCore(result)) assert.ErrorIs(t, result, ErrorBackupNotFound, clues.ToCore(result))
}, },
}, },
@ -622,7 +622,7 @@ func (suite *RepositoryBackupsUnitSuite) TestDeleteBackups() {
}, },
gets: []getRes{ gets: []getRes{
{bup: bup}, {bup: bup},
{err: data.ErrNotFound}, {err: errs.NotFound},
{bup: bupNoSnapshot}, {bup: bupNoSnapshot},
{bup: bupNoDetails}, {bup: bupNoDetails},
}, },
@ -793,7 +793,7 @@ func writeBackup(
ownerID, ownerName string, ownerID, ownerName string,
deets *details.Details, deets *details.Details,
fe *fault.Errors, fe *fault.Errors,
errs *fault.Bus, bus *fault.Bus,
) *backup.Backup { ) *backup.Backup {
var ( var (
serv = sel.PathService() serv = sel.PathService()
@ -806,7 +806,7 @@ func writeBackup(
err = sstore.Collect(ctx, streamstore.FaultErrorsCollector(fe)) err = sstore.Collect(ctx, streamstore.FaultErrorsCollector(fe))
require.NoError(t, err, "collecting errors in streamstore") require.NoError(t, err, "collecting errors in streamstore")
ssid, err := sstore.Write(ctx, errs) ssid, err := sstore.Write(ctx, bus)
require.NoError(t, err, "writing to streamstore") require.NoError(t, err, "writing to streamstore")
tags := map[string]string{ tags := map[string]string{
@ -822,6 +822,8 @@ func writeBackup(
ownerID, ownerName, ownerID, ownerName,
stats.ReadWrites{}, stats.ReadWrites{},
stats.StartAndEndTime{}, stats.StartAndEndTime{},
nil,
nil,
fe, fe,
tags) tags)

View File

@ -37,6 +37,24 @@ func Service(pst path.ServiceType) FilterOption {
}
}
func Reason(service path.ServiceType, category path.CategoryType) FilterOption {
return func(qf *queryFilters) {
qf.tags[backup.ServiceCatString(service, category)] = backup.DefaultTagValue
}
}
func Tenant(tenant string) FilterOption {
return func(qf *queryFilters) {
qf.tags[backup.TenantIDKey] = tenant
}
}
func ProtectedResource(resourceID string) FilterOption {
return func(qf *queryFilters) {
qf.tags[backup.ResourceIDKey] = resourceID
}
}
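These options compose. A sketch of selecting all backups for a single reason from outside the store package, assuming a wrapper built with store.NewWrapper:
sw := store.NewWrapper(modelStore) // modelStore is assumed to exist
bs, err := sw.GetBackups(
	ctx,
	store.Tenant("tenant-id"),
	store.ProtectedResource("user-id"),
	store.Reason(path.ExchangeService, path.EmailCategory))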
type (
BackupWrapper interface {
BackupGetterDeleter