Ensure that we propagate errors if available (#4964)
<!-- PR description-->

Fixed all cases where we missed propagating errors. Also added a lint rule in CI to catch it.

---

#### Does this PR need a docs update or release note?

- [ ] ✅ Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x] ⛔ No

#### Type of change

<!--- Please check the type of change your PR introduces: --->

- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

<!-- Can reference multiple issues. Use one of the following "magic words" - "closes, fixes" to auto-close the Github issue. -->

* #<issue>

#### Test Plan

<!-- How will this be tested prior to merging.-->

- [x] 💪 Manual
- [ ] ⚡ Unit test
- [ ] 💚 E2E
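Before the diff, a minimal sketch of the anti-pattern this PR removes and the shape it is rewritten to. `clues.NewWC` and `clues.WrapWC` are the calls shown in the diff itself; the helper name `loadConfig` and the import path are assumptions for illustration, not code from the repo.

```go
package example

import (
	"context"

	"github.com/alcionai/clues" // module path assumed
)

// loadConfig is a hypothetical stand-in for any fallible call in the codebase.
func loadConfig(ctx context.Context) (string, error) { return "", nil }

func before(ctx context.Context) error {
	_, err := loadConfig(ctx)
	if err != nil {
		// Anti-pattern: a brand-new error is returned, so the original err
		// (its message, type, and any wrapped sentinels) is dropped.
		return clues.NewWC(ctx, "loading config")
	}

	return nil
}

func after(ctx context.Context) error {
	_, err := loadConfig(ctx)
	if err != nil {
		// Fix: wrap err so the cause is propagated along with the context
		// annotations.
		return clues.WrapWC(ctx, err, "loading config")
	}

	return nil
}
```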
This commit is contained in:
parent
b49c124512
commit
b47fa8b14f
.github/workflows/ci.yml
@@ -511,6 +511,13 @@ jobs:
             echo "Use len check instead of empty string comparison"
             exit 1
           fi
+      - name: Check for cases where errors are not propagated
+        run: |
+          # Using `grep .` as the exit codes are always true for correct grammar
+          if tree-grepper -q go '((if_statement (binary_expression) @_if (block (return_statement (expression_list (call_expression (selector_expression) @_fun ) @ret .)))) (#match? @_if "err != nil") (#match? @_fun "clues.NewWC"))' | grep .; then
+            echo "Make sure to propagate errors with clues"
+            exit 1
+          fi
 
   # ----------------------------------------------------------------------------------------------------
   # --- GitHub Actions Linting -------------------------------------------------------------------------
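A rough reading of the new tree-grepper query, annotated against the Go shape it flags. The mapping of captures to grammar nodes is my interpretation (an assumption, not tree-grepper documentation): `@_if` binds the `err != nil` condition, `@_fun` binds the `clues.NewWC` selector, and the trailing `.` anchor requires that call to be the last expression in the return.

```go
package example

import (
	"context"
	"errors"

	"github.com/alcionai/clues" // module path assumed
)

var errExample = errors.New("example failure") // hypothetical, for illustration

// flagged by the CI check: the `if err != nil` block (binary_expression @_if)
// returns a call whose selector_expression @_fun matches "clues.NewWC", so err
// is checked but never propagated.
func flagged(ctx context.Context) error {
	err := errExample
	if err != nil {
		return clues.NewWC(ctx, "doing the thing")
	}

	return nil
}

// not flagged: the selector is clues.WrapWC, so the #match? on @_fun fails and
// err travels up the call stack.
func notFlagged(ctx context.Context) error {
	err := errExample
	if err != nil {
		return clues.WrapWC(ctx, err, "doing the thing")
	}

	return nil
}
```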
@@ -1204,12 +1204,12 @@ func traverseBaseDir(
 
 	curP, err := path.PrefixOrPathFromDataLayerPath(currentPath.String(), false)
 	if err != nil {
-		return clues.NewWC(ctx, "converting current path to path.Path")
+		return clues.WrapWC(ctx, err, "converting current path to path.Path")
 	}
 
 	oldP, err := path.PrefixOrPathFromDataLayerPath(oldDirPath.String(), false)
 	if err != nil {
-		return clues.NewWC(ctx, "converting old path to path.Path")
+		return clues.WrapWC(ctx, err, "converting old path to path.Path")
 	}
 
 	node.baseDir = dir
@@ -98,7 +98,7 @@ func computePreviousLinkShares(
 
 	parent, err := originDir.Dir()
 	if err != nil {
-		return nil, clues.NewWC(ctx, "getting parent")
+		return nil, clues.WrapWC(ctx, err, "getting parent")
 	}
 
 	for len(parent.Elements()) > 0 {
@@ -106,7 +106,7 @@ func computePreviousLinkShares(
 
 		drivePath, err := path.ToDrivePath(parent)
 		if err != nil {
-			return nil, clues.NewWC(ictx, "transforming dir to drivePath")
+			return nil, clues.WrapWC(ictx, err, "transforming dir to drivePath")
 		}
 
 		if len(drivePath.Folders) == 0 {
@@ -126,7 +126,7 @@ func computePreviousLinkShares(
 
 		parent, err = parent.Dir()
 		if err != nil {
-			return nil, clues.NewWC(ictx, "getting parent")
+			return nil, clues.WrapWC(ictx, err, "getting parent")
 		}
 	}
 
@@ -156,14 +156,14 @@ func computePreviousMetadata(
 	for {
 		parent, err = parent.Dir()
 		if err != nil {
-			return metadata.Metadata{}, clues.NewWC(ctx, "getting parent")
+			return metadata.Metadata{}, clues.WrapWC(ctx, err, "getting parent")
 		}
 
 		ictx := clues.Add(ctx, "parent_dir", parent)
 
 		drivePath, err := path.ToDrivePath(parent)
 		if err != nil {
-			return metadata.Metadata{}, clues.NewWC(ictx, "transforming dir to drivePath")
+			return metadata.Metadata{}, clues.WrapWC(ictx, err, "transforming dir to drivePath")
 		}
 
 		if len(drivePath.Folders) == 0 {
@@ -63,7 +63,7 @@ func ParseMetadataCollections(
 
 	err := json.NewDecoder(item.ToReader()).Decode(&m)
 	if err != nil {
-		return nil, false, clues.NewWC(ctx, "decoding metadata json")
+		return nil, false, clues.WrapWC(ctx, err, "decoding metadata json")
 	}
 
 	switch item.ID() {
@@ -64,7 +64,7 @@ func parseMetadataCollections(
 
 	err := json.NewDecoder(item.ToReader()).Decode(&m)
 	if err != nil {
-		return nil, false, clues.NewWC(ctx, "decoding metadata json")
+		return nil, false, clues.WrapWC(ctx, err, "decoding metadata json")
 	}
 
 	switch item.ID() {
@@ -673,7 +673,7 @@ func mergeItemsFromBase(
 		errs)
 	if err != nil {
 		return manifestAddedEntries,
-			clues.NewWC(ctx, "fetching base details for backup")
+			clues.WrapWC(ctx, err, "fetching base details for backup")
 	}
 
 	for _, entry := range baseDeets.Items() {
@@ -683,7 +683,7 @@ func mergeItemsFromBase(
 
 	rr, err := path.FromDataLayerPath(entry.RepoRef, true)
 	if err != nil {
-		return manifestAddedEntries, clues.NewWC(ctx, "parsing base item info path").
+		return manifestAddedEntries, clues.WrapWC(ctx, err, "parsing base item info path").
 			With("repo_ref", path.LoggableDir(entry.RepoRef))
 	}
 
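Why the change matters downstream: callers that inspect the cause with `errors.Is` or `errors.As` only see it if the original error is wrapped rather than replaced. A minimal sketch, assuming `clues.WrapWC` exposes the wrapped error via the standard `Unwrap` convention (the point of propagating it); the sentinel and helper names here are invented for illustration.

```go
package main

import (
	"context"
	"errors"
	"fmt"

	"github.com/alcionai/clues" // module path assumed
)

var errNotFound = errors.New("not found") // hypothetical sentinel

// fetch is a hypothetical call that fails with the sentinel.
func fetch(ctx context.Context) error {
	return errNotFound
}

func fetchWrapped(ctx context.Context) error {
	if err := fetch(ctx); err != nil {
		// As in the diff: wrap err instead of creating a new error.
		return clues.WrapWC(ctx, err, "fetching item")
	}

	return nil
}

func main() {
	ctx := context.Background()

	// With WrapWC the sentinel survives, so this prints true.
	// With the old NewWC call it would print false: the cause was dropped.
	fmt.Println(errors.Is(fetchWrapped(ctx), errNotFound))
}
```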