Ensure that we propagate errors if available (#4964)

<!-- PR description-->

Fixed all cases where we missed propagating the underlying error when one was available. Also added a CI lint rule to catch future occurrences.
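
For context, here is a minimal sketch of the pattern being changed (illustrative only; `doSomething` is a made-up helper, and the import path assumes the `clues` package at `github.com/alcionai/clues`): `clues.NewWC` builds a fresh error from the context and message and drops the original `err`, while `clues.WrapWC` keeps `err` in the chain.

```go
package example

import (
	"context"

	"github.com/alcionai/clues"
)

// doSomething stands in for any fallible call; it is not part of this PR.
func doSomething(ctx context.Context) error { return nil }

func propagate(ctx context.Context) error {
	if err := doSomething(ctx); err != nil {
		// Before: clues.NewWC(ctx, "doing something") created a brand-new
		// error and silently dropped err.
		// After: WrapWC wraps err, so its message and wrapped cause are
		// preserved alongside the values attached to ctx.
		return clues.WrapWC(ctx, err, "doing something")
	}

	return nil
}
```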

---

#### Does this PR need a docs update or release note?

- [ ]  Yes, it's included
- [ ] 🕐 Yes, but in a later PR
- [x]  No

#### Type of change

<!--- Please check the type of change your PR introduces: --->
- [ ] 🌻 Feature
- [ ] 🐛 Bugfix
- [ ] 🗺️ Documentation
- [ ] 🤖 Supportability/Tests
- [ ] 💻 CI/Deployment
- [x] 🧹 Tech Debt/Cleanup

#### Issue(s)

<!-- Can reference multiple issues. Use one of the following "magic words" - "closes, fixes" to auto-close the Github issue. -->
* #<issue>

#### Test Plan

<!-- How will this be tested prior to merging.-->
- [x] 💪 Manual
- [ ]  Unit test
- [ ] 💚 E2E
Abin Simon 2024-01-03 21:08:46 +05:30 committed by GitHub
parent b49c124512
commit b47fa8b14f
6 changed files with 18 additions and 11 deletions


@@ -511,6 +511,13 @@ jobs:
             echo "Use len check instead of empty string comparison"
             exit 1
           fi
+      - name: Check for cases where errors are not propagated
+        run: |
+          # Using `grep .` as the exit codes are always true for correct grammar
+          if tree-grepper -q go '((if_statement (binary_expression) @_if (block (return_statement (expression_list (call_expression (selector_expression) @_fun ) @ret .)))) (#match? @_if "err != nil") (#match? @_fun "clues.NewWC"))' | grep .; then
+            echo "Make sure to propagate errors with clues"
+            exit 1
+          fi
 
   # ----------------------------------------------------------------------------------------------------
   # --- GitHub Actions Linting -------------------------------------------------------------------------
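
As a rough sketch of what the query above matches (not code from this repo; names are made up): it flags `if` statements whose condition contains `err != nil` and whose body returns with a final call to `clues.NewWC`, i.e. places where the error is checked but never propagated.

```go
package example

import (
	"context"

	"github.com/alcionai/clues"
)

func flagged(ctx context.Context, err error) error {
	if err != nil {
		// Matched by the lint: the condition mentions err != nil, but the
		// returned clues.NewWC call never receives err.
		return clues.NewWC(ctx, "doing work")
	}

	return nil
}

func notFlagged(ctx context.Context, err error) error {
	if err != nil {
		// Not matched: clues.WrapWC carries err along, so the error is
		// actually propagated.
		return clues.WrapWC(ctx, err, "doing work")
	}

	return nil
}
```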


@@ -1204,12 +1204,12 @@ func traverseBaseDir(
 	curP, err := path.PrefixOrPathFromDataLayerPath(currentPath.String(), false)
 	if err != nil {
-		return clues.NewWC(ctx, "converting current path to path.Path")
+		return clues.WrapWC(ctx, err, "converting current path to path.Path")
 	}
 
 	oldP, err := path.PrefixOrPathFromDataLayerPath(oldDirPath.String(), false)
 	if err != nil {
-		return clues.NewWC(ctx, "converting old path to path.Path")
+		return clues.WrapWC(ctx, err, "converting old path to path.Path")
 	}
 
 	node.baseDir = dir


@@ -98,7 +98,7 @@ func computePreviousLinkShares(
 	parent, err := originDir.Dir()
 	if err != nil {
-		return nil, clues.NewWC(ctx, "getting parent")
+		return nil, clues.WrapWC(ctx, err, "getting parent")
 	}
 
 	for len(parent.Elements()) > 0 {
@@ -106,7 +106,7 @@ func computePreviousLinkShares(
 		drivePath, err := path.ToDrivePath(parent)
 		if err != nil {
-			return nil, clues.NewWC(ictx, "transforming dir to drivePath")
+			return nil, clues.WrapWC(ictx, err, "transforming dir to drivePath")
 		}
 
 		if len(drivePath.Folders) == 0 {
@@ -126,7 +126,7 @@ func computePreviousLinkShares(
 		parent, err = parent.Dir()
 		if err != nil {
-			return nil, clues.NewWC(ictx, "getting parent")
+			return nil, clues.WrapWC(ictx, err, "getting parent")
 		}
 	}
@@ -156,14 +156,14 @@ func computePreviousMetadata(
 	for {
 		parent, err = parent.Dir()
 		if err != nil {
-			return metadata.Metadata{}, clues.NewWC(ctx, "getting parent")
+			return metadata.Metadata{}, clues.WrapWC(ctx, err, "getting parent")
 		}
 
 		ictx := clues.Add(ctx, "parent_dir", parent)
 
 		drivePath, err := path.ToDrivePath(parent)
 		if err != nil {
-			return metadata.Metadata{}, clues.NewWC(ictx, "transforming dir to drivePath")
+			return metadata.Metadata{}, clues.WrapWC(ictx, err, "transforming dir to drivePath")
 		}
 
 		if len(drivePath.Folders) == 0 {


@@ -63,7 +63,7 @@ func ParseMetadataCollections(
 		err := json.NewDecoder(item.ToReader()).Decode(&m)
 		if err != nil {
-			return nil, false, clues.NewWC(ctx, "decoding metadata json")
+			return nil, false, clues.WrapWC(ctx, err, "decoding metadata json")
 		}
 
 		switch item.ID() {


@@ -64,7 +64,7 @@ func parseMetadataCollections(
 		err := json.NewDecoder(item.ToReader()).Decode(&m)
 		if err != nil {
-			return nil, false, clues.NewWC(ctx, "decoding metadata json")
+			return nil, false, clues.WrapWC(ctx, err, "decoding metadata json")
 		}
 
 		switch item.ID() {


@@ -673,7 +673,7 @@ func mergeItemsFromBase(
 		errs)
 	if err != nil {
 		return manifestAddedEntries,
-			clues.NewWC(ctx, "fetching base details for backup")
+			clues.WrapWC(ctx, err, "fetching base details for backup")
 	}
 
 	for _, entry := range baseDeets.Items() {
@@ -683,7 +683,7 @@ func mergeItemsFromBase(
 		rr, err := path.FromDataLayerPath(entry.RepoRef, true)
 		if err != nil {
-			return manifestAddedEntries, clues.NewWC(ctx, "parsing base item info path").
+			return manifestAddedEntries, clues.WrapWC(ctx, err, "parsing base item info path").
 				With("repo_ref", path.LoggableDir(entry.RepoRef))
 		}