btrfs: rename err to ret in __set_extent_bit()
author Anand Jain <anand.jain@oracle.com>
Tue, 19 Mar 2024 04:15:16 +0000 (12:15 +0800)
committer David Sterba <dsterba@suse.com>
Tue, 7 May 2024 19:31:01 +0000 (21:31 +0200)
Rename the return value variable from 'err' to 'ret' to unify the naming
with the preferred convention.
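
For illustration only (not part of this patch), a minimal userspace sketch of
the preferred pattern; the function and variable names below are hypothetical:

	/*
	 * Hypothetical sketch: the single return-value variable is named
	 * "ret", and errors funnel through one exit label.
	 */
	#include <errno.h>

	static int do_work(int value)
	{
		return value < 0 ? -EINVAL : 0;
	}

	static int set_bits_example(int value)
	{
		int ret = 0;	/* preferred name, instead of "err" */

		ret = do_work(value);
		if (ret)
			goto out;	/* propagate the error unchanged */

		/* ... more work that may also set ret ... */
	out:
		return ret;
	}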

Signed-off-by: Anand Jain <anand.jain@oracle.com>
Reviewed-by: David Sterba <dsterba@suse.com>
Signed-off-by: David Sterba <dsterba@suse.com>
fs/btrfs/extent-io-tree.c

index c09b428823d76d89dbea579649ffcac296b9a84d..0d564860464d322d6b486915d31b7bae6b61abeb 100644
@@ -1059,7 +1059,7 @@ static int __set_extent_bit(struct extent_io_tree *tree, u64 start, u64 end,
        struct extent_state *prealloc = NULL;
        struct rb_node **p = NULL;
        struct rb_node *parent = NULL;
-       int err = 0;
+       int ret = 0;
        u64 last_start;
        u64 last_end;
        u32 exclusive_bits = (bits & EXTENT_LOCKED);
@@ -1122,7 +1122,7 @@ hit_next:
                if (state->state & exclusive_bits) {
                        *failed_start = state->start;
                        cache_state(state, failed_state);
-                       err = -EEXIST;
+                       ret = -EEXIST;
                        goto out;
                }
 
@@ -1158,7 +1158,7 @@ hit_next:
                if (state->state & exclusive_bits) {
                        *failed_start = start;
                        cache_state(state, failed_state);
-                       err = -EEXIST;
+                       ret = -EEXIST;
                        goto out;
                }
 
@@ -1175,12 +1175,12 @@ hit_next:
                prealloc = alloc_extent_state_atomic(prealloc);
                if (!prealloc)
                        goto search_again;
-               err = split_state(tree, state, prealloc, start);
-               if (err)
-                       extent_io_tree_panic(tree, state, "split", err);
+               ret = split_state(tree, state, prealloc, start);
+               if (ret)
+                       extent_io_tree_panic(tree, state, "split", ret);
 
                prealloc = NULL;
-               if (err)
+               if (ret)
                        goto out;
                if (state->end <= end) {
                        set_state_bits(tree, state, bits, changeset);
@@ -1224,8 +1224,8 @@ hit_next:
                prealloc->end = this_end;
                inserted_state = insert_state(tree, prealloc, bits, changeset);
                if (IS_ERR(inserted_state)) {
-                       err = PTR_ERR(inserted_state);
-                       extent_io_tree_panic(tree, prealloc, "insert", err);
+                       ret = PTR_ERR(inserted_state);
+                       extent_io_tree_panic(tree, prealloc, "insert", ret);
                }
 
                cache_state(inserted_state, cached_state);
@@ -1244,16 +1244,16 @@ hit_next:
                if (state->state & exclusive_bits) {
                        *failed_start = start;
                        cache_state(state, failed_state);
-                       err = -EEXIST;
+                       ret = -EEXIST;
                        goto out;
                }
 
                prealloc = alloc_extent_state_atomic(prealloc);
                if (!prealloc)
                        goto search_again;
-               err = split_state(tree, state, prealloc, end + 1);
-               if (err)
-                       extent_io_tree_panic(tree, state, "split", err);
+               ret = split_state(tree, state, prealloc, end + 1);
+               if (ret)
+                       extent_io_tree_panic(tree, state, "split", ret);
 
                set_state_bits(tree, prealloc, bits, changeset);
                cache_state(prealloc, cached_state);
@@ -1275,7 +1275,7 @@ out:
        if (prealloc)
                free_extent_state(prealloc);
 
-       return err;
+       return ret;
 
 }