        struct extent_state *prealloc = NULL;
        struct rb_node **p = NULL;
        struct rb_node *parent = NULL;
-       int err = 0;
+       int ret = 0;
        u64 last_start;
        u64 last_end;
        u32 exclusive_bits = (bits & EXTENT_LOCKED);
                if (state->state & exclusive_bits) {
                        *failed_start = state->start;
                        cache_state(state, failed_state);
-                       err = -EEXIST;
+                       ret = -EEXIST;
                        goto out;
                }
 
                if (state->state & exclusive_bits) {
                        *failed_start = start;
                        cache_state(state, failed_state);
-                       err = -EEXIST;
+                       ret = -EEXIST;
                        goto out;
                }
 
                prealloc = alloc_extent_state_atomic(prealloc);
                if (!prealloc)
                        goto search_again;
-               err = split_state(tree, state, prealloc, start);
-               if (err)
-                       extent_io_tree_panic(tree, state, "split", err);
+               ret = split_state(tree, state, prealloc, start);
+               if (ret)
+                       extent_io_tree_panic(tree, state, "split", ret);
 
                prealloc = NULL;
-               if (err)
+               if (ret)
                        goto out;
                if (state->end <= end) {
                        set_state_bits(tree, state, bits, changeset);
                prealloc->end = this_end;
                inserted_state = insert_state(tree, prealloc, bits, changeset);
                if (IS_ERR(inserted_state)) {
-                       err = PTR_ERR(inserted_state);
-                       extent_io_tree_panic(tree, prealloc, "insert", err);
+                       ret = PTR_ERR(inserted_state);
+                       extent_io_tree_panic(tree, prealloc, "insert", ret);
                }
 
                cache_state(inserted_state, cached_state);
                if (state->state & exclusive_bits) {
                        *failed_start = start;
                        cache_state(state, failed_state);
-                       err = -EEXIST;
+                       ret = -EEXIST;
                        goto out;
                }
 
                prealloc = alloc_extent_state_atomic(prealloc);
                if (!prealloc)
                        goto search_again;
-               err = split_state(tree, state, prealloc, end + 1);
-               if (err)
-                       extent_io_tree_panic(tree, state, "split", err);
+               ret = split_state(tree, state, prealloc, end + 1);
+               if (ret)
+                       extent_io_tree_panic(tree, state, "split", ret);
 
                set_state_bits(tree, prealloc, bits, changeset);
                cache_state(prealloc, cached_state);
        if (prealloc)
                free_extent_state(prealloc);
 
-       return err;
+       return ret;
 
 }