gfs2: Fix "Make glock lru list scanning safer"
author: Andreas Gruenbacher <agruenba@redhat.com>
Thu, 28 Mar 2024 18:59:45 +0000 (19:59 +0100)
committer: Andreas Gruenbacher <agruenba@redhat.com>
Tue, 9 Apr 2024 16:35:58 +0000 (18:35 +0200)
Commit 228804a35caa tried to add a refcount check to
gfs2_scan_glock_lru() to make sure that glocks that are still referenced
cannot be freed.  It failed to account for the bias state_change() adds
to the refcount for held glocks, so held glocks are no longer removed
from the glock cache, which can lead to out-of-memory problems.  Fix
that.  (The inodes those glocks are associated with do get shrunk and do
get pushed out of memory.)

In addition, use the same eligibility check in gfs2_scan_glock_lru() and
gfs2_dispose_glock_lru().

Signed-off-by: Andreas Gruenbacher <agruenba@redhat.com>
fs/gfs2/glock.c

index 2882a42e88aaa60ede90f6d02566a90986a9b1d9..04e0a8ac61d7bd4695328e79453cf4e6071ca98b 100644 (file)
@@ -1987,6 +1987,14 @@ static int glock_cmp(void *priv, const struct list_head *a,
        return 0;
 }
 
+static bool can_free_glock(struct gfs2_glock *gl)
+{
+       bool held = gl->gl_state != LM_ST_UNLOCKED;
+
+       return !test_bit(GLF_LOCK, &gl->gl_flags) &&
+              gl->gl_lockref.count == held;
+}
+
 /**
  * gfs2_dispose_glock_lru - Demote a list of glocks
  * @list: The list to dispose of
@@ -2020,7 +2028,7 @@ add_back_to_lru:
                        atomic_inc(&lru_count);
                        continue;
                }
-               if (test_bit(GLF_LOCK, &gl->gl_flags)) {
+               if (!can_free_glock(gl)) {
                        spin_unlock(&gl->gl_lockref.lock);
                        goto add_back_to_lru;
                }
@@ -2052,16 +2060,10 @@ static long gfs2_scan_glock_lru(int nr)
        list_for_each_entry_safe(gl, next, &lru_list, gl_lru) {
                if (nr-- <= 0)
                        break;
-               /* Test for being demotable */
-               if (!test_bit(GLF_LOCK, &gl->gl_flags)) {
-                       if (!spin_trylock(&gl->gl_lockref.lock))
-                               continue;
-                       if (!gl->gl_lockref.count) {
-                               list_move(&gl->gl_lru, &dispose);
-                               atomic_dec(&lru_count);
-                               freed++;
-                       }
-                       spin_unlock(&gl->gl_lockref.lock);
+               if (can_free_glock(gl)) {
+                       list_move(&gl->gl_lru, &dispose);
+                       atomic_dec(&lru_count);
+                       freed++;
                }
        }
        if (!list_empty(&dispose))