dev_priv->mm.object_memory);
 
        size = count = mappable_size = mappable_count = 0;
-       count_objects(&dev_priv->mm.bound_list, gtt_list);
+       count_objects(&dev_priv->mm.bound_list, global_list);
        seq_printf(m, "%u [%u] objects, %zu [%zu] bytes in gtt\n",
                   count, mappable_count, size, mappable_size);
 
                   count, mappable_count, size, mappable_size);
 
        size = count = purgeable_size = purgeable_count = 0;
-       list_for_each_entry(obj, &dev_priv->mm.unbound_list, gtt_list) {
+       list_for_each_entry(obj, &dev_priv->mm.unbound_list, global_list) {
                size += obj->base.size, ++count;
                if (obj->madv == I915_MADV_DONTNEED)
                        purgeable_size += obj->base.size, ++purgeable_count;
        seq_printf(m, "%u unbound objects, %zu bytes\n", count, size);
 
        size = count = mappable_size = mappable_count = 0;
-       list_for_each_entry(obj, &dev_priv->mm.bound_list, gtt_list) {
+       list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list) {
                if (obj->fault_mappable) {
                        size += obj->gtt_space->size;
                        ++count;
                return ret;
 
        total_obj_size = total_gtt_size = count = 0;
-       list_for_each_entry(obj, &dev_priv->mm.bound_list, gtt_list) {
+       list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list) {
                if (list == PINNED_LIST && obj->pin_count == 0)
                        continue;
 
        }
 
        if (val & DROP_UNBOUND) {
-               list_for_each_entry_safe(obj, next, &dev_priv->mm.unbound_list, gtt_list)
+               list_for_each_entry_safe(obj, next, &dev_priv->mm.unbound_list,
+                                        global_list)
                        if (obj->pages_pin_count == 0) {
                                ret = i915_gem_object_put_pages(obj);
                                if (ret)
 
        struct drm_mm_node *gtt_space;
        /** Stolen memory for this object, instead of being backed by shmem. */
        struct drm_mm_node *stolen;
-       struct list_head gtt_list;
+       struct list_head global_list;
 
        /** This object's place on the active/inactive lists */
        struct list_head ring_list;

        pinned = 0;
        mutex_lock(&dev->struct_mutex);
-       list_for_each_entry(obj, &dev_priv->mm.bound_list, gtt_list)
+       list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list)
                if (obj->pin_count)
                        pinned += obj->gtt_space->size;
        mutex_unlock(&dev->struct_mutex);
        /* ->put_pages might need to allocate memory for the bit17 swizzle
         * array, hence protect them from being reaped by removing them from gtt
         * lists early. */
-       list_del(&obj->gtt_list);
+       list_del(&obj->global_list);
 
        ops->put_pages(obj);
        obj->pages = NULL;
 
        list_for_each_entry_safe(obj, next,
                                 &dev_priv->mm.unbound_list,
-                                gtt_list) {
+                                global_list) {
                if ((i915_gem_object_is_purgeable(obj) || !purgeable_only) &&
                    i915_gem_object_put_pages(obj) == 0) {
                        count += obj->base.size >> PAGE_SHIFT;
 
        i915_gem_evict_everything(dev_priv->dev);
 
-       list_for_each_entry_safe(obj, next, &dev_priv->mm.unbound_list, gtt_list)
+       list_for_each_entry_safe(obj, next, &dev_priv->mm.unbound_list,
+                                global_list)
                i915_gem_object_put_pages(obj);
 }
 
        if (ret)
                return ret;
 
-       list_add_tail(&obj->gtt_list, &dev_priv->mm.unbound_list);
+       list_add_tail(&obj->global_list, &dev_priv->mm.unbound_list);
        return 0;
 }
 
        i915_gem_object_unpin_pages(obj);
 
        list_del(&obj->mm_list);
-       list_move_tail(&obj->gtt_list, &dev_priv->mm.unbound_list);
+       list_move_tail(&obj->global_list, &dev_priv->mm.unbound_list);
        /* Avoid an unnecessary call to unbind on rebind. */
        obj->map_and_fenceable = true;
 
        struct drm_i915_gem_object *obj;
        int err = 0;
 
-       list_for_each_entry(obj, &dev_priv->mm.gtt_list, gtt_list) {
+       list_for_each_entry(obj, &dev_priv->mm.gtt_list, global_list) {
                if (obj->gtt_space == NULL) {
                        printk(KERN_ERR "object found on GTT list with no space reserved\n");
                        err++;
                return ret;
        }
 
-       list_move_tail(&obj->gtt_list, &dev_priv->mm.bound_list);
+       list_move_tail(&obj->global_list, &dev_priv->mm.bound_list);
        list_add_tail(&obj->mm_list, &dev_priv->mm.inactive_list);
 
        obj->gtt_space = node;
                          const struct drm_i915_gem_object_ops *ops)
 {
        INIT_LIST_HEAD(&obj->mm_list);
-       INIT_LIST_HEAD(&obj->gtt_list);
+       INIT_LIST_HEAD(&obj->global_list);
        INIT_LIST_HEAD(&obj->ring_list);
        INIT_LIST_HEAD(&obj->exec_list);
 
        }
 
        cnt = 0;
-       list_for_each_entry(obj, &dev_priv->mm.unbound_list, gtt_list)
+       list_for_each_entry(obj, &dev_priv->mm.unbound_list, global_list)
                if (obj->pages_pin_count == 0)
                        cnt += obj->base.size >> PAGE_SHIFT;
-       list_for_each_entry(obj, &dev_priv->mm.inactive_list, gtt_list)
+       list_for_each_entry(obj, &dev_priv->mm.inactive_list, global_list)
                if (obj->pin_count == 0 && obj->pages_pin_count == 0)
                        cnt += obj->base.size >> PAGE_SHIFT;
 
 
        dev_priv->gtt.gtt_clear_range(dev, dev_priv->gtt.start / PAGE_SIZE,
                                      dev_priv->gtt.total / PAGE_SIZE);
 
-       list_for_each_entry(obj, &dev_priv->mm.bound_list, gtt_list) {
+       list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list) {
                i915_gem_clflush_object(obj);
                i915_gem_gtt_bind_object(obj, obj->cache_level);
        }
                dev_priv->mm.gtt_space.color_adjust = i915_gtt_color_adjust;
 
        /* Mark any preallocated objects as occupied */
-       list_for_each_entry(obj, &dev_priv->mm.bound_list, gtt_list) {
+       list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list) {
                DRM_DEBUG_KMS("reserving preallocated space: %x + %zx\n",
                      obj->gtt_offset, obj->base.size);

        obj->gtt_offset = gtt_offset;
        obj->has_global_gtt_mapping = 1;
 
-       list_add_tail(&obj->gtt_list, &dev_priv->mm.bound_list);
+       list_add_tail(&obj->global_list, &dev_priv->mm.bound_list);
        list_add_tail(&obj->mm_list, &dev_priv->mm.inactive_list);
 
        return obj;
 
        struct drm_i915_gem_object *obj;
        int i = 0;
 
-       list_for_each_entry(obj, head, gtt_list) {
+       list_for_each_entry(obj, head, global_list) {
                if (obj->pin_count == 0)
                        continue;
 
        if (ring->id != RCS || !error->ccid)
                return;
 
-       list_for_each_entry(obj, &dev_priv->mm.bound_list, gtt_list) {
+       list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list) {
                if ((error->ccid & PAGE_MASK) == obj->gtt_offset) {
                        ering->ctx = i915_error_object_create_sized(dev_priv,
                                                                    obj, 1);
        list_for_each_entry(obj, &dev_priv->mm.active_list, mm_list)
                i++;
        error->active_bo_count = i;
-       list_for_each_entry(obj, &dev_priv->mm.bound_list, gtt_list)
+       list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list)
                if (obj->pin_count)
                        i++;
        error->pinned_bo_count = i - error->active_bo_count;
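
Why does a pure field rename fan out across this many call sites? Because
list_for_each_entry() takes the link member's name as a literal argument and
feeds it to container_of() to step from each embedded list_head back to the
enclosing object. Below is a minimal, self-contained userspace sketch of that
mechanism; it assumes only the generic <linux/list.h> idiom, and fake_obj and
its fields are illustrative stand-ins, not the real drm_i915_gem_object.

/* Illustrative reimplementation of the kernel's intrusive-list pattern.
 * Build with: gcc list_demo.c (uses GNU typeof, as the kernel macro does). */
#include <stddef.h>
#include <stdio.h>

struct list_head {
	struct list_head *next, *prev;
};

#define LIST_HEAD_INIT(name) { &(name), &(name) }

/* Recover the enclosing object from a pointer to one of its members. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Walk the nodes hanging off 'head', resolving each node back to its
 * container through the named member. The member name is part of the
 * traversal, which is why renaming obj->gtt_list to obj->global_list
 * has to be repeated at every call site in the hunks above. */
#define list_for_each_entry(pos, head, member)                            \
	for (pos = container_of((head)->next, typeof(*pos), member);      \
	     &pos->member != (head);                                      \
	     pos = container_of(pos->member.next, typeof(*pos), member))

static void list_add_tail(struct list_head *new, struct list_head *head)
{
	new->prev = head->prev;
	new->next = head;
	head->prev->next = new;
	head->prev = new;
}

/* Stand-in for drm_i915_gem_object: one object, two independent links. */
struct fake_obj {
	size_t size;
	struct list_head global_list;   /* link for the bound/unbound lists */
	struct list_head mm_list;       /* link for the active/inactive lists */
};

int main(void)
{
	struct list_head bound_list = LIST_HEAD_INIT(bound_list);
	struct fake_obj a = { .size = 4096 }, b = { .size = 8192 };
	struct fake_obj *obj;
	size_t total = 0;

	list_add_tail(&a.global_list, &bound_list);
	list_add_tail(&b.global_list, &bound_list);

	/* bound_list is threaded through ->global_list, so that is the member
	 * we must name; naming mm_list here would still compile, but
	 * container_of() would hand back misaligned object pointers. */
	list_for_each_entry(obj, &bound_list, global_list)
		total += obj->size;

	printf("%zu bytes bound\n", total);     /* prints: 12288 bytes bound */
	return 0;
}

That mismatch risk is visible in the section itself: the shrinker hunk walks
dev_priv->mm.inactive_list while naming global_list as the link member, even
though objects join inactive_list through obj->mm_list (see the
list_add_tail(&obj->mm_list, &dev_priv->mm.inactive_list) context lines
earlier). The rename carries that pre-existing quirk forward from gtt_list
rather than introducing it.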