drm/i915: Move map-and-fenceable tracking to the VMA
author Chris Wilson <chris@chris-wilson.co.uk>
Thu, 18 Aug 2016 16:16:55 +0000 (17:16 +0100)
committer Chris Wilson <chris@chris-wilson.co.uk>
Thu, 18 Aug 2016 21:36:48 +0000 (22:36 +0100)
By moving map-and-fenceable tracking from the object to the VMA, we gain
fine-grained tracking and the ability to track individual fences on the VMA
(subsequent patch).

Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
Reviewed-by: Joonas Lahtinen <joonas.lahtinen@linux.intel.com>
Link: http://patchwork.freedesktop.org/patch/msgid/20160818161718.27187-16-chris@chris-wilson.co.uk
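The gist of the change: callers stop reading obj->map_and_fenceable and instead query a per-VMA bit (I915_VMA_CAN_FENCE) through i915_vma_is_map_and_fenceable(). Below is a minimal, self-contained C sketch of that flag-in-the-VMA pattern; the struct and helper are simplified stand-ins for the kernel's i915_vma and its helpers, not the actual definitions.

    /* Standalone sketch only: a reduced VMA with a flags word, mirroring
     * the I915_VMA_CAN_FENCE bit introduced by this patch. */
    #include <stdbool.h>
    #include <stdio.h>

    #define VMA_CAN_FENCE (1u << 9)

    struct vma_sketch {
            unsigned int flags;
    };

    /* counterpart of i915_vma_is_map_and_fenceable() */
    static bool vma_is_map_and_fenceable(const struct vma_sketch *vma)
    {
            return vma->flags & VMA_CAN_FENCE;
    }

    int main(void)
    {
            struct vma_sketch vma = { .flags = 0 };

            /* on bind, __i915_vma_set_map_and_fenceable() sets the bit when
             * the node is both mappable and fenceable */
            vma.flags |= VMA_CAN_FENCE;
            printf("after bind:   %d\n", vma_is_map_and_fenceable(&vma));

            /* on unbind, i915_vma_unbind() clears it */
            vma.flags &= ~VMA_CAN_FENCE;
            printf("after unbind: %d\n", vma_is_map_and_fenceable(&vma));
            return 0;
    }

Because the bit lives on the VMA rather than the object, each GGTT view of an object can record its own mappable/fenceable state, which is what later fence-per-VMA tracking relies on.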
drivers/gpu/drm/i915/i915_drv.h
drivers/gpu/drm/i915/i915_gem.c
drivers/gpu/drm/i915/i915_gem_execbuffer.c
drivers/gpu/drm/i915/i915_gem_fence.c
drivers/gpu/drm/i915/i915_gem_gtt.c
drivers/gpu/drm/i915/i915_gem_gtt.h
drivers/gpu/drm/i915/i915_gem_tiling.c
drivers/gpu/drm/i915/intel_display.c

drivers/gpu/drm/i915/i915_drv.h
index 91861a08787c5464b2fa7a6c983b3afc305b6cf7..b3623945b5557a23609806d17c1cc0b4a7220488 100644 (file)
@@ -2192,12 +2192,6 @@ struct drm_i915_gem_object {
         */
        unsigned int fence_dirty:1;
 
-       /**
-        * Is the object at the current location in the gtt mappable and
-        * fenceable? Used to avoid costly recalculations.
-        */
-       unsigned int map_and_fenceable:1;
-
        /**
         * Whether the current gtt mapping needs to be mappable (and isn't just
         * mappable by accident). Track pin and fault separate for a more
drivers/gpu/drm/i915/i915_gem.c
index cfec2ff4fc7c09327d426fb74576732c8fed1917..1f6312ca646c3aa73eedcf03d5448a45fea942f0 100644 (file)
@@ -2899,8 +2899,7 @@ int i915_vma_unbind(struct i915_vma *vma)
        GEM_BUG_ON(obj->bind_count == 0);
        GEM_BUG_ON(!obj->pages);
 
-       if (i915_vma_is_ggtt(vma) &&
-           vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL) {
+       if (i915_vma_is_map_and_fenceable(vma)) {
                i915_gem_object_finish_gtt(obj);
 
                /* release the fence reg _after_ flushing */
@@ -2909,6 +2908,7 @@ int i915_vma_unbind(struct i915_vma *vma)
                        return ret;
 
                __i915_vma_iounmap(vma);
+               vma->flags &= ~I915_VMA_CAN_FENCE;
        }
 
        if (likely(!vma->vm->closed)) {
@@ -2920,13 +2920,10 @@ int i915_vma_unbind(struct i915_vma *vma)
        drm_mm_remove_node(&vma->node);
        list_move_tail(&vma->vm_link, &vma->vm->unbound_list);
 
-       if (i915_vma_is_ggtt(vma)) {
-               if (vma->ggtt_view.type == I915_GGTT_VIEW_NORMAL) {
-                       obj->map_and_fenceable = false;
-               } else if (vma->pages) {
-                       sg_free_table(vma->pages);
-                       kfree(vma->pages);
-               }
+       if (vma->pages != obj->pages) {
+               GEM_BUG_ON(!vma->pages);
+               sg_free_table(vma->pages);
+               kfree(vma->pages);
        }
        vma->pages = NULL;
 
@@ -3703,8 +3700,6 @@ i915_gem_ring_throttle(struct drm_device *dev, struct drm_file *file)
 static bool
 i915_vma_misplaced(struct i915_vma *vma, u64 size, u64 alignment, u64 flags)
 {
-       struct drm_i915_gem_object *obj = vma->obj;
-
        if (!drm_mm_node_allocated(&vma->node))
                return false;
 
@@ -3714,7 +3709,7 @@ i915_vma_misplaced(struct i915_vma *vma, u64 size, u64 alignment, u64 flags)
        if (alignment && vma->node.start & (alignment - 1))
                return true;
 
-       if (flags & PIN_MAPPABLE && !obj->map_and_fenceable)
+       if (flags & PIN_MAPPABLE && !i915_vma_is_map_and_fenceable(vma))
                return true;
 
        if (flags & PIN_OFFSET_BIAS &&
@@ -3736,10 +3731,10 @@ void __i915_vma_set_map_and_fenceable(struct i915_vma *vma)
        u32 fence_size, fence_alignment;
 
        fence_size = i915_gem_get_ggtt_size(dev_priv,
-                                           obj->base.size,
+                                           vma->size,
                                            i915_gem_object_get_tiling(obj));
        fence_alignment = i915_gem_get_ggtt_alignment(dev_priv,
-                                                     obj->base.size,
+                                                     vma->size,
                                                      i915_gem_object_get_tiling(obj),
                                                      true);
 
@@ -3749,7 +3744,10 @@ void __i915_vma_set_map_and_fenceable(struct i915_vma *vma)
        mappable = (vma->node.start + fence_size <=
                    dev_priv->ggtt.mappable_end);
 
-       obj->map_and_fenceable = mappable && fenceable;
+       if (mappable && fenceable)
+               vma->flags |= I915_VMA_CAN_FENCE;
+       else
+               vma->flags &= ~I915_VMA_CAN_FENCE;
 }
 
 int __i915_vma_do_pin(struct i915_vma *vma,
@@ -3809,12 +3807,11 @@ i915_gem_object_ggtt_pin(struct drm_i915_gem_object *obj,
 
                WARN(i915_vma_is_pinned(vma),
                     "bo is already pinned in ggtt with incorrect alignment:"
-                    " offset=%08x, req.alignment=%llx, req.map_and_fenceable=%d,"
-                    " obj->map_and_fenceable=%d\n",
-                    i915_ggtt_offset(vma),
-                    alignment,
+                    " offset=%08x, req.alignment=%llx,"
+                    " req.map_and_fenceable=%d, vma->map_and_fenceable=%d\n",
+                    i915_ggtt_offset(vma), alignment,
                     !!(flags & PIN_MAPPABLE),
-                    obj->map_and_fenceable);
+                    i915_vma_is_map_and_fenceable(vma));
                ret = i915_vma_unbind(vma);
                if (ret)
                        return ERR_PTR(ret);
drivers/gpu/drm/i915/i915_gem_execbuffer.c
index 98d5aa5e0a347a0451e19f5c9e66944ca3815d11..28888d6089141bca487de26c8f12313ed9cb7deb 100644 (file)
@@ -857,7 +857,6 @@ static bool
 eb_vma_misplaced(struct i915_vma *vma)
 {
        struct drm_i915_gem_exec_object2 *entry = vma->exec_entry;
-       struct drm_i915_gem_object *obj = vma->obj;
 
        WARN_ON(entry->flags & __EXEC_OBJECT_NEEDS_MAP &&
                !i915_vma_is_ggtt(vma));
@@ -878,7 +877,8 @@ eb_vma_misplaced(struct i915_vma *vma)
                return true;
 
        /* avoid costly ping-pong once a batch bo ended up non-mappable */
-       if (entry->flags & __EXEC_OBJECT_NEEDS_MAP && !obj->map_and_fenceable)
+       if (entry->flags & __EXEC_OBJECT_NEEDS_MAP &&
+           !i915_vma_is_map_and_fenceable(vma))
                return !only_mappable_for_reloc(entry->flags);
 
        if ((entry->flags & EXEC_OBJECT_SUPPORTS_48B_ADDRESS) == 0 &&
drivers/gpu/drm/i915/i915_gem_fence.c
index b0c6c2777725cae82d2cde3637d16c8345477ae0..e15365be404566e70c254dd69d89968fc9f7b0b3 100644 (file)
@@ -130,7 +130,9 @@ static void i915_write_fence_reg(struct drm_device *dev, int reg,
                     !is_power_of_2(vma->node.size) ||
                     (vma->node.start & (vma->node.size - 1)),
                     "object 0x%08llx [fenceable? %d] not 1M or pot-size (0x%08llx) aligned\n",
-                    vma->node.start, obj->map_and_fenceable, vma->node.size);
+                    vma->node.start,
+                    i915_vma_is_map_and_fenceable(vma),
+                    vma->node.size);
 
                if (tiling == I915_TILING_Y && HAS_128_BYTE_Y_TILING(dev))
                        tile_width = 128;
@@ -389,9 +391,6 @@ i915_gem_object_get_fence(struct drm_i915_gem_object *obj)
                        return 0;
                }
        } else if (enable) {
-               if (WARN_ON(!obj->map_and_fenceable))
-                       return -EINVAL;
-
                reg = i915_find_fence_reg(dev);
                if (IS_ERR(reg))
                        return PTR_ERR(reg);
drivers/gpu/drm/i915/i915_gem_gtt.c
index 3631944ac2d9d6a08a60d7a439edf1749acb555d..e31f98df26f6739786208ef2d7faec54b88ac17d 100644 (file)
@@ -3671,7 +3671,7 @@ void __iomem *i915_vma_pin_iomap(struct i915_vma *vma)
        assert_rpm_wakelock_held(to_i915(vma->vm->dev));
 
        lockdep_assert_held(&vma->vm->dev->struct_mutex);
-       if (WARN_ON(!vma->obj->map_and_fenceable))
+       if (WARN_ON(!i915_vma_is_map_and_fenceable(vma)))
                return IO_ERR_PTR(-ENODEV);
 
        GEM_BUG_ON(!i915_vma_is_ggtt(vma));
drivers/gpu/drm/i915/i915_gem_gtt.h
index d6e4b652919607978b98148a17db751796646b6d..d7ff78b46266f0ff4037c0419391cfe03b40dd2d 100644 (file)
@@ -197,8 +197,9 @@ struct i915_vma {
 #define I915_VMA_LOCAL_BIND    BIT(7)
 #define I915_VMA_BIND_MASK (I915_VMA_GLOBAL_BIND | I915_VMA_LOCAL_BIND | I915_VMA_PIN_OVERFLOW)
 
-#define I915_VMA_GGTT  BIT(8)
-#define I915_VMA_CLOSED BIT(9)
+#define I915_VMA_GGTT          BIT(8)
+#define I915_VMA_CAN_FENCE     BIT(9)
+#define I915_VMA_CLOSED                BIT(10)
 
        unsigned int active;
        struct i915_gem_active last_read[I915_NUM_ENGINES];
@@ -239,6 +240,11 @@ static inline bool i915_vma_is_ggtt(const struct i915_vma *vma)
        return vma->flags & I915_VMA_GGTT;
 }
 
+static inline bool i915_vma_is_map_and_fenceable(const struct i915_vma *vma)
+{
+       return vma->flags & I915_VMA_CAN_FENCE;
+}
+
 static inline bool i915_vma_is_closed(const struct i915_vma *vma)
 {
        return vma->flags & I915_VMA_CLOSED;
drivers/gpu/drm/i915/i915_gem_tiling.c
index bfefb63a55ef971ce3657bc049ab16c951c4993a..af70d4460a9e0f3c022060825a8fba59aab5e23e 100644 (file)
@@ -134,7 +134,7 @@ i915_gem_object_fence_prepare(struct drm_i915_gem_object *obj, int tiling_mode)
        if (!vma)
                return 0;
 
-       if (!obj->map_and_fenceable)
+       if (!i915_vma_is_map_and_fenceable(vma))
                return 0;
 
        if (IS_GEN3(dev_priv)) {
@@ -145,7 +145,7 @@ i915_gem_object_fence_prepare(struct drm_i915_gem_object *obj, int tiling_mode)
                        goto bad;
        }
 
-       size = i915_gem_get_ggtt_size(dev_priv, obj->base.size, tiling_mode);
+       size = i915_gem_get_ggtt_size(dev_priv, vma->size, tiling_mode);
        if (vma->node.size < size)
                goto bad;
 
drivers/gpu/drm/i915/intel_display.c
index 31eaeedfad302517b1939ca601d425194549745f..04a8900f68c1131885f2a27a1e49f33056448bfb 100644 (file)
@@ -2224,7 +2224,7 @@ intel_pin_and_fence_fb_obj(struct drm_framebuffer *fb, unsigned int rotation)
         * framebuffer compression.  For simplicity, we always install
         * a fence as the cost is not that onerous.
         */
-       if (view.type == I915_GGTT_VIEW_NORMAL) {
+       if (i915_vma_is_map_and_fenceable(vma)) {
                ret = i915_gem_object_get_fence(obj);
                if (ret == -EDEADLK) {
                        /*
@@ -2262,11 +2262,11 @@ void intel_unpin_fb_obj(struct drm_framebuffer *fb, unsigned int rotation)
        WARN_ON(!mutex_is_locked(&obj->base.dev->struct_mutex));
 
        intel_fill_fb_ggtt_view(&view, fb, rotation);
+       vma = i915_gem_object_to_ggtt(obj, &view);
 
-       if (view.type == I915_GGTT_VIEW_NORMAL)
+       if (i915_vma_is_map_and_fenceable(vma))
                i915_gem_object_unpin_fence(obj);
 
-       vma = i915_gem_object_to_ggtt(obj, &view);
        i915_gem_object_unpin_from_display_plane(vma);
 }