HDMI_AUDIO_ON, /* force turn on HDMI audio */
};
-#define I915_GTT_RESERVED (1<<0)
#define I915_GTT_OFFSET_NONE ((u32)-1)
struct drm_i915_gem_object {
const struct drm_i915_gem_object_ops *ops;
/** Current space allocated to this object in the GTT, if any. */
- struct drm_mm_node *gtt_space;
+ struct drm_mm_node gtt_space;
/** Stolen memory for this object, instead of being backed by shmem. */
struct drm_mm_node *stolen;
struct list_head global_list;
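The embedded node is the heart of this patch: obj->gtt_space now lives and
dies with the object itself, so the bind/unbind paths below stop calling
kzalloc()/kfree() for it. A minimal sketch of the consequence, assuming a
zeroing allocator (i915's real object allocator is a zeroing slab cache;
kzalloc() stands in for it here):

	/* Sketch, not patch code: a freshly zeroed object reports its
	 * embedded node as unallocated, i.e. "not bound yet". */
	static struct drm_i915_gem_object *example_alloc(void)
	{
		struct drm_i915_gem_object *obj;

		obj = kzalloc(sizeof(*obj), GFP_KERNEL);
		if (obj)
			WARN_ON(drm_mm_node_allocated(&obj->gtt_space));
		return obj;
	}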
static inline unsigned long
i915_gem_obj_ggtt_offset(struct drm_i915_gem_object *o)
{
- return o->gtt_space->start;
+ return o->gtt_space.start;
}
/* Whether or not this object is currently mapped by the translation tables */
static inline bool
i915_gem_obj_ggtt_bound(struct drm_i915_gem_object *o)
{
- return o->gtt_space != NULL;
+ return drm_mm_node_allocated(&o->gtt_space);
}
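With a pointer, "bound" was a NULL test; with an embedded node it becomes a
flag query. For reference, the drm_mm helper relied on here reduces, in this
era of drm_mm.h, to roughly:

	static inline bool drm_mm_node_allocated(struct drm_mm_node *node)
	{
		return node->allocated;
	}

An unbound object therefore answers false simply because its node was never
inserted into (or was removed from) an address space.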
/* The size used in the translation tables may be larger than the actual size of
 * the object on GEN2/GEN3 because of the way tiling is handled. See
 * i915_gem_get_gtt_size() for more details.
 */
static inline unsigned long
i915_gem_obj_ggtt_size(struct drm_i915_gem_object *o)
{
- return o->gtt_space->size;
+ return o->gtt_space.size;
}
static inline void
i915_gem_obj_ggtt_set_color(struct drm_i915_gem_object *o,
enum i915_cache_level color)
{
- o->gtt_space->color = color;
+ o->gtt_space.color = color;
}
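A hypothetical caller (not part of the patch) showing how the accessors
compose; every GGTT query now goes through these helpers instead of
dereferencing a possibly-NULL node pointer:

	static void example_dump_ggtt(struct drm_i915_gem_object *obj)
	{
		if (!i915_gem_obj_ggtt_bound(obj))
			return;

		DRM_DEBUG_KMS("GGTT node: 0x%08lx + %lx\n",
			      i915_gem_obj_ggtt_offset(obj),
			      i915_gem_obj_ggtt_size(obj));
	}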
/* Avoid an unnecessary call to unbind on rebind. */
obj->map_and_fenceable = true;
- drm_mm_put_block(obj->gtt_space);
- obj->gtt_space = NULL;
+ drm_mm_remove_node(&obj->gtt_space);
return 0;
}
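Unbind shrinks from a free-and-clear pair to a single drm_mm_remove_node()
call. A sketch of the full lifecycle this enables, using the plain
drm_mm_insert_node() entry point of this era (error handling trimmed):

	static int example_bind_cycle(struct drm_mm *mm,
				      struct drm_i915_gem_object *obj)
	{
		int ret;

		/* bind: storage for the node already exists inside obj */
		ret = drm_mm_insert_node(mm, &obj->gtt_space,
					 obj->base.size, 0);
		if (ret)
			return ret;

		/* unbind: unlink only; no kfree, immediately rebindable */
		drm_mm_remove_node(&obj->gtt_space);
		return 0;
	}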
if (HAS_LLC(dev))
return true;
- if (gtt_space == NULL)
+ if (!drm_mm_node_allocated(gtt_space))
return true;
if (list_empty(&gtt_space->node_list))
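For context on the node_list test (my reading of drm_mm in this era, not
patch code): every allocated node is linked on the mm's node_list in address
order, so a node's list neighbours are its address-space neighbours, and
their color (which i915 uses to carry the cache level) can be checked for
snoop conflicts, roughly:

	struct drm_mm_node *prev = list_entry(gtt_space->node_list.prev,
					      struct drm_mm_node, node_list);

	/* abutting nodes of differing snoop types are not a valid layout */
	if (drm_mm_node_allocated(prev) && prev->color != cache_level)
		return false;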
{
struct drm_device *dev = obj->base.dev;
drm_i915_private_t *dev_priv = dev->dev_private;
- struct drm_mm_node *node;
u32 size, fence_size, fence_alignment, unfenced_alignment;
bool mappable, fenceable;
size_t gtt_max = map_and_fenceable ?
dev_priv->gtt.mappable_end : dev_priv->gtt.total;
i915_gem_object_pin_pages(obj);
- node = kzalloc(sizeof(*node), GFP_KERNEL);
- if (node == NULL) {
- i915_gem_object_unpin_pages(obj);
- return -ENOMEM;
- }
-
search_free:
- ret = drm_mm_insert_node_in_range_generic(&dev_priv->mm.gtt_space, node,
+ ret = drm_mm_insert_node_in_range_generic(&dev_priv->mm.gtt_space,
+ &obj->gtt_space,
size, alignment,
obj->cache_level, 0, gtt_max);
if (ret) {
ret = i915_gem_evict_something(dev, size, alignment,
obj->cache_level,
map_and_fenceable,
nonblocking);
if (ret == 0)
goto search_free;
i915_gem_object_unpin_pages(obj);
- kfree(node);
return ret;
}
- if (WARN_ON(!i915_gem_valid_gtt_space(dev, node, obj->cache_level))) {
+ if (WARN_ON(!i915_gem_valid_gtt_space(dev, &obj->gtt_space,
+ obj->cache_level))) {
i915_gem_object_unpin_pages(obj);
- drm_mm_put_block(node);
+ drm_mm_remove_node(&obj->gtt_space);
return -EINVAL;
}
ret = i915_gem_gtt_prepare_object(obj);
if (ret) {
i915_gem_object_unpin_pages(obj);
- drm_mm_put_block(node);
+ drm_mm_remove_node(&obj->gtt_space);
return ret;
}
list_move_tail(&obj->global_list, &dev_priv->mm.bound_list);
list_add_tail(&obj->mm_list, &dev_priv->mm.inactive_list);
- obj->gtt_space = node;
-
fenceable =
- node->size == fence_size &&
- (node->start & (fence_alignment - 1)) == 0;
+ i915_gem_obj_ggtt_size(obj) == fence_size &&
+ (i915_gem_obj_ggtt_offset(obj) & (fence_alignment - 1)) == 0;
mappable = i915_gem_obj_ggtt_offset(obj) + obj->base.size <=
dev_priv->gtt.mappable_end;
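The search now writes straight into the embedded node. The range allocator's
contract, as I understand the drm_mm API of this era (signature from memory,
treat as a sketch): find a hole of at least size bytes inside [start, end),
honouring alignment and the allocator color (the cache level here), and fill
in the caller-supplied node on success:

	int drm_mm_insert_node_in_range_generic(struct drm_mm *mm,
						struct drm_mm_node *node,
						unsigned long size,
						unsigned alignment,
						unsigned long color,
						unsigned long start,
						unsigned long end);

On failure nothing is written to the node, which is why the error paths
above can return without touching obj->gtt_space.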
if (obj->pin_count) {
DRM_DEBUG("can not change the cache level of pinned objects\n");
return -EBUSY;
}
- if (!i915_gem_valid_gtt_space(dev, obj->gtt_space, cache_level)) {
+ if (!i915_gem_valid_gtt_space(dev, &obj->gtt_space, cache_level)) {
ret = i915_gem_object_unbind(obj);
if (ret)
return ret;
if (obj->pin_count)
return false;
list_add(&obj->exec_list, unwind);
- return drm_mm_scan_add_block(obj->gtt_space);
+ return drm_mm_scan_add_block(&obj->gtt_space);
}
obj = list_first_entry(&unwind_list,
struct drm_i915_gem_object,
exec_list);
- ret = drm_mm_scan_remove_block(obj->gtt_space);
+ ret = drm_mm_scan_remove_block(&obj->gtt_space);
BUG_ON(ret);
list_del_init(&obj->exec_list);
obj = list_first_entry(&unwind_list,
struct drm_i915_gem_object,
exec_list);
- if (drm_mm_scan_remove_block(obj->gtt_space)) {
+ if (drm_mm_scan_remove_block(&obj->gtt_space)) {
list_move(&obj->exec_list, &eviction_list);
drm_gem_object_reference(&obj->base);
continue;
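Both eviction loops follow the drm_mm scan protocol, which the embedded node
slots into directly. A condensed sketch (the helpers are real drm_mm API of
this era; the list handling is simplified): every block added to a scan must
be scan-removed again before the mm may be modified, and
drm_mm_scan_remove_block() returns true only for blocks inside the hole the
scan settled on:

	drm_mm_init_scan(&dev_priv->mm.gtt_space, size, alignment,
			 cache_level);

	list_for_each_entry(obj, &dev_priv->mm.inactive_list, mm_list)
		if (drm_mm_scan_add_block(&obj->gtt_space))
			break;	/* a suitable hole has been assembled */

	/* unwind in LIFO order; the hole's blocks are only unbound after
	 * every block has left the scan */
	while (!list_empty(&unwind_list)) {
		obj = list_first_entry(&unwind_list,
				       struct drm_i915_gem_object, exec_list);
		if (drm_mm_scan_remove_block(&obj->gtt_space))
			list_move(&obj->exec_list, &eviction_list);
		else
			list_del_init(&obj->exec_list);
	}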
/* Mark any preallocated objects as occupied */
list_for_each_entry(obj, &dev_priv->mm.bound_list, global_list) {
- uintptr_t offset = (uintptr_t) obj->gtt_space;
int ret;
DRM_DEBUG_KMS("reserving preallocated space: %lx + %zx\n",
- offset, obj->base.size);
-
- BUG_ON((offset & I915_GTT_RESERVED) == 0);
- offset &= ~I915_GTT_RESERVED;
- obj->gtt_space = kzalloc(sizeof(*obj->gtt_space), GFP_KERNEL);
- if (!obj->gtt_space) {
- DRM_ERROR("Failed to preserve object at offset %lx\n",
- offset);
- continue;
- }
- obj->gtt_space->start = (unsigned long)offset;
- obj->gtt_space->size = obj->base.size;
+ i915_gem_obj_ggtt_offset(obj), obj->base.size);
+
+ WARN_ON(i915_gem_obj_ggtt_bound(obj));
ret = drm_mm_reserve_node(&dev_priv->mm.gtt_space,
- obj->gtt_space);
- if (ret) {
+ &obj->gtt_space);
+ if (ret)
DRM_DEBUG_KMS("Reservation failed\n");
- kfree(obj->gtt_space);
- obj->gtt_space = NULL;
- }
obj->has_global_gtt_mapping = 1;
}
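drm_mm_reserve_node() is the other half of the conversion: instead of
smuggling the offset through the pointer bits (the I915_GTT_RESERVED trick
deleted above), the caller pre-fills node->start and node->size and asks the
mm to carve out exactly that range, failing (typically -ENOSPC) when it is
already occupied. Its signature in this era, for reference:

	int drm_mm_reserve_node(struct drm_mm *mm, struct drm_mm_node *node);

For the stolen objects handled here, start and size were already filled in
at creation time; see the i915_gem_stolen.c hunk below.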
* setting up the GTT space. The actual reservation will occur
* later.
*/
+ obj->gtt_space.start = gtt_offset;
+ obj->gtt_space.size = size;
if (drm_mm_initialized(&dev_priv->mm.gtt_space)) {
- obj->gtt_space = kzalloc(sizeof(*obj->gtt_space), GFP_KERNEL);
- if (!obj->gtt_space) {
- DRM_DEBUG_KMS("-ENOMEM stolen GTT space\n");
- goto unref_out;
- }
-
- obj->gtt_space->start = gtt_offset;
- obj->gtt_space->size = size;
ret = drm_mm_reserve_node(&dev_priv->mm.gtt_space,
- obj->gtt_space);
+ &obj->gtt_space);
if (ret) {
DRM_DEBUG_KMS("failed to allocate stolen GTT space\n");
- goto free_out;
+ goto unref_out;
}
- } else {
- if (WARN_ON(gtt_offset & ~PAGE_MASK))
- DRM_DEBUG_KMS("Cannot preserve non page aligned offset\n");
- obj->gtt_space =
- (struct drm_mm_node *)((uintptr_t)(I915_GTT_RESERVED | gtt_offset));
}
obj->has_global_gtt_mapping = 1;
return obj;
-free_out:
- kfree(obj->gtt_space);
- obj->gtt_space = NULL;
unref_out:
drm_gem_object_unreference(&obj->base);
return NULL;
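With the node embedded there is nothing left to kfree() on failure, which is
why the free_out label disappears and every error funnels into unref_out. A
condensed sketch of the resulting stolen flow (names from the patch, error
handling abbreviated):

	obj->gtt_space.start = gtt_offset;
	obj->gtt_space.size = size;
	if (drm_mm_initialized(&dev_priv->mm.gtt_space) &&
	    drm_mm_reserve_node(&dev_priv->mm.gtt_space, &obj->gtt_space)) {
		drm_gem_object_unreference(&obj->base); /* the sole cleanup */
		return NULL;
	}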