mirror of
https://github.com/torvalds/linux.git
synced 2026-03-08 03:44:45 +01:00
drm/ttm: rename ttm_bo_put to _fini v3
Give TTM BOs a separate cleanup function. No functional change, but the next step in removing the TTM BO reference counting and replacing it with the GEM object reference counting. v2: move the code around a bit to make it clearer what's happening v3: fix nouveau_bo_fini as well Signed-off-by: Christian König <christian.koenig@amd.com> Acked-by: Thomas Hellström <thomas.hellstrom@linux.intel.com> Acked-by: Joonas Lahtinen <joonas.lahtinen@linux.intel.com> Link: https://lore.kernel.org/r/20250909144311.1927-1-christian.koenig@amd.com
This commit is contained in:
parent
940dd88c5f
commit
ed7a4397f5
15 changed files with 59 additions and 58 deletions
|
|
@ -198,7 +198,7 @@ static void amdgpu_gem_object_free(struct drm_gem_object *gobj)
|
|||
struct amdgpu_bo *aobj = gem_to_amdgpu_bo(gobj);
|
||||
|
||||
amdgpu_hmm_unregister(aobj);
|
||||
ttm_bo_put(&aobj->tbo);
|
||||
ttm_bo_fini(&aobj->tbo);
|
||||
}
|
||||
|
||||
int amdgpu_gem_object_create(struct amdgpu_device *adev, unsigned long size,
|
||||
|
|
|
|||
|
|
@ -107,7 +107,7 @@ static const struct drm_gem_object_funcs drm_gem_vram_object_funcs;
|
|||
|
||||
static void drm_gem_vram_cleanup(struct drm_gem_vram_object *gbo)
|
||||
{
|
||||
/* We got here via ttm_bo_put(), which means that the
|
||||
/* We got here via ttm_bo_fini(), which means that the
|
||||
* TTM buffer object in 'bo' has already been cleaned
|
||||
* up; only release the GEM object.
|
||||
*/
|
||||
|
|
@ -234,11 +234,11 @@ EXPORT_SYMBOL(drm_gem_vram_create);
|
|||
* drm_gem_vram_put() - Releases a reference to a VRAM-backed GEM object
|
||||
* @gbo: the GEM VRAM object
|
||||
*
|
||||
* See ttm_bo_put() for more information.
|
||||
* See ttm_bo_fini() for more information.
|
||||
*/
|
||||
void drm_gem_vram_put(struct drm_gem_vram_object *gbo)
|
||||
{
|
||||
ttm_bo_put(&gbo->bo);
|
||||
ttm_bo_fini(&gbo->bo);
|
||||
}
|
||||
EXPORT_SYMBOL(drm_gem_vram_put);
|
||||
|
||||
|
|
|
|||
|
|
@ -1029,7 +1029,7 @@ static void i915_ttm_delayed_free(struct drm_i915_gem_object *obj)
|
|||
{
|
||||
GEM_BUG_ON(!obj->ttm.created);
|
||||
|
||||
ttm_bo_put(i915_gem_to_ttm(obj));
|
||||
ttm_bo_fini(i915_gem_to_ttm(obj));
|
||||
}
|
||||
|
||||
static vm_fault_t vm_fault_ttm(struct vm_fault *vmf)
|
||||
|
|
@ -1325,7 +1325,7 @@ int __i915_gem_ttm_object_init(struct intel_memory_region *mem,
|
|||
* If this function fails, it will call the destructor, but
|
||||
* our caller still owns the object. So no freeing in the
|
||||
* destructor until obj->ttm.created is true.
|
||||
* Similarly, in delayed_destroy, we can't call ttm_bo_put()
|
||||
* Similarly, in delayed_destroy, we can't call ttm_bo_fini()
|
||||
* until successful initialization.
|
||||
*/
|
||||
ret = ttm_bo_init_reserved(&i915->bdev, i915_gem_to_ttm(obj), bo_type,
|
||||
|
|
|
|||
|
|
@ -57,7 +57,7 @@ static void lsdc_gem_object_free(struct drm_gem_object *obj)
|
|||
struct ttm_buffer_object *tbo = to_ttm_bo(obj);
|
||||
|
||||
if (tbo)
|
||||
ttm_bo_put(tbo);
|
||||
ttm_bo_fini(tbo);
|
||||
}
|
||||
|
||||
static int lsdc_gem_object_vmap(struct drm_gem_object *obj, struct iosys_map *map)
|
||||
|
|
|
|||
|
|
@ -57,7 +57,7 @@ nouveau_bo(struct ttm_buffer_object *bo)
|
|||
static inline void
|
||||
nouveau_bo_fini(struct nouveau_bo *bo)
|
||||
{
|
||||
ttm_bo_put(&bo->bo);
|
||||
ttm_bo_fini(&bo->bo);
|
||||
}
|
||||
|
||||
extern struct ttm_device_funcs nouveau_bo_driver;
|
||||
|
|
|
|||
|
|
@ -87,7 +87,7 @@ nouveau_gem_object_del(struct drm_gem_object *gem)
|
|||
return;
|
||||
}
|
||||
|
||||
ttm_bo_put(&nvbo->bo);
|
||||
ttm_bo_fini(&nvbo->bo);
|
||||
|
||||
pm_runtime_mark_last_busy(dev);
|
||||
pm_runtime_put_autosuspend(dev);
|
||||
|
|
|
|||
|
|
@ -39,7 +39,7 @@ void qxl_gem_object_free(struct drm_gem_object *gobj)
|
|||
qxl_surface_evict(qdev, qobj, false);
|
||||
|
||||
tbo = &qobj->tbo;
|
||||
ttm_bo_put(tbo);
|
||||
ttm_bo_fini(tbo);
|
||||
}
|
||||
|
||||
int qxl_gem_object_create(struct qxl_device *qdev, int size,
|
||||
|
|
|
|||
|
|
@ -86,7 +86,7 @@ static void radeon_gem_object_free(struct drm_gem_object *gobj)
|
|||
|
||||
if (robj) {
|
||||
radeon_mn_unregister(robj);
|
||||
ttm_bo_put(&robj->tbo);
|
||||
ttm_bo_fini(&robj->tbo);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -379,7 +379,7 @@ static void ttm_bo_unreserve_bulk(struct kunit *test)
|
|||
dma_resv_fini(resv);
|
||||
}
|
||||
|
||||
static void ttm_bo_put_basic(struct kunit *test)
|
||||
static void ttm_bo_fini_basic(struct kunit *test)
|
||||
{
|
||||
struct ttm_test_devices *priv = test->priv;
|
||||
struct ttm_buffer_object *bo;
|
||||
|
|
@ -410,7 +410,7 @@ static void ttm_bo_put_basic(struct kunit *test)
|
|||
dma_resv_unlock(bo->base.resv);
|
||||
KUNIT_EXPECT_EQ(test, err, 0);
|
||||
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
}
|
||||
|
||||
static const char *mock_name(struct dma_fence *f)
|
||||
|
|
@ -423,7 +423,7 @@ static const struct dma_fence_ops mock_fence_ops = {
|
|||
.get_timeline_name = mock_name,
|
||||
};
|
||||
|
||||
static void ttm_bo_put_shared_resv(struct kunit *test)
|
||||
static void ttm_bo_fini_shared_resv(struct kunit *test)
|
||||
{
|
||||
struct ttm_test_devices *priv = test->priv;
|
||||
struct ttm_buffer_object *bo;
|
||||
|
|
@ -463,7 +463,7 @@ static void ttm_bo_put_shared_resv(struct kunit *test)
|
|||
bo->type = ttm_bo_type_device;
|
||||
bo->base.resv = external_resv;
|
||||
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
}
|
||||
|
||||
static void ttm_bo_pin_basic(struct kunit *test)
|
||||
|
|
@ -616,8 +616,8 @@ static struct kunit_case ttm_bo_test_cases[] = {
|
|||
KUNIT_CASE(ttm_bo_unreserve_basic),
|
||||
KUNIT_CASE(ttm_bo_unreserve_pinned),
|
||||
KUNIT_CASE(ttm_bo_unreserve_bulk),
|
||||
KUNIT_CASE(ttm_bo_put_basic),
|
||||
KUNIT_CASE(ttm_bo_put_shared_resv),
|
||||
KUNIT_CASE(ttm_bo_fini_basic),
|
||||
KUNIT_CASE(ttm_bo_fini_shared_resv),
|
||||
KUNIT_CASE(ttm_bo_pin_basic),
|
||||
KUNIT_CASE(ttm_bo_pin_unpin_resource),
|
||||
KUNIT_CASE(ttm_bo_multiple_pin_one_unpin),
|
||||
|
|
|
|||
|
|
@ -144,7 +144,7 @@ static void ttm_bo_init_reserved_sys_man(struct kunit *test)
|
|||
drm_mm_node_allocated(&bo->base.vma_node.vm_node));
|
||||
|
||||
ttm_resource_free(bo, &bo->resource);
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
}
|
||||
|
||||
static void ttm_bo_init_reserved_mock_man(struct kunit *test)
|
||||
|
|
@ -186,7 +186,7 @@ static void ttm_bo_init_reserved_mock_man(struct kunit *test)
|
|||
drm_mm_node_allocated(&bo->base.vma_node.vm_node));
|
||||
|
||||
ttm_resource_free(bo, &bo->resource);
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
ttm_mock_manager_fini(priv->ttm_dev, mem_type);
|
||||
}
|
||||
|
||||
|
|
@ -221,7 +221,7 @@ static void ttm_bo_init_reserved_resv(struct kunit *test)
|
|||
KUNIT_EXPECT_PTR_EQ(test, bo->base.resv, &resv);
|
||||
|
||||
ttm_resource_free(bo, &bo->resource);
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
}
|
||||
|
||||
static void ttm_bo_validate_basic(struct kunit *test)
|
||||
|
|
@ -265,7 +265,7 @@ static void ttm_bo_validate_basic(struct kunit *test)
|
|||
KUNIT_EXPECT_EQ(test, bo->resource->placement,
|
||||
DRM_BUDDY_TOPDOWN_ALLOCATION);
|
||||
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
ttm_mock_manager_fini(priv->ttm_dev, snd_mem);
|
||||
}
|
||||
|
||||
|
|
@ -292,7 +292,7 @@ static void ttm_bo_validate_invalid_placement(struct kunit *test)
|
|||
|
||||
KUNIT_EXPECT_EQ(test, err, -ENOMEM);
|
||||
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
}
|
||||
|
||||
static void ttm_bo_validate_failed_alloc(struct kunit *test)
|
||||
|
|
@ -321,7 +321,7 @@ static void ttm_bo_validate_failed_alloc(struct kunit *test)
|
|||
|
||||
KUNIT_EXPECT_EQ(test, err, -ENOMEM);
|
||||
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
ttm_bad_manager_fini(priv->ttm_dev, mem_type);
|
||||
}
|
||||
|
||||
|
|
@ -353,7 +353,7 @@ static void ttm_bo_validate_pinned(struct kunit *test)
|
|||
ttm_bo_unpin(bo);
|
||||
dma_resv_unlock(bo->base.resv);
|
||||
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
}
|
||||
|
||||
static const struct ttm_bo_validate_test_case ttm_mem_type_cases[] = {
|
||||
|
|
@ -403,7 +403,7 @@ static void ttm_bo_validate_same_placement(struct kunit *test)
|
|||
KUNIT_EXPECT_EQ(test, err, 0);
|
||||
KUNIT_EXPECT_EQ(test, ctx_val.bytes_moved, 0);
|
||||
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
|
||||
if (params->mem_type != TTM_PL_SYSTEM)
|
||||
ttm_mock_manager_fini(priv->ttm_dev, params->mem_type);
|
||||
|
|
@ -452,7 +452,7 @@ static void ttm_bo_validate_busy_placement(struct kunit *test)
|
|||
KUNIT_EXPECT_EQ(test, bo->resource->mem_type, snd_mem);
|
||||
KUNIT_ASSERT_TRUE(test, list_is_singular(&man->lru[bo->priority]));
|
||||
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
ttm_bad_manager_fini(priv->ttm_dev, fst_mem);
|
||||
ttm_mock_manager_fini(priv->ttm_dev, snd_mem);
|
||||
}
|
||||
|
|
@ -495,7 +495,7 @@ static void ttm_bo_validate_multihop(struct kunit *test)
|
|||
KUNIT_EXPECT_EQ(test, ctx_val.bytes_moved, size * 2);
|
||||
KUNIT_EXPECT_EQ(test, bo->resource->mem_type, final_mem);
|
||||
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
|
||||
ttm_mock_manager_fini(priv->ttm_dev, fst_mem);
|
||||
ttm_mock_manager_fini(priv->ttm_dev, tmp_mem);
|
||||
|
|
@ -567,7 +567,7 @@ static void ttm_bo_validate_no_placement_signaled(struct kunit *test)
|
|||
KUNIT_ASSERT_TRUE(test, flags & TTM_TT_FLAG_ZERO_ALLOC);
|
||||
}
|
||||
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
}
|
||||
|
||||
static int threaded_dma_resv_signal(void *arg)
|
||||
|
|
@ -635,7 +635,7 @@ static void ttm_bo_validate_no_placement_not_signaled(struct kunit *test)
|
|||
/* Make sure we have an idle object at this point */
|
||||
dma_resv_wait_timeout(bo->base.resv, usage, false, MAX_SCHEDULE_TIMEOUT);
|
||||
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
}
|
||||
|
||||
static void ttm_bo_validate_move_fence_signaled(struct kunit *test)
|
||||
|
|
@ -668,7 +668,7 @@ static void ttm_bo_validate_move_fence_signaled(struct kunit *test)
|
|||
KUNIT_EXPECT_EQ(test, bo->resource->mem_type, mem_type);
|
||||
KUNIT_EXPECT_EQ(test, ctx.bytes_moved, size);
|
||||
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
dma_fence_put(man->move);
|
||||
}
|
||||
|
||||
|
|
@ -753,7 +753,7 @@ static void ttm_bo_validate_move_fence_not_signaled(struct kunit *test)
|
|||
else
|
||||
KUNIT_EXPECT_EQ(test, bo->resource->mem_type, fst_mem);
|
||||
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
ttm_mock_manager_fini(priv->ttm_dev, fst_mem);
|
||||
ttm_mock_manager_fini(priv->ttm_dev, snd_mem);
|
||||
}
|
||||
|
|
@ -807,8 +807,8 @@ static void ttm_bo_validate_happy_evict(struct kunit *test)
|
|||
KUNIT_EXPECT_EQ(test, bos[1].resource->mem_type, mem_type);
|
||||
|
||||
for (i = 0; i < bo_no; i++)
|
||||
ttm_bo_put(&bos[i]);
|
||||
ttm_bo_put(bo_val);
|
||||
ttm_bo_fini(&bos[i]);
|
||||
ttm_bo_fini(bo_val);
|
||||
|
||||
ttm_mock_manager_fini(priv->ttm_dev, mem_type);
|
||||
ttm_mock_manager_fini(priv->ttm_dev, mem_multihop);
|
||||
|
|
@ -852,12 +852,12 @@ static void ttm_bo_validate_all_pinned_evict(struct kunit *test)
|
|||
|
||||
KUNIT_EXPECT_EQ(test, err, -ENOMEM);
|
||||
|
||||
ttm_bo_put(bo_small);
|
||||
ttm_bo_fini(bo_small);
|
||||
|
||||
ttm_bo_reserve(bo_big, false, false, NULL);
|
||||
ttm_bo_unpin(bo_big);
|
||||
dma_resv_unlock(bo_big->base.resv);
|
||||
ttm_bo_put(bo_big);
|
||||
ttm_bo_fini(bo_big);
|
||||
|
||||
ttm_mock_manager_fini(priv->ttm_dev, mem_type);
|
||||
ttm_mock_manager_fini(priv->ttm_dev, mem_multihop);
|
||||
|
|
@ -916,13 +916,13 @@ static void ttm_bo_validate_allowed_only_evict(struct kunit *test)
|
|||
KUNIT_EXPECT_EQ(test, bo_evictable->resource->mem_type, mem_type_evict);
|
||||
KUNIT_EXPECT_EQ(test, ctx_val.bytes_moved, size * 2 + BO_SIZE);
|
||||
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_put(bo_evictable);
|
||||
ttm_bo_fini(bo);
|
||||
ttm_bo_fini(bo_evictable);
|
||||
|
||||
ttm_bo_reserve(bo_pinned, false, false, NULL);
|
||||
ttm_bo_unpin(bo_pinned);
|
||||
dma_resv_unlock(bo_pinned->base.resv);
|
||||
ttm_bo_put(bo_pinned);
|
||||
ttm_bo_fini(bo_pinned);
|
||||
|
||||
ttm_mock_manager_fini(priv->ttm_dev, mem_type);
|
||||
ttm_mock_manager_fini(priv->ttm_dev, mem_multihop);
|
||||
|
|
@ -973,8 +973,8 @@ static void ttm_bo_validate_deleted_evict(struct kunit *test)
|
|||
KUNIT_EXPECT_NULL(test, bo_big->ttm);
|
||||
KUNIT_EXPECT_NULL(test, bo_big->resource);
|
||||
|
||||
ttm_bo_put(bo_small);
|
||||
ttm_bo_put(bo_big);
|
||||
ttm_bo_fini(bo_small);
|
||||
ttm_bo_fini(bo_big);
|
||||
ttm_mock_manager_fini(priv->ttm_dev, mem_type);
|
||||
}
|
||||
|
||||
|
|
@ -1025,8 +1025,8 @@ static void ttm_bo_validate_busy_domain_evict(struct kunit *test)
|
|||
KUNIT_EXPECT_EQ(test, bo_init->resource->mem_type, mem_type);
|
||||
KUNIT_EXPECT_NULL(test, bo_val->resource);
|
||||
|
||||
ttm_bo_put(bo_init);
|
||||
ttm_bo_put(bo_val);
|
||||
ttm_bo_fini(bo_init);
|
||||
ttm_bo_fini(bo_val);
|
||||
|
||||
ttm_mock_manager_fini(priv->ttm_dev, mem_type);
|
||||
ttm_bad_manager_fini(priv->ttm_dev, mem_type_evict);
|
||||
|
|
@ -1070,8 +1070,8 @@ static void ttm_bo_validate_evict_gutting(struct kunit *test)
|
|||
KUNIT_ASSERT_NULL(test, bo_evict->resource);
|
||||
KUNIT_ASSERT_TRUE(test, bo_evict->ttm->page_flags & TTM_TT_FLAG_ZERO_ALLOC);
|
||||
|
||||
ttm_bo_put(bo_evict);
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo_evict);
|
||||
ttm_bo_fini(bo);
|
||||
|
||||
ttm_mock_manager_fini(priv->ttm_dev, mem_type);
|
||||
}
|
||||
|
|
@ -1128,9 +1128,9 @@ static void ttm_bo_validate_recrusive_evict(struct kunit *test)
|
|||
ttm_mock_manager_fini(priv->ttm_dev, mem_type);
|
||||
ttm_mock_manager_fini(priv->ttm_dev, mem_type_evict);
|
||||
|
||||
ttm_bo_put(bo_val);
|
||||
ttm_bo_put(bo_tt);
|
||||
ttm_bo_put(bo_mock);
|
||||
ttm_bo_fini(bo_val);
|
||||
ttm_bo_fini(bo_tt);
|
||||
ttm_bo_fini(bo_mock);
|
||||
}
|
||||
|
||||
static struct kunit_case ttm_bo_validate_test_cases[] = {
|
||||
|
|
|
|||
|
|
@ -318,18 +318,17 @@ static void ttm_bo_release(struct kref *kref)
|
|||
bo->destroy(bo);
|
||||
}
|
||||
|
||||
/**
|
||||
* ttm_bo_put
|
||||
*
|
||||
* @bo: The buffer object.
|
||||
*
|
||||
* Unreference a buffer object.
|
||||
*/
|
||||
/* TODO: remove! */
|
||||
void ttm_bo_put(struct ttm_buffer_object *bo)
|
||||
{
|
||||
kref_put(&bo->kref, ttm_bo_release);
|
||||
}
|
||||
EXPORT_SYMBOL(ttm_bo_put);
|
||||
|
||||
void ttm_bo_fini(struct ttm_buffer_object *bo)
|
||||
{
|
||||
ttm_bo_put(bo);
|
||||
}
|
||||
EXPORT_SYMBOL(ttm_bo_fini);
|
||||
|
||||
static int ttm_bo_bounce_temp_buffer(struct ttm_buffer_object *bo,
|
||||
struct ttm_operation_ctx *ctx,
|
||||
|
|
|
|||
|
|
@ -55,4 +55,6 @@ ttm_bo_get_unless_zero(struct ttm_buffer_object *bo)
|
|||
return bo;
|
||||
}
|
||||
|
||||
void ttm_bo_put(struct ttm_buffer_object *bo);
|
||||
|
||||
#endif
|
||||
|
|
|
|||
|
|
@ -37,7 +37,7 @@ static void vmw_gem_object_free(struct drm_gem_object *gobj)
|
|||
{
|
||||
struct ttm_buffer_object *bo = drm_gem_ttm_of_gem(gobj);
|
||||
if (bo)
|
||||
ttm_bo_put(bo);
|
||||
ttm_bo_fini(bo);
|
||||
}
|
||||
|
||||
static int vmw_gem_object_open(struct drm_gem_object *obj,
|
||||
|
|
|
|||
|
|
@ -1696,7 +1696,7 @@ static void xe_gem_object_free(struct drm_gem_object *obj)
|
|||
* refcount directly if needed.
|
||||
*/
|
||||
__xe_bo_vunmap(gem_to_xe_bo(obj));
|
||||
ttm_bo_put(container_of(obj, struct ttm_buffer_object, base));
|
||||
ttm_bo_fini(container_of(obj, struct ttm_buffer_object, base));
|
||||
}
|
||||
|
||||
static void xe_gem_object_close(struct drm_gem_object *obj,
|
||||
|
|
|
|||
|
|
@ -391,7 +391,7 @@ int ttm_bo_wait_ctx(struct ttm_buffer_object *bo,
|
|||
int ttm_bo_validate(struct ttm_buffer_object *bo,
|
||||
struct ttm_placement *placement,
|
||||
struct ttm_operation_ctx *ctx);
|
||||
void ttm_bo_put(struct ttm_buffer_object *bo);
|
||||
void ttm_bo_fini(struct ttm_buffer_object *bo);
|
||||
void ttm_bo_set_bulk_move(struct ttm_buffer_object *bo,
|
||||
struct ttm_lru_bulk_move *bulk);
|
||||
bool ttm_bo_eviction_valuable(struct ttm_buffer_object *bo,
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue