	int npages = bo->tbo.num_pages;
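	/* Release callback for dma-bufs exported by this driver: drops the
	 * extra GEM object reference held while the buffer was exported. */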
	if (bo->gem_base.export_dma_buf == dma_buf) {
		DRM_ERROR("unreference dmabuf %p\n", &bo->gem_base);
		bo->gem_base.export_dma_buf = NULL;
		drm_gem_object_unreference_unlocked(&bo->gem_base);
	}
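/* CPU-access hooks for the dma-buf interface: per-page kmap/kunmap (plus the
 * atomic variants), mmap into userspace, and whole-buffer vmap/vunmap. */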
static void *radeon_gem_kmap_atomic(struct dma_buf *dma_buf,
				    unsigned long page_num)

static void radeon_gem_kunmap_atomic(struct dma_buf *dma_buf,
				     unsigned long page_num, void *addr)

static void *radeon_gem_kmap(struct dma_buf *dma_buf, unsigned long page_num)

static void radeon_gem_kunmap(struct dma_buf *dma_buf,
			      unsigned long page_num, void *addr)

static int radeon_gem_prime_mmap(struct dma_buf *dma_buf,
				 struct vm_area_struct *vma)

static void *radeon_gem_prime_vmap(struct dma_buf *dma_buf)

static void radeon_gem_prime_vunmap(struct dma_buf *dma_buf, void *vaddr)
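/* The callbacks above are wired into the dma-buf interface through this ops
 * table, which the export path presumably hands to the dma-buf core. */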
static const struct dma_buf_ops radeon_dmabuf_ops = {
	.map_dma_buf = radeon_gem_map_dma_buf,
	.unmap_dma_buf = radeon_gem_unmap_dma_buf,
	.release = radeon_gem_dmabuf_release,
	.kmap = radeon_gem_kmap,
	.kmap_atomic = radeon_gem_kmap_atomic,
	.kunmap = radeon_gem_kunmap,
	.kunmap_atomic = radeon_gem_kunmap_atomic,
	.mmap = radeon_gem_prime_mmap,
	.vmap = radeon_gem_prime_vmap,
	.vunmap = radeon_gem_prime_vunmap,
};
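/* Helper used by the import path below: wraps an imported scatter-gather
 * table in a new radeon buffer object of the given size. */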
static int radeon_prime_create(struct drm_device *dev,
			       size_t size,
			       struct sg_table *sg,
			       struct radeon_bo **pbo)
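/* Export path: the GEM object being exported is reserved, pinned for sharing
 * (likely into GTT), and unreserved on both the failure and the success
 * branches, which is why the unreserve call appears twice below. */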
					struct drm_gem_object *obj,
	radeon_bo_unreserve(bo);

	radeon_bo_unreserve(bo);
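/* Import path: a dma-buf carrying radeon_dmabuf_ops was exported by this
 * driver, so its existing GEM object is simply re-referenced; any other
 * dma-buf is attached and its sg_table wrapped in a new BO via
 * radeon_prime_create(). */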
					       struct dma_buf *dma_buf)
	if (dma_buf->ops == &radeon_dmabuf_ops) {
		drm_gem_object_reference(&bo->gem_base);
		return ERR_CAST(attach);
	ret = radeon_prime_create(dev, dma_buf->size, sg, &bo);