/*
 * Copyright (C) 2016 Fuzhou Rockchip Electronics Co., Ltd
 * author: Jung Zhao jung.zhao@rock-chips.com
 *         Randy Li, randy.li@rock-chips.com
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */
16 #include <linux/dma-iommu.h>
18 #include <linux/dma-buf.h>
20 #include <drm/drm_atomic.h>
21 #include <drm/drm_crtc_helper.h>
22 #include <drm/drm_fb_helper.h>
23 #include <drm/drm_sync_helper.h>
24 #include <drm/rockchip_drm.h>
25 #include <linux/dma-mapping.h>
26 #include <linux/rockchip-iovmm.h>
27 #include <linux/pm_runtime.h>
28 #include <linux/memblock.h>
29 #include <linux/module.h>
30 #include <linux/of_address.h>
31 #include <linux/of_graph.h>
32 #include <linux/component.h>
33 #include <linux/fence.h>
34 #include <linux/console.h>
35 #include <linux/kref.h>
36 #include <linux/fdtable.h>
38 #include "vcodec_iommu_ops.h"
/*
 * Per-buffer bookkeeping for a dma-buf imported into a vcodec session.
 * Lives on session_info->buffer_list, protected by session_info->list_mutex.
 * NOTE(review): fields used elsewhere in this file (index, iova, size, sgt,
 * pages, cpu_addr, ref) are on lines missing from this excerpt -- confirm
 * against the full source.
 */
40 struct vcodec_drm_buffer {
41 struct list_head list;
42 struct dma_buf *dma_buf;
50 struct dma_buf_attachment *attach;
54 struct vcodec_iommu_session_info *session_info;
/*
 * Backend-private state stored in vcodec_iommu_info->private.
 * NOTE(review): the 'attached' flag read/written throughout this file is
 * declared on a line missing from this excerpt -- confirm against full source.
 */
57 struct vcodec_iommu_drm_info {
58 struct iommu_domain *domain;
/*
 * Look up a session buffer by its index.  Caller must hold
 * session_info->list_mutex ("no_lock" = no locking done here).
 * NOTE(review): the return statements are on lines missing from this excerpt.
 */
62 static struct vcodec_drm_buffer *
63 vcodec_drm_get_buffer_no_lock(struct vcodec_iommu_session_info *session_info,
66 struct vcodec_drm_buffer *drm_buffer = NULL, *n;
68 list_for_each_entry_safe(drm_buffer, n, &session_info->buffer_list,
70 if (drm_buffer->index == idx)
/*
 * Look up a session buffer by comparing the struct dma_buf resolved from a
 * userspace fd.  Caller must hold session_info->list_mutex.
 * NOTE(review): dma_buf_get() takes a reference; the matching dma_buf_put()
 * and the return paths are on lines missing from this excerpt -- confirm.
 */
77 static struct vcodec_drm_buffer *
78 vcodec_drm_get_buffer_fd_no_lock(struct vcodec_iommu_session_info *session_info,
81 struct vcodec_drm_buffer *drm_buffer = NULL, *n;
82 struct dma_buf *dma_buf = NULL;
84 dma_buf = dma_buf_get(fd);
86 list_for_each_entry_safe(drm_buffer, n, &session_info->buffer_list,
88 if (drm_buffer->dma_buf == dma_buf) {
/*
 * Detach the vcodec device from its iommu domain.  No-op (early unlock and
 * return) when not currently attached; serialized by iommu_mutex.
 */
99 static void vcodec_drm_detach(struct vcodec_iommu_info *iommu_info)
101 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
102 struct device *dev = iommu_info->dev;
103 struct iommu_domain *domain = drm_info->domain;
105 mutex_lock(&iommu_info->iommu_mutex);
107 if (!drm_info->attached) {
108 mutex_unlock(&iommu_info->iommu_mutex);
112 iommu_detach_device(domain, dev);
113 drm_info->attached = false;
115 mutex_unlock(&iommu_info->iommu_mutex);
/*
 * Attach the device to the iommu domain without taking iommu_mutex --
 * callers (vcodec_drm_attach, clear_map, import) already hold it.
 * Configures a 32-bit coherent DMA mask and max segment size, attaches the
 * device, then installs the common DMA ops over the IOVA window starting at
 * 0x10000000, size SZ_2G.  On dma_ops failure the device is detached again.
 * NOTE(review): the 'int ret' declaration and the return statements are on
 * lines missing from this excerpt.
 */
118 static int vcodec_drm_attach_unlock(struct vcodec_iommu_info *iommu_info)
120 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
121 struct device *dev = iommu_info->dev;
122 struct iommu_domain *domain = drm_info->domain;
125 ret = dma_set_coherent_mask(dev, DMA_BIT_MASK(32));
129 dma_set_max_seg_size(dev, DMA_BIT_MASK(32));
130 ret = iommu_attach_device(domain, dev);
132 dev_err(dev, "Failed to attach iommu device\n");
136 if (!common_iommu_setup_dma_ops(dev, 0x10000000, SZ_2G, domain->ops)) {
137 dev_err(dev, "Failed to set dma_ops\n");
138 iommu_detach_device(domain, dev);
/*
 * Locked, idempotent attach: returns early (after unlocking) if already
 * attached, otherwise delegates to vcodec_drm_attach_unlock() and records
 * success in drm_info->attached.
 */
145 static int vcodec_drm_attach(struct vcodec_iommu_info *iommu_info)
147 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
150 mutex_lock(&iommu_info->iommu_mutex);
152 if (drm_info->attached) {
153 mutex_unlock(&iommu_info->iommu_mutex);
157 ret = vcodec_drm_attach_unlock(iommu_info);
159 mutex_unlock(&iommu_info->iommu_mutex);
163 drm_info->attached = true;
165 mutex_unlock(&iommu_info->iommu_mutex);
/*
 * Build a kernel virtual mapping for an imported buffer: collect the pages
 * backing drm_buffer->sgt into a page array, then vmap() them with a
 * noncached protection.  Returns the vmap address (NULL on failure).
 * The page array is kept in drm_buffer->pages and freed by
 * vcodec_drm_sgt_unmap_kernel().
 * NOTE(review): the 'struct page *page' assignment inside the sg loop is on
 * a line missing from this excerpt -- confirm against full source.
 */
170 static void *vcodec_drm_sgt_map_kernel(struct vcodec_drm_buffer *drm_buffer)
172 struct vcodec_iommu_session_info *session_info =
173 drm_buffer->session_info;
174 struct device *dev = session_info->dev;
175 struct scatterlist *sgl, *sg;
176 int nr_pages = PAGE_ALIGN(drm_buffer->size) >> PAGE_SHIFT;
177 int i = 0, j = 0, k = 0;
180 drm_buffer->pages = kmalloc_array(nr_pages, sizeof(*drm_buffer->pages),
182 if (!(drm_buffer->pages)) {
183 dev_err(dev, "drm map can not alloc pages\n");
188 sgl = drm_buffer->sgt->sgl;
190 for_each_sg(sgl, sg, drm_buffer->sgt->nents, i) {
192 for (j = 0; j < sg->length / PAGE_SIZE; j++)
193 drm_buffer->pages[k++] = page++;
196 return vmap(drm_buffer->pages, nr_pages, VM_MAP,
197 pgprot_noncached(PAGE_KERNEL));
/* Tear down the kernel mapping made by vcodec_drm_sgt_map_kernel(). */
200 static void vcodec_drm_sgt_unmap_kernel(struct vcodec_drm_buffer *drm_buffer)
202 vunmap(drm_buffer->cpu_addr);
203 kfree(drm_buffer->pages);
206 static void vcodec_drm_clear_map(struct kref *ref)
208 struct vcodec_drm_buffer *drm_buffer =
209 container_of(ref, struct vcodec_drm_buffer, ref);
210 struct vcodec_iommu_session_info *session_info =
211 drm_buffer->session_info;
212 struct vcodec_iommu_info *iommu_info = session_info->iommu_info;
213 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
214 struct device *dev = session_info->dev;
215 struct iommu_domain *domain = drm_info->domain;
217 mutex_lock(&iommu_info->iommu_mutex);
218 drm_info = session_info->iommu_info->private;
219 if (!drm_info->attached) {
220 if (vcodec_drm_attach_unlock(session_info->iommu_info))
221 dev_err(dev, "can't clea map, attach iommu failed.\n");
224 if (drm_buffer->cpu_addr) {
225 vcodec_drm_sgt_unmap_kernel(drm_buffer);
226 drm_buffer->cpu_addr = NULL;
229 if (drm_buffer->attach) {
230 dma_buf_unmap_attachment(drm_buffer->attach, drm_buffer->sgt,
232 dma_buf_detach(drm_buffer->dma_buf, drm_buffer->attach);
233 dma_buf_put(drm_buffer->dma_buf);
234 drm_buffer->attach = NULL;
237 if (!drm_info->attached)
238 iommu_detach_device(domain, dev);
240 mutex_unlock(&iommu_info->iommu_mutex);
/*
 * Debug dump of every buffer still held on the session's buffer_list.
 * NOTE(review): "vcdoec" is a typo for "vcodec" in the function name; kept
 * because the drm_ops table below references it by this spelling.
 */
243 static void vcdoec_drm_dump_info(struct vcodec_iommu_session_info *session_info)
245 struct vcodec_drm_buffer *drm_buffer = NULL, *n;
247 vpu_iommu_debug(session_info->debug_level, DEBUG_IOMMU_OPS_DUMP,
248 "still there are below buffers stored in list\n");
249 list_for_each_entry_safe(drm_buffer, n, &session_info->buffer_list,
251 vpu_iommu_debug(session_info->debug_level, DEBUG_IOMMU_OPS_DUMP,
252 "index %d drm_buffer dma_buf %p cpu_addr %p\n",
254 drm_buffer->dma_buf, drm_buffer->cpu_addr);
/*
 * Free a buffer by index: drop it from the session list and release the
 * dma-buf reference, but only once its kref has already fallen to zero
 * (i.e. all maps have been released).
 * NOTE(review): reading ref.refcount directly is a pre-refcount_t kref
 * idiom; valid on this kernel generation only.
 */
258 static int vcodec_drm_free(struct vcodec_iommu_session_info *session_info,
261 struct device *dev = session_info->dev;
262 /* please double-check all maps have been released */
263 struct vcodec_drm_buffer *drm_buffer;
265 mutex_lock(&session_info->list_mutex);
266 drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
269 dev_err(dev, "can not find %d buffer in list\n", idx);
270 mutex_unlock(&session_info->list_mutex);
275 if (atomic_read(&drm_buffer->ref.refcount) == 0) {
276 dma_buf_put(drm_buffer->dma_buf);
277 list_del_init(&drm_buffer->list);
280 mutex_unlock(&session_info->list_mutex);
/*
 * Drop one iommu-mapping reference on the buffer at @idx; when the kref
 * hits zero, vcodec_drm_clear_map() tears the mapping down.
 * The rk3288 TLB flush works around stale iommu entries on that SoC.
 */
286 vcodec_drm_unmap_iommu(struct vcodec_iommu_session_info *session_info,
289 struct device *dev = session_info->dev;
290 struct vcodec_drm_buffer *drm_buffer;
292 /* Force to flush iommu table */
293 if (of_machine_is_compatible("rockchip,rk3288"))
294 rockchip_iovmm_invalidate_tlb(session_info->mmu_dev);
296 mutex_lock(&session_info->list_mutex);
297 drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
298 mutex_unlock(&session_info->list_mutex);
301 dev_err(dev, "can not find %d buffer in list\n", idx);
305 kref_put(&drm_buffer->ref, vcodec_drm_clear_map);
/*
 * Return the device (iommu) address and size of the buffer at @idx through
 * *iova / *size, taking an extra kref so the mapping stays alive until the
 * matching vcodec_drm_unmap_iommu().
 */
310 static int vcodec_drm_map_iommu(struct vcodec_iommu_session_info *session_info,
315 struct device *dev = session_info->dev;
316 struct vcodec_drm_buffer *drm_buffer;
318 /* Force to flush iommu table */
319 if (of_machine_is_compatible("rockchip,rk3288"))
320 rockchip_iovmm_invalidate_tlb(session_info->mmu_dev);
322 mutex_lock(&session_info->list_mutex);
323 drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
324 mutex_unlock(&session_info->list_mutex);
327 dev_err(dev, "can not find %d buffer in list\n", idx);
331 kref_get(&drm_buffer->ref);
333 *iova = drm_buffer->iova;
335 *size = drm_buffer->size;
/*
 * Release the kernel vmap of the buffer at @idx (if present) and drop the
 * kref taken by vcodec_drm_map_kernel().
 */
340 vcodec_drm_unmap_kernel(struct vcodec_iommu_session_info *session_info, int idx)
342 struct device *dev = session_info->dev;
343 struct vcodec_drm_buffer *drm_buffer;
345 mutex_lock(&session_info->list_mutex);
346 drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
347 mutex_unlock(&session_info->list_mutex);
350 dev_err(dev, "can not find %d buffer in list\n", idx);
355 if (drm_buffer->cpu_addr) {
356 vcodec_drm_sgt_unmap_kernel(drm_buffer);
357 drm_buffer->cpu_addr = NULL;
360 kref_put(&drm_buffer->ref, vcodec_drm_clear_map);
/*
 * Free a buffer identified by its dma-buf fd: resolve fd -> buffer, unmap
 * its iommu mapping, then (once the kref is zero) drop the dma-buf
 * reference and unlink it from the session list.
 * NOTE(review): reading ref.refcount directly is a pre-refcount_t kref
 * idiom; valid on this kernel generation only.
 */
365 vcodec_drm_free_fd(struct vcodec_iommu_session_info *session_info, int fd)
367 struct device *dev = session_info->dev;
368 /* please double-check all maps have been released */
369 struct vcodec_drm_buffer *drm_buffer = NULL;
371 mutex_lock(&session_info->list_mutex);
372 drm_buffer = vcodec_drm_get_buffer_fd_no_lock(session_info, fd);
375 dev_err(dev, "can not find %d buffer in list\n", fd);
376 mutex_unlock(&session_info->list_mutex);
380 mutex_unlock(&session_info->list_mutex);
382 vcodec_drm_unmap_iommu(session_info, drm_buffer->index);
384 mutex_lock(&session_info->list_mutex);
385 if (atomic_read(&drm_buffer->ref.refcount) == 0) {
386 dma_buf_put(drm_buffer->dma_buf);
387 list_del_init(&drm_buffer->list);
390 mutex_unlock(&session_info->list_mutex);
/*
 * Drop every remaining buffer in a session: put the final kref (triggering
 * vcodec_drm_clear_map) and then free the list entry.  Uses the _safe list
 * iterator because vcodec_drm_free() unlinks entries while walking.
 */
396 vcodec_drm_clear_session(struct vcodec_iommu_session_info *session_info)
398 struct vcodec_drm_buffer *drm_buffer = NULL, *n;
400 list_for_each_entry_safe(drm_buffer, n, &session_info->buffer_list,
402 kref_put(&drm_buffer->ref, vcodec_drm_clear_map);
403 vcodec_drm_free(session_info, drm_buffer->index);
/*
 * Return a kernel virtual address for the buffer at @idx, creating the
 * noncached vmap lazily on first use, and take a kref pinning the buffer
 * until vcodec_drm_unmap_kernel().
 */
408 vcodec_drm_map_kernel(struct vcodec_iommu_session_info *session_info, int idx)
410 struct device *dev = session_info->dev;
411 struct vcodec_drm_buffer *drm_buffer;
413 mutex_lock(&session_info->list_mutex);
414 drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
415 mutex_unlock(&session_info->list_mutex);
418 dev_err(dev, "can not find %d buffer in list\n", idx);
422 if (!drm_buffer->cpu_addr)
423 drm_buffer->cpu_addr =
424 vcodec_drm_sgt_map_kernel(drm_buffer);
426 kref_get(&drm_buffer->ref);
428 return drm_buffer->cpu_addr;
/*
 * Import a dma-buf fd into the session and return its buffer index.
 *
 * Steps: resolve fd -> dma_buf (dma_buf_get takes a reference); if the same
 * dma_buf is already tracked, drop the extra reference and return the
 * existing index.  Otherwise allocate bookkeeping, attach+map the dma-buf
 * (taking a second dma-buf reference via get_dma_buf for the attachment's
 * lifetime), record iova/size, and append to the session list under
 * list_mutex.  Indices come from the wrapping session_info->max_idx
 * counter (wraps at 0x10000000).
 *
 * NOTE(review): the error-label names and return statements between the
 * visible lines are missing from this excerpt -- the trailing dev_err/
 * detach/put lines are the failure-unwind path for dma_buf_map_attachment.
 */
431 static int vcodec_drm_import(struct vcodec_iommu_session_info *session_info,
434 struct vcodec_drm_buffer *drm_buffer = NULL, *n;
435 struct vcodec_iommu_info *iommu_info = session_info->iommu_info;
436 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
437 struct device *dev = session_info->dev;
438 struct dma_buf_attachment *attach;
439 struct sg_table *sgt;
440 struct dma_buf *dma_buf;
443 dma_buf = dma_buf_get(fd);
444 if (IS_ERR(dma_buf)) {
445 ret = PTR_ERR(dma_buf);
449 list_for_each_entry_safe(drm_buffer, n,
450 &session_info->buffer_list, list) {
451 if (drm_buffer->dma_buf == dma_buf) {
452 dma_buf_put(dma_buf);
453 return drm_buffer->index;
457 drm_buffer = kzalloc(sizeof(*drm_buffer), GFP_KERNEL);
463 drm_buffer->dma_buf = dma_buf;
464 drm_buffer->session_info = session_info;
466 kref_init(&drm_buffer->ref);
468 mutex_lock(&iommu_info->iommu_mutex);
469 drm_info = session_info->iommu_info->private;
470 if (!drm_info->attached) {
471 ret = vcodec_drm_attach_unlock(session_info->iommu_info);
476 attach = dma_buf_attach(drm_buffer->dma_buf, dev);
477 if (IS_ERR(attach)) {
478 ret = PTR_ERR(attach);
482 get_dma_buf(drm_buffer->dma_buf);
484 sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);
490 drm_buffer->iova = sg_dma_address(sgt->sgl);
491 drm_buffer->size = drm_buffer->dma_buf->size;
493 drm_buffer->attach = attach;
494 drm_buffer->sgt = sgt;
496 mutex_unlock(&iommu_info->iommu_mutex);
498 INIT_LIST_HEAD(&drm_buffer->list);
499 mutex_lock(&session_info->list_mutex);
500 drm_buffer->index = session_info->max_idx;
501 list_add_tail(&drm_buffer->list, &session_info->buffer_list);
502 session_info->max_idx++;
503 if ((session_info->max_idx & 0xfffffff) == 0)
504 session_info->max_idx = 0;
505 mutex_unlock(&session_info->list_mutex);
507 return drm_buffer->index;
510 dev_err(dev, "dmabuf map attach failed\n");
511 dma_buf_detach(drm_buffer->dma_buf, attach);
512 dma_buf_put(drm_buffer->dma_buf);
515 mutex_unlock(&iommu_info->iommu_mutex);
/*
 * Allocate the backend-private state: an iommu domain on the platform bus
 * plus its DMA cookie, then perform the initial attach.  The trailing
 * iommu_domain_free() is the error-unwind path for cookie allocation
 * failure (its label line is missing from this excerpt -- confirm).
 */
520 static int vcodec_drm_create(struct vcodec_iommu_info *iommu_info)
522 struct vcodec_iommu_drm_info *drm_info;
525 iommu_info->private = kzalloc(sizeof(*drm_info),
527 drm_info = iommu_info->private;
531 drm_info->domain = iommu_domain_alloc(&platform_bus_type);
532 drm_info->attached = false;
533 if (!drm_info->domain)
536 ret = iommu_get_dma_cookie(drm_info->domain);
538 goto err_free_domain;
540 vcodec_drm_attach(iommu_info);
545 iommu_domain_free(drm_info->domain);
/*
 * Tear down everything vcodec_drm_create() set up: detach the device,
 * release the DMA cookie and domain, and clear the private pointer.
 * (The kfree of drm_info is on a line missing from this excerpt -- confirm.)
 */
550 static int vcodec_drm_destroy(struct vcodec_iommu_info *iommu_info)
552 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
554 vcodec_drm_detach(iommu_info);
555 iommu_put_dma_cookie(drm_info->domain);
556 iommu_domain_free(drm_info->domain);
559 iommu_info->private = NULL;
/*
 * DRM/dma-buf backend vtable plugged into the generic vcodec iommu layer.
 * Note: .dump intentionally references the misspelled vcdoec_drm_dump_info.
 */
564 static struct vcodec_iommu_ops drm_ops = {
565 .create = vcodec_drm_create,
566 .import = vcodec_drm_import,
567 .free = vcodec_drm_free,
568 .free_fd = vcodec_drm_free_fd,
569 .map_kernel = vcodec_drm_map_kernel,
570 .unmap_kernel = vcodec_drm_unmap_kernel,
571 .map_iommu = vcodec_drm_map_iommu,
572 .unmap_iommu = vcodec_drm_unmap_iommu,
573 .destroy = vcodec_drm_destroy,
574 .dump = vcdoec_drm_dump_info,
575 .attach = vcodec_drm_attach,
576 .detach = vcodec_drm_detach,
577 .clear = vcodec_drm_clear_session,
/* Install the DRM backend ops on an iommu_info instance. */
580 void vcodec_iommu_drm_set_ops(struct vcodec_iommu_info *iommu_info)
584 iommu_info->ops = &drm_ops;