/*
 * Copyright (C) 2016 Fuzhou Rockchip Electronics Co., Ltd
 * author: Jung Zhao jung.zhao@rock-chips.com
 *         Randy Li, randy.li@rock-chips.com
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */
16 #include <linux/dma-iommu.h>
18 #include <linux/dma-buf.h>
20 #include <drm/drm_atomic.h>
21 #include <drm/drm_crtc_helper.h>
22 #include <drm/drm_fb_helper.h>
23 #include <drm/drm_sync_helper.h>
24 #include <drm/rockchip_drm.h>
25 #include <linux/dma-mapping.h>
26 #include <linux/rockchip-iovmm.h>
27 #include <linux/pm_runtime.h>
28 #include <linux/memblock.h>
29 #include <linux/module.h>
30 #include <linux/of_address.h>
31 #include <linux/of_graph.h>
32 #include <linux/component.h>
33 #include <linux/fence.h>
34 #include <linux/console.h>
35 #include <linux/kref.h>
36 #include <linux/fdtable.h>
37 #include <linux/ktime.h>
39 #include "vcodec_iommu_ops.h"
41 struct vcodec_drm_buffer {
42 struct list_head list;
43 struct dma_buf *dma_buf;
51 struct dma_buf_attachment *attach;
55 struct vcodec_iommu_session_info *session_info;
59 struct vcodec_iommu_drm_info {
60 struct iommu_domain *domain;
64 static struct vcodec_drm_buffer *
65 vcodec_drm_get_buffer_no_lock(struct vcodec_iommu_session_info *session_info,
68 struct vcodec_drm_buffer *drm_buffer = NULL, *n;
70 list_for_each_entry_safe(drm_buffer, n, &session_info->buffer_list,
72 if (drm_buffer->index == idx) {
73 drm_buffer->last_used = ktime_get();
81 static struct vcodec_drm_buffer *
82 vcodec_drm_get_buffer_fd_no_lock(struct vcodec_iommu_session_info *session_info,
85 struct vcodec_drm_buffer *drm_buffer = NULL, *n;
86 struct dma_buf *dma_buf = NULL;
88 dma_buf = dma_buf_get(fd);
90 list_for_each_entry_safe(drm_buffer, n, &session_info->buffer_list,
92 if (drm_buffer->dma_buf == dma_buf) {
93 drm_buffer->last_used = ktime_get();
104 static void vcodec_drm_detach(struct vcodec_iommu_info *iommu_info)
106 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
107 struct device *dev = iommu_info->dev;
108 struct iommu_domain *domain = drm_info->domain;
110 mutex_lock(&iommu_info->iommu_mutex);
112 if (!drm_info->attached) {
113 mutex_unlock(&iommu_info->iommu_mutex);
117 iommu_detach_device(domain, dev);
118 drm_info->attached = false;
120 mutex_unlock(&iommu_info->iommu_mutex);
123 static int vcodec_drm_attach_unlock(struct vcodec_iommu_info *iommu_info)
125 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
126 struct device *dev = iommu_info->dev;
127 struct iommu_domain *domain = drm_info->domain;
130 ret = dma_set_coherent_mask(dev, DMA_BIT_MASK(32));
134 dma_set_max_seg_size(dev, DMA_BIT_MASK(32));
135 ret = iommu_attach_device(domain, dev);
137 dev_err(dev, "Failed to attach iommu device\n");
141 if (!common_iommu_setup_dma_ops(dev, 0x10000000, SZ_2G, domain->ops)) {
142 dev_err(dev, "Failed to set dma_ops\n");
143 iommu_detach_device(domain, dev);
150 static int vcodec_drm_attach(struct vcodec_iommu_info *iommu_info)
152 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
155 mutex_lock(&iommu_info->iommu_mutex);
157 if (drm_info->attached) {
158 mutex_unlock(&iommu_info->iommu_mutex);
162 ret = vcodec_drm_attach_unlock(iommu_info);
164 mutex_unlock(&iommu_info->iommu_mutex);
168 drm_info->attached = true;
170 mutex_unlock(&iommu_info->iommu_mutex);
175 static void *vcodec_drm_sgt_map_kernel(struct vcodec_drm_buffer *drm_buffer)
177 struct vcodec_iommu_session_info *session_info =
178 drm_buffer->session_info;
179 struct device *dev = session_info->dev;
180 struct scatterlist *sgl, *sg;
181 int nr_pages = PAGE_ALIGN(drm_buffer->size) >> PAGE_SHIFT;
182 int i = 0, j = 0, k = 0;
185 drm_buffer->pages = kmalloc_array(nr_pages, sizeof(*drm_buffer->pages),
187 if (!(drm_buffer->pages)) {
188 dev_err(dev, "drm map can not alloc pages\n");
193 sgl = drm_buffer->sgt->sgl;
195 for_each_sg(sgl, sg, drm_buffer->sgt->nents, i) {
197 for (j = 0; j < sg->length / PAGE_SIZE; j++)
198 drm_buffer->pages[k++] = page++;
201 return vmap(drm_buffer->pages, nr_pages, VM_MAP,
202 pgprot_noncached(PAGE_KERNEL));
205 static void vcodec_drm_sgt_unmap_kernel(struct vcodec_drm_buffer *drm_buffer)
207 vunmap(drm_buffer->cpu_addr);
208 kfree(drm_buffer->pages);
211 static void vcodec_drm_clear_map(struct kref *ref)
213 struct vcodec_drm_buffer *drm_buffer =
214 container_of(ref, struct vcodec_drm_buffer, ref);
215 struct vcodec_iommu_session_info *session_info =
216 drm_buffer->session_info;
217 struct vcodec_iommu_info *iommu_info = session_info->iommu_info;
218 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
219 struct device *dev = session_info->dev;
220 struct iommu_domain *domain = drm_info->domain;
222 mutex_lock(&iommu_info->iommu_mutex);
223 drm_info = session_info->iommu_info->private;
224 if (!drm_info->attached) {
225 if (vcodec_drm_attach_unlock(session_info->iommu_info))
226 dev_err(dev, "can't clea map, attach iommu failed.\n");
229 if (drm_buffer->cpu_addr) {
230 vcodec_drm_sgt_unmap_kernel(drm_buffer);
231 drm_buffer->cpu_addr = NULL;
234 if (drm_buffer->attach) {
235 dma_buf_unmap_attachment(drm_buffer->attach, drm_buffer->sgt,
237 dma_buf_detach(drm_buffer->dma_buf, drm_buffer->attach);
238 dma_buf_put(drm_buffer->dma_buf);
239 drm_buffer->attach = NULL;
242 if (!drm_info->attached)
243 iommu_detach_device(domain, dev);
245 mutex_unlock(&iommu_info->iommu_mutex);
248 static void vcdoec_drm_dump_info(struct vcodec_iommu_session_info *session_info)
250 struct vcodec_drm_buffer *drm_buffer = NULL, *n;
252 vpu_iommu_debug(session_info->debug_level, DEBUG_IOMMU_OPS_DUMP,
253 "still there are below buffers stored in list\n");
254 list_for_each_entry_safe(drm_buffer, n, &session_info->buffer_list,
256 vpu_iommu_debug(session_info->debug_level, DEBUG_IOMMU_OPS_DUMP,
257 "index %d drm_buffer dma_buf %p cpu_addr %p\n",
259 drm_buffer->dma_buf, drm_buffer->cpu_addr);
263 static int vcodec_drm_free(struct vcodec_iommu_session_info *session_info,
266 struct device *dev = session_info->dev;
267 /* please double-check all maps have been release */
268 struct vcodec_drm_buffer *drm_buffer;
270 mutex_lock(&session_info->list_mutex);
271 drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
274 dev_err(dev, "can not find %d buffer in list\n", idx);
275 mutex_unlock(&session_info->list_mutex);
280 if (atomic_read(&drm_buffer->ref.refcount) == 0) {
281 dma_buf_put(drm_buffer->dma_buf);
282 list_del_init(&drm_buffer->list);
284 session_info->buffer_nums--;
285 vpu_iommu_debug(session_info->debug_level, DEBUG_IOMMU_NORMAL,
286 "buffer nums %d\n", session_info->buffer_nums);
288 mutex_unlock(&session_info->list_mutex);
294 vcodec_drm_unmap_iommu(struct vcodec_iommu_session_info *session_info,
297 struct device *dev = session_info->dev;
298 struct vcodec_drm_buffer *drm_buffer;
300 /* Force to flush iommu table */
301 if (of_machine_is_compatible("rockchip,rk3288"))
302 rockchip_iovmm_invalidate_tlb(session_info->mmu_dev);
304 mutex_lock(&session_info->list_mutex);
305 drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
306 mutex_unlock(&session_info->list_mutex);
309 dev_err(dev, "can not find %d buffer in list\n", idx);
313 kref_put(&drm_buffer->ref, vcodec_drm_clear_map);
318 static int vcodec_drm_map_iommu(struct vcodec_iommu_session_info *session_info,
323 struct device *dev = session_info->dev;
324 struct vcodec_drm_buffer *drm_buffer;
326 /* Force to flush iommu table */
327 if (of_machine_is_compatible("rockchip,rk3288"))
328 rockchip_iovmm_invalidate_tlb(session_info->mmu_dev);
330 mutex_lock(&session_info->list_mutex);
331 drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
332 mutex_unlock(&session_info->list_mutex);
335 dev_err(dev, "can not find %d buffer in list\n", idx);
339 kref_get(&drm_buffer->ref);
341 *iova = drm_buffer->iova;
343 *size = drm_buffer->size;
348 vcodec_drm_unmap_kernel(struct vcodec_iommu_session_info *session_info, int idx)
350 struct device *dev = session_info->dev;
351 struct vcodec_drm_buffer *drm_buffer;
353 mutex_lock(&session_info->list_mutex);
354 drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
355 mutex_unlock(&session_info->list_mutex);
358 dev_err(dev, "can not find %d buffer in list\n", idx);
363 if (drm_buffer->cpu_addr) {
364 vcodec_drm_sgt_unmap_kernel(drm_buffer);
365 drm_buffer->cpu_addr = NULL;
368 kref_put(&drm_buffer->ref, vcodec_drm_clear_map);
373 vcodec_drm_free_fd(struct vcodec_iommu_session_info *session_info, int fd)
375 struct device *dev = session_info->dev;
376 /* please double-check all maps have been release */
377 struct vcodec_drm_buffer *drm_buffer = NULL;
379 mutex_lock(&session_info->list_mutex);
380 drm_buffer = vcodec_drm_get_buffer_fd_no_lock(session_info, fd);
383 dev_err(dev, "can not find %d buffer in list\n", fd);
384 mutex_unlock(&session_info->list_mutex);
388 mutex_unlock(&session_info->list_mutex);
390 vcodec_drm_unmap_iommu(session_info, drm_buffer->index);
392 mutex_lock(&session_info->list_mutex);
393 if (atomic_read(&drm_buffer->ref.refcount) == 0) {
394 dma_buf_put(drm_buffer->dma_buf);
395 list_del_init(&drm_buffer->list);
397 session_info->buffer_nums--;
398 vpu_iommu_debug(session_info->debug_level, DEBUG_IOMMU_NORMAL,
399 "buffer nums %d\n", session_info->buffer_nums);
401 mutex_unlock(&session_info->list_mutex);
407 vcodec_drm_clear_session(struct vcodec_iommu_session_info *session_info)
409 struct vcodec_drm_buffer *drm_buffer = NULL, *n;
411 list_for_each_entry_safe(drm_buffer, n, &session_info->buffer_list,
413 kref_put(&drm_buffer->ref, vcodec_drm_clear_map);
414 vcodec_drm_free(session_info, drm_buffer->index);
419 vcodec_drm_map_kernel(struct vcodec_iommu_session_info *session_info, int idx)
421 struct device *dev = session_info->dev;
422 struct vcodec_drm_buffer *drm_buffer;
424 mutex_lock(&session_info->list_mutex);
425 drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
426 mutex_unlock(&session_info->list_mutex);
429 dev_err(dev, "can not find %d buffer in list\n", idx);
433 if (!drm_buffer->cpu_addr)
434 drm_buffer->cpu_addr =
435 vcodec_drm_sgt_map_kernel(drm_buffer);
437 kref_get(&drm_buffer->ref);
439 return drm_buffer->cpu_addr;
442 static int vcodec_drm_import(struct vcodec_iommu_session_info *session_info,
445 struct vcodec_drm_buffer *drm_buffer = NULL, *n;
446 struct vcodec_drm_buffer *oldest_buffer = NULL, *loop_buffer = NULL;
447 struct vcodec_iommu_info *iommu_info = session_info->iommu_info;
448 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
449 struct iommu_domain *domain = drm_info->domain;
450 struct device *dev = session_info->dev;
451 struct dma_buf_attachment *attach;
452 struct sg_table *sgt;
453 struct dma_buf *dma_buf;
454 ktime_t oldest_time = ktime_set(0, 0);
457 dma_buf = dma_buf_get(fd);
458 if (IS_ERR(dma_buf)) {
459 ret = PTR_ERR(dma_buf);
463 list_for_each_entry_safe(drm_buffer, n,
464 &session_info->buffer_list, list) {
465 if (drm_buffer->dma_buf == dma_buf) {
466 dma_buf_put(dma_buf);
467 drm_buffer->last_used = ktime_get();
468 return drm_buffer->index;
472 drm_buffer = kzalloc(sizeof(*drm_buffer), GFP_KERNEL);
478 drm_buffer->dma_buf = dma_buf;
479 drm_buffer->session_info = session_info;
480 drm_buffer->last_used = ktime_get();
482 kref_init(&drm_buffer->ref);
484 mutex_lock(&iommu_info->iommu_mutex);
485 drm_info = session_info->iommu_info->private;
486 if (!drm_info->attached) {
487 ret = vcodec_drm_attach_unlock(session_info->iommu_info);
492 attach = dma_buf_attach(drm_buffer->dma_buf, dev);
493 if (IS_ERR(attach)) {
494 ret = PTR_ERR(attach);
498 get_dma_buf(drm_buffer->dma_buf);
500 sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);
506 drm_buffer->iova = sg_dma_address(sgt->sgl);
507 drm_buffer->size = drm_buffer->dma_buf->size;
509 drm_buffer->attach = attach;
510 drm_buffer->sgt = sgt;
512 if (!drm_info->attached)
513 iommu_detach_device(domain, dev);
515 mutex_unlock(&iommu_info->iommu_mutex);
517 INIT_LIST_HEAD(&drm_buffer->list);
518 mutex_lock(&session_info->list_mutex);
519 session_info->buffer_nums++;
520 vpu_iommu_debug(session_info->debug_level, DEBUG_IOMMU_NORMAL,
521 "buffer nums %d\n", session_info->buffer_nums);
522 if (session_info->buffer_nums > BUFFER_LIST_MAX_NUMS) {
523 list_for_each_entry_safe(loop_buffer, n,
524 &session_info->buffer_list, list) {
525 if (ktime_to_ns(oldest_time) == 0 ||
526 ktime_after(oldest_time,
527 loop_buffer->last_used)) {
528 oldest_time = loop_buffer->last_used;
529 oldest_buffer = loop_buffer;
532 kref_put(&oldest_buffer->ref, vcodec_drm_clear_map);
533 dma_buf_put(oldest_buffer->dma_buf);
534 list_del_init(&oldest_buffer->list);
535 kfree(oldest_buffer);
536 session_info->buffer_nums--;
538 drm_buffer->index = session_info->max_idx;
539 list_add_tail(&drm_buffer->list, &session_info->buffer_list);
540 session_info->max_idx++;
541 if ((session_info->max_idx & 0xfffffff) == 0)
542 session_info->max_idx = 0;
543 mutex_unlock(&session_info->list_mutex);
545 return drm_buffer->index;
548 dev_err(dev, "dmabuf map attach failed\n");
549 dma_buf_detach(drm_buffer->dma_buf, attach);
550 dma_buf_put(drm_buffer->dma_buf);
553 mutex_unlock(&iommu_info->iommu_mutex);
558 static int vcodec_drm_create(struct vcodec_iommu_info *iommu_info)
560 struct vcodec_iommu_drm_info *drm_info;
563 iommu_info->private = kzalloc(sizeof(*drm_info),
565 drm_info = iommu_info->private;
569 drm_info->domain = iommu_domain_alloc(&platform_bus_type);
570 drm_info->attached = false;
571 if (!drm_info->domain)
574 ret = iommu_get_dma_cookie(drm_info->domain);
576 goto err_free_domain;
578 vcodec_drm_attach(iommu_info);
583 iommu_domain_free(drm_info->domain);
588 static int vcodec_drm_destroy(struct vcodec_iommu_info *iommu_info)
590 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
592 vcodec_drm_detach(iommu_info);
593 iommu_put_dma_cookie(drm_info->domain);
594 iommu_domain_free(drm_info->domain);
597 iommu_info->private = NULL;
602 static struct vcodec_iommu_ops drm_ops = {
603 .create = vcodec_drm_create,
604 .import = vcodec_drm_import,
605 .free = vcodec_drm_free,
606 .free_fd = vcodec_drm_free_fd,
607 .map_kernel = vcodec_drm_map_kernel,
608 .unmap_kernel = vcodec_drm_unmap_kernel,
609 .map_iommu = vcodec_drm_map_iommu,
610 .unmap_iommu = vcodec_drm_unmap_iommu,
611 .destroy = vcodec_drm_destroy,
612 .dump = vcdoec_drm_dump_info,
613 .attach = vcodec_drm_attach,
614 .detach = vcodec_drm_detach,
615 .clear = vcodec_drm_clear_session,
618 void vcodec_iommu_drm_set_ops(struct vcodec_iommu_info *iommu_info)
622 iommu_info->ops = &drm_ops;