2 * Copyright (C) 2016 Fuzhou Rockchip Electronics Co., Ltd
3 * author: Jung Zhao jung.zhao@rock-chips.com
4 * Randy Li, randy.li@rock-chips.com
6 * This software is licensed under the terms of the GNU General Public
7 * License version 2, as published by the Free Software Foundation, and
8 * may be copied, distributed, and modified under those terms.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
16 #include <linux/dma-iommu.h>
18 #include <linux/dma-buf.h>
20 #include <drm/drm_atomic.h>
21 #include <drm/drm_crtc_helper.h>
22 #include <drm/drm_fb_helper.h>
23 #include <drm/drm_sync_helper.h>
24 #include <drm/rockchip_drm.h>
25 #include <linux/dma-mapping.h>
26 #include <linux/rockchip-iovmm.h>
27 #include <linux/pm_runtime.h>
28 #include <linux/memblock.h>
29 #include <linux/module.h>
30 #include <linux/of_address.h>
31 #include <linux/of_graph.h>
32 #include <linux/component.h>
33 #include <linux/fence.h>
34 #include <linux/console.h>
35 #include <linux/kref.h>
36 #include <linux/fdtable.h>
38 #include "vcodec_iommu_ops.h"
/*
 * Per-buffer book-keeping for one dma-buf imported into a vcodec iommu
 * session.  Lives on session_info->buffer_list.
 * NOTE(review): this view is fragmentary -- fields referenced by the code
 * below (fd, index, iova, size, cpu_addr, pages, sgt, ref) are declared in
 * the lines missing here; confirm against the full file.
 */
40 struct vcodec_drm_buffer {
41 struct list_head list;
42 struct dma_buf *dma_buf;
51 struct dma_buf_attachment *attach;
55 struct vcodec_iommu_session_info *session_info;
/*
 * Device-level iommu state kept in vcodec_iommu_info->private.
 * NOTE(review): an 'attached' bool is read/written by the code below but
 * its declaration is in lines not visible here.
 */
58 struct vcodec_iommu_drm_info {
59 struct iommu_domain *domain;
/*
 * Find a buffer in the session list by its session-local index.
 * "no_lock": the caller must already hold session_info->list_mutex.
 * Returns the matching buffer (the return path is in lines not shown here;
 * presumably NULL when no entry matches -- confirm in full file).
 */
63 static struct vcodec_drm_buffer *
64 vcodec_drm_get_buffer_no_lock(struct vcodec_iommu_session_info *session_info,
67 struct vcodec_drm_buffer *drm_buffer = NULL, *n;
69 list_for_each_entry_safe(drm_buffer, n, &session_info->buffer_list,
71 if (drm_buffer->index == idx)
/*
 * Find a buffer in the session list by its dma-buf file descriptor.
 * "no_lock": the caller must already hold session_info->list_mutex.
 * Same lookup pattern as vcodec_drm_get_buffer_no_lock(), keyed on fd.
 */
78 static struct vcodec_drm_buffer *
79 vcodec_drm_get_buffer_fd_no_lock(struct vcodec_iommu_session_info *session_info,
82 struct vcodec_drm_buffer *drm_buffer = NULL, *n;
84 list_for_each_entry_safe(drm_buffer, n, &session_info->buffer_list,
86 if (drm_buffer->fd == fd)
/*
 * Detach the iommu domain from the device, if currently attached.
 * Idempotent: returns early (under the mutex) when already detached.
 */
93 static void vcodec_drm_detach(struct vcodec_iommu_info *iommu_info)
95 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
96 struct device *dev = iommu_info->dev;
97 struct iommu_domain *domain = drm_info->domain;
99 mutex_lock(&iommu_info->iommu_mutex);
/* Nothing to do if we never attached (or already detached). */
101 if (!drm_info->attached) {
102 mutex_unlock(&iommu_info->iommu_mutex);
106 iommu_detach_device(domain, dev);
107 drm_info->attached = false;
109 mutex_unlock(&iommu_info->iommu_mutex);
/*
 * Attach the iommu domain to the device and install its DMA ops.
 * "unlock" suffix: caller is expected to hold iommu_info->iommu_mutex.
 * Configures a 32-bit coherent DMA mask and max segment size first.
 * Returns 0 on success, negative errno otherwise (the non-error return
 * statements fall in lines not visible here).
 */
112 static int vcodec_drm_attach_unlock(struct vcodec_iommu_info *iommu_info)
114 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
115 struct device *dev = iommu_info->dev;
116 struct iommu_domain *domain = drm_info->domain;
119 ret = dma_set_coherent_mask(dev, DMA_BIT_MASK(32));
123 dma_set_max_seg_size(dev, DMA_BIT_MASK(32));
124 ret = iommu_attach_device(domain, dev);
126 dev_err(dev, "Failed to attach iommu device\n");
/* IOVA window: 256MB base, 2GB span -- rollback the attach on failure. */
130 if (!common_iommu_setup_dma_ops(dev, 0x10000000, SZ_2G, domain->ops)) {
131 dev_err(dev, "Failed to set dma_ops\n");
132 iommu_detach_device(domain, dev);
/*
 * Public attach entry point: takes the iommu mutex and attaches once.
 * Idempotent -- returns early when already attached; marks attached only
 * after vcodec_drm_attach_unlock() succeeds.
 */
139 static int vcodec_drm_attach(struct vcodec_iommu_info *iommu_info)
141 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
144 mutex_lock(&iommu_info->iommu_mutex);
146 if (drm_info->attached) {
147 mutex_unlock(&iommu_info->iommu_mutex);
151 ret = vcodec_drm_attach_unlock(iommu_info);
/* NOTE(review): unlock here appears to be the failure path (missing
 * lines between 151 and 157 presumably test ret) -- confirm. */
153 mutex_unlock(&iommu_info->iommu_mutex);
157 drm_info->attached = true;
159 mutex_unlock(&iommu_info->iommu_mutex);
/*
 * Build a kernel virtual mapping for a buffer from its scatter-gather
 * table: collect every backing page into a flat array, then vmap() it
 * with noncached protection (CPU view of device memory).
 * Returns the vmap address, or NULL-ish on failure (error return path is
 * in lines not visible here).
 * NOTE(review): the initialization of 'page' (presumably sg_page(sg)) is
 * in a missing line; also assumes each sg->length is a PAGE_SIZE multiple.
 */
164 static void *vcodec_drm_sgt_map_kernel(struct vcodec_drm_buffer *drm_buffer)
166 struct vcodec_iommu_session_info *session_info =
167 drm_buffer->session_info;
168 struct device *dev = session_info->dev;
169 struct scatterlist *sgl, *sg;
170 int nr_pages = PAGE_ALIGN(drm_buffer->size) >> PAGE_SHIFT;
171 int i = 0, j = 0, k = 0;
174 drm_buffer->pages = kmalloc_array(nr_pages, sizeof(*drm_buffer->pages),
176 if (!(drm_buffer->pages)) {
177 dev_err(dev, "drm map can not alloc pages\n");
182 sgl = drm_buffer->sgt->sgl;
/* Walk the sg list, flattening each segment into per-page entries. */
184 for_each_sg(sgl, sg, drm_buffer->sgt->nents, i) {
186 for (j = 0; j < sg->length / PAGE_SIZE; j++)
187 drm_buffer->pages[k++] = page++;
190 return vmap(drm_buffer->pages, nr_pages, VM_MAP,
191 pgprot_noncached(PAGE_KERNEL));
/*
 * Undo vcodec_drm_sgt_map_kernel(): drop the vmap and free the page
 * array.  Caller is responsible for clearing drm_buffer->cpu_addr.
 */
194 static void vcodec_drm_sgt_unmap_kernel(struct vcodec_drm_buffer *drm_buffer)
196 vunmap(drm_buffer->cpu_addr);
197 kfree(drm_buffer->pages);
/*
 * kref release callback: tear down all mappings of one buffer once its
 * last reference is dropped (via kref_put in the unmap/free paths).
 * Releases, in order: the kernel vmap, the dma-buf attachment/mapping,
 * and the dma-buf reference taken at import time.
 * NOTE(review): the runtime message below says "clea map" -- typo for
 * "clear map"; left untouched here since string text is behavior.
 */
200 static void vcodec_drm_clear_map(struct kref *ref)
202 struct vcodec_drm_buffer *drm_buffer =
203 container_of(ref, struct vcodec_drm_buffer, ref);
204 struct vcodec_iommu_session_info *session_info =
205 drm_buffer->session_info;
206 struct vcodec_iommu_info *iommu_info = session_info->iommu_info;
207 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
208 struct device *dev = session_info->dev;
209 struct iommu_domain *domain = drm_info->domain;
211 mutex_lock(&iommu_info->iommu_mutex);
212 drm_info = session_info->iommu_info->private;
/* Temporarily attach if needed so the unmap below can proceed. */
213 if (!drm_info->attached) {
214 if (vcodec_drm_attach_unlock(session_info->iommu_info))
215 dev_err(dev, "can't clea map, attach iommu failed.\n");
218 if (drm_buffer->cpu_addr) {
219 vcodec_drm_sgt_unmap_kernel(drm_buffer);
220 drm_buffer->cpu_addr = NULL;
223 if (drm_buffer->attach) {
224 dma_buf_unmap_attachment(drm_buffer->attach, drm_buffer->sgt,
226 dma_buf_detach(drm_buffer->dma_buf, drm_buffer->attach);
227 dma_buf_put(drm_buffer->dma_buf);
228 drm_buffer->attach = NULL;
/* If we were not attached on entry, restore the detached state. */
231 if (!drm_info->attached)
232 iommu_detach_device(domain, dev);
234 mutex_unlock(&iommu_info->iommu_mutex);
/*
 * Debug dump: log every buffer still held on the session's buffer list.
 * NOTE(review): function name has a typo ("vcdoec" for "vcodec"); it is
 * referenced by the drm_ops table below, so renaming must touch both
 * sites together.
 */
237 static void vcdoec_drm_dump_info(struct vcodec_iommu_session_info *session_info)
239 struct vcodec_drm_buffer *drm_buffer = NULL, *n;
241 vpu_iommu_debug(session_info->debug_level, DEBUG_IOMMU_OPS_DUMP,
242 "still there are below buffers stored in list\n");
243 list_for_each_entry_safe(drm_buffer, n, &session_info->buffer_list,
245 vpu_iommu_debug(session_info->debug_level, DEBUG_IOMMU_OPS_DUMP,
246 "index %d drm_buffer fd %d cpu_addr %p\n",
248 drm_buffer->fd, drm_buffer->cpu_addr);
/*
 * Free the session's record of buffer 'idx': once its refcount has
 * reached zero, drop the dma-buf reference and unlink it from the list.
 * NOTE(review): reading ref.refcount directly via atomic_read() bypasses
 * the kref API and races with concurrent kref_get/kref_put -- upstream
 * kernels provide kref_read()/refcount helpers for this; worth auditing.
 */
252 static int vcodec_drm_free(struct vcodec_iommu_session_info *session_info,
255 struct device *dev = session_info->dev;
256 /* please double-check all maps have been released */
257 struct vcodec_drm_buffer *drm_buffer;
259 mutex_lock(&session_info->list_mutex);
260 drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
/* Lookup failure path (the if() itself is in a line not shown here). */
263 dev_err(dev, "can not find %d buffer in list\n", idx);
264 mutex_unlock(&session_info->list_mutex);
269 if (atomic_read(&drm_buffer->ref.refcount) == 0) {
270 dma_buf_put(drm_buffer->dma_buf);
271 list_del_init(&drm_buffer->list);
274 mutex_unlock(&session_info->list_mutex);
/*
 * Drop one iommu-mapping reference on buffer 'idx' (pairs with
 * vcodec_drm_map_iommu); the final put triggers vcodec_drm_clear_map.
 */
280 vcodec_drm_unmap_iommu(struct vcodec_iommu_session_info *session_info,
283 struct device *dev = session_info->dev;
284 struct vcodec_drm_buffer *drm_buffer;
286 /* Force to flush iommu table */
287 if (of_machine_is_compatible("rockchip,rk3288"))
288 rockchip_iovmm_invalidate_tlb(session_info->mmu_dev);
290 mutex_lock(&session_info->list_mutex);
291 drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
292 mutex_unlock(&session_info->list_mutex);
/* Lookup failure path (guarding if() is in a line not shown here). */
295 dev_err(dev, "can not find %d buffer in list\n", idx);
299 kref_put(&drm_buffer->ref, vcodec_drm_clear_map);
/*
 * Report the device (iommu) address of buffer 'idx' and take a reference
 * on it.  Outputs via *iova / *size; balanced by vcodec_drm_unmap_iommu.
 */
304 static int vcodec_drm_map_iommu(struct vcodec_iommu_session_info *session_info,
309 struct device *dev = session_info->dev;
310 struct vcodec_drm_buffer *drm_buffer;
312 /* Force to flush iommu table */
313 if (of_machine_is_compatible("rockchip,rk3288"))
314 rockchip_iovmm_invalidate_tlb(session_info->mmu_dev);
316 mutex_lock(&session_info->list_mutex);
317 drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
318 mutex_unlock(&session_info->list_mutex);
/* Lookup failure path (guarding if() is in a line not shown here). */
321 dev_err(dev, "can not find %d buffer in list\n", idx);
325 kref_get(&drm_buffer->ref);
327 *iova = drm_buffer->iova;
329 *size = drm_buffer->size;
/*
 * Tear down the CPU (vmap) mapping of buffer 'idx' and drop the
 * reference taken by vcodec_drm_map_kernel.
 */
334 vcodec_drm_unmap_kernel(struct vcodec_iommu_session_info *session_info, int idx)
336 struct device *dev = session_info->dev;
337 struct vcodec_drm_buffer *drm_buffer;
339 mutex_lock(&session_info->list_mutex);
340 drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
341 mutex_unlock(&session_info->list_mutex);
/* Lookup failure path (guarding if() is in a line not shown here). */
344 dev_err(dev, "can not find %d buffer in list\n", idx);
349 if (drm_buffer->cpu_addr) {
350 vcodec_drm_sgt_unmap_kernel(drm_buffer);
351 drm_buffer->cpu_addr = NULL;
354 kref_put(&drm_buffer->ref, vcodec_drm_clear_map);
/*
 * fd-keyed variant of vcodec_drm_free: locate the buffer by its dma-buf
 * fd, unmap its iommu reference, then (if the refcount hit zero) drop
 * the dma-buf and unlink the node.
 * NOTE(review): same raw atomic_read(&ref.refcount) pattern as
 * vcodec_drm_free -- see the caveat there.
 */
359 vcodec_drm_free_fd(struct vcodec_iommu_session_info *session_info, int fd)
361 struct device *dev = session_info->dev;
362 /* please double-check all maps have been released */
363 struct vcodec_drm_buffer *drm_buffer = NULL;
365 mutex_lock(&session_info->list_mutex);
366 drm_buffer = vcodec_drm_get_buffer_fd_no_lock(session_info, fd);
/* Lookup failure path (guarding if() is in a line not shown here). */
369 dev_err(dev, "can not find %d buffer in list\n", fd);
370 mutex_unlock(&session_info->list_mutex);
374 mutex_unlock(&session_info->list_mutex);
/* Drop the iommu-map reference before the final bookkeeping below. */
376 vcodec_drm_unmap_iommu(session_info, drm_buffer->index);
378 mutex_lock(&session_info->list_mutex);
379 if (atomic_read(&drm_buffer->ref.refcount) == 0) {
380 dma_buf_put(drm_buffer->dma_buf);
381 list_del_init(&drm_buffer->list);
384 mutex_unlock(&session_info->list_mutex);
/*
 * Release every buffer still tracked by a session (session teardown).
 * Uses the _safe iterator since vcodec_drm_free unlinks nodes.
 * NOTE(review): kref_put may invoke vcodec_drm_clear_map before the
 * vcodec_drm_free call on the same node -- audit the ordering/ownership
 * here against the full file.
 */
390 vcodec_drm_clear_session(struct vcodec_iommu_session_info *session_info)
392 struct vcodec_drm_buffer *drm_buffer = NULL, *n;
394 list_for_each_entry_safe(drm_buffer, n, &session_info->buffer_list,
396 kref_put(&drm_buffer->ref, vcodec_drm_clear_map);
397 vcodec_drm_free(session_info, drm_buffer->index);
/*
 * Get (creating on first use) a CPU-visible mapping of buffer 'idx'.
 * Takes a reference; balanced by vcodec_drm_unmap_kernel.
 * Returns the kernel virtual address (cached in drm_buffer->cpu_addr).
 */
402 vcodec_drm_map_kernel(struct vcodec_iommu_session_info *session_info, int idx)
404 struct device *dev = session_info->dev;
405 struct vcodec_drm_buffer *drm_buffer;
407 mutex_lock(&session_info->list_mutex);
408 drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
409 mutex_unlock(&session_info->list_mutex);
/* Lookup failure path (guarding if() is in a line not shown here). */
412 dev_err(dev, "can not find %d buffer in list\n", idx);
/* Lazily create the vmap only on first map request. */
416 if (!drm_buffer->cpu_addr)
417 drm_buffer->cpu_addr =
418 vcodec_drm_sgt_map_kernel(drm_buffer);
420 kref_get(&drm_buffer->ref);
422 return drm_buffer->cpu_addr;
/*
 * Import a dma-buf (by fd) into the session: attach it to the device,
 * map it through the iommu, record its device address, and assign a
 * session-local index.  Returns the index (existing index if this fd was
 * already imported), or a negative errno on failure.
 */
425 static int vcodec_drm_import(struct vcodec_iommu_session_info *session_info,
428 struct vcodec_drm_buffer *drm_buffer = NULL, *n;
429 struct vcodec_iommu_info *iommu_info = session_info->iommu_info;
430 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
431 struct device *dev = session_info->dev;
432 struct dma_buf_attachment *attach;
433 struct sg_table *sgt;
/* Fast path: fd already imported into this session -- reuse its index. */
436 list_for_each_entry_safe(drm_buffer, n,
437 &session_info->buffer_list, list) {
438 if (drm_buffer->fd == fd)
439 return drm_buffer->index;
442 drm_buffer = kzalloc(sizeof(*drm_buffer), GFP_KERNEL);
448 drm_buffer->dma_buf = dma_buf_get(fd);
449 if (IS_ERR(drm_buffer->dma_buf)) {
450 ret = PTR_ERR(drm_buffer->dma_buf);
455 drm_buffer->session_info = session_info;
/* Refcount starts at 1; dropped via kref_put -> vcodec_drm_clear_map. */
457 kref_init(&drm_buffer->ref);
459 mutex_lock(&iommu_info->iommu_mutex);
460 drm_info = session_info->iommu_info->private;
/* Make sure the iommu is attached before mapping the attachment. */
461 if (!drm_info->attached) {
462 ret = vcodec_drm_attach_unlock(session_info->iommu_info);
467 attach = dma_buf_attach(drm_buffer->dma_buf, dev);
468 if (IS_ERR(attach)) {
469 ret = PTR_ERR(attach);
/* Extra ref held for the lifetime of the attachment (dropped in
 * vcodec_drm_clear_map). */
473 get_dma_buf(drm_buffer->dma_buf);
475 sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);
/* Device address of the (iommu-contiguous) mapping. */
481 drm_buffer->iova = sg_dma_address(sgt->sgl);
482 drm_buffer->size = drm_buffer->dma_buf->size;
484 drm_buffer->attach = attach;
485 drm_buffer->sgt = sgt;
487 mutex_unlock(&iommu_info->iommu_mutex);
489 INIT_LIST_HEAD(&drm_buffer->list);
490 mutex_lock(&session_info->list_mutex);
491 drm_buffer->index = session_info->max_idx;
492 list_add_tail(&drm_buffer->list, &session_info->buffer_list);
493 session_info->max_idx++;
/* Wrap the index counter when it rolls over the 28-bit mask. */
494 if ((session_info->max_idx & 0xfffffff) == 0)
495 session_info->max_idx = 0;
496 mutex_unlock(&session_info->list_mutex);
498 return drm_buffer->index;
/* Error unwind labels (label names are in lines not shown here). */
501 dev_err(dev, "dmabuf map attach failed\n");
502 dma_buf_detach(drm_buffer->dma_buf, attach);
503 dma_buf_put(drm_buffer->dma_buf);
506 mutex_unlock(&iommu_info->iommu_mutex);
/*
 * Allocate the per-device DRM/iommu state: iommu domain on the platform
 * bus, DMA-iommu cookie, then perform the initial attach.
 * Returns 0 on success, negative errno otherwise (success return and some
 * error-checks fall in lines not visible here).
 */
511 static int vcodec_drm_create(struct vcodec_iommu_info *iommu_info)
513 struct vcodec_iommu_drm_info *drm_info;
516 iommu_info->private = kzalloc(sizeof(*drm_info),
518 drm_info = iommu_info->private;
522 drm_info->domain = iommu_domain_alloc(&platform_bus_type);
523 drm_info->attached = false;
524 if (!drm_info->domain)
527 ret = iommu_get_dma_cookie(drm_info->domain);
529 goto err_free_domain;
531 vcodec_drm_attach(iommu_info);
/* err_free_domain unwind path. */
536 iommu_domain_free(drm_info->domain);
/*
 * Inverse of vcodec_drm_create: detach, release the DMA cookie, free the
 * domain, and clear the private pointer (kfree of drm_info is in a line
 * not visible here -- confirm it is freed to avoid a leak).
 */
541 static int vcodec_drm_destroy(struct vcodec_iommu_info *iommu_info)
543 struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
545 vcodec_drm_detach(iommu_info);
546 iommu_put_dma_cookie(drm_info->domain);
547 iommu_domain_free(drm_info->domain);
550 iommu_info->private = NULL;
/*
 * Backend vtable wiring the DRM/dma-buf implementation into the generic
 * vcodec iommu layer (see vcodec_iommu_ops.h).
 * NOTE(review): .dump points at the typo'd name vcdoec_drm_dump_info.
 */
555 static struct vcodec_iommu_ops drm_ops = {
556 .create = vcodec_drm_create,
557 .import = vcodec_drm_import,
558 .free = vcodec_drm_free,
559 .free_fd = vcodec_drm_free_fd,
560 .map_kernel = vcodec_drm_map_kernel,
561 .unmap_kernel = vcodec_drm_unmap_kernel,
562 .map_iommu = vcodec_drm_map_iommu,
563 .unmap_iommu = vcodec_drm_unmap_iommu,
564 .destroy = vcodec_drm_destroy,
565 .dump = vcdoec_drm_dump_info,
566 .attach = vcodec_drm_attach,
567 .detach = vcodec_drm_detach,
568 .clear = vcodec_drm_clear_session,
/* Install the DRM backend vtable on an iommu_info instance. */
571 void vcodec_iommu_drm_set_ops(struct vcodec_iommu_info *iommu_info)
575 iommu_info->ops = &drm_ops;