video: rockchip: vcodec: add buffer recycle mechanism in vpu
[firefly-linux-kernel-4.4.55.git] / drivers / video / rockchip / vcodec / vcodec_iommu_drm.c
1 /*
2  * Copyright (C) 2016 Fuzhou Rockchip Electronics Co., Ltd
3  * author: Jung Zhao jung.zhao@rock-chips.com
4  *         Randy Li, randy.li@rock-chips.com
5  *
6  * This software is licensed under the terms of the GNU General Public
7  * License version 2, as published by the Free Software Foundation, and
8  * may be copied, distributed, and modified under those terms.
9  *
10  * This program is distributed in the hope that it will be useful,
11  * but WITHOUT ANY WARRANTY; without even the implied warranty of
12  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
13  * GNU General Public License for more details.
14  *
15  */
16 #include <linux/dma-iommu.h>
17
18 #include <linux/dma-buf.h>
19 #include <drm/drmP.h>
20 #include <drm/drm_atomic.h>
21 #include <drm/drm_crtc_helper.h>
22 #include <drm/drm_fb_helper.h>
23 #include <drm/drm_sync_helper.h>
24 #include <drm/rockchip_drm.h>
25 #include <linux/dma-mapping.h>
26 #include <linux/rockchip-iovmm.h>
27 #include <linux/pm_runtime.h>
28 #include <linux/memblock.h>
29 #include <linux/module.h>
30 #include <linux/of_address.h>
31 #include <linux/of_graph.h>
32 #include <linux/component.h>
33 #include <linux/fence.h>
34 #include <linux/console.h>
35 #include <linux/kref.h>
36 #include <linux/fdtable.h>
37 #include <linux/ktime.h>
38
39 #include "vcodec_iommu_ops.h"
40
/*
 * Per-session record of one imported dma-buf.
 *
 * An instance lives on vcodec_iommu_session_info->buffer_list for every
 * dma-buf imported through vcodec_drm_import() until it is freed or
 * recycled by the LRU logic in vcodec_drm_import().
 */
struct vcodec_drm_buffer {
        struct list_head list;          /* link in session buffer_list */
        struct dma_buf *dma_buf;        /* imported buffer (refcounted) */
        union {
                unsigned long iova;     /* device address when iommu is in use */
                unsigned long phys;     /* physical address otherwise */
        };
        void *cpu_addr;                 /* vmap() kernel mapping, or NULL */
        unsigned long size;             /* buffer size in bytes */
        int index;                      /* session-unique handle returned to callers */
        struct dma_buf_attachment *attach;  /* attachment to session device */
        struct sg_table *sgt;           /* mapped scatter list of the attachment */
        struct page **pages;            /* page array backing the vmap() */
        struct kref ref;                /* map/unmap reference count */
        struct vcodec_iommu_session_info *session_info; /* owning session */
        ktime_t last_used;              /* timestamp for LRU recycling */
};
58
/* Private state of the DRM iommu backend: the domain and whether it is
 * currently attached to the device (guarded by iommu_info->iommu_mutex).
 */
struct vcodec_iommu_drm_info {
        struct iommu_domain *domain;    /* iommu domain for the vcodec device */
        bool attached;                  /* true while domain is attached */
};
63
64 static struct vcodec_drm_buffer *
65 vcodec_drm_get_buffer_no_lock(struct vcodec_iommu_session_info *session_info,
66                               int idx)
67 {
68         struct vcodec_drm_buffer *drm_buffer = NULL, *n;
69
70         list_for_each_entry_safe(drm_buffer, n, &session_info->buffer_list,
71                                  list) {
72                 if (drm_buffer->index == idx) {
73                         drm_buffer->last_used = ktime_get();
74                         return drm_buffer;
75                 }
76         }
77
78         return NULL;
79 }
80
81 static struct vcodec_drm_buffer *
82 vcodec_drm_get_buffer_fd_no_lock(struct vcodec_iommu_session_info *session_info,
83                                  int fd)
84 {
85         struct vcodec_drm_buffer *drm_buffer = NULL, *n;
86         struct dma_buf *dma_buf = NULL;
87
88         dma_buf = dma_buf_get(fd);
89
90         list_for_each_entry_safe(drm_buffer, n, &session_info->buffer_list,
91                                  list) {
92                 if (drm_buffer->dma_buf == dma_buf) {
93                         drm_buffer->last_used = ktime_get();
94                         dma_buf_put(dma_buf);
95                         return drm_buffer;
96                 }
97         }
98
99         dma_buf_put(dma_buf);
100
101         return NULL;
102 }
103
104 static void vcodec_drm_detach(struct vcodec_iommu_info *iommu_info)
105 {
106         struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
107         struct device *dev = iommu_info->dev;
108         struct iommu_domain *domain = drm_info->domain;
109
110         mutex_lock(&iommu_info->iommu_mutex);
111
112         if (!drm_info->attached) {
113                 mutex_unlock(&iommu_info->iommu_mutex);
114                 return;
115         }
116
117         iommu_detach_device(domain, dev);
118         drm_info->attached = false;
119
120         mutex_unlock(&iommu_info->iommu_mutex);
121 }
122
123 static int vcodec_drm_attach_unlock(struct vcodec_iommu_info *iommu_info)
124 {
125         struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
126         struct device *dev = iommu_info->dev;
127         struct iommu_domain *domain = drm_info->domain;
128         int ret = 0;
129
130         ret = dma_set_coherent_mask(dev, DMA_BIT_MASK(32));
131         if (ret)
132                 return ret;
133
134         dma_set_max_seg_size(dev, DMA_BIT_MASK(32));
135         ret = iommu_attach_device(domain, dev);
136         if (ret) {
137                 dev_err(dev, "Failed to attach iommu device\n");
138                 return ret;
139         }
140
141         if (!common_iommu_setup_dma_ops(dev, 0x10000000, SZ_2G, domain->ops)) {
142                 dev_err(dev, "Failed to set dma_ops\n");
143                 iommu_detach_device(domain, dev);
144                 ret = -ENODEV;
145         }
146
147         return ret;
148 }
149
150 static int vcodec_drm_attach(struct vcodec_iommu_info *iommu_info)
151 {
152         struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
153         int ret;
154
155         mutex_lock(&iommu_info->iommu_mutex);
156
157         if (drm_info->attached) {
158                 mutex_unlock(&iommu_info->iommu_mutex);
159                 return 0;
160         }
161
162         ret = vcodec_drm_attach_unlock(iommu_info);
163         if (ret) {
164                 mutex_unlock(&iommu_info->iommu_mutex);
165                 return ret;
166         }
167
168         drm_info->attached = true;
169
170         mutex_unlock(&iommu_info->iommu_mutex);
171
172         return ret;
173 }
174
175 static void *vcodec_drm_sgt_map_kernel(struct vcodec_drm_buffer *drm_buffer)
176 {
177         struct vcodec_iommu_session_info *session_info =
178                 drm_buffer->session_info;
179         struct device *dev = session_info->dev;
180         struct scatterlist *sgl, *sg;
181         int nr_pages = PAGE_ALIGN(drm_buffer->size) >> PAGE_SHIFT;
182         int i = 0, j = 0, k = 0;
183         struct page *page;
184
185         drm_buffer->pages = kmalloc_array(nr_pages, sizeof(*drm_buffer->pages),
186                                           GFP_KERNEL);
187         if (!(drm_buffer->pages)) {
188                 dev_err(dev, "drm map can not alloc pages\n");
189
190                 return NULL;
191         }
192
193         sgl = drm_buffer->sgt->sgl;
194
195         for_each_sg(sgl, sg, drm_buffer->sgt->nents, i) {
196                 page = sg_page(sg);
197                 for (j = 0; j < sg->length / PAGE_SIZE; j++)
198                         drm_buffer->pages[k++] = page++;
199         }
200
201         return vmap(drm_buffer->pages, nr_pages, VM_MAP,
202                     pgprot_noncached(PAGE_KERNEL));
203 }
204
205 static void vcodec_drm_sgt_unmap_kernel(struct vcodec_drm_buffer *drm_buffer)
206 {
207         vunmap(drm_buffer->cpu_addr);
208         kfree(drm_buffer->pages);
209 }
210
211 static void vcodec_drm_clear_map(struct kref *ref)
212 {
213         struct vcodec_drm_buffer *drm_buffer =
214                 container_of(ref, struct vcodec_drm_buffer, ref);
215         struct vcodec_iommu_session_info *session_info =
216                 drm_buffer->session_info;
217         struct vcodec_iommu_info *iommu_info = session_info->iommu_info;
218         struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
219         struct device *dev = session_info->dev;
220         struct iommu_domain *domain = drm_info->domain;
221
222         mutex_lock(&iommu_info->iommu_mutex);
223         drm_info = session_info->iommu_info->private;
224         if (!drm_info->attached) {
225                 if (vcodec_drm_attach_unlock(session_info->iommu_info))
226                         dev_err(dev, "can't clea map, attach iommu failed.\n");
227         }
228
229         if (drm_buffer->cpu_addr) {
230                 vcodec_drm_sgt_unmap_kernel(drm_buffer);
231                 drm_buffer->cpu_addr = NULL;
232         }
233
234         if (drm_buffer->attach) {
235                 dma_buf_unmap_attachment(drm_buffer->attach, drm_buffer->sgt,
236                                          DMA_BIDIRECTIONAL);
237                 dma_buf_detach(drm_buffer->dma_buf, drm_buffer->attach);
238                 dma_buf_put(drm_buffer->dma_buf);
239                 drm_buffer->attach = NULL;
240         }
241
242         if (!drm_info->attached)
243                 iommu_detach_device(domain, dev);
244
245         mutex_unlock(&iommu_info->iommu_mutex);
246 }
247
248 static void vcdoec_drm_dump_info(struct vcodec_iommu_session_info *session_info)
249 {
250         struct vcodec_drm_buffer *drm_buffer = NULL, *n;
251
252         vpu_iommu_debug(session_info->debug_level, DEBUG_IOMMU_OPS_DUMP,
253                         "still there are below buffers stored in list\n");
254         list_for_each_entry_safe(drm_buffer, n, &session_info->buffer_list,
255                                  list) {
256                 vpu_iommu_debug(session_info->debug_level, DEBUG_IOMMU_OPS_DUMP,
257                                 "index %d drm_buffer dma_buf %p cpu_addr %p\n",
258                                 drm_buffer->index,
259                                 drm_buffer->dma_buf, drm_buffer->cpu_addr);
260         }
261 }
262
263 static int vcodec_drm_free(struct vcodec_iommu_session_info *session_info,
264                            int idx)
265 {
266         struct device *dev = session_info->dev;
267         /* please double-check all maps have been release */
268         struct vcodec_drm_buffer *drm_buffer;
269
270         mutex_lock(&session_info->list_mutex);
271         drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
272
273         if (!drm_buffer) {
274                 dev_err(dev, "can not find %d buffer in list\n", idx);
275                 mutex_unlock(&session_info->list_mutex);
276
277                 return -EINVAL;
278         }
279
280         if (atomic_read(&drm_buffer->ref.refcount) == 0) {
281                 dma_buf_put(drm_buffer->dma_buf);
282                 list_del_init(&drm_buffer->list);
283                 kfree(drm_buffer);
284                 session_info->buffer_nums--;
285         }
286         mutex_unlock(&session_info->list_mutex);
287
288         return 0;
289 }
290
291 static int
292 vcodec_drm_unmap_iommu(struct vcodec_iommu_session_info *session_info,
293                        int idx)
294 {
295         struct device *dev = session_info->dev;
296         struct vcodec_drm_buffer *drm_buffer;
297
298         /* Force to flush iommu table */
299         if (of_machine_is_compatible("rockchip,rk3288"))
300                 rockchip_iovmm_invalidate_tlb(session_info->mmu_dev);
301
302         mutex_lock(&session_info->list_mutex);
303         drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
304         mutex_unlock(&session_info->list_mutex);
305
306         if (!drm_buffer) {
307                 dev_err(dev, "can not find %d buffer in list\n", idx);
308                 return -EINVAL;
309         }
310
311         kref_put(&drm_buffer->ref, vcodec_drm_clear_map);
312
313         return 0;
314 }
315
316 static int vcodec_drm_map_iommu(struct vcodec_iommu_session_info *session_info,
317                                 int idx,
318                                 unsigned long *iova,
319                                 unsigned long *size)
320 {
321         struct device *dev = session_info->dev;
322         struct vcodec_drm_buffer *drm_buffer;
323
324         /* Force to flush iommu table */
325         if (of_machine_is_compatible("rockchip,rk3288"))
326                 rockchip_iovmm_invalidate_tlb(session_info->mmu_dev);
327
328         mutex_lock(&session_info->list_mutex);
329         drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
330         mutex_unlock(&session_info->list_mutex);
331
332         if (!drm_buffer) {
333                 dev_err(dev, "can not find %d buffer in list\n", idx);
334                 return -EINVAL;
335         }
336
337         kref_get(&drm_buffer->ref);
338         if (iova)
339                 *iova = drm_buffer->iova;
340         if (size)
341                 *size = drm_buffer->size;
342         return 0;
343 }
344
345 static int
346 vcodec_drm_unmap_kernel(struct vcodec_iommu_session_info *session_info, int idx)
347 {
348         struct device *dev = session_info->dev;
349         struct vcodec_drm_buffer *drm_buffer;
350
351         mutex_lock(&session_info->list_mutex);
352         drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
353         mutex_unlock(&session_info->list_mutex);
354
355         if (!drm_buffer) {
356                 dev_err(dev, "can not find %d buffer in list\n", idx);
357
358                 return -EINVAL;
359         }
360
361         if (drm_buffer->cpu_addr) {
362                 vcodec_drm_sgt_unmap_kernel(drm_buffer);
363                 drm_buffer->cpu_addr = NULL;
364         }
365
366         kref_put(&drm_buffer->ref, vcodec_drm_clear_map);
367         return 0;
368 }
369
370 static int
371 vcodec_drm_free_fd(struct vcodec_iommu_session_info *session_info, int fd)
372 {
373         struct device *dev = session_info->dev;
374         /* please double-check all maps have been release */
375         struct vcodec_drm_buffer *drm_buffer = NULL;
376
377         mutex_lock(&session_info->list_mutex);
378         drm_buffer = vcodec_drm_get_buffer_fd_no_lock(session_info, fd);
379
380         if (!drm_buffer) {
381                 dev_err(dev, "can not find %d buffer in list\n", fd);
382                 mutex_unlock(&session_info->list_mutex);
383
384                 return -EINVAL;
385         }
386         mutex_unlock(&session_info->list_mutex);
387
388         vcodec_drm_unmap_iommu(session_info, drm_buffer->index);
389
390         mutex_lock(&session_info->list_mutex);
391         if (atomic_read(&drm_buffer->ref.refcount) == 0) {
392                 dma_buf_put(drm_buffer->dma_buf);
393                 list_del_init(&drm_buffer->list);
394                 kfree(drm_buffer);
395                 session_info->buffer_nums--;
396         }
397         mutex_unlock(&session_info->list_mutex);
398
399         return 0;
400 }
401
/*
 * Drop every buffer still held by a session (final teardown path).
 *
 * NOTE(review): the list is walked without session_info->list_mutex
 * held, and kref_put() drops only one reference per buffer, so entries
 * mapped more than once are not released here -- both look intentional
 * for end-of-session cleanup, but worth confirming with the callers.
 */
static void
vcodec_drm_clear_session(struct vcodec_iommu_session_info *session_info)
{
        struct vcodec_drm_buffer *drm_buffer = NULL, *n;

        list_for_each_entry_safe(drm_buffer, n, &session_info->buffer_list,
                                 list) {
                /* release the mapping, then free the list entry */
                kref_put(&drm_buffer->ref, vcodec_drm_clear_map);
                vcodec_drm_free(session_info, drm_buffer->index);
        }
}
413
414 static void *
415 vcodec_drm_map_kernel(struct vcodec_iommu_session_info *session_info, int idx)
416 {
417         struct device *dev = session_info->dev;
418         struct vcodec_drm_buffer *drm_buffer;
419
420         mutex_lock(&session_info->list_mutex);
421         drm_buffer = vcodec_drm_get_buffer_no_lock(session_info, idx);
422         mutex_unlock(&session_info->list_mutex);
423
424         if (!drm_buffer) {
425                 dev_err(dev, "can not find %d buffer in list\n", idx);
426                 return NULL;
427         }
428
429         if (!drm_buffer->cpu_addr)
430                 drm_buffer->cpu_addr =
431                         vcodec_drm_sgt_map_kernel(drm_buffer);
432
433         kref_get(&drm_buffer->ref);
434
435         return drm_buffer->cpu_addr;
436 }
437
438 static int vcodec_drm_import(struct vcodec_iommu_session_info *session_info,
439                              int fd)
440 {
441         struct vcodec_drm_buffer *drm_buffer = NULL, *n;
442         struct vcodec_drm_buffer *oldest_buffer = NULL, *loop_buffer = NULL;
443         struct vcodec_iommu_info *iommu_info = session_info->iommu_info;
444         struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
445         struct iommu_domain *domain = drm_info->domain;
446         struct device *dev = session_info->dev;
447         struct dma_buf_attachment *attach;
448         struct sg_table *sgt;
449         struct dma_buf *dma_buf;
450         ktime_t oldest_time = ktime_set(0, 0);
451         int ret = 0;
452
453         dma_buf = dma_buf_get(fd);
454         if (IS_ERR(dma_buf)) {
455                 ret = PTR_ERR(dma_buf);
456                 return ret;
457         }
458
459         list_for_each_entry_safe(drm_buffer, n,
460                                  &session_info->buffer_list, list) {
461                 if (drm_buffer->dma_buf == dma_buf) {
462                         dma_buf_put(dma_buf);
463                         drm_buffer->last_used = ktime_get();
464                         return drm_buffer->index;
465                 }
466         }
467
468         drm_buffer = kzalloc(sizeof(*drm_buffer), GFP_KERNEL);
469         if (!drm_buffer) {
470                 ret = -ENOMEM;
471                 return ret;
472         }
473
474         drm_buffer->dma_buf = dma_buf;
475         drm_buffer->session_info = session_info;
476         drm_buffer->last_used = ktime_get();
477
478         kref_init(&drm_buffer->ref);
479
480         mutex_lock(&iommu_info->iommu_mutex);
481         drm_info = session_info->iommu_info->private;
482         if (!drm_info->attached) {
483                 ret = vcodec_drm_attach_unlock(session_info->iommu_info);
484                 if (ret)
485                         goto fail_out;
486         }
487
488         attach = dma_buf_attach(drm_buffer->dma_buf, dev);
489         if (IS_ERR(attach)) {
490                 ret = PTR_ERR(attach);
491                 goto fail_out;
492         }
493
494         get_dma_buf(drm_buffer->dma_buf);
495
496         sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);
497         if (IS_ERR(sgt)) {
498                 ret = PTR_ERR(sgt);
499                 goto fail_detach;
500         }
501
502         drm_buffer->iova = sg_dma_address(sgt->sgl);
503         drm_buffer->size = drm_buffer->dma_buf->size;
504
505         drm_buffer->attach = attach;
506         drm_buffer->sgt = sgt;
507
508         if (!drm_info->attached)
509                 iommu_detach_device(domain, dev);
510
511         mutex_unlock(&iommu_info->iommu_mutex);
512
513         INIT_LIST_HEAD(&drm_buffer->list);
514         mutex_lock(&session_info->list_mutex);
515         session_info->buffer_nums++;
516         if (session_info->buffer_nums > BUFFER_LIST_MAX_NUMS) {
517                 list_for_each_entry_safe(loop_buffer, n,
518                                  &session_info->buffer_list, list) {
519                         if (ktime_to_ns(oldest_time) == 0 ||
520                             ktime_after(oldest_time,
521                                         loop_buffer->last_used)) {
522                                 oldest_time = loop_buffer->last_used;
523                                 oldest_buffer = loop_buffer;
524                         }
525                 }
526                 kref_put(&oldest_buffer->ref, vcodec_drm_clear_map);
527                 dma_buf_put(oldest_buffer->dma_buf);
528                 list_del_init(&oldest_buffer->list);
529                 kfree(oldest_buffer);
530                 session_info->buffer_nums--;
531         }
532         drm_buffer->index = session_info->max_idx;
533         list_add_tail(&drm_buffer->list, &session_info->buffer_list);
534         session_info->max_idx++;
535         if ((session_info->max_idx & 0xfffffff) == 0)
536                 session_info->max_idx = 0;
537         mutex_unlock(&session_info->list_mutex);
538
539         return drm_buffer->index;
540
541 fail_detach:
542         dev_err(dev, "dmabuf map attach failed\n");
543         dma_buf_detach(drm_buffer->dma_buf, attach);
544         dma_buf_put(drm_buffer->dma_buf);
545 fail_out:
546         kfree(drm_buffer);
547         mutex_unlock(&iommu_info->iommu_mutex);
548
549         return ret;
550 }
551
552 static int vcodec_drm_create(struct vcodec_iommu_info *iommu_info)
553 {
554         struct vcodec_iommu_drm_info *drm_info;
555         int ret;
556
557         iommu_info->private = kzalloc(sizeof(*drm_info),
558                                       GFP_KERNEL);
559         drm_info = iommu_info->private;
560         if (!drm_info)
561                 return -ENOMEM;
562
563         drm_info->domain = iommu_domain_alloc(&platform_bus_type);
564         drm_info->attached = false;
565         if (!drm_info->domain)
566                 return -ENOMEM;
567
568         ret = iommu_get_dma_cookie(drm_info->domain);
569         if (ret)
570                 goto err_free_domain;
571
572         vcodec_drm_attach(iommu_info);
573
574         return 0;
575
576 err_free_domain:
577         iommu_domain_free(drm_info->domain);
578
579         return ret;
580 }
581
582 static int vcodec_drm_destroy(struct vcodec_iommu_info *iommu_info)
583 {
584         struct vcodec_iommu_drm_info *drm_info = iommu_info->private;
585
586         vcodec_drm_detach(iommu_info);
587         iommu_put_dma_cookie(drm_info->domain);
588         iommu_domain_free(drm_info->domain);
589
590         kfree(drm_info);
591         iommu_info->private = NULL;
592
593         return 0;
594 }
595
/*
 * Backend vtable installed by vcodec_iommu_drm_set_ops(); maps the
 * generic vcodec iommu operations onto this file's DRM/dma-buf
 * implementations.
 */
static struct vcodec_iommu_ops drm_ops = {
        .create = vcodec_drm_create,
        .import = vcodec_drm_import,
        .free = vcodec_drm_free,
        .free_fd = vcodec_drm_free_fd,
        .map_kernel = vcodec_drm_map_kernel,
        .unmap_kernel = vcodec_drm_unmap_kernel,
        .map_iommu = vcodec_drm_map_iommu,
        .unmap_iommu = vcodec_drm_unmap_iommu,
        .destroy = vcodec_drm_destroy,
        .dump = vcdoec_drm_dump_info,   /* typo'd symbol name kept as defined */
        .attach = vcodec_drm_attach,
        .detach = vcodec_drm_detach,
        .clear = vcodec_drm_clear_session,
};
611
612 void vcodec_iommu_drm_set_ops(struct vcodec_iommu_info *iommu_info)
613 {
614         if (!iommu_info)
615                 return;
616         iommu_info->ops = &drm_ops;
617 }