1 // SPDX-License-Identifier: GPL-2.0-only
3 * Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
4 * Copyright (C) 2017 Linaro Ltd.
7 #include <linux/iopoll.h>
8 #include <linux/list.h>
9 #include <linux/mutex.h>
10 #include <linux/pm_runtime.h>
11 #include <linux/slab.h>
12 #include <media/videobuf2-dma-sg.h>
13 #include <media/v4l2-mem2mem.h>
14 #include <asm/div64.h>
18 #include "hfi_helper.h"
19 #include "hfi_venus_io.h"
22 struct list_head list;
30 bool venus_helper_check_codec(struct venus_inst *inst, u32 v4l2_pixfmt)
32 struct venus_core *core = inst->core;
33 u32 session_type = inst->session_type;
36 switch (v4l2_pixfmt) {
37 case V4L2_PIX_FMT_H264:
38 codec = HFI_VIDEO_CODEC_H264;
40 case V4L2_PIX_FMT_H263:
41 codec = HFI_VIDEO_CODEC_H263;
43 case V4L2_PIX_FMT_MPEG1:
44 codec = HFI_VIDEO_CODEC_MPEG1;
46 case V4L2_PIX_FMT_MPEG2:
47 codec = HFI_VIDEO_CODEC_MPEG2;
49 case V4L2_PIX_FMT_MPEG4:
50 codec = HFI_VIDEO_CODEC_MPEG4;
52 case V4L2_PIX_FMT_VC1_ANNEX_G:
53 case V4L2_PIX_FMT_VC1_ANNEX_L:
54 codec = HFI_VIDEO_CODEC_VC1;
56 case V4L2_PIX_FMT_VP8:
57 codec = HFI_VIDEO_CODEC_VP8;
59 case V4L2_PIX_FMT_VP9:
60 codec = HFI_VIDEO_CODEC_VP9;
62 case V4L2_PIX_FMT_XVID:
63 codec = HFI_VIDEO_CODEC_DIVX;
65 case V4L2_PIX_FMT_HEVC:
66 codec = HFI_VIDEO_CODEC_HEVC;
72 if (session_type == VIDC_SESSION_TYPE_ENC && core->enc_codecs & codec)
75 if (session_type == VIDC_SESSION_TYPE_DEC && core->dec_codecs & codec)
80 EXPORT_SYMBOL_GPL(venus_helper_check_codec);
82 static int venus_helper_queue_dpb_bufs(struct venus_inst *inst)
87 list_for_each_entry(buf, &inst->dpbbufs, list) {
88 struct hfi_frame_data fdata;
90 memset(&fdata, 0, sizeof(fdata));
91 fdata.alloc_len = buf->size;
92 fdata.device_addr = buf->da;
93 fdata.buffer_type = buf->type;
95 ret = hfi_session_process_buf(inst, &fdata);
104 int venus_helper_free_dpb_bufs(struct venus_inst *inst)
106 struct intbuf *buf, *n;
108 list_for_each_entry_safe(buf, n, &inst->dpbbufs, list) {
109 list_del_init(&buf->list);
110 dma_free_attrs(inst->core->dev, buf->size, buf->va, buf->da,
115 INIT_LIST_HEAD(&inst->dpbbufs);
119 EXPORT_SYMBOL_GPL(venus_helper_free_dpb_bufs);
121 int venus_helper_alloc_dpb_bufs(struct venus_inst *inst)
123 struct venus_core *core = inst->core;
124 struct device *dev = core->dev;
125 enum hfi_version ver = core->res->hfi_version;
126 struct hfi_buffer_requirements bufreq;
127 u32 buftype = inst->dpb_buftype;
128 unsigned int dpb_size = 0;
134 /* no need to allocate dpb buffers */
138 if (inst->dpb_buftype == HFI_BUFFER_OUTPUT)
139 dpb_size = inst->output_buf_size;
140 else if (inst->dpb_buftype == HFI_BUFFER_OUTPUT2)
141 dpb_size = inst->output2_buf_size;
146 ret = venus_helper_get_bufreq(inst, buftype, &bufreq);
150 count = HFI_BUFREQ_COUNT_MIN(&bufreq, ver);
152 for (i = 0; i < count; i++) {
153 buf = kzalloc(sizeof(*buf), GFP_KERNEL);
160 buf->size = dpb_size;
161 buf->attrs = DMA_ATTR_WRITE_COMBINE |
162 DMA_ATTR_NO_KERNEL_MAPPING;
163 buf->va = dma_alloc_attrs(dev, buf->size, &buf->da, GFP_KERNEL,
171 list_add_tail(&buf->list, &inst->dpbbufs);
177 venus_helper_free_dpb_bufs(inst);
180 EXPORT_SYMBOL_GPL(venus_helper_alloc_dpb_bufs);
182 static int intbufs_set_buffer(struct venus_inst *inst, u32 type)
184 struct venus_core *core = inst->core;
185 struct device *dev = core->dev;
186 struct hfi_buffer_requirements bufreq;
187 struct hfi_buffer_desc bd;
192 ret = venus_helper_get_bufreq(inst, type, &bufreq);
199 for (i = 0; i < bufreq.count_actual; i++) {
200 buf = kzalloc(sizeof(*buf), GFP_KERNEL);
206 buf->type = bufreq.type;
207 buf->size = bufreq.size;
208 buf->attrs = DMA_ATTR_WRITE_COMBINE |
209 DMA_ATTR_NO_KERNEL_MAPPING;
210 buf->va = dma_alloc_attrs(dev, buf->size, &buf->da, GFP_KERNEL,
217 memset(&bd, 0, sizeof(bd));
218 bd.buffer_size = buf->size;
219 bd.buffer_type = buf->type;
221 bd.device_addr = buf->da;
223 ret = hfi_session_set_buffers(inst, &bd);
225 dev_err(dev, "set session buffers failed\n");
229 list_add_tail(&buf->list, &inst->internalbufs);
235 dma_free_attrs(dev, buf->size, buf->va, buf->da, buf->attrs);
241 static int intbufs_unset_buffers(struct venus_inst *inst)
243 struct hfi_buffer_desc bd = {0};
244 struct intbuf *buf, *n;
247 list_for_each_entry_safe(buf, n, &inst->internalbufs, list) {
248 bd.buffer_size = buf->size;
249 bd.buffer_type = buf->type;
251 bd.device_addr = buf->da;
252 bd.response_required = true;
254 ret = hfi_session_unset_buffers(inst, &bd);
256 list_del_init(&buf->list);
257 dma_free_attrs(inst->core->dev, buf->size, buf->va, buf->da,
265 static const unsigned int intbuf_types_1xx[] = {
266 HFI_BUFFER_INTERNAL_SCRATCH(HFI_VERSION_1XX),
267 HFI_BUFFER_INTERNAL_SCRATCH_1(HFI_VERSION_1XX),
268 HFI_BUFFER_INTERNAL_SCRATCH_2(HFI_VERSION_1XX),
269 HFI_BUFFER_INTERNAL_PERSIST,
270 HFI_BUFFER_INTERNAL_PERSIST_1,
273 static const unsigned int intbuf_types_4xx[] = {
274 HFI_BUFFER_INTERNAL_SCRATCH(HFI_VERSION_4XX),
275 HFI_BUFFER_INTERNAL_SCRATCH_1(HFI_VERSION_4XX),
276 HFI_BUFFER_INTERNAL_SCRATCH_2(HFI_VERSION_4XX),
277 HFI_BUFFER_INTERNAL_PERSIST,
278 HFI_BUFFER_INTERNAL_PERSIST_1,
281 static int intbufs_alloc(struct venus_inst *inst)
283 const unsigned int *intbuf;
287 if (IS_V4(inst->core)) {
288 arr_sz = ARRAY_SIZE(intbuf_types_4xx);
289 intbuf = intbuf_types_4xx;
291 arr_sz = ARRAY_SIZE(intbuf_types_1xx);
292 intbuf = intbuf_types_1xx;
295 for (i = 0; i < arr_sz; i++) {
296 ret = intbufs_set_buffer(inst, intbuf[i]);
304 intbufs_unset_buffers(inst);
/* Release all internal buffers; counterpart of intbufs_alloc(). */
static int intbufs_free(struct venus_inst *inst)
{
	return intbufs_unset_buffers(inst);
}
313 static u32 load_per_instance(struct venus_inst *inst)
317 if (!inst || !(inst->state >= INST_INIT && inst->state < INST_STOP))
320 mbs = (ALIGN(inst->width, 16) / 16) * (ALIGN(inst->height, 16) / 16);
322 return mbs * inst->fps;
325 static u32 load_per_type(struct venus_core *core, u32 session_type)
327 struct venus_inst *inst = NULL;
330 mutex_lock(&core->lock);
331 list_for_each_entry(inst, &core->instances, list) {
332 if (inst->session_type != session_type)
335 mbs_per_sec += load_per_instance(inst);
337 mutex_unlock(&core->lock);
342 static int load_scale_clocks(struct venus_core *core)
344 const struct freq_tbl *table = core->res->freq_tbl;
345 unsigned int num_rows = core->res->freq_tbl_size;
346 unsigned long freq = table[0].freq;
347 struct clk *clk = core->clks[0];
348 struct device *dev = core->dev;
353 mbs_per_sec = load_per_type(core, VIDC_SESSION_TYPE_ENC) +
354 load_per_type(core, VIDC_SESSION_TYPE_DEC);
356 if (mbs_per_sec > core->res->max_load)
357 dev_warn(dev, "HW is overloaded, needed: %d max: %d\n",
358 mbs_per_sec, core->res->max_load);
360 if (!mbs_per_sec && num_rows > 1) {
361 freq = table[num_rows - 1].freq;
365 for (i = 0; i < num_rows; i++) {
366 if (mbs_per_sec > table[i].load)
368 freq = table[i].freq;
373 ret = clk_set_rate(clk, freq);
377 ret = clk_set_rate(core->core0_clk, freq);
381 ret = clk_set_rate(core->core1_clk, freq);
388 dev_err(dev, "failed to set clock rate %lu (%d)\n", freq, ret);
392 static void fill_buffer_desc(const struct venus_buffer *buf,
393 struct hfi_buffer_desc *bd, bool response)
395 memset(bd, 0, sizeof(*bd));
396 bd->buffer_type = HFI_BUFFER_OUTPUT;
397 bd->buffer_size = buf->size;
399 bd->device_addr = buf->dma_addr;
400 bd->response_required = response;
403 static void return_buf_error(struct venus_inst *inst,
404 struct vb2_v4l2_buffer *vbuf)
406 struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
408 if (vbuf->vb2_buf.type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
409 v4l2_m2m_src_buf_remove_by_buf(m2m_ctx, vbuf);
411 v4l2_m2m_dst_buf_remove_by_buf(m2m_ctx, vbuf);
413 v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_ERROR);
417 session_process_buf(struct venus_inst *inst, struct vb2_v4l2_buffer *vbuf)
419 struct venus_buffer *buf = to_venus_buffer(vbuf);
420 struct vb2_buffer *vb = &vbuf->vb2_buf;
421 unsigned int type = vb->type;
422 struct hfi_frame_data fdata;
425 memset(&fdata, 0, sizeof(fdata));
426 fdata.alloc_len = buf->size;
427 fdata.device_addr = buf->dma_addr;
428 fdata.timestamp = vb->timestamp;
429 do_div(fdata.timestamp, NSEC_PER_USEC);
431 fdata.clnt_data = vbuf->vb2_buf.index;
433 if (type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
434 fdata.buffer_type = HFI_BUFFER_INPUT;
435 fdata.filled_len = vb2_get_plane_payload(vb, 0);
436 fdata.offset = vb->planes[0].data_offset;
438 if (vbuf->flags & V4L2_BUF_FLAG_LAST || !fdata.filled_len)
439 fdata.flags |= HFI_BUFFERFLAG_EOS;
440 } else if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
441 if (inst->session_type == VIDC_SESSION_TYPE_ENC)
442 fdata.buffer_type = HFI_BUFFER_OUTPUT;
444 fdata.buffer_type = inst->opb_buftype;
445 fdata.filled_len = 0;
449 ret = hfi_session_process_buf(inst, &fdata);
456 static bool is_dynamic_bufmode(struct venus_inst *inst)
458 struct venus_core *core = inst->core;
459 struct venus_caps *caps;
462 * v4 doesn't send BUFFER_ALLOC_MODE_SUPPORTED property and supports
463 * dynamic buffer mode by default for HFI_BUFFER_OUTPUT/OUTPUT2.
468 caps = venus_caps_by_codec(core, inst->hfi_codec, inst->session_type);
472 return caps->cap_bufs_mode_dynamic;
475 static int session_unregister_bufs(struct venus_inst *inst)
477 struct venus_buffer *buf, *n;
478 struct hfi_buffer_desc bd;
481 if (is_dynamic_bufmode(inst))
484 list_for_each_entry_safe(buf, n, &inst->registeredbufs, reg_list) {
485 fill_buffer_desc(buf, &bd, true);
486 ret = hfi_session_unset_buffers(inst, &bd);
487 list_del_init(&buf->reg_list);
493 static int session_register_bufs(struct venus_inst *inst)
495 struct venus_core *core = inst->core;
496 struct device *dev = core->dev;
497 struct hfi_buffer_desc bd;
498 struct venus_buffer *buf;
501 if (is_dynamic_bufmode(inst))
504 list_for_each_entry(buf, &inst->registeredbufs, reg_list) {
505 fill_buffer_desc(buf, &bd, false);
506 ret = hfi_session_set_buffers(inst, &bd);
508 dev_err(dev, "%s: set buffer failed\n", __func__);
516 static u32 to_hfi_raw_fmt(u32 v4l2_fmt)
519 case V4L2_PIX_FMT_NV12:
520 return HFI_COLOR_FORMAT_NV12;
521 case V4L2_PIX_FMT_NV21:
522 return HFI_COLOR_FORMAT_NV21;
530 int venus_helper_get_bufreq(struct venus_inst *inst, u32 type,
531 struct hfi_buffer_requirements *req)
533 u32 ptype = HFI_PROPERTY_CONFIG_BUFFER_REQUIREMENTS;
534 union hfi_get_property hprop;
539 memset(req, 0, sizeof(*req));
541 ret = hfi_session_get_property(inst, ptype, &hprop);
547 for (i = 0; i < HFI_BUFFER_TYPE_MAX; i++) {
548 if (hprop.bufreq[i].type != type)
552 memcpy(req, &hprop.bufreq[i], sizeof(*req));
559 EXPORT_SYMBOL_GPL(venus_helper_get_bufreq);
561 static u32 get_framesize_raw_nv12(u32 width, u32 height)
563 u32 y_stride, uv_stride, y_plane;
564 u32 y_sclines, uv_sclines, uv_plane;
567 y_stride = ALIGN(width, 128);
568 uv_stride = ALIGN(width, 128);
569 y_sclines = ALIGN(height, 32);
570 uv_sclines = ALIGN(((height + 1) >> 1), 16);
572 y_plane = y_stride * y_sclines;
573 uv_plane = uv_stride * uv_sclines + SZ_4K;
574 size = y_plane + uv_plane + SZ_8K;
576 return ALIGN(size, SZ_4K);
579 static u32 get_framesize_raw_nv12_ubwc(u32 width, u32 height)
581 u32 y_meta_stride, y_meta_plane;
582 u32 y_stride, y_plane;
583 u32 uv_meta_stride, uv_meta_plane;
584 u32 uv_stride, uv_plane;
585 u32 extradata = SZ_16K;
587 y_meta_stride = ALIGN(DIV_ROUND_UP(width, 32), 64);
588 y_meta_plane = y_meta_stride * ALIGN(DIV_ROUND_UP(height, 8), 16);
589 y_meta_plane = ALIGN(y_meta_plane, SZ_4K);
591 y_stride = ALIGN(width, 128);
592 y_plane = ALIGN(y_stride * ALIGN(height, 32), SZ_4K);
594 uv_meta_stride = ALIGN(DIV_ROUND_UP(width / 2, 16), 64);
595 uv_meta_plane = uv_meta_stride * ALIGN(DIV_ROUND_UP(height / 2, 8), 16);
596 uv_meta_plane = ALIGN(uv_meta_plane, SZ_4K);
598 uv_stride = ALIGN(width, 128);
599 uv_plane = ALIGN(uv_stride * ALIGN(height / 2, 32), SZ_4K);
601 return ALIGN(y_meta_plane + y_plane + uv_meta_plane + uv_plane +
602 max(extradata, y_stride * 48), SZ_4K);
605 u32 venus_helper_get_framesz_raw(u32 hfi_fmt, u32 width, u32 height)
608 case HFI_COLOR_FORMAT_NV12:
609 case HFI_COLOR_FORMAT_NV21:
610 return get_framesize_raw_nv12(width, height);
611 case HFI_COLOR_FORMAT_NV12_UBWC:
612 return get_framesize_raw_nv12_ubwc(width, height);
617 EXPORT_SYMBOL_GPL(venus_helper_get_framesz_raw);
619 u32 venus_helper_get_framesz(u32 v4l2_fmt, u32 width, u32 height)
625 case V4L2_PIX_FMT_MPEG:
626 case V4L2_PIX_FMT_H264:
627 case V4L2_PIX_FMT_H264_NO_SC:
628 case V4L2_PIX_FMT_H264_MVC:
629 case V4L2_PIX_FMT_H263:
630 case V4L2_PIX_FMT_MPEG1:
631 case V4L2_PIX_FMT_MPEG2:
632 case V4L2_PIX_FMT_MPEG4:
633 case V4L2_PIX_FMT_XVID:
634 case V4L2_PIX_FMT_VC1_ANNEX_G:
635 case V4L2_PIX_FMT_VC1_ANNEX_L:
636 case V4L2_PIX_FMT_VP8:
637 case V4L2_PIX_FMT_VP9:
638 case V4L2_PIX_FMT_HEVC:
647 sz = ALIGN(height, 32) * ALIGN(width, 32) * 3 / 2 / 2;
648 return ALIGN(sz, SZ_4K);
651 hfi_fmt = to_hfi_raw_fmt(v4l2_fmt);
655 return venus_helper_get_framesz_raw(hfi_fmt, width, height);
657 EXPORT_SYMBOL_GPL(venus_helper_get_framesz);
659 int venus_helper_set_input_resolution(struct venus_inst *inst,
660 unsigned int width, unsigned int height)
662 u32 ptype = HFI_PROPERTY_PARAM_FRAME_SIZE;
663 struct hfi_framesize fs;
665 fs.buffer_type = HFI_BUFFER_INPUT;
669 return hfi_session_set_property(inst, ptype, &fs);
671 EXPORT_SYMBOL_GPL(venus_helper_set_input_resolution);
673 int venus_helper_set_output_resolution(struct venus_inst *inst,
674 unsigned int width, unsigned int height,
677 u32 ptype = HFI_PROPERTY_PARAM_FRAME_SIZE;
678 struct hfi_framesize fs;
680 fs.buffer_type = buftype;
684 return hfi_session_set_property(inst, ptype, &fs);
686 EXPORT_SYMBOL_GPL(venus_helper_set_output_resolution);
688 int venus_helper_set_work_mode(struct venus_inst *inst, u32 mode)
690 const u32 ptype = HFI_PROPERTY_PARAM_WORK_MODE;
691 struct hfi_video_work_mode wm;
693 if (!IS_V4(inst->core))
696 wm.video_work_mode = mode;
698 return hfi_session_set_property(inst, ptype, &wm);
700 EXPORT_SYMBOL_GPL(venus_helper_set_work_mode);
702 int venus_helper_set_core_usage(struct venus_inst *inst, u32 usage)
704 const u32 ptype = HFI_PROPERTY_CONFIG_VIDEOCORES_USAGE;
705 struct hfi_videocores_usage_type cu;
707 if (!IS_V4(inst->core))
710 cu.video_core_enable_mask = usage;
712 return hfi_session_set_property(inst, ptype, &cu);
714 EXPORT_SYMBOL_GPL(venus_helper_set_core_usage);
716 int venus_helper_set_num_bufs(struct venus_inst *inst, unsigned int input_bufs,
717 unsigned int output_bufs,
718 unsigned int output2_bufs)
720 u32 ptype = HFI_PROPERTY_PARAM_BUFFER_COUNT_ACTUAL;
721 struct hfi_buffer_count_actual buf_count;
724 buf_count.type = HFI_BUFFER_INPUT;
725 buf_count.count_actual = input_bufs;
727 ret = hfi_session_set_property(inst, ptype, &buf_count);
731 buf_count.type = HFI_BUFFER_OUTPUT;
732 buf_count.count_actual = output_bufs;
734 ret = hfi_session_set_property(inst, ptype, &buf_count);
739 buf_count.type = HFI_BUFFER_OUTPUT2;
740 buf_count.count_actual = output2_bufs;
742 ret = hfi_session_set_property(inst, ptype, &buf_count);
747 EXPORT_SYMBOL_GPL(venus_helper_set_num_bufs);
749 int venus_helper_set_raw_format(struct venus_inst *inst, u32 hfi_format,
752 const u32 ptype = HFI_PROPERTY_PARAM_UNCOMPRESSED_FORMAT_SELECT;
753 struct hfi_uncompressed_format_select fmt;
755 fmt.buffer_type = buftype;
756 fmt.format = hfi_format;
758 return hfi_session_set_property(inst, ptype, &fmt);
760 EXPORT_SYMBOL_GPL(venus_helper_set_raw_format);
762 int venus_helper_set_color_format(struct venus_inst *inst, u32 pixfmt)
764 u32 hfi_format, buftype;
766 if (inst->session_type == VIDC_SESSION_TYPE_DEC)
767 buftype = HFI_BUFFER_OUTPUT;
768 else if (inst->session_type == VIDC_SESSION_TYPE_ENC)
769 buftype = HFI_BUFFER_INPUT;
773 hfi_format = to_hfi_raw_fmt(pixfmt);
777 return venus_helper_set_raw_format(inst, hfi_format, buftype);
779 EXPORT_SYMBOL_GPL(venus_helper_set_color_format);
781 int venus_helper_set_multistream(struct venus_inst *inst, bool out_en,
784 struct hfi_multi_stream multi = {0};
785 u32 ptype = HFI_PROPERTY_PARAM_VDEC_MULTI_STREAM;
788 multi.buffer_type = HFI_BUFFER_OUTPUT;
789 multi.enable = out_en;
791 ret = hfi_session_set_property(inst, ptype, &multi);
795 multi.buffer_type = HFI_BUFFER_OUTPUT2;
796 multi.enable = out2_en;
798 return hfi_session_set_property(inst, ptype, &multi);
800 EXPORT_SYMBOL_GPL(venus_helper_set_multistream);
802 int venus_helper_set_dyn_bufmode(struct venus_inst *inst)
804 const u32 ptype = HFI_PROPERTY_PARAM_BUFFER_ALLOC_MODE;
805 struct hfi_buffer_alloc_mode mode;
808 if (!is_dynamic_bufmode(inst))
811 mode.type = HFI_BUFFER_OUTPUT;
812 mode.mode = HFI_BUFFER_MODE_DYNAMIC;
814 ret = hfi_session_set_property(inst, ptype, &mode);
818 mode.type = HFI_BUFFER_OUTPUT2;
820 return hfi_session_set_property(inst, ptype, &mode);
822 EXPORT_SYMBOL_GPL(venus_helper_set_dyn_bufmode);
824 int venus_helper_set_bufsize(struct venus_inst *inst, u32 bufsize, u32 buftype)
826 const u32 ptype = HFI_PROPERTY_PARAM_BUFFER_SIZE_ACTUAL;
827 struct hfi_buffer_size_actual bufsz;
829 bufsz.type = buftype;
830 bufsz.size = bufsize;
832 return hfi_session_set_property(inst, ptype, &bufsz);
834 EXPORT_SYMBOL_GPL(venus_helper_set_bufsize);
836 unsigned int venus_helper_get_opb_size(struct venus_inst *inst)
838 /* the encoder has only one output */
839 if (inst->session_type == VIDC_SESSION_TYPE_ENC)
840 return inst->output_buf_size;
842 if (inst->opb_buftype == HFI_BUFFER_OUTPUT)
843 return inst->output_buf_size;
844 else if (inst->opb_buftype == HFI_BUFFER_OUTPUT2)
845 return inst->output2_buf_size;
849 EXPORT_SYMBOL_GPL(venus_helper_get_opb_size);
851 static void delayed_process_buf_func(struct work_struct *work)
853 struct venus_buffer *buf, *n;
854 struct venus_inst *inst;
857 inst = container_of(work, struct venus_inst, delayed_process_work);
859 mutex_lock(&inst->lock);
861 if (!(inst->streamon_out & inst->streamon_cap))
864 list_for_each_entry_safe(buf, n, &inst->delayed_process, ref_list) {
865 if (buf->flags & HFI_BUFFERFLAG_READONLY)
868 ret = session_process_buf(inst, &buf->vb);
870 return_buf_error(inst, &buf->vb);
872 list_del_init(&buf->ref_list);
875 mutex_unlock(&inst->lock);
878 void venus_helper_release_buf_ref(struct venus_inst *inst, unsigned int idx)
880 struct venus_buffer *buf;
882 list_for_each_entry(buf, &inst->registeredbufs, reg_list) {
883 if (buf->vb.vb2_buf.index == idx) {
884 buf->flags &= ~HFI_BUFFERFLAG_READONLY;
885 schedule_work(&inst->delayed_process_work);
890 EXPORT_SYMBOL_GPL(venus_helper_release_buf_ref);
892 void venus_helper_acquire_buf_ref(struct vb2_v4l2_buffer *vbuf)
894 struct venus_buffer *buf = to_venus_buffer(vbuf);
896 buf->flags |= HFI_BUFFERFLAG_READONLY;
898 EXPORT_SYMBOL_GPL(venus_helper_acquire_buf_ref);
900 static int is_buf_refed(struct venus_inst *inst, struct vb2_v4l2_buffer *vbuf)
902 struct venus_buffer *buf = to_venus_buffer(vbuf);
904 if (buf->flags & HFI_BUFFERFLAG_READONLY) {
905 list_add_tail(&buf->ref_list, &inst->delayed_process);
906 schedule_work(&inst->delayed_process_work);
913 struct vb2_v4l2_buffer *
914 venus_helper_find_buf(struct venus_inst *inst, unsigned int type, u32 idx)
916 struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
918 if (type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
919 return v4l2_m2m_src_buf_remove_by_idx(m2m_ctx, idx);
921 return v4l2_m2m_dst_buf_remove_by_idx(m2m_ctx, idx);
923 EXPORT_SYMBOL_GPL(venus_helper_find_buf);
925 int venus_helper_vb2_buf_init(struct vb2_buffer *vb)
927 struct venus_inst *inst = vb2_get_drv_priv(vb->vb2_queue);
928 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
929 struct venus_buffer *buf = to_venus_buffer(vbuf);
930 struct sg_table *sgt;
932 sgt = vb2_dma_sg_plane_desc(vb, 0);
936 buf->size = vb2_plane_size(vb, 0);
937 buf->dma_addr = sg_dma_address(sgt->sgl);
939 if (vb->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
940 list_add_tail(&buf->reg_list, &inst->registeredbufs);
944 EXPORT_SYMBOL_GPL(venus_helper_vb2_buf_init);
946 int venus_helper_vb2_buf_prepare(struct vb2_buffer *vb)
948 struct venus_inst *inst = vb2_get_drv_priv(vb->vb2_queue);
949 unsigned int out_buf_size = venus_helper_get_opb_size(inst);
951 if (vb->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE &&
952 vb2_plane_size(vb, 0) < out_buf_size)
954 if (vb->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE &&
955 vb2_plane_size(vb, 0) < inst->input_buf_size)
960 EXPORT_SYMBOL_GPL(venus_helper_vb2_buf_prepare);
962 void venus_helper_vb2_buf_queue(struct vb2_buffer *vb)
964 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
965 struct venus_inst *inst = vb2_get_drv_priv(vb->vb2_queue);
966 struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
969 mutex_lock(&inst->lock);
971 v4l2_m2m_buf_queue(m2m_ctx, vbuf);
973 if (!(inst->streamon_out & inst->streamon_cap))
976 ret = is_buf_refed(inst, vbuf);
980 ret = session_process_buf(inst, vbuf);
982 return_buf_error(inst, vbuf);
985 mutex_unlock(&inst->lock);
987 EXPORT_SYMBOL_GPL(venus_helper_vb2_buf_queue);
989 void venus_helper_buffers_done(struct venus_inst *inst,
990 enum vb2_buffer_state state)
992 struct vb2_v4l2_buffer *buf;
994 while ((buf = v4l2_m2m_src_buf_remove(inst->m2m_ctx)))
995 v4l2_m2m_buf_done(buf, state);
996 while ((buf = v4l2_m2m_dst_buf_remove(inst->m2m_ctx)))
997 v4l2_m2m_buf_done(buf, state);
999 EXPORT_SYMBOL_GPL(venus_helper_buffers_done);
1001 void venus_helper_vb2_stop_streaming(struct vb2_queue *q)
1003 struct venus_inst *inst = vb2_get_drv_priv(q);
1004 struct venus_core *core = inst->core;
1007 mutex_lock(&inst->lock);
1009 if (inst->streamon_out & inst->streamon_cap) {
1010 ret = hfi_session_stop(inst);
1011 ret |= hfi_session_unload_res(inst);
1012 ret |= session_unregister_bufs(inst);
1013 ret |= intbufs_free(inst);
1014 ret |= hfi_session_deinit(inst);
1016 if (inst->session_error || core->sys_error)
1020 hfi_session_abort(inst);
1022 venus_helper_free_dpb_bufs(inst);
1024 load_scale_clocks(core);
1025 INIT_LIST_HEAD(&inst->registeredbufs);
1028 venus_helper_buffers_done(inst, VB2_BUF_STATE_ERROR);
1030 if (q->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
1031 inst->streamon_out = 0;
1033 inst->streamon_cap = 0;
1035 mutex_unlock(&inst->lock);
1037 EXPORT_SYMBOL_GPL(venus_helper_vb2_stop_streaming);
1039 int venus_helper_vb2_start_streaming(struct venus_inst *inst)
1041 struct venus_core *core = inst->core;
1044 ret = intbufs_alloc(inst);
1048 ret = session_register_bufs(inst);
1052 load_scale_clocks(core);
1054 ret = hfi_session_load_res(inst);
1056 goto err_unreg_bufs;
1058 ret = hfi_session_start(inst);
1060 goto err_unload_res;
1062 ret = venus_helper_queue_dpb_bufs(inst);
1064 goto err_session_stop;
1069 hfi_session_stop(inst);
1071 hfi_session_unload_res(inst);
1073 session_unregister_bufs(inst);
1078 EXPORT_SYMBOL_GPL(venus_helper_vb2_start_streaming);
1080 void venus_helper_m2m_device_run(void *priv)
1082 struct venus_inst *inst = priv;
1083 struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
1084 struct v4l2_m2m_buffer *buf, *n;
1087 mutex_lock(&inst->lock);
1089 v4l2_m2m_for_each_dst_buf_safe(m2m_ctx, buf, n) {
1090 ret = session_process_buf(inst, &buf->vb);
1092 return_buf_error(inst, &buf->vb);
1095 v4l2_m2m_for_each_src_buf_safe(m2m_ctx, buf, n) {
1096 ret = session_process_buf(inst, &buf->vb);
1098 return_buf_error(inst, &buf->vb);
1101 mutex_unlock(&inst->lock);
1103 EXPORT_SYMBOL_GPL(venus_helper_m2m_device_run);
1105 void venus_helper_m2m_job_abort(void *priv)
1107 struct venus_inst *inst = priv;
1109 v4l2_m2m_job_finish(inst->m2m_dev, inst->m2m_ctx);
1111 EXPORT_SYMBOL_GPL(venus_helper_m2m_job_abort);
1113 void venus_helper_init_instance(struct venus_inst *inst)
1115 if (inst->session_type == VIDC_SESSION_TYPE_DEC) {
1116 INIT_LIST_HEAD(&inst->delayed_process);
1117 INIT_WORK(&inst->delayed_process_work,
1118 delayed_process_buf_func);
1121 EXPORT_SYMBOL_GPL(venus_helper_init_instance);
1123 static bool find_fmt_from_caps(struct venus_caps *caps, u32 buftype, u32 fmt)
1127 for (i = 0; i < caps->num_fmts; i++) {
1128 if (caps->fmts[i].buftype == buftype &&
1129 caps->fmts[i].fmt == fmt)
1136 int venus_helper_get_out_fmts(struct venus_inst *inst, u32 v4l2_fmt,
1137 u32 *out_fmt, u32 *out2_fmt, bool ubwc)
1139 struct venus_core *core = inst->core;
1140 struct venus_caps *caps;
1141 u32 ubwc_fmt, fmt = to_hfi_raw_fmt(v4l2_fmt);
1142 bool found, found_ubwc;
1144 *out_fmt = *out2_fmt = 0;
1149 caps = venus_caps_by_codec(core, inst->hfi_codec, inst->session_type);
1154 ubwc_fmt = fmt | HFI_COLOR_FORMAT_UBWC_BASE;
1155 found_ubwc = find_fmt_from_caps(caps, HFI_BUFFER_OUTPUT,
1157 found = find_fmt_from_caps(caps, HFI_BUFFER_OUTPUT2, fmt);
1159 if (found_ubwc && found) {
1160 *out_fmt = ubwc_fmt;
1166 found = find_fmt_from_caps(caps, HFI_BUFFER_OUTPUT, fmt);
1173 found = find_fmt_from_caps(caps, HFI_BUFFER_OUTPUT2, fmt);
1182 EXPORT_SYMBOL_GPL(venus_helper_get_out_fmts);
1184 int venus_helper_power_enable(struct venus_core *core, u32 session_type,
1187 void __iomem *ctrl, *stat;
1191 if (!IS_V3(core) && !IS_V4(core))
1195 if (session_type == VIDC_SESSION_TYPE_DEC)
1196 ctrl = core->base + WRAPPER_VDEC_VCODEC_POWER_CONTROL;
1198 ctrl = core->base + WRAPPER_VENC_VCODEC_POWER_CONTROL;
1207 if (session_type == VIDC_SESSION_TYPE_DEC) {
1208 ctrl = core->base + WRAPPER_VCODEC0_MMCC_POWER_CONTROL;
1209 stat = core->base + WRAPPER_VCODEC0_MMCC_POWER_STATUS;
1211 ctrl = core->base + WRAPPER_VCODEC1_MMCC_POWER_CONTROL;
1212 stat = core->base + WRAPPER_VCODEC1_MMCC_POWER_STATUS;
1218 ret = readl_poll_timeout(stat, val, val & BIT(1), 1, 100);
1224 ret = readl_poll_timeout(stat, val, !(val & BIT(1)), 1, 100);
1231 EXPORT_SYMBOL_GPL(venus_helper_power_enable);