Linux Kernel 3.7.1
s5p_mfc_opr_v6.c
1 /*
2  * drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c
3  *
4  * Samsung MFC (Multi Function Codec - FIMV) driver
5  * This file contains hw related functions.
6  *
7  * Copyright (c) 2012 Samsung Electronics Co., Ltd.
8  * http://www.samsung.com/
9  *
10  * This program is free software; you can redistribute it and/or modify
11  * it under the terms of the GNU General Public License version 2 as
12  * published by the Free Software Foundation.
13  */
14 
15 #undef DEBUG
16 
17 #include <linux/delay.h>
18 #include <linux/mm.h>
19 #include <linux/io.h>
20 #include <linux/jiffies.h>
21 #include <linux/firmware.h>
22 #include <linux/err.h>
23 #include <linux/sched.h>
24 #include <linux/dma-mapping.h>
25 
26 #include <asm/cacheflush.h>
27 
28 #include "s5p_mfc_common.h"
29 #include "s5p_mfc_cmd.h"
30 #include "s5p_mfc_intr.h"
31 #include "s5p_mfc_pm.h"
32 #include "s5p_mfc_debug.h"
33 #include "s5p_mfc_opr.h"
34 #include "s5p_mfc_opr_v6.h"
35 
36 /* #define S5P_MFC_DEBUG_REGWRITE */
37 #ifdef S5P_MFC_DEBUG_REGWRITE
38 #undef writel
39 #define writel(v, r) \
40  do { \
41  pr_err("MFCWRITE(%p): %08x\n", r, (unsigned int)v); \
42  __raw_writel(v, r); \
43  } while (0)
44 #endif /* S5P_MFC_DEBUG_REGWRITE */
45 
46 #define READL(offset) readl(dev->regs_base + (offset))
47 #define WRITEL(data, offset) writel((data), dev->regs_base + (offset))
48 #define OFFSETA(x) (((x) - dev->port_a) >> S5P_FIMV_MEM_OFFSET)
49 #define OFFSETB(x) (((x) - dev->port_b) >> S5P_FIMV_MEM_OFFSET)
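As a reading aid (added here, not part of the original source): READL()/WRITEL() simply offset into the memory-mapped register block, and the optional S5P_MFC_DEBUG_REGWRITE override above traces every register write. For example, the write on source line 404 below expands roughly as:

    /* Illustration of the WRITEL() accessor defined above */
    WRITEL(start_num_byte, S5P_FIMV_D_CPB_BUFFER_OFFSET_V6);
        /* is */
    writel(start_num_byte, dev->regs_base + S5P_FIMV_D_CPB_BUFFER_OFFSET_V6);
    /* and, with S5P_MFC_DEBUG_REGWRITE defined, the macro at the top of the
     * file additionally logs "MFCWRITE(%p): %08x" before __raw_writel(). */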
50 
51 /* Allocate temporary buffers for decoding */
53 {
54  /* NOP */
55 
56  return 0;
57 }
58 
59 /* Release temporary buffers for decoding */
61 {
62  /* NOP */
63 }
64 
66 {
67  /* NOP */
68  return -1;
69 }
70 
71 /* Allocate codec buffers */
73 {
74  struct s5p_mfc_dev *dev = ctx->dev;
75  unsigned int mb_width, mb_height;
76 
77  mb_width = MB_WIDTH(ctx->img_width);
78  mb_height = MB_HEIGHT(ctx->img_height);
79 
80  if (ctx->type == MFCINST_DECODER) {
81  mfc_debug(2, "Luma size:%d Chroma size:%d MV size:%d\n",
82  ctx->luma_size, ctx->chroma_size, ctx->mv_size);
83  mfc_debug(2, "Total bufs: %d\n", ctx->total_dpb_count);
84  } else if (ctx->type == MFCINST_ENCODER) {
86  ALIGN(S5P_FIMV_TMV_BUFFER_SIZE_V6(mb_width, mb_height),
88  ctx->luma_dpb_size = ALIGN((mb_width * mb_height) *
91  ctx->chroma_dpb_size = ALIGN((mb_width * mb_height) *
95  ctx->img_width, ctx->img_height,
96  mb_width, mb_height),
98 
99  mfc_debug(2, "recon luma size: %d chroma size: %d\n",
100  ctx->luma_dpb_size, ctx->chroma_dpb_size);
101  } else {
102  return -EINVAL;
103  }
104 
105  /* Codecs have different memory requirements */
106  switch (ctx->codec_mode) {
109  ctx->scratch_buf_size =
111  mb_width,
112  mb_height);
115  ctx->bank1_size =
116  ctx->scratch_buf_size +
117  (ctx->mv_count * ctx->mv_size);
118  break;
120  ctx->scratch_buf_size =
122  mb_width,
123  mb_height);
126  ctx->bank1_size = ctx->scratch_buf_size;
127  break;
130  ctx->scratch_buf_size =
132  mb_width,
133  mb_height);
136  ctx->bank1_size = ctx->scratch_buf_size;
137  break;
139  ctx->bank1_size = 0;
140  ctx->bank2_size = 0;
141  break;
143  ctx->scratch_buf_size =
145  mb_width,
146  mb_height);
149  ctx->bank1_size = ctx->scratch_buf_size;
150  break;
152  ctx->scratch_buf_size =
154  mb_width,
155  mb_height);
158  ctx->bank1_size = ctx->scratch_buf_size;
159  break;
161  ctx->scratch_buf_size =
163  mb_width,
164  mb_height);
167  ctx->bank1_size =
168  ctx->scratch_buf_size + ctx->tmv_buffer_size +
169  (ctx->dpb_count * (ctx->luma_dpb_size +
170  ctx->chroma_dpb_size + ctx->me_buffer_size));
171  ctx->bank2_size = 0;
172  break;
175  ctx->scratch_buf_size =
177  mb_width,
178  mb_height);
181  ctx->bank1_size =
182  ctx->scratch_buf_size + ctx->tmv_buffer_size +
183  (ctx->dpb_count * (ctx->luma_dpb_size +
184  ctx->chroma_dpb_size + ctx->me_buffer_size));
185  ctx->bank2_size = 0;
186  break;
187  default:
188  break;
189  }
190 
191  /* Allocate only if memory from bank 1 is necessary */
192  if (ctx->bank1_size > 0) {
193  ctx->bank1_buf = vb2_dma_contig_memops.alloc(
195  if (IS_ERR(ctx->bank1_buf)) {
196  ctx->bank1_buf = 0;
197  pr_err("Buf alloc for decoding failed (port A)\n");
198  return -ENOMEM;
199  }
200  ctx->bank1_phys = s5p_mfc_mem_cookie(
202  BUG_ON(ctx->bank1_phys & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
203  }
204 
205  return 0;
206 }
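A note on the BUG_ON() alignment check above (the concrete order value below is an illustrative assumption, not taken from this listing):

    /* BUG_ON(ctx->bank1_phys & ((1 << MFC_BANK1_ALIGN_ORDER) - 1)) asserts
     * that the bank 1 base address is aligned to 2^MFC_BANK1_ALIGN_ORDER
     * bytes. If the order were, say, 13, the mask would be 0x1fff and any
     * base that is not a multiple of 8 KiB would trigger the BUG_ON. */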
207 
208 /* Release buffers allocated for codec */
210 {
211  if (ctx->bank1_buf) {
213  ctx->bank1_buf = 0;
214  ctx->bank1_phys = 0;
215  ctx->bank1_size = 0;
216  }
217 }
218 
219 /* Allocate memory for instance data buffer */
221 {
222  struct s5p_mfc_dev *dev = ctx->dev;
223  struct s5p_mfc_buf_size_v6 *buf_size = dev->variant->buf_size->priv;
224 
225  mfc_debug_enter();
226 
227  switch (ctx->codec_mode) {
230  ctx->ctx.size = buf_size->h264_dec_ctx;
231  break;
238  ctx->ctx.size = buf_size->other_dec_ctx;
239  break;
241  ctx->ctx.size = buf_size->h264_enc_ctx;
242  break;
245  ctx->ctx.size = buf_size->other_enc_ctx;
246  break;
247  default:
248  ctx->ctx.size = 0;
249  mfc_err("Codec type(%d) should be checked!\n", ctx->codec_mode);
250  break;
251  }
252 
253  ctx->ctx.alloc = vb2_dma_contig_memops.alloc(
254  dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], ctx->ctx.size);
255  if (IS_ERR(ctx->ctx.alloc)) {
256  mfc_err("Allocating context buffer failed.\n");
257  return PTR_ERR(ctx->ctx.alloc);
258  }
259 
260  ctx->ctx.dma = s5p_mfc_mem_cookie(
261  dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], ctx->ctx.alloc);
262 
263  ctx->ctx.virt = vb2_dma_contig_memops.vaddr(ctx->ctx.alloc);
264  if (!ctx->ctx.virt) {
265  vb2_dma_contig_memops.put(ctx->ctx.alloc);
266  ctx->ctx.alloc = NULL;
267  ctx->ctx.dma = 0;
268  ctx->ctx.virt = NULL;
269 
270  mfc_err("Remapping context buffer failed.\n");
271  return -ENOMEM;
272  }
273 
274  memset(ctx->ctx.virt, 0, ctx->ctx.size);
275  wmb();
276 
277  mfc_debug_leave();
278 
279  return 0;
280 }
281 
282 /* Release instance buffer */
284 {
285  mfc_debug_enter();
286 
287  if (ctx->ctx.alloc) {
288  vb2_dma_contig_memops.put(ctx->ctx.alloc);
289  ctx->ctx.alloc = NULL;
290  ctx->ctx.dma = 0;
291  ctx->ctx.virt = NULL;
292  }
293 
294  mfc_debug_leave();
295 }
296 
297 /* Allocate context buffers for SYS_INIT */
299 {
300  struct s5p_mfc_buf_size_v6 *buf_size = dev->variant->buf_size->priv;
301 
302  mfc_debug_enter();
303 
304  dev->ctx_buf.alloc = vb2_dma_contig_memops.alloc(
305  dev->alloc_ctx[MFC_BANK1_ALLOC_CTX], buf_size->dev_ctx);
306  if (IS_ERR(dev->ctx_buf.alloc)) {
307  mfc_err("Allocating DESC buffer failed.\n");
308  return PTR_ERR(dev->ctx_buf.alloc);
309  }
310 
311  dev->ctx_buf.dma = s5p_mfc_mem_cookie(
313  dev->ctx_buf.alloc);
314 
315  dev->ctx_buf.virt = vb2_dma_contig_memops.vaddr(dev->ctx_buf.alloc);
316  if (!dev->ctx_buf.virt) {
317  vb2_dma_contig_memops.put(dev->ctx_buf.alloc);
318  dev->ctx_buf.alloc = NULL;
319  dev->ctx_buf.dma = 0;
320 
321  mfc_err("Remapping DESC buffer failed.\n");
322  return -ENOMEM;
323  }
324 
325  memset(dev->ctx_buf.virt, 0, buf_size->dev_ctx);
326  wmb();
327 
328  mfc_debug_leave();
329 
330  return 0;
331 }
332 
333 /* Release context buffers for SYS_INIT */
335 {
336  if (dev->ctx_buf.alloc) {
337  vb2_dma_contig_memops.put(dev->ctx_buf.alloc);
338  dev->ctx_buf.alloc = NULL;
339  dev->ctx_buf.dma = 0;
340  dev->ctx_buf.virt = NULL;
341  }
342 }
343 
344 static int calc_plane(int width, int height)
345 {
346  int mbX, mbY;
347 
350 
351  if (width * height < S5P_FIMV_MAX_FRAME_SIZE_V6)
352  mbY = (mbY + 1) / 2 * 2;
353 
354  return (mbX * S5P_FIMV_NUM_PIXELS_IN_MB_COL_V6) *
356 }
357 
359 {
362  mfc_debug(2, "SEQ Done: Movie dimensions %dx%d,\n"
363  "buffer dimensions: %dx%d\n", ctx->img_width,
364  ctx->img_height, ctx->buf_width, ctx->buf_height);
365 
366  ctx->luma_size = calc_plane(ctx->img_width, ctx->img_height);
367  ctx->chroma_size = calc_plane(ctx->img_width, (ctx->img_height >> 1));
368  if (ctx->codec_mode == S5P_MFC_CODEC_H264_DEC ||
371  ctx->img_height);
372  ctx->mv_size = ALIGN(ctx->mv_size, 16);
373  } else {
374  ctx->mv_size = 0;
375  }
376 }
377 
379 {
380  unsigned int mb_width, mb_height;
381 
382  mb_width = MB_WIDTH(ctx->img_width);
383  mb_height = MB_HEIGHT(ctx->img_height);
384 
386  ctx->luma_size = ALIGN((mb_width * mb_height) * 256, 256);
387  ctx->chroma_size = ALIGN((mb_width * mb_height) * 128, 256);
388 }
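A worked example of the encoder source-size computation above, assuming MB_WIDTH()/MB_HEIGHT() round the pixel dimensions up to whole 16x16 macroblocks (the macro definitions are not shown in this listing):

    /* Hypothetical 1280x720 NV12 source frame:
     *   mb_width  = 1280 / 16 = 80
     *   mb_height =  720 / 16 = 45
     *   luma_size   = ALIGN(80 * 45 * 256, 256) = 921600 bytes (= 1280 * 720)
     *   chroma_size = ALIGN(80 * 45 * 128, 256) = 460800 bytes (half the luma)
     */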
389 
390 /* Set registers for decoding stream buffer */
392  unsigned int start_num_byte, unsigned int strm_size)
393 {
394  struct s5p_mfc_dev *dev = ctx->dev;
395  struct s5p_mfc_buf_size *buf_size = dev->variant->buf_size;
396 
397  mfc_debug_enter();
398  mfc_debug(2, "inst_no: %d, buf_addr: 0x%08x,\n"
399  "buf_size: 0x%08x (%d)\n",
400  ctx->inst_no, buf_addr, strm_size, strm_size);
404  WRITEL(start_num_byte, S5P_FIMV_D_CPB_BUFFER_OFFSET_V6);
405 
406  mfc_debug_leave();
407  return 0;
408 }
409 
410 /* Set decoding frame buffer */
412 {
413  unsigned int frame_size, i;
414  unsigned int frame_size_ch, frame_size_mv;
415  struct s5p_mfc_dev *dev = ctx->dev;
416  size_t buf_addr1;
417  int buf_size1;
418  int align_gap;
419 
420  buf_addr1 = ctx->bank1_phys;
421  buf_size1 = ctx->bank1_size;
422 
423  mfc_debug(2, "Buf1: %p (%d)\n", (void *)buf_addr1, buf_size1);
424  mfc_debug(2, "Total DPB COUNT: %d\n", ctx->total_dpb_count);
425  mfc_debug(2, "Setting display delay to %d\n", ctx->display_delay);
426 
430 
433  buf_addr1 += ctx->scratch_buf_size;
434  buf_size1 -= ctx->scratch_buf_size;
435 
436  if (ctx->codec_mode == S5P_FIMV_CODEC_H264_DEC ||
440  }
441 
442  frame_size = ctx->luma_size;
443  frame_size_ch = ctx->chroma_size;
444  frame_size_mv = ctx->mv_size;
445  mfc_debug(2, "Frame size: %d ch: %d mv: %d\n",
446  frame_size, frame_size_ch, frame_size_mv);
447 
448  for (i = 0; i < ctx->total_dpb_count; i++) {
449  /* Bank2 */
450  mfc_debug(2, "Luma %d: %x\n", i,
451  ctx->dst_bufs[i].cookie.raw.luma);
452  WRITEL(ctx->dst_bufs[i].cookie.raw.luma,
453  S5P_FIMV_D_LUMA_DPB_V6 + i * 4);
454  mfc_debug(2, "\tChroma %d: %x\n", i,
455  ctx->dst_bufs[i].cookie.raw.chroma);
456  WRITEL(ctx->dst_bufs[i].cookie.raw.chroma,
457  S5P_FIMV_D_CHROMA_DPB_V6 + i * 4);
458  }
459  if (ctx->codec_mode == S5P_MFC_CODEC_H264_DEC ||
461  for (i = 0; i < ctx->mv_count; i++) {
462  /* To test alignment */
463  align_gap = buf_addr1;
464  buf_addr1 = ALIGN(buf_addr1, 16);
465  align_gap = buf_addr1 - align_gap;
466  buf_size1 -= align_gap;
467 
468  mfc_debug(2, "\tBuf1: %x, size: %d\n",
469  buf_addr1, buf_size1);
470  WRITEL(buf_addr1, S5P_FIMV_D_MV_BUFFER_V6 + i * 4);
471  buf_addr1 += frame_size_mv;
472  buf_size1 -= frame_size_mv;
473  }
474  }
475 
476  mfc_debug(2, "Buf1: %u, buf_size1: %d (frames %d)\n",
477  buf_addr1, buf_size1, ctx->total_dpb_count);
478  if (buf_size1 < 0) {
479  mfc_debug(2, "Not enough memory has been allocated.\n");
480  return -ENOMEM;
481  }
482 
484  s5p_mfc_hw_call(dev->mfc_cmds, cmd_host2risc, dev,
486 
487  mfc_debug(2, "After setting buffers.\n");
488  return 0;
489 }
490 
491 /* Set registers for encoding stream buffer */
493  unsigned long addr, unsigned int size)
494 {
495  struct s5p_mfc_dev *dev = ctx->dev;
496 
497  WRITEL(addr, S5P_FIMV_E_STREAM_BUFFER_ADDR_V6); /* 16B align */
499 
500  mfc_debug(2, "stream buf addr: 0x%08lx, size: %d",
501  addr, size);
502 
503  return 0;
504 }
505 
507  unsigned long y_addr, unsigned long c_addr)
508 {
509  struct s5p_mfc_dev *dev = ctx->dev;
510 
511  WRITEL(y_addr, S5P_FIMV_E_SOURCE_LUMA_ADDR_V6); /* 256B align */
513 
514  mfc_debug(2, "enc src y buf addr: 0x%08lx", y_addr);
515  mfc_debug(2, "enc src c buf addr: 0x%08lx", c_addr);
516 }
517 
519  unsigned long *y_addr, unsigned long *c_addr)
520 {
521  struct s5p_mfc_dev *dev = ctx->dev;
522  unsigned long enc_recon_y_addr, enc_recon_c_addr;
523 
526 
527  enc_recon_y_addr = READL(S5P_FIMV_E_RECON_LUMA_DPB_ADDR_V6);
528  enc_recon_c_addr = READL(S5P_FIMV_E_RECON_CHROMA_DPB_ADDR_V6);
529 
530  mfc_debug(2, "recon y addr: 0x%08lx", enc_recon_y_addr);
531  mfc_debug(2, "recon c addr: 0x%08lx", enc_recon_c_addr);
532 }
533 
534 /* Set encoding ref & codec buffer */
536 {
537  struct s5p_mfc_dev *dev = ctx->dev;
538  size_t buf_addr1, buf_size1;
539  int i;
540 
541  mfc_debug_enter();
542 
543  buf_addr1 = ctx->bank1_phys;
544  buf_size1 = ctx->bank1_size;
545 
546  mfc_debug(2, "Buf1: %p (%d)\n", (void *)buf_addr1, buf_size1);
547 
548  for (i = 0; i < ctx->dpb_count; i++) {
549  WRITEL(buf_addr1, S5P_FIMV_E_LUMA_DPB_V6 + (4 * i));
550  buf_addr1 += ctx->luma_dpb_size;
551  WRITEL(buf_addr1, S5P_FIMV_E_CHROMA_DPB_V6 + (4 * i));
552  buf_addr1 += ctx->chroma_dpb_size;
553  WRITEL(buf_addr1, S5P_FIMV_E_ME_BUFFER_V6 + (4 * i));
554  buf_addr1 += ctx->me_buffer_size;
555  buf_size1 -= (ctx->luma_dpb_size + ctx->chroma_dpb_size +
556  ctx->me_buffer_size);
557  }
558 
561  buf_addr1 += ctx->scratch_buf_size;
562  buf_size1 -= ctx->scratch_buf_size;
563 
564  WRITEL(buf_addr1, S5P_FIMV_E_TMV_BUFFER0_V6);
565  buf_addr1 += ctx->tmv_buffer_size >> 1;
566  WRITEL(buf_addr1, S5P_FIMV_E_TMV_BUFFER1_V6);
567  buf_addr1 += ctx->tmv_buffer_size >> 1;
568  buf_size1 -= ctx->tmv_buffer_size;
569 
570  mfc_debug(2, "Buf1: %u, buf_size1: %d (ref frames %d)\n",
571  buf_addr1, buf_size1, ctx->dpb_count);
572  if (buf_size1 < 0) {
573  mfc_debug(2, "Not enough memory has been allocated.\n");
574  return -ENOMEM;
575  }
576 
578  s5p_mfc_hw_call(dev->mfc_cmds, cmd_host2risc, dev,
580 
581  mfc_debug_leave();
582 
583  return 0;
584 }
585 
586 static int s5p_mfc_set_slice_mode(struct s5p_mfc_ctx *ctx)
587 {
588  struct s5p_mfc_dev *dev = ctx->dev;
589 
590  /* multi-slice control */
591  /* multi-slice MB number or bit size */
595  } else if (ctx->slice_mode ==
598  } else {
601  }
602 
603  return 0;
604 }
605 
606 static int s5p_mfc_set_enc_params(struct s5p_mfc_ctx *ctx)
607 {
608  struct s5p_mfc_dev *dev = ctx->dev;
609  struct s5p_mfc_enc_params *p = &ctx->enc_params;
610  unsigned int reg = 0;
611 
612  mfc_debug_enter();
613 
614  /* width */
615  WRITEL(ctx->img_width, S5P_FIMV_E_FRAME_WIDTH_V6); /* 16 align */
616  /* height */
617  WRITEL(ctx->img_height, S5P_FIMV_E_FRAME_HEIGHT_V6); /* 16 align */
618 
619  /* cropped width */
621  /* cropped height */
623  /* cropped offset */
625 
626  /* pictype : IDR period */
627  reg = 0;
628  reg |= p->gop_size & 0xFFFF;
630 
631  /* multi-slice control */
632  /* multi-slice MB number or bit size */
633  ctx->slice_mode = p->slice_mode;
634  reg = 0;
636  reg |= (0x1 << 3);
638  ctx->slice_size.mb = p->slice_mb;
640  reg |= (0x1 << 3);
642  ctx->slice_size.bits = p->slice_bit;
643  } else {
644  reg &= ~(0x1 << 3);
646  }
647 
648  s5p_mfc_set_slice_mode(ctx);
649 
650  /* cyclic intra refresh */
653  if (p->intra_refresh_mb == 0)
654  reg &= ~(0x1 << 4);
655  else
656  reg |= (0x1 << 4);
658 
659  /* 'NON_REFERENCE_STORE_ENABLE' for debugging */
661  reg &= ~(0x1 << 9);
663 
664  /* memory structure cur. frame */
665  if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12M) {
666  /* 0: Linear, 1: 2D tiled*/
668  reg &= ~(0x1 << 7);
670  /* 0: NV12(CbCr), 1: NV21(CrCb) */
672  } else if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV21M) {
673  /* 0: Linear, 1: 2D tiled*/
675  reg &= ~(0x1 << 7);
677  /* 0: NV12(CbCr), 1: NV21(CrCb) */
679  } else if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12MT_16X16) {
680  /* 0: Linear, 1: 2D tiled*/
682  reg |= (0x1 << 7);
684  /* 0: NV12(CbCr), 1: NV21(CrCb) */
686  }
687 
688  /* memory structure recon. frame */
689  /* 0: Linear, 1: 2D tiled */
691  reg |= (0x1 << 8);
693 
694  /* padding control & value */
696  if (p->pad) {
697  reg = 0;
699  reg |= (1 << 31);
701  reg |= ((p->pad_cr & 0xFF) << 16);
703  reg |= ((p->pad_cb & 0xFF) << 8);
705  reg |= p->pad_luma & 0xFF;
707  }
708 
709  /* rate control config. */
710  reg = 0;
711  /* frame-level rate control */
712  reg |= ((p->rc_frame & 0x1) << 9);
714 
715  /* bit rate */
716  if (p->rc_frame)
717  WRITEL(p->rc_bitrate,
719  else
721 
722  /* reaction coefficient */
723  if (p->rc_frame) {
724  if (p->rc_reaction_coeff < TIGHT_CBR_MAX) /* tight CBR */
726  else /* loose CBR */
728  }
729 
730  /* seq header ctrl */
732  reg &= ~(0x1 << 2);
733  reg |= ((p->seq_hdr_mode & 0x1) << 2);
734 
735  /* frame skip mode */
736  reg &= ~(0x3);
737  reg |= (p->frame_skip_mode & 0x3);
739 
740  /* 'DROP_CONTROL_ENABLE', disable */
742  reg &= ~(0x1 << 10);
744 
745  /* setting for MV range [16, 256] */
746  reg = 0;
747  reg &= ~(0x3FFF);
748  reg = 256;
750 
751  reg = 0;
752  reg &= ~(0x3FFF);
753  reg = 256;
755 
761 
765 
768 
769  mfc_debug_leave();
770 
771  return 0;
772 }
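A short worked example of the bit packing used for the padding control word in the function above (the pad values are arbitrary, chosen only for illustration):

    /* With p->pad set, p->pad_luma = 16, p->pad_cb = 128, p->pad_cr = 128:
     *   reg  = (1 << 31)           -> 0x80000000  (padding enable)
     *   reg |= (128 & 0xFF) << 16  -> 0x00800000  (Cr padding value)
     *   reg |= (128 & 0xFF) << 8   -> 0x00008000  (Cb padding value)
     *   reg |= 16 & 0xFF           -> 0x00000010  (luma padding value)
     *   value written to the register: 0x80808010
     */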
773 
774 static int s5p_mfc_set_enc_params_h264(struct s5p_mfc_ctx *ctx)
775 {
776  struct s5p_mfc_dev *dev = ctx->dev;
777  struct s5p_mfc_enc_params *p = &ctx->enc_params;
778  struct s5p_mfc_h264_enc_params *p_h264 = &p->codec.h264;
779  unsigned int reg = 0;
780  int i;
781 
782  mfc_debug_enter();
783 
784  s5p_mfc_set_enc_params(ctx);
785 
786  /* pictype : number of B */
788  reg &= ~(0x3 << 16);
789  reg |= ((p->num_b_frame & 0x3) << 16);
791 
792  /* profile & level */
793  reg = 0;
795  reg |= ((p_h264->level & 0xFF) << 8);
797  reg |= p_h264->profile & 0x3F;
799 
800  /* rate control config. */
803  reg &= ~(0x1 << 8);
804  reg |= ((p->rc_mb & 0x1) << 8);
807  reg &= ~(0x3F);
808  reg |= p_h264->rc_frame_qp & 0x3F;
810 
811  /* max & min value of QP */
812  reg = 0;
814  reg |= ((p_h264->rc_max_qp & 0x3F) << 8);
816  reg |= p_h264->rc_min_qp & 0x3F;
818 
819  /* other QPs */
821  if (!p->rc_frame && !p->rc_mb) {
822  reg = 0;
823  reg |= ((p_h264->rc_b_frame_qp & 0x3F) << 16);
824  reg |= ((p_h264->rc_p_frame_qp & 0x3F) << 8);
825  reg |= p_h264->rc_frame_qp & 0x3F;
827  }
828 
829  /* frame rate */
830  if (p->rc_frame && p->rc_framerate_num && p->rc_framerate_denom) {
831  reg = 0;
832  reg |= ((p->rc_framerate_num & 0xFFFF) << 16);
833  reg |= p->rc_framerate_denom & 0xFFFF;
835  }
836 
837  /* vbv buffer size */
838  if (p->frame_skip_mode ==
840  WRITEL(p_h264->cpb_size & 0xFFFF,
842 
843  if (p->rc_frame)
845  }
846 
847  /* interlace */
848  reg = 0;
849  reg |= ((p_h264->interlace & 0x1) << 3);
851 
852  /* height */
853  if (p_h264->interlace) {
854  WRITEL(ctx->img_height >> 1,
855  S5P_FIMV_E_FRAME_HEIGHT_V6); /* 32 align */
856  /* cropped height */
857  WRITEL(ctx->img_height >> 1,
859  }
860 
861  /* loop filter ctrl */
863  reg &= ~(0x3 << 1);
864  reg |= ((p_h264->loop_filter_mode & 0x3) << 1);
866 
867  /* loopfilter alpha offset */
868  if (p_h264->loop_filter_alpha < 0) {
869  reg = 0x10;
870  reg |= (0xFF - p_h264->loop_filter_alpha) + 1;
871  } else {
872  reg = 0x00;
873  reg |= (p_h264->loop_filter_alpha & 0xF);
874  }
876 
877  /* loopfilter beta offset */
878  if (p_h264->loop_filter_beta < 0) {
879  reg = 0x10;
880  reg |= (0xFF - p_h264->loop_filter_beta) + 1;
881  } else {
882  reg = 0x00;
883  reg |= (p_h264->loop_filter_beta & 0xF);
884  }
886 
887  /* entropy coding mode */
889  reg &= ~(0x1);
890  reg |= p_h264->entropy_mode & 0x1;
892 
893  /* number of ref. picture */
895  reg &= ~(0x1 << 7);
896  reg |= (((p_h264->num_ref_pic_4p - 1) & 0x1) << 7);
898 
899  /* 8x8 transform enable */
901  reg &= ~(0x3 << 12);
902  reg |= ((p_h264->_8x8_transform & 0x3) << 12);
904 
905  /* macroblock adaptive scaling features */
907  if (p->rc_mb) {
908  reg = 0;
910  reg |= ((p_h264->rc_mb_dark & 0x1) << 3);
912  reg |= ((p_h264->rc_mb_smooth & 0x1) << 2);
914  reg |= ((p_h264->rc_mb_static & 0x1) << 1);
916  reg |= p_h264->rc_mb_activity & 0x1;
918  }
919 
920  /* aspect ratio VUI */
922  reg &= ~(0x1 << 5);
923  reg |= ((p_h264->vui_sar & 0x1) << 5);
925 
928  if (p_h264->vui_sar) {
929  /* aspect ratio IDC */
930  reg = 0;
931  reg |= p_h264->vui_sar_idc & 0xFF;
933  if (p_h264->vui_sar_idc == 0xFF) {
934  /* extended SAR */
935  reg = 0;
936  reg |= (p_h264->vui_ext_sar_width & 0xFFFF) << 16;
937  reg |= p_h264->vui_ext_sar_height & 0xFFFF;
939  }
940  }
941 
942  /* intra picture period for H.264 open GOP */
943  /* control */
945  reg &= ~(0x1 << 4);
946  reg |= ((p_h264->open_gop & 0x1) << 4);
948  /* value */
950  if (p_h264->open_gop) {
951  reg = 0;
952  reg |= p_h264->open_gop_size & 0xFFFF;
954  }
955 
956  /* 'WEIGHTED_BI_PREDICTION' for B is disabled */
958  reg &= ~(0x3 << 9);
960 
961  /* 'CONSTRAINED_INTRA_PRED_ENABLE' is disabled */
963  reg &= ~(0x1 << 14);
965 
966  /* ASO */
968  reg &= ~(0x1 << 6);
969  reg |= ((p_h264->aso & 0x1) << 6);
971 
972  /* hier qp enable */
974  reg &= ~(0x1 << 8);
975  reg |= ((p_h264->open_gop & 0x1) << 8);
977  reg = 0;
978  if (p_h264->hier_qp && p_h264->hier_qp_layer) {
979  reg |= (p_h264->hier_qp_type & 0x1) << 0x3;
980  reg |= p_h264->hier_qp_layer & 0x7;
982  /* QP value for each layer */
983  for (i = 0; i < (p_h264->hier_qp_layer & 0x7); i++)
984  WRITEL(p_h264->hier_qp_layer_qp[i],
986  i * 4);
987  }
988  /* number of coding layers should be zero when hierarchical coding is disabled */
990 
991  /* frame packing SEI generation */
993  reg &= ~(0x1 << 25);
994  reg |= ((p_h264->sei_frame_packing & 0x1) << 25);
996  if (p_h264->sei_frame_packing) {
997  reg = 0;
999  reg |= ((p_h264->sei_fp_curr_frame_0 & 0x1) << 2);
1001  reg |= p_h264->sei_fp_arrangement_type & 0x3;
1003  }
1004 
1005  if (p_h264->fmo) {
1006  switch (p_h264->fmo_map_type) {
1008  if (p_h264->fmo_slice_grp > 4)
1009  p_h264->fmo_slice_grp = 4;
1010  for (i = 0; i < (p_h264->fmo_slice_grp & 0xF); i++)
1011  WRITEL(p_h264->fmo_run_len[i] - 1,
1013  i * 4);
1014  break;
1016  if (p_h264->fmo_slice_grp > 4)
1017  p_h264->fmo_slice_grp = 4;
1018  break;
1021  if (p_h264->fmo_slice_grp > 2)
1022  p_h264->fmo_slice_grp = 2;
1023  WRITEL(p_h264->fmo_chg_dir & 0x1,
1025  /* the valid range is 0 to (number of macroblocks - 1) */
1026  WRITEL(p_h264->fmo_chg_rate,
1028  break;
1029  default:
1030  mfc_err("Unsupported map type for FMO: %d\n",
1031  p_h264->fmo_map_type);
1032  p_h264->fmo_map_type = 0;
1033  p_h264->fmo_slice_grp = 1;
1034  break;
1035  }
1036 
1037  WRITEL(p_h264->fmo_map_type,
1039  WRITEL(p_h264->fmo_slice_grp - 1,
1041  } else {
1043  }
1044 
1045  mfc_debug_leave();
1046 
1047  return 0;
1048 }
1049 
1050 static int s5p_mfc_set_enc_params_mpeg4(struct s5p_mfc_ctx *ctx)
1051 {
1052  struct s5p_mfc_dev *dev = ctx->dev;
1053  struct s5p_mfc_enc_params *p = &ctx->enc_params;
1054  struct s5p_mfc_mpeg4_enc_params *p_mpeg4 = &p->codec.mpeg4;
1055  unsigned int reg = 0;
1056 
1057  mfc_debug_enter();
1058 
1059  s5p_mfc_set_enc_params(ctx);
1060 
1061  /* pictype : number of B */
1063  reg &= ~(0x3 << 16);
1064  reg |= ((p->num_b_frame & 0x3) << 16);
1066 
1067  /* profile & level */
1068  reg = 0;
1070  reg |= ((p_mpeg4->level & 0xFF) << 8);
1072  reg |= p_mpeg4->profile & 0x3F;
1074 
1075  /* rate control config. */
1078  reg &= ~(0x1 << 8);
1079  reg |= ((p->rc_mb & 0x1) << 8);
1082  reg &= ~(0x3F);
1083  reg |= p_mpeg4->rc_frame_qp & 0x3F;
1085 
1086  /* max & min value of QP */
1087  reg = 0;
1089  reg |= ((p_mpeg4->rc_max_qp & 0x3F) << 8);
1091  reg |= p_mpeg4->rc_min_qp & 0x3F;
1093 
1094  /* other QPs */
1096  if (!p->rc_frame && !p->rc_mb) {
1097  reg = 0;
1098  reg |= ((p_mpeg4->rc_b_frame_qp & 0x3F) << 16);
1099  reg |= ((p_mpeg4->rc_p_frame_qp & 0x3F) << 8);
1100  reg |= p_mpeg4->rc_frame_qp & 0x3F;
1102  }
1103 
1104  /* frame rate */
1105  if (p->rc_frame && p->rc_framerate_num && p->rc_framerate_denom) {
1106  reg = 0;
1107  reg |= ((p->rc_framerate_num & 0xFFFF) << 16);
1108  reg |= p->rc_framerate_denom & 0xFFFF;
1110  }
1111 
1112  /* vbv buffer size */
1113  if (p->frame_skip_mode ==
1116 
1117  if (p->rc_frame)
1119  }
1120 
1121  /* Disable HEC */
1124 
1125  mfc_debug_leave();
1126 
1127  return 0;
1128 }
1129 
1130 static int s5p_mfc_set_enc_params_h263(struct s5p_mfc_ctx *ctx)
1131 {
1132  struct s5p_mfc_dev *dev = ctx->dev;
1133  struct s5p_mfc_enc_params *p = &ctx->enc_params;
1134  struct s5p_mfc_mpeg4_enc_params *p_h263 = &p->codec.mpeg4;
1135  unsigned int reg = 0;
1136 
1137  mfc_debug_enter();
1138 
1139  s5p_mfc_set_enc_params(ctx);
1140 
1141  /* profile & level */
1142  reg = 0;
1144  reg |= (0x1 << 4);
1146 
1147  /* rate control config. */
1150  reg &= ~(0x1 << 8);
1151  reg |= ((p->rc_mb & 0x1) << 8);
1154  reg &= ~(0x3F);
1155  reg |= p_h263->rc_frame_qp & 0x3F;
1157 
1158  /* max & min value of QP */
1159  reg = 0;
1161  reg |= ((p_h263->rc_max_qp & 0x3F) << 8);
1163  reg |= p_h263->rc_min_qp & 0x3F;
1165 
1166  /* other QPs */
1168  if (!p->rc_frame && !p->rc_mb) {
1169  reg = 0;
1170  reg |= ((p_h263->rc_b_frame_qp & 0x3F) << 16);
1171  reg |= ((p_h263->rc_p_frame_qp & 0x3F) << 8);
1172  reg |= p_h263->rc_frame_qp & 0x3F;
1174  }
1175 
1176  /* frame rate */
1177  if (p->rc_frame && p->rc_framerate_num && p->rc_framerate_denom) {
1178  reg = 0;
1179  reg |= ((p->rc_framerate_num & 0xFFFF) << 16);
1180  reg |= p->rc_framerate_denom & 0xFFFF;
1182  }
1183 
1184  /* vbv buffer size */
1185  if (p->frame_skip_mode ==
1188 
1189  if (p->rc_frame)
1191  }
1192 
1193  mfc_debug_leave();
1194 
1195  return 0;
1196 }
1197 
1198 /* Initialize decoding */
1200 {
1201  struct s5p_mfc_dev *dev = ctx->dev;
1202  unsigned int reg = 0;
1203  int fmo_aso_ctrl = 0;
1204 
1205  mfc_debug_enter();
1206  mfc_debug(2, "InstNo: %d/%d\n", ctx->inst_no,
1208  mfc_debug(2, "BUFs: %08x %08x %08x\n",
1212 
1213  /* FMO_ASO_CTRL - 0: Enable, 1: Disable */
1214  reg |= (fmo_aso_ctrl << S5P_FIMV_D_OPT_FMO_ASO_CTRL_MASK_V6);
1215 
1216  /* When the user sets display_delay to 0,
1217  * it works as "display_delay enabled" with the delay set to 0.
1218  * If the user wants display_delay disabled, it should be
1219  * set to a negative value. */
1220  if (ctx->display_delay >= 0) {
1221  reg |= (0x1 << S5P_FIMV_D_OPT_DDELAY_EN_SHIFT_V6);
1223  }
1224  /* Setup loop filter, for decoding this is only valid for MPEG4 */
1225  if (ctx->codec_mode == S5P_MFC_CODEC_MPEG4_DEC) {
1226  mfc_debug(2, "Set loop filter to: %d\n",
1227  ctx->loop_filter_mpeg4);
1228  reg |= (ctx->loop_filter_mpeg4 <<
1230  }
1231  if (ctx->dst_fmt->fourcc == V4L2_PIX_FMT_NV12MT_16X16)
1232  reg |= (0x1 << S5P_FIMV_D_OPT_TILE_MODE_SHIFT_V6);
1233 
1235 
1236  /* 0: NV12(CbCr), 1: NV21(CrCb) */
1237  if (ctx->dst_fmt->fourcc == V4L2_PIX_FMT_NV21M)
1239  else
1241 
1242  /* sei parse */
1244 
1246  s5p_mfc_hw_call(dev->mfc_cmds, cmd_host2risc, dev,
1248 
1249  mfc_debug_leave();
1250  return 0;
1251 }
1252 
1253 static inline void s5p_mfc_set_flush(struct s5p_mfc_ctx *ctx, int flush)
1254 {
1255  struct s5p_mfc_dev *dev = ctx->dev;
1256  unsigned int dpb;
1257  if (flush)
1258  dpb = READL(S5P_FIMV_SI_CH0_DPB_CONF_CTRL) | (1 << 14);
1259  else
1260  dpb = READL(S5P_FIMV_SI_CH0_DPB_CONF_CTRL) & ~(1 << 14);
1262 }
1263 
1264 /* Decode a single frame */
1266  enum s5p_mfc_decode_arg last_frame)
1267 {
1268  struct s5p_mfc_dev *dev = ctx->dev;
1269 
1272 
1274  /* Issue different commands to the instance based on whether
1275  * this is the last frame or not. */
1276  switch (last_frame) {
1277  case 0:
1278  s5p_mfc_hw_call(dev->mfc_cmds, cmd_host2risc, dev,
1280  break;
1281  case 1:
1282  s5p_mfc_hw_call(dev->mfc_cmds, cmd_host2risc, dev,
1284  break;
1285  default:
1286  mfc_err("Unsupported last frame arg.\n");
1287  return -EINVAL;
1288  }
1289 
1290  mfc_debug(2, "Decoding a usual frame.\n");
1291  return 0;
1292 }
1293 
1295 {
1296  struct s5p_mfc_dev *dev = ctx->dev;
1297 
1298  if (ctx->codec_mode == S5P_MFC_CODEC_H264_ENC)
1299  s5p_mfc_set_enc_params_h264(ctx);
1300  else if (ctx->codec_mode == S5P_MFC_CODEC_MPEG4_ENC)
1301  s5p_mfc_set_enc_params_mpeg4(ctx);
1302  else if (ctx->codec_mode == S5P_MFC_CODEC_H263_ENC)
1303  s5p_mfc_set_enc_params_h263(ctx);
1304  else {
1305  mfc_err("Unknown codec for encoding (%x).\n",
1306  ctx->codec_mode);
1307  return -EINVAL;
1308  }
1309 
1311  s5p_mfc_hw_call(dev->mfc_cmds, cmd_host2risc, dev,
1313 
1314  return 0;
1315 }
1316 
1318 {
1319  struct s5p_mfc_dev *dev = ctx->dev;
1320  struct s5p_mfc_enc_params *p = &ctx->enc_params;
1321  struct s5p_mfc_h264_enc_params *p_h264 = &p->codec.h264;
1322  int i;
1323 
1324  if (p_h264->aso) {
1325  for (i = 0; i < 8; i++)
1326  WRITEL(p_h264->aso_slice_order[i],
1328  }
1329  return 0;
1330 }
1331 
1332 /* Encode a single frame */
1334 {
1335  struct s5p_mfc_dev *dev = ctx->dev;
1336 
1337  mfc_debug(2, "++\n");
1338 
1339  /* memory structure cur. frame */
1340 
1341  if (ctx->codec_mode == S5P_MFC_CODEC_H264_ENC)
1343 
1344  s5p_mfc_set_slice_mode(ctx);
1345 
1347  s5p_mfc_hw_call(dev->mfc_cmds, cmd_host2risc, dev,
1349 
1350  mfc_debug(2, "--\n");
1351 
1352  return 0;
1353 }
1354 
1355 static inline int s5p_mfc_get_new_ctx(struct s5p_mfc_dev *dev)
1356 {
1357  unsigned long flags;
1358  int new_ctx;
1359  int cnt;
1360 
1361  spin_lock_irqsave(&dev->condlock, flags);
1362  mfc_debug(2, "Previous context: %d (bits %08lx)\n", dev->curr_ctx,
1363  dev->ctx_work_bits);
1364  new_ctx = (dev->curr_ctx + 1) % MFC_NUM_CONTEXTS;
1365  cnt = 0;
1366  while (!test_bit(new_ctx, &dev->ctx_work_bits)) {
1367  new_ctx = (new_ctx + 1) % MFC_NUM_CONTEXTS;
1368  cnt++;
1369  if (cnt > MFC_NUM_CONTEXTS) {
1370  /* No contexts to run */
1371  spin_unlock_irqrestore(&dev->condlock, flags);
1372  return -EAGAIN;
1373  }
1374  }
1375  spin_unlock_irqrestore(&dev->condlock, flags);
1376  return new_ctx;
1377 }
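s5p_mfc_get_new_ctx() above is a plain round-robin scan of ctx_work_bits starting just after the previously run context. A minimal stand-alone sketch of the same selection logic (a hypothetical helper, not part of the driver):

    /* Hypothetical stand-alone version of the round-robin pick above. */
    static int pick_next_ctx(unsigned long work_bits, int curr_ctx, int num_ctx)
    {
    	int new_ctx = (curr_ctx + 1) % num_ctx;
    	int cnt = 0;

    	while (!(work_bits & (1UL << new_ctx))) {
    		new_ctx = (new_ctx + 1) % num_ctx;
    		if (++cnt > num_ctx)
    			return -EAGAIN;	/* no context has pending work */
    	}
    	return new_ctx;
    }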
1378 
1379 static inline void s5p_mfc_run_dec_last_frames(struct s5p_mfc_ctx *ctx)
1380 {
1381  struct s5p_mfc_dev *dev = ctx->dev;
1382  struct s5p_mfc_buf *temp_vb;
1383  unsigned long flags;
1384 
1385  spin_lock_irqsave(&dev->irqlock, flags);
1386 
1387  /* Frames are being decoded */
1388  if (list_empty(&ctx->src_queue)) {
1389  mfc_debug(2, "No src buffers.\n");
1390  spin_unlock_irqrestore(&dev->irqlock, flags);
1391  return;
1392  }
1393  /* Get the next source buffer */
1394  temp_vb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
1395  temp_vb->flags |= MFC_BUF_FLAG_USED;
1397  vb2_dma_contig_plane_dma_addr(temp_vb->b, 0), 0, 0);
1398  spin_unlock_irqrestore(&dev->irqlock, flags);
1399 
1400  dev->curr_ctx = ctx->num;
1403 }
1404 
1405 static inline int s5p_mfc_run_dec_frame(struct s5p_mfc_ctx *ctx)
1406 {
1407  struct s5p_mfc_dev *dev = ctx->dev;
1408  struct s5p_mfc_buf *temp_vb;
1409  unsigned long flags;
1410  int last_frame = 0;
1411  unsigned int index;
1412 
1413  spin_lock_irqsave(&dev->irqlock, flags);
1414 
1415  /* Frames are being decoded */
1416  if (list_empty(&ctx->src_queue)) {
1417  mfc_debug(2, "No src buffers.\n");
1418  spin_unlock_irqrestore(&dev->irqlock, flags);
1419  return -EAGAIN;
1420  }
1421  /* Get the next source buffer */
1422  temp_vb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
1423  temp_vb->flags |= MFC_BUF_FLAG_USED;
1425  vb2_dma_contig_plane_dma_addr(temp_vb->b, 0),
1426  ctx->consumed_stream,
1427  temp_vb->b->v4l2_planes[0].bytesused);
1428  spin_unlock_irqrestore(&dev->irqlock, flags);
1429 
1430  index = temp_vb->b->v4l2_buf.index;
1431 
1432  dev->curr_ctx = ctx->num;
1434  if (temp_vb->b->v4l2_planes[0].bytesused == 0) {
1435  last_frame = 1;
1436  mfc_debug(2, "Setting ctx->state to FINISHING\n");
1437  ctx->state = MFCINST_FINISHING;
1438  }
1439  s5p_mfc_decode_one_frame_v6(ctx, last_frame);
1440 
1441  return 0;
1442 }
1443 
1444 static inline int s5p_mfc_run_enc_frame(struct s5p_mfc_ctx *ctx)
1445 {
1446  struct s5p_mfc_dev *dev = ctx->dev;
1447  unsigned long flags;
1448  struct s5p_mfc_buf *dst_mb;
1449  struct s5p_mfc_buf *src_mb;
1450  unsigned long src_y_addr, src_c_addr, dst_addr;
1451  /*
1452  unsigned int src_y_size, src_c_size;
1453  */
1454  unsigned int dst_size;
1455  unsigned int index;
1456 
1457  spin_lock_irqsave(&dev->irqlock, flags);
1458 
1459  if (list_empty(&ctx->src_queue)) {
1460  mfc_debug(2, "no src buffers.\n");
1461  spin_unlock_irqrestore(&dev->irqlock, flags);
1462  return -EAGAIN;
1463  }
1464 
1465  if (list_empty(&ctx->dst_queue)) {
1466  mfc_debug(2, "no dst buffers.\n");
1467  spin_unlock_irqrestore(&dev->irqlock, flags);
1468  return -EAGAIN;
1469  }
1470 
1471  src_mb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
1472  src_mb->flags |= MFC_BUF_FLAG_USED;
1473  src_y_addr = vb2_dma_contig_plane_dma_addr(src_mb->b, 0);
1474  src_c_addr = vb2_dma_contig_plane_dma_addr(src_mb->b, 1);
1475 
1476  mfc_debug(2, "enc src y addr: 0x%08lx", src_y_addr);
1477  mfc_debug(2, "enc src c addr: 0x%08lx", src_c_addr);
1478 
1479  s5p_mfc_set_enc_frame_buffer_v6(ctx, src_y_addr, src_c_addr);
1480 
1481  dst_mb = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf, list);
1482  dst_mb->flags |= MFC_BUF_FLAG_USED;
1483  dst_addr = vb2_dma_contig_plane_dma_addr(dst_mb->b, 0);
1484  dst_size = vb2_plane_size(dst_mb->b, 0);
1485 
1486  s5p_mfc_set_enc_stream_buffer_v6(ctx, dst_addr, dst_size);
1487 
1488  spin_unlock_irqrestore(&dev->irqlock, flags);
1489 
1490  index = src_mb->b->v4l2_buf.index;
1491 
1492  dev->curr_ctx = ctx->num;
1495 
1496  return 0;
1497 }
1498 
1499 static inline void s5p_mfc_run_init_dec(struct s5p_mfc_ctx *ctx)
1500 {
1501  struct s5p_mfc_dev *dev = ctx->dev;
1502  unsigned long flags;
1503  struct s5p_mfc_buf *temp_vb;
1504 
1505  /* Initializing decoding - parsing header */
1506  spin_lock_irqsave(&dev->irqlock, flags);
1507  mfc_debug(2, "Preparing to init decoding.\n");
1508  temp_vb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
1509  mfc_debug(2, "Header size: %d\n", temp_vb->b->v4l2_planes[0].bytesused);
1511  vb2_dma_contig_plane_dma_addr(temp_vb->b, 0), 0,
1512  temp_vb->b->v4l2_planes[0].bytesused);
1513  spin_unlock_irqrestore(&dev->irqlock, flags);
1514  dev->curr_ctx = ctx->num;
1517 }
1518 
1519 static inline void s5p_mfc_run_init_enc(struct s5p_mfc_ctx *ctx)
1520 {
1521  struct s5p_mfc_dev *dev = ctx->dev;
1522  unsigned long flags;
1523  struct s5p_mfc_buf *dst_mb;
1524  unsigned long dst_addr;
1525  unsigned int dst_size;
1526 
1527  spin_lock_irqsave(&dev->irqlock, flags);
1528 
1529  dst_mb = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf, list);
1530  dst_addr = vb2_dma_contig_plane_dma_addr(dst_mb->b, 0);
1531  dst_size = vb2_plane_size(dst_mb->b, 0);
1532  s5p_mfc_set_enc_stream_buffer_v6(ctx, dst_addr, dst_size);
1533  spin_unlock_irqrestore(&dev->irqlock, flags);
1534  dev->curr_ctx = ctx->num;
1537 }
1538 
1539 static inline int s5p_mfc_run_init_dec_buffers(struct s5p_mfc_ctx *ctx)
1540 {
1541  struct s5p_mfc_dev *dev = ctx->dev;
1542  int ret;
1543  /* Header was parsed, now start processing.
1544  * First set the output frame buffers:
1545  * s5p_mfc_alloc_dec_buffers(ctx); */
1546 
1547  if (ctx->capture_state != QUEUE_BUFS_MMAPED) {
1548  mfc_err("It seems that not all destination buffers were\n"
1549  "mmapped. MFC requires that all destination buffers are mmapped\n"
1550  "before starting processing.\n");
1551  return -EAGAIN;
1552  }
1553 
1554  dev->curr_ctx = ctx->num;
1557  if (ret) {
1558  mfc_err("Failed to alloc frame mem.\n");
1559  ctx->state = MFCINST_ERROR;
1560  }
1561  return ret;
1562 }
1563 
1564 static inline int s5p_mfc_run_init_enc_buffers(struct s5p_mfc_ctx *ctx)
1565 {
1566  struct s5p_mfc_dev *dev = ctx->dev;
1567  int ret;
1568 
1569  ret = s5p_mfc_alloc_codec_buffers_v6(ctx);
1570  if (ret) {
1571  mfc_err("Failed to allocate encoding buffers.\n");
1572  return -ENOMEM;
1573  }
1574 
1575  /* Header was generated, now start processing.
1576  * First set the reference frame buffers.
1577  */
1578  if (ctx->capture_state != QUEUE_BUFS_REQUESTED) {
1579  mfc_err("It seems that destination buffers were not\n"
1580  "requested. MFC requires that the header be generated\n"
1581  "before allocating codec buffers.\n");
1582  return -EAGAIN;
1583  }
1584 
1585  dev->curr_ctx = ctx->num;
1587  ret = s5p_mfc_set_enc_ref_buffer_v6(ctx);
1588  if (ret) {
1589  mfc_err("Failed to alloc frame mem.\n");
1590  ctx->state = MFCINST_ERROR;
1591  }
1592  return ret;
1593 }
1594 
1595 /* Try running an operation on hardware */
1597 {
1598  struct s5p_mfc_ctx *ctx;
1599  int new_ctx;
1600  unsigned int ret = 0;
1601 
1602  mfc_debug(1, "Try run dev: %p\n", dev);
1603 
1604  /* Check whether hardware is not running */
1605  if (test_and_set_bit(0, &dev->hw_lock) != 0) {
1606  /* This is perfectly ok, the scheduled ctx should wait */
1607  mfc_debug(1, "Couldn't lock HW.\n");
1608  return;
1609  }
1610 
1611  /* Choose the context to run */
1612  new_ctx = s5p_mfc_get_new_ctx(dev);
1613  if (new_ctx < 0) {
1614  /* No contexts to run */
1615  if (test_and_clear_bit(0, &dev->hw_lock) == 0) {
1616  mfc_err("Failed to unlock hardware.\n");
1617  return;
1618  }
1619 
1620  mfc_debug(1, "No ctx is scheduled to be run.\n");
1621  return;
1622  }
1623 
1624  mfc_debug(1, "New context: %d\n", new_ctx);
1625  ctx = dev->ctx[new_ctx];
1626  mfc_debug(1, "Setting new context to %p\n", ctx);
1627  /* Got context to run in ctx */
1628  mfc_debug(1, "ctx->dst_queue_cnt=%d ctx->dpb_count=%d ctx->src_queue_cnt=%d\n",
1629  ctx->dst_queue_cnt, ctx->dpb_count, ctx->src_queue_cnt);
1630  mfc_debug(1, "ctx->state=%d\n", ctx->state);
1631  /* Last frame has already been sent to MFC.
1632  * Now obtaining frames from the MFC buffer. */
1633 
1634  s5p_mfc_clock_on();
1635  if (ctx->type == MFCINST_DECODER) {
1636  switch (ctx->state) {
1637  case MFCINST_FINISHING:
1638  s5p_mfc_run_dec_last_frames(ctx);
1639  break;
1640  case MFCINST_RUNNING:
1641  ret = s5p_mfc_run_dec_frame(ctx);
1642  break;
1643  case MFCINST_INIT:
1645  ret = s5p_mfc_hw_call(dev->mfc_cmds, open_inst_cmd,
1646  ctx);
1647  break;
1648  case MFCINST_RETURN_INST:
1650  ret = s5p_mfc_hw_call(dev->mfc_cmds, close_inst_cmd,
1651  ctx);
1652  break;
1653  case MFCINST_GOT_INST:
1654  s5p_mfc_run_init_dec(ctx);
1655  break;
1656  case MFCINST_HEAD_PARSED:
1657  ret = s5p_mfc_run_init_dec_buffers(ctx);
1658  break;
1660  s5p_mfc_run_dec_last_frames(ctx);
1661  break;
1663  s5p_mfc_run_dec_last_frames(ctx);
1664  break;
1666  mfc_debug(2, "Finished remaining frames after resolution change.\n");
1667  ctx->capture_state = QUEUE_FREE;
1668  mfc_debug(2, "Will re-init the codec.\n");
1669  s5p_mfc_run_init_dec(ctx);
1670  break;
1671  default:
1672  ret = -EAGAIN;
1673  }
1674  } else if (ctx->type == MFCINST_ENCODER) {
1675  switch (ctx->state) {
1676  case MFCINST_FINISHING:
1677  case MFCINST_RUNNING:
1678  ret = s5p_mfc_run_enc_frame(ctx);
1679  break;
1680  case MFCINST_INIT:
1681  ret = s5p_mfc_hw_call(dev->mfc_cmds, open_inst_cmd,
1682  ctx);
1683  break;
1684  case MFCINST_RETURN_INST:
1685  ret = s5p_mfc_hw_call(dev->mfc_cmds, close_inst_cmd,
1686  ctx);
1687  break;
1688  case MFCINST_GOT_INST:
1689  s5p_mfc_run_init_enc(ctx);
1690  break;
1691  case MFCINST_HEAD_PARSED: /* Only for MFC6.x */
1692  ret = s5p_mfc_run_init_enc_buffers(ctx);
1693  break;
1694  default:
1695  ret = -EAGAIN;
1696  }
1697  } else {
1698  mfc_err("invalid context type: %d\n", ctx->type);
1699  ret = -EAGAIN;
1700  }
1701 
1702  if (ret) {
1703  /* Free hardware lock */
1704  if (test_and_clear_bit(0, &dev->hw_lock) == 0)
1705  mfc_err("Failed to unlock hardware.\n");
1706 
1707  /* This is indeed important: since no operation has been
1708  * scheduled, reduce the clock count here, as no one else will
1709  * ever do it, because no interrupt related to this try_run
1710  * will ever come from the hardware. */
1712  }
1713 }
1714 
1715 
1716 void s5p_mfc_cleanup_queue_v6(struct list_head *lh, struct vb2_queue *vq)
1717 {
1718  struct s5p_mfc_buf *b;
1719  int i;
1720 
1721  while (!list_empty(lh)) {
1722  b = list_entry(lh->next, struct s5p_mfc_buf, list);
1723  for (i = 0; i < b->b->num_planes; i++)
1724  vb2_set_plane_payload(b->b, i, 0);
1726  list_del(&b->list);
1727  }
1728 }
1729 
1731 {
1734 }
1735 
1736 void s5p_mfc_write_info_v6(struct s5p_mfc_ctx *ctx, unsigned int data,
1737  unsigned int ofs)
1738 {
1739  struct s5p_mfc_dev *dev = ctx->dev;
1740 
1741  s5p_mfc_clock_on();
1742  WRITEL(data, ofs);
1744 }
1745 
1746 unsigned int s5p_mfc_read_info_v6(struct s5p_mfc_ctx *ctx, unsigned int ofs)
1747 {
1748  struct s5p_mfc_dev *dev = ctx->dev;
1749  int ret;
1750 
1751  s5p_mfc_clock_on();
1752  ret = READL(ofs);
1754 
1755  return ret;
1756 }
1757 
1759 {
1761 }
1762 
1764 {
1766 }
1767 
1769 {
1771 }
1772 
1774 {
1776 }
1777 
1779 {
1782 }
1783 
1785 {
1788 }
1789 
1791 {
1793 }
1794 
1796 {
1797  return mfc_read(dev, S5P_FIMV_RISC2HOST_CMD_V6) &
1799 }
1800 
1802 {
1803  return mfc_read(dev, S5P_FIMV_ERROR_CODE_V6);
1804 }
1805 
1806 int s5p_mfc_err_dec_v6(unsigned int err)
1807 {
1809 }
1810 
1811 int s5p_mfc_err_dspl_v6(unsigned int err)
1812 {
1814 }
1815 
1817 {
1819 }
1820 
1822 {
1824 }
1825 
1827 {
1828  return mfc_read(dev, S5P_FIMV_D_MIN_NUM_DPB_V6);
1829 }
1830 
1832 {
1833  return mfc_read(dev, S5P_FIMV_D_MIN_NUM_MV_V6);
1834 }
1835 
1837 {
1839 }
1840 
1842 {
1843  return mfc_read(dev, S5P_FIMV_E_NUM_DPB_V6);
1844 }
1845 
1847 {
1848  return mfc_read(dev, S5P_FIMV_E_STREAM_SIZE_V6);
1849 }
1850 
1852 {
1853  return mfc_read(dev, S5P_FIMV_E_SLICE_TYPE_V6);
1854 }
1855 
1857 {
1859 }
1860 
1862 {
1864 }
1865 
1867 {
1869 }
1870 
1872 {
1873  return mfc_read(dev, S5P_FIMV_D_MVC_VIEW_ID_V6);
1874 }
1875 
1876 unsigned int s5p_mfc_get_pic_type_top_v6(struct s5p_mfc_ctx *ctx)
1877 {
1879 }
1880 
1881 unsigned int s5p_mfc_get_pic_type_bot_v6(struct s5p_mfc_ctx *ctx)
1882 {
1884 }
1885 
1886 unsigned int s5p_mfc_get_crop_info_h_v6(struct s5p_mfc_ctx *ctx)
1887 {
1888  return s5p_mfc_read_info_v6(ctx, CROP_INFO_H_V6);
1889 }
1890 
1891 unsigned int s5p_mfc_get_crop_info_v_v6(struct s5p_mfc_ctx *ctx)
1892 {
1893  return s5p_mfc_read_info_v6(ctx, CROP_INFO_V_V6);
1894 }
1895 
1896 /* Initialize opr function pointers for MFC v6 */
1897 static struct s5p_mfc_hw_ops s5p_mfc_ops_v6 = {
1898  .alloc_dec_temp_buffers = s5p_mfc_alloc_dec_temp_buffers_v6,
1899  .release_dec_desc_buffer = s5p_mfc_release_dec_desc_buffer_v6,
1900  .alloc_codec_buffers = s5p_mfc_alloc_codec_buffers_v6,
1901  .release_codec_buffers = s5p_mfc_release_codec_buffers_v6,
1902  .alloc_instance_buffer = s5p_mfc_alloc_instance_buffer_v6,
1903  .release_instance_buffer = s5p_mfc_release_instance_buffer_v6,
1904  .alloc_dev_context_buffer =
1906  .release_dev_context_buffer =
1908  .dec_calc_dpb_size = s5p_mfc_dec_calc_dpb_size_v6,
1909  .enc_calc_src_size = s5p_mfc_enc_calc_src_size_v6,
1910  .set_dec_stream_buffer = s5p_mfc_set_dec_stream_buffer_v6,
1911  .set_dec_frame_buffer = s5p_mfc_set_dec_frame_buffer_v6,
1912  .set_enc_stream_buffer = s5p_mfc_set_enc_stream_buffer_v6,
1913  .set_enc_frame_buffer = s5p_mfc_set_enc_frame_buffer_v6,
1914  .get_enc_frame_buffer = s5p_mfc_get_enc_frame_buffer_v6,
1915  .set_enc_ref_buffer = s5p_mfc_set_enc_ref_buffer_v6,
1916  .init_decode = s5p_mfc_init_decode_v6,
1917  .init_encode = s5p_mfc_init_encode_v6,
1918  .encode_one_frame = s5p_mfc_encode_one_frame_v6,
1919  .try_run = s5p_mfc_try_run_v6,
1920  .cleanup_queue = s5p_mfc_cleanup_queue_v6,
1921  .clear_int_flags = s5p_mfc_clear_int_flags_v6,
1922  .write_info = s5p_mfc_write_info_v6,
1923  .read_info = s5p_mfc_read_info_v6,
1924  .get_dspl_y_adr = s5p_mfc_get_dspl_y_adr_v6,
1925  .get_dec_y_adr = s5p_mfc_get_dec_y_adr_v6,
1926  .get_dspl_status = s5p_mfc_get_dspl_status_v6,
1927  .get_dec_status = s5p_mfc_get_dec_status_v6,
1928  .get_dec_frame_type = s5p_mfc_get_dec_frame_type_v6,
1929  .get_disp_frame_type = s5p_mfc_get_disp_frame_type_v6,
1930  .get_consumed_stream = s5p_mfc_get_consumed_stream_v6,
1931  .get_int_reason = s5p_mfc_get_int_reason_v6,
1932  .get_int_err = s5p_mfc_get_int_err_v6,
1933  .err_dec = s5p_mfc_err_dec_v6,
1934  .err_dspl = s5p_mfc_err_dspl_v6,
1935  .get_img_width = s5p_mfc_get_img_width_v6,
1936  .get_img_height = s5p_mfc_get_img_height_v6,
1937  .get_dpb_count = s5p_mfc_get_dpb_count_v6,
1938  .get_mv_count = s5p_mfc_get_mv_count_v6,
1939  .get_inst_no = s5p_mfc_get_inst_no_v6,
1940  .get_enc_strm_size = s5p_mfc_get_enc_strm_size_v6,
1941  .get_enc_slice_type = s5p_mfc_get_enc_slice_type_v6,
1942  .get_enc_dpb_count = s5p_mfc_get_enc_dpb_count_v6,
1943  .get_enc_pic_count = s5p_mfc_get_enc_pic_count_v6,
1944  .get_sei_avail_status = s5p_mfc_get_sei_avail_status_v6,
1945  .get_mvc_num_views = s5p_mfc_get_mvc_num_views_v6,
1946  .get_mvc_view_id = s5p_mfc_get_mvc_view_id_v6,
1947  .get_pic_type_top = s5p_mfc_get_pic_type_top_v6,
1948  .get_pic_type_bot = s5p_mfc_get_pic_type_bot_v6,
1949  .get_crop_info_h = s5p_mfc_get_crop_info_h_v6,
1950  .get_crop_info_v = s5p_mfc_get_crop_info_v_v6,
1951 };
1952 
1954 {
1955  return &s5p_mfc_ops_v6;
1956 }
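For context, a hedged usage sketch (not verbatim from the driver core; the dev->mfc_ops field name is an assumption): version-independent code is expected to fetch this table once and then dispatch through it with s5p_mfc_hw_call(), the same macro used throughout this file:

    /* Hedged usage sketch */
    dev->mfc_ops = s5p_mfc_init_hw_ops_v6();	/* field name assumed */

    /* ...later, common code calls through the table, e.g.: */
    s5p_mfc_hw_call(dev->mfc_ops, alloc_codec_buffers, ctx);
    s5p_mfc_hw_call(dev->mfc_ops, try_run, dev);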