
/*
 * Intel MediaSDK QSV codec-independent code
 *
 * copyright (c) 2013 Luca Barbato
 * copyright (c) 2015 Anton Khirnov <anton@khirnov.net>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <string.h>
#include <sys/types.h>

#include <mfx/mfxvideo.h>

#include "libavutil/common.h"
#include "libavutil/mem.h"
#include "libavutil/log.h"
#include "libavutil/pixfmt.h"
#include "libavutil/time.h"

#include "avcodec.h"
#include "internal.h"
#include "qsv.h"
#include "qsv_internal.h"
#include "qsvdec.h"

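/* Map a caller-visible software pixel format to the layout the MSDK decoder
 * actually produces; 8-bit 4:2:0 content is always returned as NV12. */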
int ff_qsv_map_pixfmt(enum AVPixelFormat format)
{
    switch (format) {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUVJ420P:
        return AV_PIX_FMT_NV12;
    default:
        return AVERROR(ENOSYS);
    }
}

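/* Set up the decoder: reuse an MFX session supplied by the caller through
 * AVCodecContext.hwaccel_context if present, otherwise open an internal one,
 * then parse the sequence header from the first packet, initialize the MFX
 * decoder and allocate the async and input fifos.
 *
 * A rough sketch of how a caller could provide its own session (hypothetical
 * variable names; see AVQSVContext in qsv.h for the exact fields):
 *
 *     AVQSVContext *qsv = av_qsv_alloc_context();
 *     qsv->session           = my_mfx_session;  // an already initialized mfxSession
 *     qsv->iopattern         = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
 *     avctx->hwaccel_context = qsv;
 */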
int ff_qsv_decode_init(AVCodecContext *avctx, QSVContext *q, AVPacket *avpkt)
{
    mfxVideoParam param = { { 0 } };
    mfxBitstream bs   = { { { 0 } } };
    int ret;
    enum AVPixelFormat pix_fmts[3] = { AV_PIX_FMT_QSV,
                                       AV_PIX_FMT_NV12,
                                       AV_PIX_FMT_NONE };

    q->iopattern  = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    if (!q->session) {
        if (avctx->hwaccel_context) {
            AVQSVContext *qsv = avctx->hwaccel_context;

            q->session        = qsv->session;
            q->iopattern      = qsv->iopattern;
            q->ext_buffers    = qsv->ext_buffers;
            q->nb_ext_buffers = qsv->nb_ext_buffers;
        }
        if (!q->session) {
            ret = ff_qsv_init_internal_session(avctx, &q->internal_qs,
                                               q->load_plugins);
            if (ret < 0)
                return ret;

            q->session = q->internal_qs.session;
        }
    }

    if (avpkt->size) {
        bs.Data       = avpkt->data;
        bs.DataLength = avpkt->size;
        bs.MaxLength  = bs.DataLength;
        bs.TimeStamp  = avpkt->pts;
    } else
        return AVERROR_INVALIDDATA;

    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Unsupported codec_id %08x\n", avctx->codec_id);
        return ret;
    }

    param.mfx.CodecId = ret;

    ret = MFXVideoDECODE_DecodeHeader(q->session, &bs, &param);
    if (ret == MFX_ERR_MORE_DATA) {
        /* MFX_ERR_MORE_DATA means that the sequence header was not found in
           this packet, so report the whole packet as consumed and skip it.
         */
        return avpkt->size;
    } else if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Decode header error %d\n", ret);
        return ff_qsv_error(ret);
    }
    param.IOPattern   = q->iopattern;
    param.AsyncDepth  = q->async_depth;
    param.ExtParam    = q->ext_buffers;
    param.NumExtParam = q->nb_ext_buffers;
    param.mfx.FrameInfo.BitDepthLuma   = 8;
    param.mfx.FrameInfo.BitDepthChroma = 8;

    ret = MFXVideoDECODE_Init(q->session, &param);
    if (ret < 0) {
        if (ret == MFX_ERR_INVALID_VIDEO_PARAM) {
            av_log(avctx, AV_LOG_ERROR,
                   "Error initializing the MFX video decoder, unsupported video\n");
        } else {
            av_log(avctx, AV_LOG_ERROR,
                   "Error initializing the MFX video decoder %d\n", ret);
        }
        return ff_qsv_error(ret);
    }

    ret = ff_get_format(avctx, pix_fmts);
    if (ret < 0)
        return ret;

    avctx->pix_fmt      = ret;
    avctx->profile      = param.mfx.CodecProfile;
    avctx->level        = param.mfx.CodecLevel;
    avctx->coded_width  = param.mfx.FrameInfo.Width;
    avctx->coded_height = param.mfx.FrameInfo.Height;
    avctx->width        = param.mfx.FrameInfo.CropW - param.mfx.FrameInfo.CropX;
    avctx->height       = param.mfx.FrameInfo.CropH - param.mfx.FrameInfo.CropY;

    /* The maximum decoder latency cannot exceed the maximum DPB size, which is
       16 for both H.264 and HEVC, so pre-allocate a fifo big enough for
       17 elements:
     */
    if (!q->async_fifo) {
        q->async_fifo = av_fifo_alloc((1 + 16) *
                                      (sizeof(mfxSyncPoint) + sizeof(QSVFrame*)));
        if (!q->async_fifo)
            return AVERROR(ENOMEM);
    }

    q->input_fifo = av_fifo_alloc(1024*16);
    if (!q->input_fifo)
        return AVERROR(ENOMEM);

    q->engine_ready = 1;

    return 0;
}

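/* Request a buffer for the frame from the caller and attach an mfxFrameSurface1
 * to it: AV_PIX_FMT_QSV frames already carry the surface in data[3], while for
 * system-memory (NV12) frames an internal surface descriptor is filled in to
 * point at the frame's planes. */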
static int alloc_frame(AVCodecContext *avctx, QSVFrame *frame)
{
    int ret;

    ret = ff_get_buffer(avctx, frame->frame, AV_GET_BUFFER_FLAG_REF);
    if (ret < 0)
        return ret;

    if (frame->frame->format == AV_PIX_FMT_QSV) {
        frame->surface = (mfxFrameSurface1*)frame->frame->data[3];
    } else {
        frame->surface_internal.Info.BitDepthLuma   = 8;
        frame->surface_internal.Info.BitDepthChroma = 8;
        frame->surface_internal.Info.FourCC         = MFX_FOURCC_NV12;
        frame->surface_internal.Info.Width          = avctx->coded_width;
        frame->surface_internal.Info.Height         = avctx->coded_height;
        frame->surface_internal.Info.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;

        frame->surface_internal.Data.PitchLow = frame->frame->linesize[0];
        frame->surface_internal.Data.Y        = frame->frame->data[0];
        frame->surface_internal.Data.UV       = frame->frame->data[1];

        frame->surface = &frame->surface_internal;
    }

    return 0;
}

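/* Detach surfaces from all work frames that the SDK no longer has locked and
 * that are not queued in the async fifo, so their buffers can be reused. */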
static void qsv_clear_unused_frames(QSVContext *q)
{
    QSVFrame *cur = q->work_frames;
    while (cur) {
        if (cur->surface && !cur->surface->Data.Locked && !cur->queued) {
            cur->surface = NULL;
            av_frame_unref(cur->frame);
        }
        cur = cur->next;
    }
}

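/* Return a free input surface for DecodeFrameAsync, reusing an idle entry from
 * the work_frames list when possible and appending a new one otherwise. */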
static int get_surface(AVCodecContext *avctx, QSVContext *q, mfxFrameSurface1 **surf)
{
    QSVFrame *frame, **last;
    int ret;

    qsv_clear_unused_frames(q);

    frame = q->work_frames;
    last  = &q->work_frames;
    while (frame) {
        if (!frame->surface) {
            ret = alloc_frame(avctx, frame);
            if (ret < 0)
                return ret;
            *surf = frame->surface;
            return 0;
        }

        last  = &frame->next;
        frame = frame->next;
    }

    frame = av_mallocz(sizeof(*frame));
    if (!frame)
        return AVERROR(ENOMEM);
    frame->frame = av_frame_alloc();
    if (!frame->frame) {
        av_freep(&frame);
        return AVERROR(ENOMEM);
    }
    *last = frame;

    ret = alloc_frame(avctx, frame);
    if (ret < 0)
        return ret;

    *surf = frame->surface;

    return 0;
}

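/* Find the work frame wrapping the surface that the SDK returned. */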
static QSVFrame *find_frame(QSVContext *q, mfxFrameSurface1 *surf)
{
    QSVFrame *cur = q->work_frames;
    while (cur) {
        if (surf == cur->surface)
            return cur;
        cur = cur->next;
    }
    return NULL;
}

/*  Releases consumed data from the input bitstream fifo.
    Since the fifo is mapped directly onto an mfxBitstream, which cannot handle
    data that wraps around the end of the buffer, any unconsumed remainder is
    moved back to the beginning of the fifo. If nothing remains, the fifo
    pointers are simply reset to their initial positions.
    NOTE: the case where the fifo still contains unconsumed data is rare, and
    the typical amount of such data is 1..4 bytes.
*/
static void qsv_fifo_relocate(AVFifoBuffer *f, int bytes_to_free)
{
    int data_size;
    int data_rest = 0;

    av_fifo_drain(f, bytes_to_free);

    data_size = av_fifo_size(f);
    if (data_size > 0) {
        if (f->buffer != f->rptr) {
            if ((f->end - f->rptr) < data_size) {
                data_rest = data_size - (f->end - f->rptr);
                data_size -= data_rest;
                memmove(f->buffer + data_size, f->buffer, data_rest);
            }
            memmove(f->buffer, f->rptr, data_size);
            data_size += data_rest;
        }
    }
    f->rptr = f->buffer;
    f->wptr = f->buffer + data_size;
    f->wndx = data_size;
    f->rndx = 0;
}

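/* Decode a single packet: feed the bitstream to DecodeFrameAsync until the SDK
 * asks for more input, queue each returned surface together with its sync point
 * in async_fifo, and only sync and output the oldest decoded frame once more
 * than async_depth operations are in flight (or when flushing with an empty
 * packet), keeping the pipeline asynchronous. */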
int ff_qsv_decode(AVCodecContext *avctx, QSVContext *q,
                  AVFrame *frame, int *got_frame,
                  AVPacket *avpkt)
{
    QSVFrame *out_frame;
    mfxFrameSurface1 *insurf;
    mfxFrameSurface1 *outsurf;
    mfxSyncPoint sync;
    mfxBitstream bs = { { { 0 } } };
    int ret;
    int n_out_frames;
    int buffered = 0;

    if (!q->engine_ready) {
        ret = ff_qsv_decode_init(avctx, q, avpkt);
        if (ret)
            return ret;
    }

    if (avpkt->size) {
        if (av_fifo_size(q->input_fifo)) {
            /* the fifo still holds unconsumed data from a previous packet */
            if (av_fifo_space(q->input_fifo) < avpkt->size) {
                ret = av_fifo_grow(q->input_fifo, avpkt->size);
                if (ret < 0)
                    return ret;
            }
            av_fifo_generic_write(q->input_fifo, avpkt->data, avpkt->size, NULL);
            bs.Data       = q->input_fifo->rptr;
            bs.DataLength = av_fifo_size(q->input_fifo);
            buffered = 1;
        } else {
            bs.Data       = avpkt->data;
            bs.DataLength = avpkt->size;
        }
        bs.MaxLength  = bs.DataLength;
        bs.TimeStamp  = avpkt->pts;
    }

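    /* Feed the bitstream to the SDK until it reports that it needs more input
     * data; MFX_WRN_DEVICE_BUSY only means the device is temporarily busy, so
     * the call is retried after a short sleep. */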
    while (1) {
        ret = get_surface(avctx, q, &insurf);
        if (ret < 0)
            return ret;
        do {
            ret = MFXVideoDECODE_DecodeFrameAsync(q->session, avpkt->size ? &bs : NULL,
                                                  insurf, &outsurf, &sync);
            if (ret != MFX_WRN_DEVICE_BUSY)
                break;
            av_usleep(500);
        } while (1);

        if (ret == MFX_WRN_VIDEO_PARAM_CHANGED) {
            /* TODO: handle sequence header changes here */
        }

        if (sync) {
            QSVFrame *out_frame = find_frame(q, outsurf);

            if (!out_frame) {
                av_log(avctx, AV_LOG_ERROR,
                       "The returned surface does not correspond to any frame\n");
                return AVERROR_BUG;
            }

            out_frame->queued = 1;
            av_fifo_generic_write(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
            av_fifo_generic_write(q->async_fifo, &sync,      sizeof(sync),      NULL);

            continue;
        }
        if (ret != MFX_ERR_MORE_SURFACE && ret < 0)
            break;
    }

    /* make sure we do not enter an infinite loop if the SDK
     * did not consume any data and did not return anything */
    if (!sync && !bs.DataOffset) {
        av_log(avctx, AV_LOG_WARNING, "A decode call did not consume any data\n");
        bs.DataOffset = avpkt->size;
    }

    if (buffered) {
        qsv_fifo_relocate(q->input_fifo, bs.DataOffset);
    } else if (bs.DataOffset != avpkt->size) {
        /* part of the packet was not consumed; store the remainder in the local buffer */
        av_fifo_generic_write(q->input_fifo, avpkt->data + bs.DataOffset,
                              avpkt->size - bs.DataOffset, NULL);
    }

    if (ret != MFX_ERR_MORE_DATA && ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error %d during QSV decoding.\n", ret);
        return ff_qsv_error(ret);
    }
    n_out_frames = av_fifo_size(q->async_fifo) / (sizeof(out_frame) + sizeof(sync));

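    /* Output path: once more than async_depth decode operations are queued (or
     * we are draining with an empty packet), pop the oldest surface/sync pair,
     * wait for the SDK to finish it and return the frame to the caller. */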
    if (n_out_frames > q->async_depth || (!avpkt->size && n_out_frames)) {
        AVFrame *src_frame;

        av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_read(q->async_fifo, &sync,      sizeof(sync),      NULL);
        out_frame->queued = 0;

        MFXVideoCORE_SyncOperation(q->session, sync, 60000);

        src_frame = out_frame->frame;

        ret = av_frame_ref(frame, src_frame);
        if (ret < 0)
            return ret;

        outsurf = out_frame->surface;

        frame->pkt_pts = frame->pts = outsurf->Data.TimeStamp;

        frame->repeat_pict =
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_TRIPLING ? 4 :
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_DOUBLING ? 2 :
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_REPEATED ? 1 : 0;
        frame->top_field_first =
            outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_TFF;
        frame->interlaced_frame =
            !(outsurf->Info.PicStruct & MFX_PICSTRUCT_PROGRESSIVE);

        *got_frame = 1;
    }

    return avpkt->size;
}

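/* Free all work frames and both fifos, close the MFX decoder and the internal
 * MFX session if one was opened. */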
int ff_qsv_decode_close(QSVContext *q)
{
    QSVFrame *cur = q->work_frames;

    while (cur) {
        q->work_frames = cur->next;
        av_frame_free(&cur->frame);
        av_freep(&cur);
        cur = q->work_frames;
    }

    av_fifo_free(q->async_fifo);
    q->async_fifo = NULL;

    av_fifo_free(q->input_fifo);
    q->input_fifo = NULL;

    MFXVideoDECODE_Close(q->session);
    q->session = NULL;

    ff_qsv_close_internal_session(&q->internal_qs);

    q->engine_ready = 0;

    return 0;
}