  1. /**************************************************************************
  2.  *
  3.  * Copyright 2013 Advanced Micro Devices, Inc.
  4.  * All Rights Reserved.
  5.  *
  6.  * Permission is hereby granted, free of charge, to any person obtaining a
  7.  * copy of this software and associated documentation files (the
  8.  * "Software"), to deal in the Software without restriction, including
  9.  * without limitation the rights to use, copy, modify, merge, publish,
  10.  * distribute, sub license, and/or sell copies of the Software, and to
  11.  * permit persons to whom the Software is furnished to do so, subject to
  12.  * the following conditions:
  13.  *
  14.  * The above copyright notice and this permission notice (including the
  15.  * next paragraph) shall be included in all copies or substantial portions
  16.  * of the Software.
  17.  *
  18.  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
  19.  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
  20.  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
  21.  * IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR
  22.  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
  23.  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
  24.  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
  25.  *
  26.  **************************************************************************/
  27.  
  28. /*
  29.  * Authors:
  30.  *      Christian König <christian.koenig@amd.com>
  31.  *
  32.  */
  33.  
  34.  
  35. #include <assert.h>
  36.  
  37. #include <OMX_Video.h>
  38.  
  39. /* bellagio defines a DEBUG macro that we don't want */
  40. #ifndef DEBUG
  41. #include <bellagio/omxcore.h>
  42. #undef DEBUG
  43. #else
  44. #include <bellagio/omxcore.h>
  45. #endif
  46.  
  47. #include <bellagio/omx_base_video_port.h>
  48.  
  49. #include "pipe/p_screen.h"
  50. #include "pipe/p_video_codec.h"
  51. #include "state_tracker/drm_driver.h"
  52. #include "util/u_memory.h"
  53. #include "vl/vl_video_buffer.h"
  54.  
  55. #include "entrypoint.h"
  56. #include "vid_enc.h"
  57.  
  58. struct encode_task {
  59.    struct list_head list;
  60.  
  61.    struct pipe_video_buffer *buf;
  62.    unsigned pic_order_cnt;
  63.    struct pipe_resource *bitstream;
  64.    void *feedback;
  65. };
  66.  
  67. struct input_buf_private {
  68.    struct list_head tasks;
  69.  
  70.    struct pipe_resource *resource;
  71.    struct pipe_transfer *transfer;
  72. };
  73.  
  74. struct output_buf_private {
  75.    struct pipe_resource *bitstream;
  76.    struct pipe_transfer *transfer;
  77. };
  78.  
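/*
 * A rough sketch of how these private structs interact (my reading of the
 * code below, not an authoritative description): each input frame gets an
 * encode_task that owns the source video buffer, the encoded bitstream
 * resource and the driver feedback handle.  Tasks circulate between the
 * component lists and the per-buffer lists roughly like this:
 *
 *   free_tasks --EncodeFrame--> b_frames / inp->tasks --BufferEncoded--> used_tasks
 *   used_tasks are handed back through inp->tasks and recycled into
 *   free_tasks on the next EncodeFrame call.
 */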
  79. static OMX_ERRORTYPE vid_enc_Constructor(OMX_COMPONENTTYPE *comp, OMX_STRING name);
  80. static OMX_ERRORTYPE vid_enc_Destructor(OMX_COMPONENTTYPE *comp);
  81. static OMX_ERRORTYPE vid_enc_SetParameter(OMX_HANDLETYPE handle, OMX_INDEXTYPE idx, OMX_PTR param);
  82. static OMX_ERRORTYPE vid_enc_GetParameter(OMX_HANDLETYPE handle, OMX_INDEXTYPE idx, OMX_PTR param);
  83. static OMX_ERRORTYPE vid_enc_SetConfig(OMX_HANDLETYPE handle, OMX_INDEXTYPE idx, OMX_PTR config);
  84. static OMX_ERRORTYPE vid_enc_GetConfig(OMX_HANDLETYPE handle, OMX_INDEXTYPE idx, OMX_PTR config);
  85. static OMX_ERRORTYPE vid_enc_MessageHandler(OMX_COMPONENTTYPE *comp, internalRequestMessageType *msg);
  86. static OMX_ERRORTYPE vid_enc_AllocateInBuffer(omx_base_PortType *port, OMX_INOUT OMX_BUFFERHEADERTYPE **buf,
  87.                                               OMX_IN OMX_U32 idx, OMX_IN OMX_PTR private, OMX_IN OMX_U32 size);
  88. static OMX_ERRORTYPE vid_enc_UseInBuffer(omx_base_PortType *port, OMX_BUFFERHEADERTYPE **buf, OMX_U32 idx,
  89.                                          OMX_PTR private, OMX_U32 size, OMX_U8 *mem);
  90. static OMX_ERRORTYPE vid_enc_FreeInBuffer(omx_base_PortType *port, OMX_U32 idx, OMX_BUFFERHEADERTYPE *buf);
  91. static OMX_ERRORTYPE vid_enc_EncodeFrame(omx_base_PortType *port, OMX_BUFFERHEADERTYPE *buf);
  92. static OMX_ERRORTYPE vid_enc_AllocateOutBuffer(omx_base_PortType *port, OMX_INOUT OMX_BUFFERHEADERTYPE **buf,
  93.                                                OMX_IN OMX_U32 idx, OMX_IN OMX_PTR private, OMX_IN OMX_U32 size);
  94. static OMX_ERRORTYPE vid_enc_FreeOutBuffer(omx_base_PortType *port, OMX_U32 idx, OMX_BUFFERHEADERTYPE *buf);
  95. static void vid_enc_BufferEncoded(OMX_COMPONENTTYPE *comp, OMX_BUFFERHEADERTYPE* input, OMX_BUFFERHEADERTYPE* output);
  96.  
  97. static void enc_ReleaseTasks(struct list_head *head);
  98.  
  99. OMX_ERRORTYPE vid_enc_LoaderComponent(stLoaderComponentType *comp)
  100. {
  101.    comp->componentVersion.s.nVersionMajor = 0;
  102.    comp->componentVersion.s.nVersionMinor = 0;
  103.    comp->componentVersion.s.nRevision = 0;
  104.    comp->componentVersion.s.nStep = 1;
  105.    comp->name_specific_length = 1;
  106.    comp->constructor = vid_enc_Constructor;
  107.  
  108.    comp->name = CALLOC(1, OMX_MAX_STRINGNAME_SIZE);
  109.    if (!comp->name)
  110.       return OMX_ErrorInsufficientResources;
  111.  
  112.    comp->name_specific = CALLOC(1, sizeof(char *));
  113.    if (!comp->name_specific)
  114.       goto error_arrays;
  115.  
  116.    comp->role_specific = CALLOC(1, sizeof(char *));
  117.    if (!comp->role_specific)
  118.       goto error_arrays;
  119.  
  120.    comp->name_specific[0] = CALLOC(1, OMX_MAX_STRINGNAME_SIZE);
  121.    if (comp->name_specific[0] == NULL)
  122.       goto error_specific;
  123.  
  124.    comp->role_specific[0] = CALLOC(1, OMX_MAX_STRINGNAME_SIZE);
  125.    if (comp->role_specific[0] == NULL)
  126.       goto error_specific;
  127.  
  128.    strcpy(comp->name, OMX_VID_ENC_BASE_NAME);
  129.    strcpy(comp->name_specific[0], OMX_VID_ENC_AVC_NAME);
  130.    strcpy(comp->role_specific[0], OMX_VID_ENC_AVC_ROLE);
  131.  
  132.    return OMX_ErrorNone;
  133.  
  134. error_specific:
  135.    FREE(comp->role_specific[0]);
  136.    FREE(comp->name_specific[0]);
  137.  
  138. error_arrays:
  139.    FREE(comp->role_specific);
  140.    FREE(comp->name_specific);
  141.  
  142.    FREE(comp->name);
  143.  
  144.    return OMX_ErrorInsufficientResources;
  145. }
  146.  
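/*
 * For context, a minimal sketch of how an OpenMAX IL client would reach this
 * component through the Bellagio core.  The literal component and role
 * strings come from OMX_VID_ENC_AVC_NAME / OMX_VID_ENC_AVC_ROLE in vid_enc.h
 * and are not shown here, so the name below is only a placeholder:
 *
 *   OMX_HANDLETYPE enc;
 *   OMX_Init();
 *   OMX_GetHandle(&enc, "OMX.<vendor>.video_encoder.avc", app_data, &callbacks);
 *   // ...set OMX_IndexParamPortDefinition, allocate buffers, feed frames with
 *   // OMX_EmptyThisBuffer() and collect output with OMX_FillThisBuffer()...
 *   OMX_FreeHandle(enc);
 *   OMX_Deinit();
 */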
  147. static OMX_ERRORTYPE vid_enc_Constructor(OMX_COMPONENTTYPE *comp, OMX_STRING name)
  148. {
  149.    vid_enc_PrivateType *priv;
  150.    omx_base_video_PortType *port;
  151.    struct pipe_screen *screen;
  152.    OMX_ERRORTYPE r;
  153.    int i;
  154.  
  155.    assert(!comp->pComponentPrivate);
  156.  
  157.    priv = comp->pComponentPrivate = CALLOC(1, sizeof(vid_enc_PrivateType));
  158.    if (!priv)
  159.       return OMX_ErrorInsufficientResources;
  160.  
  161.    r = omx_base_filter_Constructor(comp, name);
  162.    if (r)
  163.       return r;
  164.  
  165.    priv->BufferMgmtCallback = vid_enc_BufferEncoded;
  166.    priv->messageHandler = vid_enc_MessageHandler;
  167.    priv->destructor = vid_enc_Destructor;
  168.  
  169.    comp->SetParameter = vid_enc_SetParameter;
  170.    comp->GetParameter = vid_enc_GetParameter;
  171.    comp->GetConfig = vid_enc_GetConfig;
  172.    comp->SetConfig = vid_enc_SetConfig;
  173.  
  174.    priv->screen = omx_get_screen();
  175.    if (!priv->screen)
  176.       return OMX_ErrorInsufficientResources;
  177.  
  178.    screen = priv->screen->pscreen;
  179.    if (!screen->get_video_param(screen, PIPE_VIDEO_PROFILE_MPEG4_AVC_HIGH,
  180.                                 PIPE_VIDEO_ENTRYPOINT_ENCODE, PIPE_VIDEO_CAP_SUPPORTED))
  181.       return OMX_ErrorBadParameter;
  182.  
  183.    priv->s_pipe = screen->context_create(screen, priv->screen);
  184.    if (!priv->s_pipe)
  185.       return OMX_ErrorInsufficientResources;
  186.  
  187.    if (!vl_compositor_init(&priv->compositor, priv->s_pipe)) {
  188.       priv->s_pipe->destroy(priv->s_pipe);
  189.       priv->s_pipe = NULL;
  190.       return OMX_ErrorInsufficientResources;
  191.    }
  192.  
  193.    if (!vl_compositor_init_state(&priv->cstate, priv->s_pipe)) {
  194.       vl_compositor_cleanup(&priv->compositor);
  195.       priv->s_pipe->destroy(priv->s_pipe);
  196.       priv->s_pipe = NULL;
  197.       return OMX_ErrorInsufficientResources;
  198.    }
  199.  
  200.    priv->t_pipe = screen->context_create(screen, priv->screen);
  201.    if (!priv->t_pipe)
  202.       return OMX_ErrorInsufficientResources;
  203.  
  204.    priv->sPortTypesParam[OMX_PortDomainVideo].nStartPortNumber = 0;
  205.    priv->sPortTypesParam[OMX_PortDomainVideo].nPorts = 2;
  206.    priv->ports = CALLOC(2, sizeof(omx_base_PortType *));
  207.    if (!priv->ports)
  208.       return OMX_ErrorInsufficientResources;
  209.  
  210.    for (i = 0; i < 2; ++i) {
  211.       priv->ports[i] = CALLOC(1, sizeof(omx_base_video_PortType));
  212.       if (!priv->ports[i])
  213.          return OMX_ErrorInsufficientResources;
  214.  
  215.       base_video_port_Constructor(comp, &priv->ports[i], i, i == 0);
  216.    }
  217.  
  218.    port = (omx_base_video_PortType *)priv->ports[OMX_BASE_FILTER_INPUTPORT_INDEX];
  219.    port->sPortParam.format.video.nFrameWidth = 176;
  220.    port->sPortParam.format.video.nFrameHeight = 144;
  221.    port->sPortParam.format.video.eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
  222.    port->sVideoParam.eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
  223.    port->sPortParam.nBufferCountActual = 8;
  224.    port->sPortParam.nBufferCountMin = 4;
  225.  
  226.    port->Port_SendBufferFunction = vid_enc_EncodeFrame;
  227.    port->Port_AllocateBuffer = vid_enc_AllocateInBuffer;
  228.    port->Port_UseBuffer = vid_enc_UseInBuffer;
  229.    port->Port_FreeBuffer = vid_enc_FreeInBuffer;
  230.  
  231.    port = (omx_base_video_PortType *)priv->ports[OMX_BASE_FILTER_OUTPUTPORT_INDEX];
  232.    strcpy(port->sPortParam.format.video.cMIMEType, "video/H264");
  233.    port->sPortParam.format.video.nFrameWidth = 176;
  234.    port->sPortParam.format.video.nFrameHeight = 144;
  235.    port->sPortParam.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
  236.    port->sVideoParam.eCompressionFormat = OMX_VIDEO_CodingAVC;
  237.  
  238.    port->Port_AllocateBuffer = vid_enc_AllocateOutBuffer;
  239.    port->Port_FreeBuffer = vid_enc_FreeOutBuffer;
  240.  
  241.    priv->bitrate.eControlRate = OMX_Video_ControlRateDisable;
  242.    priv->bitrate.nTargetBitrate = 0;
  243.  
  244.    priv->quant.nQpI = OMX_VID_ENC_QUANT_I_FRAMES_DEFAULT;
  245.    priv->quant.nQpP = OMX_VID_ENC_QUANT_P_FRAMES_DEFAULT;
  246.    priv->quant.nQpB = OMX_VID_ENC_QUANT_B_FRAMES_DEFAULT;
  247.  
  248.    priv->profile_level.eProfile = OMX_VIDEO_AVCProfileBaseline;
  249.    priv->profile_level.eLevel = OMX_VIDEO_AVCLevel42;
  250.  
  251.    priv->force_pic_type.IntraRefreshVOP = OMX_FALSE;
  252.    priv->frame_num = 0;
  253.    priv->pic_order_cnt = 0;
  254.    priv->restricted_b_frames = debug_get_bool_option("OMX_USE_RESTRICTED_B_FRAMES", FALSE);
  255.  
  256.    priv->scale.xWidth = OMX_VID_ENC_SCALING_WIDTH_DEFAULT;
  257.    priv->scale.xHeight = OMX_VID_ENC_SCALING_WIDTH_DEFAULT;
  258.  
  259.    LIST_INITHEAD(&priv->free_tasks);
  260.    LIST_INITHEAD(&priv->used_tasks);
  261.    LIST_INITHEAD(&priv->b_frames);
  262.  
  263.    return OMX_ErrorNone;
  264. }
  265.  
  266. static OMX_ERRORTYPE vid_enc_Destructor(OMX_COMPONENTTYPE *comp)
  267. {
  268.    vid_enc_PrivateType* priv = comp->pComponentPrivate;
  269.    int i;
  270.  
  271.    enc_ReleaseTasks(&priv->free_tasks);
  272.    enc_ReleaseTasks(&priv->used_tasks);
  273.    enc_ReleaseTasks(&priv->b_frames);
  274.  
  275.    if (priv->ports) {
  276.       for (i = 0; i < priv->sPortTypesParam[OMX_PortDomainVideo].nPorts; ++i) {
  277.          if(priv->ports[i])
  278.             priv->ports[i]->PortDestructor(priv->ports[i]);
  279.       }
  280.       FREE(priv->ports);
  281.       priv->ports = NULL;
  282.    }
  283.  
  284.    for (i = 0; i < OMX_VID_ENC_NUM_SCALING_BUFFERS; ++i)
  285.       if (priv->scale_buffer[i])
  286.          priv->scale_buffer[i]->destroy(priv->scale_buffer[i]);
  287.  
  288.    if (priv->s_pipe) {
  289.       vl_compositor_cleanup_state(&priv->cstate);
  290.       vl_compositor_cleanup(&priv->compositor);
  291.       priv->s_pipe->destroy(priv->s_pipe);
  292.    }
  293.  
  294.    if (priv->t_pipe)
  295.       priv->t_pipe->destroy(priv->t_pipe);
  296.  
  297.    if (priv->screen)
  298.       omx_put_screen();
  299.  
  300.    return omx_workaround_Destructor(comp);
  301. }
  302.  
  303. static OMX_ERRORTYPE enc_AllocateBackTexture(omx_base_PortType *port,
  304.                                              struct pipe_resource **resource,
  305.                                              struct pipe_transfer **transfer,
  306.                                              OMX_U8 **map)
  307. {
  308.    OMX_COMPONENTTYPE* comp = port->standCompContainer;
  309.    vid_enc_PrivateType *priv = comp->pComponentPrivate;
  310.    struct pipe_resource buf_templ;
  311.    struct pipe_box box = {};
  312.    OMX_U8 *ptr;
  313.  
  314.    memset(&buf_templ, 0, sizeof buf_templ);
  315.    buf_templ.target = PIPE_TEXTURE_2D;
  316.    buf_templ.format = PIPE_FORMAT_I8_UNORM;
  317.    buf_templ.bind = PIPE_BIND_LINEAR;
  318.    buf_templ.usage = PIPE_USAGE_STAGING;
  319.    buf_templ.flags = 0;
  320.    buf_templ.width0 = port->sPortParam.format.video.nFrameWidth;
  321.    buf_templ.height0 = port->sPortParam.format.video.nFrameHeight * 3 / 2;
  322.    buf_templ.depth0 = 1;
  323.    buf_templ.array_size = 1;
  324.  
  325.    *resource = priv->s_pipe->screen->resource_create(priv->s_pipe->screen, &buf_templ);
  326.    if (!*resource)
  327.       return OMX_ErrorInsufficientResources;
  328.  
  329.    box.width = (*resource)->width0;
  330.    box.height = (*resource)->height0;
  331.    box.depth = (*resource)->depth0;
  332.    ptr = priv->s_pipe->transfer_map(priv->s_pipe, *resource, 0, PIPE_TRANSFER_WRITE, &box, transfer);
  333.    if (map)
  334.       *map = ptr;
  335.  
  336.    return OMX_ErrorNone;
  337. }
  338.  
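/*
 * Note on the back-texture size above: height0 is nFrameHeight * 3 / 2
 * because the staging texture holds an NV12 image as raw bytes, i.e. a
 * full-height luma plane followed by a half-height interleaved CbCr plane.
 * As an illustrative example using the 176x144 defaults set in the
 * constructor: 176 * 144 luma bytes plus 176 * 72 chroma bytes fit in a
 * 176 x 216 I8 texture.
 */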
  339. static OMX_ERRORTYPE vid_enc_SetParameter(OMX_HANDLETYPE handle, OMX_INDEXTYPE idx, OMX_PTR param)
  340. {
  341.    OMX_COMPONENTTYPE *comp = handle;
  342.    vid_enc_PrivateType *priv = comp->pComponentPrivate;
  343.    OMX_ERRORTYPE r;
  344.  
  345.    if (!param)
  346.       return OMX_ErrorBadParameter;
  347.  
  348.    switch(idx) {
  349.    case OMX_IndexParamPortDefinition: {
  350.       OMX_PARAM_PORTDEFINITIONTYPE *def = param;
  351.  
  352.       r = omx_base_component_SetParameter(handle, idx, param);
  353.       if (r)
  354.          return r;
  355.  
  356.       if (def->nPortIndex == OMX_BASE_FILTER_INPUTPORT_INDEX) {
  357.          omx_base_video_PortType *port;
  358.          unsigned framesize;
  359.          struct pipe_resource *resource;
  360.          struct pipe_transfer *transfer;
  361.  
  362.          port = (omx_base_video_PortType *)priv->ports[OMX_BASE_FILTER_INPUTPORT_INDEX];
  363.          enc_AllocateBackTexture(priv->ports[OMX_BASE_FILTER_INPUTPORT_INDEX],
  364.                                  &resource, &transfer, NULL);
  365.          port->sPortParam.format.video.nStride = transfer->stride;
  366.          pipe_transfer_unmap(priv->s_pipe, transfer);
  367.          pipe_resource_reference(&resource, NULL);
  368.  
  369.          framesize = port->sPortParam.format.video.nStride *
  370.                      port->sPortParam.format.video.nFrameHeight;
  371.          port->sPortParam.format.video.nSliceHeight = port->sPortParam.format.video.nFrameHeight;
  372.          port->sPortParam.nBufferSize = framesize * 3 / 2;
  373.  
  374.          port = (omx_base_video_PortType *)priv->ports[OMX_BASE_FILTER_OUTPUTPORT_INDEX];
  375.          port->sPortParam.nBufferSize = framesize * 512 / (16*16);
  376.      
  377.          priv->frame_rate = def->format.video.xFramerate;
  378.  
  379.          priv->callbacks->EventHandler(comp, priv->callbackData, OMX_EventPortSettingsChanged,
  380.                                        OMX_BASE_FILTER_OUTPUTPORT_INDEX, 0, NULL);
  381.       }
  382.       break;
  383.    }
  384.    case OMX_IndexParamStandardComponentRole: {
  385.       OMX_PARAM_COMPONENTROLETYPE *role = param;
  386.  
  387.       r = checkHeader(param, sizeof(OMX_PARAM_COMPONENTROLETYPE));
  388.       if (r)
  389.          return r;
  390.  
  391.       if (strcmp((char *)role->cRole, OMX_VID_ENC_AVC_ROLE)) {
  392.          return OMX_ErrorBadParameter;
  393.       }
  394.  
  395.       break;
  396.    }
  397.    case OMX_IndexParamVideoBitrate: {
  398.       OMX_VIDEO_PARAM_BITRATETYPE *bitrate = param;
  399.  
  400.       r = checkHeader(param, sizeof(OMX_VIDEO_PARAM_BITRATETYPE));
  401.       if (r)
  402.          return r;
  403.  
  404.       priv->bitrate = *bitrate;
  405.  
  406.       break;
  407.    }
  408.    case OMX_IndexParamVideoQuantization: {
  409.       OMX_VIDEO_PARAM_QUANTIZATIONTYPE *quant = param;
  410.  
  411.       r = checkHeader(param, sizeof(OMX_VIDEO_PARAM_QUANTIZATIONTYPE));
  412.       if (r)
  413.          return r;
  414.  
  415.       priv->quant = *quant;
  416.  
  417.       break;
  418.    }
  419.    case OMX_IndexParamVideoProfileLevelCurrent: {
  420.       OMX_VIDEO_PARAM_PROFILELEVELTYPE *profile_level = param;
  421.  
  422.       r = checkHeader(param, sizeof(OMX_VIDEO_PARAM_PROFILELEVELTYPE));
  423.       if (r)
  424.          return r;
  425.  
  426.       priv->profile_level = *profile_level;
  427.  
  428.       break;
  429.    }
  430.    default:
  431.       return omx_base_component_SetParameter(handle, idx, param);
  432.    }
  433.    return OMX_ErrorNone;
  434. }
  435.  
  436. static OMX_ERRORTYPE vid_enc_GetParameter(OMX_HANDLETYPE handle, OMX_INDEXTYPE idx, OMX_PTR param)
  437. {
  438.    OMX_COMPONENTTYPE *comp = handle;
  439.    vid_enc_PrivateType *priv = comp->pComponentPrivate;
  440.    OMX_ERRORTYPE r;
  441.  
  442.    if (!param)
  443.       return OMX_ErrorBadParameter;
  444.  
  445.    switch(idx) {
  446.    case OMX_IndexParamStandardComponentRole: {
  447.       OMX_PARAM_COMPONENTROLETYPE *role = param;
  448.  
  449.       r = checkHeader(param, sizeof(OMX_PARAM_COMPONENTROLETYPE));
  450.       if (r)
  451.          return r;
  452.  
  453.       strcpy((char *)role->cRole, OMX_VID_ENC_AVC_ROLE);
  454.       break;
  455.    }
  456.    case OMX_IndexParamVideoInit:
  457.       r = checkHeader(param, sizeof(OMX_PORT_PARAM_TYPE));
  458.       if (r)
  459.          return r;
  460.  
  461.       memcpy(param, &priv->sPortTypesParam[OMX_PortDomainVideo], sizeof(OMX_PORT_PARAM_TYPE));
  462.       break;
  463.  
  464.    case OMX_IndexParamVideoPortFormat: {
  465.       OMX_VIDEO_PARAM_PORTFORMATTYPE *format = param;
  466.       omx_base_video_PortType *port;
  467.  
  468.       r = checkHeader(param, sizeof(OMX_VIDEO_PARAM_PORTFORMATTYPE));
  469.       if (r)
  470.          return r;
  471.  
  472.       if (format->nPortIndex > 1)
  473.          return OMX_ErrorBadPortIndex;
  474.  
  475.       port = (omx_base_video_PortType *)priv->ports[format->nPortIndex];
  476.       memcpy(format, &port->sVideoParam, sizeof(OMX_VIDEO_PARAM_PORTFORMATTYPE));
  477.       break;
  478.    }
  479.    case OMX_IndexParamVideoBitrate: {
  480.       OMX_VIDEO_PARAM_BITRATETYPE *bitrate = param;
  481.  
  482.       r = checkHeader(param, sizeof(OMX_VIDEO_PARAM_BITRATETYPE));
  483.       if (r)
  484.          return r;
  485.  
  486.       bitrate->eControlRate = priv->bitrate.eControlRate;
  487.       bitrate->nTargetBitrate = priv->bitrate.nTargetBitrate;
  488.  
  489.       break;
  490.    }
  491.    case OMX_IndexParamVideoQuantization: {
  492.       OMX_VIDEO_PARAM_QUANTIZATIONTYPE *quant = param;
  493.  
  494.       r = checkHeader(param, sizeof(OMX_VIDEO_PARAM_QUANTIZATIONTYPE));
  495.       if (r)
  496.          return r;
  497.  
  498.       quant->nQpI = priv->quant.nQpI;
  499.       quant->nQpP = priv->quant.nQpP;
  500.       quant->nQpB = priv->quant.nQpB;
  501.  
  502.       break;
  503.    }
  504.    case OMX_IndexParamVideoProfileLevelCurrent: {
  505.       OMX_VIDEO_PARAM_PROFILELEVELTYPE *profile_level = param;
  506.  
  507.       r = checkHeader(param, sizeof(OMX_VIDEO_PARAM_PROFILELEVELTYPE));
  508.       if (r)
  509.          return r;
  510.  
  511.       profile_level->eProfile = priv->profile_level.eProfile;
  512.       profile_level->eLevel = priv->profile_level.eLevel;
  513.  
  514.       break;
  515.    }
  516.    default:
  517.       return omx_base_component_GetParameter(handle, idx, param);
  518.    }
  519.    return OMX_ErrorNone;
  520. }
  521.  
  522. static OMX_ERRORTYPE vid_enc_SetConfig(OMX_HANDLETYPE handle, OMX_INDEXTYPE idx, OMX_PTR config)
  523. {
  524.    OMX_COMPONENTTYPE *comp = handle;
  525.    vid_enc_PrivateType *priv = comp->pComponentPrivate;
  526.    OMX_ERRORTYPE r;
  527.    int i;
  528.  
  529.    if (!config)
  530.       return OMX_ErrorBadParameter;
  531.                          
  532.    switch(idx) {
  533.    case OMX_IndexConfigVideoIntraVOPRefresh: {
  534.       OMX_CONFIG_INTRAREFRESHVOPTYPE *type = config;
  535.  
  536.       r = checkHeader(config, sizeof(OMX_CONFIG_INTRAREFRESHVOPTYPE));
  537.       if (r)
  538.          return r;
  539.      
  540.       priv->force_pic_type = *type;
  541.      
  542.       break;
  543.    }
  544.    case OMX_IndexConfigCommonScale: {
  545.       OMX_CONFIG_SCALEFACTORTYPE *scale = config;
  546.  
  547.       r = checkHeader(config, sizeof(OMX_CONFIG_SCALEFACTORTYPE));
  548.       if (r)
  549.          return r;
  550.  
  551.       if (scale->xWidth < 176 || scale->xHeight < 144)
  552.          return OMX_ErrorBadParameter;
  553.  
  554.       for (i = 0; i < OMX_VID_ENC_NUM_SCALING_BUFFERS; ++i) {
  555.          if (priv->scale_buffer[i]) {
  556.             priv->scale_buffer[i]->destroy(priv->scale_buffer[i]);
  557.             priv->scale_buffer[i] = NULL;
  558.          }
  559.       }
  560.  
  561.       priv->scale = *scale;
  562.       if (priv->scale.xWidth != 0xffffffff && priv->scale.xHeight != 0xffffffff) {
  563.          struct pipe_video_buffer templat = {};
  564.  
  565.          templat.buffer_format = PIPE_FORMAT_NV12;
  566.          templat.chroma_format = PIPE_VIDEO_CHROMA_FORMAT_420;
  567.          templat.width = priv->scale.xWidth;
  568.          templat.height = priv->scale.xHeight;
  569.          templat.interlaced = false;
  570.          for (i = 0; i < OMX_VID_ENC_NUM_SCALING_BUFFERS; ++i) {
  571.             priv->scale_buffer[i] = priv->s_pipe->create_video_buffer(priv->s_pipe, &templat);
  572.             if (!priv->scale_buffer[i])
  573.                return OMX_ErrorInsufficientResources;
  574.          }
  575.       }
  576.  
  577.       break;
  578.    }
  579.    default:
  580.       return omx_base_component_SetConfig(handle, idx, config);
  581.    }
  582.  
  583.    return OMX_ErrorNone;
  584. }
  585.  
  586. static OMX_ERRORTYPE vid_enc_GetConfig(OMX_HANDLETYPE handle, OMX_INDEXTYPE idx, OMX_PTR config)
  587. {
  588.    OMX_COMPONENTTYPE *comp = handle;
  589.    vid_enc_PrivateType *priv = comp->pComponentPrivate;
  590.    OMX_ERRORTYPE r;
  591.  
  592.    if (!config)
  593.       return OMX_ErrorBadParameter;
  594.  
  595.    switch(idx) {
  596.    case OMX_IndexConfigCommonScale: {
  597.       OMX_CONFIG_SCALEFACTORTYPE *scale = config;
  598.  
  599.       r = checkHeader(config, sizeof(OMX_CONFIG_SCALEFACTORTYPE));
  600.       if (r)
  601.          return r;
  602.  
  603.       scale->xWidth = priv->scale.xWidth;
  604.       scale->xHeight = priv->scale.xHeight;
  605.  
  606.       break;
  607.    }
  608.    default:
  609.       return omx_base_component_GetConfig(handle, idx, config);
  610.    }
  611.    
  612.    return OMX_ErrorNone;
  613. }
  614.  
  615. static enum pipe_video_profile enc_TranslateOMXProfileToPipe(unsigned omx_profile)
  616. {
  617.    switch (omx_profile) {
  618.    case OMX_VIDEO_AVCProfileBaseline:
  619.       return PIPE_VIDEO_PROFILE_MPEG4_AVC_BASELINE;
  620.    case OMX_VIDEO_AVCProfileMain:
  621.       return PIPE_VIDEO_PROFILE_MPEG4_AVC_MAIN;
  622.    case OMX_VIDEO_AVCProfileExtended:
  623.       return PIPE_VIDEO_PROFILE_MPEG4_AVC_EXTENDED;
  624.    case OMX_VIDEO_AVCProfileHigh:
  625.       return PIPE_VIDEO_PROFILE_MPEG4_AVC_HIGH;
  626.    case OMX_VIDEO_AVCProfileHigh10:
  627.       return PIPE_VIDEO_PROFILE_MPEG4_AVC_HIGH10;
  628.    case OMX_VIDEO_AVCProfileHigh422:
  629.       return PIPE_VIDEO_PROFILE_MPEG4_AVC_HIGH422;
  630.    case OMX_VIDEO_AVCProfileHigh444:
  631.       return PIPE_VIDEO_PROFILE_MPEG4_AVC_HIGH444;
  632.    default:
  633.       return PIPE_VIDEO_PROFILE_UNKNOWN;
  634.    }
  635. }
  636.  
  637. static unsigned enc_TranslateOMXLevelToPipe(unsigned omx_level)
  638. {
  639.    switch (omx_level) {
  640.    case OMX_VIDEO_AVCLevel1:
  641.    case OMX_VIDEO_AVCLevel1b:
  642.       return 10;
  643.    case OMX_VIDEO_AVCLevel11:
  644.       return 11;
  645.    case OMX_VIDEO_AVCLevel12:
  646.       return 12;
  647.    case OMX_VIDEO_AVCLevel13:
  648.       return 13;
  649.    case OMX_VIDEO_AVCLevel2:
  650.       return 20;
  651.    case OMX_VIDEO_AVCLevel21:
  652.       return 21;
  653.    case OMX_VIDEO_AVCLevel22:
  654.       return 22;
  655.    case OMX_VIDEO_AVCLevel3:
  656.       return 30;
  657.    case OMX_VIDEO_AVCLevel31:
  658.       return 31;
  659.    case OMX_VIDEO_AVCLevel32:
  660.       return 32;
  661.    case OMX_VIDEO_AVCLevel4:
  662.       return 40;
  663.    case OMX_VIDEO_AVCLevel41:
  664.       return 41;
  665.    default:
  666.    case OMX_VIDEO_AVCLevel42:
  667.       return 42;
  668.    case OMX_VIDEO_AVCLevel5:
  669.       return 50;
  670.    case OMX_VIDEO_AVCLevel51:
  671.       return 51;
  672.    }
  673. }
  674.  
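/*
 * The switch above maps the OMX_VIDEO_AVCLEVELTYPE enumerants onto the
 * numeric H.264 level times ten that the pipe interface expects, e.g.
 * OMX_VIDEO_AVCLevel31 -> 31 (level 3.1).  Unknown values fall back to 42,
 * matching the OMX_VIDEO_AVCLevel42 default chosen in the constructor.
 */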
  675. static OMX_ERRORTYPE vid_enc_MessageHandler(OMX_COMPONENTTYPE* comp, internalRequestMessageType *msg)
  676. {
  677.    vid_enc_PrivateType* priv = comp->pComponentPrivate;
  678.  
  679.    if (msg->messageType == OMX_CommandStateSet) {
  680.       if ((msg->messageParam == OMX_StateIdle ) && (priv->state == OMX_StateLoaded)) {
  681.  
  682.          struct pipe_video_codec templat = {};
  683.          omx_base_video_PortType *port;
  684.  
  685.          port = (omx_base_video_PortType *)priv->ports[OMX_BASE_FILTER_INPUTPORT_INDEX];
  686.  
  687.          templat.profile = enc_TranslateOMXProfileToPipe(priv->profile_level.eProfile);
  688.          templat.level = enc_TranslateOMXLevelToPipe(priv->profile_level.eLevel);
  689.          templat.entrypoint = PIPE_VIDEO_ENTRYPOINT_ENCODE;
  690.          templat.chroma_format = PIPE_VIDEO_CHROMA_FORMAT_420;
  691.          templat.width = priv->scale_buffer[priv->current_scale_buffer] ?
  692.                             priv->scale.xWidth : port->sPortParam.format.video.nFrameWidth;
  693.          templat.height = priv->scale_buffer[priv->current_scale_buffer] ?
  694.                             priv->scale.xHeight : port->sPortParam.format.video.nFrameHeight;
  695.          templat.max_references = (templat.profile == PIPE_VIDEO_PROFILE_MPEG4_AVC_BASELINE) ?
  696.                             1 : OMX_VID_ENC_P_PERIOD_DEFAULT;
  697.  
  698.          priv->codec = priv->s_pipe->create_video_codec(priv->s_pipe, &templat);
  699.  
  700.       } else if ((msg->messageParam == OMX_StateLoaded) && (priv->state == OMX_StateIdle)) {
  701.          if (priv->codec) {
  702.             priv->codec->destroy(priv->codec);
  703.             priv->codec = NULL;
  704.          }
  705.       }
  706.    }
  707.  
  708.    return omx_base_component_MessageHandler(comp, msg);
  709. }
  710.  
  711. static OMX_ERRORTYPE vid_enc_AllocateInBuffer(omx_base_PortType *port, OMX_INOUT OMX_BUFFERHEADERTYPE **buf,
  712.                                               OMX_IN OMX_U32 idx, OMX_IN OMX_PTR private, OMX_IN OMX_U32 size)
  713. {
  714.    struct input_buf_private *inp;
  715.    OMX_ERRORTYPE r;
  716.  
  717.    r = base_port_AllocateBuffer(port, buf, idx, private, size);
  718.    if (r)
  719.       return r;
  720.  
  721.    inp = (*buf)->pInputPortPrivate = CALLOC_STRUCT(input_buf_private);
  722.    if (!inp) {
  723.       base_port_FreeBuffer(port, idx, *buf);
  724.       return OMX_ErrorInsufficientResources;
  725.    }
  726.  
  727.    LIST_INITHEAD(&inp->tasks);
  728.  
  729.    FREE((*buf)->pBuffer);
  730.    r = enc_AllocateBackTexture(port, &inp->resource, &inp->transfer, &(*buf)->pBuffer);
  731.    if (r) {
  732.       FREE(inp);
  733.       base_port_FreeBuffer(port, idx, *buf);
  734.       return r;
  735.    }
  736.  
  737.    return OMX_ErrorNone;
  738. }
  739.  
  740. static OMX_ERRORTYPE vid_enc_UseInBuffer(omx_base_PortType *port, OMX_BUFFERHEADERTYPE **buf, OMX_U32 idx,
  741.                                          OMX_PTR private, OMX_U32 size, OMX_U8 *mem)
  742. {
  743.    struct input_buf_private *inp;
  744.    OMX_ERRORTYPE r;
  745.  
  746.    r = base_port_UseBuffer(port, buf, idx, private, size, mem);
  747.    if (r)
  748.       return r;
  749.  
  750.    inp = (*buf)->pInputPortPrivate = CALLOC_STRUCT(input_buf_private);
  751.    if (!inp) {
  752.       base_port_FreeBuffer(port, idx, *buf);
  753.       return OMX_ErrorInsufficientResources;
  754.    }
  755.  
  756.    LIST_INITHEAD(&inp->tasks);
  757.  
  758.    return OMX_ErrorNone;
  759. }
  760.  
  761. static OMX_ERRORTYPE vid_enc_FreeInBuffer(omx_base_PortType *port, OMX_U32 idx, OMX_BUFFERHEADERTYPE *buf)
  762. {
  763.    OMX_COMPONENTTYPE* comp = port->standCompContainer;
  764.    vid_enc_PrivateType *priv = comp->pComponentPrivate;
  765.    struct input_buf_private *inp = buf->pInputPortPrivate;
  766.  
  767.    if (inp) {
  768.       enc_ReleaseTasks(&inp->tasks);
  769.       if (inp->transfer)
  770.          pipe_transfer_unmap(priv->s_pipe, inp->transfer);
  771.       pipe_resource_reference(&inp->resource, NULL);
  772.       FREE(inp);
  773.    }
  774.    buf->pBuffer = NULL;
  775.  
  776.    return base_port_FreeBuffer(port, idx, buf);
  777. }
  778.  
  779. static OMX_ERRORTYPE vid_enc_AllocateOutBuffer(omx_base_PortType *port, OMX_INOUT OMX_BUFFERHEADERTYPE **buf,
  780.                                                OMX_IN OMX_U32 idx, OMX_IN OMX_PTR private, OMX_IN OMX_U32 size)
  781. {
  782.    OMX_ERRORTYPE r;
  783.  
  784.    r = base_port_AllocateBuffer(port, buf, idx, private, size);
  785.    if (r)
  786.       return r;
  787.  
  788.    FREE((*buf)->pBuffer);
  789.    (*buf)->pBuffer = NULL;
  790.    (*buf)->pOutputPortPrivate = CALLOC(1, sizeof(struct output_buf_private));
  791.    if (!(*buf)->pOutputPortPrivate) {
  792.       base_port_FreeBuffer(port, idx, *buf);
  793.       return OMX_ErrorInsufficientResources;
  794.    }
  795.  
  796.    return OMX_ErrorNone;
  797. }
  798.  
  799. static OMX_ERRORTYPE vid_enc_FreeOutBuffer(omx_base_PortType *port, OMX_U32 idx, OMX_BUFFERHEADERTYPE *buf)
  800. {
  801.    OMX_COMPONENTTYPE* comp = port->standCompContainer;
  802.    vid_enc_PrivateType *priv = comp->pComponentPrivate;
  803.  
  804.    if (buf->pOutputPortPrivate) {
  805.       struct output_buf_private *outp = buf->pOutputPortPrivate;
  806.       if (outp->transfer)
  807.          pipe_transfer_unmap(priv->t_pipe, outp->transfer);
  808.       pipe_resource_reference(&outp->bitstream, NULL);
  809.       FREE(outp);
  810.       buf->pOutputPortPrivate = NULL;
  811.    }
  812.    buf->pBuffer = NULL;
  813.  
  814.    return base_port_FreeBuffer(port, idx, buf);
  815. }
  816.  
  817. static struct encode_task *enc_NeedTask(omx_base_PortType *port)
  818. {
  819.    OMX_VIDEO_PORTDEFINITIONTYPE *def = &port->sPortParam.format.video;
  820.    OMX_COMPONENTTYPE* comp = port->standCompContainer;
  821.    vid_enc_PrivateType *priv = comp->pComponentPrivate;
  822.  
  823.    struct pipe_video_buffer templat = {};
  824.    struct encode_task *task;
  825.  
  826.    if (!LIST_IS_EMPTY(&priv->free_tasks)) {
  827.       task = LIST_ENTRY(struct encode_task, priv->free_tasks.next, list);
  828.       LIST_DEL(&task->list);
  829.       return task;
  830.    }
  831.  
  832.    /* allocate a new one */
  833.    task = CALLOC_STRUCT(encode_task);
  834.    if (!task)
  835.       return NULL;
  836.  
  837.    templat.buffer_format = PIPE_FORMAT_NV12;
  838.    templat.chroma_format = PIPE_VIDEO_CHROMA_FORMAT_420;
  839.    templat.width = def->nFrameWidth;
  840.    templat.height = def->nFrameHeight;
  841.    templat.interlaced = false;
  842.  
  843.    task->buf = priv->s_pipe->create_video_buffer(priv->s_pipe, &templat);
  844.    if (!task->buf) {
  845.       FREE(task);
  846.       return NULL;
  847.    }
  848.  
  849.    return task;
  850. }
  851.  
  852. static void enc_MoveTasks(struct list_head *from, struct list_head *to)
  853. {
  854.    to->prev->next = from->next;
  855.    from->next->prev = to->prev;
  856.    from->prev->next = to;
  857.    to->prev = from->prev;
  858.    LIST_INITHEAD(from);
  859. }
  860.  
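/*
 * enc_MoveTasks() appends the whole "from" list to the tail of "to" in O(1)
 * by rewiring the four boundary pointers, then re-initializes "from" as an
 * empty list -- conceptually a splice-to-tail followed by a list reset.
 */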
  861. static void enc_ReleaseTasks(struct list_head *head)
  862. {
  863.    struct encode_task *i, *next;
  864.  
  865.    LIST_FOR_EACH_ENTRY_SAFE(i, next, head, list) {
  866.       pipe_resource_reference(&i->bitstream, NULL);
  867.       i->buf->destroy(i->buf);
  868.       FREE(i);
  869.    }
  870. }
  871.  
  872. static OMX_ERRORTYPE enc_LoadImage(omx_base_PortType *port, OMX_BUFFERHEADERTYPE *buf,
  873.                                    struct pipe_video_buffer *vbuf)
  874. {
  875.    OMX_COMPONENTTYPE* comp = port->standCompContainer;
  876.    vid_enc_PrivateType *priv = comp->pComponentPrivate;
  877.    OMX_VIDEO_PORTDEFINITIONTYPE *def = &port->sPortParam.format.video;
  878.    struct pipe_box box = {};
  879.    struct input_buf_private *inp = buf->pInputPortPrivate;
  880.  
  881.    if (!inp->resource) {
  882.       struct pipe_sampler_view **views;
  883.       void *ptr;
  884.  
  885.       views = vbuf->get_sampler_view_planes(vbuf);
  886.       if (!views)
  887.          return OMX_ErrorInsufficientResources;
  888.  
  889.       ptr = buf->pBuffer;
  890.       box.width = def->nFrameWidth;
  891.       box.height = def->nFrameHeight;
  892.       box.depth = 1;
  893.       priv->s_pipe->transfer_inline_write(priv->s_pipe, views[0]->texture, 0,
  894.                                           PIPE_TRANSFER_WRITE, &box,
  895.                                           ptr, def->nStride, 0);
  896.       ptr = ((uint8_t*)buf->pBuffer) + (def->nStride * box.height);
  897.       box.width = def->nFrameWidth / 2;
  898.       box.height = def->nFrameHeight / 2;
  899.       box.depth = 1;
  900.       priv->s_pipe->transfer_inline_write(priv->s_pipe, views[1]->texture, 0,
  901.                                           PIPE_TRANSFER_WRITE, &box,
  902.                                           ptr, def->nStride, 0);
  903.    } else {
  904.       struct pipe_blit_info blit;
  905.       struct vl_video_buffer *dst_buf = (struct vl_video_buffer *)vbuf;
  906.  
  907.       pipe_transfer_unmap(priv->s_pipe, inp->transfer);
  908.  
  909.       box.width = def->nFrameWidth;
  910.       box.height = def->nFrameHeight;
  911.       box.depth = 1;
  912.  
  913.       priv->s_pipe->resource_copy_region(priv->s_pipe,
  914.                                          dst_buf->resources[0],
  915.                                          0, 0, 0, 0, inp->resource, 0, &box);
  916.  
  917.       memset(&blit, 0, sizeof(blit));
  918.       blit.src.resource = inp->resource;
  919.       blit.src.format = inp->resource->format;
  920.  
  921.       blit.src.box.x = 0;
  922.       blit.src.box.y = def->nFrameHeight;
  923.       blit.src.box.width = def->nFrameWidth;
  924.       blit.src.box.height = def->nFrameHeight / 2;
  925.       blit.src.box.depth = 1;
  926.  
  927.       blit.dst.resource = dst_buf->resources[1];
  928.       blit.dst.format = blit.dst.resource->format;
  929.  
  930.       blit.dst.box.width = def->nFrameWidth / 2;
  931.       blit.dst.box.height = def->nFrameHeight / 2;
  932.       blit.dst.box.depth = 1;
  933.       blit.filter = PIPE_TEX_FILTER_NEAREST;
  934.  
  935.       blit.mask = PIPE_MASK_G;
  936.       priv->s_pipe->blit(priv->s_pipe, &blit);
  937.  
  938.       blit.src.box.x = 1;
  939.       blit.mask = PIPE_MASK_R;
  940.       priv->s_pipe->blit(priv->s_pipe, &blit);
  941.       priv->s_pipe->flush(priv->s_pipe, NULL, 0);
  942.  
  943.       box.width = inp->resource->width0;
  944.       box.height = inp->resource->height0;
  945.       box.depth = inp->resource->depth0;
  946.       buf->pBuffer = priv->s_pipe->transfer_map(priv->s_pipe, inp->resource, 0,
  947.                                                 PIPE_TRANSFER_WRITE, &box,
  948.                                                 &inp->transfer);
  949.    }
  950.  
  951.    return OMX_ErrorNone;
  952. }
  953.  
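/*
 * Rough summary of the two upload paths above (my reading, not authoritative):
 * if the input buffer is plain user memory, the NV12 frame is copied plane by
 * plane with transfer_inline_write() -- the luma plane first, then the
 * half-resolution interleaved CbCr plane located nStride * nFrameHeight bytes
 * into the buffer.  If the buffer is backed by the staging texture from
 * enc_AllocateBackTexture(), the luma rows are copied with
 * resource_copy_region() and the interleaved CbCr rows are de-interleaved
 * into the two chroma channels with two blits: PIPE_MASK_G takes one byte of
 * each pair, then src.box.x is shifted by one and PIPE_MASK_R takes the other.
 */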
  954. static void enc_ScaleInput(omx_base_PortType *port, struct pipe_video_buffer **vbuf, unsigned *size)
  955. {
  956.    OMX_COMPONENTTYPE* comp = port->standCompContainer;
  957.    vid_enc_PrivateType *priv = comp->pComponentPrivate;
  958.    OMX_VIDEO_PORTDEFINITIONTYPE *def = &port->sPortParam.format.video;
  959.    struct pipe_video_buffer *src_buf = *vbuf;
  960.    struct vl_compositor *compositor = &priv->compositor;
  961.    struct vl_compositor_state *s = &priv->cstate;
  962.    struct pipe_sampler_view **views;
  963.    struct pipe_surface **dst_surface;
  964.    unsigned i;
  965.  
  966.    if (!priv->scale_buffer[priv->current_scale_buffer])
  967.       return;
  968.  
  969.    views = src_buf->get_sampler_view_planes(src_buf);
  970.    dst_surface = priv->scale_buffer[priv->current_scale_buffer]->get_surfaces
  971.                  (priv->scale_buffer[priv->current_scale_buffer]);
  972.    vl_compositor_clear_layers(s);
  973.  
  974.    for (i = 0; i < VL_MAX_SURFACES; ++i) {
  975.       struct u_rect src_rect;
  976.       if (!views[i] || !dst_surface[i])
  977.          continue;
  978.       src_rect.x0 = 0;
  979.       src_rect.y0 = 0;
  980.       src_rect.x1 = def->nFrameWidth;
  981.       src_rect.y1 = def->nFrameHeight;
  982.       if (i > 0) {
  983.          src_rect.x1 /= 2;
  984.          src_rect.y1 /= 2;
  985.       }
  986.       vl_compositor_set_rgba_layer(s, compositor, 0, views[i], &src_rect, NULL, NULL);
  987.       vl_compositor_render(s, compositor, dst_surface[i], NULL, false);
  988.    }
  989.    *size  = priv->scale.xWidth * priv->scale.xHeight * 2;
  990.    *vbuf = priv->scale_buffer[priv->current_scale_buffer++];
  991.    priv->current_scale_buffer %= OMX_VID_ENC_NUM_SCALING_BUFFERS;
  992. }
  993.  
  994. static void enc_ControlPicture(omx_base_PortType *port, struct pipe_h264_enc_picture_desc *picture)
  995. {
  996.    OMX_COMPONENTTYPE* comp = port->standCompContainer;
  997.    vid_enc_PrivateType *priv = comp->pComponentPrivate;
  998.    struct pipe_h264_enc_rate_control *rate_ctrl = &picture->rate_ctrl;
  999.  
  1000.    switch (priv->bitrate.eControlRate) {
  1001.    case OMX_Video_ControlRateVariable:
  1002.       rate_ctrl->rate_ctrl_method = PIPE_H264_ENC_RATE_CONTROL_METHOD_VARIABLE;
  1003.       break;
  1004.    case OMX_Video_ControlRateConstant:
  1005.       rate_ctrl->rate_ctrl_method = PIPE_H264_ENC_RATE_CONTROL_METHOD_CONSTANT;
  1006.       break;
  1007.    case OMX_Video_ControlRateVariableSkipFrames:
  1008.       rate_ctrl->rate_ctrl_method = PIPE_H264_ENC_RATE_CONTROL_METHOD_VARIABLE_SKIP;
  1009.       break;
  1010.    case OMX_Video_ControlRateConstantSkipFrames:
  1011.       rate_ctrl->rate_ctrl_method = PIPE_H264_ENC_RATE_CONTROL_METHOD_CONSTANT_SKIP;
  1012.       break;
  1013.    default:
  1014.       rate_ctrl->rate_ctrl_method = PIPE_H264_ENC_RATE_CONTROL_METHOD_DISABLE;
  1015.       break;
  1016.    }
  1017.      
  1018.    rate_ctrl->frame_rate_den = OMX_VID_ENC_CONTROL_FRAME_RATE_DEN_DEFAULT;
  1019.    rate_ctrl->frame_rate_num = ((priv->frame_rate) >> 16) * rate_ctrl->frame_rate_den;
  1020.  
  1021.    if (rate_ctrl->rate_ctrl_method != PIPE_H264_ENC_RATE_CONTROL_METHOD_DISABLE) {
  1022.       if (priv->bitrate.nTargetBitrate < OMX_VID_ENC_BITRATE_MIN)
  1023.          rate_ctrl->target_bitrate = OMX_VID_ENC_BITRATE_MIN;
  1024.       else if (priv->bitrate.nTargetBitrate < OMX_VID_ENC_BITRATE_MAX)
  1025.          rate_ctrl->target_bitrate = priv->bitrate.nTargetBitrate;
  1026.       else
  1027.          rate_ctrl->target_bitrate = OMX_VID_ENC_BITRATE_MAX;
  1028.       rate_ctrl->peak_bitrate = rate_ctrl->target_bitrate;    
  1029.       if (rate_ctrl->target_bitrate < OMX_VID_ENC_BITRATE_MEDIAN)
  1030.          rate_ctrl->vbv_buffer_size = MIN2((rate_ctrl->target_bitrate * 2.75), OMX_VID_ENC_BITRATE_MEDIAN);
  1031.       else
  1032.          rate_ctrl->vbv_buffer_size = rate_ctrl->target_bitrate;
  1033.  
  1034.       if (rate_ctrl->frame_rate_num) {
  1035.          unsigned long long t = rate_ctrl->target_bitrate;
  1036.          t *= rate_ctrl->frame_rate_den;
  1037.          rate_ctrl->target_bits_picture = t / rate_ctrl->frame_rate_num;
  1038.       } else {
  1039.          rate_ctrl->target_bits_picture = rate_ctrl->target_bitrate;
  1040.       }
  1041.       rate_ctrl->peak_bits_picture_integer = rate_ctrl->target_bits_picture;
  1042.       rate_ctrl->peak_bits_picture_fraction = 0;
  1043.    }
  1044.    
  1045.    picture->quant_i_frames = priv->quant.nQpI;
  1046.    picture->quant_p_frames = priv->quant.nQpP;
  1047.    picture->quant_b_frames = priv->quant.nQpB;
  1048.  
  1049.    picture->frame_num = priv->frame_num;
  1050.    picture->ref_idx_l0 = priv->ref_idx_l0;
  1051.    picture->ref_idx_l1 = priv->ref_idx_l1;
  1052. }
  1053.  
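/*
 * Worked example for the rate-control math above (illustrative numbers only):
 * xFramerate is a Q16 fixed-point value, so (priv->frame_rate >> 16) recovers
 * the integer fps.  With a 5,000,000 bit/s target at 25 fps,
 *
 *   target_bits_picture = target_bitrate * frame_rate_den / frame_rate_num
 *                       = 5,000,000 / 25 = 200,000 bits per picture.
 *
 * The VBV buffer is MIN2(2.75 * target_bitrate, OMX_VID_ENC_BITRATE_MEDIAN)
 * while the target stays below OMX_VID_ENC_BITRATE_MEDIAN, otherwise it simply
 * equals the target bitrate.
 */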
  1054. static void enc_HandleTask(omx_base_PortType *port, struct encode_task *task,
  1055.                            enum pipe_h264_enc_picture_type picture_type)
  1056. {
  1057.    OMX_COMPONENTTYPE* comp = port->standCompContainer;
  1058.    vid_enc_PrivateType *priv = comp->pComponentPrivate;
  1059.    unsigned size = priv->ports[OMX_BASE_FILTER_OUTPUTPORT_INDEX]->sPortParam.nBufferSize;
  1060.    struct pipe_video_buffer *vbuf = task->buf;
  1061.    struct pipe_h264_enc_picture_desc picture = {};
  1062.  
  1063.    /* -------------- scale input image --------- */
  1064.    enc_ScaleInput(port, &vbuf, &size);
  1065.    priv->s_pipe->flush(priv->s_pipe, NULL, 0);
  1066.  
  1067.    /* -------------- allocate output buffer --------- */
  1068.    task->bitstream = pipe_buffer_create(priv->s_pipe->screen, PIPE_BIND_VERTEX_BUFFER,
  1069.                                         PIPE_USAGE_STREAM, size);
  1070.  
  1071.    picture.picture_type = picture_type;
  1072.    picture.pic_order_cnt = task->pic_order_cnt;
  1073.    if (priv->restricted_b_frames && picture_type == PIPE_H264_ENC_PICTURE_TYPE_B)
  1074.       picture.not_referenced = true;
  1075.    enc_ControlPicture(port, &picture);
  1076.  
  1077.    /* -------------- encode frame --------- */
  1078.    priv->codec->begin_frame(priv->codec, vbuf, &picture.base);
  1079.    priv->codec->encode_bitstream(priv->codec, vbuf, task->bitstream, &task->feedback);
  1080.    priv->codec->end_frame(priv->codec, vbuf, &picture.base);
  1081. }
  1082.  
  1083. static void enc_ClearBframes(omx_base_PortType *port, struct input_buf_private *inp)
  1084. {
  1085.    OMX_COMPONENTTYPE* comp = port->standCompContainer;
  1086.    vid_enc_PrivateType *priv = comp->pComponentPrivate;
  1087.    struct encode_task *task;
  1088.  
  1089.    if (LIST_IS_EMPTY(&priv->b_frames))
  1090.       return;
  1091.  
  1092.    task = LIST_ENTRY(struct encode_task, priv->b_frames.prev, list);
  1093.    LIST_DEL(&task->list);
  1094.  
  1095.    /* promote the last frame to a P frame */
  1096.    priv->ref_idx_l0 = priv->ref_idx_l1;
  1097.    enc_HandleTask(port, task, PIPE_H264_ENC_PICTURE_TYPE_P);
  1098.    LIST_ADDTAIL(&task->list, &inp->tasks);
  1099.    priv->ref_idx_l1 = priv->frame_num++;
  1100.  
  1101.    /* handle B frames */
  1102.    LIST_FOR_EACH_ENTRY(task, &priv->b_frames, list) {
  1103.       enc_HandleTask(port, task, PIPE_H264_ENC_PICTURE_TYPE_B);
  1104.       if (!priv->restricted_b_frames)
  1105.          priv->ref_idx_l0 = priv->frame_num;
  1106.       priv->frame_num++;
  1107.    }
  1108.  
  1109.    enc_MoveTasks(&priv->b_frames, &inp->tasks);
  1110. }
  1111.  
  1112. static OMX_ERRORTYPE vid_enc_EncodeFrame(omx_base_PortType *port, OMX_BUFFERHEADERTYPE *buf)
  1113. {
  1114.    OMX_COMPONENTTYPE* comp = port->standCompContainer;
  1115.    vid_enc_PrivateType *priv = comp->pComponentPrivate;
  1116.    struct input_buf_private *inp = buf->pInputPortPrivate;
  1117.    enum pipe_h264_enc_picture_type picture_type;
  1118.    struct encode_task *task;
  1119.    OMX_ERRORTYPE err;
  1120.  
  1121.    enc_MoveTasks(&inp->tasks, &priv->free_tasks);
  1122.    task = enc_NeedTask(port);
  1123.    if (!task)
  1124.       return OMX_ErrorInsufficientResources;
  1125.  
  1126.    if (buf->nFilledLen == 0) {
  1127.       if (buf->nFlags & OMX_BUFFERFLAG_EOS) {
  1128.          buf->nFilledLen = buf->nAllocLen;
  1129.          enc_ClearBframes(port, inp);
  1130.       }
  1131.       return base_port_SendBufferFunction(port, buf);
  1132.    }
  1133.  
  1134.    if (buf->pOutputPortPrivate) {
  1135.       struct pipe_video_buffer *vbuf = buf->pOutputPortPrivate;
  1136.       buf->pOutputPortPrivate = task->buf;
  1137.       task->buf = vbuf;
  1138.    } else {
  1139.       /* ------- load input image into video buffer ---- */
  1140.       err = enc_LoadImage(port, buf, task->buf);
  1141.       if (err != OMX_ErrorNone)
  1142.          return err;
  1143.    }
  1144.  
  1145.    /* -------------- determine picture type --------- */
  1146.    if (!(priv->pic_order_cnt % OMX_VID_ENC_IDR_PERIOD_DEFAULT) ||
  1147.        priv->force_pic_type.IntraRefreshVOP) {
  1148.       enc_ClearBframes(port, inp);
  1149.       picture_type = PIPE_H264_ENC_PICTURE_TYPE_IDR;
  1150.       priv->force_pic_type.IntraRefreshVOP = OMX_FALSE;
  1151.       priv->frame_num = 0;
  1152.    } else if (priv->codec->profile == PIPE_VIDEO_PROFILE_MPEG4_AVC_BASELINE ||
  1153.               !(priv->pic_order_cnt % OMX_VID_ENC_P_PERIOD_DEFAULT) ||
  1154.               (buf->nFlags & OMX_BUFFERFLAG_EOS)) {
  1155.       picture_type = PIPE_H264_ENC_PICTURE_TYPE_P;
  1156.    } else {
  1157.       picture_type = PIPE_H264_ENC_PICTURE_TYPE_B;
  1158.    }
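   /*
    * Rough GOP pattern implied by the checks above, assuming the period
    * defaults from vid_enc.h (not shown here): an IDR frame every
    * OMX_VID_ENC_IDR_PERIOD_DEFAULT pictures or on a forced intra refresh,
    * a P frame every OMX_VID_ENC_P_PERIOD_DEFAULT pictures or at end of
    * stream, and B frames in between.  Baseline profile always takes the
    * P branch, so it never queues B frames.
    */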
  1159.    
  1160.    task->pic_order_cnt = priv->pic_order_cnt++;
  1161.  
  1162.    if (picture_type == PIPE_H264_ENC_PICTURE_TYPE_B) {
  1163.       /* put frame at the tail of the queue */
  1164.       LIST_ADDTAIL(&task->list, &priv->b_frames);
  1165.    } else {
  1166.       /* handle I or P frame */
  1167.       priv->ref_idx_l0 = priv->ref_idx_l1;
  1168.       enc_HandleTask(port, task, picture_type);
  1169.       LIST_ADDTAIL(&task->list, &inp->tasks);
  1170.       priv->ref_idx_l1 = priv->frame_num++;
  1171.  
  1172.       /* handle B frames */
  1173.       LIST_FOR_EACH_ENTRY(task, &priv->b_frames, list) {
  1174.          enc_HandleTask(port, task, PIPE_H264_ENC_PICTURE_TYPE_B);
  1175.          if (!priv->restricted_b_frames)
  1176.             priv->ref_idx_l0 = priv->frame_num;
  1177.          priv->frame_num++;
  1178.       }
  1179.  
  1180.       enc_MoveTasks(&priv->b_frames, &inp->tasks);
  1181.    }
  1182.  
  1183.    if (LIST_IS_EMPTY(&inp->tasks))
  1184.       return port->ReturnBufferFunction(port, buf);
  1185.    else
  1186.       return base_port_SendBufferFunction(port, buf);
  1187. }
  1188.  
  1189. static void vid_enc_BufferEncoded(OMX_COMPONENTTYPE *comp, OMX_BUFFERHEADERTYPE* input, OMX_BUFFERHEADERTYPE* output)
  1190. {
  1191.    vid_enc_PrivateType *priv = comp->pComponentPrivate;
  1192.    struct output_buf_private *outp = output->pOutputPortPrivate;
  1193.    struct input_buf_private *inp = input->pInputPortPrivate;
  1194.    struct encode_task *task;
  1195.    struct pipe_box box = {};
  1196.    unsigned size;
  1197.  
  1198.    if (!inp || LIST_IS_EMPTY(&inp->tasks)) {
  1199.       input->nFilledLen = 0; /* mark buffer as empty */
  1200.       enc_MoveTasks(&priv->used_tasks, &inp->tasks);
  1201.       return;
  1202.    }
  1203.  
  1204.    task = LIST_ENTRY(struct encode_task, inp->tasks.next, list);
  1205.    LIST_DEL(&task->list);
  1206.    LIST_ADDTAIL(&task->list, &priv->used_tasks);
  1207.  
  1208.    if (!task->bitstream)
  1209.       return;
  1210.  
  1211.    /* ------------- map result buffer ----------------- */
  1212.  
  1213.    if (outp->transfer)
  1214.       pipe_transfer_unmap(priv->t_pipe, outp->transfer);
  1215.  
  1216.    pipe_resource_reference(&outp->bitstream, task->bitstream);
  1217.    pipe_resource_reference(&task->bitstream, NULL);
  1218.  
  1219.    box.width = outp->bitstream->width0;
  1220.    box.height = outp->bitstream->height0;
  1221.    box.depth = outp->bitstream->depth0;
  1222.  
  1223.    output->pBuffer = priv->t_pipe->transfer_map(priv->t_pipe, outp->bitstream, 0,
  1224.                                                 PIPE_TRANSFER_READ_WRITE,
  1225.                                                 &box, &outp->transfer);
  1226.  
  1227.    /* ------------- get size of result ----------------- */
  1228.  
  1229.    priv->codec->get_feedback(priv->codec, task->feedback, &size);
  1230.  
  1231.    output->nOffset = 0;
  1232.    output->nFilledLen = size; /* mark buffer as full */
  1233. }
  1234.