/*
 * Interface to the Android Stagefright library for
 * H/W accelerated H.264 decoding
 *
 * Copyright (C) 2011 Mohamed Naufal
 * Copyright (C) 2011 Martin Storsjö
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <binder/ProcessState.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
#include <utils/List.h>
#include <new>
#include <map>

extern "C" {
#include "avcodec.h"
#include "libavutil/imgutils.h"
#include "internal.h"
}

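// Vendor-specific OMX colour format reported by Qualcomm decoders; it is
// not part of the standard OpenMAX headers, so it is defined here.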
#define OMX_QCOM_COLOR_FormatYVU420SemiPlanar 0x7FA30C00

using namespace android;

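// A unit of work passed between the FFmpeg thread and the decode thread:
// either a compressed packet queued for the OMX decoder (in_queue) or a
// decoded picture ready to be returned to the caller (out_queue).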
struct Frame {
    status_t status;
    size_t size;
    int64_t time;
    int key;
    uint8_t *buffer;
    AVFrame *vframe;
};

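// Timestamps are not carried through the decoder directly; each packet is
// tagged with a running index (kKeyTime), which is mapped back to the
// original pts/reordered_opaque via ts_map when the frame comes out.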
struct TimeStamp {
    int64_t pts;
    int64_t reordered_opaque;
};

class CustomSource;

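// Private decoder context: the input/output frame queues shared with the
// decode thread, the OMX client/decoder handles, and bookkeeping for
// extradata, timestamps and thread state.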
struct StagefrightContext {
    AVCodecContext *avctx;
    AVBitStreamFilterContext *bsfc;
    uint8_t* orig_extradata;
    int orig_extradata_size;
    sp<MediaSource> *source;
    List<Frame*> *in_queue, *out_queue;
    pthread_mutex_t in_mutex, out_mutex;
    pthread_cond_t condition;
    pthread_t decode_thread_id;

    Frame *end_frame;
    bool source_done;
    volatile sig_atomic_t thread_started, thread_exited, stop_decode;

    AVFrame *prev_frame;
    std::map<int64_t, TimeStamp> *ts_map;
    int64_t frame_index;

    uint8_t *dummy_buf;
    int dummy_bufsize;

    OMXClient *client;
    sp<MediaSource> *decoder;
    const char *decoder_component;
};

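// MediaSource implementation handed to OMXCodec as its input: read() feeds
// the packets queued in in_queue to the Stagefright decoder.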
class CustomSource : public MediaSource {
public:
    CustomSource(AVCodecContext *avctx, sp<MetaData> meta) {
        s = (StagefrightContext*)avctx->priv_data;
        source_meta = meta;
        frame_size  = (avctx->width * avctx->height * 3) / 2;
        buf_group.add_buffer(new MediaBuffer(frame_size));
    }

    virtual sp<MetaData> getFormat() {
        return source_meta;
    }

    virtual status_t start(MetaData *params) {
        return OK;
    }

    virtual status_t stop() {
        return OK;
    }

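    // Called by the OMX decoder; blocks until a packet is available in
    // in_queue, then copies it into a MediaBuffer from buf_group.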
    virtual status_t read(MediaBuffer **buffer,
                          const MediaSource::ReadOptions *options) {
        Frame *frame;
        status_t ret;

        if (s->thread_exited)
            return ERROR_END_OF_STREAM;
        pthread_mutex_lock(&s->in_mutex);

        while (s->in_queue->empty())
            pthread_cond_wait(&s->condition, &s->in_mutex);

        frame = *s->in_queue->begin();
        ret = frame->status;

        if (ret == OK) {
            ret = buf_group.acquire_buffer(buffer);
            if (ret == OK) {
                memcpy((*buffer)->data(), frame->buffer, frame->size);
                (*buffer)->set_range(0, frame->size);
                (*buffer)->meta_data()->clear();
                (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame, frame->key);
                (*buffer)->meta_data()->setInt64(kKeyTime, frame->time);
            } else {
                av_log(s->avctx, AV_LOG_ERROR, "Failed to acquire MediaBuffer\n");
            }
            av_freep(&frame->buffer);
        }

        s->in_queue->erase(s->in_queue->begin());
        pthread_mutex_unlock(&s->in_mutex);

        av_freep(&frame);
        return ret;
    }

private:
    MediaBufferGroup buf_group;
    sp<MetaData> source_meta;
    StagefrightContext *s;
    int frame_size;
};

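// Decode thread: repeatedly pulls decoded buffers from the OMX decoder,
// copies them into freshly allocated AVFrames, restores the original
// timestamps via ts_map, and pushes the results onto out_queue (which is
// capped at 10 entries to limit memory use).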
void* decode_thread(void *arg)
{
    AVCodecContext *avctx = (AVCodecContext*)arg;
    StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
    const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(avctx->pix_fmt);
    Frame* frame;
    MediaBuffer *buffer;
    int32_t w, h;
    int decode_done = 0;
    int ret;
    int src_linesize[3];
    const uint8_t *src_data[3];
    int64_t out_frame_index = 0;

    do {
        buffer = NULL;
        frame = (Frame*)av_mallocz(sizeof(Frame));
        if (!frame) {
            frame         = s->end_frame;
            frame->status = AVERROR(ENOMEM);
            decode_done   = 1;
            s->end_frame  = NULL;
            goto push_frame;
        }
        frame->status = (*s->decoder)->read(&buffer);
        if (frame->status == OK) {
            sp<MetaData> outFormat = (*s->decoder)->getFormat();
            outFormat->findInt32(kKeyWidth , &w);
            outFormat->findInt32(kKeyHeight, &h);
            frame->vframe = av_frame_alloc();
            if (!frame->vframe) {
                frame->status = AVERROR(ENOMEM);
                decode_done   = 1;
                buffer->release();
                goto push_frame;
            }
            ret = ff_get_buffer(avctx, frame->vframe, AV_GET_BUFFER_FLAG_REF);
            if (ret < 0) {
                frame->status = ret;
                decode_done   = 1;
                buffer->release();
                goto push_frame;
            }

            // The OMX.SEC decoder doesn't signal the modified width/height
            if (s->decoder_component && !strncmp(s->decoder_component, "OMX.SEC", 7) &&
                (w & 15 || h & 15)) {
                if (((w + 15)&~15) * ((h + 15)&~15) * 3/2 == buffer->range_length()) {
                    w = (w + 15)&~15;
                    h = (h + 15)&~15;
                }
            }

            if (!avctx->width || !avctx->height || avctx->width > w || avctx->height > h) {
                avctx->width  = w;
                avctx->height = h;
            }

            src_linesize[0] = av_image_get_linesize(avctx->pix_fmt, w, 0);
            src_linesize[1] = av_image_get_linesize(avctx->pix_fmt, w, 1);
            src_linesize[2] = av_image_get_linesize(avctx->pix_fmt, w, 2);

            src_data[0] = (uint8_t*)buffer->data();
            src_data[1] = src_data[0] + src_linesize[0] * h;
            src_data[2] = src_data[1] + src_linesize[1] * -(-h>>pix_desc->log2_chroma_h);
            av_image_copy(frame->vframe->data, frame->vframe->linesize,
                          src_data, src_linesize,
                          avctx->pix_fmt, avctx->width, avctx->height);

            buffer->meta_data()->findInt64(kKeyTime, &out_frame_index);
            if (out_frame_index && s->ts_map->count(out_frame_index) > 0) {
                frame->vframe->pts = (*s->ts_map)[out_frame_index].pts;
                frame->vframe->reordered_opaque = (*s->ts_map)[out_frame_index].reordered_opaque;
                s->ts_map->erase(out_frame_index);
            }
            buffer->release();
        } else if (frame->status == INFO_FORMAT_CHANGED) {
            if (buffer)
                buffer->release();
            av_free(frame);
            continue;
        } else {
            decode_done = 1;
        }
push_frame:
        while (true) {
            pthread_mutex_lock(&s->out_mutex);
            if (s->out_queue->size() >= 10) {
                pthread_mutex_unlock(&s->out_mutex);
                usleep(10000);
                continue;
            }
            break;
        }
        s->out_queue->push_back(frame);
        pthread_mutex_unlock(&s->out_mutex);
    } while (!decode_done && !s->stop_decode);

    s->thread_exited = true;

    return 0;
}

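// Decoder init: requires avcC-style (MP4) extradata, sets up the
// h264_mp4toannexb bitstream filter, creates the CustomSource that feeds
// the OMX decoder, connects the OMX client and picks the output pixel
// format from the colour format reported by the decoder component.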
static av_cold int Stagefright_init(AVCodecContext *avctx)
{
    StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
    sp<MetaData> meta, outFormat;
    int32_t colorFormat = 0;
    int ret;

    if (!avctx->extradata || !avctx->extradata_size || avctx->extradata[0] != 1)
        return -1;

    s->avctx = avctx;
    s->bsfc  = av_bitstream_filter_init("h264_mp4toannexb");
    if (!s->bsfc) {
        av_log(avctx, AV_LOG_ERROR, "Cannot open the h264_mp4toannexb BSF!\n");
        return -1;
    }

    s->orig_extradata_size = avctx->extradata_size;
    s->orig_extradata = (uint8_t*) av_mallocz(avctx->extradata_size +
                                              AV_INPUT_BUFFER_PADDING_SIZE);
    if (!s->orig_extradata) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    memcpy(s->orig_extradata, avctx->extradata, avctx->extradata_size);

    meta = new MetaData;
    if (!meta) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
    meta->setInt32(kKeyWidth, avctx->width);
    meta->setInt32(kKeyHeight, avctx->height);
    meta->setData(kKeyAVCC, kTypeAVCC, avctx->extradata, avctx->extradata_size);

    android::ProcessState::self()->startThreadPool();

    s->source    = new sp<MediaSource>();
    *s->source   = new CustomSource(avctx, meta);
    s->in_queue  = new List<Frame*>;
    s->out_queue = new List<Frame*>;
    s->ts_map    = new std::map<int64_t, TimeStamp>;
    s->client    = new OMXClient;
    s->end_frame = (Frame*)av_mallocz(sizeof(Frame));
    if (s->source == NULL || !s->in_queue || !s->out_queue || !s->client ||
        !s->ts_map || !s->end_frame) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    if (s->client->connect() != OK) {
        av_log(avctx, AV_LOG_ERROR, "Cannot connect OMX client\n");
        ret = -1;
        goto fail;
    }

    s->decoder  = new sp<MediaSource>();
    *s->decoder = OMXCodec::Create(s->client->interface(), meta,
                                  false, *s->source, NULL,
                                  OMXCodec::kClientNeedsFramebuffer);
    if ((*s->decoder)->start() != OK) {
        av_log(avctx, AV_LOG_ERROR, "Cannot start decoder\n");
        ret = -1;
        s->client->disconnect();
        goto fail;
    }

    outFormat = (*s->decoder)->getFormat();
    outFormat->findInt32(kKeyColorFormat, &colorFormat);
    if (colorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar ||
        colorFormat == OMX_COLOR_FormatYUV420SemiPlanar)
        avctx->pix_fmt = AV_PIX_FMT_NV21;
    else if (colorFormat == OMX_COLOR_FormatYCbYCr)
        avctx->pix_fmt = AV_PIX_FMT_YUYV422;
    else if (colorFormat == OMX_COLOR_FormatCbYCrY)
        avctx->pix_fmt = AV_PIX_FMT_UYVY422;
    else
        avctx->pix_fmt = AV_PIX_FMT_YUV420P;

    outFormat->findCString(kKeyDecoderComponent, &s->decoder_component);
    if (s->decoder_component)
        s->decoder_component = av_strdup(s->decoder_component);

    pthread_mutex_init(&s->in_mutex, NULL);
    pthread_mutex_init(&s->out_mutex, NULL);
    pthread_cond_init(&s->condition, NULL);
    return 0;

fail:
    av_bitstream_filter_close(s->bsfc);
    av_freep(&s->orig_extradata);
    av_freep(&s->end_frame);
    delete s->in_queue;
    delete s->out_queue;
    delete s->ts_map;
    delete s->client;
    return ret;
}

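// Decode entry point: converts the incoming packet to Annex B with the
// bitstream filter, queues it for the decode thread (at most 10 pending
// packets), and returns the oldest decoded frame from out_queue, if any.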
static int Stagefright_decode_frame(AVCodecContext *avctx, void *data,
                                    int *got_frame, AVPacket *avpkt)
{
    StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
    Frame *frame;
    status_t status;
    int orig_size = avpkt->size;
    AVPacket pkt = *avpkt;
    AVFrame *ret_frame;

    if (!s->thread_started) {
        if (pthread_create(&s->decode_thread_id, NULL, &decode_thread, avctx))
            return AVERROR(ENOMEM);
        s->thread_started = true;
    }

    if (avpkt && avpkt->data) {
        av_bitstream_filter_filter(s->bsfc, avctx, NULL, &pkt.data, &pkt.size,
                                   avpkt->data, avpkt->size, avpkt->flags & AV_PKT_FLAG_KEY);
        avpkt = &pkt;
    }

    if (!s->source_done) {
        if (!s->dummy_buf) {
            s->dummy_buf = (uint8_t*)av_malloc(avpkt->size);
            if (!s->dummy_buf)
                return AVERROR(ENOMEM);
            s->dummy_bufsize = avpkt->size;
            memcpy(s->dummy_buf, avpkt->data, avpkt->size);
        }

        frame = (Frame*)av_mallocz(sizeof(Frame));
        if (avpkt->data) {
            frame->status  = OK;
            frame->size    = avpkt->size;
            frame->key     = avpkt->flags & AV_PKT_FLAG_KEY ? 1 : 0;
            frame->buffer  = (uint8_t*)av_malloc(avpkt->size);
            if (!frame->buffer) {
                av_freep(&frame);
                return AVERROR(ENOMEM);
            }
            uint8_t *ptr = avpkt->data;
            // The OMX.SEC decoder fails without this.
            if (avpkt->size == orig_size + avctx->extradata_size) {
                ptr += avctx->extradata_size;
                frame->size = orig_size;
            }
            memcpy(frame->buffer, ptr, orig_size);
            if (avpkt == &pkt)
                av_free(avpkt->data);

            frame->time = ++s->frame_index;
            (*s->ts_map)[s->frame_index].pts = avpkt->pts;
            (*s->ts_map)[s->frame_index].reordered_opaque = avctx->reordered_opaque;
        } else {
            frame->status  = ERROR_END_OF_STREAM;
            s->source_done = true;
        }

        while (true) {
            if (s->thread_exited) {
                s->source_done = true;
                break;
            }
            pthread_mutex_lock(&s->in_mutex);
            if (s->in_queue->size() >= 10) {
                pthread_mutex_unlock(&s->in_mutex);
                usleep(10000);
                continue;
            }
            s->in_queue->push_back(frame);
            pthread_cond_signal(&s->condition);
            pthread_mutex_unlock(&s->in_mutex);
            break;
        }
    }
    while (true) {
        pthread_mutex_lock(&s->out_mutex);
        if (!s->out_queue->empty()) break;
        pthread_mutex_unlock(&s->out_mutex);
        if (s->source_done) {
            usleep(10000);
            continue;
        } else {
            return orig_size;
        }
    }

    frame = *s->out_queue->begin();
    s->out_queue->erase(s->out_queue->begin());
    pthread_mutex_unlock(&s->out_mutex);

    ret_frame = frame->vframe;
    status  = frame->status;
    av_freep(&frame);

    if (status == ERROR_END_OF_STREAM)
        return 0;
    if (status != OK) {
        if (status == AVERROR(ENOMEM))
            return status;
        av_log(avctx, AV_LOG_ERROR, "Decode failed: %x\n", status);
        return -1;
    }

    if (s->prev_frame)
        av_frame_free(&s->prev_frame);
    s->prev_frame = ret_frame;

    *got_frame = 1;
    *(AVFrame*)data = *ret_frame;
    return orig_size;
}

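// Teardown: unblocks and joins the decode thread (feeding a dummy frame and
// an end-of-stream marker so OMX.SEC-based decoders actually terminate),
// drains both queues, restores the original MP4 extradata and releases all
// OMX/Stagefright resources.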
static av_cold int Stagefright_close(AVCodecContext *avctx)
{
    StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
    Frame *frame;

    if (s->thread_started) {
        if (!s->thread_exited) {
            s->stop_decode = 1;

            // Make sure decode_thread() doesn't get stuck
            pthread_mutex_lock(&s->out_mutex);
            while (!s->out_queue->empty()) {
                frame = *s->out_queue->begin();
                s->out_queue->erase(s->out_queue->begin());
                if (frame->vframe)
                    av_frame_free(&frame->vframe);
                av_freep(&frame);
            }
            pthread_mutex_unlock(&s->out_mutex);

            // Feed a dummy frame prior to signalling EOF.
            // This is required to terminate the decoder (OMX.SEC)
            // when only one frame is read during stream info detection.
            if (s->dummy_buf && (frame = (Frame*)av_mallocz(sizeof(Frame)))) {
                frame->status = OK;
                frame->size   = s->dummy_bufsize;
                frame->key    = 1;
                frame->buffer = s->dummy_buf;
                pthread_mutex_lock(&s->in_mutex);
                s->in_queue->push_back(frame);
                pthread_cond_signal(&s->condition);
                pthread_mutex_unlock(&s->in_mutex);
                s->dummy_buf = NULL;
            }

            pthread_mutex_lock(&s->in_mutex);
            s->end_frame->status = ERROR_END_OF_STREAM;
            s->in_queue->push_back(s->end_frame);
            pthread_cond_signal(&s->condition);
            pthread_mutex_unlock(&s->in_mutex);
            s->end_frame = NULL;
        }

        pthread_join(s->decode_thread_id, NULL);

        if (s->prev_frame)
            av_frame_free(&s->prev_frame);

        s->thread_started = false;
    }

    while (!s->in_queue->empty()) {
        frame = *s->in_queue->begin();
        s->in_queue->erase(s->in_queue->begin());
        if (frame->size)
            av_freep(&frame->buffer);
        av_freep(&frame);
    }

    while (!s->out_queue->empty()) {
        frame = *s->out_queue->begin();
        s->out_queue->erase(s->out_queue->begin());
        if (frame->vframe)
            av_frame_free(&frame->vframe);
        av_freep(&frame);
    }

    (*s->decoder)->stop();
    s->client->disconnect();

    if (s->decoder_component)
        av_freep(&s->decoder_component);
    av_freep(&s->dummy_buf);
    av_freep(&s->end_frame);

    // Reset the extradata back to the original mp4 format, so that
    // the next invocation (both when decoding and when called from
    // av_find_stream_info) get the original mp4 format extradata.
    av_freep(&avctx->extradata);
    avctx->extradata = s->orig_extradata;
    avctx->extradata_size = s->orig_extradata_size;

    delete s->in_queue;
    delete s->out_queue;
    delete s->ts_map;
    delete s->client;
    delete s->decoder;
    delete s->source;

    pthread_mutex_destroy(&s->in_mutex);
    pthread_mutex_destroy(&s->out_mutex);
    pthread_cond_destroy(&s->condition);
    av_bitstream_filter_close(s->bsfc);
    return 0;
}

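// Codec registration. Because this file is compiled as C++, the AVCodec
// fields are filled positionally rather than with designated initializers,
// hence the explicit NULL placeholders for unused callbacks.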
AVCodec ff_libstagefright_h264_decoder = {
    "libstagefright_h264",
    NULL_IF_CONFIG_SMALL("libstagefright H.264"),
    AVMEDIA_TYPE_VIDEO,
    AV_CODEC_ID_H264,
    AV_CODEC_CAP_DELAY,
    NULL, //supported_framerates
    NULL, //pix_fmts
    NULL, //supported_samplerates
    NULL, //sample_fmts
    NULL, //channel_layouts
    0,    //max_lowres
    NULL, //priv_class
    NULL, //profiles
    sizeof(StagefrightContext),
    NULL, //next
    NULL, //init_thread_copy
    NULL, //update_thread_context
    NULL, //defaults
    NULL, //init_static_data
    Stagefright_init,
    NULL, //encode
    NULL, //encode2
    Stagefright_decode_frame,
    Stagefright_close,
};