Subversion Repositories Kolibri OS

Rev

Go to most recent revision | Blame | Last modification | View Log | RSS feed

  1. /*
  2.  * GXF muxer.
  3.  * Copyright (c) 2006 SmartJog S.A., Baptiste Coudurier <baptiste dot coudurier at smartjog dot com>
  4.  *
  5.  * This file is part of FFmpeg.
  6.  *
  7.  * FFmpeg is free software; you can redistribute it and/or
  8.  * modify it under the terms of the GNU Lesser General Public
  9.  * License as published by the Free Software Foundation; either
  10.  * version 2.1 of the License, or (at your option) any later version.
  11.  *
  12.  * FFmpeg is distributed in the hope that it will be useful,
  13.  * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14.  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  15.  * Lesser General Public License for more details.
  16.  *
  17.  * You should have received a copy of the GNU Lesser General Public
  18.  * License along with FFmpeg; if not, write to the Free Software
  19.  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  20.  */
  21.  
  22. #include "libavutil/avassert.h"
  23. #include "libavutil/intfloat.h"
  24. #include "libavutil/opt.h"
  25. #include "libavutil/mathematics.h"
  26. #include "libavutil/timecode.h"
  27. #include "avformat.h"
  28. #include "internal.h"
  29. #include "gxf.h"
  30. #include "audiointerleave.h"
  31.  
/* fixed payload size of a GXF audio media packet, in bytes */
#define GXF_AUDIO_PACKET_SIZE 65536

/* pack color/drop flags and an hh:mm:ss:ff timecode into 32 bits */
#define GXF_TIMECODE(c, d, h, m, s, f) \
    ((c) << 30 | (d) << 29 | (h) << 24 | (m) << 16 | (s) << 8 | (f))
  36.  
/* parsed start timecode, packed with GXF_TIMECODE() when written */
typedef struct GXFTimecode{
    int hh;    ///< hours
    int mm;    ///< minutes
    int ss;    ///< seconds
    int ff;    ///< frame number (doubled when the video has two fields per frame)
    int color; ///< color-frame flag (bit 30 of the packed timecode)
    int drop;  ///< drop-frame flag (bit 29 of the packed timecode)
} GXFTimecode;
  45.  
/* per-stream muxing state, stored in AVStream.priv_data */
typedef struct GXFStreamContext {
    AudioInterleaveContext aic;
    uint32_t track_type;   ///< GXF track type (e.g. 2 audio, 3 timecode, 4 MPEG-2)
    uint32_t sample_size;  ///< 16 for audio; for video the codec bit rate is stored here
    uint32_t sample_rate;  ///< audio sample rate, or 50/60 fields per second for video
    uint16_t media_type;   ///< GXF media type tag from gxf_media_types
    uint16_t media_info;   ///< two-char media label, e.g. 'A''0'
    int frame_rate_index;
    int lines_index;
    int fields;
    int iframes;           ///< I-frame count accumulated while muxing
    int pframes;           ///< P-frame count accumulated while muxing
    int bframes;           ///< B-frame count accumulated while muxing
    int p_per_gop;
    int b_per_i_or_p; ///< number of B frames per I frame or P frame
    int first_gop_closed;  ///< -1 unknown, else closed_gop flag of the first GOP
    unsigned order;   ///< interleaving order
} GXFStreamContext;
  64.  
/* muxer state, stored in AVFormatContext.priv_data */
typedef struct GXFContext {
    AVClass *av_class;
    uint32_t nb_fields;       ///< total length of the material in fields
    uint16_t audio_tracks;
    uint16_t mpeg_tracks;
    int64_t creation_time;
    uint32_t umf_start_offset; ///< file offset where the UMF payload begins
    uint32_t umf_track_offset; ///< offset of the track section, relative to umf_start_offset
    uint32_t umf_media_offset; ///< offset of the media section, relative to umf_start_offset
    uint32_t umf_length;       ///< total UMF payload length, known after the first pass
    uint16_t umf_track_size;
    uint16_t umf_media_size;
    AVRational time_base;
    int flags;
    GXFStreamContext timecode_track; ///< synthetic track appended after the real streams
    unsigned *flt_entries;    ///< offsets of packets /1024, starts after 2nd video field
    unsigned flt_entries_nb;
    uint64_t *map_offsets;    ///< offset of map packets
    unsigned map_offsets_nb;
    unsigned packet_count;
    GXFTimecode tc;           ///< start timecode parsed from metadata
} GXFContext;
  87.  
/* maps a frame height to the GXF "lines per frame" index written in track descriptions */
static const struct {
    int height, index;
} gxf_lines_tab[] = {
    { 480,  1 }, /* NTSC */
    { 512,  1 }, /* NTSC + VBI */
    { 576,  2 }, /* PAL */
    { 608,  2 }, /* PAL + VBI */
    { 1080, 4 },
    { 720,  6 },
};
  98.  
/* AVCodecID -> GXF media type id; looked up via ff_codec_get_tag() */
static const AVCodecTag gxf_media_types[] = {
    { AV_CODEC_ID_MJPEG     ,   3 }, /* NTSC */
    { AV_CODEC_ID_MJPEG     ,   4 }, /* PAL */
    { AV_CODEC_ID_PCM_S24LE ,   9 },
    { AV_CODEC_ID_PCM_S16LE ,  10 },
    { AV_CODEC_ID_MPEG2VIDEO,  11 }, /* NTSC */
    { AV_CODEC_ID_MPEG2VIDEO,  12 }, /* PAL */
    { AV_CODEC_ID_DVVIDEO   ,  13 }, /* NTSC */
    { AV_CODEC_ID_DVVIDEO   ,  14 }, /* PAL */
    { AV_CODEC_ID_DVVIDEO   ,  15 }, /* 50M NTSC */
    { AV_CODEC_ID_DVVIDEO   ,  16 }, /* 50M PAL */
    { AV_CODEC_ID_AC3       ,  17 },
    //{ AV_CODEC_ID_NONE,  ,   18 }, /* Non compressed 24 bit audio */
    { AV_CODEC_ID_MPEG2VIDEO,  20 }, /* MPEG HD */
    { AV_CODEC_ID_MPEG1VIDEO,  22 }, /* NTSC */
    { AV_CODEC_ID_MPEG1VIDEO,  23 }, /* PAL */
    { AV_CODEC_ID_NONE,         0 },
};
  117.  
/* server-side path prefixes recorded in the material and track name fields */
#define SERVER_PATH "EXT:/PDR/default/"
#define ES_NAME_PATTERN "EXT:/PDR/default/ES."
  120.  
  121. static int gxf_find_lines_index(AVStream *st)
  122. {
  123.     GXFStreamContext *sc = st->priv_data;
  124.     int i;
  125.  
  126.     for (i = 0; i < 6; ++i) {
  127.         if (st->codec->height == gxf_lines_tab[i].height) {
  128.             sc->lines_index = gxf_lines_tab[i].index;
  129.             return 0;
  130.         }
  131.     }
  132.     return -1;
  133. }
  134.  
  135. static void gxf_write_padding(AVIOContext *pb, int64_t to_pad)
  136. {
  137.     for (; to_pad > 0; to_pad--) {
  138.         avio_w8(pb, 0);
  139.     }
  140. }
  141.  
  142. static int64_t updatePacketSize(AVIOContext *pb, int64_t pos)
  143. {
  144.     int64_t curpos;
  145.     int size;
  146.  
  147.     size = avio_tell(pb) - pos;
  148.     if (size % 4) {
  149.         gxf_write_padding(pb, 4 - size % 4);
  150.         size = avio_tell(pb) - pos;
  151.     }
  152.     curpos = avio_tell(pb);
  153.     avio_seek(pb, pos + 6, SEEK_SET);
  154.     avio_wb32(pb, size);
  155.     avio_seek(pb, curpos, SEEK_SET);
  156.     return curpos - pos;
  157. }
  158.  
  159. static int64_t updateSize(AVIOContext *pb, int64_t pos)
  160. {
  161.     int64_t curpos;
  162.  
  163.     curpos = avio_tell(pb);
  164.     avio_seek(pb, pos, SEEK_SET);
  165.     avio_wb16(pb, curpos - pos - 2);
  166.     avio_seek(pb, curpos, SEEK_SET);
  167.     return curpos - pos;
  168. }
  169.  
/* Write the common 16-byte GXF packet header; the size field is left 0
 * and patched later by updatePacketSize(). */
static void gxf_write_packet_header(AVIOContext *pb, GXFPktType type)
{
    avio_wb32(pb, 0);  /* packet leader for synchro */
    avio_w8(pb, 1);
    avio_w8(pb, type); /* packet type */
    avio_wb32(pb, 0);  /* size, patched later */
    avio_wb32(pb, 0);  /* reserved */
    avio_w8(pb, 0xE1); /* trailer 1 */
    avio_w8(pb, 0xE2); /* trailer 2 */
}
  180.  
/**
 * Write the MPEG track auxiliary data: a textual description of the
 * bitstream (bitrate, GOP structure, chroma format, ...).
 * @return number of bytes written (tag + length byte + string)
 */
static int gxf_write_mpeg_auxiliary(AVIOContext *pb, AVStream *st)
{
    GXFStreamContext *sc = st->priv_data;
    char buffer[1024];
    int size, starting_line;

    /* derive GOP statistics from the frame counters (zero until frames
     * have been muxed, i.e. on the initial header pass) */
    if (sc->iframes) {
        sc->p_per_gop = sc->pframes / sc->iframes;
        if (sc->pframes % sc->iframes)
            sc->p_per_gop++;
        if (sc->pframes) {
            sc->b_per_i_or_p = sc->bframes / sc->pframes;
            if (sc->bframes % sc->pframes)
                sc->b_per_i_or_p++;
        }
        if (sc->p_per_gop > 9)
            sc->p_per_gop = 9; /* ensure value won't take more than one char */
        if (sc->b_per_i_or_p > 9)
            sc->b_per_i_or_p = 9; /* ensure value won't take more than one char */
    }
    /* first active video line depends on the raster */
    if (st->codec->height == 512 || st->codec->height == 608)
        starting_line = 7; // VBI
    else if (st->codec->height == 480)
        starting_line = 20;
    else
        starting_line = 23; // default PAL

    size = snprintf(buffer, sizeof(buffer), "Ver 1\nBr %.6f\nIpg 1\nPpi %d\nBpiop %d\n"
                    "Pix 0\nCf %d\nCg %d\nSl %d\nnl16 %d\nVi 1\nf1 1\n",
                    (float)st->codec->bit_rate, sc->p_per_gop, sc->b_per_i_or_p,
                    st->codec->pix_fmt == AV_PIX_FMT_YUV422P ? 2 : 1, sc->first_gop_closed == 1,
                    starting_line, (st->codec->height + 15) / 16);
    av_assert0(size < sizeof(buffer));
    avio_w8(pb, TRACK_MPG_AUX);
    avio_w8(pb, size + 1); /* length including the terminating NUL */
    avio_write(pb, (uint8_t *)buffer, size + 1);
    return size + 3;
}
  219.  
  220. static int gxf_write_dv_auxiliary(AVIOContext *pb, AVStream *st)
  221. {
  222.     int64_t track_aux_data = 0;
  223.  
  224.     avio_w8(pb, TRACK_AUX);
  225.     avio_w8(pb, 8);
  226.     if (st->codec->pix_fmt == AV_PIX_FMT_YUV420P)
  227.         track_aux_data |= 0x01;     /* marks stream as DVCAM instead of DVPRO */
  228.     track_aux_data |= 0x40000000;   /* aux data is valid */
  229.     avio_wl64(pb, track_aux_data);
  230.     return 8;
  231. }
  232.  
  233. static int gxf_write_timecode_auxiliary(AVIOContext *pb, GXFContext *gxf)
  234. {
  235.     uint32_t timecode = GXF_TIMECODE(gxf->tc.color, gxf->tc.drop,
  236.                                      gxf->tc.hh, gxf->tc.mm,
  237.                                      gxf->tc.ss, gxf->tc.ff);
  238.  
  239.     avio_w8(pb, TRACK_AUX);
  240.     avio_w8(pb, 8);
  241.     avio_wl32(pb, timecode);
  242.     /* reserved */
  243.     avio_wl32(pb, 0);
  244.     return 8;
  245. }
  246.  
/**
 * Write one track description section: media file name, codec-specific
 * auxiliary data and version / frame-rate / lines / fields tags.
 * @param sc    context of the track (a stream's or the timecode track's)
 * @param index track index; indexes s->streams[] for real streams
 * @return section size in bytes
 */
static int gxf_write_track_description(AVFormatContext *s, GXFStreamContext *sc, int index)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos;

    /* track description section */
    avio_w8(pb, sc->media_type + 0x80);
    avio_w8(pb, index + 0xC0);

    pos = avio_tell(pb);
    avio_wb16(pb, 0); /* size, patched by updateSize() */

    /* media file name */
    avio_w8(pb, TRACK_NAME);
    avio_w8(pb, strlen(ES_NAME_PATTERN) + 3);
    avio_write(pb, ES_NAME_PATTERN, sizeof(ES_NAME_PATTERN) - 1);
    avio_wb16(pb, sc->media_info);
    avio_w8(pb, 0);

    /* codec-specific auxiliary data, chosen by GXF track type */
    switch (sc->track_type) {
        case 3:     /* timecode */
            gxf_write_timecode_auxiliary(pb, gxf);
            break;
        case 4:     /* MPEG2 */
        case 9:     /* MPEG1 */
            gxf_write_mpeg_auxiliary(pb, s->streams[index]);
            break;
        case 5:     /* DV25 */
        case 6:     /* DV50 */
            gxf_write_dv_auxiliary(pb, s->streams[index]);
            break;
        default:
            avio_w8(pb, TRACK_AUX);
            avio_w8(pb, 8);
            avio_wl64(pb, 0);
    }

    /* file system version */
    avio_w8(pb, TRACK_VER);
    avio_w8(pb, 4);
    avio_wb32(pb, 0);

    /* frame rate */
    avio_w8(pb, TRACK_FPS);
    avio_w8(pb, 4);
    avio_wb32(pb, sc->frame_rate_index);

    /* lines per frame */
    avio_w8(pb, TRACK_LINES);
    avio_w8(pb, 4);
    avio_wb32(pb, sc->lines_index);

    /* fields per frame */
    avio_w8(pb, TRACK_FPF);
    avio_w8(pb, 4);
    avio_wb32(pb, sc->fields);

    return updateSize(pb, pos);
}
  307.  
/**
 * Write the material data section of a MAP packet: material name
 * (server path + output basename), first/last field, marks and the
 * estimated file size.
 * @return section size in bytes
 */
static int gxf_write_material_data_section(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos;
    int len;
    const char *filename = strrchr(s->filename, '/');

    pos = avio_tell(pb);
    avio_wb16(pb, 0); /* size, patched by updateSize() */

    /* name: basename of the output file, or the whole name if no '/' */
    if (filename)
        filename++;
    else
        filename = s->filename;
    len = strlen(filename);

    avio_w8(pb, MAT_NAME);
    avio_w8(pb, strlen(SERVER_PATH) + len + 1);
    avio_write(pb, SERVER_PATH, sizeof(SERVER_PATH) - 1);
    avio_write(pb, filename, len);
    avio_w8(pb, 0);

    /* first field */
    avio_w8(pb, MAT_FIRST_FIELD);
    avio_w8(pb, 4);
    avio_wb32(pb, 0);

    /* last field */
    avio_w8(pb, MAT_LAST_FIELD);
    avio_w8(pb, 4);
    avio_wb32(pb, gxf->nb_fields);

    /* reserved */
    avio_w8(pb, MAT_MARK_IN);
    avio_w8(pb, 4);
    avio_wb32(pb, 0);

    avio_w8(pb, MAT_MARK_OUT);
    avio_w8(pb, 4);
    avio_wb32(pb, gxf->nb_fields);

    /* estimated size in KB, from the current file size */
    avio_w8(pb, MAT_SIZE);
    avio_w8(pb, 4);
    avio_wb32(pb, avio_size(pb) / 1024);

    return updateSize(pb, pos);
}
  358.  
  359. static int gxf_write_track_description_section(AVFormatContext *s)
  360. {
  361.     GXFContext *gxf = s->priv_data;
  362.     AVIOContext *pb = s->pb;
  363.     int64_t pos;
  364.     int i;
  365.  
  366.     pos = avio_tell(pb);
  367.     avio_wb16(pb, 0); /* size */
  368.     for (i = 0; i < s->nb_streams; ++i)
  369.         gxf_write_track_description(s, s->streams[i]->priv_data, i);
  370.  
  371.     gxf_write_track_description(s, &gxf->timecode_track, s->nb_streams);
  372.  
  373.     return updateSize(pb, pos);
  374. }
  375.  
/**
 * Write a MAP packet (material data + all track descriptions).
 * @param rewrite 0 to emit a new packet and record its offset so the
 *                trailer can update it later; 1 to rewrite one in place
 * @return packet size, or a negative error code on allocation failure
 */
static int gxf_write_map_packet(AVFormatContext *s, int rewrite)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos = avio_tell(pb);

    if (!rewrite) {
        /* grow the offset table in chunks of 30 entries */
        if (!(gxf->map_offsets_nb % 30)) {
            int err;
            if ((err = av_reallocp_array(&gxf->map_offsets,
                                         gxf->map_offsets_nb + 30,
                                         sizeof(*gxf->map_offsets))) < 0) {
                gxf->map_offsets_nb = 0;
                av_log(s, AV_LOG_ERROR, "could not realloc map offsets\n");
                return err;
            }
        }
        gxf->map_offsets[gxf->map_offsets_nb++] = pos; /* remember position for the trailer rewrite */
    }

    gxf_write_packet_header(pb, PKT_MAP);

    /* preamble */
    avio_w8(pb, 0xE0); /* version */
    avio_w8(pb, 0xFF); /* reserved */

    gxf_write_material_data_section(s);
    gxf_write_track_description_section(s);

    return updatePacketSize(pb, pos);
}
  407.  
/**
 * Write the FLT (field locator table) packet: a fixed table of 1000
 * 32-bit entries holding packet offsets / 1024, sampled every
 * fields_per_flt fields, used for coarse seeking. Unused slots are zero.
 */
static int gxf_write_flt_packet(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos = avio_tell(pb);
    int fields_per_flt = (gxf->nb_fields+1) / 1000 + 1; /* keeps entry count <= 1000 */
    int flt_entries = gxf->nb_fields / fields_per_flt;
    int i = 0;

    gxf_write_packet_header(pb, PKT_FLT);

    avio_wl32(pb, fields_per_flt); /* number of fields */
    avio_wl32(pb, flt_entries); /* number of active flt entries */

    if (gxf->flt_entries) {
        /* >>1 converts the field index into a table index — flt_entries
         * appears to be indexed per frame (two fields); see the
         * flt_entries doc in GXFContext */
        for (i = 0; i < flt_entries; i++)
            avio_wl32(pb, gxf->flt_entries[(i*fields_per_flt)>>1]);
    }

    /* pad the table out to its fixed 1000-entry size */
    for (; i < 1000; i++)
        avio_wl32(pb, 0);

    return updatePacketSize(pb, pos);
}
  432.  
  433. static int gxf_write_umf_material_description(AVFormatContext *s)
  434. {
  435.     GXFContext *gxf = s->priv_data;
  436.     AVIOContext *pb = s->pb;
  437.     int timecode_base = gxf->time_base.den == 60000 ? 60 : 50;
  438.     int64_t timestamp = 0;
  439.     AVDictionaryEntry *t;
  440.     uint64_t nb_fields;
  441.     uint32_t timecode_in; // timecode at mark in
  442.     uint32_t timecode_out; // timecode at mark out
  443.  
  444.     if (t = av_dict_get(s->metadata, "creation_time", NULL, 0))
  445.         timestamp = ff_iso8601_to_unix_time(t->value);
  446.  
  447.     timecode_in = GXF_TIMECODE(gxf->tc.color, gxf->tc.drop,
  448.                                gxf->tc.hh, gxf->tc.mm,
  449.                                gxf->tc.ss, gxf->tc.ff);
  450.  
  451.     nb_fields = gxf->nb_fields +
  452.                 gxf->tc.hh * (timecode_base * 3600) +
  453.                 gxf->tc.mm * (timecode_base * 60)   +
  454.                 gxf->tc.ss * timecode_base          +
  455.                 gxf->tc.ff;
  456.  
  457.     timecode_out = GXF_TIMECODE(gxf->tc.color, gxf->tc.drop,
  458.                                 nb_fields / (timecode_base * 3600) % 24,
  459.                                 nb_fields / (timecode_base * 60)   % 60,
  460.                                 nb_fields /  timecode_base % 60,
  461.                                 nb_fields %  timecode_base);
  462.  
  463.     avio_wl32(pb, gxf->flags);
  464.     avio_wl32(pb, gxf->nb_fields); /* length of the longest track */
  465.     avio_wl32(pb, gxf->nb_fields); /* length of the shortest track */
  466.     avio_wl32(pb, 0); /* mark in */
  467.     avio_wl32(pb, gxf->nb_fields); /* mark out */
  468.     avio_wl32(pb, timecode_in); /* timecode mark in */
  469.     avio_wl32(pb, timecode_out); /* timecode mark out */
  470.     avio_wl64(pb, timestamp); /* modification time */
  471.     avio_wl64(pb, timestamp); /* creation time */
  472.     avio_wl16(pb, 0); /* reserved */
  473.     avio_wl16(pb, 0); /* reserved */
  474.     avio_wl16(pb, gxf->audio_tracks);
  475.     avio_wl16(pb, 1); /* timecode track count */
  476.     avio_wl16(pb, 0); /* reserved */
  477.     avio_wl16(pb, gxf->mpeg_tracks);
  478.     return 48;
  479. }
  480.  
/**
 * Write the 48-byte UMF payload header describing the layout of the UMF
 * sections. Note: gxf->umf_length and the section offsets/sizes still
 * hold the values from the previous pass (zero on the first write); the
 * UMF packet is rewritten in the trailer with the final values.
 */
static int gxf_write_umf_payload(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;

    avio_wl32(pb, gxf->umf_length); /* total length of the umf data */
    avio_wl32(pb, 3); /* version */
    avio_wl32(pb, s->nb_streams+1); /* +1 for the timecode track */
    avio_wl32(pb, gxf->umf_track_offset); /* umf track section offset */
    avio_wl32(pb, gxf->umf_track_size);
    avio_wl32(pb, s->nb_streams+1);
    avio_wl32(pb, gxf->umf_media_offset);
    avio_wl32(pb, gxf->umf_media_size);
    avio_wl32(pb, gxf->umf_length); /* user data offset */
    avio_wl32(pb, 0); /* user data size */
    avio_wl32(pb, 0); /* reserved */
    avio_wl32(pb, 0); /* reserved */
    return 48;
}
  500.  
  501. static int gxf_write_umf_track_description(AVFormatContext *s)
  502. {
  503.     AVIOContext *pb = s->pb;
  504.     GXFContext *gxf = s->priv_data;
  505.     int64_t pos = avio_tell(pb);
  506.     int i;
  507.  
  508.     gxf->umf_track_offset = pos - gxf->umf_start_offset;
  509.     for (i = 0; i < s->nb_streams; ++i) {
  510.         GXFStreamContext *sc = s->streams[i]->priv_data;
  511.         avio_wl16(pb, sc->media_info);
  512.         avio_wl16(pb, 1);
  513.     }
  514.  
  515.     avio_wl16(pb, gxf->timecode_track.media_info);
  516.     avio_wl16(pb, 1);
  517.  
  518.     return avio_tell(pb) - pos;
  519. }
  520.  
  521. static int gxf_write_umf_media_mpeg(AVIOContext *pb, AVStream *st)
  522. {
  523.     GXFStreamContext *sc = st->priv_data;
  524.  
  525.     if (st->codec->pix_fmt == AV_PIX_FMT_YUV422P)
  526.         avio_wl32(pb, 2);
  527.     else
  528.         avio_wl32(pb, 1); /* default to 420 */
  529.     avio_wl32(pb, sc->first_gop_closed == 1); /* closed = 1, open = 0, unknown = 255 */
  530.     avio_wl32(pb, 3); /* top = 1, bottom = 2, frame = 3, unknown = 0 */
  531.     avio_wl32(pb, 1); /* I picture per GOP */
  532.     avio_wl32(pb, sc->p_per_gop);
  533.     avio_wl32(pb, sc->b_per_i_or_p);
  534.     if (st->codec->codec_id == AV_CODEC_ID_MPEG2VIDEO)
  535.         avio_wl32(pb, 2);
  536.     else if (st->codec->codec_id == AV_CODEC_ID_MPEG1VIDEO)
  537.         avio_wl32(pb, 1);
  538.     else
  539.         avio_wl32(pb, 0);
  540.     avio_wl32(pb, 0); /* reserved */
  541.     return 32;
  542. }
  543.  
  544. static int gxf_write_umf_media_timecode(AVIOContext *pb, int drop)
  545. {
  546.     avio_wl32(pb, drop); /* drop frame */
  547.     avio_wl32(pb, 0); /* reserved */
  548.     avio_wl32(pb, 0); /* reserved */
  549.     avio_wl32(pb, 0); /* reserved */
  550.     avio_wl32(pb, 0); /* reserved */
  551.     avio_wl32(pb, 0); /* reserved */
  552.     avio_wl32(pb, 0); /* reserved */
  553.     avio_wl32(pb, 0); /* reserved */
  554.     return 32;
  555. }
  556.  
  557. static int gxf_write_umf_media_dv(AVIOContext *pb, GXFStreamContext *sc, AVStream *st)
  558. {
  559.     int dv_umf_data = 0;
  560.  
  561.     if (st->codec->pix_fmt == AV_PIX_FMT_YUV420P)
  562.         dv_umf_data |= 0x20; /* marks as DVCAM instead of DVPRO */
  563.     avio_wl32(pb, dv_umf_data);
  564.     avio_wl32(pb, 0);
  565.     avio_wl32(pb, 0);
  566.     avio_wl32(pb, 0);
  567.     avio_wl32(pb, 0);
  568.     avio_wl32(pb, 0);
  569.     avio_wl32(pb, 0);
  570.     avio_wl32(pb, 0);
  571.     return 32;
  572. }
  573.  
  574. static int gxf_write_umf_media_audio(AVIOContext *pb, GXFStreamContext *sc)
  575. {
  576.     avio_wl64(pb, av_double2int(1)); /* sound level to begin to */
  577.     avio_wl64(pb, av_double2int(1)); /* sound level to begin to */
  578.     avio_wl32(pb, 0); /* number of fields over which to ramp up sound level */
  579.     avio_wl32(pb, 0); /* number of fields over which to ramp down sound level */
  580.     avio_wl32(pb, 0); /* reserved */
  581.     avio_wl32(pb, 0); /* reserved */
  582.     return 32;
  583. }
  584.  
/**
 * Write one UMF media description per stream plus one for the timecode
 * track: fixed attributes, the 88-byte media file name field and the
 * codec-specific trailer. Each description's length field is patched
 * in place after it is written.
 * @return total number of bytes written
 */
static int gxf_write_umf_media_description(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos;
    int i, j;

    pos = avio_tell(pb);
    gxf->umf_media_offset = pos - gxf->umf_start_offset;
    /* index s->nb_streams denotes the synthetic timecode track */
    for (i = 0; i <= s->nb_streams; ++i) {
        GXFStreamContext *sc;
        int64_t startpos, curpos;

        if (i == s->nb_streams)
            sc = &gxf->timecode_track;
        else
            sc = s->streams[i]->priv_data;

        startpos = avio_tell(pb);
        avio_wl16(pb, 0); /* length, patched below */
        avio_wl16(pb, sc->media_info);
        avio_wl16(pb, 0); /* reserved */
        avio_wl16(pb, 0); /* reserved */
        avio_wl32(pb, gxf->nb_fields);
        avio_wl32(pb, 0); /* attributes rw, ro */
        avio_wl32(pb, 0); /* mark in */
        avio_wl32(pb, gxf->nb_fields); /* mark out */
        /* media file name, zero-padded to 88 bytes */
        avio_write(pb, ES_NAME_PATTERN, strlen(ES_NAME_PATTERN));
        avio_wb16(pb, sc->media_info);
        for (j = strlen(ES_NAME_PATTERN)+2; j < 88; j++)
            avio_w8(pb, 0);
        avio_wl32(pb, sc->track_type);
        avio_wl32(pb, sc->sample_rate);
        avio_wl32(pb, sc->sample_size);
        avio_wl32(pb, 0); /* reserved */

        /* codec-specific 32-byte trailer */
        if (sc == &gxf->timecode_track)
            gxf_write_umf_media_timecode(pb, gxf->tc.drop);
        else {
            AVStream *st = s->streams[i];
            switch (st->codec->codec_id) {
            case AV_CODEC_ID_MPEG1VIDEO:
            case AV_CODEC_ID_MPEG2VIDEO:
                gxf_write_umf_media_mpeg(pb, st);
                break;
            case AV_CODEC_ID_PCM_S16LE:
                gxf_write_umf_media_audio(pb, sc);
                break;
            case AV_CODEC_ID_DVVIDEO:
                gxf_write_umf_media_dv(pb, sc, st);
                break;
            }
        }

        /* patch this description's length field */
        curpos = avio_tell(pb);
        avio_seek(pb, startpos, SEEK_SET);
        avio_wl16(pb, curpos - startpos);
        avio_seek(pb, curpos, SEEK_SET);
    }
    return avio_tell(pb) - pos;
}
  646.  
/**
 * Write the UMF packet: payload header followed by the material, track
 * and media description sections. The data length and section sizes are
 * captured in the context as they are written; since the header goes
 * out first, the correct totals only land when the packet is rewritten
 * in the trailer.
 */
static int gxf_write_umf_packet(AVFormatContext *s)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    int64_t pos = avio_tell(pb);

    gxf_write_packet_header(pb, PKT_UMF);

    /* preamble */
    avio_w8(pb, 3); /* first and last (only) packet */
    avio_wb32(pb, gxf->umf_length); /* data length (previous pass's value) */

    gxf->umf_start_offset = avio_tell(pb);
    gxf_write_umf_payload(s);
    gxf_write_umf_material_description(s);
    gxf->umf_track_size = gxf_write_umf_track_description(s);
    gxf->umf_media_size = gxf_write_umf_media_description(s);
    gxf->umf_length = avio_tell(pb) - gxf->umf_start_offset;
    return updatePacketSize(pb, pos);
}
  667.  
  668. static const int GXF_samples_per_frame[] = { 32768, 0 };
  669.  
  670. static void gxf_init_timecode_track(GXFStreamContext *sc, GXFStreamContext *vsc)
  671. {
  672.     if (!vsc)
  673.         return;
  674.  
  675.     sc->media_type = vsc->sample_rate == 60 ? 7 : 8;
  676.     sc->sample_rate = vsc->sample_rate;
  677.     sc->media_info = ('T'<<8) | '0';
  678.     sc->track_type = 3;
  679.     sc->frame_rate_index = vsc->frame_rate_index;
  680.     sc->lines_index = vsc->lines_index;
  681.     sc->sample_size = 16;
  682.     sc->fields = vsc->fields;
  683. }
  684.  
  685. static int gxf_init_timecode(AVFormatContext *s, GXFTimecode *tc, const char *tcstr, int fields)
  686. {
  687.     char c;
  688.  
  689.     if (sscanf(tcstr, "%d:%d:%d%c%d", &tc->hh, &tc->mm, &tc->ss, &c, &tc->ff) != 5) {
  690.         av_log(s, AV_LOG_ERROR, "unable to parse timecode, "
  691.                                 "syntax: hh:mm:ss[:;.]ff\n");
  692.         return -1;
  693.     }
  694.  
  695.     tc->color = 0;
  696.     tc->drop = c != ':';
  697.  
  698.     if (fields == 2)
  699.         tc->ff = tc->ff * 2;
  700.  
  701.     return 0;
  702. }
  703.  
/**
 * Write the GXF file header: validate the streams (at most one video
 * track, which must come first; audio must be mono 16-bit 48 kHz PCM),
 * set up per-stream contexts and the timecode track, then emit the
 * initial MAP, FLT and UMF packets (rewritten with final values in the
 * trailer).
 * @return 0 on success, a negative value on error
 */
static int gxf_write_header(AVFormatContext *s)
{
    AVIOContext *pb = s->pb;
    GXFContext *gxf = s->priv_data;
    GXFStreamContext *vsc = NULL;
    uint8_t tracks[255] = {0}; /* per media-label counters for track naming */
    int i, media_info = 0;
    int ret;
    AVDictionaryEntry *tcr = av_dict_get(s->metadata, "timecode", NULL, 0);

    /* header packets are rewritten in the trailer, so output must be seekable */
    if (!pb->seekable) {
        av_log(s, AV_LOG_ERROR, "gxf muxer does not support streamed output, patch welcome\n");
        return -1;
    }

    gxf->flags |= 0x00080000; /* material is simple clip */
    for (i = 0; i < s->nb_streams; ++i) {
        AVStream *st = s->streams[i];
        GXFStreamContext *sc = av_mallocz(sizeof(*sc));
        if (!sc)
            return AVERROR(ENOMEM);
        st->priv_data = sc;

        sc->media_type = ff_codec_get_tag(gxf_media_types, st->codec->codec_id);
        if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
            if (st->codec->codec_id != AV_CODEC_ID_PCM_S16LE) {
                av_log(s, AV_LOG_ERROR, "only 16 BIT PCM LE allowed for now\n");
                return -1;
            }
            if (st->codec->sample_rate != 48000) {
                av_log(s, AV_LOG_ERROR, "only 48000hz sampling rate is allowed\n");
                return -1;
            }
            if (st->codec->channels != 1) {
                av_log(s, AV_LOG_ERROR, "only mono tracks are allowed\n");
                return -1;
            }
            sc->track_type = 2;
            sc->sample_rate = st->codec->sample_rate;
            avpriv_set_pts_info(st, 64, 1, sc->sample_rate);
            sc->sample_size = 16;
            sc->frame_rate_index = -2;
            sc->lines_index = -2;
            sc->fields = -2;
            gxf->audio_tracks++;
            gxf->flags |= 0x04000000; /* audio is 16 bit pcm */
            media_info = 'A';
        } else if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            if (i != 0) {
                av_log(s, AV_LOG_ERROR, "video stream must be the first track\n");
                return -1;
            }
            /* FIXME check from time_base ? */
            if (st->codec->height == 480 || st->codec->height == 512) { /* NTSC or NTSC+VBI */
                sc->frame_rate_index = 5;
                sc->sample_rate = 60;
                gxf->flags |= 0x00000080;
                gxf->time_base = (AVRational){ 1001, 60000 };
            } else if (st->codec->height == 576 || st->codec->height == 608) { /* PAL or PAL+VBI */
                sc->frame_rate_index = 6;
                sc->media_type++; /* PAL variants follow the NTSC entries in gxf_media_types */
                sc->sample_rate = 50;
                gxf->flags |= 0x00000040;
                gxf->time_base = (AVRational){ 1, 50 };
            } else {
                av_log(s, AV_LOG_ERROR, "unsupported video resolution, "
                       "gxf muxer only accepts PAL or NTSC resolutions currently\n");
                return -1;
            }
            /* stream-level timecode metadata is used when none was set globally */
            if (!tcr)
                tcr = av_dict_get(st->metadata, "timecode", NULL, 0);
            avpriv_set_pts_info(st, 64, gxf->time_base.num, gxf->time_base.den);
            if (gxf_find_lines_index(st) < 0)
                sc->lines_index = -1;
            sc->sample_size = st->codec->bit_rate;
            sc->fields = 2; /* interlaced */

            vsc = sc;

            switch (st->codec->codec_id) {
            case AV_CODEC_ID_MJPEG:
                sc->track_type = 1;
                gxf->flags |= 0x00004000;
                media_info = 'J';
                break;
            case AV_CODEC_ID_MPEG1VIDEO:
                sc->track_type = 9;
                gxf->mpeg_tracks++;
                media_info = 'L';
                break;
            case AV_CODEC_ID_MPEG2VIDEO:
                sc->first_gop_closed = -1;
                sc->track_type = 4;
                gxf->mpeg_tracks++;
                gxf->flags |= 0x00008000;
                media_info = 'M';
                break;
            case AV_CODEC_ID_DVVIDEO:
                if (st->codec->pix_fmt == AV_PIX_FMT_YUV422P) {
                    sc->media_type += 2;
                    sc->track_type = 6;
                    gxf->flags |= 0x00002000;
                    media_info = 'E';
                } else {
                    sc->track_type = 5;
                    gxf->flags |= 0x00001000;
                    media_info = 'D';
                }
                break;
            default:
                av_log(s, AV_LOG_ERROR, "video codec not supported\n");
                return -1;
            }
        }
        /* FIXME first 10 audio tracks are 0 to 9 next 22 are A to V */
        sc->media_info = media_info<<8 | ('0'+tracks[media_info]++);
        sc->order = s->nb_streams - st->index;
    }

    if (ff_audio_interleave_init(s, GXF_samples_per_frame, (AVRational){ 1, 48000 }) < 0)
        return -1;

    if (tcr && vsc)
        gxf_init_timecode(s, &gxf->tc, tcr->value, vsc->fields);

    gxf_init_timecode_track(&gxf->timecode_track, vsc);
    gxf->flags |= 0x200000; // time code track is non-drop frame

    if ((ret = gxf_write_map_packet(s, 0)) < 0)
        return ret;
    gxf_write_flt_packet(s);
    gxf_write_umf_packet(s);

    gxf->packet_count = 3;

    avio_flush(pb);
    return 0;
}
  842.  
  843. static int gxf_write_eos_packet(AVIOContext *pb)
  844. {
  845.     int64_t pos = avio_tell(pb);
  846.  
  847.     gxf_write_packet_header(pb, PKT_EOS);
  848.     return updatePacketSize(pb, pos);
  849. }
  850.  
  851. static int gxf_write_trailer(AVFormatContext *s)
  852. {
  853.     GXFContext *gxf = s->priv_data;
  854.     AVIOContext *pb = s->pb;
  855.     int64_t end;
  856.     int i;
  857.     int ret;
  858.  
  859.     ff_audio_interleave_close(s);
  860.  
  861.     gxf_write_eos_packet(pb);
  862.     end = avio_tell(pb);
  863.     avio_seek(pb, 0, SEEK_SET);
  864.     /* overwrite map, flt and umf packets with new values */
  865.     if ((ret = gxf_write_map_packet(s, 1)) < 0)
  866.         return ret;
  867.     gxf_write_flt_packet(s);
  868.     gxf_write_umf_packet(s);
  869.     avio_flush(pb);
  870.     /* update duration in all map packets */
  871.     for (i = 1; i < gxf->map_offsets_nb; i++) {
  872.         avio_seek(pb, gxf->map_offsets[i], SEEK_SET);
  873.         if ((ret = gxf_write_map_packet(s, 1)) < 0)
  874.             return ret;
  875.         avio_flush(pb);
  876.     }
  877.  
  878.     avio_seek(pb, end, SEEK_SET);
  879.  
  880.     av_freep(&gxf->flt_entries);
  881.     av_freep(&gxf->map_offsets);
  882.  
  883.     return 0;
  884. }
  885.  
  886. static int gxf_parse_mpeg_frame(GXFStreamContext *sc, const uint8_t *buf, int size)
  887. {
  888.     uint32_t c=-1;
  889.     int i;
  890.     for(i=0; i<size-4 && c!=0x100; i++){
  891.         c = (c<<8) + buf[i];
  892.         if(c == 0x1B8 && sc->first_gop_closed == -1) /* GOP start code */
  893.             sc->first_gop_closed= (buf[i+4]>>6)&1;
  894.     }
  895.     return (buf[i+1]>>3)&7;
  896. }
  897.  
/* Write the fixed 16-byte media-packet preamble that precedes every
 * media payload: media type, track id, field number, a per-codec
 * 4-byte descriptor, the field number again, flags and a reserved byte.
 * @param size payload size including any padding the caller will append
 * @return number of preamble bytes written (always 16) */
static int gxf_write_media_preamble(AVFormatContext *s, AVPacket *pkt, int size)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    AVStream *st = s->streams[pkt->stream_index];
    GXFStreamContext *sc = st->priv_data;
    unsigned field_nb;
    /* If the video is frame-encoded, the frame numbers shall be represented by
     * even field numbers.
     * see SMPTE360M-2004  6.4.2.1.3 Media field number */
    if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
        field_nb = gxf->nb_fields;
    } else {
        /* audio: derive the field number from the 48kHz-based dts,
         * rounding up to the next field boundary */
        field_nb = av_rescale_rnd(pkt->dts, gxf->time_base.den,
                                  (int64_t)48000*gxf->time_base.num, AV_ROUND_UP);
    }

    avio_w8(pb, sc->media_type);
    avio_w8(pb, st->index);
    avio_wb32(pb, field_nb);
    /* 4-byte codec-specific descriptor */
    if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
        avio_wb16(pb, 0);
        avio_wb16(pb, size / 2);      /* sample count (16-bit samples) */
    } else if (st->codec->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
        /* first byte encodes the picture type, next 3 the frame size */
        int frame_type = gxf_parse_mpeg_frame(sc, pkt->data, pkt->size);
        if (frame_type == AV_PICTURE_TYPE_I) {
            avio_w8(pb, 0x0d);
            sc->iframes++;
        } else if (frame_type == AV_PICTURE_TYPE_B) {
            avio_w8(pb, 0x0f);
            sc->bframes++;
        } else {
            avio_w8(pb, 0x0e);        /* P frame (also used when type is unknown) */
            sc->pframes++;
        }
        avio_wb24(pb, size);
    } else if (st->codec->codec_id == AV_CODEC_ID_DVVIDEO) {
        avio_w8(pb, size / 4096);     /* DV frame size in 4096-byte blocks */
        avio_wb24(pb, 0);
    } else
        avio_wb32(pb, size);
    avio_wb32(pb, field_nb);
    avio_w8(pb, 1); /* flags */
    avio_w8(pb, 0); /* reserved */
    return 16;
}
  944.  
/* Write one media packet: packet header, 16-byte preamble, payload and
 * codec-dependent padding; record the packet's 1KB-aligned offset in the
 * field-location table for video, and refresh the map packet every 100
 * packets so damaged files stay seekable. Returns 0 or a negative error. */
static int gxf_write_packet(AVFormatContext *s, AVPacket *pkt)
{
    GXFContext *gxf = s->priv_data;
    AVIOContext *pb = s->pb;
    AVStream *st = s->streams[pkt->stream_index];
    int64_t pos = avio_tell(pb);
    int padding = 0;
    /* flt entries store the packet start in 1024-byte units */
    int packet_start_offset = avio_tell(pb) / 1024;
    int ret;

    gxf_write_packet_header(pb, PKT_MEDIA);
    if (st->codec->codec_id == AV_CODEC_ID_MPEG2VIDEO && pkt->size % 4) /* MPEG-2 frames must be padded */
        padding = 4 - pkt->size % 4;
    else if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO)
        /* audio packets are padded up to the fixed GXF audio packet size */
        padding = GXF_AUDIO_PACKET_SIZE - pkt->size;
    gxf_write_media_preamble(s, pkt, pkt->size + padding);
    avio_write(pb, pkt->data, pkt->size);
    gxf_write_padding(pb, padding);

    if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
        /* grow the field-location table in chunks of 500 entries */
        if (!(gxf->flt_entries_nb % 500)) {
            int err;
            if ((err = av_reallocp_array(&gxf->flt_entries,
                                         gxf->flt_entries_nb + 500,
                                         sizeof(*gxf->flt_entries))) < 0) {
                gxf->flt_entries_nb = 0;
                gxf->nb_fields = 0;
                av_log(s, AV_LOG_ERROR, "could not reallocate flt entries\n");
                return err;
            }
        }
        gxf->flt_entries[gxf->flt_entries_nb++] = packet_start_offset;
        gxf->nb_fields += 2; // count fields
    }

    /* go back and fill in the final packet size in the header */
    updatePacketSize(pb, pos);

    gxf->packet_count++;
    if (gxf->packet_count == 100) {
        /* periodically re-emit a map packet for resilience/seeking */
        if ((ret = gxf_write_map_packet(s, 0)) < 0)
            return ret;
        gxf->packet_count = 0;
    }

    return 0;
}
  991.  
  992. static int gxf_compare_field_nb(AVFormatContext *s, AVPacket *next, AVPacket *cur)
  993. {
  994.     GXFContext *gxf = s->priv_data;
  995.     AVPacket *pkt[2] = { cur, next };
  996.     int i, field_nb[2];
  997.     GXFStreamContext *sc[2];
  998.  
  999.     for (i = 0; i < 2; i++) {
  1000.         AVStream *st = s->streams[pkt[i]->stream_index];
  1001.         sc[i] = st->priv_data;
  1002.         if (st->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
  1003.             field_nb[i] = av_rescale_rnd(pkt[i]->dts, gxf->time_base.den,
  1004.                                          (int64_t)48000*gxf->time_base.num, AV_ROUND_UP);
  1005.             field_nb[i] &= ~1; // compare against even field number because audio must be before video
  1006.         } else
  1007.             field_nb[i] = pkt[i]->dts; // dts are field based
  1008.     }
  1009.  
  1010.     return field_nb[1] > field_nb[0] ||
  1011.         (field_nb[1] == field_nb[0] && sc[1]->order > sc[0]->order);
  1012. }
  1013.  
  1014. static int gxf_interleave_packet(AVFormatContext *s, AVPacket *out, AVPacket *pkt, int flush)
  1015. {
  1016.     if (pkt && s->streams[pkt->stream_index]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
  1017.         pkt->duration = 2; // enforce 2 fields
  1018.     return ff_audio_rechunk_interleave(s, out, pkt, flush,
  1019.                                ff_interleave_packet_per_dts, gxf_compare_field_nb);
  1020. }
  1021.  
/* Muxer registration: GXF container, defaulting to 16-bit little-endian
 * PCM audio and MPEG-2 video, with a custom interleaver that orders
 * packets by media field number. */
AVOutputFormat ff_gxf_muxer = {
    .name              = "gxf",
    .long_name         = NULL_IF_CONFIG_SMALL("GXF (General eXchange Format)"),
    .extensions        = "gxf",
    .priv_data_size    = sizeof(GXFContext),
    .audio_codec       = AV_CODEC_ID_PCM_S16LE,
    .video_codec       = AV_CODEC_ID_MPEG2VIDEO,
    .write_header      = gxf_write_header,
    .write_packet      = gxf_write_packet,
    .write_trailer     = gxf_write_trailer,
    .interleave_packet = gxf_interleave_packet,
};
  1034.