/*
 * Copyright (c) 2014 Intel Corporation. All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
/*
 * Simple JPEG encoder based on libVA.
 *
 * Usage:
 * ./jpegenc <width> <height> <input file> <output file> <input filetype 0(I420)/1(NV12)/2(UYVY)/3(YUY2)/4(Y8)/5(RGBA)> <quality>
 * Currently supports only the I420/NV12/UYVY/YUY2/Y8 input file formats.
 *
 * NOTE: The intel-driver expects a packed header to be sent to it, so the app is responsible for packing
 * the header and sending it to the driver through LibVA. This unit test also showcases how to send the header to the driver.
 */
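
/*
 * A minimal sketch of the packed-header handshake used further below (the names dpy, ctx,
 * data, param_id and data_id are placeholders; see encode_input_image() for the actual calls
 * and error handling used in this file):
 *
 *     VAEncPackedHeaderParameterBuffer p = {
 *         .type = VAEncPackedHeaderRawData,
 *         .bit_length = length_in_bits,
 *         .has_emulation_bytes = 0,
 *     };
 *     vaCreateBuffer(dpy, ctx, VAEncPackedHeaderParameterBufferType, sizeof(p), 1, &p, &param_id);
 *     vaCreateBuffer(dpy, ctx, VAEncPackedHeaderDataBufferType, (length_in_bits + 7) / 8, 1, data, &data_id);
 *     vaRenderPicture(dpy, ctx, &param_id, 1);
 *     vaRenderPicture(dpy, ctx, &data_id, 1);
 */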

#include "sysdeps.h"
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <getopt.h>
#include <unistd.h>

#include <sys/time.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <assert.h>
#include <time.h>

#include <pthread.h>

#include <va/va.h>
#include <va/va_enc_jpeg.h>
#include "va_display.h"
#include "jpegenc_utils.h"

#ifndef VA_FOURCC_I420
#define VA_FOURCC_I420          0x30323449
#endif

#define CHECK_VASTATUS(va_status,func)                                  \
    if (va_status != VA_STATUS_SUCCESS) {                                   \
        fprintf(stderr,"%s:%s (%d) failed,exit\n", __func__, func, __LINE__); \
        exit(1);                                                            \
    }


void show_help()
{
    printf("Usage: ./jpegenc <width> <height> <input file> <output file> <fourcc value 0(I420)/1(NV12)/2(UYVY)/3(YUY2)/4(Y8)/5(RGBA)> <quality>\n");
    printf("Currently supports only the I420/NV12/UYVY/YUY2/Y8 input file formats.\n");
    printf("Example: ./jpegenc 1024 768 input_file.yuv output.jpeg 0 50\n\n");
    return;
}


void jpegenc_pic_param_init(VAEncPictureParameterBufferJPEG *pic_param, int width, int height, int quality, YUVComponentSpecs yuvComp)
{
    assert(pic_param);

    pic_param->picture_width = width;
    pic_param->picture_height = height;
    pic_param->quality = quality;

    pic_param->pic_flags.bits.profile = 0;      //Profile = Baseline
    pic_param->pic_flags.bits.progressive = 0;  //Sequential encoding
    pic_param->pic_flags.bits.huffman = 1;      //Uses Huffman coding
    pic_param->pic_flags.bits.interleaved = 0;  //Interleaved flag not set
    pic_param->pic_flags.bits.differential = 0; //Non-differential encoding

    pic_param->sample_bit_depth = 8; //Only 8-bit sample depth is currently supported
    pic_param->num_scan = 1;
    pic_param->num_components = yuvComp.num_components; //Supporting only up to 3 components maximum
    //Set component_id Ci and Tqi
    if(yuvComp.fourcc_val == VA_FOURCC_Y800) {
        pic_param->component_id[0] = 0;
        pic_param->quantiser_table_selector[0] = 0;
    } else {
        pic_param->component_id[0] = pic_param->quantiser_table_selector[0] = 0;
        pic_param->component_id[1] = pic_param->quantiser_table_selector[1] = 1;
        pic_param->component_id[2] = 2;
        pic_param->quantiser_table_selector[2] = 1;
    }
}

void jpegenc_qmatrix_init(VAQMatrixBufferJPEG *quantization_param, YUVComponentSpecs yuvComp)
{
    int i=0;
    quantization_param->load_lum_quantiser_matrix = 1;

    //LibVA expects the QM in zigzag order
    for(i=0; i<NUM_QUANT_ELEMENTS; i++) {
        quantization_param->lum_quantiser_matrix[i] = jpeg_luma_quant[jpeg_zigzag[i]];
    }

    if(yuvComp.fourcc_val == VA_FOURCC_Y800) {
        quantization_param->load_chroma_quantiser_matrix = 0;
    } else {
        quantization_param->load_chroma_quantiser_matrix = 1;
        for(i=0; i<NUM_QUANT_ELEMENTS; i++) {
            quantization_param->chroma_quantiser_matrix[i] = jpeg_chroma_quant[jpeg_zigzag[i]];
        }
    }
}
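
/*
 * Short worked note (added for clarity; assumes jpeg_zigzag[] in jpegenc_utils.h is the
 * standard JPEG zigzag scan table): lum_quantiser_matrix[i] = jpeg_luma_quant[jpeg_zigzag[i]]
 * converts a table stored in natural (row-major) order into zigzag scan order, i.e. the first
 * few destination entries come from natural-order indices 0, 1, 8, 16, 9, 2, ...
 */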

void jpegenc_hufftable_init(VAHuffmanTableBufferJPEGBaseline *hufftable_param, YUVComponentSpecs yuvComp)
{
    hufftable_param->load_huffman_table[0] = 1; //Load Luma Hufftable
    if(yuvComp.fourcc_val == VA_FOURCC_Y800) {
        hufftable_param->load_huffman_table[1] = 0; //Do not load Chroma Hufftable for Y8
    } else {
        hufftable_param->load_huffman_table[1] = 1; //Load Chroma Hufftable for other formats
    }

    //Load Luma hufftable values
    //Load DC codes
    memcpy(hufftable_param->huffman_table[0].num_dc_codes, jpeg_hufftable_luma_dc+1, 16);
    //Load DC Values
    memcpy(hufftable_param->huffman_table[0].dc_values, jpeg_hufftable_luma_dc+17, 12);
    //Load AC codes
    memcpy(hufftable_param->huffman_table[0].num_ac_codes, jpeg_hufftable_luma_ac+1, 16);
    //Load AC Values
    memcpy(hufftable_param->huffman_table[0].ac_values, jpeg_hufftable_luma_ac+17, 162);
    memset(hufftable_param->huffman_table[0].pad, 0, 2);

    //Load Chroma hufftable values if needed
    if(yuvComp.fourcc_val != VA_FOURCC_Y800) {
        //Load DC codes
        memcpy(hufftable_param->huffman_table[1].num_dc_codes, jpeg_hufftable_chroma_dc+1, 16);
        //Load DC Values
        memcpy(hufftable_param->huffman_table[1].dc_values, jpeg_hufftable_chroma_dc+17, 12);
        //Load AC codes
        memcpy(hufftable_param->huffman_table[1].num_ac_codes, jpeg_hufftable_chroma_ac+1, 16);
        //Load AC Values
        memcpy(hufftable_param->huffman_table[1].ac_values, jpeg_hufftable_chroma_ac+17, 162);
        memset(hufftable_param->huffman_table[1].pad, 0, 2);
    }
}

void jpegenc_slice_param_init(VAEncSliceParameterBufferJPEG *slice_param, YUVComponentSpecs yuvComp)
{
    slice_param->restart_interval = 0;

    slice_param->num_components = yuvComp.num_components;

    slice_param->components[0].component_selector = 1;
    slice_param->components[0].dc_table_selector = 0;
    slice_param->components[0].ac_table_selector = 0;

    if(yuvComp.num_components > 1) {
        slice_param->components[1].component_selector = 2;
        slice_param->components[1].dc_table_selector = 1;
        slice_param->components[1].ac_table_selector = 1;

        slice_param->components[2].component_selector = 3;
        slice_param->components[2].dc_table_selector = 1;
        slice_param->components[2].ac_table_selector = 1;
    }
}


void populate_quantdata(JPEGQuantSection *quantVal, int type)
{
    uint8_t zigzag_qm[NUM_QUANT_ELEMENTS];
    int i;

    quantVal->DQT = DQT;
    quantVal->Pq = 0;
    quantVal->Tq = type;
    if(type == 0) {
        for(i=0; i<NUM_QUANT_ELEMENTS; i++) {
            zigzag_qm[i] = jpeg_luma_quant[jpeg_zigzag[i]];
        }

        memcpy(quantVal->Qk, zigzag_qm, NUM_QUANT_ELEMENTS);
    } else {
        for(i=0; i<NUM_QUANT_ELEMENTS; i++) {
            zigzag_qm[i] = jpeg_chroma_quant[jpeg_zigzag[i]];
        }
        memcpy(quantVal->Qk, zigzag_qm, NUM_QUANT_ELEMENTS);
    }
    quantVal->Lq = 3 + NUM_QUANT_ELEMENTS;
}

void populate_frame_header(JPEGFrameHeader *frameHdr, YUVComponentSpecs yuvComp, int picture_width, int picture_height)
{
    int i=0;

    frameHdr->SOF = SOF0;
    frameHdr->Lf = 8 + (3 * yuvComp.num_components); //Size of the frame header in bytes, excluding the SOF marker itself
    frameHdr->P = 8;
    frameHdr->Y = picture_height;
    frameHdr->X = picture_width;
    frameHdr->Nf = yuvComp.num_components;

    for(i=0; i<yuvComp.num_components; i++) {
        frameHdr->JPEGComponent[i].Ci = i+1;

        if(i == 0) {
            frameHdr->JPEGComponent[i].Hi = yuvComp.y_h_subsample;
            frameHdr->JPEGComponent[i].Vi = yuvComp.y_v_subsample;
            frameHdr->JPEGComponent[i].Tqi = 0;
        } else {
            //The U/V sampling factors are 1 for all supported formats except Y8,
            //so the code below is fine as-is. For Y8 this branch is never reached.
            frameHdr->JPEGComponent[i].Hi = yuvComp.u_h_subsample;
            frameHdr->JPEGComponent[i].Vi = yuvComp.u_v_subsample;
            frameHdr->JPEGComponent[i].Tqi = 1;
        }
    }
}
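
/*
 * Worked example (added note, using the values set up in init_yuv_component() below):
 * for a 3-component 4:2:0 stream (I420/NV12), Nf = 3, so Lf = 8 + 3*3 = 17 bytes, and the
 * components carry Hi/Vi = 2/2 for Y and 1/1 for U and V; for Y8, Nf = 1 and Lf = 11.
 */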

void populate_huff_section_header(JPEGHuffSection *huffSectionHdr, int th, int tc)
{
    int i=0, totalCodeWords=0;

    huffSectionHdr->DHT = DHT;
    huffSectionHdr->Tc = tc;
    huffSectionHdr->Th = th;

    if(th == 0) { //If Luma

        //If AC
        if(tc == 1) {
            memcpy(huffSectionHdr->Li, jpeg_hufftable_luma_ac+1, NUM_AC_RUN_SIZE_BITS);
            memcpy(huffSectionHdr->Vij, jpeg_hufftable_luma_ac+17, NUM_AC_CODE_WORDS_HUFFVAL);
        }

        //If DC
        if(tc == 0) {
            memcpy(huffSectionHdr->Li, jpeg_hufftable_luma_dc+1, NUM_DC_RUN_SIZE_BITS);
            memcpy(huffSectionHdr->Vij, jpeg_hufftable_luma_dc+17, NUM_DC_CODE_WORDS_HUFFVAL);
        }

    } else { //If Chroma
        //If AC
        if(tc == 1) {
            memcpy(huffSectionHdr->Li, jpeg_hufftable_chroma_ac+1, NUM_AC_RUN_SIZE_BITS);
            memcpy(huffSectionHdr->Vij, jpeg_hufftable_chroma_ac+17, NUM_AC_CODE_WORDS_HUFFVAL);
        }

        //If DC
        if(tc == 0) {
            memcpy(huffSectionHdr->Li, jpeg_hufftable_chroma_dc+1, NUM_DC_RUN_SIZE_BITS);
            memcpy(huffSectionHdr->Vij, jpeg_hufftable_chroma_dc+17, NUM_DC_CODE_WORDS_HUFFVAL);
        }
    }

    //Compute Lh (segment length) from the 16 code-length counts, for both luma and chroma tables
    for(i=0; i<NUM_AC_RUN_SIZE_BITS; i++) {
        totalCodeWords += huffSectionHdr->Li[i];
    }

    huffSectionHdr->Lh = 3 + 16 + totalCodeWords;
}
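
/*
 * Sanity check on the resulting lengths (worked numbers; the table sizes match the memcpy
 * counts used in jpegenc_hufftable_init() above): a baseline DC table has 12 code words, so
 * Lh = 3 + 16 + 12 = 31; a baseline AC table has 162 code words, so Lh = 3 + 16 + 162 = 181.
 */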

void populate_scan_header(JPEGScanHeader *scanHdr, YUVComponentSpecs yuvComp)
{
    scanHdr->SOS = SOS;
    scanHdr->Ns = yuvComp.num_components;

    //Y Component
    scanHdr->ScanComponent[0].Csj = 1;
    scanHdr->ScanComponent[0].Tdj = 0;
    scanHdr->ScanComponent[0].Taj = 0;

    if(yuvComp.num_components > 1) {
        //U Component
        scanHdr->ScanComponent[1].Csj = 2;
        scanHdr->ScanComponent[1].Tdj = 1;
        scanHdr->ScanComponent[1].Taj = 1;

        //V Component
        scanHdr->ScanComponent[2].Csj = 3;
        scanHdr->ScanComponent[2].Tdj = 1;
        scanHdr->ScanComponent[2].Taj = 1;
    }

    scanHdr->Ss = 0;  //0 for Baseline
    scanHdr->Se = 63; //63 for Baseline
    scanHdr->Ah = 0;  //0 for Baseline
    scanHdr->Al = 0;  //0 for Baseline

    scanHdr->Ls = 3 + (yuvComp.num_components * 2) + 3;
}
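
/*
 * Worked check of Ls (added for clarity): the scan header length is Ls = 6 + 2*Ns, so
 * 12 bytes for the 3-component case and 8 bytes for Y8, matching the 3 + 2*Ns + 3
 * expression used above.
 */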

// This method packs the header information which is to be sent to the driver through LibVA.
// All the information that needs to be inserted in the encoded buffer should be built and sent.
// It is the responsibility of the app talking to LibVA to build this header and send it.
// This includes markers, quantization tables (normalized with the quality factor), Huffman tables, etc.
int build_packed_jpeg_header_buffer(unsigned char **header_buffer, YUVComponentSpecs yuvComp, int picture_width, int picture_height, uint16_t restart_interval, int quality)
{
    bitstream bs;
    int i=0, j=0;
    uint32_t temp=0;

    bitstream_start(&bs);

    //Add SOI
    bitstream_put_ui(&bs, SOI, 16);

    //Add AppData
    bitstream_put_ui(&bs, APP0, 16);  //APP0 marker
    bitstream_put_ui(&bs, 16, 16);    //Length excluding the marker
    bitstream_put_ui(&bs, 0x4A, 8);   //J
    bitstream_put_ui(&bs, 0x46, 8);   //F
    bitstream_put_ui(&bs, 0x49, 8);   //I
    bitstream_put_ui(&bs, 0x46, 8);   //F
    bitstream_put_ui(&bs, 0x00, 8);   //0
    bitstream_put_ui(&bs, 1, 8);      //Major Version
    bitstream_put_ui(&bs, 1, 8);      //Minor Version
    bitstream_put_ui(&bs, 1, 8);      //Density units 0:no units, 1:pixels per inch, 2:pixels per cm
    bitstream_put_ui(&bs, 72, 16);    //X density
    bitstream_put_ui(&bs, 72, 16);    //Y density
    bitstream_put_ui(&bs, 0, 8);      //Thumbnail width
    bitstream_put_ui(&bs, 0, 8);      //Thumbnail height

    // Regarding quantization matrices: as per the JPEG spec ISO/IEC 10918-1:1993(E), Pg-19:
    // "applications may specify values which customize picture quality for their particular
    // image characteristics, display devices, and viewing conditions"

    //Normalization of the quality factor
    quality = (quality < 50) ? (5000/quality) : (200 - (quality*2));

    //Add QTable - Y
    JPEGQuantSection quantLuma;
    populate_quantdata(&quantLuma, 0);

    bitstream_put_ui(&bs, quantLuma.DQT, 16);
    bitstream_put_ui(&bs, quantLuma.Lq, 16);
    bitstream_put_ui(&bs, quantLuma.Pq, 4);
    bitstream_put_ui(&bs, quantLuma.Tq, 4);
    for(i=0; i<NUM_QUANT_ELEMENTS; i++) {
        //Scale the quantization table with the quality factor
        temp = (quantLuma.Qk[i] * quality)/100;
        //Clamp to the range [1,255]
        temp = (temp > 255) ? 255 : temp;
        temp = (temp < 1) ? 1 : temp;
        quantLuma.Qk[i] = (unsigned char)temp;
        bitstream_put_ui(&bs, quantLuma.Qk[i], 8);
    }
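
    /*
     * Worked example of the scaling above (added for clarity): with a user quality of 90 the
     * normalized factor is 200 - 180 = 20, so a table entry of 16 becomes 16*20/100 = 3;
     * with a user quality of 10 the factor is 5000/10 = 500, so 16 becomes 80, and any result
     * above 255 or below 1 is clamped into [1,255].
     */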

    //Add QTable - U/V
    if(yuvComp.fourcc_val != VA_FOURCC_Y800) {
        JPEGQuantSection quantChroma;
        populate_quantdata(&quantChroma, 1);

        bitstream_put_ui(&bs, quantChroma.DQT, 16);
        bitstream_put_ui(&bs, quantChroma.Lq, 16);
        bitstream_put_ui(&bs, quantChroma.Pq, 4);
        bitstream_put_ui(&bs, quantChroma.Tq, 4);
        for(i=0; i<NUM_QUANT_ELEMENTS; i++) {
            //Scale the quantization table with the quality factor
            temp = (quantChroma.Qk[i] * quality)/100;
            //Clamp to the range [1,255]
            temp = (temp > 255) ? 255 : temp;
            temp = (temp < 1) ? 1 : temp;
            quantChroma.Qk[i] = (unsigned char)temp;
            bitstream_put_ui(&bs, quantChroma.Qk[i], 8);
        }
    }

    //Add FrameHeader
    JPEGFrameHeader frameHdr;
    memset(&frameHdr, 0, sizeof(JPEGFrameHeader));
    populate_frame_header(&frameHdr, yuvComp, picture_width, picture_height);

    bitstream_put_ui(&bs, frameHdr.SOF, 16);
    bitstream_put_ui(&bs, frameHdr.Lf, 16);
    bitstream_put_ui(&bs, frameHdr.P, 8);
    bitstream_put_ui(&bs, frameHdr.Y, 16);
    bitstream_put_ui(&bs, frameHdr.X, 16);
    bitstream_put_ui(&bs, frameHdr.Nf, 8);
    for(i=0; i<frameHdr.Nf; i++) {
        bitstream_put_ui(&bs, frameHdr.JPEGComponent[i].Ci, 8);
        bitstream_put_ui(&bs, frameHdr.JPEGComponent[i].Hi, 4);
        bitstream_put_ui(&bs, frameHdr.JPEGComponent[i].Vi, 4);
        bitstream_put_ui(&bs, frameHdr.JPEGComponent[i].Tqi, 8);
    }

    //Add HuffTable AC and DC for Y,U/V components
    JPEGHuffSection acHuffSectionHdr, dcHuffSectionHdr;

    for(i=0; (i<yuvComp.num_components && (i<=1)); i++) {
        //Add DC component (Tc = 0)
        populate_huff_section_header(&dcHuffSectionHdr, i, 0);

        bitstream_put_ui(&bs, dcHuffSectionHdr.DHT, 16);
        bitstream_put_ui(&bs, dcHuffSectionHdr.Lh, 16);
        bitstream_put_ui(&bs, dcHuffSectionHdr.Tc, 4);
        bitstream_put_ui(&bs, dcHuffSectionHdr.Th, 4);
        for(j=0; j<NUM_DC_RUN_SIZE_BITS; j++) {
            bitstream_put_ui(&bs, dcHuffSectionHdr.Li[j], 8);
        }

        for(j=0; j<NUM_DC_CODE_WORDS_HUFFVAL; j++) {
            bitstream_put_ui(&bs, dcHuffSectionHdr.Vij[j], 8);
        }

        //Add AC component (Tc = 1)
        populate_huff_section_header(&acHuffSectionHdr, i, 1);

        bitstream_put_ui(&bs, acHuffSectionHdr.DHT, 16);
        bitstream_put_ui(&bs, acHuffSectionHdr.Lh, 16);
        bitstream_put_ui(&bs, acHuffSectionHdr.Tc, 4);
        bitstream_put_ui(&bs, acHuffSectionHdr.Th, 4);
        for(j=0; j<NUM_AC_RUN_SIZE_BITS; j++) {
            bitstream_put_ui(&bs, acHuffSectionHdr.Li[j], 8);
        }

        for(j=0; j<NUM_AC_CODE_WORDS_HUFFVAL; j++) {
            bitstream_put_ui(&bs, acHuffSectionHdr.Vij[j], 8);
        }

        if(yuvComp.fourcc_val == VA_FOURCC_Y800)
            break;
    }

    //Add Restart Interval if restart_interval is not 0
    if(restart_interval != 0) {
        JPEGRestartSection restartHdr;
        restartHdr.DRI = DRI;
        restartHdr.Lr = 4;
        restartHdr.Ri = restart_interval;

        bitstream_put_ui(&bs, restartHdr.DRI, 16);
        bitstream_put_ui(&bs, restartHdr.Lr, 16);
        bitstream_put_ui(&bs, restartHdr.Ri, 16);
    }

    //Add ScanHeader
    JPEGScanHeader scanHdr;
    populate_scan_header(&scanHdr, yuvComp);

    bitstream_put_ui(&bs, scanHdr.SOS, 16);
    bitstream_put_ui(&bs, scanHdr.Ls, 16);
    bitstream_put_ui(&bs, scanHdr.Ns, 8);

    for(i=0; i<scanHdr.Ns; i++) {
        bitstream_put_ui(&bs, scanHdr.ScanComponent[i].Csj, 8);
        bitstream_put_ui(&bs, scanHdr.ScanComponent[i].Tdj, 4);
        bitstream_put_ui(&bs, scanHdr.ScanComponent[i].Taj, 4);
    }

    bitstream_put_ui(&bs, scanHdr.Ss, 8);
    bitstream_put_ui(&bs, scanHdr.Se, 8);
    bitstream_put_ui(&bs, scanHdr.Ah, 4);
    bitstream_put_ui(&bs, scanHdr.Al, 4);

    bitstream_end(&bs);
    *header_buffer = (unsigned char *)bs.buffer;

    return bs.bit_offset;
}

//Upload the YUV image from the file to the VASurface
void upload_yuv_to_surface(VADisplay va_dpy, FILE *yuv_fp, VASurfaceID surface_id, YUVComponentSpecs yuvComp, int picture_width, int picture_height, int frame_size)
{
    VAImage surface_image;
    VAStatus va_status;
    void *surface_p = NULL;
    unsigned char newImageBuffer[frame_size];
    unsigned char *y_src, *u_src, *v_src;
    unsigned char *y_dst, *u_dst;
    int y_size = picture_width * picture_height;
    int u_size = 0;
    int row, col;
    size_t n_items;

    //u_size is used for the I420 and NV12 formats only
    u_size = ((picture_width >> 1) * (picture_height >> 1));

    memset(newImageBuffer, 0, frame_size);
    do {
        n_items = fread(newImageBuffer, frame_size, 1, yuv_fp);
    } while (n_items != 1);

    va_status = vaDeriveImage(va_dpy, surface_id, &surface_image);
    CHECK_VASTATUS(va_status, "vaDeriveImage");

    va_status = vaMapBuffer(va_dpy, surface_image.buf, &surface_p);
    assert(VA_STATUS_SUCCESS == va_status);

    y_src = newImageBuffer;
    u_src = newImageBuffer + y_size; /* UV offset for NV12 */
    v_src = newImageBuffer + y_size + u_size;

    y_dst = surface_p + surface_image.offsets[0];
    u_dst = surface_p + surface_image.offsets[1]; /* UV offset for NV12 */

    if((yuvComp.fourcc_val == VA_FOURCC_NV12) || (yuvComp.fourcc_val == VA_FOURCC_I420) ||
       (yuvComp.fourcc_val == VA_FOURCC_Y800)) {

        /* Y plane */
        for (row = 0; row < surface_image.height; row++) {
            memcpy(y_dst, y_src, surface_image.width);
            y_dst += surface_image.pitches[0];
            y_src += picture_width;
        }

        if(yuvComp.num_components > 1) {

            switch(yuvComp.fourcc_val) {
                case VA_FOURCC_NV12: {
                    for (row = 0; row < surface_image.height/2; row++) {
                        memcpy(u_dst, u_src, surface_image.width);
                        u_dst += surface_image.pitches[1];
                        u_src += (picture_width);
                    }
                    break;
                }

                case VA_FOURCC_I420: {
                    for (row = 0; row < surface_image.height / 2; row++) {
                        for (col = 0; col < surface_image.width / 2; col++) {
                            u_dst[col * 2] = u_src[col];
                            u_dst[col * 2 + 1] = v_src[col];
                        }

                        u_dst += surface_image.pitches[1];
                        u_src += (picture_width / 2);
                        v_src += (picture_width / 2);
                    }
                    break;
                }
            }//end of switch
        }//end of if check
    } else if((yuvComp.fourcc_val == VA_FOURCC_UYVY) || (yuvComp.fourcc_val == VA_FOURCC_YUY2)) {

        for(row = 0; row < surface_image.height; row++) {
            memcpy(y_dst, y_src, surface_image.width*2);
            y_dst += surface_image.pitches[0];
            y_src += picture_width*2;
        }

    } else if(yuvComp.fourcc_val == VA_FOURCC_RGBA) {

        for (row = 0; row < surface_image.height; row++) {
            memcpy(y_dst, y_src, surface_image.width*4);
            y_dst += surface_image.pitches[0];
            y_src += picture_width*4;
        }
    }

    vaUnmapBuffer(va_dpy, surface_image.buf);
    vaDestroyImage(va_dpy, surface_image.image_id);
}


void init_yuv_component(YUVComponentSpecs *yuvComponent, int yuv_type, int *surface_type, VASurfaceAttrib *fourcc)
{
    //<fourcc value 0(I420)/1(NV12)/2(UYVY)/3(YUY2)/4(Y8)/5(RGBA)>
    switch(yuv_type)
    {
        case 0 :   //I420
        case 1 : { //NV12
            yuvComponent->va_surface_format = (*surface_type) = VA_RT_FORMAT_YUV420;
            if(yuv_type == 0) {
                yuvComponent->fourcc_val = VA_FOURCC_I420;
                fourcc->value.value.i = VA_FOURCC_NV12;
            } else {
                yuvComponent->fourcc_val = fourcc->value.value.i = VA_FOURCC_NV12;
            }
            yuvComponent->num_components = 3;
            yuvComponent->y_h_subsample = 2;
            yuvComponent->y_v_subsample = 2;
            yuvComponent->u_h_subsample = 1;
            yuvComponent->u_v_subsample = 1;
            yuvComponent->v_h_subsample = 1;
            yuvComponent->v_v_subsample = 1;
            break;
        }

        case 2: { //UYVY
            yuvComponent->va_surface_format = (*surface_type) = VA_RT_FORMAT_YUV422;
            yuvComponent->fourcc_val = fourcc->value.value.i = VA_FOURCC_UYVY;
            yuvComponent->num_components = 3;
            yuvComponent->y_h_subsample = 2;
            yuvComponent->y_v_subsample = 1;
            yuvComponent->u_h_subsample = 1;
            yuvComponent->u_v_subsample = 1;
            yuvComponent->v_h_subsample = 1;
            yuvComponent->v_v_subsample = 1;
            break;
        }

        case 3: { //YUY2
            yuvComponent->va_surface_format = (*surface_type) = VA_RT_FORMAT_YUV422;
            yuvComponent->fourcc_val = fourcc->value.value.i = VA_FOURCC_YUY2;
            yuvComponent->num_components = 3;
            yuvComponent->y_h_subsample = 2;
            yuvComponent->y_v_subsample = 1;
            yuvComponent->u_h_subsample = 1;
            yuvComponent->u_v_subsample = 1;
            yuvComponent->v_h_subsample = 1;
            yuvComponent->v_v_subsample = 1;
            break;
        }

        case 4: { //Y8
            yuvComponent->va_surface_format = (*surface_type) = VA_RT_FORMAT_YUV400;
            yuvComponent->fourcc_val = fourcc->value.value.i = VA_FOURCC_Y800;
            yuvComponent->num_components = 1;
            yuvComponent->y_h_subsample = 1;
            yuvComponent->y_v_subsample = 1;
            yuvComponent->u_h_subsample = 0;
            yuvComponent->u_v_subsample = 0;
            yuvComponent->v_h_subsample = 0;
            yuvComponent->v_v_subsample = 0;
            break;
        }

        case 5: { //RGBA
            yuvComponent->va_surface_format = (*surface_type) = VA_RT_FORMAT_RGB32;
            yuvComponent->fourcc_val = fourcc->value.value.i = VA_FOURCC_RGBA;
            yuvComponent->num_components = 3;
            yuvComponent->y_h_subsample = 1;
            yuvComponent->y_v_subsample = 1;
            yuvComponent->u_h_subsample = 1;
            yuvComponent->u_v_subsample = 1;
            yuvComponent->v_h_subsample = 1;
            yuvComponent->v_v_subsample = 1;
            break;
        }

        default: {
            printf("Unsupported format:\n");
            show_help();
            break;
        }
    }
}
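
/*
 * Note on the I420 path (added for clarity): the surface pixel-format attribute is set to
 * VA_FOURCC_NV12 even when the input file is I420, so the VA surface is allocated as NV12;
 * the planar U and V data are then interleaved into the surface's UV plane in
 * upload_yuv_to_surface() above.
 */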

int encode_input_image(FILE *yuv_fp, FILE *jpeg_fp, int picture_width, int picture_height, int frame_size, int yuv_type, int quality)
{
    int num_entrypoints, enc_entrypoint;
    int major_ver, minor_ver;
    int surface_type;
    VAEntrypoint entrypoints[5];
    VASurfaceAttrib fourcc;
    VAConfigAttrib attrib[2];
    VADisplay   va_dpy;
    VAStatus va_status;
    VAConfigID config_id;
    VASurfaceID surface_id;
    VAContextID context_id;
    VABufferID pic_param_buf_id;                /* Picture parameter id */
    VABufferID slice_param_buf_id;              /* Slice parameter id, only 1 slice per frame in JPEG encode */
    VABufferID codedbuf_buf_id;                 /* Output buffer id, compressed data */
    VABufferID packed_raw_header_param_buf_id;  /* Header parameter buffer id */
    VABufferID packed_raw_header_buf_id;        /* Header buffer id */
    VABufferID qmatrix_buf_id;                  /* Quantization matrix id */
    VABufferID huffmantable_buf_id;             /* Huffman table id */
    VAEncPictureParameterBufferJPEG pic_param;  /* Picture parameter buffer */
    VAEncSliceParameterBufferJPEG slice_param;  /* Slice parameter buffer */
    VAQMatrixBufferJPEG quantization_param;     /* Quantization matrix buffer */
    VAHuffmanTableBufferJPEGBaseline hufftable_param; /* Huffman table buffer */
    YUVComponentSpecs yuvComponent;
    int writeToFile = 1;

    //Clamp the quality factor value to [1,100]
    if(quality >= 100) quality = 100;
    if(quality <= 0) quality = 1;

    fourcc.type = VASurfaceAttribPixelFormat;
    fourcc.flags = VA_SURFACE_ATTRIB_SETTABLE;
    fourcc.value.type = VAGenericValueTypeInteger;

    init_yuv_component(&yuvComponent, yuv_type, &surface_type, &fourcc);

    /* 1. Initialize the va driver */
    va_dpy = va_open_display();
    va_status = vaInitialize(va_dpy, &major_ver, &minor_ver);
    assert(va_status == VA_STATUS_SUCCESS);

    /* 2. Query for the entrypoints for the JPEGBaseline profile */
    va_status = vaQueryConfigEntrypoints(va_dpy, VAProfileJPEGBaseline, entrypoints, &num_entrypoints);
    CHECK_VASTATUS(va_status, "vaQueryConfigEntrypoints");
    // We need picture level encoding (VAEntrypointEncPicture). Find out whether it is supported.
    for (enc_entrypoint = 0; enc_entrypoint < num_entrypoints; enc_entrypoint++) {
        if (entrypoints[enc_entrypoint] == VAEntrypointEncPicture)
            break;
    }
    if (enc_entrypoint == num_entrypoints) {
        /* No JPEG Encode (VAEntrypointEncPicture) entry point found */
        assert(0);
    }

    /* 3. Query for the Render Target format supported */
    attrib[0].type = VAConfigAttribRTFormat;
    attrib[1].type = VAConfigAttribEncJPEG;
    vaGetConfigAttributes(va_dpy, VAProfileJPEGBaseline, VAEntrypointEncPicture, &attrib[0], 2);

    // The RT format should be one of the below.
    if(!((attrib[0].value & VA_RT_FORMAT_YUV420) || (attrib[0].value & VA_RT_FORMAT_YUV422) || (attrib[0].value & VA_RT_FORMAT_RGB32)
        || (attrib[0].value & VA_RT_FORMAT_YUV444) || (attrib[0].value & VA_RT_FORMAT_YUV400)))
    {
        /* Did not find a supported RT format */
        assert(0);
    }

    VAConfigAttribValEncJPEG jpeg_attrib_val;
    jpeg_attrib_val.value = attrib[1].value;

    /* Set JPEG profile attribs */
    jpeg_attrib_val.bits.arithmatic_coding_mode = 0;
    jpeg_attrib_val.bits.progressive_dct_mode = 0;
    jpeg_attrib_val.bits.non_interleaved_mode = 1;
    jpeg_attrib_val.bits.differential_mode = 0;

    attrib[1].value = jpeg_attrib_val.value;

    /* 4. Create Config for profile=VAProfileJPEGBaseline, entrypoint=VAEntrypointEncPicture,
     * with the RT format attribute */
    va_status = vaCreateConfig(va_dpy, VAProfileJPEGBaseline, VAEntrypointEncPicture,
                               &attrib[0], 2, &config_id);
    CHECK_VASTATUS(va_status, "vaCreateConfig");

    /* 5. Create Surface for the input picture */
    va_status = vaCreateSurfaces(va_dpy, surface_type, picture_width, picture_height,
                                 &surface_id, 1, &fourcc, 1);
    CHECK_VASTATUS(va_status, "vaCreateSurfaces");

    //Map the input yuv file to the input surface created with the surface_id
    upload_yuv_to_surface(va_dpy, yuv_fp, surface_id, yuvComponent, picture_width, picture_height, frame_size);

    /* 6. Create Context for the encode pipe */
    va_status = vaCreateContext(va_dpy, config_id, picture_width, picture_height,
                                VA_PROGRESSIVE, &surface_id, 1, &context_id);
    CHECK_VASTATUS(va_status, "vaCreateContext");

    /* Create buffer for the encoded data to be stored */
    va_status = vaCreateBuffer(va_dpy, context_id, VAEncCodedBufferType,
                               frame_size, 1, NULL, &codedbuf_buf_id);
    CHECK_VASTATUS(va_status, "vaCreateBuffer");

    //Initialize the picture parameter buffer
    pic_param.coded_buf = codedbuf_buf_id;
    jpegenc_pic_param_init(&pic_param, picture_width, picture_height, quality, yuvComponent);

    /* 7. Create buffer for the picture parameter */
    va_status = vaCreateBuffer(va_dpy, context_id, VAEncPictureParameterBufferType,
                               sizeof(VAEncPictureParameterBufferJPEG), 1, &pic_param, &pic_param_buf_id);
    CHECK_VASTATUS(va_status, "vaCreateBuffer");

    //Load the QMatrix
    jpegenc_qmatrix_init(&quantization_param, yuvComponent);

    /* 8. Create buffer for the Quantization Matrix */
    va_status = vaCreateBuffer(va_dpy, context_id, VAQMatrixBufferType,
                               sizeof(VAQMatrixBufferJPEG), 1, &quantization_param, &qmatrix_buf_id);
    CHECK_VASTATUS(va_status, "vaCreateBuffer");

    //Load the Huffman Tables
    jpegenc_hufftable_init(&hufftable_param, yuvComponent);

    /* 9. Create buffer for the Huffman Tables */
    va_status = vaCreateBuffer(va_dpy, context_id, VAHuffmanTableBufferType,
                               sizeof(VAHuffmanTableBufferJPEGBaseline), 1, &hufftable_param, &huffmantable_buf_id);
    CHECK_VASTATUS(va_status, "vaCreateBuffer");

    //Initialize the slice parameter buffer
    jpegenc_slice_param_init(&slice_param, yuvComponent);

    /* 10. Create buffer for the slice parameter */
    va_status = vaCreateBuffer(va_dpy, context_id, VAEncSliceParameterBufferType,
                               sizeof(slice_param), 1, &slice_param, &slice_param_buf_id);
    CHECK_VASTATUS(va_status, "vaCreateBuffer");

    //Pack headers and send them using a raw data buffer
    VAEncPackedHeaderParameterBuffer packed_header_param_buffer;
    unsigned int length_in_bits;
    unsigned char *packed_header_buffer = NULL;

    length_in_bits = build_packed_jpeg_header_buffer(&packed_header_buffer, yuvComponent, picture_width, picture_height, slice_param.restart_interval, quality);
    packed_header_param_buffer.type = VAEncPackedHeaderRawData;
    packed_header_param_buffer.bit_length = length_in_bits;
    packed_header_param_buffer.has_emulation_bytes = 0;

    /* 11. Create raw buffer for the header */
    va_status = vaCreateBuffer(va_dpy,
                               context_id,
                               VAEncPackedHeaderParameterBufferType,
                               sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer,
                               &packed_raw_header_param_buf_id);
    CHECK_VASTATUS(va_status, "vaCreateBuffer");

    va_status = vaCreateBuffer(va_dpy,
                               context_id,
                               VAEncPackedHeaderDataBufferType,
                               (length_in_bits + 7) / 8, 1, packed_header_buffer,
                               &packed_raw_header_buf_id);
    CHECK_VASTATUS(va_status, "vaCreateBuffer");

    /* 12. Begin picture */
    va_status = vaBeginPicture(va_dpy, context_id, surface_id);
    CHECK_VASTATUS(va_status, "vaBeginPicture");

    /* 13. Render picture for all the VA buffers created */
    va_status = vaRenderPicture(va_dpy, context_id, &pic_param_buf_id, 1);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    va_status = vaRenderPicture(va_dpy, context_id, &qmatrix_buf_id, 1);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    va_status = vaRenderPicture(va_dpy, context_id, &huffmantable_buf_id, 1);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    va_status = vaRenderPicture(va_dpy, context_id, &slice_param_buf_id, 1);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    va_status = vaRenderPicture(va_dpy, context_id, &packed_raw_header_param_buf_id, 1);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    va_status = vaRenderPicture(va_dpy, context_id, &packed_raw_header_buf_id, 1);
    CHECK_VASTATUS(va_status, "vaRenderPicture");

    va_status = vaEndPicture(va_dpy, context_id);
    CHECK_VASTATUS(va_status, "vaEndPicture");

    if (writeToFile) {
        VASurfaceStatus surface_status;
        size_t w_items;
        VACodedBufferSegment *coded_buffer_segment;
        unsigned char *coded_mem;
        int slice_data_length;

        va_status = vaSyncSurface(va_dpy, surface_id);
        CHECK_VASTATUS(va_status, "vaSyncSurface");

        surface_status = 0;
        va_status = vaQuerySurfaceStatus(va_dpy, surface_id, &surface_status);
        CHECK_VASTATUS(va_status, "vaQuerySurfaceStatus");

        va_status = vaMapBuffer(va_dpy, codedbuf_buf_id, (void **)(&coded_buffer_segment));
        CHECK_VASTATUS(va_status, "vaMapBuffer");

        coded_mem = coded_buffer_segment->buf;

        if (coded_buffer_segment->status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK) {
            printf("ERROR......Coded buffer too small\n");
        } else {
            slice_data_length = coded_buffer_segment->size;

            do {
                w_items = fwrite(coded_mem, slice_data_length, 1, jpeg_fp);
            } while (w_items != 1);
        }

        va_status = vaUnmapBuffer(va_dpy, codedbuf_buf_id);
        CHECK_VASTATUS(va_status, "vaUnmapBuffer");
    }

    vaDestroyBuffer(va_dpy, pic_param_buf_id);
    vaDestroyBuffer(va_dpy, qmatrix_buf_id);
    vaDestroyBuffer(va_dpy, slice_param_buf_id);
    vaDestroyBuffer(va_dpy, huffmantable_buf_id);
    vaDestroyBuffer(va_dpy, codedbuf_buf_id);
    vaDestroyBuffer(va_dpy, packed_raw_header_param_buf_id);
    vaDestroyBuffer(va_dpy, packed_raw_header_buf_id);
    vaDestroySurfaces(va_dpy, &surface_id, 1);
    vaDestroyContext(va_dpy, context_id);
    vaDestroyConfig(va_dpy, config_id);
    vaTerminate(va_dpy);
    va_close_display(va_dpy);

    return 0;
}


int main(int argc, char *argv[])
{
    FILE *yuv_fp;
    FILE *jpeg_fp;
    off_t file_size;
    clock_t start_time, finish_time;
    unsigned int duration;
    unsigned int yuv_type = 0;
    int quality = 0;
    unsigned int picture_width = 0;
    unsigned int picture_height = 0;
    unsigned int frame_size = 0;

    va_init_display_args(&argc, argv);

    if(argc != 7) {
        show_help();
        return -1;
    }

    picture_width = atoi(argv[1]);
    picture_height = atoi(argv[2]);
    yuv_type = atoi(argv[5]);
    quality = atoi(argv[6]);

    yuv_fp = fopen(argv[3], "rb");
    if (yuv_fp == NULL) {
        printf("Can't open input YUV file\n");
        return -1;
    }

    fseeko(yuv_fp, (off_t)0, SEEK_END);
    file_size = ftello(yuv_fp);

    //<input file type: 0(I420)/1(NV12)/2(UYVY)/3(YUY2)/4(Y8)/5(RGBA)>
    switch(yuv_type)
    {
        case 0 :   //I420
        case 1 : { //NV12
            frame_size = picture_width * picture_height + ((picture_width * picture_height) >> 1);
            break;
        }

        case 2:  //UYVY
        case 3: { //YUY2
            frame_size = 2 * (picture_width * picture_height);
            break;
        }

        case 4: { //Y8
            frame_size = picture_width * picture_height;
            break;
        }

        case 5: { //RGBA
            frame_size = 4 * (picture_width * picture_height);
            break;
        }

        default: {
            printf("Unsupported format:\n");
            show_help();
            fclose(yuv_fp);
            return -1;
        }
    }
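
    /*
     * Worked example of the sizes above (added for clarity): a 1024x768 I420/NV12 frame is
     * 1024*768*3/2 = 1179648 bytes, the same resolution in UYVY/YUY2 is 1572864 bytes,
     * Y8 is 786432 bytes, and RGBA is 3145728 bytes.
     */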

    if ((file_size < frame_size) || (file_size % frame_size)) {
        fclose(yuv_fp);
        printf("The YUV file's size is not correct: file_size=%ld, frame_size=%u\n", (long)file_size, frame_size);
        return -1;
    }

    fseeko(yuv_fp, (off_t)0, SEEK_SET);

    jpeg_fp = fopen(argv[4], "wb");
    if (jpeg_fp == NULL) {
        fclose(yuv_fp);
        printf("Can't open output destination jpeg file\n");
        return -1;
    }

    start_time = clock();
    encode_input_image(yuv_fp, jpeg_fp, picture_width, picture_height, frame_size, yuv_type, quality);
    if(yuv_fp != NULL) fclose(yuv_fp);
    if(jpeg_fp != NULL) fclose(jpeg_fp);
    finish_time = clock();
    duration = finish_time - start_time;
    printf("Encoding finished in %u ticks\n", duration);

    return 0;
}