Subversion Repositories Kolibri OS

Compare Revisions

Rev 5361 → Rev 5362

/contrib/sdk/sources/vaapi/libva-1.4.1/test/encode/Makefile.am
0,0 → 1,55
# Copyright (c) 2007 Intel Corporation. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sub license, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice (including the
# next paragraph) shall be included in all copies or substantial portions
# of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
# IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
# ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 
bin_PROGRAMS = avcenc mpeg2vaenc h264encode
 
AM_CPPFLAGS = \
	-Wall \
	-I$(top_srcdir) \
	-I$(top_srcdir)/va \
	$(NULL)
 
h264encode_SOURCES = h264encode.c
h264encode_CFLAGS = -I$(top_srcdir)/test/common -g
h264encode_LDADD = \
	$(top_builddir)/va/libva.la \
	$(top_builddir)/test/common/libva-display.la \
	-lpthread -lm
 
avcenc_SOURCES = avcenc.c
avcenc_CFLAGS = -I$(top_srcdir)/test/common -g
avcenc_LDADD = \
	$(top_builddir)/va/libva.la \
	$(top_builddir)/test/common/libva-display.la \
	-lpthread
 
mpeg2vaenc_SOURCES = mpeg2vaenc.c
mpeg2vaenc_CFLAGS = -I$(top_srcdir)/test/common
mpeg2vaenc_LDADD = \
	$(top_builddir)/va/libva.la \
	$(top_builddir)/test/common/libva-display.la \
	-lpthread
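
# The valgrind target below is a convenience rule: it runs every freshly
# built test binary under valgrind.  Because the programs are linked through
# libtool, the files in this directory are wrapper scripts and the real
# executables live in .libs/, hence the .libs/$$a path.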
 
valgrind: $(bin_PROGRAMS)
	for a in $(bin_PROGRAMS); do \
		valgrind --leak-check=full --show-reachable=yes .libs/$$a; \
	done
/contrib/sdk/sources/vaapi/libva-1.4.1/test/encode/Makefile.in
0,0 → 1,776
# Makefile.in generated by automake 1.14.1 from Makefile.am.
# @configure_input@
 
# Copyright (C) 1994-2013 Free Software Foundation, Inc.
 
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
 
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
 
@SET_MAKE@
 
# Copyright (c) 2007 Intel Corporation. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sub license, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice (including the
# next paragraph) shall be included in all copies or substantial portions
# of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
# IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
# ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 
VPATH = @srcdir@
am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)'
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/@PACKAGE@
pkgincludedir = $(includedir)/@PACKAGE@
pkglibdir = $(libdir)/@PACKAGE@
pkglibexecdir = $(libexecdir)/@PACKAGE@
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = @build@
host_triplet = @host@
bin_PROGRAMS = avcenc$(EXEEXT) mpeg2vaenc$(EXEEXT) h264encode$(EXEEXT)
subdir = test/encode
DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \
$(top_srcdir)/depcomp
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(install_sh) -d
CONFIG_HEADER = $(top_builddir)/config.h
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
am__installdirs = "$(DESTDIR)$(bindir)"
PROGRAMS = $(bin_PROGRAMS)
am_avcenc_OBJECTS = avcenc-avcenc.$(OBJEXT)
avcenc_OBJECTS = $(am_avcenc_OBJECTS)
avcenc_DEPENDENCIES = $(top_builddir)/va/libva.la \
$(top_builddir)/test/common/libva-display.la
AM_V_lt = $(am__v_lt_@AM_V@)
am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@)
am__v_lt_0 = --silent
am__v_lt_1 =
avcenc_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=link $(CCLD) $(avcenc_CFLAGS) $(CFLAGS) \
$(AM_LDFLAGS) $(LDFLAGS) -o $@
am_h264encode_OBJECTS = h264encode-h264encode.$(OBJEXT)
h264encode_OBJECTS = $(am_h264encode_OBJECTS)
h264encode_DEPENDENCIES = $(top_builddir)/va/libva.la \
$(top_builddir)/test/common/libva-display.la
h264encode_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=link $(CCLD) $(h264encode_CFLAGS) \
$(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@
am_mpeg2vaenc_OBJECTS = mpeg2vaenc-mpeg2vaenc.$(OBJEXT)
mpeg2vaenc_OBJECTS = $(am_mpeg2vaenc_OBJECTS)
mpeg2vaenc_DEPENDENCIES = $(top_builddir)/va/libva.la \
$(top_builddir)/test/common/libva-display.la
mpeg2vaenc_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=link $(CCLD) $(mpeg2vaenc_CFLAGS) \
$(CFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@
AM_V_P = $(am__v_P_@AM_V@)
am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_@AM_V@)
am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_@AM_V@)
am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
am__v_at_0 = @
am__v_at_1 =
DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir)
depcomp = $(SHELL) $(top_srcdir)/depcomp
am__depfiles_maybe = depfiles
am__mv = mv -f
COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
$(AM_CFLAGS) $(CFLAGS)
AM_V_CC = $(am__v_CC_@AM_V@)
am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@)
am__v_CC_0 = @echo " CC " $@;
am__v_CC_1 =
CCLD = $(CC)
LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
$(AM_LDFLAGS) $(LDFLAGS) -o $@
AM_V_CCLD = $(am__v_CCLD_@AM_V@)
am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@)
am__v_CCLD_0 = @echo " CCLD " $@;
am__v_CCLD_1 =
SOURCES = $(avcenc_SOURCES) $(h264encode_SOURCES) \
$(mpeg2vaenc_SOURCES)
DIST_SOURCES = $(avcenc_SOURCES) $(h264encode_SOURCES) \
$(mpeg2vaenc_SOURCES)
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
# Read a list of newline-separated strings from the standard input,
# and print each of them once, without duplicates. Input order is
# *not* preserved.
am__uniquify_input = $(AWK) '\
BEGIN { nonempty = 0; } \
{ items[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in items) print i; }; } \
'
# Make sure the list of sources is unique. This is necessary because,
# e.g., the same source file might be shared among _SOURCES variables
# for different programs/libraries.
am__define_uniq_tagged_files = \
list='$(am__tagged_files)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | $(am__uniquify_input)`
ETAGS = etags
CTAGS = ctags
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = @ACLOCAL@
AMTAR = @AMTAR@
AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
AR = @AR@
AUTOCONF = @AUTOCONF@
AUTOHEADER = @AUTOHEADER@
AUTOMAKE = @AUTOMAKE@
AWK = @AWK@
CC = @CC@
CCDEPMODE = @CCDEPMODE@
CFLAGS = @CFLAGS@
CPP = @CPP@
CPPFLAGS = @CPPFLAGS@
CXX = @CXX@
CXXCPP = @CXXCPP@
CXXDEPMODE = @CXXDEPMODE@
CXXFLAGS = @CXXFLAGS@
CYGPATH_W = @CYGPATH_W@
DEFS = @DEFS@
DEPDIR = @DEPDIR@
DLLTOOL = @DLLTOOL@
DOXYGEN = @DOXYGEN@
DRM_CFLAGS = @DRM_CFLAGS@
DRM_LIBS = @DRM_LIBS@
DSYMUTIL = @DSYMUTIL@
DUMPBIN = @DUMPBIN@
ECHO_C = @ECHO_C@
ECHO_N = @ECHO_N@
ECHO_T = @ECHO_T@
EGL_CFLAGS = @EGL_CFLAGS@
EGL_LIBS = @EGL_LIBS@
EGREP = @EGREP@
EXEEXT = @EXEEXT@
FGREP = @FGREP@
GLX_CFLAGS = @GLX_CFLAGS@
GLX_LIBS = @GLX_LIBS@
GREP = @GREP@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
LD = @LD@
LDFLAGS = @LDFLAGS@
LIBDRM_VERSION = @LIBDRM_VERSION@
LIBOBJS = @LIBOBJS@
LIBS = @LIBS@
LIBTOOL = @LIBTOOL@
LIBVA_DRIVERS_PATH = @LIBVA_DRIVERS_PATH@
LIBVA_LT_LDFLAGS = @LIBVA_LT_LDFLAGS@
LIBVA_LT_VERSION = @LIBVA_LT_VERSION@
LIBVA_MAJOR_VERSION = @LIBVA_MAJOR_VERSION@
LIBVA_MICRO_VERSION = @LIBVA_MICRO_VERSION@
LIBVA_MINOR_VERSION = @LIBVA_MINOR_VERSION@
LIBVA_VERSION = @LIBVA_VERSION@
LIPO = @LIPO@
LN_S = @LN_S@
LTLIBOBJS = @LTLIBOBJS@
MAKEINFO = @MAKEINFO@
MANIFEST_TOOL = @MANIFEST_TOOL@
MKDIR_P = @MKDIR_P@
NM = @NM@
NMEDIT = @NMEDIT@
OBJDUMP = @OBJDUMP@
OBJEXT = @OBJEXT@
OTOOL = @OTOOL@
OTOOL64 = @OTOOL64@
PACKAGE = @PACKAGE@
PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_STRING = @PACKAGE_STRING@
PACKAGE_TARNAME = @PACKAGE_TARNAME@
PACKAGE_URL = @PACKAGE_URL@
PACKAGE_VERSION = @PACKAGE_VERSION@
PATH_SEPARATOR = @PATH_SEPARATOR@
PKG_CONFIG = @PKG_CONFIG@
PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@
PKG_CONFIG_PATH = @PKG_CONFIG_PATH@
RANLIB = @RANLIB@
SED = @SED@
SET_MAKE = @SET_MAKE@
SHELL = @SHELL@
STRIP = @STRIP@
TODAY = @TODAY@
VA_API_MAJOR_VERSION = @VA_API_MAJOR_VERSION@
VA_API_MICRO_VERSION = @VA_API_MICRO_VERSION@
VA_API_MINOR_VERSION = @VA_API_MINOR_VERSION@
VA_API_VERSION = @VA_API_VERSION@
VERSION = @VERSION@
WAYLAND_API_VERSION = @WAYLAND_API_VERSION@
WAYLAND_CFLAGS = @WAYLAND_CFLAGS@
WAYLAND_LIBS = @WAYLAND_LIBS@
WAYLAND_SCANNER_CFLAGS = @WAYLAND_SCANNER_CFLAGS@
WAYLAND_SCANNER_LIBS = @WAYLAND_SCANNER_LIBS@
X11_CFLAGS = @X11_CFLAGS@
X11_LIBS = @X11_LIBS@
XEXT_CFLAGS = @XEXT_CFLAGS@
XEXT_LIBS = @XEXT_LIBS@
XFIXES_CFLAGS = @XFIXES_CFLAGS@
XFIXES_LIBS = @XFIXES_LIBS@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
abs_top_srcdir = @abs_top_srcdir@
ac_ct_AR = @ac_ct_AR@
ac_ct_CC = @ac_ct_CC@
ac_ct_CXX = @ac_ct_CXX@
ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
am__include = @am__include@
am__leading_dot = @am__leading_dot@
am__quote = @am__quote@
am__tar = @am__tar@
am__untar = @am__untar@
bindir = @bindir@
build = @build@
build_alias = @build_alias@
build_cpu = @build_cpu@
build_os = @build_os@
build_vendor = @build_vendor@
builddir = @builddir@
datadir = @datadir@
datarootdir = @datarootdir@
docdir = @docdir@
dvidir = @dvidir@
exec_prefix = @exec_prefix@
host = @host@
host_alias = @host_alias@
host_cpu = @host_cpu@
host_os = @host_os@
host_vendor = @host_vendor@
htmldir = @htmldir@
includedir = @includedir@
infodir = @infodir@
install_sh = @install_sh@
libdir = @libdir@
libexecdir = @libexecdir@
localedir = @localedir@
localstatedir = @localstatedir@
mandir = @mandir@
mkdir_p = @mkdir_p@
oldincludedir = @oldincludedir@
pdfdir = @pdfdir@
pkgconfigdir = @pkgconfigdir@
prefix = @prefix@
program_transform_name = @program_transform_name@
psdir = @psdir@
sbindir = @sbindir@
sharedstatedir = @sharedstatedir@
srcdir = @srcdir@
sysconfdir = @sysconfdir@
target_alias = @target_alias@
top_build_prefix = @top_build_prefix@
top_builddir = @top_builddir@
top_srcdir = @top_srcdir@
wayland_protocoldir = @wayland_protocoldir@
wayland_scanner = @wayland_scanner@
AM_CPPFLAGS = \
-Wall \
-I$(top_srcdir) \
-I$(top_srcdir)/va \
$(NULL)
 
h264encode_SOURCES = h264encode.c
h264encode_CFLAGS = -I$(top_srcdir)/test/common -g
h264encode_LDADD = \
$(top_builddir)/va/libva.la \
$(top_builddir)/test/common/libva-display.la \
-lpthread -lm
 
avcenc_SOURCES = avcenc.c
avcenc_CFLAGS = -I$(top_srcdir)/test/common -g
avcenc_LDADD = \
$(top_builddir)/va/libva.la \
$(top_builddir)/test/common/libva-display.la \
-lpthread
 
mpeg2vaenc_SOURCES = mpeg2vaenc.c
mpeg2vaenc_CFLAGS = -I$(top_srcdir)/test/common
mpeg2vaenc_LDADD = \
$(top_builddir)/va/libva.la \
$(top_builddir)/test/common/libva-display.la \
-lpthread
 
all: all-am
 
.SUFFIXES:
.SUFFIXES: .c .lo .o .obj
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu test/encode/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --gnu test/encode/Makefile
.PRECIOUS: Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
esac;
 
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
 
$(top_srcdir)/configure: $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
install-binPROGRAMS: $(bin_PROGRAMS)
@$(NORMAL_INSTALL)
@list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \
if test -n "$$list"; then \
echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \
$(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \
fi; \
for p in $$list; do echo "$$p $$p"; done | \
sed 's/$(EXEEXT)$$//' | \
while read p p1; do if test -f $$p \
|| test -f $$p1 \
; then echo "$$p"; echo "$$p"; else :; fi; \
done | \
sed -e 'p;s,.*/,,;n;h' \
-e 's|.*|.|' \
-e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \
sed 'N;N;N;s,\n, ,g' | \
$(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \
{ d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \
if ($$2 == $$4) files[d] = files[d] " " $$1; \
else { print "f", $$3 "/" $$4, $$1; } } \
END { for (d in files) print "f", d, files[d] }' | \
while read type dir files; do \
if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \
test -z "$$files" || { \
echo " $(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files '$(DESTDIR)$(bindir)$$dir'"; \
$(INSTALL_PROGRAM_ENV) $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL_PROGRAM) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \
} \
; done
 
uninstall-binPROGRAMS:
@$(NORMAL_UNINSTALL)
@list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \
files=`for p in $$list; do echo "$$p"; done | \
sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \
-e 's/$$/$(EXEEXT)/' \
`; \
test -n "$$list" || exit 0; \
echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \
cd "$(DESTDIR)$(bindir)" && rm -f $$files
 
clean-binPROGRAMS:
@list='$(bin_PROGRAMS)'; test -n "$$list" || exit 0; \
echo " rm -f" $$list; \
rm -f $$list || exit $$?; \
test -n "$(EXEEXT)" || exit 0; \
list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
echo " rm -f" $$list; \
rm -f $$list
 
avcenc$(EXEEXT): $(avcenc_OBJECTS) $(avcenc_DEPENDENCIES) $(EXTRA_avcenc_DEPENDENCIES)
@rm -f avcenc$(EXEEXT)
$(AM_V_CCLD)$(avcenc_LINK) $(avcenc_OBJECTS) $(avcenc_LDADD) $(LIBS)
 
h264encode$(EXEEXT): $(h264encode_OBJECTS) $(h264encode_DEPENDENCIES) $(EXTRA_h264encode_DEPENDENCIES)
@rm -f h264encode$(EXEEXT)
$(AM_V_CCLD)$(h264encode_LINK) $(h264encode_OBJECTS) $(h264encode_LDADD) $(LIBS)
 
mpeg2vaenc$(EXEEXT): $(mpeg2vaenc_OBJECTS) $(mpeg2vaenc_DEPENDENCIES) $(EXTRA_mpeg2vaenc_DEPENDENCIES)
@rm -f mpeg2vaenc$(EXEEXT)
$(AM_V_CCLD)$(mpeg2vaenc_LINK) $(mpeg2vaenc_OBJECTS) $(mpeg2vaenc_LDADD) $(LIBS)
 
mostlyclean-compile:
-rm -f *.$(OBJEXT)
 
distclean-compile:
-rm -f *.tab.c
 
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/avcenc-avcenc.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/h264encode-h264encode.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/mpeg2vaenc-mpeg2vaenc.Po@am__quote@
 
.c.o:
@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ $<
 
.c.obj:
@am__fastdepCC_TRUE@ $(AM_V_CC)$(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
 
.c.lo:
@am__fastdepCC_TRUE@ $(AM_V_CC)$(LTCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LTCOMPILE) -c -o $@ $<
 
avcenc-avcenc.o: avcenc.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(avcenc_CFLAGS) $(CFLAGS) -MT avcenc-avcenc.o -MD -MP -MF $(DEPDIR)/avcenc-avcenc.Tpo -c -o avcenc-avcenc.o `test -f 'avcenc.c' || echo '$(srcdir)/'`avcenc.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/avcenc-avcenc.Tpo $(DEPDIR)/avcenc-avcenc.Po
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='avcenc.c' object='avcenc-avcenc.o' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(avcenc_CFLAGS) $(CFLAGS) -c -o avcenc-avcenc.o `test -f 'avcenc.c' || echo '$(srcdir)/'`avcenc.c
 
avcenc-avcenc.obj: avcenc.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(avcenc_CFLAGS) $(CFLAGS) -MT avcenc-avcenc.obj -MD -MP -MF $(DEPDIR)/avcenc-avcenc.Tpo -c -o avcenc-avcenc.obj `if test -f 'avcenc.c'; then $(CYGPATH_W) 'avcenc.c'; else $(CYGPATH_W) '$(srcdir)/avcenc.c'; fi`
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/avcenc-avcenc.Tpo $(DEPDIR)/avcenc-avcenc.Po
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='avcenc.c' object='avcenc-avcenc.obj' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(avcenc_CFLAGS) $(CFLAGS) -c -o avcenc-avcenc.obj `if test -f 'avcenc.c'; then $(CYGPATH_W) 'avcenc.c'; else $(CYGPATH_W) '$(srcdir)/avcenc.c'; fi`
 
h264encode-h264encode.o: h264encode.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(h264encode_CFLAGS) $(CFLAGS) -MT h264encode-h264encode.o -MD -MP -MF $(DEPDIR)/h264encode-h264encode.Tpo -c -o h264encode-h264encode.o `test -f 'h264encode.c' || echo '$(srcdir)/'`h264encode.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/h264encode-h264encode.Tpo $(DEPDIR)/h264encode-h264encode.Po
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='h264encode.c' object='h264encode-h264encode.o' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(h264encode_CFLAGS) $(CFLAGS) -c -o h264encode-h264encode.o `test -f 'h264encode.c' || echo '$(srcdir)/'`h264encode.c
 
h264encode-h264encode.obj: h264encode.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(h264encode_CFLAGS) $(CFLAGS) -MT h264encode-h264encode.obj -MD -MP -MF $(DEPDIR)/h264encode-h264encode.Tpo -c -o h264encode-h264encode.obj `if test -f 'h264encode.c'; then $(CYGPATH_W) 'h264encode.c'; else $(CYGPATH_W) '$(srcdir)/h264encode.c'; fi`
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/h264encode-h264encode.Tpo $(DEPDIR)/h264encode-h264encode.Po
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='h264encode.c' object='h264encode-h264encode.obj' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(h264encode_CFLAGS) $(CFLAGS) -c -o h264encode-h264encode.obj `if test -f 'h264encode.c'; then $(CYGPATH_W) 'h264encode.c'; else $(CYGPATH_W) '$(srcdir)/h264encode.c'; fi`
 
mpeg2vaenc-mpeg2vaenc.o: mpeg2vaenc.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(mpeg2vaenc_CFLAGS) $(CFLAGS) -MT mpeg2vaenc-mpeg2vaenc.o -MD -MP -MF $(DEPDIR)/mpeg2vaenc-mpeg2vaenc.Tpo -c -o mpeg2vaenc-mpeg2vaenc.o `test -f 'mpeg2vaenc.c' || echo '$(srcdir)/'`mpeg2vaenc.c
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/mpeg2vaenc-mpeg2vaenc.Tpo $(DEPDIR)/mpeg2vaenc-mpeg2vaenc.Po
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='mpeg2vaenc.c' object='mpeg2vaenc-mpeg2vaenc.o' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(mpeg2vaenc_CFLAGS) $(CFLAGS) -c -o mpeg2vaenc-mpeg2vaenc.o `test -f 'mpeg2vaenc.c' || echo '$(srcdir)/'`mpeg2vaenc.c
 
mpeg2vaenc-mpeg2vaenc.obj: mpeg2vaenc.c
@am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(mpeg2vaenc_CFLAGS) $(CFLAGS) -MT mpeg2vaenc-mpeg2vaenc.obj -MD -MP -MF $(DEPDIR)/mpeg2vaenc-mpeg2vaenc.Tpo -c -o mpeg2vaenc-mpeg2vaenc.obj `if test -f 'mpeg2vaenc.c'; then $(CYGPATH_W) 'mpeg2vaenc.c'; else $(CYGPATH_W) '$(srcdir)/mpeg2vaenc.c'; fi`
@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/mpeg2vaenc-mpeg2vaenc.Tpo $(DEPDIR)/mpeg2vaenc-mpeg2vaenc.Po
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='mpeg2vaenc.c' object='mpeg2vaenc-mpeg2vaenc.obj' libtool=no @AMDEPBACKSLASH@
@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(mpeg2vaenc_CFLAGS) $(CFLAGS) -c -o mpeg2vaenc-mpeg2vaenc.obj `if test -f 'mpeg2vaenc.c'; then $(CYGPATH_W) 'mpeg2vaenc.c'; else $(CYGPATH_W) '$(srcdir)/mpeg2vaenc.c'; fi`
 
mostlyclean-libtool:
-rm -f *.lo
 
clean-libtool:
-rm -rf .libs _libs
 
ID: $(am__tagged_files)
$(am__define_uniq_tagged_files); mkid -fID $$unique
tags: tags-am
TAGS: tags
 
tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
set x; \
here=`pwd`; \
$(am__define_uniq_tagged_files); \
shift; \
if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
test -n "$$unique" || unique=$$empty_fix; \
if test $$# -gt 0; then \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
"$$@" $$unique; \
else \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
$$unique; \
fi; \
fi
ctags: ctags-am
 
CTAGS: ctags
ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
$(am__define_uniq_tagged_files); \
test -z "$(CTAGS_ARGS)$$unique" \
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
$$unique
 
GTAGS:
here=`$(am__cd) $(top_builddir) && pwd` \
&& $(am__cd) $(top_srcdir) \
&& gtags -i $(GTAGS_ARGS) "$$here"
cscopelist: cscopelist-am
 
cscopelist-am: $(am__tagged_files)
list='$(am__tagged_files)'; \
case "$(srcdir)" in \
[\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
*) sdir=$(subdir)/$(srcdir) ;; \
esac; \
for i in $$list; do \
if test -f "$$i"; then \
echo "$(subdir)/$$i"; \
else \
echo "$$sdir/$$i"; \
fi; \
done >> $(top_builddir)/cscope.files
 
distclean-tags:
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
 
distdir: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
check-am: all-am
check: check-am
all-am: Makefile $(PROGRAMS)
installdirs:
for dir in "$(DESTDIR)$(bindir)"; do \
test -z "$$dir" || $(MKDIR_P) "$$dir"; \
done
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
 
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
 
installcheck: installcheck-am
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
mostlyclean-generic:
 
clean-generic:
 
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
 
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
 
clean-am: clean-binPROGRAMS clean-generic clean-libtool mostlyclean-am
 
distclean: distclean-am
-rm -rf ./$(DEPDIR)
-rm -f Makefile
distclean-am: clean-am distclean-compile distclean-generic \
distclean-tags
 
dvi: dvi-am
 
dvi-am:
 
html: html-am
 
html-am:
 
info: info-am
 
info-am:
 
install-data-am:
 
install-dvi: install-dvi-am
 
install-dvi-am:
 
install-exec-am: install-binPROGRAMS
 
install-html: install-html-am
 
install-html-am:
 
install-info: install-info-am
 
install-info-am:
 
install-man:
 
install-pdf: install-pdf-am
 
install-pdf-am:
 
install-ps: install-ps-am
 
install-ps-am:
 
installcheck-am:
 
maintainer-clean: maintainer-clean-am
-rm -rf ./$(DEPDIR)
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
 
mostlyclean: mostlyclean-am
 
mostlyclean-am: mostlyclean-compile mostlyclean-generic \
mostlyclean-libtool
 
pdf: pdf-am
 
pdf-am:
 
ps: ps-am
 
ps-am:
 
uninstall-am: uninstall-binPROGRAMS
 
.MAKE: install-am install-strip
 
.PHONY: CTAGS GTAGS TAGS all all-am check check-am clean \
clean-binPROGRAMS clean-generic clean-libtool cscopelist-am \
ctags ctags-am distclean distclean-compile distclean-generic \
distclean-libtool distclean-tags distdir dvi dvi-am html \
html-am info info-am install install-am install-binPROGRAMS \
install-data install-data-am install-dvi install-dvi-am \
install-exec install-exec-am install-html install-html-am \
install-info install-info-am install-man install-pdf \
install-pdf-am install-ps install-ps-am install-strip \
installcheck installcheck-am installdirs maintainer-clean \
maintainer-clean-generic mostlyclean mostlyclean-compile \
mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
tags tags-am uninstall uninstall-am uninstall-binPROGRAMS
 
 
valgrind: $(bin_PROGRAMS)
for a in $(bin_PROGRAMS); do \
valgrind --leak-check=full --show-reachable=yes .libs/$$a; \
done
 
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:
/contrib/sdk/sources/vaapi/libva-1.4.1/test/encode/avcenc.c
0,0 → 1,2021
/*
* Copyright (c) 2012 Intel Corporation. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
* IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/*
* Simple AVC encoder based on libVA.
*
* Usage:
* ./avcenc <width> <height> <input file> <output file> [qp]
*/
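
/*
 * Example invocation (resolution, file names and QP value below are
 * illustrative only): encode a 640x480 planar YUV 4:2:0 clip at QP 28:
 *
 *     ./avcenc 640 480 input_640x480.yuv output.264 28
 */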
 
#include "sysdeps.h"
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <getopt.h>
#include <unistd.h>
 
#include <sys/time.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <assert.h>
#include <time.h>
 
#include <pthread.h>
 
#include <va/va.h>
#include <va/va_enc_h264.h>
#include "va_display.h"
 
#define NAL_REF_IDC_NONE 0
#define NAL_REF_IDC_LOW 1
#define NAL_REF_IDC_MEDIUM 2
#define NAL_REF_IDC_HIGH 3
 
#define NAL_NON_IDR 1
#define NAL_IDR 5
#define NAL_SPS 7
#define NAL_PPS 8
#define NAL_SEI 6
 
#define SLICE_TYPE_P 0
#define SLICE_TYPE_B 1
#define SLICE_TYPE_I 2
 
#define FRAME_IDR 7
 
#define ENTROPY_MODE_CAVLC 0
#define ENTROPY_MODE_CABAC 1
 
#define PROFILE_IDC_BASELINE 66
#define PROFILE_IDC_MAIN 77
#define PROFILE_IDC_HIGH 100
 
#define CHECK_VASTATUS(va_status,func) \
if (va_status != VA_STATUS_SUCCESS) { \
fprintf(stderr,"%s:%s (%d) failed,exit\n", __func__, func, __LINE__); \
exit(1); \
}
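
/*
 * CHECK_VASTATUS() aborts the whole process on any libva failure, printing
 * the enclosing function, the name of the failed call and the line number.
 * No cleanup is attempted, which is acceptable for a standalone test tool.
 */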
 
static VADisplay va_dpy;
 
static int picture_width, picture_width_in_mbs;
static int picture_height, picture_height_in_mbs;
static int frame_size;
static unsigned char *newImageBuffer = 0;
 
static int qp_value = 26;
 
static int intra_period = 30;
static int frame_bit_rate = -1;
static int frame_rate = 30;
static int ip_period = 1;
 
#define MAX_SLICES 32
 
 
static unsigned int MaxFrameNum = (1<<12);
static unsigned int MaxPicOrderCntLsb = (1<<8);
static unsigned int Log2MaxFrameNum = 12;
static unsigned int Log2MaxPicOrderCntLsb = 8;
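
/*
 * MaxFrameNum = 2^Log2MaxFrameNum and MaxPicOrderCntLsb =
 * 2^Log2MaxPicOrderCntLsb.  frame_num and pic_order_cnt_lsb wrap around at
 * these limits, which is why calc_poc() below has to reconstruct the MSB
 * part of the picture order count.
 */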
 
static int
build_packed_pic_buffer(unsigned char **header_buffer);
 
static int
build_packed_seq_buffer(unsigned char **header_buffer);
 
static int
build_packed_sei_pic_timing(unsigned int cpb_removal_length,
unsigned int dpb_output_length,
unsigned char **sei_buffer);
 
static int
build_packed_idr_sei_buffer_timing(unsigned int init_cpb_removal_delay_length,
unsigned int cpb_removal_length,
unsigned int dpb_output_length,
unsigned char **sei_buffer);
 
struct upload_thread_param
{
FILE *yuv_fp;
VASurfaceID surface_id;
};
 
static void
upload_yuv_to_surface(FILE *yuv_fp, VASurfaceID surface_id);
 
static struct {
VAProfile profile;
int constraint_set_flag;
VAEncSequenceParameterBufferH264 seq_param;
VAEncPictureParameterBufferH264 pic_param;
VAEncSliceParameterBufferH264 slice_param[MAX_SLICES];
VAContextID context_id;
VAConfigID config_id;
VABufferID seq_param_buf_id; /* Sequence level parameter */
VABufferID pic_param_buf_id; /* Picture level parameter */
VABufferID slice_param_buf_id[MAX_SLICES]; /* Slice level parameters, multiple slices */
VABufferID codedbuf_buf_id; /* Output buffer, compressed data */
VABufferID packed_seq_header_param_buf_id;
VABufferID packed_seq_buf_id;
VABufferID packed_pic_header_param_buf_id;
VABufferID packed_pic_buf_id;
VABufferID packed_sei_header_param_buf_id; /* the SEI buffer */
VABufferID packed_sei_buf_id;
VABufferID misc_parameter_hrd_buf_id;
 
int num_slices;
int codedbuf_i_size;
int codedbuf_pb_size;
int current_input_surface;
int rate_control_method;
struct upload_thread_param upload_thread_param;
pthread_t upload_thread_id;
int upload_thread_value;
int i_initial_cpb_removal_delay;
int i_initial_cpb_removal_delay_offset;
int i_initial_cpb_removal_delay_length;
int i_cpb_removal_delay;
int i_cpb_removal_delay_length;
int i_dpb_output_delay_length;
int time_offset_length;
 
unsigned long long idr_frame_num;
unsigned long long prev_idr_cpb_removal;
unsigned long long current_idr_cpb_removal;
unsigned long long current_cpb_removal;
/* This is relative to the current_cpb_removal */
unsigned int current_dpb_removal_delta;
} avcenc_context;
 
static VAPictureH264 ReferenceFrames[16], RefPicList0[32], RefPicList1[32];
 
static void create_encode_pipe()
{
VAEntrypoint entrypoints[5];
int num_entrypoints,slice_entrypoint;
VAConfigAttrib attrib[2];
int major_ver, minor_ver;
VAStatus va_status;
 
va_dpy = va_open_display();
va_status = vaInitialize(va_dpy, &major_ver, &minor_ver);
CHECK_VASTATUS(va_status, "vaInitialize");
 
vaQueryConfigEntrypoints(va_dpy, avcenc_context.profile, entrypoints,
&num_entrypoints);
 
for (slice_entrypoint = 0; slice_entrypoint < num_entrypoints; slice_entrypoint++) {
if (entrypoints[slice_entrypoint] == VAEntrypointEncSlice)
break;
}
 
if (slice_entrypoint == num_entrypoints) {
/* could not find the Slice entry point */
assert(0);
}
 
/* find out the format for the render target, and rate control mode */
attrib[0].type = VAConfigAttribRTFormat;
attrib[1].type = VAConfigAttribRateControl;
vaGetConfigAttributes(va_dpy, avcenc_context.profile, VAEntrypointEncSlice,
&attrib[0], 2);
 
if ((attrib[0].value & VA_RT_FORMAT_YUV420) == 0) {
/* could not find the desired YUV420 RT format */
assert(0);
}
 
if ((attrib[1].value & avcenc_context.rate_control_method) == 0) {
/* Can't find a matching RC mode */
printf("Can't find the desired RC mode, exit\n");
assert(0);
}
 
attrib[0].value = VA_RT_FORMAT_YUV420; /* set to desired RT format */
attrib[1].value = avcenc_context.rate_control_method; /* set to desired RC mode */
 
va_status = vaCreateConfig(va_dpy, avcenc_context.profile, VAEntrypointEncSlice,
&attrib[0], 2,&avcenc_context.config_id);
CHECK_VASTATUS(va_status, "vaCreateConfig");
 
/* Create a context for this encode pipe */
va_status = vaCreateContext(va_dpy, avcenc_context.config_id,
picture_width, picture_height,
VA_PROGRESSIVE,
0, 0,
&avcenc_context.context_id);
CHECK_VASTATUS(va_status, "vaCreateContext");
}
 
static void destory_encode_pipe()
{
vaDestroyContext(va_dpy,avcenc_context.context_id);
vaDestroyConfig(va_dpy,avcenc_context.config_id);
vaTerminate(va_dpy);
va_close_display(va_dpy);
}
 
/***************************************************
*
* The encode pipe resource define
*
***************************************************/
#define SID_INPUT_PICTURE_0 0
#define SID_INPUT_PICTURE_1 1
#define SID_REFERENCE_PICTURE_L0 2
#define SID_REFERENCE_PICTURE_L1 3
#define SID_RECON_PICTURE 4
#define SID_NUMBER (SID_RECON_PICTURE + 1)
 
#define SURFACE_NUM 16 /* 16 surfaces for reference */
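
/*
 * The two input surfaces form a ping-pong pair: while the encoder consumes
 * one of them, a separate upload thread (upload_thread_function) reads and
 * converts the next YUV frame into the other; begin_picture() swaps
 * current_input_surface on every frame.
 */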
 
static VASurfaceID surface_ids[SID_NUMBER];
static VASurfaceID ref_surface[SURFACE_NUM];
 
static unsigned long long current_frame_display = 0;
static unsigned long long current_IDR_display = 0;
 
static VAPictureH264 CurrentCurrPic;
 
#define current_slot (current_frame_display % SURFACE_NUM)
 
static int frame_number;
static unsigned long long enc_frame_number;
static int current_frame_type;
static int current_frame_num;
static unsigned int current_poc;
 
static unsigned int num_ref_frames = 2;
static unsigned int numShortTerm = 0;
/***************************************************/
 
static void *
upload_thread_function(void *data)
{
struct upload_thread_param *param = data;
 
upload_yuv_to_surface(param->yuv_fp, param->surface_id);
 
return NULL;
}
 
static void alloc_encode_resource(FILE *yuv_fp)
{
VAStatus va_status;
 
// Create surface
va_status = vaCreateSurfaces(
va_dpy,
VA_RT_FORMAT_YUV420, picture_width, picture_height,
surface_ids, SID_NUMBER,
NULL, 0
);
 
CHECK_VASTATUS(va_status, "vaCreateSurfaces");
 
// Create surface
va_status = vaCreateSurfaces(
va_dpy,
VA_RT_FORMAT_YUV420, picture_width, picture_height,
ref_surface, SURFACE_NUM,
NULL, 0
);
 
CHECK_VASTATUS(va_status, "vaCreateSurfaces");
 
 
newImageBuffer = (unsigned char *)malloc(frame_size);
 
/* firstly upload YUV data to SID_INPUT_PICTURE_1 */
avcenc_context.upload_thread_param.yuv_fp = yuv_fp;
avcenc_context.upload_thread_param.surface_id = surface_ids[SID_INPUT_PICTURE_1];
 
avcenc_context.upload_thread_value = pthread_create(&avcenc_context.upload_thread_id,
NULL,
upload_thread_function,
(void*)&avcenc_context.upload_thread_param);
}
 
static void release_encode_resource()
{
pthread_join(avcenc_context.upload_thread_id, NULL);
free(newImageBuffer);
 
// Release all the surfaces resource
vaDestroySurfaces(va_dpy, surface_ids, SID_NUMBER);
// Release all the reference surfaces
vaDestroySurfaces(va_dpy, ref_surface, SURFACE_NUM);
}
 
static void avcenc_update_sei_param(int is_idr)
{
VAEncPackedHeaderParameterBuffer packed_header_param_buffer;
unsigned int length_in_bits;
unsigned char *packed_sei_buffer = NULL;
VAStatus va_status;
 
if (is_idr)
length_in_bits = build_packed_idr_sei_buffer_timing(
avcenc_context.i_initial_cpb_removal_delay_length,
avcenc_context.i_cpb_removal_delay_length,
avcenc_context.i_dpb_output_delay_length,
&packed_sei_buffer);
else
length_in_bits = build_packed_sei_pic_timing(
avcenc_context.i_cpb_removal_delay_length,
avcenc_context.i_dpb_output_delay_length,
&packed_sei_buffer);
 
packed_header_param_buffer.type = VAEncPackedHeaderH264_SEI;
packed_header_param_buffer.bit_length = length_in_bits;
packed_header_param_buffer.has_emulation_bytes = 0;
 
va_status = vaCreateBuffer(va_dpy,
avcenc_context.context_id,
VAEncPackedHeaderParameterBufferType,
sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer,
&avcenc_context.packed_sei_header_param_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
va_status = vaCreateBuffer(va_dpy,
avcenc_context.context_id,
VAEncPackedHeaderDataBufferType,
(length_in_bits + 7) / 8, 1, packed_sei_buffer,
&avcenc_context.packed_sei_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
free(packed_sei_buffer);
return;
}
 
#define partition(ref, field, key, ascending) \
while (i <= j) { \
if (ascending) { \
while (ref[i].field < key) \
i++; \
while (ref[j].field > key) \
j--; \
} else { \
while (ref[i].field > key) \
i++; \
while (ref[j].field < key) \
j--; \
} \
if (i <= j) { \
tmp = ref[i]; \
ref[i] = ref[j]; \
ref[j] = tmp; \
i++; \
j--; \
} \
} \
 
static void sort_one(VAPictureH264 ref[], int left, int right,
int ascending, int frame_idx)
{
int i = left, j = right;
unsigned int key;
VAPictureH264 tmp;
 
if (frame_idx) {
key = ref[(left + right) / 2].frame_idx;
partition(ref, frame_idx, key, ascending);
} else {
key = ref[(left + right) / 2].TopFieldOrderCnt;
partition(ref, TopFieldOrderCnt, (signed int)key, ascending);
}
 
/* recursion */
if (left < j)
sort_one(ref, left, j, ascending, frame_idx);
 
if (i < right)
sort_one(ref, i, right, ascending, frame_idx);
}
 
static void sort_two(VAPictureH264 ref[], int left, int right, unsigned int key, unsigned int frame_idx,
int partition_ascending, int list0_ascending, int list1_ascending)
{
int i = left, j = right;
VAPictureH264 tmp;
 
if (frame_idx) {
partition(ref, frame_idx, key, partition_ascending);
} else {
partition(ref, TopFieldOrderCnt, (signed int)key, partition_ascending);
}
 
sort_one(ref, left, i-1, list0_ascending, frame_idx);
sort_one(ref, j+1, right, list1_ascending, frame_idx);
}
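
/*
 * The partition() macro above is the shared Hoare-style partition step for
 * the two quicksort helpers: sort_one() orders a reference list by frame_idx
 * or by TopFieldOrderCnt (POC), ascending or descending, and sort_two()
 * first splits the list around a key POC and then orders each half
 * separately.  update_RefPicList() below uses them to build RefPicList0 for
 * P slices (descending frame_idx) and RefPicList0/RefPicList1 for B slices
 * (split around the current POC).
 */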
 
static int update_RefPicList()
{
 
if (current_frame_type == SLICE_TYPE_P) {
memcpy(RefPicList0, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
sort_one(RefPicList0, 0, numShortTerm-1, 0, 1);
}
 
if (current_frame_type == SLICE_TYPE_B) {
memcpy(RefPicList0, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
sort_two(RefPicList0, 0, numShortTerm-1, current_poc, 0,
1, 0, 1);
 
memcpy(RefPicList1, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
sort_two(RefPicList1, 0, numShortTerm-1, current_poc, 0,
0, 1, 0);
}
 
return 0;
}
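
/*
 * calc_poc() below implements the picture-order-count type 0 derivation of
 * H.264 subclause 8.2.1.1: it reconstructs PicOrderCntMsb by detecting
 * wrap-around of pic_order_cnt_lsb relative to the previous reference
 * picture.  For example, with MaxPicOrderCntLsb = 256 a drop from lsb 250
 * to lsb 2 is interpreted as a step of +8 rather than -248.
 */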
 
static int calc_poc(int pic_order_cnt_lsb)
{
static int PicOrderCntMsb_ref = 0, pic_order_cnt_lsb_ref = 0;
int prevPicOrderCntMsb, prevPicOrderCntLsb;
int PicOrderCntMsb, TopFieldOrderCnt;
 
if (current_frame_type == FRAME_IDR)
prevPicOrderCntMsb = prevPicOrderCntLsb = 0;
else {
prevPicOrderCntMsb = PicOrderCntMsb_ref;
prevPicOrderCntLsb = pic_order_cnt_lsb_ref;
}
 
if ((pic_order_cnt_lsb < prevPicOrderCntLsb) &&
((prevPicOrderCntLsb - pic_order_cnt_lsb) >= (int)(MaxPicOrderCntLsb / 2)))
PicOrderCntMsb = prevPicOrderCntMsb + MaxPicOrderCntLsb;
else if ((pic_order_cnt_lsb > prevPicOrderCntLsb) &&
((pic_order_cnt_lsb - prevPicOrderCntLsb) > (int)(MaxPicOrderCntLsb / 2)))
PicOrderCntMsb = prevPicOrderCntMsb - MaxPicOrderCntLsb;
else
PicOrderCntMsb = prevPicOrderCntMsb;
 
TopFieldOrderCnt = PicOrderCntMsb + pic_order_cnt_lsb;
 
if (current_frame_type != SLICE_TYPE_B) {
PicOrderCntMsb_ref = PicOrderCntMsb;
pic_order_cnt_lsb_ref = pic_order_cnt_lsb;
}
 
return TopFieldOrderCnt;
}
 
static void avcenc_update_picture_parameter(int slice_type, int is_idr)
{
VAEncPictureParameterBufferH264 *pic_param;
VAStatus va_status;
 
// Picture level
pic_param = &avcenc_context.pic_param;
 
pic_param->CurrPic.picture_id = ref_surface[current_slot];
pic_param->CurrPic.frame_idx = current_frame_num;
pic_param->CurrPic.flags = 0;
 
pic_param->CurrPic.TopFieldOrderCnt = current_poc;
pic_param->CurrPic.BottomFieldOrderCnt = pic_param->CurrPic.TopFieldOrderCnt;
 
assert(avcenc_context.codedbuf_buf_id != VA_INVALID_ID);
pic_param->coded_buf = avcenc_context.codedbuf_buf_id;
pic_param->frame_num = current_frame_num;
pic_param->pic_fields.bits.idr_pic_flag = !!is_idr;
pic_param->pic_fields.bits.reference_pic_flag = (slice_type != SLICE_TYPE_B);
CurrentCurrPic = pic_param->CurrPic;
 
if (slice_type == SLICE_TYPE_P || slice_type == SLICE_TYPE_B)
memset(pic_param->ReferenceFrames, 0xff, 16 * sizeof(VAPictureH264)); /* invalidate all entries */
 
if ((slice_type == SLICE_TYPE_P) || (slice_type == SLICE_TYPE_B)) {
pic_param->ReferenceFrames[0] = RefPicList0[0];
}
if (slice_type == SLICE_TYPE_B) {
pic_param->ReferenceFrames[1] = RefPicList1[0];
}
 
va_status = vaCreateBuffer(va_dpy,
avcenc_context.context_id,
VAEncPictureParameterBufferType,
sizeof(*pic_param), 1, pic_param,
&avcenc_context.pic_param_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
}
 
#ifndef VA_FOURCC_I420
#define VA_FOURCC_I420 0x30323449
#endif
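
/*
 * upload_yuv_to_surface() reads one planar YUV 4:2:0 (I420) frame from the
 * input file and copies it into the surface mapped via vaDeriveImage(): the
 * Y plane is copied row by row honouring the surface pitch, and the chroma
 * planes are either interleaved into the single UV plane of an NV12 surface
 * or copied plane by plane for YV12/I420 surfaces.
 */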
 
static void upload_yuv_to_surface(FILE *yuv_fp, VASurfaceID surface_id)
{
VAImage surface_image;
VAStatus va_status;
void *surface_p = NULL;
unsigned char *y_src, *u_src, *v_src;
unsigned char *y_dst, *u_dst, *v_dst;
int y_size = picture_width * picture_height;
int u_size = (picture_width >> 1) * (picture_height >> 1);
int row, col;
size_t n_items;
 
do {
n_items = fread(newImageBuffer, frame_size, 1, yuv_fp);
} while (n_items != 1);
 
va_status = vaDeriveImage(va_dpy, surface_id, &surface_image);
CHECK_VASTATUS(va_status,"vaDeriveImage");
 
va_status = vaMapBuffer(va_dpy, surface_image.buf, &surface_p);
assert(VA_STATUS_SUCCESS == va_status);
y_src = newImageBuffer;
u_src = newImageBuffer + y_size; /* U plane of the planar input */
v_src = newImageBuffer + y_size + u_size;
 
y_dst = surface_p + surface_image.offsets[0];
u_dst = surface_p + surface_image.offsets[1]; /* UV offset for NV12 */
v_dst = surface_p + surface_image.offsets[2];
 
/* Y plane */
for (row = 0; row < surface_image.height; row++) {
memcpy(y_dst, y_src, surface_image.width);
y_dst += surface_image.pitches[0];
y_src += picture_width;
}
 
if (surface_image.format.fourcc == VA_FOURCC_NV12) { /* UV plane */
for (row = 0; row < surface_image.height / 2; row++) {
for (col = 0; col < surface_image.width / 2; col++) {
u_dst[col * 2] = u_src[col];
u_dst[col * 2 + 1] = v_src[col];
}
 
u_dst += surface_image.pitches[1];
u_src += (picture_width / 2);
v_src += (picture_width / 2);
}
} else if (surface_image.format.fourcc == VA_FOURCC_YV12 ||
surface_image.format.fourcc == VA_FOURCC_I420) {
const int U = surface_image.format.fourcc == VA_FOURCC_I420 ? 1 : 2;
const int V = surface_image.format.fourcc == VA_FOURCC_I420 ? 2 : 1;
 
u_dst = surface_p + surface_image.offsets[U];
v_dst = surface_p + surface_image.offsets[V];
 
for (row = 0; row < surface_image.height / 2; row++) {
memcpy(u_dst, u_src, surface_image.width / 2);
memcpy(v_dst, v_src, surface_image.width / 2);
u_dst += surface_image.pitches[U];
v_dst += surface_image.pitches[V];
u_src += (picture_width / 2);
v_src += (picture_width / 2);
}
}
 
vaUnmapBuffer(va_dpy, surface_image.buf);
vaDestroyImage(va_dpy, surface_image.image_id);
}
 
static void avcenc_update_slice_parameter(int slice_type)
{
VAEncSliceParameterBufferH264 *slice_param;
VAStatus va_status;
int i;
 
// Slice level
i = 0;
slice_param = &avcenc_context.slice_param[i];
slice_param->macroblock_address = 0;
slice_param->num_macroblocks = picture_height_in_mbs * picture_width_in_mbs;
slice_param->pic_parameter_set_id = 0;
slice_param->slice_type = slice_type;
slice_param->direct_spatial_mv_pred_flag = 0;
slice_param->num_ref_idx_l0_active_minus1 = 0; /* FIXME: ??? */
slice_param->num_ref_idx_l1_active_minus1 = 0;
slice_param->cabac_init_idc = 0;
slice_param->slice_qp_delta = 0;
slice_param->disable_deblocking_filter_idc = 0;
slice_param->slice_alpha_c0_offset_div2 = 2;
slice_param->slice_beta_offset_div2 = 2;
slice_param->idr_pic_id = 0;
 
/* FIXME: fill other fields */
if ((slice_type == SLICE_TYPE_P) || (slice_type == SLICE_TYPE_B)) {
memset(slice_param->RefPicList0, 0xFF, 32 * sizeof(VAPictureH264));
slice_param->RefPicList0[0] = RefPicList0[0];
}
 
if ((slice_type == SLICE_TYPE_B)) {
memset(slice_param->RefPicList1, 0xFF, 32 * sizeof(VAPictureH264));
slice_param->RefPicList1[0] = RefPicList1[0];
}
 
va_status = vaCreateBuffer(va_dpy,
avcenc_context.context_id,
VAEncSliceParameterBufferType,
sizeof(*slice_param), 1, slice_param,
&avcenc_context.slice_param_buf_id[i]);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
i++;
 
#if 0
slice_param = &avcenc_context.slice_param[i];
slice_param->macroblock_address = picture_height_in_mbs * picture_width_in_mbs / 2;
slice_param->num_macroblocks = picture_height_in_mbs * picture_width_in_mbs / 2;
slice_param->pic_parameter_set_id = 0;
slice_param->slice_type = slice_type;
slice_param->direct_spatial_mv_pred_flag = 0;
slice_param->num_ref_idx_l0_active_minus1 = 0; /* FIXME: ??? */
slice_param->num_ref_idx_l1_active_minus1 = 0;
slice_param->cabac_init_idc = 0;
slice_param->slice_qp_delta = 0;
slice_param->disable_deblocking_filter_idc = 0;
slice_param->slice_alpha_c0_offset_div2 = 2;
slice_param->slice_beta_offset_div2 = 2;
slice_param->idr_pic_id = 0;
 
/* FIXME: fill other fields */
 
va_status = vaCreateBuffer(va_dpy,
avcenc_context.context_id,
VAEncSliceParameterBufferType,
sizeof(*slice_param), 1, slice_param,
&avcenc_context.slice_param_buf_id[i]);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
i++;
#endif
 
avcenc_context.num_slices = i;
}
 
static int update_ReferenceFrames(void)
{
int i;
 
/* B-frame is not used for reference */
if (current_frame_type == SLICE_TYPE_B)
return 0;
 
CurrentCurrPic.flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
numShortTerm++;
if (numShortTerm > num_ref_frames)
numShortTerm = num_ref_frames;
for (i=numShortTerm-1; i>0; i--)
ReferenceFrames[i] = ReferenceFrames[i-1];
ReferenceFrames[0] = CurrentCurrPic;
 
if (current_frame_type != SLICE_TYPE_B)
current_frame_num++;
if (current_frame_num > MaxFrameNum)
current_frame_num = 0;
 
return 0;
}
 
static int begin_picture(FILE *yuv_fp, int frame_num, int display_num, int slice_type, int is_idr)
{
VAStatus va_status;
 
if (avcenc_context.upload_thread_value != 0) {
fprintf(stderr, "FATAL error!!!\n");
exit(1);
}
pthread_join(avcenc_context.upload_thread_id, NULL);
 
avcenc_context.upload_thread_value = -1;
 
if (avcenc_context.current_input_surface == SID_INPUT_PICTURE_0)
avcenc_context.current_input_surface = SID_INPUT_PICTURE_1;
else
avcenc_context.current_input_surface = SID_INPUT_PICTURE_0;
 
if (is_idr) {
VAEncPackedHeaderParameterBuffer packed_header_param_buffer;
unsigned int length_in_bits, offset_in_bytes;
unsigned char *packed_seq_buffer = NULL, *packed_pic_buffer = NULL;
 
assert(slice_type == SLICE_TYPE_I);
length_in_bits = build_packed_seq_buffer(&packed_seq_buffer);
offset_in_bytes = 0;
packed_header_param_buffer.type = VAEncPackedHeaderSequence;
packed_header_param_buffer.bit_length = length_in_bits;
packed_header_param_buffer.has_emulation_bytes = 0;
va_status = vaCreateBuffer(va_dpy,
avcenc_context.context_id,
VAEncPackedHeaderParameterBufferType,
sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer,
&avcenc_context.packed_seq_header_param_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
va_status = vaCreateBuffer(va_dpy,
avcenc_context.context_id,
VAEncPackedHeaderDataBufferType,
(length_in_bits + 7) / 8, 1, packed_seq_buffer,
&avcenc_context.packed_seq_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
length_in_bits = build_packed_pic_buffer(&packed_pic_buffer);
offset_in_bytes = 0;
packed_header_param_buffer.type = VAEncPackedHeaderPicture;
packed_header_param_buffer.bit_length = length_in_bits;
packed_header_param_buffer.has_emulation_bytes = 0;
 
va_status = vaCreateBuffer(va_dpy,
avcenc_context.context_id,
VAEncPackedHeaderParameterBufferType,
sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer,
&avcenc_context.packed_pic_header_param_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
va_status = vaCreateBuffer(va_dpy,
avcenc_context.context_id,
VAEncPackedHeaderDataBufferType,
(length_in_bits + 7) / 8, 1, packed_pic_buffer,
&avcenc_context.packed_pic_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
free(packed_seq_buffer);
free(packed_pic_buffer);
}
 
/* sequence parameter set */
VAEncSequenceParameterBufferH264 *seq_param = &avcenc_context.seq_param;
va_status = vaCreateBuffer(va_dpy,
avcenc_context.context_id,
VAEncSequenceParameterBufferType,
sizeof(*seq_param), 1, seq_param,
&avcenc_context.seq_param_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
 
/* hrd parameter */
VAEncMiscParameterBuffer *misc_param;
VAEncMiscParameterHRD *misc_hrd_param;
va_status = vaCreateBuffer(va_dpy,
avcenc_context.context_id,
VAEncMiscParameterBufferType,
sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterRateControl),
1,
NULL,
&avcenc_context.misc_parameter_hrd_buf_id);
CHECK_VASTATUS(va_status, "vaCreateBuffer");
 
vaMapBuffer(va_dpy,
avcenc_context.misc_parameter_hrd_buf_id,
(void **)&misc_param);
misc_param->type = VAEncMiscParameterTypeHRD;
misc_hrd_param = (VAEncMiscParameterHRD *)misc_param->data;
 
if (frame_bit_rate > 0) {
misc_hrd_param->initial_buffer_fullness = frame_bit_rate * 1000 * 4;
misc_hrd_param->buffer_size = frame_bit_rate * 1000 * 8;
} else {
misc_hrd_param->initial_buffer_fullness = 0;
misc_hrd_param->buffer_size = 0;
}
 
vaUnmapBuffer(va_dpy, avcenc_context.misc_parameter_hrd_buf_id);
 
return 0;
}
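
/*
 * avcenc_render_picture() follows the usual VA-API submission pattern:
 * vaBeginPicture() binds the current input surface, vaRenderPicture() sends
 * the sequence, picture, packed-header and misc parameter buffers followed
 * by one call per slice parameter buffer, and vaEndPicture() submits the
 * frame to the encoder.
 */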
 
int avcenc_render_picture()
{
VAStatus va_status;
VABufferID va_buffers[10];
unsigned int num_va_buffers = 0;
int i;
 
va_buffers[num_va_buffers++] = avcenc_context.seq_param_buf_id;
va_buffers[num_va_buffers++] = avcenc_context.pic_param_buf_id;
 
if (avcenc_context.packed_seq_header_param_buf_id != VA_INVALID_ID)
va_buffers[num_va_buffers++] = avcenc_context.packed_seq_header_param_buf_id;
 
if (avcenc_context.packed_seq_buf_id != VA_INVALID_ID)
va_buffers[num_va_buffers++] = avcenc_context.packed_seq_buf_id;
 
if (avcenc_context.packed_pic_header_param_buf_id != VA_INVALID_ID)
va_buffers[num_va_buffers++] = avcenc_context.packed_pic_header_param_buf_id;
 
if (avcenc_context.packed_pic_buf_id != VA_INVALID_ID)
va_buffers[num_va_buffers++] = avcenc_context.packed_pic_buf_id;
 
if (avcenc_context.packed_sei_header_param_buf_id != VA_INVALID_ID)
va_buffers[num_va_buffers++] = avcenc_context.packed_sei_header_param_buf_id;
 
if (avcenc_context.packed_sei_buf_id != VA_INVALID_ID)
va_buffers[num_va_buffers++] = avcenc_context.packed_sei_buf_id;
 
if (avcenc_context.misc_parameter_hrd_buf_id != VA_INVALID_ID)
va_buffers[num_va_buffers++] = avcenc_context.misc_parameter_hrd_buf_id;
 
va_status = vaBeginPicture(va_dpy,
avcenc_context.context_id,
surface_ids[avcenc_context.current_input_surface]);
CHECK_VASTATUS(va_status,"vaBeginPicture");
va_status = vaRenderPicture(va_dpy,
avcenc_context.context_id,
va_buffers,
num_va_buffers);
CHECK_VASTATUS(va_status,"vaRenderPicture");
for(i = 0; i < avcenc_context.num_slices; i++) {
va_status = vaRenderPicture(va_dpy,
avcenc_context.context_id,
&avcenc_context.slice_param_buf_id[i],
1);
CHECK_VASTATUS(va_status,"vaRenderPicture");
}
 
va_status = vaEndPicture(va_dpy, avcenc_context.context_id);
CHECK_VASTATUS(va_status,"vaEndPicture");
 
return 0;
}
 
static int avcenc_destroy_buffers(VABufferID *va_buffers, unsigned int num_va_buffers)
{
VAStatus va_status;
unsigned int i;
 
for (i = 0; i < num_va_buffers; i++) {
if (va_buffers[i] != VA_INVALID_ID) {
va_status = vaDestroyBuffer(va_dpy, va_buffers[i]);
CHECK_VASTATUS(va_status,"vaDestroyBuffer");
va_buffers[i] = VA_INVALID_ID;
}
}
 
return 0;
}
 
static void end_picture()
{
 
update_ReferenceFrames();
avcenc_destroy_buffers(&avcenc_context.seq_param_buf_id, 1);
avcenc_destroy_buffers(&avcenc_context.pic_param_buf_id, 1);
avcenc_destroy_buffers(&avcenc_context.packed_seq_header_param_buf_id, 1);
avcenc_destroy_buffers(&avcenc_context.packed_seq_buf_id, 1);
avcenc_destroy_buffers(&avcenc_context.packed_pic_header_param_buf_id, 1);
avcenc_destroy_buffers(&avcenc_context.packed_pic_buf_id, 1);
avcenc_destroy_buffers(&avcenc_context.packed_sei_header_param_buf_id, 1);
avcenc_destroy_buffers(&avcenc_context.packed_sei_buf_id, 1);
avcenc_destroy_buffers(&avcenc_context.slice_param_buf_id[0], avcenc_context.num_slices);
avcenc_destroy_buffers(&avcenc_context.codedbuf_buf_id, 1);
avcenc_destroy_buffers(&avcenc_context.misc_parameter_hrd_buf_id, 1);
 
memset(avcenc_context.slice_param, 0, sizeof(avcenc_context.slice_param));
avcenc_context.num_slices = 0;
}
 
#define BITSTREAM_ALLOCATE_STEPPING 4096
 
struct __bitstream {
unsigned int *buffer;
int bit_offset;
int max_size_in_dword;
};
 
typedef struct __bitstream bitstream;
 
#if 0
static int
get_coded_bitsteam_length(unsigned char *buffer, int buffer_length)
{
int i;
 
for (i = 0; i < buffer_length - 3; i++) {
if (!buffer[i] &&
!buffer[i + 1] &&
!buffer[i + 2] &&
!buffer[i + 3])
break;
}
 
return i;
}
#endif
 
static unsigned int
va_swap32(unsigned int val)
{
unsigned char *pval = (unsigned char *)&val;
 
return ((pval[0] << 24) |
(pval[1] << 16) |
(pval[2] << 8) |
(pval[3] << 0));
}
 
static void
bitstream_start(bitstream *bs)
{
bs->max_size_in_dword = BITSTREAM_ALLOCATE_STEPPING;
bs->buffer = calloc(bs->max_size_in_dword * sizeof(int), 1);
bs->bit_offset = 0;
}
 
static void
bitstream_end(bitstream *bs)
{
int pos = (bs->bit_offset >> 5);
int bit_offset = (bs->bit_offset & 0x1f);
int bit_left = 32 - bit_offset;
 
if (bit_offset) {
bs->buffer[pos] = va_swap32((bs->buffer[pos] << bit_left));
}
}
static void
bitstream_put_ui(bitstream *bs, unsigned int val, int size_in_bits)
{
int pos = (bs->bit_offset >> 5);
int bit_offset = (bs->bit_offset & 0x1f);
int bit_left = 32 - bit_offset;
 
if (!size_in_bits)
return;
 
bs->bit_offset += size_in_bits;
 
if (bit_left > size_in_bits) {
bs->buffer[pos] = (bs->buffer[pos] << size_in_bits | val);
} else {
size_in_bits -= bit_left;
bs->buffer[pos] = (bs->buffer[pos] << bit_left) | (val >> size_in_bits);
bs->buffer[pos] = va_swap32(bs->buffer[pos]);
 
if (pos + 1 == bs->max_size_in_dword) {
bs->max_size_in_dword += BITSTREAM_ALLOCATE_STEPPING;
bs->buffer = realloc(bs->buffer, bs->max_size_in_dword * sizeof(unsigned int));
}
 
bs->buffer[pos + 1] = val;
}
}
 
static void
bitstream_put_ue(bitstream *bs, unsigned int val)
{
int size_in_bits = 0;
int tmp_val = ++val;
 
while (tmp_val) {
tmp_val >>= 1;
size_in_bits++;
}
 
bitstream_put_ui(bs, 0, size_in_bits - 1); // leading zero
bitstream_put_ui(bs, val, size_in_bits);
}
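
/*
* Worked example for the ue(v) Exp-Golomb coding above: for val = 3 the
* function writes val + 1 = 4 = 100b, i.e. two leading zero bits followed by
* the three value bits, producing "00100" (5 bits in total); val = 0 is
* written as the single bit "1".
*/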
 
static void
bitstream_put_se(bitstream *bs, int val)
{
unsigned int new_val;
 
if (val <= 0)
new_val = -2 * val;
else
new_val = 2 * val - 1;
 
bitstream_put_ue(bs, new_val);
}
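
/*
* Worked example for the se(v) mapping above: val = 1 maps to 2*1 - 1 = 1 and
* is written as ue(1) = "010", while val = -1 maps to -2*(-1) = 2 and is
* written as ue(2) = "011".
*/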
 
static void
bitstream_byte_aligning(bitstream *bs, int bit)
{
int bit_offset = (bs->bit_offset & 0x7);
int bit_left = 8 - bit_offset;
int new_val;
 
if (!bit_offset)
return;
 
assert(bit == 0 || bit == 1);
 
if (bit)
new_val = (1 << bit_left) - 1;
else
new_val = 0;
 
bitstream_put_ui(bs, new_val, bit_left);
}
 
static void
rbsp_trailing_bits(bitstream *bs)
{
bitstream_put_ui(bs, 1, 1);
bitstream_byte_aligning(bs, 0);
}
 
static void nal_start_code_prefix(bitstream *bs)
{
bitstream_put_ui(bs, 0x00000001, 32);
}
 
static void nal_header(bitstream *bs, int nal_ref_idc, int nal_unit_type)
{
bitstream_put_ui(bs, 0, 1); /* forbidden_zero_bit: 0 */
bitstream_put_ui(bs, nal_ref_idc, 2);
bitstream_put_ui(bs, nal_unit_type, 5);
}
 
static void sps_rbsp(bitstream *bs)
{
VAEncSequenceParameterBufferH264 *seq_param = &avcenc_context.seq_param;
int profile_idc = PROFILE_IDC_BASELINE;
 
if (avcenc_context.profile == VAProfileH264High)
profile_idc = PROFILE_IDC_HIGH;
else if (avcenc_context.profile == VAProfileH264Main)
profile_idc = PROFILE_IDC_MAIN;
 
bitstream_put_ui(bs, profile_idc, 8); /* profile_idc */
bitstream_put_ui(bs, !!(avcenc_context.constraint_set_flag & 1), 1); /* constraint_set0_flag */
bitstream_put_ui(bs, !!(avcenc_context.constraint_set_flag & 2), 1); /* constraint_set1_flag */
bitstream_put_ui(bs, !!(avcenc_context.constraint_set_flag & 4), 1); /* constraint_set2_flag */
bitstream_put_ui(bs, !!(avcenc_context.constraint_set_flag & 8), 1); /* constraint_set3_flag */
bitstream_put_ui(bs, 0, 4); /* reserved_zero_4bits */
bitstream_put_ui(bs, seq_param->level_idc, 8); /* level_idc */
bitstream_put_ue(bs, seq_param->seq_parameter_set_id); /* seq_parameter_set_id */
 
if ( profile_idc == PROFILE_IDC_HIGH) {
bitstream_put_ue(bs, 1); /* chroma_format_idc = 1, 4:2:0 */
bitstream_put_ue(bs, 0); /* bit_depth_luma_minus8 */
bitstream_put_ue(bs, 0); /* bit_depth_chroma_minus8 */
bitstream_put_ui(bs, 0, 1); /* qpprime_y_zero_transform_bypass_flag */
bitstream_put_ui(bs, 0, 1); /* seq_scaling_matrix_present_flag */
}
 
bitstream_put_ue(bs, seq_param->seq_fields.bits.log2_max_frame_num_minus4); /* log2_max_frame_num_minus4 */
bitstream_put_ue(bs, seq_param->seq_fields.bits.pic_order_cnt_type); /* pic_order_cnt_type */
 
if (seq_param->seq_fields.bits.pic_order_cnt_type == 0)
bitstream_put_ue(bs, seq_param->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4); /* log2_max_pic_order_cnt_lsb_minus4 */
else {
assert(0);
}
 
bitstream_put_ue(bs, seq_param->max_num_ref_frames); /* num_ref_frames */
bitstream_put_ui(bs, 0, 1); /* gaps_in_frame_num_value_allowed_flag */
 
bitstream_put_ue(bs, seq_param->picture_width_in_mbs - 1); /* pic_width_in_mbs_minus1 */
bitstream_put_ue(bs, seq_param->picture_height_in_mbs - 1); /* pic_height_in_map_units_minus1 */
bitstream_put_ui(bs, seq_param->seq_fields.bits.frame_mbs_only_flag, 1); /* frame_mbs_only_flag */
 
if (!seq_param->seq_fields.bits.frame_mbs_only_flag) {
assert(0);
}
 
bitstream_put_ui(bs, seq_param->seq_fields.bits.direct_8x8_inference_flag, 1); /* direct_8x8_inference_flag */
bitstream_put_ui(bs, seq_param->frame_cropping_flag, 1); /* frame_cropping_flag */
 
if (seq_param->frame_cropping_flag) {
bitstream_put_ue(bs, seq_param->frame_crop_left_offset); /* frame_crop_left_offset */
bitstream_put_ue(bs, seq_param->frame_crop_right_offset); /* frame_crop_right_offset */
bitstream_put_ue(bs, seq_param->frame_crop_top_offset); /* frame_crop_top_offset */
bitstream_put_ue(bs, seq_param->frame_crop_bottom_offset); /* frame_crop_bottom_offset */
}
if ( frame_bit_rate < 0 ) {
bitstream_put_ui(bs, 0, 1); /* vui_parameters_present_flag */
} else {
bitstream_put_ui(bs, 1, 1); /* vui_parameters_present_flag */
bitstream_put_ui(bs, 0, 1); /* aspect_ratio_info_present_flag */
bitstream_put_ui(bs, 0, 1); /* overscan_info_present_flag */
bitstream_put_ui(bs, 0, 1); /* video_signal_type_present_flag */
bitstream_put_ui(bs, 0, 1); /* chroma_loc_info_present_flag */
bitstream_put_ui(bs, 1, 1); /* timing_info_present_flag */
{
bitstream_put_ui(bs, 1, 32); /* num_units_in_tick */
bitstream_put_ui(bs, frame_rate * 2, 32); /* time_scale */
bitstream_put_ui(bs, 1, 1); /* fixed_frame_rate_flag */
}
bitstream_put_ui(bs, 1, 1); /* nal_hrd_parameters_present_flag */
{
// hrd_parameters
bitstream_put_ue(bs, 0); /* cpb_cnt_minus1 */
bitstream_put_ui(bs, 0, 4); /* bit_rate_scale */
bitstream_put_ui(bs, 2, 4); /* cpb_size_scale */
/* the frame_bit_rate is in kbps */
bitstream_put_ue(bs, (((frame_bit_rate * 1000)>> 6) - 1)); /* bit_rate_value_minus1[0] */
bitstream_put_ue(bs, ((frame_bit_rate * 8000) >> 6) - 1); /* cpb_size_value_minus1[0] */
bitstream_put_ui(bs, 1, 1); /* cbr_flag[0] */
 
/* initial_cpb_removal_delay_length_minus1 */
bitstream_put_ui(bs,
(avcenc_context.i_initial_cpb_removal_delay_length - 1), 5);
/* cpb_removal_delay_length_minus1 */
bitstream_put_ui(bs,
(avcenc_context.i_cpb_removal_delay_length - 1), 5);
/* dpb_output_delay_length_minus1 */
bitstream_put_ui(bs,
(avcenc_context.i_dpb_output_delay_length - 1), 5);
/* time_offset_length */
bitstream_put_ui(bs,
(avcenc_context.time_offset_length - 1), 5);
}
bitstream_put_ui(bs, 0, 1); /* vcl_hrd_parameters_present_flag */
bitstream_put_ui(bs, 0, 1); /* low_delay_hrd_flag */
 
bitstream_put_ui(bs, 0, 1); /* pic_struct_present_flag */
bitstream_put_ui(bs, 0, 1); /* bitstream_restriction_flag */
}
 
rbsp_trailing_bits(bs); /* rbsp_trailing_bits */
}
 
#if 0
static void build_nal_sps(FILE *avc_fp)
{
bitstream bs;
 
bitstream_start(&bs);
nal_start_code_prefix(&bs);
nal_header(&bs, NAL_REF_IDC_HIGH, NAL_SPS);
sps_rbsp(&bs);
bitstream_end(&bs, avc_fp);
}
#endif
 
static void pps_rbsp(bitstream *bs)
{
VAEncPictureParameterBufferH264 *pic_param = &avcenc_context.pic_param;
 
bitstream_put_ue(bs, pic_param->pic_parameter_set_id); /* pic_parameter_set_id */
bitstream_put_ue(bs, pic_param->seq_parameter_set_id); /* seq_parameter_set_id */
 
bitstream_put_ui(bs, pic_param->pic_fields.bits.entropy_coding_mode_flag, 1); /* entropy_coding_mode_flag */
 
bitstream_put_ui(bs, 0, 1); /* pic_order_present_flag: 0 */
 
bitstream_put_ue(bs, 0); /* num_slice_groups_minus1 */
 
bitstream_put_ue(bs, pic_param->num_ref_idx_l0_active_minus1); /* num_ref_idx_l0_active_minus1 */
bitstream_put_ue(bs, pic_param->num_ref_idx_l1_active_minus1); /* num_ref_idx_l1_active_minus1 1 */
 
bitstream_put_ui(bs, pic_param->pic_fields.bits.weighted_pred_flag, 1); /* weighted_pred_flag: 0 */
bitstream_put_ui(bs, pic_param->pic_fields.bits.weighted_bipred_idc, 2); /* weighted_bipred_idc: 0 */
 
bitstream_put_se(bs, pic_param->pic_init_qp - 26); /* pic_init_qp_minus26 */
bitstream_put_se(bs, 0); /* pic_init_qs_minus26 */
bitstream_put_se(bs, 0); /* chroma_qp_index_offset */
 
bitstream_put_ui(bs, pic_param->pic_fields.bits.deblocking_filter_control_present_flag, 1); /* deblocking_filter_control_present_flag */
bitstream_put_ui(bs, 0, 1); /* constrained_intra_pred_flag */
bitstream_put_ui(bs, 0, 1); /* redundant_pic_cnt_present_flag */
/* more_rbsp_data */
bitstream_put_ui(bs, pic_param->pic_fields.bits.transform_8x8_mode_flag, 1); /*transform_8x8_mode_flag */
bitstream_put_ui(bs, 0, 1); /* pic_scaling_matrix_present_flag */
bitstream_put_se(bs, pic_param->second_chroma_qp_index_offset ); /*second_chroma_qp_index_offset */
 
rbsp_trailing_bits(bs);
}
 
#if 0
static void build_nal_pps(FILE *avc_fp)
{
bitstream bs;
 
bitstream_start(&bs);
nal_start_code_prefix(&bs);
nal_header(&bs, NAL_REF_IDC_HIGH, NAL_PPS);
pps_rbsp(&bs);
bitstream_end(&bs, avc_fp);
}
 
static void
build_header(FILE *avc_fp)
{
build_nal_sps(avc_fp);
build_nal_pps(avc_fp);
}
#endif
 
static int
build_packed_pic_buffer(unsigned char **header_buffer)
{
bitstream bs;
 
bitstream_start(&bs);
nal_start_code_prefix(&bs);
nal_header(&bs, NAL_REF_IDC_HIGH, NAL_PPS);
pps_rbsp(&bs);
bitstream_end(&bs);
 
*header_buffer = (unsigned char *)bs.buffer;
return bs.bit_offset;
}
 
static int
build_packed_seq_buffer(unsigned char **header_buffer)
{
bitstream bs;
 
bitstream_start(&bs);
nal_start_code_prefix(&bs);
nal_header(&bs, NAL_REF_IDC_HIGH, NAL_SPS);
sps_rbsp(&bs);
bitstream_end(&bs);
 
*header_buffer = (unsigned char *)bs.buffer;
return bs.bit_offset;
}
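
/*
* Sketch of how a packed header built by the helpers above is typically
* submitted (illustrative only; the local variable names are hypothetical,
* while the buffer IDs are the ones this file already tracks): the bit length
* is described by a VAEncPackedHeaderParameterBuffer and the raw bytes go
* into a VAEncPackedHeaderDataBufferType buffer, e.g.
*
*     unsigned char *sps_data;
*     int bit_length = build_packed_seq_buffer(&sps_data);
*     VAEncPackedHeaderParameterBuffer params = {
*         .type = VAEncPackedHeaderSequence,
*         .bit_length = bit_length,
*         .has_emulation_bytes = 0,
*     };
*     vaCreateBuffer(va_dpy, avcenc_context.context_id,
*                    VAEncPackedHeaderParameterBufferType,
*                    sizeof(params), 1, &params,
*                    &avcenc_context.packed_seq_header_param_buf_id);
*     vaCreateBuffer(va_dpy, avcenc_context.context_id,
*                    VAEncPackedHeaderDataBufferType,
*                    (bit_length + 7) / 8, 1, sps_data,
*                    &avcenc_context.packed_seq_buf_id);
*/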
 
static int
build_packed_idr_sei_buffer_timing(unsigned int init_cpb_removal_delay_length,
unsigned int cpb_removal_length,
unsigned int dpb_output_length,
unsigned char **sei_buffer)
{
unsigned char *byte_buf;
int bp_byte_size, i, pic_byte_size;
unsigned int cpb_removal_delay;
 
bitstream nal_bs;
bitstream sei_bp_bs, sei_pic_bs;
 
bitstream_start(&sei_bp_bs);
bitstream_put_ue(&sei_bp_bs, 0); /*seq_parameter_set_id*/
/* SEI buffer period info */
/* NALHrdBpPresentFlag == 1 */
bitstream_put_ui(&sei_bp_bs, avcenc_context.i_initial_cpb_removal_delay,
init_cpb_removal_delay_length);
bitstream_put_ui(&sei_bp_bs, avcenc_context.i_initial_cpb_removal_delay_offset,
init_cpb_removal_delay_length);
if ( sei_bp_bs.bit_offset & 0x7) {
bitstream_put_ui(&sei_bp_bs, 1, 1);
}
bitstream_end(&sei_bp_bs);
bp_byte_size = (sei_bp_bs.bit_offset + 7) / 8;
/* SEI pic timing info */
bitstream_start(&sei_pic_bs);
/* The info of CPB and DPB delay is controlled by CpbDpbDelaysPresentFlag,
* which is derived as 1 if one of the following conditions is true:
* nal_hrd_parameters_present_flag is present in the bitstream and is equal to 1,
* vcl_hrd_parameters_present_flag is present in the bitstream and is equal to 1,
*/
cpb_removal_delay = (avcenc_context.current_cpb_removal - avcenc_context.prev_idr_cpb_removal);
bitstream_put_ui(&sei_pic_bs, cpb_removal_delay, cpb_removal_length);
bitstream_put_ui(&sei_pic_bs, avcenc_context.current_dpb_removal_delta,
dpb_output_length);
if ( sei_pic_bs.bit_offset & 0x7) {
bitstream_put_ui(&sei_pic_bs, 1, 1);
}
/* The pic_structure_present_flag determines whether the pic_structure
* info is written into the SEI pic timing info.
* Currently it is set to zero.
*/
bitstream_end(&sei_pic_bs);
pic_byte_size = (sei_pic_bs.bit_offset + 7) / 8;
bitstream_start(&nal_bs);
nal_start_code_prefix(&nal_bs);
nal_header(&nal_bs, NAL_REF_IDC_NONE, NAL_SEI);
 
/* Write the SEI buffer period data */
bitstream_put_ui(&nal_bs, 0, 8); /* last_payload_type_byte: buffering_period (0) */
bitstream_put_ui(&nal_bs, bp_byte_size, 8); /* last_payload_size_byte */
byte_buf = (unsigned char *)sei_bp_bs.buffer;
for(i = 0; i < bp_byte_size; i++) {
bitstream_put_ui(&nal_bs, byte_buf[i], 8);
}
free(byte_buf);
/* write the SEI pic timing data */
bitstream_put_ui(&nal_bs, 0x01, 8); /* last_payload_type_byte: pic_timing (1) */
bitstream_put_ui(&nal_bs, pic_byte_size, 8); /* last_payload_size_byte */
byte_buf = (unsigned char *)sei_pic_bs.buffer;
for(i = 0; i < pic_byte_size; i++) {
bitstream_put_ui(&nal_bs, byte_buf[i], 8);
}
free(byte_buf);
 
rbsp_trailing_bits(&nal_bs);
bitstream_end(&nal_bs);
 
*sei_buffer = (unsigned char *)nal_bs.buffer;
return nal_bs.bit_offset;
}
 
static int
build_packed_sei_pic_timing(unsigned int cpb_removal_length,
unsigned int dpb_output_length,
unsigned char **sei_buffer)
{
unsigned char *byte_buf;
int i, pic_byte_size;
unsigned int cpb_removal_delay;
 
bitstream nal_bs;
bitstream sei_pic_bs;
 
bitstream_start(&sei_pic_bs);
/* The info of CPB and DPB delay is controlled by CpbDpbDelaysPresentFlag,
* which is derived as 1 if one of the following conditions is true:
* nal_hrd_parameters_present_flag is present in the bitstream and is equal to 1,
* vcl_hrd_parameters_present_flag is present in the bitstream and is equal to 1,
*/
cpb_removal_delay = (avcenc_context.current_cpb_removal - avcenc_context.current_idr_cpb_removal);
bitstream_put_ui(&sei_pic_bs, cpb_removal_delay, cpb_removal_length);
bitstream_put_ui(&sei_pic_bs, avcenc_context.current_dpb_removal_delta,
dpb_output_length);
if ( sei_pic_bs.bit_offset & 0x7) {
bitstream_put_ui(&sei_pic_bs, 1, 1);
}
 
/* The pic_structure_present_flag determines whether the pic_structure
* info is written into the SEI pic timing info.
* Currently it is set to zero.
*/
bitstream_end(&sei_pic_bs);
pic_byte_size = (sei_pic_bs.bit_offset + 7) / 8;
 
bitstream_start(&nal_bs);
nal_start_code_prefix(&nal_bs);
nal_header(&nal_bs, NAL_REF_IDC_NONE, NAL_SEI);
 
/* write the SEI Pic timing data */
bitstream_put_ui(&nal_bs, 0x01, 8); /* last_payload_type_byte: pic_timing (1) */
bitstream_put_ui(&nal_bs, pic_byte_size, 8); /* last_payload_size_byte */
 
byte_buf = (unsigned char *)sei_pic_bs.buffer;
for(i = 0; i < pic_byte_size; i++) {
bitstream_put_ui(&nal_bs, byte_buf[i], 8);
}
free(byte_buf);
 
rbsp_trailing_bits(&nal_bs);
bitstream_end(&nal_bs);
 
*sei_buffer = (unsigned char *)nal_bs.buffer;
 
return nal_bs.bit_offset;
}
 
#if 0
static void
slice_header(bitstream *bs, int frame_num, int display_frame, int slice_type, int nal_ref_idc, int is_idr)
{
VAEncSequenceParameterBufferH264 *seq_param = &avcenc_context.seq_param;
VAEncPictureParameterBufferH264 *pic_param = &avcenc_context.pic_param;
int is_cabac = (pic_param->pic_fields.bits.entropy_coding_mode_flag == ENTROPY_MODE_CABAC);
 
bitstream_put_ue(bs, 0); /* first_mb_in_slice: 0 */
bitstream_put_ue(bs, slice_type); /* slice_type */
bitstream_put_ue(bs, 0); /* pic_parameter_set_id: 0 */
bitstream_put_ui(bs, frame_num & 0x0F, seq_param->seq_fields.bits.log2_max_frame_num_minus4 + 4); /* frame_num */
 
/* frame_mbs_only_flag == 1 */
if (!seq_param->seq_fields.bits.frame_mbs_only_flag) {
/* FIXME: */
assert(0);
}
 
if (is_idr)
bitstream_put_ue(bs, 0); /* idr_pic_id: 0 */
 
if (seq_param->seq_fields.bits.pic_order_cnt_type == 0) {
bitstream_put_ui(bs, (display_frame*2) & 0x3F, seq_param->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 + 4);
/* only support frame */
} else {
/* FIXME: */
assert(0);
}
 
/* redundant_pic_cnt_present_flag == 0 */
/* slice type */
if (slice_type == SLICE_TYPE_P) {
bitstream_put_ui(bs, 0, 1); /* num_ref_idx_active_override_flag: 0 */
/* ref_pic_list_reordering */
bitstream_put_ui(bs, 0, 1); /* ref_pic_list_reordering_flag_l0: 0 */
} else if (slice_type == SLICE_TYPE_B) {
bitstream_put_ui(bs, 1, 1); /* direct_spatial_mv_pred: 1 */
bitstream_put_ui(bs, 0, 1); /* num_ref_idx_active_override_flag: 0 */
/* ref_pic_list_reordering */
bitstream_put_ui(bs, 0, 1); /* ref_pic_list_reordering_flag_l0: 0 */
bitstream_put_ui(bs, 0, 1); /* ref_pic_list_reordering_flag_l1: 0 */
}
 
/* weighted_pred_flag == 0 */
 
/* dec_ref_pic_marking */
if (nal_ref_idc != 0) {
if ( is_idr) {
bitstream_put_ui(bs, 0, 1); /* no_output_of_prior_pics_flag: 0 */
bitstream_put_ui(bs, 0, 1); /* long_term_reference_flag: 0 */
} else {
bitstream_put_ui(bs, 0, 1); /* adaptive_ref_pic_marking_mode_flag: 0 */
}
}
 
if (is_cabac && (slice_type != SLICE_TYPE_I))
bitstream_put_ue(bs, 0); /* cabac_init_idc: 0 */
 
bitstream_put_se(bs, 0); /* slice_qp_delta: 0 */
 
if (pic_param->pic_fields.bits.deblocking_filter_control_present_flag == 1) {
bitstream_put_ue(bs, 0); /* disable_deblocking_filter_idc: 0 */
bitstream_put_se(bs, 2); /* slice_alpha_c0_offset_div2: 2 */
bitstream_put_se(bs, 2); /* slice_beta_offset_div2: 2 */
}
}
 
static void
slice_data(bitstream *bs)
{
VACodedBufferSegment *coded_buffer_segment;
unsigned char *coded_mem;
int i, slice_data_length;
VAStatus va_status;
VASurfaceStatus surface_status;
 
va_status = vaSyncSurface(va_dpy, surface_ids[avcenc_context.current_input_surface]);
CHECK_VASTATUS(va_status,"vaSyncSurface");
 
surface_status = 0;
va_status = vaQuerySurfaceStatus(va_dpy, surface_ids[avcenc_context.current_input_surface], &surface_status);
CHECK_VASTATUS(va_status,"vaQuerySurfaceStatus");
 
va_status = vaMapBuffer(va_dpy, avcenc_context.codedbuf_buf_id, (void **)(&coded_buffer_segment));
CHECK_VASTATUS(va_status,"vaMapBuffer");
coded_mem = coded_buffer_segment->buf;
 
slice_data_length = get_coded_bitsteam_length(coded_mem, codedbuf_size);
 
for (i = 0; i < slice_data_length; i++) {
bitstream_put_ui(bs, *coded_mem, 8);
coded_mem++;
}
 
vaUnmapBuffer(va_dpy, avcenc_context.codedbuf_buf_id);
}
 
static void
build_nal_slice(FILE *avc_fp, int frame_num, int display_frame, int slice_type, int is_idr)
{
bitstream bs;
 
bitstream_start(&bs);
slice_data(&bs);
bitstream_end(&bs, avc_fp);
}
 
#endif
 
static int
store_coded_buffer(FILE *avc_fp, int slice_type)
{
VACodedBufferSegment *coded_buffer_segment;
unsigned char *coded_mem;
int slice_data_length;
VAStatus va_status;
VASurfaceStatus surface_status;
size_t w_items;
 
va_status = vaSyncSurface(va_dpy, surface_ids[avcenc_context.current_input_surface]);
CHECK_VASTATUS(va_status,"vaSyncSurface");
 
surface_status = 0;
va_status = vaQuerySurfaceStatus(va_dpy, surface_ids[avcenc_context.current_input_surface], &surface_status);
CHECK_VASTATUS(va_status,"vaQuerySurfaceStatus");
 
va_status = vaMapBuffer(va_dpy, avcenc_context.codedbuf_buf_id, (void **)(&coded_buffer_segment));
CHECK_VASTATUS(va_status,"vaMapBuffer");
coded_mem = coded_buffer_segment->buf;
 
if (coded_buffer_segment->status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK) {
if (slice_type == SLICE_TYPE_I)
avcenc_context.codedbuf_i_size *= 2;
else
avcenc_context.codedbuf_pb_size *= 2;
 
vaUnmapBuffer(va_dpy, avcenc_context.codedbuf_buf_id);
return -1;
}
 
slice_data_length = coded_buffer_segment->size;
 
do {
w_items = fwrite(coded_mem, slice_data_length, 1, avc_fp);
} while (w_items != 1);
 
vaUnmapBuffer(va_dpy, avcenc_context.codedbuf_buf_id);
 
return 0;
}
 
/*
* This is adapted from h264encode.c, with some simplifications.
* For example: when a frame is encoded as an I-frame in a stream that also
* contains P and B frames, it is treated as an IDR frame (key frame) and a
* new GOP is started. If the clip is encoded as all I-frames, only the first
* frame is treated as an IDR and the remaining frames are plain I-frames.
*/
 
static void encoding2display_order(
unsigned long long encoding_order,int gop_size,
int ip_period,
unsigned long long *displaying_order,
int *frame_type)
{
int encoding_order_gop = 0;
 
/* When ip_period is 0, all are I/IDR frames */
if (ip_period == 0) { /* all are I/IDR frames */
if (encoding_order == 0)
*frame_type = FRAME_IDR;
else
*frame_type = SLICE_TYPE_I;
 
*displaying_order = encoding_order;
return;
}
 
/* new sequence like
* IDR PPPPP IDRPPPPP
* IDR (PBB)(PBB)(PBB)(PBB) IDR (PBB)(PBB)(PBB)(PBB)
*/
encoding_order_gop = encoding_order % gop_size;
 
if (encoding_order_gop == 0) { /* the first frame */
*frame_type = FRAME_IDR;
*displaying_order = encoding_order;
} else {
int gop_delta;
 
gop_delta = 1;
 
if ((ip_period != 1) && ((gop_size - 1) % ip_period)) {
int ipb_size;
ipb_size = (gop_size - 1) / ip_period * ip_period + 1;
if (encoding_order_gop >= ipb_size) {
gop_delta = ipb_size;
ip_period = gop_size - ipb_size;
}
}
 
if (((encoding_order_gop - gop_delta) % ip_period) == 0) { /* P frames */
*frame_type = SLICE_TYPE_P;
*displaying_order = encoding_order + ip_period - 1;
} else {
*frame_type = SLICE_TYPE_B;
*displaying_order = encoding_order - 1;
}
}
}
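
/*
* Example trace of the mapping above with gop_size = 30 and ip_period = 3:
* encoding order 0..6 yields IDR(display 0), P(display 3), B(display 1),
* B(display 2), P(display 6), B(display 4), B(display 5), i.e. the
* IDR (PBB)(PBB)... pattern shown above, with each P encoded ahead of the
* B frames that display before it.
*/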
 
 
static void
encode_picture(FILE *yuv_fp, FILE *avc_fp,
int frame_num, int display_num,
int is_idr,
int slice_type, int next_is_bpic,
int next_display_num)
{
VAStatus va_status;
int ret = 0, codedbuf_size;
begin_picture(yuv_fp, frame_num, display_num, slice_type, is_idr);
 
//if (next_display_num < frame_number) {
if (1) {
int index;
 
/* prepare for next frame */
if (avcenc_context.current_input_surface == SID_INPUT_PICTURE_0)
index = SID_INPUT_PICTURE_1;
else
index = SID_INPUT_PICTURE_0;
if ( next_display_num >= frame_number )
next_display_num = frame_number - 1;
fseek(yuv_fp, frame_size * next_display_num, SEEK_SET);
 
avcenc_context.upload_thread_param.yuv_fp = yuv_fp;
avcenc_context.upload_thread_param.surface_id = surface_ids[index];
 
avcenc_context.upload_thread_value = pthread_create(&avcenc_context.upload_thread_id,
NULL,
upload_thread_function,
(void*)&avcenc_context.upload_thread_param);
}
 
do {
avcenc_destroy_buffers(&avcenc_context.codedbuf_buf_id, 1);
avcenc_destroy_buffers(&avcenc_context.pic_param_buf_id, 1);
 
 
if (SLICE_TYPE_I == slice_type) {
codedbuf_size = avcenc_context.codedbuf_i_size;
} else {
codedbuf_size = avcenc_context.codedbuf_pb_size;
}
 
/* coded buffer */
va_status = vaCreateBuffer(va_dpy,
avcenc_context.context_id,
VAEncCodedBufferType,
codedbuf_size, 1, NULL,
&avcenc_context.codedbuf_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
/* Update the RefPicList */
update_RefPicList();
 
/* picture parameter set */
avcenc_update_picture_parameter(slice_type, is_idr);
 
/* slice parameter */
avcenc_update_slice_parameter(slice_type);
 
if (avcenc_context.rate_control_method == VA_RC_CBR)
avcenc_update_sei_param(is_idr);
 
avcenc_render_picture();
 
/* store_coded_buffer() returns non-zero after doubling the coded buffer
* size on a slice overflow, in which case the frame is encoded again. */
ret = store_coded_buffer(avc_fp, slice_type);
} while (ret);
 
end_picture(slice_type, next_is_bpic);
}
 
static void show_help()
{
printf("Usage: avnenc <width> <height> <input_yuvfile> <output_avcfile> [qp=qpvalue|fb=framebitrate] [mode=0(I frames only)/1(I and P frames)/2(I, P and B frames)\n");
}
 
static void avcenc_context_seq_param_init(VAEncSequenceParameterBufferH264 *seq_param,
int width, int height)
{
int width_in_mbs = (width + 15) / 16;
int height_in_mbs = (height + 15) / 16;
int frame_cropping_flag = 0;
int frame_crop_bottom_offset = 0;
 
seq_param->seq_parameter_set_id = 0;
seq_param->level_idc = 41;
seq_param->intra_period = intra_period;
seq_param->intra_idr_period = seq_param->intra_period;
seq_param->ip_period = ip_period;
seq_param->max_num_ref_frames = 4;
seq_param->picture_width_in_mbs = width_in_mbs;
seq_param->picture_height_in_mbs = height_in_mbs;
seq_param->seq_fields.bits.frame_mbs_only_flag = 1;
seq_param->seq_fields.bits.chroma_format_idc = 1;
 
if (frame_bit_rate > 0)
seq_param->bits_per_second = 1000 * frame_bit_rate; /* use kbps as input */
else
seq_param->bits_per_second = 0;
seq_param->time_scale = frame_rate * 2;
seq_param->num_units_in_tick = 1; /* Tc = num_units_in_tick / time_scale */
 
if (height_in_mbs * 16 - height) {
frame_cropping_flag = 1;
frame_crop_bottom_offset =
(height_in_mbs * 16 - height) / (2 * (!seq_param->seq_fields.bits.frame_mbs_only_flag + 1));
}
 
seq_param->frame_cropping_flag = frame_cropping_flag;
seq_param->frame_crop_left_offset = 0;
seq_param->frame_crop_right_offset = 0;
seq_param->frame_crop_top_offset = 0;
seq_param->frame_crop_bottom_offset = frame_crop_bottom_offset;
 
seq_param->seq_fields.bits.pic_order_cnt_type = 0;
seq_param->seq_fields.bits.direct_8x8_inference_flag = 0;
seq_param->seq_fields.bits.log2_max_frame_num_minus4 = Log2MaxFrameNum - 4;
seq_param->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = Log2MaxPicOrderCntLsb - 4;
if (frame_bit_rate > 0)
seq_param->vui_parameters_present_flag = 1; //HRD info located in vui
else
seq_param->vui_parameters_present_flag = 0;
}
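
/*
* Cropping example for the code above: a 1920x1080 input gives
* height_in_mbs = 68, i.e. a 1088-line coded frame, so with
* frame_mbs_only_flag = 1 the bottom offset becomes (1088 - 1080) / 2 = 4
* crop units, which tells the decoder to drop the 8 padding luma rows.
*/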
 
static void avcenc_context_pic_param_init(VAEncPictureParameterBufferH264 *pic_param)
{
pic_param->seq_parameter_set_id = 0;
pic_param->pic_parameter_set_id = 0;
 
pic_param->last_picture = 0;
pic_param->frame_num = 0;
pic_param->pic_init_qp = (qp_value >= 0 ? qp_value : 26);
pic_param->num_ref_idx_l0_active_minus1 = 0;
pic_param->num_ref_idx_l1_active_minus1 = 0;
 
pic_param->pic_fields.bits.idr_pic_flag = 0;
pic_param->pic_fields.bits.reference_pic_flag = 0;
pic_param->pic_fields.bits.entropy_coding_mode_flag = ENTROPY_MODE_CABAC;
pic_param->pic_fields.bits.weighted_pred_flag = 0;
pic_param->pic_fields.bits.weighted_bipred_idc = 0;
if (avcenc_context.constraint_set_flag & 0x7)
pic_param->pic_fields.bits.transform_8x8_mode_flag = 0;
else
pic_param->pic_fields.bits.transform_8x8_mode_flag = 1;
 
pic_param->pic_fields.bits.deblocking_filter_control_present_flag = 1;
 
memset(pic_param->ReferenceFrames, 0xff, 16 * sizeof(VAPictureH264)); /* invalid all */
}
 
static void avcenc_context_sei_init()
{
int init_cpb_size;
int target_bit_rate;
 
/* it comes for the bps defined in SPS */
target_bit_rate = avcenc_context.seq_param.bits_per_second;
init_cpb_size = (target_bit_rate * 8) >> 10;
avcenc_context.i_initial_cpb_removal_delay = 2 * 90000;
avcenc_context.i_initial_cpb_removal_delay_offset = 2 * 90000;
 
avcenc_context.i_cpb_removal_delay = 2;
avcenc_context.i_initial_cpb_removal_delay_length = 24;
avcenc_context.i_cpb_removal_delay_length = 24;
avcenc_context.i_dpb_output_delay_length = 24;
avcenc_context.time_offset_length = 24;
 
avcenc_context.prev_idr_cpb_removal = avcenc_context.i_initial_cpb_removal_delay / 90000;
avcenc_context.current_idr_cpb_removal = avcenc_context.prev_idr_cpb_removal;
avcenc_context.current_cpb_removal = 0;
avcenc_context.idr_frame_num = 0;
}
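
/*
* Note on the constants above: the buffering-period SEI expresses
* initial_cpb_removal_delay in 90 kHz clock ticks, so 2 * 90000 corresponds
* to roughly a 2-second initial CPB removal delay; prev_idr_cpb_removal is
* seeded with that delay converted to seconds.
*/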
 
static void avcenc_context_init(int width, int height)
{
int i;
memset(&avcenc_context, 0, sizeof(avcenc_context));
avcenc_context.profile = VAProfileH264Main;
 
switch (avcenc_context.profile) {
case VAProfileH264Baseline:
avcenc_context.constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
break;
 
case VAProfileH264Main:
avcenc_context.constraint_set_flag |= (1 << 1); /* Annex A.2.2 */
break;
 
case VAProfileH264High:
avcenc_context.constraint_set_flag |= (1 << 3); /* Annex A.2.4 */
break;
default:
break;
}
avcenc_context.seq_param_buf_id = VA_INVALID_ID;
avcenc_context.pic_param_buf_id = VA_INVALID_ID;
avcenc_context.packed_seq_header_param_buf_id = VA_INVALID_ID;
avcenc_context.packed_seq_buf_id = VA_INVALID_ID;
avcenc_context.packed_pic_header_param_buf_id = VA_INVALID_ID;
avcenc_context.packed_pic_buf_id = VA_INVALID_ID;
avcenc_context.codedbuf_buf_id = VA_INVALID_ID;
avcenc_context.misc_parameter_hrd_buf_id = VA_INVALID_ID;
avcenc_context.codedbuf_i_size = width * height;
avcenc_context.codedbuf_pb_size = width * height;
avcenc_context.current_input_surface = SID_INPUT_PICTURE_0;
avcenc_context.upload_thread_value = -1;
avcenc_context.packed_sei_header_param_buf_id = VA_INVALID_ID;
avcenc_context.packed_sei_buf_id = VA_INVALID_ID;
 
if (qp_value == -1)
avcenc_context.rate_control_method = VA_RC_CBR;
else if (qp_value == -2)
avcenc_context.rate_control_method = VA_RC_VBR;
else {
assert(qp_value >= 0 && qp_value <= 51);
avcenc_context.rate_control_method = VA_RC_CQP;
}
 
for (i = 0; i < MAX_SLICES; i++) {
avcenc_context.slice_param_buf_id[i] = VA_INVALID_ID;
}
 
avcenc_context_seq_param_init(&avcenc_context.seq_param, width, height);
avcenc_context_pic_param_init(&avcenc_context.pic_param);
if (avcenc_context.rate_control_method == VA_RC_CBR)
avcenc_context_sei_init();
}
 
int main(int argc, char *argv[])
{
int f;
FILE *yuv_fp;
FILE *avc_fp;
off_t file_size;
int mode_value;
struct timeval tpstart,tpend;
float timeuse;
 
va_init_display_args(&argc, argv);
 
//TODO: maybe we should use an option-parsing library
if(argc != 5 && argc != 6 && argc != 7) {
show_help();
return -1;
}
 
picture_width = atoi(argv[1]);
picture_height = atoi(argv[2]);
picture_width_in_mbs = (picture_width + 15) / 16;
picture_height_in_mbs = (picture_height + 15) / 16;
 
if (argc == 6 || argc == 7) {
qp_value = -1;
sscanf(argv[5], "qp=%d", &qp_value);
if ( qp_value == -1 ) {
frame_bit_rate = -1;
sscanf(argv[5], "fb=%d", &frame_bit_rate);
if ( frame_bit_rate == -1 ) {
show_help();
return -1;
}
} else if (qp_value > 51) {
qp_value = 51;
} else if (qp_value < 0) {
qp_value = 0;
}
} else
qp_value = 28; //default const QP mode
 
if (argc == 7) {
sscanf(argv[6], "mode=%d", &mode_value);
if ( mode_value == 0 ) {
ip_period = 0;
}
else if ( mode_value == 1) {
ip_period = 1;
}
else if ( mode_value == 2 ) {
/* Hack mechanism before adding the parameter of B-frame number */
ip_period = 3;
}
else {
printf("mode_value=%d\n",mode_value);
show_help();
return -1;
}
}
 
yuv_fp = fopen(argv[3],"rb");
if ( yuv_fp == NULL){
printf("Can't open input YUV file\n");
return -1;
}
fseek(yuv_fp,0l, SEEK_END);
file_size = ftell(yuv_fp);
frame_size = picture_width * picture_height + ((picture_width * picture_height) >> 1) ;
 
if ( (file_size < frame_size) || (file_size % frame_size) ) {
fclose(yuv_fp);
printf("The YUV file's size is not correct\n");
return -1;
}
frame_number = file_size / frame_size;
fseek(yuv_fp, 0l, SEEK_SET);
 
avc_fp = fopen(argv[4], "wb");
if ( avc_fp == NULL) {
fclose(yuv_fp);
printf("Can't open output avc file\n");
return -1;
}
gettimeofday(&tpstart,NULL);
avcenc_context_init(picture_width, picture_height);
create_encode_pipe();
alloc_encode_resource(yuv_fp);
 
enc_frame_number = 0;
for ( f = 0; f < frame_number; f++) { //picture level loop
unsigned long long next_frame_display;
int next_frame_type;
 
enc_frame_number = f;
 
encoding2display_order(enc_frame_number, intra_period, ip_period,
&current_frame_display, &current_frame_type);
 
encoding2display_order(enc_frame_number + 1, intra_period, ip_period,
&next_frame_display, &next_frame_type);
 
if (current_frame_type == FRAME_IDR) {
numShortTerm = 0;
current_frame_num = 0;
current_IDR_display = current_frame_display;
if (avcenc_context.rate_control_method == VA_RC_CBR) {
unsigned long long frame_interval;
 
frame_interval = enc_frame_number - avcenc_context.idr_frame_num;
 
/* Based on the H264 spec the removal time of the IDR access
* unit is derived as the following:
* the removal time of previous IDR unit + Tc * cpb_removal_delay(n)
*/
avcenc_context.current_cpb_removal = avcenc_context.prev_idr_cpb_removal +
frame_interval * 2;
avcenc_context.idr_frame_num = enc_frame_number;
avcenc_context.current_idr_cpb_removal = avcenc_context.current_cpb_removal;
if (ip_period)
avcenc_context.current_dpb_removal_delta = (ip_period + 1) * 2;
else
avcenc_context.current_dpb_removal_delta = 2;
}
} else {
if (avcenc_context.rate_control_method == VA_RC_CBR) {
unsigned long long frame_interval;
 
frame_interval = enc_frame_number - avcenc_context.idr_frame_num;
 
/* Based on the H264 spec the removal time of the non-IDR access
* unit is derived as the following:
* the removal time of current IDR unit + Tc * cpb_removal_delay(n)
*/
avcenc_context.current_cpb_removal = avcenc_context.current_idr_cpb_removal +
frame_interval * 2;
if (current_frame_type == SLICE_TYPE_I ||
current_frame_type == SLICE_TYPE_P) {
if (ip_period)
avcenc_context.current_dpb_removal_delta = (ip_period + 1) * 2;
else
avcenc_context.current_dpb_removal_delta = 2;
} else
avcenc_context.current_dpb_removal_delta = 2;
}
}
 
/* use the simple mechanism to calc the POC */
current_poc = (current_frame_display - current_IDR_display) * 2;
 
encode_picture(yuv_fp, avc_fp, frame_number, current_frame_display,
(current_frame_type == FRAME_IDR) ? 1 : 0,
(current_frame_type == FRAME_IDR) ? SLICE_TYPE_I : current_frame_type,
(next_frame_type == SLICE_TYPE_B) ? 1 : 0,
next_frame_display);
if ((current_frame_type == FRAME_IDR) &&
(avcenc_context.rate_control_method == VA_RC_CBR)) {
/* after one IDR frame is written, it needs to update the
* prev_idr_cpb_removal for next IDR
*/
avcenc_context.prev_idr_cpb_removal = avcenc_context.current_idr_cpb_removal;
}
printf("\r %d/%d ...", f, frame_number);
fflush(stdout);
}
 
gettimeofday(&tpend,NULL);
timeuse=1000000*(tpend.tv_sec-tpstart.tv_sec)+ tpend.tv_usec-tpstart.tv_usec;
timeuse/=1000000;
printf("\ndone!\n");
printf("encode %d frames in %f secondes, FPS is %.1f\n",frame_number, timeuse, frame_number/timeuse);
release_encode_resource();
destory_encode_pipe();
 
fclose(yuv_fp);
fclose(avc_fp);
 
return 0;
}
/contrib/sdk/sources/vaapi/libva-1.4.1/test/encode/h264encode.c
0,0 → 1,2387
/*
* Copyright (c) 2007-2013 Intel Corporation. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
* IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#include "sysdeps.h"
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <getopt.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <fcntl.h>
#include <assert.h>
#include <pthread.h>
#include <errno.h>
#include <math.h>
#include <va/va.h>
#include <va/va_enc_h264.h>
#include "va_display.h"
 
#define CHECK_VASTATUS(va_status,func) \
if (va_status != VA_STATUS_SUCCESS) { \
fprintf(stderr,"%s:%s (%d) failed,exit\n", __func__, func, __LINE__); \
exit(1); \
}
 
#include "../loadsurface.h"
 
#define NAL_REF_IDC_NONE 0
#define NAL_REF_IDC_LOW 1
#define NAL_REF_IDC_MEDIUM 2
#define NAL_REF_IDC_HIGH 3
 
#define NAL_NON_IDR 1
#define NAL_IDR 5
#define NAL_SPS 7
#define NAL_PPS 8
#define NAL_SEI 6
 
#define SLICE_TYPE_P 0
#define SLICE_TYPE_B 1
#define SLICE_TYPE_I 2
#define IS_P_SLICE(type) (SLICE_TYPE_P == (type))
#define IS_B_SLICE(type) (SLICE_TYPE_B == (type))
#define IS_I_SLICE(type) (SLICE_TYPE_I == (type))
 
 
#define ENTROPY_MODE_CAVLC 0
#define ENTROPY_MODE_CABAC 1
 
#define PROFILE_IDC_BASELINE 66
#define PROFILE_IDC_MAIN 77
#define PROFILE_IDC_HIGH 100
#define BITSTREAM_ALLOCATE_STEPPING 4096
 
#define SURFACE_NUM 16 /* 16 surfaces for source YUV and 16 surfaces for reference */
static VADisplay va_dpy;
static VAProfile h264_profile = ~0;
static VAConfigAttrib attrib[VAConfigAttribTypeMax];
static VAConfigAttrib config_attrib[VAConfigAttribTypeMax];
static int config_attrib_num = 0, enc_packed_header_idx;
static VASurfaceID src_surface[SURFACE_NUM];
static VABufferID coded_buf[SURFACE_NUM];
static VASurfaceID ref_surface[SURFACE_NUM];
static VAConfigID config_id;
static VAContextID context_id;
static VAEncSequenceParameterBufferH264 seq_param;
static VAEncPictureParameterBufferH264 pic_param;
static VAEncSliceParameterBufferH264 slice_param;
static VAPictureH264 CurrentCurrPic;
static VAPictureH264 ReferenceFrames[16], RefPicList0_P[32], RefPicList0_B[32], RefPicList1_B[32];
 
static unsigned int MaxFrameNum = (2<<16);
static unsigned int MaxPicOrderCntLsb = (2<<8);
static unsigned int Log2MaxFrameNum = 16;
static unsigned int Log2MaxPicOrderCntLsb = 8;
 
static unsigned int num_ref_frames = 2;
static unsigned int numShortTerm = 0;
static int constraint_set_flag = 0;
static int h264_packedheader = 0; /* support pack header? */
static int h264_maxref = (1<<16|1);
static int h264_entropy_mode = 1; /* cabac */
 
static char *coded_fn = NULL, *srcyuv_fn = NULL, *recyuv_fn = NULL;
static FILE *coded_fp = NULL, *srcyuv_fp = NULL, *recyuv_fp = NULL;
static unsigned long long srcyuv_frames = 0;
static int srcyuv_fourcc = VA_FOURCC_NV12;
static int calc_psnr = 0;
 
static int frame_width = 176;
static int frame_height = 144;
static int frame_width_mbaligned;
static int frame_height_mbaligned;
static int frame_rate = 30;
static unsigned int frame_count = 60;
static unsigned int frame_coded = 0;
static unsigned int frame_bitrate = 0;
static unsigned int frame_slices = 1;
static double frame_size = 0;
static int initial_qp = 26;
static int minimal_qp = 0;
static int intra_period = 30;
static int intra_idr_period = 60;
static int ip_period = 1;
static int rc_mode = -1;
static int rc_default_modes[] = {
VA_RC_VBR,
VA_RC_CQP,
VA_RC_VBR_CONSTRAINED,
VA_RC_CBR,
VA_RC_VCM,
VA_RC_NONE,
};
static unsigned long long current_frame_encoding = 0;
static unsigned long long current_frame_display = 0;
static unsigned long long current_IDR_display = 0;
static unsigned int current_frame_num = 0;
static int current_frame_type;
#define current_slot (current_frame_display % SURFACE_NUM)
 
static int misc_priv_type = 0;
static int misc_priv_value = 0;
 
#define MIN(a, b) ((a)>(b)?(b):(a))
#define MAX(a, b) ((a)>(b)?(a):(b))
 
/* thread to save coded data/upload source YUV */
struct storage_task_t {
void *next;
unsigned long long display_order;
unsigned long long encode_order;
};
static struct storage_task_t *storage_task_header = NULL, *storage_task_tail = NULL;
#define SRC_SURFACE_IN_ENCODING 0
#define SRC_SURFACE_IN_STORAGE 1
static int srcsurface_status[SURFACE_NUM];
static int encode_syncmode = 0;
static pthread_mutex_t encode_mutex = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t encode_cond = PTHREAD_COND_INITIALIZER;
static pthread_t encode_thread;
/* for performance profiling */
static unsigned int UploadPictureTicks=0;
static unsigned int BeginPictureTicks=0;
static unsigned int RenderPictureTicks=0;
static unsigned int EndPictureTicks=0;
static unsigned int SyncPictureTicks=0;
static unsigned int SavePictureTicks=0;
static unsigned int TotalTicks=0;
 
struct __bitstream {
unsigned int *buffer;
int bit_offset;
int max_size_in_dword;
};
typedef struct __bitstream bitstream;
 
 
static unsigned int
va_swap32(unsigned int val)
{
unsigned char *pval = (unsigned char *)&val;
 
return ((pval[0] << 24) |
(pval[1] << 16) |
(pval[2] << 8) |
(pval[3] << 0));
}
 
static void
bitstream_start(bitstream *bs)
{
bs->max_size_in_dword = BITSTREAM_ALLOCATE_STEPPING;
bs->buffer = calloc(bs->max_size_in_dword * sizeof(int), 1);
bs->bit_offset = 0;
}
 
static void
bitstream_end(bitstream *bs)
{
int pos = (bs->bit_offset >> 5);
int bit_offset = (bs->bit_offset & 0x1f);
int bit_left = 32 - bit_offset;
 
if (bit_offset) {
bs->buffer[pos] = va_swap32((bs->buffer[pos] << bit_left));
}
}
static void
bitstream_put_ui(bitstream *bs, unsigned int val, int size_in_bits)
{
int pos = (bs->bit_offset >> 5);
int bit_offset = (bs->bit_offset & 0x1f);
int bit_left = 32 - bit_offset;
 
if (!size_in_bits)
return;
 
bs->bit_offset += size_in_bits;
 
if (bit_left > size_in_bits) {
bs->buffer[pos] = (bs->buffer[pos] << size_in_bits | val);
} else {
size_in_bits -= bit_left;
bs->buffer[pos] = (bs->buffer[pos] << bit_left) | (val >> size_in_bits);
bs->buffer[pos] = va_swap32(bs->buffer[pos]);
 
if (pos + 1 == bs->max_size_in_dword) {
bs->max_size_in_dword += BITSTREAM_ALLOCATE_STEPPING;
bs->buffer = realloc(bs->buffer, bs->max_size_in_dword * sizeof(unsigned int));
}
 
bs->buffer[pos + 1] = val;
}
}
 
static void
bitstream_put_ue(bitstream *bs, unsigned int val)
{
int size_in_bits = 0;
int tmp_val = ++val;
 
while (tmp_val) {
tmp_val >>= 1;
size_in_bits++;
}
 
bitstream_put_ui(bs, 0, size_in_bits - 1); // leading zero
bitstream_put_ui(bs, val, size_in_bits);
}
 
static void
bitstream_put_se(bitstream *bs, int val)
{
unsigned int new_val;
 
if (val <= 0)
new_val = -2 * val;
else
new_val = 2 * val - 1;
 
bitstream_put_ue(bs, new_val);
}
 
static void
bitstream_byte_aligning(bitstream *bs, int bit)
{
int bit_offset = (bs->bit_offset & 0x7);
int bit_left = 8 - bit_offset;
int new_val;
 
if (!bit_offset)
return;
 
assert(bit == 0 || bit == 1);
 
if (bit)
new_val = (1 << bit_left) - 1;
else
new_val = 0;
 
bitstream_put_ui(bs, new_val, bit_left);
}
 
static void
rbsp_trailing_bits(bitstream *bs)
{
bitstream_put_ui(bs, 1, 1);
bitstream_byte_aligning(bs, 0);
}
 
static void nal_start_code_prefix(bitstream *bs)
{
bitstream_put_ui(bs, 0x00000001, 32);
}
 
static void nal_header(bitstream *bs, int nal_ref_idc, int nal_unit_type)
{
bitstream_put_ui(bs, 0, 1); /* forbidden_zero_bit: 0 */
bitstream_put_ui(bs, nal_ref_idc, 2);
bitstream_put_ui(bs, nal_unit_type, 5);
}
 
static void sps_rbsp(bitstream *bs)
{
int profile_idc = PROFILE_IDC_BASELINE;
 
if (h264_profile == VAProfileH264High)
profile_idc = PROFILE_IDC_HIGH;
else if (h264_profile == VAProfileH264Main)
profile_idc = PROFILE_IDC_MAIN;
 
bitstream_put_ui(bs, profile_idc, 8); /* profile_idc */
bitstream_put_ui(bs, !!(constraint_set_flag & 1), 1); /* constraint_set0_flag */
bitstream_put_ui(bs, !!(constraint_set_flag & 2), 1); /* constraint_set1_flag */
bitstream_put_ui(bs, !!(constraint_set_flag & 4), 1); /* constraint_set2_flag */
bitstream_put_ui(bs, !!(constraint_set_flag & 8), 1); /* constraint_set3_flag */
bitstream_put_ui(bs, 0, 4); /* reserved_zero_4bits */
bitstream_put_ui(bs, seq_param.level_idc, 8); /* level_idc */
bitstream_put_ue(bs, seq_param.seq_parameter_set_id); /* seq_parameter_set_id */
 
if ( profile_idc == PROFILE_IDC_HIGH) {
bitstream_put_ue(bs, 1); /* chroma_format_idc = 1, 4:2:0 */
bitstream_put_ue(bs, 0); /* bit_depth_luma_minus8 */
bitstream_put_ue(bs, 0); /* bit_depth_chroma_minus8 */
bitstream_put_ui(bs, 0, 1); /* qpprime_y_zero_transform_bypass_flag */
bitstream_put_ui(bs, 0, 1); /* seq_scaling_matrix_present_flag */
}
 
bitstream_put_ue(bs, seq_param.seq_fields.bits.log2_max_frame_num_minus4); /* log2_max_frame_num_minus4 */
bitstream_put_ue(bs, seq_param.seq_fields.bits.pic_order_cnt_type); /* pic_order_cnt_type */
 
if (seq_param.seq_fields.bits.pic_order_cnt_type == 0)
bitstream_put_ue(bs, seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4); /* log2_max_pic_order_cnt_lsb_minus4 */
else {
assert(0);
}
 
bitstream_put_ue(bs, seq_param.max_num_ref_frames); /* num_ref_frames */
bitstream_put_ui(bs, 0, 1); /* gaps_in_frame_num_value_allowed_flag */
 
bitstream_put_ue(bs, seq_param.picture_width_in_mbs - 1); /* pic_width_in_mbs_minus1 */
bitstream_put_ue(bs, seq_param.picture_height_in_mbs - 1); /* pic_height_in_map_units_minus1 */
bitstream_put_ui(bs, seq_param.seq_fields.bits.frame_mbs_only_flag, 1); /* frame_mbs_only_flag */
 
if (!seq_param.seq_fields.bits.frame_mbs_only_flag) {
assert(0);
}
 
bitstream_put_ui(bs, seq_param.seq_fields.bits.direct_8x8_inference_flag, 1); /* direct_8x8_inference_flag */
bitstream_put_ui(bs, seq_param.frame_cropping_flag, 1); /* frame_cropping_flag */
 
if (seq_param.frame_cropping_flag) {
bitstream_put_ue(bs, seq_param.frame_crop_left_offset); /* frame_crop_left_offset */
bitstream_put_ue(bs, seq_param.frame_crop_right_offset); /* frame_crop_right_offset */
bitstream_put_ue(bs, seq_param.frame_crop_top_offset); /* frame_crop_top_offset */
bitstream_put_ue(bs, seq_param.frame_crop_bottom_offset); /* frame_crop_bottom_offset */
}
//if ( frame_bit_rate < 0 ) { //TODO EW: the vui header isn't correct
if ( 1 ) {
bitstream_put_ui(bs, 0, 1); /* vui_parameters_present_flag */
} else {
bitstream_put_ui(bs, 1, 1); /* vui_parameters_present_flag */
bitstream_put_ui(bs, 0, 1); /* aspect_ratio_info_present_flag */
bitstream_put_ui(bs, 0, 1); /* overscan_info_present_flag */
bitstream_put_ui(bs, 0, 1); /* video_signal_type_present_flag */
bitstream_put_ui(bs, 0, 1); /* chroma_loc_info_present_flag */
bitstream_put_ui(bs, 1, 1); /* timing_info_present_flag */
{
bitstream_put_ui(bs, 15, 32); /* num_units_in_tick */
bitstream_put_ui(bs, 900, 32); /* time_scale */
bitstream_put_ui(bs, 1, 1); /* fixed_frame_rate_flag */
}
bitstream_put_ui(bs, 1, 1); /* nal_hrd_parameters_present_flag */
{
// hrd_parameters
bitstream_put_ue(bs, 0); /* cpb_cnt_minus1 */
bitstream_put_ui(bs, 4, 4); /* bit_rate_scale */
bitstream_put_ui(bs, 6, 4); /* cpb_size_scale */
bitstream_put_ue(bs, frame_bitrate - 1); /* bit_rate_value_minus1[0] */
bitstream_put_ue(bs, frame_bitrate*8 - 1); /* cpb_size_value_minus1[0] */
bitstream_put_ui(bs, 1, 1); /* cbr_flag[0] */
 
bitstream_put_ui(bs, 23, 5); /* initial_cpb_removal_delay_length_minus1 */
bitstream_put_ui(bs, 23, 5); /* cpb_removal_delay_length_minus1 */
bitstream_put_ui(bs, 23, 5); /* dpb_output_delay_length_minus1 */
bitstream_put_ui(bs, 23, 5); /* time_offset_length */
}
bitstream_put_ui(bs, 0, 1); /* vcl_hrd_parameters_present_flag */
bitstream_put_ui(bs, 0, 1); /* low_delay_hrd_flag */
 
bitstream_put_ui(bs, 0, 1); /* pic_struct_present_flag */
bitstream_put_ui(bs, 0, 1); /* bitstream_restriction_flag */
}
 
rbsp_trailing_bits(bs); /* rbsp_trailing_bits */
}
 
 
static void pps_rbsp(bitstream *bs)
{
bitstream_put_ue(bs, pic_param.pic_parameter_set_id); /* pic_parameter_set_id */
bitstream_put_ue(bs, pic_param.seq_parameter_set_id); /* seq_parameter_set_id */
 
bitstream_put_ui(bs, pic_param.pic_fields.bits.entropy_coding_mode_flag, 1); /* entropy_coding_mode_flag */
 
bitstream_put_ui(bs, 0, 1); /* pic_order_present_flag: 0 */
 
bitstream_put_ue(bs, 0); /* num_slice_groups_minus1 */
 
bitstream_put_ue(bs, pic_param.num_ref_idx_l0_active_minus1); /* num_ref_idx_l0_active_minus1 */
bitstream_put_ue(bs, pic_param.num_ref_idx_l1_active_minus1); /* num_ref_idx_l1_active_minus1 1 */
 
bitstream_put_ui(bs, pic_param.pic_fields.bits.weighted_pred_flag, 1); /* weighted_pred_flag: 0 */
bitstream_put_ui(bs, pic_param.pic_fields.bits.weighted_bipred_idc, 2); /* weighted_bipred_idc: 0 */
 
bitstream_put_se(bs, pic_param.pic_init_qp - 26); /* pic_init_qp_minus26 */
bitstream_put_se(bs, 0); /* pic_init_qs_minus26 */
bitstream_put_se(bs, 0); /* chroma_qp_index_offset */
 
bitstream_put_ui(bs, pic_param.pic_fields.bits.deblocking_filter_control_present_flag, 1); /* deblocking_filter_control_present_flag */
bitstream_put_ui(bs, 0, 1); /* constrained_intra_pred_flag */
bitstream_put_ui(bs, 0, 1); /* redundant_pic_cnt_present_flag */
/* more_rbsp_data */
bitstream_put_ui(bs, pic_param.pic_fields.bits.transform_8x8_mode_flag, 1); /*transform_8x8_mode_flag */
bitstream_put_ui(bs, 0, 1); /* pic_scaling_matrix_present_flag */
bitstream_put_se(bs, pic_param.second_chroma_qp_index_offset ); /*second_chroma_qp_index_offset */
 
rbsp_trailing_bits(bs);
}
 
static void slice_header(bitstream *bs)
{
int first_mb_in_slice = slice_param.macroblock_address;
 
bitstream_put_ue(bs, first_mb_in_slice); /* first_mb_in_slice: 0 */
bitstream_put_ue(bs, slice_param.slice_type); /* slice_type */
bitstream_put_ue(bs, slice_param.pic_parameter_set_id); /* pic_parameter_set_id: 0 */
bitstream_put_ui(bs, pic_param.frame_num, seq_param.seq_fields.bits.log2_max_frame_num_minus4 + 4); /* frame_num */
 
/* frame_mbs_only_flag == 1 */
if (!seq_param.seq_fields.bits.frame_mbs_only_flag) {
/* FIXME: */
assert(0);
}
 
if (pic_param.pic_fields.bits.idr_pic_flag)
bitstream_put_ue(bs, slice_param.idr_pic_id); /* idr_pic_id: 0 */
 
if (seq_param.seq_fields.bits.pic_order_cnt_type == 0) {
bitstream_put_ui(bs, pic_param.CurrPic.TopFieldOrderCnt, seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 + 4);
/* pic_order_present_flag == 0 */
} else {
/* FIXME: */
assert(0);
}
 
/* redundant_pic_cnt_present_flag == 0 */
/* slice type */
if (IS_P_SLICE(slice_param.slice_type)) {
bitstream_put_ui(bs, slice_param.num_ref_idx_active_override_flag, 1); /* num_ref_idx_active_override_flag: */
 
if (slice_param.num_ref_idx_active_override_flag)
bitstream_put_ue(bs, slice_param.num_ref_idx_l0_active_minus1);
 
/* ref_pic_list_reordering */
bitstream_put_ui(bs, 0, 1); /* ref_pic_list_reordering_flag_l0: 0 */
} else if (IS_B_SLICE(slice_param.slice_type)) {
bitstream_put_ui(bs, slice_param.direct_spatial_mv_pred_flag, 1); /* direct_spatial_mv_pred: 1 */
 
bitstream_put_ui(bs, slice_param.num_ref_idx_active_override_flag, 1); /* num_ref_idx_active_override_flag: */
 
if (slice_param.num_ref_idx_active_override_flag) {
bitstream_put_ue(bs, slice_param.num_ref_idx_l0_active_minus1);
bitstream_put_ue(bs, slice_param.num_ref_idx_l1_active_minus1);
}
 
/* ref_pic_list_reordering */
bitstream_put_ui(bs, 0, 1); /* ref_pic_list_reordering_flag_l0: 0 */
bitstream_put_ui(bs, 0, 1); /* ref_pic_list_reordering_flag_l1: 0 */
}
 
if ((pic_param.pic_fields.bits.weighted_pred_flag &&
IS_P_SLICE(slice_param.slice_type)) ||
((pic_param.pic_fields.bits.weighted_bipred_idc == 1) &&
IS_B_SLICE(slice_param.slice_type))) {
/* FIXME: fill weight/offset table */
assert(0);
}
 
/* dec_ref_pic_marking */
if (pic_param.pic_fields.bits.reference_pic_flag) { /* nal_ref_idc != 0 */
unsigned char no_output_of_prior_pics_flag = 0;
unsigned char long_term_reference_flag = 0;
unsigned char adaptive_ref_pic_marking_mode_flag = 0;
 
if (pic_param.pic_fields.bits.idr_pic_flag) {
bitstream_put_ui(bs, no_output_of_prior_pics_flag, 1); /* no_output_of_prior_pics_flag: 0 */
bitstream_put_ui(bs, long_term_reference_flag, 1); /* long_term_reference_flag: 0 */
} else {
bitstream_put_ui(bs, adaptive_ref_pic_marking_mode_flag, 1); /* adaptive_ref_pic_marking_mode_flag: 0 */
}
}
 
if (pic_param.pic_fields.bits.entropy_coding_mode_flag &&
!IS_I_SLICE(slice_param.slice_type))
bitstream_put_ue(bs, slice_param.cabac_init_idc); /* cabac_init_idc: 0 */
 
bitstream_put_se(bs, slice_param.slice_qp_delta); /* slice_qp_delta: 0 */
 
/* ignore for SP/SI */
 
if (pic_param.pic_fields.bits.deblocking_filter_control_present_flag) {
bitstream_put_ue(bs, slice_param.disable_deblocking_filter_idc); /* disable_deblocking_filter_idc: 0 */
 
if (slice_param.disable_deblocking_filter_idc != 1) {
bitstream_put_se(bs, slice_param.slice_alpha_c0_offset_div2); /* slice_alpha_c0_offset_div2: 2 */
bitstream_put_se(bs, slice_param.slice_beta_offset_div2); /* slice_beta_offset_div2: 2 */
}
}
 
if (pic_param.pic_fields.bits.entropy_coding_mode_flag) {
bitstream_byte_aligning(bs, 1);
}
}
 
static int
build_packed_pic_buffer(unsigned char **header_buffer)
{
bitstream bs;
 
bitstream_start(&bs);
nal_start_code_prefix(&bs);
nal_header(&bs, NAL_REF_IDC_HIGH, NAL_PPS);
pps_rbsp(&bs);
bitstream_end(&bs);
 
*header_buffer = (unsigned char *)bs.buffer;
return bs.bit_offset;
}
 
static int
build_packed_seq_buffer(unsigned char **header_buffer)
{
bitstream bs;
 
bitstream_start(&bs);
nal_start_code_prefix(&bs);
nal_header(&bs, NAL_REF_IDC_HIGH, NAL_SPS);
sps_rbsp(&bs);
bitstream_end(&bs);
 
*header_buffer = (unsigned char *)bs.buffer;
return bs.bit_offset;
}
 
static int
build_packed_sei_buffer_timing(unsigned int init_cpb_removal_length,
unsigned int init_cpb_removal_delay,
unsigned int init_cpb_removal_delay_offset,
unsigned int cpb_removal_length,
unsigned int cpb_removal_delay,
unsigned int dpb_output_length,
unsigned int dpb_output_delay,
unsigned char **sei_buffer)
{
unsigned char *byte_buf;
int bp_byte_size, i, pic_byte_size;
 
bitstream nal_bs;
bitstream sei_bp_bs, sei_pic_bs;
 
bitstream_start(&sei_bp_bs);
bitstream_put_ue(&sei_bp_bs, 0); /*seq_parameter_set_id*/
bitstream_put_ui(&sei_bp_bs, init_cpb_removal_delay, cpb_removal_length);
bitstream_put_ui(&sei_bp_bs, init_cpb_removal_delay_offset, cpb_removal_length);
if ( sei_bp_bs.bit_offset & 0x7) {
bitstream_put_ui(&sei_bp_bs, 1, 1);
}
bitstream_end(&sei_bp_bs);
bp_byte_size = (sei_bp_bs.bit_offset + 7) / 8;
bitstream_start(&sei_pic_bs);
bitstream_put_ui(&sei_pic_bs, cpb_removal_delay, cpb_removal_length);
bitstream_put_ui(&sei_pic_bs, dpb_output_delay, dpb_output_length);
if ( sei_pic_bs.bit_offset & 0x7) {
bitstream_put_ui(&sei_pic_bs, 1, 1);
}
bitstream_end(&sei_pic_bs);
pic_byte_size = (sei_pic_bs.bit_offset + 7) / 8;
bitstream_start(&nal_bs);
nal_start_code_prefix(&nal_bs);
nal_header(&nal_bs, NAL_REF_IDC_NONE, NAL_SEI);
 
/* Write the SEI buffer period data */
bitstream_put_ui(&nal_bs, 0, 8);
bitstream_put_ui(&nal_bs, bp_byte_size, 8);
byte_buf = (unsigned char *)sei_bp_bs.buffer;
for(i = 0; i < bp_byte_size; i++) {
bitstream_put_ui(&nal_bs, byte_buf[i], 8);
}
free(byte_buf);
/* write the SEI timing data */
bitstream_put_ui(&nal_bs, 0x01, 8);
bitstream_put_ui(&nal_bs, pic_byte_size, 8);
byte_buf = (unsigned char *)sei_pic_bs.buffer;
for(i = 0; i < pic_byte_size; i++) {
bitstream_put_ui(&nal_bs, byte_buf[i], 8);
}
free(byte_buf);
 
rbsp_trailing_bits(&nal_bs);
bitstream_end(&nal_bs);
 
*sei_buffer = (unsigned char *)nal_bs.buffer;
return nal_bs.bit_offset;
}
 
static int build_packed_slice_buffer(unsigned char **header_buffer)
{
bitstream bs;
int is_idr = !!pic_param.pic_fields.bits.idr_pic_flag;
int is_ref = !!pic_param.pic_fields.bits.reference_pic_flag;
 
bitstream_start(&bs);
nal_start_code_prefix(&bs);
 
if (IS_I_SLICE(slice_param.slice_type)) {
nal_header(&bs, NAL_REF_IDC_HIGH, is_idr ? NAL_IDR : NAL_NON_IDR);
} else if (IS_P_SLICE(slice_param.slice_type)) {
nal_header(&bs, NAL_REF_IDC_MEDIUM, NAL_NON_IDR);
} else {
assert(IS_B_SLICE(slice_param.slice_type));
nal_header(&bs, is_ref ? NAL_REF_IDC_LOW : NAL_REF_IDC_NONE, NAL_NON_IDR);
}
 
slice_header(&bs);
bitstream_end(&bs);
 
*header_buffer = (unsigned char *)bs.buffer;
return bs.bit_offset;
}
 
 
/*
* Helper function for profiling purposes
*/
static unsigned int GetTickCount()
{
struct timeval tv;
if (gettimeofday(&tv, NULL))
return 0;
return tv.tv_usec/1000+tv.tv_sec*1000;
}
 
/*
Assume frame sequence is: Frame#0,#1,#2,...,#M,...,#X,... (encoding order)
1) period between Frame #X and Frame #N = #X - #N
2) 0 means infinite for intra_period/intra_idr_period, and 0 is invalid for ip_period
3) intra_idr_period % intra_period (intra_period > 0) and intra_period % ip_period must be 0
4) intra_period and intra_idr_period take precedence over ip_period
5) if ip_period > 1, intra_period and intra_idr_period are not the strict periods
of I/IDR frames; see the examples below
-------------------------------------------------------------------
intra_period intra_idr_period ip_period frame sequence (intra_period/intra_idr_period/ip_period)
0 ignored 1 IDRPPPPPPP ... (No IDR/I any more)
0 ignored >=2 IDR(PBB)(PBB)... (No IDR/I any more)
1 0 ignored IDRIIIIIII... (No IDR any more)
1 1 ignored IDR IDR IDR IDR...
1 >=2 ignored IDRII IDRII IDR... (1/3/ignore)
>=2 0 1 IDRPPP IPPP I... (3/0/1)
>=2 0 >=2 IDR(PBB)(PBB)(IBB) (6/0/3)
(PBB)(IBB)(PBB)(IBB)...
>=2 >=2 1 IDRPPPPP IPPPPP IPPPPP (6/18/1)
IDRPPPPP IPPPPP IPPPPP...
>=2 >=2 >=2 {IDR(PBB)(PBB)(IBB)(PBB)(IBB)(PBB)} (6/18/3)
{IDR(PBB)(PBB)(IBB)(PBB)(IBB)(PBB)}...
{IDR(PBB)(PBB)(IBB)(PBB)} (6/12/3)
{IDR(PBB)(PBB)(IBB)(PBB)}...
{IDR(PBB)(PBB)} (6/6/3)
{IDR(PBB)(PBB)}.
*/
 
/*
* Return displaying order with specified periods and encoding order
* displaying_order: displaying order
* frame_type: frame type
*/
#define FRAME_P 0
#define FRAME_B 1
#define FRAME_I 2
#define FRAME_IDR 7
void encoding2display_order(
unsigned long long encoding_order,int intra_period,
int intra_idr_period,int ip_period,
unsigned long long *displaying_order,
int *frame_type)
{
int encoding_order_gop = 0;
 
if (intra_period == 1) { /* all are I/IDR frames */
*displaying_order = encoding_order;
if (intra_idr_period == 0)
*frame_type = (encoding_order == 0)?FRAME_IDR:FRAME_I;
else
*frame_type = (encoding_order % intra_idr_period == 0)?FRAME_IDR:FRAME_I;
return;
}
 
if (intra_period == 0)
intra_idr_period = 0;
 
/* new sequence like
* IDR PPPPP IPPPPP
* IDR (PBB)(PBB)(IBB)(PBB)
*/
encoding_order_gop = (intra_idr_period == 0)? encoding_order:
(encoding_order % (intra_idr_period + ((ip_period == 1)?0:1)));
if (encoding_order_gop == 0) { /* the first frame */
*frame_type = FRAME_IDR;
*displaying_order = encoding_order;
} else if (((encoding_order_gop - 1) % ip_period) != 0) { /* B frames */
*frame_type = FRAME_B;
*displaying_order = encoding_order - 1;
} else if ((intra_period != 0) && /* have I frames */
(encoding_order_gop >= 2) &&
((ip_period == 1 && encoding_order_gop % intra_period == 0) || /* for IDR PPPPP IPPPP */
/* for IDR (PBB)(PBB)(IBB) */
(ip_period >= 2 && ((encoding_order_gop - 1) / ip_period % (intra_period / ip_period)) == 0))) {
*frame_type = FRAME_I;
*displaying_order = encoding_order + ip_period - 1;
} else {
*frame_type = FRAME_P;
*displaying_order = encoding_order + ip_period - 1;
}
}
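/*
 * Illustrative sketch (hypothetical helper, not part of the original test):
 * prints the mapping produced by encoding2display_order() for the (6/18/3)
 * case from the comment table above, which is handy when checking a change
 * against that table.  Kept under #if 0 so it is never built.
 */
#if 0
static void dump_gop_mapping(void)
{
    unsigned long long enc, disp;
    int type;

    for (enc = 0; enc < 12; enc++) {
        encoding2display_order(enc, 6 /*intra_period*/, 18 /*intra_idr_period*/,
                               3 /*ip_period*/, &disp, &type);
        printf("enc #%llu -> disp #%llu, type %d\n", enc, disp, type);
    }
}
#endif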
 
 
static char *fourcc_to_string(int fourcc)
{
switch (fourcc) {
case VA_FOURCC_NV12:
return "NV12";
case VA_FOURCC_IYUV:
return "IYUV";
case VA_FOURCC_YV12:
return "YV12";
case VA_FOURCC_UYVY:
return "UYVY";
default:
return "Unknown";
}
}
 
static int string_to_fourcc(char *str)
{
int fourcc;
if (!strncmp(str, "NV12", 4))
fourcc = VA_FOURCC_NV12;
else if (!strncmp(str, "IYUV", 4))
fourcc = VA_FOURCC_IYUV;
else if (!strncmp(str, "YV12", 4))
fourcc = VA_FOURCC_YV12;
else if (!strncmp(str, "UYVY", 4))
fourcc = VA_FOURCC_UYVY;
else {
printf("Unknow FOURCC\n");
fourcc = -1;
}
return fourcc;
}
 
 
static char *rc_to_string(int rcmode)
{
switch (rcmode) {
case VA_RC_NONE:
return "NONE";
case VA_RC_CBR:
return "CBR";
case VA_RC_VBR:
return "VBR";
case VA_RC_VCM:
return "VCM";
case VA_RC_CQP:
return "CQP";
case VA_RC_VBR_CONSTRAINED:
return "VBR_CONSTRAINED";
default:
return "Unknown";
}
}
 
static int string_to_rc(char *str)
{
int rc_mode;
if (!strncmp(str, "NONE", 4))
rc_mode = VA_RC_NONE;
else if (!strncmp(str, "CBR", 3))
rc_mode = VA_RC_CBR;
else if (!strncmp(str, "VBR", 3))
rc_mode = VA_RC_VBR;
else if (!strncmp(str, "VCM", 3))
rc_mode = VA_RC_VCM;
else if (!strncmp(str, "CQP", 3))
rc_mode = VA_RC_CQP;
else if (!strncmp(str, "VBR_CONSTRAINED", 15))
rc_mode = VA_RC_VBR_CONSTRAINED;
else {
printf("Unknown RC mode\n");
rc_mode = -1;
}
return rc_mode;
}
 
 
static int print_help(void)
{
printf("./h264encode <options>\n");
printf(" -w <width> -h <height>\n");
printf(" -framecount <frame number>\n");
printf(" -n <frame number>\n");
printf(" if set to 0 and srcyuv is set, the frame count is from srcuv file\n");
printf(" -o <coded file>\n");
printf(" -f <frame rate>\n");
printf(" --intra_period <number>\n");
printf(" --idr_period <number>\n");
printf(" --ip_period <number>\n");
printf(" --bitrate <bitrate>\n");
printf(" --initialqp <number>\n");
printf(" --minqp <number>\n");
printf(" --rcmode <NONE|CBR|VBR|VCM|CQP|VBR_CONTRAINED>\n");
printf(" --syncmode: sequentially upload source, encoding, save result, no multi-thread\n");
printf(" --srcyuv <filename> load YUV from a file\n");
printf(" --fourcc <NV12|IYUV|YV12> source YUV fourcc\n");
printf(" --recyuv <filename> save reconstructed YUV into a file\n");
printf(" --enablePSNR calculate PSNR of recyuv vs. srcyuv\n");
printf(" --entropy <0|1>, 1 means cabac, 0 cavlc\n");
printf(" --profile <BP|MP|HP>\n");
return 0;
}
 
static int process_cmdline(int argc, char *argv[])
{
int c;
const struct option long_opts[] = {
{"help", no_argument, NULL, 0 },
{"bitrate", required_argument, NULL, 1 },
{"minqp", required_argument, NULL, 2 },
{"initialqp", required_argument, NULL, 3 },
{"intra_period", required_argument, NULL, 4 },
{"idr_period", required_argument, NULL, 5 },
{"ip_period", required_argument, NULL, 6 },
{"rcmode", required_argument, NULL, 7 },
{"srcyuv", required_argument, NULL, 9 },
{"recyuv", required_argument, NULL, 10 },
{"fourcc", required_argument, NULL, 11 },
{"syncmode", no_argument, NULL, 12 },
{"enablePSNR", no_argument, NULL, 13 },
{"prit", required_argument, NULL, 14 },
{"priv", required_argument, NULL, 15 },
{"framecount", required_argument, NULL, 16 },
{"entropy", required_argument, NULL, 17 },
{"profile", required_argument, NULL, 18 },
{NULL, no_argument, NULL, 0 }};
int long_index;
while ((c =getopt_long_only(argc,argv,"w:h:n:f:o:?",long_opts,&long_index)) != EOF) {
switch (c) {
case 'w':
frame_width = atoi(optarg);
break;
case 'h':
frame_height = atoi(optarg);
break;
case 'n':
case 16:
frame_count = atoi(optarg);
break;
case 'f':
frame_rate = atoi(optarg);
break;
case 'o':
coded_fn = strdup(optarg);
break;
case 0:
print_help();
exit(0);
case 1:
frame_bitrate = atoi(optarg);
break;
case 2:
minimal_qp = atoi(optarg);
break;
case 3:
initial_qp = atoi(optarg);
break;
case 4:
intra_period = atoi(optarg);
break;
case 5:
intra_idr_period = atoi(optarg);
break;
case 6:
ip_period = atoi(optarg);
break;
case 7:
rc_mode = string_to_rc(optarg);
if (rc_mode < 0) {
print_help();
exit(1);
}
break;
case 9:
srcyuv_fn = strdup(optarg);
break;
case 10:
recyuv_fn = strdup(optarg);
break;
case 11:
srcyuv_fourcc = string_to_fourcc(optarg);
if (srcyuv_fourcc <= 0) {
print_help();
exit(1);
}
break;
case 12:
encode_syncmode = 1;
break;
case 13:
calc_psnr = 1;
break;
case 14:
misc_priv_type = strtol(optarg, NULL, 0);
break;
case 15:
misc_priv_value = strtol(optarg, NULL, 0);
break;
case 17:
h264_entropy_mode = atoi(optarg) ? 1: 0;
break;
case 18:
if (strncmp(optarg, "BP", 2) == 0)
h264_profile = VAProfileH264Baseline;
else if (strncmp(optarg, "MP", 2) == 0)
h264_profile = VAProfileH264Main;
else if (strncmp(optarg, "HP", 2) == 0)
h264_profile = VAProfileH264High;
else
h264_profile = 0;
break;
case ':':
case '?':
print_help();
exit(0);
}
}
 
if (ip_period < 1) {
printf(" ip_period must be greater than 0\n");
exit(0);
}
if (intra_period != 1 && intra_period % ip_period != 0) {
printf(" intra_period must be a multiplier of ip_period\n");
exit(0);
}
if (intra_period != 0 && intra_idr_period % intra_period != 0) {
printf(" intra_idr_period must be a multiplier of intra_period\n");
exit(0);
}
 
if (frame_bitrate == 0)
frame_bitrate = frame_width * frame_height * 12 * frame_rate / 50;
/* open source file */
if (srcyuv_fn) {
srcyuv_fp = fopen(srcyuv_fn,"r");
if (srcyuv_fp == NULL)
printf("Open source YUV file %s failed, use auto-generated YUV data\n", srcyuv_fn);
else {
struct stat tmp;
 
fstat(fileno(srcyuv_fp), &tmp);
srcyuv_frames = tmp.st_size / (frame_width * frame_height * 1.5);
printf("Source YUV file %s with %llu frames\n", srcyuv_fn, srcyuv_frames);
 
if (frame_count == 0)
frame_count = srcyuv_frames;
}
}
 
/* open the file for the reconstructed YUV */
if (recyuv_fn) {
recyuv_fp = fopen(recyuv_fn,"w+");
if (recyuv_fp == NULL)
printf("Open reconstructed YUV file %s failed\n", recyuv_fn);
}
if (coded_fn == NULL) {
struct stat buf;
if (stat("/tmp", &buf) == 0)
coded_fn = strdup("/tmp/test.264");
else if (stat("/sdcard", &buf) == 0)
coded_fn = strdup("/sdcard/test.264");
else
coded_fn = strdup("./test.264");
}
/* store coded data into a file */
coded_fp = fopen(coded_fn,"w+");
if (coded_fp == NULL) {
printf("Open file %s failed, exit\n", coded_fn);
exit(1);
}
 
frame_width_mbaligned = (frame_width + 15) & (~15);
frame_height_mbaligned = (frame_height + 15) & (~15);
if (frame_width != frame_width_mbaligned ||
frame_height != frame_height_mbaligned) {
printf("Source frame is %dx%d and will code clip to %dx%d with crop\n",
frame_width, frame_height,
frame_width_mbaligned, frame_height_mbaligned
);
}
return 0;
}
 
static int init_va(void)
{
VAProfile profile_list[]={VAProfileH264High,VAProfileH264Main,VAProfileH264Baseline,VAProfileH264ConstrainedBaseline};
VAEntrypoint *entrypoints;
int num_entrypoints, slice_entrypoint;
int support_encode = 0;
int major_ver, minor_ver;
VAStatus va_status;
unsigned int i;
 
va_dpy = va_open_display();
va_status = vaInitialize(va_dpy, &major_ver, &minor_ver);
CHECK_VASTATUS(va_status, "vaInitialize");
 
num_entrypoints = vaMaxNumEntrypoints(va_dpy);
entrypoints = malloc(num_entrypoints * sizeof(*entrypoints));
if (!entrypoints) {
fprintf(stderr, "error: failed to initialize VA entrypoints array\n");
exit(1);
}
 
/* use the highest profile */
for (i = 0; i < sizeof(profile_list)/sizeof(profile_list[0]); i++) {
if ((h264_profile != ~0) && h264_profile != profile_list[i])
continue;
h264_profile = profile_list[i];
vaQueryConfigEntrypoints(va_dpy, h264_profile, entrypoints, &num_entrypoints);
for (slice_entrypoint = 0; slice_entrypoint < num_entrypoints; slice_entrypoint++) {
if (entrypoints[slice_entrypoint] == VAEntrypointEncSlice) {
support_encode = 1;
break;
}
}
if (support_encode == 1)
break;
}
if (support_encode == 0) {
printf("Can't find VAEntrypointEncSlice for H264 profiles\n");
exit(1);
} else {
switch (h264_profile) {
case VAProfileH264Baseline:
printf("Use profile VAProfileH264Baseline\n");
ip_period = 1;
constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
h264_entropy_mode = 0;
break;
case VAProfileH264ConstrainedBaseline:
printf("Use profile VAProfileH264ConstrainedBaseline\n");
constraint_set_flag |= (1 << 0 | 1 << 1); /* Annex A.2.2 */
ip_period = 1;
break;
 
case VAProfileH264Main:
printf("Use profile VAProfileH264Main\n");
constraint_set_flag |= (1 << 1); /* Annex A.2.2 */
break;
 
case VAProfileH264High:
constraint_set_flag |= (1 << 3); /* Annex A.2.4 */
printf("Use profile VAProfileH264High\n");
break;
default:
printf("unknow profile. Set to Baseline");
h264_profile = VAProfileH264Baseline;
ip_period = 1;
constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
break;
}
}
 
/* find out the format for the render target, and rate control mode */
for (i = 0; i < VAConfigAttribTypeMax; i++)
attrib[i].type = i;
 
va_status = vaGetConfigAttributes(va_dpy, h264_profile, VAEntrypointEncSlice,
&attrib[0], VAConfigAttribTypeMax);
CHECK_VASTATUS(va_status, "vaGetConfigAttributes");
/* check the interested configattrib */
if ((attrib[VAConfigAttribRTFormat].value & VA_RT_FORMAT_YUV420) == 0) {
printf("Not find desired YUV420 RT format\n");
exit(1);
} else {
config_attrib[config_attrib_num].type = VAConfigAttribRTFormat;
config_attrib[config_attrib_num].value = VA_RT_FORMAT_YUV420;
config_attrib_num++;
}
if (attrib[VAConfigAttribRateControl].value != VA_ATTRIB_NOT_SUPPORTED) {
int tmp = attrib[VAConfigAttribRateControl].value;
 
printf("Support rate control mode (0x%x):", tmp);
if (tmp & VA_RC_NONE)
printf("NONE ");
if (tmp & VA_RC_CBR)
printf("CBR ");
if (tmp & VA_RC_VBR)
printf("VBR ");
if (tmp & VA_RC_VCM)
printf("VCM ");
if (tmp & VA_RC_CQP)
printf("CQP ");
if (tmp & VA_RC_VBR_CONSTRAINED)
printf("VBR_CONSTRAINED ");
 
printf("\n");
 
if (rc_mode == -1 || !(rc_mode & tmp)) {
if (rc_mode != -1) {
printf("Warning: Don't support the specified RateControl mode: %s!!!, switch to ", rc_to_string(rc_mode));
}
 
for (i = 0; i < sizeof(rc_default_modes) / sizeof(rc_default_modes[0]); i++) {
if (rc_default_modes[i] & tmp) {
rc_mode = rc_default_modes[i];
break;
}
}
 
printf("RateControl mode: %s\n", rc_to_string(rc_mode));
}
 
config_attrib[config_attrib_num].type = VAConfigAttribRateControl;
config_attrib[config_attrib_num].value = rc_mode;
config_attrib_num++;
}
 
if (attrib[VAConfigAttribEncPackedHeaders].value != VA_ATTRIB_NOT_SUPPORTED) {
int tmp = attrib[VAConfigAttribEncPackedHeaders].value;
 
printf("Support VAConfigAttribEncPackedHeaders\n");
h264_packedheader = 1;
config_attrib[config_attrib_num].type = VAConfigAttribEncPackedHeaders;
config_attrib[config_attrib_num].value = VA_ENC_PACKED_HEADER_NONE;
if (tmp & VA_ENC_PACKED_HEADER_SEQUENCE) {
printf("Support packed sequence headers\n");
config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_SEQUENCE;
}
if (tmp & VA_ENC_PACKED_HEADER_PICTURE) {
printf("Support packed picture headers\n");
config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_PICTURE;
}
if (tmp & VA_ENC_PACKED_HEADER_SLICE) {
printf("Support packed slice headers\n");
config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_SLICE;
}
if (tmp & VA_ENC_PACKED_HEADER_MISC) {
printf("Support packed misc headers\n");
config_attrib[config_attrib_num].value |= VA_ENC_PACKED_HEADER_MISC;
}
enc_packed_header_idx = config_attrib_num;
config_attrib_num++;
}
 
if (attrib[VAConfigAttribEncInterlaced].value != VA_ATTRIB_NOT_SUPPORTED) {
int tmp = attrib[VAConfigAttribEncInterlaced].value;
printf("Support VAConfigAttribEncInterlaced\n");
 
if (tmp & VA_ENC_INTERLACED_FRAME)
printf("support VA_ENC_INTERLACED_FRAME\n");
if (tmp & VA_ENC_INTERLACED_FIELD)
printf("Support VA_ENC_INTERLACED_FIELD\n");
if (tmp & VA_ENC_INTERLACED_MBAFF)
printf("Support VA_ENC_INTERLACED_MBAFF\n");
if (tmp & VA_ENC_INTERLACED_PAFF)
printf("Support VA_ENC_INTERLACED_PAFF\n");
config_attrib[config_attrib_num].type = VAConfigAttribEncInterlaced;
config_attrib[config_attrib_num].value = VA_ENC_INTERLACED_NONE;
config_attrib_num++;
}
if (attrib[VAConfigAttribEncMaxRefFrames].value != VA_ATTRIB_NOT_SUPPORTED) {
h264_maxref = attrib[VAConfigAttribEncMaxRefFrames].value;
printf("Support %d RefPicList0 and %d RefPicList1\n",
h264_maxref & 0xffff, (h264_maxref >> 16) & 0xffff );
}
 
if (attrib[VAConfigAttribEncMaxSlices].value != VA_ATTRIB_NOT_SUPPORTED)
printf("Support %d slices\n", attrib[VAConfigAttribEncMaxSlices].value);
 
if (attrib[VAConfigAttribEncSliceStructure].value != VA_ATTRIB_NOT_SUPPORTED) {
int tmp = attrib[VAConfigAttribEncSliceStructure].value;
printf("Support VAConfigAttribEncSliceStructure\n");
 
if (tmp & VA_ENC_SLICE_STRUCTURE_ARBITRARY_ROWS)
printf("Support VA_ENC_SLICE_STRUCTURE_ARBITRARY_ROWS\n");
if (tmp & VA_ENC_SLICE_STRUCTURE_POWER_OF_TWO_ROWS)
printf("Support VA_ENC_SLICE_STRUCTURE_POWER_OF_TWO_ROWS\n");
if (tmp & VA_ENC_SLICE_STRUCTURE_ARBITRARY_MACROBLOCKS)
printf("Support VA_ENC_SLICE_STRUCTURE_ARBITRARY_MACROBLOCKS\n");
}
if (attrib[VAConfigAttribEncMacroblockInfo].value != VA_ATTRIB_NOT_SUPPORTED) {
printf("Support VAConfigAttribEncMacroblockInfo\n");
}
 
free(entrypoints);
return 0;
}
 
static int setup_encode()
{
VAStatus va_status;
VASurfaceID *tmp_surfaceid;
int codedbuf_size, i;
va_status = vaCreateConfig(va_dpy, h264_profile, VAEntrypointEncSlice,
&config_attrib[0], config_attrib_num, &config_id);
CHECK_VASTATUS(va_status, "vaCreateConfig");
 
/* create source surfaces */
va_status = vaCreateSurfaces(va_dpy,
VA_RT_FORMAT_YUV420, frame_width_mbaligned, frame_height_mbaligned,
&src_surface[0], SURFACE_NUM,
NULL, 0);
CHECK_VASTATUS(va_status, "vaCreateSurfaces");
 
/* create reference surfaces */
va_status = vaCreateSurfaces(
va_dpy,
VA_RT_FORMAT_YUV420, frame_width_mbaligned, frame_height_mbaligned,
&ref_surface[0], SURFACE_NUM,
NULL, 0
);
CHECK_VASTATUS(va_status, "vaCreateSurfaces");
 
tmp_surfaceid = calloc(2 * SURFACE_NUM, sizeof(VASurfaceID));
memcpy(tmp_surfaceid, src_surface, SURFACE_NUM * sizeof(VASurfaceID));
memcpy(tmp_surfaceid + SURFACE_NUM, ref_surface, SURFACE_NUM * sizeof(VASurfaceID));
/* Create a context for this encode pipe */
va_status = vaCreateContext(va_dpy, config_id,
frame_width_mbaligned, frame_height_mbaligned,
VA_PROGRESSIVE,
tmp_surfaceid, 2 * SURFACE_NUM,
&context_id);
CHECK_VASTATUS(va_status, "vaCreateContext");
free(tmp_surfaceid);
 
codedbuf_size = (frame_width_mbaligned * frame_height_mbaligned * 400) / (16*16);
 
for (i = 0; i < SURFACE_NUM; i++) {
/* Create the coded buffers once (one per surface) and reuse them.
 * The other VA buffers are not used again after vaRenderPicture,
 * so the app can vaCreateBuffer them for every frame, but the coded
 * buffer has to be mapped and read back after vaRenderPicture/vaEndPicture,
 * so VA does not recycle it automatically.
 */
va_status = vaCreateBuffer(va_dpy,context_id,VAEncCodedBufferType,
codedbuf_size, 1, NULL, &coded_buf[i]);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
}
return 0;
}
 
 
 
#define partition(ref, field, key, ascending) \
while (i <= j) { \
if (ascending) { \
while (ref[i].field < key) \
i++; \
while (ref[j].field > key) \
j--; \
} else { \
while (ref[i].field > key) \
i++; \
while (ref[j].field < key) \
j--; \
} \
if (i <= j) { \
tmp = ref[i]; \
ref[i] = ref[j]; \
ref[j] = tmp; \
i++; \
j--; \
} \
} \
 
static void sort_one(VAPictureH264 ref[], int left, int right,
int ascending, int frame_idx)
{
int i = left, j = right;
unsigned int key;
VAPictureH264 tmp;
 
if (frame_idx) {
key = ref[(left + right) / 2].frame_idx;
partition(ref, frame_idx, key, ascending);
} else {
key = ref[(left + right) / 2].TopFieldOrderCnt;
partition(ref, TopFieldOrderCnt, (signed int)key, ascending);
}
/* recursion */
if (left < j)
sort_one(ref, left, j, ascending, frame_idx);
if (i < right)
sort_one(ref, i, right, ascending, frame_idx);
}
 
static void sort_two(VAPictureH264 ref[], int left, int right, unsigned int key, unsigned int frame_idx,
int partition_ascending, int list0_ascending, int list1_ascending)
{
int i = left, j = right;
VAPictureH264 tmp;
 
if (frame_idx) {
partition(ref, frame_idx, key, partition_ascending);
} else {
partition(ref, TopFieldOrderCnt, (signed int)key, partition_ascending);
}
 
sort_one(ref, left, i-1, list0_ascending, frame_idx);
sort_one(ref, j+1, right, list1_ascending, frame_idx);
}
 
static int update_ReferenceFrames(void)
{
int i;
if (current_frame_type == FRAME_B)
return 0;
 
CurrentCurrPic.flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
numShortTerm++;
if (numShortTerm > num_ref_frames)
numShortTerm = num_ref_frames;
for (i=numShortTerm-1; i>0; i--)
ReferenceFrames[i] = ReferenceFrames[i-1];
ReferenceFrames[0] = CurrentCurrPic;
if (current_frame_type != FRAME_B)
current_frame_num++;
if (current_frame_num > MaxFrameNum)
current_frame_num = 0;
return 0;
}
 
 
static int update_RefPicList(void)
{
unsigned int current_poc = CurrentCurrPic.TopFieldOrderCnt;
if (current_frame_type == FRAME_P) {
memcpy(RefPicList0_P, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
sort_one(RefPicList0_P, 0, numShortTerm-1, 0, 1);
}
if (current_frame_type == FRAME_B) {
memcpy(RefPicList0_B, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
sort_two(RefPicList0_B, 0, numShortTerm-1, current_poc, 0,
1, 0, 1);
 
memcpy(RefPicList1_B, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
sort_two(RefPicList1_B, 0, numShortTerm-1, current_poc, 0,
0, 1, 0);
}
return 0;
}
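/*
 * Worked example (illustrative only, not part of the original test): for a
 * B frame with POC (TopFieldOrderCnt) 4 and short-term references whose
 * POCs are {0, 2, 6, 8}, the sort_two() calls above yield
 *   RefPicList0_B = {2, 0, 6, 8}  (past refs by descending POC, then future refs ascending)
 *   RefPicList1_B = {6, 8, 2, 0}  (future refs by ascending POC, then past refs descending)
 * while a P frame gets RefPicList0_P ordered by descending frame_idx,
 * i.e. the most recently coded reference first.
 */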
 
 
static int render_sequence(void)
{
VABufferID seq_param_buf, rc_param_buf, misc_param_tmpbuf, render_id[2];
VAStatus va_status;
VAEncMiscParameterBuffer *misc_param, *misc_param_tmp;
VAEncMiscParameterRateControl *misc_rate_ctrl;
seq_param.level_idc = 41 /*SH_LEVEL_3*/;
seq_param.picture_width_in_mbs = frame_width_mbaligned / 16;
seq_param.picture_height_in_mbs = frame_height_mbaligned / 16;
seq_param.bits_per_second = frame_bitrate;
 
seq_param.intra_period = intra_period;
seq_param.intra_idr_period = intra_idr_period;
seq_param.ip_period = ip_period;
 
seq_param.max_num_ref_frames = num_ref_frames;
seq_param.seq_fields.bits.frame_mbs_only_flag = 1;
seq_param.time_scale = 900;
seq_param.num_units_in_tick = 15; /* Tc = num_units_in_tick / time_scale */
seq_param.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = Log2MaxPicOrderCntLsb - 4;
seq_param.seq_fields.bits.log2_max_frame_num_minus4 = Log2MaxFrameNum - 4;
seq_param.seq_fields.bits.frame_mbs_only_flag = 1;
seq_param.seq_fields.bits.chroma_format_idc = 1;
seq_param.seq_fields.bits.direct_8x8_inference_flag = 1;
if (frame_width != frame_width_mbaligned ||
frame_height != frame_height_mbaligned) {
seq_param.frame_cropping_flag = 1;
seq_param.frame_crop_left_offset = 0;
seq_param.frame_crop_right_offset = (frame_width_mbaligned - frame_width)/2;
seq_param.frame_crop_top_offset = 0;
seq_param.frame_crop_bottom_offset = (frame_height_mbaligned - frame_height)/2;
}
va_status = vaCreateBuffer(va_dpy, context_id,
VAEncSequenceParameterBufferType,
sizeof(seq_param),1,&seq_param,&seq_param_buf);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
va_status = vaCreateBuffer(va_dpy, context_id,
VAEncMiscParameterBufferType,
sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterRateControl),
1,NULL,&rc_param_buf);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
vaMapBuffer(va_dpy, rc_param_buf,(void **)&misc_param);
misc_param->type = VAEncMiscParameterTypeRateControl;
misc_rate_ctrl = (VAEncMiscParameterRateControl *)misc_param->data;
memset(misc_rate_ctrl, 0, sizeof(*misc_rate_ctrl));
misc_rate_ctrl->bits_per_second = frame_bitrate;
misc_rate_ctrl->target_percentage = 66;
misc_rate_ctrl->window_size = 1000;
misc_rate_ctrl->initial_qp = initial_qp;
misc_rate_ctrl->min_qp = minimal_qp;
misc_rate_ctrl->basic_unit_size = 0;
vaUnmapBuffer(va_dpy, rc_param_buf);
 
render_id[0] = seq_param_buf;
render_id[1] = rc_param_buf;
va_status = vaRenderPicture(va_dpy,context_id, &render_id[0], 2);
CHECK_VASTATUS(va_status,"vaRenderPicture");;
 
if (misc_priv_type != 0) {
va_status = vaCreateBuffer(va_dpy, context_id,
VAEncMiscParameterBufferType,
sizeof(VAEncMiscParameterBuffer),
1, NULL, &misc_param_tmpbuf);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
vaMapBuffer(va_dpy, misc_param_tmpbuf,(void **)&misc_param_tmp);
misc_param_tmp->type = misc_priv_type;
misc_param_tmp->data[0] = misc_priv_value;
vaUnmapBuffer(va_dpy, misc_param_tmpbuf);
va_status = vaRenderPicture(va_dpy,context_id, &misc_param_tmpbuf, 1);
}
return 0;
}
 
static int calc_poc(int pic_order_cnt_lsb)
{
static int PicOrderCntMsb_ref = 0, pic_order_cnt_lsb_ref = 0;
int prevPicOrderCntMsb, prevPicOrderCntLsb;
int PicOrderCntMsb, TopFieldOrderCnt;
if (current_frame_type == FRAME_IDR)
prevPicOrderCntMsb = prevPicOrderCntLsb = 0;
else {
prevPicOrderCntMsb = PicOrderCntMsb_ref;
prevPicOrderCntLsb = pic_order_cnt_lsb_ref;
}
if ((pic_order_cnt_lsb < prevPicOrderCntLsb) &&
((prevPicOrderCntLsb - pic_order_cnt_lsb) >= (int)(MaxPicOrderCntLsb / 2)))
PicOrderCntMsb = prevPicOrderCntMsb + MaxPicOrderCntLsb;
else if ((pic_order_cnt_lsb > prevPicOrderCntLsb) &&
((pic_order_cnt_lsb - prevPicOrderCntLsb) > (int)(MaxPicOrderCntLsb / 2)))
PicOrderCntMsb = prevPicOrderCntMsb - MaxPicOrderCntLsb;
else
PicOrderCntMsb = prevPicOrderCntMsb;
TopFieldOrderCnt = PicOrderCntMsb + pic_order_cnt_lsb;
 
if (current_frame_type != FRAME_B) {
PicOrderCntMsb_ref = PicOrderCntMsb;
pic_order_cnt_lsb_ref = pic_order_cnt_lsb;
}
return TopFieldOrderCnt;
}
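/*
 * Illustrative sketch (hypothetical helper, not part of the original test):
 * the same three-way MSB decision as calc_poc() above, written as a pure
 * function so the wrap-around cases are easy to check by hand.  Taking
 * MaxPicOrderCntLsb = 256 purely for illustration (the real value is set
 * elsewhere in this file):
 *   poc_msb(0, 254, 0)   -> 256  (lsb wrapped forward past the maximum)
 *   poc_msb(256, 0, 254) -> 0    (lsb stepped backward across the wrap)
 *   poc_msb(0, 10, 12)   -> 0    (normal small increment)
 * Kept under #if 0 so it is never built.
 */
#if 0
static int poc_msb(int prevMsb, int prevLsb, int lsb)
{
    if ((lsb < prevLsb) && ((prevLsb - lsb) >= (int)(MaxPicOrderCntLsb / 2)))
        return prevMsb + MaxPicOrderCntLsb;  /* lsb wrapped around */
    if ((lsb > prevLsb) && ((lsb - prevLsb) > (int)(MaxPicOrderCntLsb / 2)))
        return prevMsb - MaxPicOrderCntLsb;  /* lsb moved backward across the wrap */
    return prevMsb;
}
#endif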
 
static int render_picture(void)
{
VABufferID pic_param_buf;
VAStatus va_status;
int i = 0;
 
pic_param.CurrPic.picture_id = ref_surface[current_slot];
pic_param.CurrPic.frame_idx = current_frame_num;
pic_param.CurrPic.flags = 0;
pic_param.CurrPic.TopFieldOrderCnt = calc_poc((current_frame_display - current_IDR_display) % MaxPicOrderCntLsb);
pic_param.CurrPic.BottomFieldOrderCnt = pic_param.CurrPic.TopFieldOrderCnt;
CurrentCurrPic = pic_param.CurrPic;
 
if (getenv("TO_DEL")) { /* set RefPicList into ReferenceFrames */
update_RefPicList(); /* calc RefPicList */
memset(pic_param.ReferenceFrames, 0xff, 16 * sizeof(VAPictureH264)); /* invalid all */
if (current_frame_type == FRAME_P) {
pic_param.ReferenceFrames[0] = RefPicList0_P[0];
} else if (current_frame_type == FRAME_B) {
pic_param.ReferenceFrames[0] = RefPicList0_B[0];
pic_param.ReferenceFrames[1] = RefPicList1_B[0];
}
} else {
memcpy(pic_param.ReferenceFrames, ReferenceFrames, numShortTerm*sizeof(VAPictureH264));
for (i = numShortTerm; i < SURFACE_NUM; i++) {
pic_param.ReferenceFrames[i].picture_id = VA_INVALID_SURFACE;
pic_param.ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID;
}
}
pic_param.pic_fields.bits.idr_pic_flag = (current_frame_type == FRAME_IDR);
pic_param.pic_fields.bits.reference_pic_flag = (current_frame_type != FRAME_B);
pic_param.pic_fields.bits.entropy_coding_mode_flag = h264_entropy_mode;
pic_param.pic_fields.bits.deblocking_filter_control_present_flag = 1;
pic_param.frame_num = current_frame_num;
pic_param.coded_buf = coded_buf[current_slot];
pic_param.last_picture = (current_frame_encoding == frame_count);
pic_param.pic_init_qp = initial_qp;
 
va_status = vaCreateBuffer(va_dpy, context_id,VAEncPictureParameterBufferType,
sizeof(pic_param),1,&pic_param, &pic_param_buf);
CHECK_VASTATUS(va_status,"vaCreateBuffer");;
 
va_status = vaRenderPicture(va_dpy,context_id, &pic_param_buf, 1);
CHECK_VASTATUS(va_status,"vaRenderPicture");
 
return 0;
}
 
static int render_packedsequence(void)
{
VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
VABufferID packedseq_para_bufid, packedseq_data_bufid, render_id[2];
unsigned int length_in_bits;
unsigned char *packedseq_buffer = NULL;
VAStatus va_status;
 
length_in_bits = build_packed_seq_buffer(&packedseq_buffer);
packedheader_param_buffer.type = VAEncPackedHeaderSequence;
packedheader_param_buffer.bit_length = length_in_bits; /*length_in_bits*/
packedheader_param_buffer.has_emulation_bytes = 0;
va_status = vaCreateBuffer(va_dpy,
context_id,
VAEncPackedHeaderParameterBufferType,
sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
&packedseq_para_bufid);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
va_status = vaCreateBuffer(va_dpy,
context_id,
VAEncPackedHeaderDataBufferType,
(length_in_bits + 7) / 8, 1, packedseq_buffer,
&packedseq_data_bufid);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
render_id[0] = packedseq_para_bufid;
render_id[1] = packedseq_data_bufid;
va_status = vaRenderPicture(va_dpy,context_id, render_id, 2);
CHECK_VASTATUS(va_status,"vaRenderPicture");
 
free(packedseq_buffer);
return 0;
}
 
 
static int render_packedpicture(void)
{
VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
VABufferID packedpic_para_bufid, packedpic_data_bufid, render_id[2];
unsigned int length_in_bits;
unsigned char *packedpic_buffer = NULL;
VAStatus va_status;
 
length_in_bits = build_packed_pic_buffer(&packedpic_buffer);
packedheader_param_buffer.type = VAEncPackedHeaderPicture;
packedheader_param_buffer.bit_length = length_in_bits;
packedheader_param_buffer.has_emulation_bytes = 0;
 
va_status = vaCreateBuffer(va_dpy,
context_id,
VAEncPackedHeaderParameterBufferType,
sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
&packedpic_para_bufid);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
va_status = vaCreateBuffer(va_dpy,
context_id,
VAEncPackedHeaderDataBufferType,
(length_in_bits + 7) / 8, 1, packedpic_buffer,
&packedpic_data_bufid);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
render_id[0] = packedpic_para_bufid;
render_id[1] = packedpic_data_bufid;
va_status = vaRenderPicture(va_dpy,context_id, render_id, 2);
CHECK_VASTATUS(va_status,"vaRenderPicture");
 
free(packedpic_buffer);
return 0;
}
 
static void render_packedsei(void)
{
VAEncPackedHeaderParameterBuffer packed_header_param_buffer;
VABufferID packed_sei_header_param_buf_id, packed_sei_buf_id, render_id[2];
unsigned int length_in_bits /*offset_in_bytes*/;
unsigned char *packed_sei_buffer = NULL;
VAStatus va_status;
int init_cpb_size, target_bit_rate, i_initial_cpb_removal_delay_length, i_initial_cpb_removal_delay;
int i_cpb_removal_delay, i_dpb_output_delay_length, i_cpb_removal_delay_length;
 
/* derived from the bps defined in the SPS */
target_bit_rate = frame_bitrate;
init_cpb_size = (target_bit_rate * 8) >> 10;
i_initial_cpb_removal_delay = init_cpb_size * 0.5 * 1024 / target_bit_rate * 90000;
 
i_cpb_removal_delay = 2;
i_initial_cpb_removal_delay_length = 24;
i_cpb_removal_delay_length = 24;
i_dpb_output_delay_length = 24;
 
length_in_bits = build_packed_sei_buffer_timing(
i_initial_cpb_removal_delay_length,
i_initial_cpb_removal_delay,
0,
i_cpb_removal_delay_length,
i_cpb_removal_delay * current_frame_encoding,
i_dpb_output_delay_length,
0,
&packed_sei_buffer);
 
//offset_in_bytes = 0;
packed_header_param_buffer.type = VAEncPackedHeaderH264_SEI;
packed_header_param_buffer.bit_length = length_in_bits;
packed_header_param_buffer.has_emulation_bytes = 0;
 
va_status = vaCreateBuffer(va_dpy,
context_id,
VAEncPackedHeaderParameterBufferType,
sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer,
&packed_sei_header_param_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
va_status = vaCreateBuffer(va_dpy,
context_id,
VAEncPackedHeaderDataBufferType,
(length_in_bits + 7) / 8, 1, packed_sei_buffer,
&packed_sei_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
 
render_id[0] = packed_sei_header_param_buf_id;
render_id[1] = packed_sei_buf_id;
va_status = vaRenderPicture(va_dpy,context_id, render_id, 2);
CHECK_VASTATUS(va_status,"vaRenderPicture");
 
free(packed_sei_buffer);
return;
}
 
 
static int render_hrd(void)
{
VABufferID misc_parameter_hrd_buf_id;
VAStatus va_status;
VAEncMiscParameterBuffer *misc_param;
VAEncMiscParameterHRD *misc_hrd_param;
va_status = vaCreateBuffer(va_dpy, context_id,
VAEncMiscParameterBufferType,
sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterHRD),
1,
NULL,
&misc_parameter_hrd_buf_id);
CHECK_VASTATUS(va_status, "vaCreateBuffer");
 
vaMapBuffer(va_dpy,
misc_parameter_hrd_buf_id,
(void **)&misc_param);
misc_param->type = VAEncMiscParameterTypeHRD;
misc_hrd_param = (VAEncMiscParameterHRD *)misc_param->data;
 
if (frame_bitrate > 0) {
misc_hrd_param->initial_buffer_fullness = frame_bitrate * 1024 * 4;
misc_hrd_param->buffer_size = frame_bitrate * 1024 * 8;
} else {
misc_hrd_param->initial_buffer_fullness = 0;
misc_hrd_param->buffer_size = 0;
}
vaUnmapBuffer(va_dpy, misc_parameter_hrd_buf_id);
 
va_status = vaRenderPicture(va_dpy,context_id, &misc_parameter_hrd_buf_id, 1);
CHECK_VASTATUS(va_status,"vaRenderPicture");;
 
return 0;
}
 
static void render_packedslice()
{
VAEncPackedHeaderParameterBuffer packedheader_param_buffer;
VABufferID packedslice_para_bufid, packedslice_data_bufid, render_id[2];
unsigned int length_in_bits;
unsigned char *packedslice_buffer = NULL;
VAStatus va_status;
 
length_in_bits = build_packed_slice_buffer(&packedslice_buffer);
packedheader_param_buffer.type = VAEncPackedHeaderSlice;
packedheader_param_buffer.bit_length = length_in_bits;
packedheader_param_buffer.has_emulation_bytes = 0;
 
va_status = vaCreateBuffer(va_dpy,
context_id,
VAEncPackedHeaderParameterBufferType,
sizeof(packedheader_param_buffer), 1, &packedheader_param_buffer,
&packedslice_para_bufid);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
va_status = vaCreateBuffer(va_dpy,
context_id,
VAEncPackedHeaderDataBufferType,
(length_in_bits + 7) / 8, 1, packedslice_buffer,
&packedslice_data_bufid);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
render_id[0] = packedslice_para_bufid;
render_id[1] = packedslice_data_bufid;
va_status = vaRenderPicture(va_dpy,context_id, render_id, 2);
CHECK_VASTATUS(va_status,"vaRenderPicture");
 
free(packedslice_buffer);
}
 
static int render_slice(void)
{
VABufferID slice_param_buf;
VAStatus va_status;
int i;
 
update_RefPicList();
/* one frame, one slice */
slice_param.macroblock_address = 0;
slice_param.num_macroblocks = frame_width_mbaligned * frame_height_mbaligned/(16*16); /* Measured by MB */
slice_param.slice_type = (current_frame_type == FRAME_IDR)?2:current_frame_type;
if (current_frame_type == FRAME_IDR) {
if (current_frame_encoding != 0)
++slice_param.idr_pic_id;
} else if (current_frame_type == FRAME_P) {
int refpiclist0_max = h264_maxref & 0xffff;
memcpy(slice_param.RefPicList0, RefPicList0_P, refpiclist0_max*sizeof(VAPictureH264));
 
for (i = refpiclist0_max; i < 32; i++) {
slice_param.RefPicList0[i].picture_id = VA_INVALID_SURFACE;
slice_param.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
}
} else if (current_frame_type == FRAME_B) {
int refpiclist0_max = h264_maxref & 0xffff;
int refpiclist1_max = (h264_maxref >> 16) & 0xffff;
 
memcpy(slice_param.RefPicList0, RefPicList0_B, refpiclist0_max*sizeof(VAPictureH264));
for (i = refpiclist0_max; i < 32; i++) {
slice_param.RefPicList0[i].picture_id = VA_INVALID_SURFACE;
slice_param.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
}
 
memcpy(slice_param.RefPicList1, RefPicList1_B, refpiclist1_max*sizeof(VAPictureH264));
for (i = refpiclist1_max; i < 32; i++) {
slice_param.RefPicList1[i].picture_id = VA_INVALID_SURFACE;
slice_param.RefPicList1[i].flags = VA_PICTURE_H264_INVALID;
}
}
 
slice_param.slice_alpha_c0_offset_div2 = 0;
slice_param.slice_beta_offset_div2 = 0;
slice_param.direct_spatial_mv_pred_flag = 1;
slice_param.pic_order_cnt_lsb = (current_frame_display - current_IDR_display) % MaxPicOrderCntLsb;
 
if (h264_packedheader &&
config_attrib[enc_packed_header_idx].value & VA_ENC_PACKED_HEADER_SLICE)
render_packedslice();
 
va_status = vaCreateBuffer(va_dpy,context_id,VAEncSliceParameterBufferType,
sizeof(slice_param),1,&slice_param,&slice_param_buf);
CHECK_VASTATUS(va_status,"vaCreateBuffer");;
 
va_status = vaRenderPicture(va_dpy,context_id, &slice_param_buf, 1);
CHECK_VASTATUS(va_status,"vaRenderPicture");
return 0;
}
 
 
static int upload_source_YUV_once_for_all()
{
int box_width=8;
int row_shift=0;
int i;
 
for (i = 0; i < SURFACE_NUM; i++) {
printf("\rLoading data into surface %d.....", i);
upload_surface(va_dpy, src_surface[i], box_width, row_shift, 0);
 
row_shift++;
if (row_shift==(2*box_width)) row_shift= 0;
}
printf("Complete surface loading\n");
 
return 0;
}
 
static int load_surface(VASurfaceID surface_id, unsigned long long display_order)
{
unsigned char *srcyuv_ptr = NULL, *src_Y = NULL, *src_U = NULL, *src_V = NULL;
unsigned long long frame_start, mmap_start;
char *mmap_ptr = NULL;
int frame_size, mmap_size;
if (srcyuv_fp == NULL)
return 0;
/* allow encoding more than srcyuv_frames */
display_order = display_order % srcyuv_frames;
frame_size = frame_width * frame_height * 3 / 2; /* for YUV420 */
frame_start = display_order * frame_size;
mmap_start = frame_start & (~0xfff);
mmap_size = (frame_size + (frame_start & 0xfff) + 0xfff) & (~0xfff);
mmap_ptr = mmap(0, mmap_size, PROT_READ, MAP_SHARED,
fileno(srcyuv_fp), mmap_start);
if (mmap_ptr == MAP_FAILED) {
printf("Failed to mmap YUV file (%s)\n", strerror(errno));
return 1;
}
srcyuv_ptr = (unsigned char *)mmap_ptr + (frame_start & 0xfff);
if (srcyuv_fourcc == VA_FOURCC_NV12) {
src_Y = srcyuv_ptr;
src_U = src_Y + frame_width * frame_height;
src_V = NULL;
} else if (srcyuv_fourcc == VA_FOURCC_IYUV ||
srcyuv_fourcc == VA_FOURCC_YV12) {
src_Y = srcyuv_ptr;
if (srcyuv_fourcc == VA_FOURCC_IYUV) {
src_U = src_Y + frame_width * frame_height;
src_V = src_U + (frame_width/2) * (frame_height/2);
} else { /* YV12 */
src_V = src_Y + frame_width * frame_height;
src_U = src_V + (frame_width/2) * (frame_height/2);
}
} else {
printf("Unsupported source YUV format\n");
exit(1);
}
upload_surface_yuv(va_dpy, surface_id,
srcyuv_fourcc, frame_width, frame_height,
src_Y, src_U, src_V);
if (mmap_ptr)
munmap(mmap_ptr, mmap_size);
 
return 0;
}
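/*
 * Worked example for the mmap window arithmetic in load_surface() above
 * (illustrative figures only, assuming 4 KiB pages and a 352x288 YUV420
 * source, i.e. frame_size = 0x25200):
 *   display_order = 3  ->  frame_start = 3 * 0x25200           = 0x6f600
 *   mmap_start    = frame_start & ~0xfff                       = 0x6f000
 *   mmap_size     = (frame_size + (frame_start & 0xfff) + 0xfff) & ~0xfff
 *                 = (0x25200 + 0x600 + 0xfff) & ~0xfff         = 0x26000
 * so the page-aligned window [0x6f000, 0x95000) fully covers the frame
 * [0x6f600, 0x94800) even though frame_start itself is not page aligned.
 */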
 
 
static int save_recyuv(VASurfaceID surface_id,
unsigned long long display_order,
unsigned long long encode_order)
{
unsigned char *dst_Y = NULL, *dst_U = NULL, *dst_V = NULL;
 
if (recyuv_fp == NULL)
return 0;
 
if (srcyuv_fourcc == VA_FOURCC_NV12) {
int uv_size = 2 * (frame_width/2) * (frame_height/2);
dst_Y = malloc(2*uv_size);
dst_U = malloc(uv_size);
} else if (srcyuv_fourcc == VA_FOURCC_IYUV ||
srcyuv_fourcc == VA_FOURCC_YV12) {
int uv_size = (frame_width/2) * (frame_height/2);
dst_Y = malloc(4*uv_size);
dst_U = malloc(uv_size);
dst_V = malloc(uv_size);
} else {
printf("Unsupported source YUV format\n");
exit(1);
}
download_surface_yuv(va_dpy, surface_id,
srcyuv_fourcc, frame_width, frame_height,
dst_Y, dst_U, dst_V);
fseek(recyuv_fp, display_order * frame_width * frame_height * 1.5, SEEK_SET);
 
if (srcyuv_fourcc == VA_FOURCC_NV12) {
int uv_size = 2 * (frame_width/2) * (frame_height/2);
fwrite(dst_Y, uv_size * 2, 1, recyuv_fp);
fwrite(dst_U, uv_size, 1, recyuv_fp);
} else if (srcyuv_fourcc == VA_FOURCC_IYUV ||
srcyuv_fourcc == VA_FOURCC_YV12) {
int uv_size = (frame_width/2) * (frame_height/2);
fwrite(dst_Y, uv_size * 4, 1, recyuv_fp);
if (srcyuv_fourcc == VA_FOURCC_IYUV) {
fwrite(dst_U, uv_size, 1, recyuv_fp);
fwrite(dst_V, uv_size, 1, recyuv_fp);
} else {
fwrite(dst_V, uv_size, 1, recyuv_fp);
fwrite(dst_U, uv_size, 1, recyuv_fp);
}
} else {
printf("Unsupported YUV format\n");
exit(1);
}
if (dst_Y)
free(dst_Y);
if (dst_U)
free(dst_U);
if (dst_V)
free(dst_V);
 
fflush(recyuv_fp);
 
return 0;
}
 
 
static int save_codeddata(unsigned long long display_order, unsigned long long encode_order)
{
VACodedBufferSegment *buf_list = NULL;
VAStatus va_status;
unsigned int coded_size = 0;
 
va_status = vaMapBuffer(va_dpy,coded_buf[display_order % SURFACE_NUM],(void **)(&buf_list));
CHECK_VASTATUS(va_status,"vaMapBuffer");
while (buf_list != NULL) {
coded_size += fwrite(buf_list->buf, 1, buf_list->size, coded_fp);
buf_list = (VACodedBufferSegment *) buf_list->next;
}
frame_size += coded_size;
vaUnmapBuffer(va_dpy,coded_buf[display_order % SURFACE_NUM]);
 
printf("\r "); /* return back to startpoint */
switch (encode_order % 4) {
case 0:
printf("|");
break;
case 1:
printf("/");
break;
case 2:
printf("-");
break;
case 3:
printf("\\");
break;
}
printf("%08lld", encode_order);
printf("(%06d bytes coded)",coded_size);
 
fflush(coded_fp);
return 0;
}
 
 
static struct storage_task_t * storage_task_dequeue(void)
{
struct storage_task_t *header;
 
pthread_mutex_lock(&encode_mutex);
 
header = storage_task_header;
if (storage_task_header != NULL) {
if (storage_task_tail == storage_task_header)
storage_task_tail = NULL;
storage_task_header = header->next;
}
pthread_mutex_unlock(&encode_mutex);
return header;
}
 
static int storage_task_queue(unsigned long long display_order, unsigned long long encode_order)
{
struct storage_task_t *tmp;
 
tmp = calloc(1, sizeof(struct storage_task_t));
tmp->display_order = display_order;
tmp->encode_order = encode_order;
 
pthread_mutex_lock(&encode_mutex);
if (storage_task_header == NULL) {
storage_task_header = tmp;
storage_task_tail = tmp;
} else {
storage_task_tail->next = tmp;
storage_task_tail = tmp;
}
 
srcsurface_status[display_order % SURFACE_NUM] = SRC_SURFACE_IN_STORAGE;
pthread_cond_signal(&encode_cond);
pthread_mutex_unlock(&encode_mutex);
return 0;
}
 
static void storage_task(unsigned long long display_order, unsigned long long encode_order)
{
unsigned int tmp;
VAStatus va_status;
tmp = GetTickCount();
va_status = vaSyncSurface(va_dpy, src_surface[display_order % SURFACE_NUM]);
CHECK_VASTATUS(va_status,"vaSyncSurface");
SyncPictureTicks += GetTickCount() - tmp;
tmp = GetTickCount();
save_codeddata(display_order, encode_order);
SavePictureTicks += GetTickCount() - tmp;
 
save_recyuv(ref_surface[display_order % SURFACE_NUM], display_order, encode_order);
 
/* reload a new frame data */
tmp = GetTickCount();
if (srcyuv_fp != NULL)
load_surface(src_surface[display_order % SURFACE_NUM], display_order + SURFACE_NUM);
UploadPictureTicks += GetTickCount() - tmp;
 
pthread_mutex_lock(&encode_mutex);
srcsurface_status[display_order % SURFACE_NUM] = SRC_SURFACE_IN_ENCODING;
pthread_mutex_unlock(&encode_mutex);
}
 
static void * storage_task_thread(void *t)
{
while (1) {
struct storage_task_t *current;
current = storage_task_dequeue();
if (current == NULL) {
pthread_mutex_lock(&encode_mutex);
pthread_cond_wait(&encode_cond, &encode_mutex);
pthread_mutex_unlock(&encode_mutex);
continue;
}
storage_task(current->display_order, current->encode_order);
free(current);
 
/* all frames are saved, exit the thread */
if (++frame_coded >= frame_count)
break;
}
 
return 0;
}
 
 
static int encode_frames(void)
{
unsigned int i, tmp;
VAStatus va_status;
//VASurfaceStatus surface_status;
 
/* upload RAW YUV data into all surfaces */
tmp = GetTickCount();
if (srcyuv_fp != NULL) {
for (i = 0; i < SURFACE_NUM; i++)
load_surface(src_surface[i], i);
} else
upload_source_YUV_once_for_all();
UploadPictureTicks += GetTickCount() - tmp;
/* ready for encoding */
memset(srcsurface_status, SRC_SURFACE_IN_ENCODING, sizeof(srcsurface_status));
memset(&seq_param, 0, sizeof(seq_param));
memset(&pic_param, 0, sizeof(pic_param));
memset(&slice_param, 0, sizeof(slice_param));
 
if (encode_syncmode == 0)
pthread_create(&encode_thread, NULL, storage_task_thread, NULL);
for (current_frame_encoding = 0; current_frame_encoding < frame_count; current_frame_encoding++) {
encoding2display_order(current_frame_encoding, intra_period, intra_idr_period, ip_period,
&current_frame_display, &current_frame_type);
if (current_frame_type == FRAME_IDR) {
numShortTerm = 0;
current_frame_num = 0;
current_IDR_display = current_frame_display;
}
 
/* check if the source frame is ready */
while (srcsurface_status[current_slot] != SRC_SURFACE_IN_ENCODING) {
usleep(1);
}
tmp = GetTickCount();
va_status = vaBeginPicture(va_dpy, context_id, src_surface[current_slot]);
CHECK_VASTATUS(va_status,"vaBeginPicture");
BeginPictureTicks += GetTickCount() - tmp;
tmp = GetTickCount();
if (current_frame_type == FRAME_IDR) {
render_sequence();
render_picture();
if (h264_packedheader) {
render_packedsequence();
render_packedpicture();
}
//if (rc_mode == VA_RC_CBR)
// render_packedsei();
//render_hrd();
} else {
//render_sequence();
render_picture();
//if (rc_mode == VA_RC_CBR)
// render_packedsei();
//render_hrd();
}
render_slice();
RenderPictureTicks += GetTickCount() - tmp;
tmp = GetTickCount();
va_status = vaEndPicture(va_dpy,context_id);
CHECK_VASTATUS(va_status,"vaEndPicture");;
EndPictureTicks += GetTickCount() - tmp;
 
if (encode_syncmode)
storage_task(current_frame_display, current_frame_encoding);
else /* queue the storage task queue */
storage_task_queue(current_frame_display, current_frame_encoding);
update_ReferenceFrames();
}
 
if (encode_syncmode == 0) {
int ret;
pthread_join(encode_thread, (void **)&ret);
}
return 0;
}
 
 
static int release_encode()
{
int i;
vaDestroySurfaces(va_dpy,&src_surface[0],SURFACE_NUM);
vaDestroySurfaces(va_dpy,&ref_surface[0],SURFACE_NUM);
 
for (i = 0; i < SURFACE_NUM; i++)
vaDestroyBuffer(va_dpy,coded_buf[i]);
vaDestroyContext(va_dpy,context_id);
vaDestroyConfig(va_dpy,config_id);
 
return 0;
}
 
static int deinit_va()
{
vaTerminate(va_dpy);
 
va_close_display(va_dpy);
 
return 0;
}
 
 
static int print_input()
{
printf("\n\nINPUT:Try to encode H264...\n");
if (rc_mode != -1)
printf("INPUT: RateControl : %s\n", rc_to_string(rc_mode));
printf("INPUT: Resolution : %dx%d, %d frames\n",
frame_width, frame_height, frame_count);
printf("INPUT: FrameRate : %d\n", frame_rate);
printf("INPUT: Bitrate : %d\n", frame_bitrate);
printf("INPUT: Slieces : %d\n", frame_slices);
printf("INPUT: IntraPeriod : %d\n", intra_period);
printf("INPUT: IDRPeriod : %d\n", intra_idr_period);
printf("INPUT: IpPeriod : %d\n", ip_period);
printf("INPUT: Initial QP : %d\n", initial_qp);
printf("INPUT: Min QP : %d\n", minimal_qp);
printf("INPUT: Source YUV : %s", srcyuv_fp?"FILE":"AUTO generated");
if (srcyuv_fp)
printf(":%s (fourcc %s)\n", srcyuv_fn, fourcc_to_string(srcyuv_fourcc));
else
printf("\n");
printf("INPUT: Coded Clip : %s\n", coded_fn);
if (recyuv_fp == NULL)
printf("INPUT: Rec Clip : %s\n", "Not save reconstructed frame");
else
printf("INPUT: Rec Clip : Save reconstructed frame into %s (fourcc %s)\n", recyuv_fn,
fourcc_to_string(srcyuv_fourcc));
printf("\n\n"); /* return back to startpoint */
return 0;
}
 
static int calc_PSNR(double *psnr)
{
unsigned char *srcyuv_ptr = NULL, *recyuv_ptr = NULL;
int tmp;
unsigned long long min_size;
unsigned long long i, sse=0;
double ssemean;
int fourM = 0x400000; /* 4M */
 
min_size = MIN(srcyuv_frames, frame_count) * frame_width * frame_height * 1.5;
for (i=0; i<min_size; i++) {
unsigned long long j = i % fourM;
if ((i % fourM) == 0) {
if (srcyuv_ptr)
munmap(srcyuv_ptr, fourM);
if (recyuv_ptr)
munmap(recyuv_ptr, fourM);
srcyuv_ptr = mmap(0, fourM, PROT_READ, MAP_SHARED, fileno(srcyuv_fp), i);
recyuv_ptr = mmap(0, fourM, PROT_READ, MAP_SHARED, fileno(recyuv_fp), i);
if ((srcyuv_ptr == MAP_FAILED) || (recyuv_ptr == MAP_FAILED)) {
printf("Failed to mmap YUV files\n");
return 1;
}
}
tmp = srcyuv_ptr[j] - recyuv_ptr[j];
sse += tmp * tmp;
}
ssemean = (double)sse/(double)min_size;
*psnr = 20.0*log10(255) - 10.0*log10(ssemean);
 
if (srcyuv_ptr)
munmap(srcyuv_ptr, fourM);
if (recyuv_ptr)
munmap(recyuv_ptr, fourM);
return 0;
}
 
static int print_performance(unsigned int PictureCount)
{
unsigned int psnr_ret = 1, others = 0;
double psnr = 0, total_size = frame_width * frame_height * 1.5 * frame_count;
 
if (calc_psnr && srcyuv_fp && recyuv_fp)
psnr_ret = calc_PSNR(&psnr);
others = TotalTicks - UploadPictureTicks - BeginPictureTicks
- RenderPictureTicks - EndPictureTicks - SyncPictureTicks - SavePictureTicks;
 
printf("\n\n");
 
printf("PERFORMANCE: Frame Rate : %.2f fps (%d frames, %d ms (%.2f ms per frame))\n",
(double) 1000*PictureCount / TotalTicks, PictureCount,
TotalTicks, ((double) TotalTicks) / (double) PictureCount);
printf("PERFORMANCE: Compression ratio : %d:1\n", (unsigned int)(total_size / frame_size));
if (psnr_ret == 0)
printf("PERFORMANCE: PSNR : %.2f (%lld frames calculated)\n",
psnr, MIN(frame_count, srcyuv_frames));
 
printf("PERFORMANCE: UploadPicture : %d ms (%.2f, %.2f%% percent)\n",
(int) UploadPictureTicks, ((double) UploadPictureTicks) / (double) PictureCount,
UploadPictureTicks/(double) TotalTicks/0.01);
printf("PERFORMANCE: vaBeginPicture : %d ms (%.2f, %.2f%% percent)\n",
(int) BeginPictureTicks, ((double) BeginPictureTicks) / (double) PictureCount,
BeginPictureTicks/(double) TotalTicks/0.01);
printf("PERFORMANCE: vaRenderHeader : %d ms (%.2f, %.2f%% percent)\n",
(int) RenderPictureTicks, ((double) RenderPictureTicks) / (double) PictureCount,
RenderPictureTicks/(double) TotalTicks/0.01);
printf("PERFORMANCE: vaEndPicture : %d ms (%.2f, %.2f%% percent)\n",
(int) EndPictureTicks, ((double) EndPictureTicks) / (double) PictureCount,
EndPictureTicks/(double) TotalTicks/0.01);
printf("PERFORMANCE: vaSyncSurface : %d ms (%.2f, %.2f%% percent)\n",
(int) SyncPictureTicks, ((double) SyncPictureTicks) / (double) PictureCount,
SyncPictureTicks/(double) TotalTicks/0.01);
printf("PERFORMANCE: SavePicture : %d ms (%.2f, %.2f%% percent)\n",
(int) SavePictureTicks, ((double) SavePictureTicks) / (double) PictureCount,
SavePictureTicks/(double) TotalTicks/0.01);
printf("PERFORMANCE: Others : %d ms (%.2f, %.2f%% percent)\n",
(int) others, ((double) others) / (double) PictureCount,
others/(double) TotalTicks/0.01);
 
if (encode_syncmode == 0)
printf("(Multithread enabled, the timing is only for reference)\n");
return 0;
}
 
 
int main(int argc,char **argv)
{
unsigned int start;
process_cmdline(argc, argv);
 
print_input();
start = GetTickCount();
init_va();
setup_encode();
encode_frames();
 
release_encode();
deinit_va();
 
TotalTicks += GetTickCount() - start;
print_performance(frame_count);
return 0;
}
/contrib/sdk/sources/vaapi/libva-1.4.1/test/encode/mpeg2vaenc.c
0,0 → 1,1546
/*
* Copyright (c) 2012 Intel Corporation. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
* IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/*
* Simple MPEG-2 encoder based on libVA.
*
*/
 
#include "sysdeps.h"
 
#include <getopt.h>
#include <unistd.h>
 
#include <sys/time.h>
#include <sys/types.h>
#include <fcntl.h>
#include <time.h>
#include <pthread.h>
 
#include <va/va.h>
#include <va/va_enc_mpeg2.h>
 
#include "va_display.h"
 
#define START_CODE_PICTURE 0x00000100
#define START_CODE_SLICE 0x00000101
#define START_CODE_USER 0x000001B2
#define START_CODE_SEQ 0x000001B3
#define START_CODE_EXT 0x000001B5
#define START_CODE_GOP 0x000001B8
 
#define CHROMA_FORMAT_RESERVED 0
#define CHROMA_FORMAT_420 1
#define CHROMA_FORMAT_422 2
#define CHROMA_FORMAT_444 3
 
#define MAX_SLICES 128
 
enum {
MPEG2_MODE_I = 0,
MPEG2_MODE_IP,
MPEG2_MODE_IPB,
};
 
enum {
MPEG2_LEVEL_LOW = 0,
MPEG2_LEVEL_MAIN,
MPEG2_LEVEL_HIGH,
};
 
#define CHECK_VASTATUS(va_status, func) \
if (va_status != VA_STATUS_SUCCESS) { \
fprintf(stderr, "%s:%s (%d) failed, exit\n", __func__, func, __LINE__); \
exit(1); \
}
 
static VAProfile mpeg2_va_profiles[] = {
VAProfileMPEG2Simple,
VAProfileMPEG2Main
};
 
static struct _mpeg2_sampling_density
{
int samplers_per_line;
int line_per_frame;
int frame_per_sec;
} mpeg2_upper_samplings[2][3] = {
{ { 0, 0, 0 },
{ 720, 576, 30 },
{ 0, 0, 0 },
},
 
{ { 352, 288, 30 },
{ 720, 576, 30 },
{ 1920, 1152, 60 },
}
};
 
struct mpeg2enc_context {
/* args */
int rate_control_mode;
int fps;
int mode; /* 0:I, 1:I/P, 2:I/P/B */
VAProfile profile;
int level;
int width;
int height;
int frame_size;
int num_pictures;
int qp;
FILE *ifp;
FILE *ofp;
unsigned char *frame_data_buffer;
int intra_period;
int ip_period;
int bit_rate; /* in kbps */
VAEncPictureType next_type;
int next_display_order;
int next_bframes;
int new_sequence;
int new_gop_header;
int gop_header_in_display_order;
 
/* VA resource */
VADisplay va_dpy;
VAEncSequenceParameterBufferMPEG2 seq_param;
VAEncPictureParameterBufferMPEG2 pic_param;
VAEncSliceParameterBufferMPEG2 slice_param[MAX_SLICES];
VAContextID context_id;
VAConfigID config_id;
VABufferID seq_param_buf_id; /* Sequence level parameter */
VABufferID pic_param_buf_id; /* Picture level parameter */
VABufferID slice_param_buf_id[MAX_SLICES]; /* Slice level parameter, multiple slices */
VABufferID codedbuf_buf_id; /* Output buffer, compressed data */
VABufferID packed_seq_header_param_buf_id;
VABufferID packed_seq_buf_id;
VABufferID packed_pic_header_param_buf_id;
VABufferID packed_pic_buf_id;
int num_slice_groups;
int codedbuf_i_size;
int codedbuf_pb_size;
 
/* thread */
pthread_t upload_thread_id;
int upload_thread_value;
int current_input_surface;
int current_upload_surface;
};
 
/*
* mpeg2enc helpers
*/
#define BITSTREAM_ALLOCATE_STEPPING 4096
 
struct __bitstream {
unsigned int *buffer;
int bit_offset;
int max_size_in_dword;
};
 
typedef struct __bitstream bitstream;
 
static unsigned int
swap32(unsigned int val)
{
unsigned char *pval = (unsigned char *)&val;
 
return ((pval[0] << 24) |
(pval[1] << 16) |
(pval[2] << 8) |
(pval[3] << 0));
}
 
static void
bitstream_start(bitstream *bs)
{
bs->max_size_in_dword = BITSTREAM_ALLOCATE_STEPPING;
bs->buffer = calloc(bs->max_size_in_dword * sizeof(int), 1);
bs->bit_offset = 0;
}
 
static void
bitstream_end(bitstream *bs)
{
int pos = (bs->bit_offset >> 5);
int bit_offset = (bs->bit_offset & 0x1f);
int bit_left = 32 - bit_offset;
 
if (bit_offset) {
bs->buffer[pos] = swap32((bs->buffer[pos] << bit_left));
}
}
static void
bitstream_put_ui(bitstream *bs, unsigned int val, int size_in_bits)
{
int pos = (bs->bit_offset >> 5);
int bit_offset = (bs->bit_offset & 0x1f);
int bit_left = 32 - bit_offset;
 
if (!size_in_bits)
return;
 
if (size_in_bits < 32)
val &= ((1 << size_in_bits) - 1);
 
bs->bit_offset += size_in_bits;
 
if (bit_left > size_in_bits) {
bs->buffer[pos] = (bs->buffer[pos] << size_in_bits | val);
} else {
size_in_bits -= bit_left;
bs->buffer[pos] = (bs->buffer[pos] << bit_left) | (val >> size_in_bits);
bs->buffer[pos] = swap32(bs->buffer[pos]);
 
if (pos + 1 == bs->max_size_in_dword) {
bs->max_size_in_dword += BITSTREAM_ALLOCATE_STEPPING;
bs->buffer = realloc(bs->buffer, bs->max_size_in_dword * sizeof(unsigned int));
}
 
bs->buffer[pos + 1] = val;
}
}
 
static void
bitstream_byte_aligning(bitstream *bs, int bit)
{
int bit_offset = (bs->bit_offset & 0x7);
int bit_left = 8 - bit_offset;
int new_val;
 
if (!bit_offset)
return;
 
assert(bit == 0 || bit == 1);
 
if (bit)
new_val = (1 << bit_left) - 1;
else
new_val = 0;
 
bitstream_put_ui(bs, new_val, bit_left);
}
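/*
 * Illustrative sketch (hypothetical helper, not part of the original test):
 * shows how the bitstream helpers above are meant to be used, here to emit
 * a bare GOP header start code.  bitstream_end() flushes the partially
 * filled dword and bit_offset then gives the payload length in bits.
 * Kept under #if 0 so it is never built.
 */
#if 0
static void demo_bitstream_usage(void)
{
    bitstream bs;

    bitstream_start(&bs);
    bitstream_put_ui(&bs, START_CODE_GOP, 32);
    bitstream_put_ui(&bs, 0, 25);  /* time_code */
    bitstream_put_ui(&bs, 1, 1);   /* closed_gop */
    bitstream_put_ui(&bs, 0, 1);   /* broken_link */
    bitstream_byte_aligning(&bs, 0);
    bitstream_end(&bs);
    /* (bs.bit_offset + 7) / 8 bytes are now valid in bs.buffer */
    free(bs.buffer);
}
#endif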
 
static struct mpeg2_frame_rate {
int code;
float value;
} frame_rate_tab[] = {
{1, 23.976},
{2, 24.0},
{3, 25.0},
{4, 29.97},
{5, 30},
{6, 50},
{7, 59.94},
{8, 60}
};
 
static int
find_frame_rate_code(const VAEncSequenceParameterBufferMPEG2 *seq_param)
{
unsigned int delta = -1;
int code = 1, i;
float frame_rate_value = seq_param->frame_rate *
(seq_param->sequence_extension.bits.frame_rate_extension_d + 1) /
(seq_param->sequence_extension.bits.frame_rate_extension_n + 1);
 
for (i = 0; i < sizeof(frame_rate_tab) / sizeof(frame_rate_tab[0]); i++) {
 
if (abs(1000 * frame_rate_tab[i].value - 1000 * frame_rate_value) < delta) {
code = frame_rate_tab[i].code;
delta = abs(1000 * frame_rate_tab[i].value - 1000 * frame_rate_value);
}
}
 
return code;
}
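/*
 * Example (illustrative only): with seq_param->frame_rate = 30 and both
 * frame_rate_extension fields left at 0, frame_rate_value is 30.0, which
 * matches the {5, 30} entry of frame_rate_tab exactly (delta 0), so
 * find_frame_rate_code() returns 5; non-integer rates such as 23.976 are
 * matched to the closest table entry in the same way.
 */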
 
static void
sps_rbsp(struct mpeg2enc_context *ctx,
const VAEncSequenceParameterBufferMPEG2 *seq_param,
bitstream *bs)
{
int frame_rate_code = find_frame_rate_code(seq_param);
 
if (ctx->new_sequence) {
bitstream_put_ui(bs, START_CODE_SEQ, 32);
bitstream_put_ui(bs, seq_param->picture_width, 12);
bitstream_put_ui(bs, seq_param->picture_height, 12);
bitstream_put_ui(bs, seq_param->aspect_ratio_information, 4);
bitstream_put_ui(bs, frame_rate_code, 4); /* frame_rate_code */
bitstream_put_ui(bs, (seq_param->bits_per_second + 399) / 400, 18); /* the low 18 bits of bit_rate */
bitstream_put_ui(bs, 1, 1); /* marker_bit */
bitstream_put_ui(bs, seq_param->vbv_buffer_size, 10);
bitstream_put_ui(bs, 0, 1); /* constraint_parameter_flag, always 0 for MPEG-2 */
bitstream_put_ui(bs, 0, 1); /* load_intra_quantiser_matrix */
bitstream_put_ui(bs, 0, 1); /* load_non_intra_quantiser_matrix */
 
bitstream_byte_aligning(bs, 0);
 
bitstream_put_ui(bs, START_CODE_EXT, 32);
bitstream_put_ui(bs, 1, 4); /* sequence_extension id */
bitstream_put_ui(bs, seq_param->sequence_extension.bits.profile_and_level_indication, 8);
bitstream_put_ui(bs, seq_param->sequence_extension.bits.progressive_sequence, 1);
bitstream_put_ui(bs, seq_param->sequence_extension.bits.chroma_format, 2);
bitstream_put_ui(bs, seq_param->picture_width >> 12, 2);
bitstream_put_ui(bs, seq_param->picture_height >> 12, 2);
bitstream_put_ui(bs, ((seq_param->bits_per_second + 399) / 400) >> 18, 12); /* bit_rate_extension */
bitstream_put_ui(bs, 1, 1); /* marker_bit */
bitstream_put_ui(bs, seq_param->vbv_buffer_size >> 10, 8);
bitstream_put_ui(bs, seq_param->sequence_extension.bits.low_delay, 1);
bitstream_put_ui(bs, seq_param->sequence_extension.bits.frame_rate_extension_n, 2);
bitstream_put_ui(bs, seq_param->sequence_extension.bits.frame_rate_extension_d, 5);
 
bitstream_byte_aligning(bs, 0);
}
 
if (ctx->new_gop_header) {
bitstream_put_ui(bs, START_CODE_GOP, 32);
bitstream_put_ui(bs, seq_param->gop_header.bits.time_code, 25);
bitstream_put_ui(bs, seq_param->gop_header.bits.closed_gop, 1);
bitstream_put_ui(bs, seq_param->gop_header.bits.broken_link, 1);
 
bitstream_byte_aligning(bs, 0);
}
}
 
static void
pps_rbsp(const VAEncSequenceParameterBufferMPEG2 *seq_param,
const VAEncPictureParameterBufferMPEG2 *pic_param,
bitstream *bs)
{
int i;
int chroma_420_type;
 
if (seq_param->sequence_extension.bits.chroma_format == CHROMA_FORMAT_420)
chroma_420_type = pic_param->picture_coding_extension.bits.progressive_frame;
else
chroma_420_type = 0;
 
bitstream_put_ui(bs, START_CODE_PICTURE, 32);
bitstream_put_ui(bs, pic_param->temporal_reference, 10);
bitstream_put_ui(bs,
pic_param->picture_type == VAEncPictureTypeIntra ? 1 :
pic_param->picture_type == VAEncPictureTypePredictive ? 2 : 3,
3);
bitstream_put_ui(bs, 0xFFFF, 16); /* vbv_delay, always 0xFFFF */
if (pic_param->picture_type == VAEncPictureTypePredictive ||
pic_param->picture_type == VAEncPictureTypeBidirectional) {
bitstream_put_ui(bs, 0, 1); /* full_pel_forward_vector, always 0 for MPEG-2 */
bitstream_put_ui(bs, 7, 3); /* forward_f_code, always 7 for MPEG-2 */
}
 
if (pic_param->picture_type == VAEncPictureTypeBidirectional) {
bitstream_put_ui(bs, 0, 1); /* full_pel_backward_vector, always 0 for MPEG-2 */
bitstream_put_ui(bs, 7, 3); /* backward_f_code, always 7 for MPEG-2 */
}
bitstream_put_ui(bs, 0, 1); /* extra_bit_picture, 0 */
 
bitstream_byte_aligning(bs, 0);
 
bitstream_put_ui(bs, START_CODE_EXT, 32);
bitstream_put_ui(bs, 8, 4); /* Picture Coding Extension ID: 8 */
bitstream_put_ui(bs, pic_param->f_code[0][0], 4);
bitstream_put_ui(bs, pic_param->f_code[0][1], 4);
bitstream_put_ui(bs, pic_param->f_code[1][0], 4);
bitstream_put_ui(bs, pic_param->f_code[1][1], 4);
 
bitstream_put_ui(bs, pic_param->picture_coding_extension.bits.intra_dc_precision, 2);
bitstream_put_ui(bs, pic_param->picture_coding_extension.bits.picture_structure, 2);
bitstream_put_ui(bs, pic_param->picture_coding_extension.bits.top_field_first, 1);
bitstream_put_ui(bs, pic_param->picture_coding_extension.bits.frame_pred_frame_dct, 1);
bitstream_put_ui(bs, pic_param->picture_coding_extension.bits.concealment_motion_vectors, 1);
bitstream_put_ui(bs, pic_param->picture_coding_extension.bits.q_scale_type, 1);
bitstream_put_ui(bs, pic_param->picture_coding_extension.bits.intra_vlc_format, 1);
bitstream_put_ui(bs, pic_param->picture_coding_extension.bits.alternate_scan, 1);
bitstream_put_ui(bs, pic_param->picture_coding_extension.bits.repeat_first_field, 1);
bitstream_put_ui(bs, chroma_420_type, 1);
bitstream_put_ui(bs, pic_param->picture_coding_extension.bits.progressive_frame, 1);
bitstream_put_ui(bs, pic_param->picture_coding_extension.bits.composite_display_flag, 1);
 
bitstream_byte_aligning(bs, 0);
}
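/*
 * Build the packed picture/sequence headers used with
 * VAEncPackedHeaderDataBufferType. The return value is the header length in
 * bits; *header_buffer is allocated by the bitstream helpers and is freed by
 * the caller (see begin_picture()).
 */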
 
static int
build_packed_pic_buffer(const VAEncSequenceParameterBufferMPEG2 *seq_param,
const VAEncPictureParameterBufferMPEG2 *pic_param,
unsigned char **header_buffer)
{
bitstream bs;
 
bitstream_start(&bs);
pps_rbsp(seq_param, pic_param, &bs);
bitstream_end(&bs);
 
*header_buffer = (unsigned char *)bs.buffer;
return bs.bit_offset;
}
 
static int
build_packed_seq_buffer(struct mpeg2enc_context *ctx,
const VAEncSequenceParameterBufferMPEG2 *seq_param,
unsigned char **header_buffer)
{
bitstream bs;
 
bitstream_start(&bs);
sps_rbsp(ctx, seq_param, &bs);
bitstream_end(&bs);
 
*header_buffer = (unsigned char *)bs.buffer;
return bs.bit_offset;
}
 
/*
* mpeg2enc
*/
#define SID_INPUT_PICTURE_0 0
#define SID_INPUT_PICTURE_1 1
#define SID_REFERENCE_PICTURE_L0 2
#define SID_REFERENCE_PICTURE_L1 3
#define SID_RECON_PICTURE 4
#define SID_NUMBER (SID_RECON_PICTURE + 1)
 
static VASurfaceID surface_ids[SID_NUMBER];
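/*
 * Surface slots: the two input surfaces are ping-ponged between the encode
 * path and the upload thread (current_input_surface / current_upload_surface),
 * while the L0/L1 reference surfaces and the reconstructed surface are
 * rotated in end_picture() after each frame.
 */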
 
/*
* upload thread function
*/
static void *
upload_yuv_to_surface(void *data)
{
struct mpeg2enc_context *ctx = data;
VAImage surface_image;
VAStatus va_status;
void *surface_p = NULL;
unsigned char *y_src, *u_src, *v_src;
unsigned char *y_dst, *u_dst, *v_dst;
int y_size = ctx->width * ctx->height;
int u_size = (ctx->width >> 1) * (ctx->height >> 1);
int row, col;
size_t n_items;
 
do {
n_items = fread(ctx->frame_data_buffer, ctx->frame_size, 1, ctx->ifp);
} while (n_items != 1);
 
va_status = vaDeriveImage(ctx->va_dpy, surface_ids[ctx->current_upload_surface], &surface_image);
CHECK_VASTATUS(va_status,"vaDeriveImage");
 
va_status = vaMapBuffer(ctx->va_dpy, surface_image.buf, &surface_p);
assert(VA_STATUS_SUCCESS == va_status);
y_src = ctx->frame_data_buffer;
u_src = ctx->frame_data_buffer + y_size; /* U plane of the I420 input */
v_src = ctx->frame_data_buffer + y_size + u_size;
 
y_dst = surface_p + surface_image.offsets[0];
u_dst = surface_p + surface_image.offsets[1]; /* UV plane for NV12, U plane otherwise */
v_dst = surface_p + surface_image.offsets[2];
 
/* Y plane */
for (row = 0; row < surface_image.height; row++) {
memcpy(y_dst, y_src, surface_image.width);
y_dst += surface_image.pitches[0];
y_src += ctx->width;
}
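/*
 * Chroma planes: the input file is I420 (planar U then V). For an NV12
 * surface the U and V samples are interleaved into a single UV plane;
 * otherwise they are copied into separate planes.
 */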
 
if (surface_image.format.fourcc == VA_FOURCC_NV12) { /* UV plane */
for (row = 0; row < surface_image.height / 2; row++) {
for (col = 0; col < surface_image.width / 2; col++) {
u_dst[col * 2] = u_src[col];
u_dst[col * 2 + 1] = v_src[col];
}
 
u_dst += surface_image.pitches[1];
u_src += (ctx->width / 2);
v_src += (ctx->width / 2);
}
} else {
for (row = 0; row < surface_image.height / 2; row++) {
for (col = 0; col < surface_image.width / 2; col++) {
u_dst[col] = u_src[col];
v_dst[col] = v_src[col];
}
 
u_dst += surface_image.pitches[1];
v_dst += surface_image.pitches[2];
u_src += (ctx->width / 2);
v_src += (ctx->width / 2);
}
}
 
vaUnmapBuffer(ctx->va_dpy, surface_image.buf);
vaDestroyImage(ctx->va_dpy, surface_image.image_id);
 
return NULL;
}
 
static void
mpeg2enc_exit(struct mpeg2enc_context *ctx, int exit_code)
{
if (ctx->frame_data_buffer) {
free(ctx->frame_data_buffer);
ctx->frame_data_buffer = NULL;
}
 
if (ctx->ifp) {
fclose(ctx->ifp);
ctx->ifp = NULL;
}
 
if (ctx->ofp) {
fclose(ctx->ofp);
ctx->ofp = NULL;
}
 
exit(exit_code);
}
 
static void
usage(char *program)
{
fprintf(stderr, "Usage: %s --help\n", program);
fprintf(stderr, "\t--help print this message\n");
fprintf(stderr, "Usage: %s <width> <height> <ifile> <ofile> [options]\n", program);
fprintf(stderr, "\t<width> specifies the frame width\n");
fprintf(stderr, "\t<height> specifies the frame height\n");
fprintf(stderr, "\t<ifile> specifies the I420/IYUV YUV file\n");
fprintf(stderr, "\t<ofile> specifies the encoded MPEG-2 file\n");
fprintf(stderr, "where options include:\n");
fprintf(stderr, "\t--cqp <QP> const qp mode with specified <QP>\n");
fprintf(stderr, "\t--fps <FPS> specify the frame rate\n");
fprintf(stderr, "\t--mode <MODE> specify the mode 0 (I), 1 (I/P) and 2 (I/P/B)\n");
fprintf(stderr, "\t--profile <PROFILE> specify the profile 0(Simple), or 1(Main, default)\n");
fprintf(stderr, "\t--level <LEVEL> specify the level 0(Low), 1(Main, default) or 2(High)\n");
}
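/*
 * Pick the lowest profile/level (starting from the requested ones) whose
 * sampling limits cover the configured width, height and frame rate.
 * Falls back to Main profile / High level with a warning if nothing fits.
 */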
 
void
mpeg2_profile_level(struct mpeg2enc_context *ctx,
int profile,
int level)
{
int l = 2, p;
 
for (p = profile; p < 2; p++) {
for (l = level; l < 3; l++) {
if (ctx->width <= mpeg2_upper_samplings[p][l].samplers_per_line &&
ctx->height <= mpeg2_upper_samplings[p][l].line_per_frame &&
ctx->fps <= mpeg2_upper_samplings[p][l].frame_per_sec) {
goto __find;
}
}
}
 
if (p == 2) {
fprintf(stderr, "Warning: can't find a proper profile and level for the specified width/height/fps\n");
p = 1;
l = 2;
}
 
__find:
ctx->profile = mpeg2_va_profiles[p];
ctx->level = l;
}
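/*
 * Command line handling: the first four arguments are positional
 * (<width> <height> <ifile> <ofile>); the input size is validated against
 * the derived I420 frame size, then the remaining options are parsed with
 * getopt_long() starting at optind = 5.
 */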
 
static void
parse_args(struct mpeg2enc_context *ctx, int argc, char **argv)
{
int c, tmp;
int option_index = 0;
long file_size;
int profile = 1, level = 1;
 
static struct option long_options[] = {
{"help", no_argument, 0, 'h'},
{"cqp", required_argument, 0, 'c'},
{"fps", required_argument, 0, 'f'},
{"mode", required_argument, 0, 'm'},
{"profile", required_argument, 0, 'p'},
{"level", required_argument, 0, 'l'},
{ NULL, 0, NULL, 0 }
};
 
if ((argc == 2 && strcmp(argv[1], "--help") == 0) ||
(argc < 5))
goto print_usage;
 
ctx->width = atoi(argv[1]);
ctx->height = atoi(argv[2]);
 
if (ctx->width <= 0 || ctx->height <= 0) {
fprintf(stderr, "<width> and <height> must be greater than 0\n");
goto err_exit;
}
 
ctx->ifp = fopen(argv[3], "rb");
 
if (ctx->ifp == NULL) {
fprintf(stderr, "Can't open the input file\n");
goto err_exit;
}
 
fseek(ctx->ifp, 0l, SEEK_END);
file_size = ftell(ctx->ifp);
ctx->frame_size = ctx->width * ctx->height * 3 / 2;
 
if ((file_size < ctx->frame_size) ||
(file_size % ctx->frame_size)) {
fprintf(stderr, "The input file size %ld isn't a multiple of the frame size %d\n", file_size, ctx->frame_size);
goto err_exit;
}
 
ctx->num_pictures = file_size / ctx->frame_size;
fseek(ctx->ifp, 0l, SEEK_SET);
ctx->ofp = fopen(argv[4], "wb");
if (ctx->ofp == NULL) {
fprintf(stderr, "Can't create the output file\n");
goto err_exit;
}
 
opterr = 0;
ctx->fps = 30;
ctx->qp = 8;
ctx->rate_control_mode = VA_RC_CQP;
ctx->mode = MPEG2_MODE_IP;
ctx->profile = VAProfileMPEG2Main;
ctx->level = MPEG2_LEVEL_MAIN;
 
optind = 5;
 
while((c = getopt_long(argc, argv,
"",
long_options,
&option_index)) != -1) {
switch(c) {
case 'c':
tmp = atoi(optarg);
 
/* only support q_scale_type = 0 */
if (tmp > 62 || tmp < 2) {
fprintf(stderr, "Warning: QP must be in [2, 62]\n");
 
if (tmp > 62)
tmp = 62;
 
if (tmp < 2)
tmp = 2;
}
 
ctx->qp = tmp & 0xFE;
ctx->rate_control_mode = VA_RC_CQP;
 
break;
 
case 'f':
tmp = atoi(optarg);
 
if (tmp <= 0)
fprintf(stderr, "Warning: FPS must be greater than 0\n");
else
ctx->fps = tmp;
 
ctx->rate_control_mode = VA_RC_CBR;
 
break;
 
case 'm':
tmp = atoi(optarg);
if (tmp < MPEG2_MODE_I || tmp > MPEG2_MODE_IPB)
fprintf(stderr, "Waning: MODE must be 0, 1, or 2\n");
else
ctx->mode = tmp;
 
break;
 
case 'p':
tmp = atoi(optarg);
if (tmp < 0 || tmp > 1)
fprintf(stderr, "Waning: PROFILE must be 0 or 1\n");
else
profile = tmp;
 
break;
 
case 'l':
tmp = atoi(optarg);
if (tmp < MPEG2_LEVEL_LOW || tmp > MPEG2_LEVEL_HIGH)
fprintf(stderr, "Waning: LEVEL must be 0, 1, or 2\n");
else
level = tmp;
 
break;
 
case '?':
fprintf(stderr, "Error: unkown command options\n");
 
case 'h':
goto print_usage;
}
}
 
mpeg2_profile_level(ctx, profile, level);
 
return;
 
print_usage:
usage(argv[0]);
err_exit:
mpeg2enc_exit(ctx, 1);
}
 
/*
* init
*/
void
mpeg2enc_init_sequence_parameter(struct mpeg2enc_context *ctx,
VAEncSequenceParameterBufferMPEG2 *seq_param)
{
int profile = 4, level = 8;
 
switch (ctx->profile) {
case VAProfileMPEG2Simple:
profile = 5;
break;
 
case VAProfileMPEG2Main:
profile = 4;
break;
 
default:
assert(0);
break;
}
 
switch (ctx->level) {
case MPEG2_LEVEL_LOW:
level = 10;
break;
 
case MPEG2_LEVEL_MAIN:
level = 8;
break;
 
case MPEG2_LEVEL_HIGH:
level = 4;
break;
 
default:
assert(0);
break;
}
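/*
 * The switches above pick the MPEG-2 profile/level codes that are packed
 * into profile_and_level_indication below: profile in the high nibble,
 * level in the low nibble, e.g. Main profile (4) at Main level (8) gives
 * (4 << 4) | 8 = 0x48.
 */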
seq_param->intra_period = ctx->intra_period;
seq_param->ip_period = ctx->ip_period; /* FIXME: ??? */
seq_param->picture_width = ctx->width;
seq_param->picture_height = ctx->height;
 
if (ctx->bit_rate > 0)
seq_param->bits_per_second = 1024 * ctx->bit_rate; /* use kbps as input */
else
seq_param->bits_per_second = 0x3FFFF * 400;
 
seq_param->frame_rate = ctx->fps;
seq_param->aspect_ratio_information = 1;
seq_param->vbv_buffer_size = 3; /* B = 16 * 1024 * vbv_buffer_size */
 
seq_param->sequence_extension.bits.profile_and_level_indication = profile << 4 | level;
seq_param->sequence_extension.bits.progressive_sequence = 1; /* progressive frame-pictures */
seq_param->sequence_extension.bits.chroma_format = CHROMA_FORMAT_420; /* 4:2:0 */
seq_param->sequence_extension.bits.low_delay = 0; /* FIXME */
seq_param->sequence_extension.bits.frame_rate_extension_n = 0;
seq_param->sequence_extension.bits.frame_rate_extension_d = 0;
 
seq_param->gop_header.bits.time_code = (1 << 12); /* bit12: marker_bit */
seq_param->gop_header.bits.closed_gop = 0;
seq_param->gop_header.bits.broken_link = 0;
}
 
static void
mpeg2enc_init_picture_parameter(struct mpeg2enc_context *ctx,
VAEncPictureParameterBufferMPEG2 *pic_param)
{
pic_param->forward_reference_picture = VA_INVALID_ID;
pic_param->backward_reference_picture = VA_INVALID_ID;
pic_param->reconstructed_picture = VA_INVALID_ID;
pic_param->coded_buf = VA_INVALID_ID;
pic_param->picture_type = VAEncPictureTypeIntra;
 
pic_param->temporal_reference = 0;
pic_param->f_code[0][0] = 0xf;
pic_param->f_code[0][1] = 0xf;
pic_param->f_code[1][0] = 0xf;
pic_param->f_code[1][1] = 0xf;
 
pic_param->picture_coding_extension.bits.intra_dc_precision = 0; /* 8bits */
pic_param->picture_coding_extension.bits.picture_structure = 3; /* frame picture */
pic_param->picture_coding_extension.bits.top_field_first = 0;
pic_param->picture_coding_extension.bits.frame_pred_frame_dct = 1; /* FIXME */
pic_param->picture_coding_extension.bits.concealment_motion_vectors = 0;
pic_param->picture_coding_extension.bits.q_scale_type = 0;
pic_param->picture_coding_extension.bits.intra_vlc_format = 0;
pic_param->picture_coding_extension.bits.alternate_scan = 0;
pic_param->picture_coding_extension.bits.repeat_first_field = 0;
pic_param->picture_coding_extension.bits.progressive_frame = 1;
pic_param->picture_coding_extension.bits.composite_display_flag = 0;
}
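/*
 * VA-API setup: open the display, check that the chosen profile exposes the
 * VAEntrypointEncSlice entry point and supports a YUV420 render target plus
 * the requested rate control mode, then create the config, the encode
 * context and the five work surfaces.
 */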
 
static void
mpeg2enc_alloc_va_resources(struct mpeg2enc_context *ctx)
{
VAEntrypoint *entrypoint_list;
VAConfigAttrib attrib_list[2];
VAStatus va_status;
int max_entrypoints, num_entrypoints, entrypoint;
int major_ver, minor_ver;
 
ctx->va_dpy = va_open_display();
va_status = vaInitialize(ctx->va_dpy,
&major_ver,
&minor_ver);
CHECK_VASTATUS(va_status, "vaInitialize");
 
max_entrypoints = vaMaxNumEntrypoints(ctx->va_dpy);
entrypoint_list = malloc(max_entrypoints * sizeof(VAEntrypoint));
vaQueryConfigEntrypoints(ctx->va_dpy,
ctx->profile,
entrypoint_list,
&num_entrypoints);
 
for (entrypoint = 0; entrypoint < num_entrypoints; entrypoint++) {
if (entrypoint_list[entrypoint] == VAEntrypointEncSlice)
break;
}
 
free(entrypoint_list);
 
if (entrypoint == num_entrypoints) {
/* couldn't find the encode slice entry point */
assert(0);
}
 
/* find out the format for the render target, and rate control mode */
attrib_list[0].type = VAConfigAttribRTFormat;
attrib_list[1].type = VAConfigAttribRateControl;
vaGetConfigAttributes(ctx->va_dpy,
ctx->profile,
VAEntrypointEncSlice,
&attrib_list[0],
2);
 
if ((attrib_list[0].value & VA_RT_FORMAT_YUV420) == 0) {
/* couldn't find the desired YUV420 RT format */
assert(0);
}
 
if ((attrib_list[1].value & ctx->rate_control_mode) == 0) {
/* can't find a matching RC mode */
fprintf(stderr, "The requested RC mode %d isn't supported, exit\n", ctx->rate_control_mode);
assert(0);
}
 
attrib_list[0].value = VA_RT_FORMAT_YUV420; /* set to desired RT format */
attrib_list[1].value = ctx->rate_control_mode; /* set to desired RC mode */
 
va_status = vaCreateConfig(ctx->va_dpy,
ctx->profile,
VAEntrypointEncSlice,
attrib_list,
2,
&ctx->config_id);
CHECK_VASTATUS(va_status, "vaCreateConfig");
 
/* Create a context for this encode pipe */
va_status = vaCreateContext(ctx->va_dpy,
ctx->config_id,
ctx->width,
ctx->height,
VA_PROGRESSIVE,
0,
0,
&ctx->context_id);
CHECK_VASTATUS(va_status, "vaCreateContext");
 
va_status = vaCreateSurfaces(ctx->va_dpy,
VA_RT_FORMAT_YUV420,
ctx->width,
ctx->height,
surface_ids,
SID_NUMBER,
NULL,
0);
CHECK_VASTATUS(va_status, "vaCreateSurfaces");
}
 
static void
mpeg2enc_init(struct mpeg2enc_context *ctx)
{
int i;
 
ctx->frame_data_buffer = (unsigned char *)malloc(ctx->frame_size);
ctx->seq_param_buf_id = VA_INVALID_ID;
ctx->pic_param_buf_id = VA_INVALID_ID;
ctx->packed_seq_header_param_buf_id = VA_INVALID_ID;
ctx->packed_seq_buf_id = VA_INVALID_ID;
ctx->packed_pic_header_param_buf_id = VA_INVALID_ID;
ctx->packed_pic_buf_id = VA_INVALID_ID;
ctx->codedbuf_buf_id = VA_INVALID_ID;
ctx->codedbuf_i_size = ctx->frame_size;
ctx->codedbuf_pb_size = 0;
ctx->next_display_order = 0;
ctx->next_type = VAEncPictureTypeIntra;
 
if (ctx->mode == MPEG2_MODE_I) {
ctx->intra_period = 1;
ctx->ip_period = 0;
} else if (ctx->mode == MPEG2_MODE_IP) {
ctx->intra_period = 16;
ctx->ip_period = 0;
} else {
ctx->intra_period = 16;
ctx->ip_period = 2;
}
 
ctx->next_bframes = ctx->ip_period;
 
ctx->new_sequence = 1;
ctx->new_gop_header = 1;
ctx->gop_header_in_display_order = 0;
 
ctx->bit_rate = -1;
 
for (i = 0; i < MAX_SLICES; i++) {
ctx->slice_param_buf_id[i] = VA_INVALID_ID;
}
 
mpeg2enc_init_sequence_parameter(ctx, &ctx->seq_param);
mpeg2enc_init_picture_parameter(ctx, &ctx->pic_param);
mpeg2enc_alloc_va_resources(ctx);
 
/* thread */
ctx->current_input_surface = SID_INPUT_PICTURE_0;
ctx->current_upload_surface = SID_INPUT_PICTURE_1;
ctx->upload_thread_value = pthread_create(&ctx->upload_thread_id,
NULL,
upload_yuv_to_surface,
ctx);
}
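/*
 * Build the 25-bit GOP time_code for the given display order:
 * | drop_frame(1) | hours(5) | minutes(6) | marker(1) | seconds(6) | pictures(6) |
 * with pictures in the low bits, assuming an integer frame rate of at most 60 fps.
 */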
 
static int
mpeg2enc_time_code(VAEncSequenceParameterBufferMPEG2 *seq_param,
int num_frames)
{
int fps = (int)(seq_param->frame_rate + 0.5);
int time_code = 0;
int time_code_pictures, time_code_seconds, time_code_minutes, time_code_hours;
int drop_frame_flag = 0;
 
assert(fps <= 60);
 
time_code_seconds = num_frames / fps;
time_code_pictures = num_frames % fps;
time_code |= time_code_pictures;
 
time_code_minutes = time_code_seconds / 60;
time_code_seconds = time_code_seconds % 60;
time_code |= (time_code_seconds << 6);
 
time_code_hours = time_code_minutes / 60;
time_code_minutes = time_code_minutes % 60;
 
time_code |= (1 << 12); /* marker_bit */
time_code |= (time_code_minutes << 13);
 
time_code_hours = time_code_hours % 24;
time_code |= (time_code_hours << 19);
 
time_code |= (drop_frame_flag << 24);
 
return time_code;
}
 
/*
* run
*/
static void
mpeg2enc_update_sequence_parameter(struct mpeg2enc_context *ctx,
VAEncPictureType picture_type,
int coded_order,
int display_order)
{
VAEncSequenceParameterBufferMPEG2 *seq_param = &ctx->seq_param;
 
/* update the time_code info for the new GOP */
if (ctx->new_gop_header) {
seq_param->gop_header.bits.time_code = mpeg2enc_time_code(seq_param, display_order);
}
}
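/*
 * Per-frame picture parameters: temporal_reference counts from the start of
 * the current GOP, f_codes are chosen per level (0xf means "not used"), and
 * the forward/backward reference surfaces are set according to the picture
 * type (none for I, forward only for P, both for B).
 */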
 
static void
mpeg2enc_update_picture_parameter(struct mpeg2enc_context *ctx,
VAEncPictureType picture_type,
int coded_order,
int display_order)
{
VAEncPictureParameterBufferMPEG2 *pic_param = &ctx->pic_param;
uint8_t f_code_x, f_code_y;
 
pic_param->picture_type = picture_type;
pic_param->temporal_reference = (display_order - ctx->gop_header_in_display_order) & 0x3FF;
pic_param->reconstructed_picture = surface_ids[SID_RECON_PICTURE];
pic_param->forward_reference_picture = surface_ids[SID_REFERENCE_PICTURE_L0];
pic_param->backward_reference_picture = surface_ids[SID_REFERENCE_PICTURE_L1];
 
f_code_x = 0xf;
f_code_y = 0xf;
if (pic_param->picture_type != VAEncPictureTypeIntra) {
if (ctx->level == MPEG2_LEVEL_LOW) {
f_code_x = 7;
f_code_y = 4;
} else if (ctx->level == MPEG2_LEVEL_MAIN) {
f_code_x = 8;
f_code_y = 5;
} else {
f_code_x = 9;
f_code_y = 5;
}
}
if (pic_param->picture_type == VAEncPictureTypeIntra) {
pic_param->f_code[0][0] = 0xf;
pic_param->f_code[0][1] = 0xf;
pic_param->f_code[1][0] = 0xf;
pic_param->f_code[1][1] = 0xf;
pic_param->forward_reference_picture = VA_INVALID_SURFACE;
pic_param->backward_reference_picture = VA_INVALID_SURFACE;
 
} else if (pic_param->picture_type == VAEncPictureTypePredictive) {
pic_param->f_code[0][0] = f_code_x;
pic_param->f_code[0][1] = f_code_y;
pic_param->f_code[1][0] = 0xf;
pic_param->f_code[1][1] = 0xf;
pic_param->forward_reference_picture = surface_ids[SID_REFERENCE_PICTURE_L0];
pic_param->backward_reference_picture = VA_INVALID_SURFACE;
} else if (pic_param->picture_type == VAEncPictureTypeBidirectional) {
pic_param->f_code[0][0] = f_code_x;
pic_param->f_code[0][1] = f_code_y;
pic_param->f_code[1][0] = f_code_x;
pic_param->f_code[1][1] = f_code_y;
pic_param->forward_reference_picture = surface_ids[SID_REFERENCE_PICTURE_L0];
pic_param->backward_reference_picture = surface_ids[SID_REFERENCE_PICTURE_L1];
} else {
assert(0);
}
}
 
static void
mpeg2enc_update_picture_parameter_buffer(struct mpeg2enc_context *ctx,
VAEncPictureType picture_type,
int coded_order,
int display_order)
{
VAEncPictureParameterBufferMPEG2 *pic_param = &ctx->pic_param;
VAStatus va_status;
 
/* update the coded buffer id */
pic_param->coded_buf = ctx->codedbuf_buf_id;
va_status = vaCreateBuffer(ctx->va_dpy,
ctx->context_id,
VAEncPictureParameterBufferType,
sizeof(*pic_param),
1,
pic_param,
&ctx->pic_param_buf_id);
CHECK_VASTATUS(va_status, "vaCreateBuffer");
}
 
static void
mpeg2enc_update_slice_parameter(struct mpeg2enc_context *ctx, VAEncPictureType picture_type)
{
VAEncSequenceParameterBufferMPEG2 *seq_param;
VAEncPictureParameterBufferMPEG2 *pic_param;
VAEncSliceParameterBufferMPEG2 *slice_param;
VAStatus va_status;
int i, width_in_mbs, height_in_mbs;
 
pic_param = &ctx->pic_param;
assert(pic_param->picture_coding_extension.bits.q_scale_type == 0);
 
seq_param = &ctx->seq_param;
width_in_mbs = (seq_param->picture_width + 15) / 16;
height_in_mbs = (seq_param->picture_height + 15) / 16;
ctx->num_slice_groups = 1;
 
for (i = 0; i < height_in_mbs; i++) {
slice_param = &ctx->slice_param[i];
slice_param->macroblock_address = i * width_in_mbs;
slice_param->num_macroblocks = width_in_mbs;
slice_param->is_intra_slice = (picture_type == VAEncPictureTypeIntra);
slice_param->quantiser_scale_code = ctx->qp / 2;
}
 
va_status = vaCreateBuffer(ctx->va_dpy,
ctx->context_id,
VAEncSliceParameterBufferType,
sizeof(*slice_param),
height_in_mbs,
ctx->slice_param,
ctx->slice_param_buf_id);
CHECK_VASTATUS(va_status, "vaCreateBuffer");;
}
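/*
 * Per-picture setup: wait for the upload thread, swap the input/upload
 * surfaces, refresh the sequence/picture parameters, and create the packed
 * sequence header (only on a new sequence or GOP), the packed picture
 * header, the sequence parameter buffer and the slice parameter buffer.
 */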
 
static int
begin_picture(struct mpeg2enc_context *ctx,
int coded_order,
int display_order,
VAEncPictureType picture_type)
{
VAStatus va_status;
int tmp;
VAEncPackedHeaderParameterBuffer packed_header_param_buffer;
unsigned int length_in_bits;
unsigned char *packed_seq_buffer = NULL, *packed_pic_buffer = NULL;
 
if (ctx->upload_thread_value != 0) {
fprintf(stderr, "FATAL error!!!\n");
exit(1);
}
pthread_join(ctx->upload_thread_id, NULL);
 
ctx->upload_thread_value = -1;
tmp = ctx->current_input_surface;
ctx->current_input_surface = ctx->current_upload_surface;
ctx->current_upload_surface = tmp;
 
mpeg2enc_update_sequence_parameter(ctx, picture_type, coded_order, display_order);
mpeg2enc_update_picture_parameter(ctx, picture_type, coded_order, display_order);
 
if (ctx->new_sequence || ctx->new_gop_header) {
assert(picture_type == VAEncPictureTypeIntra);
length_in_bits = build_packed_seq_buffer(ctx, &ctx->seq_param, &packed_seq_buffer);
packed_header_param_buffer.type = VAEncPackedHeaderMPEG2_SPS;
packed_header_param_buffer.has_emulation_bytes = 0;
packed_header_param_buffer.bit_length = length_in_bits;
va_status = vaCreateBuffer(ctx->va_dpy,
ctx->context_id,
VAEncPackedHeaderParameterBufferType,
sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer,
&ctx->packed_seq_header_param_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
va_status = vaCreateBuffer(ctx->va_dpy,
ctx->context_id,
VAEncPackedHeaderDataBufferType,
(length_in_bits + 7) / 8, 1, packed_seq_buffer,
&ctx->packed_seq_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
free(packed_seq_buffer);
}
 
length_in_bits = build_packed_pic_buffer(&ctx->seq_param, &ctx->pic_param, &packed_pic_buffer);
packed_header_param_buffer.type = VAEncPackedHeaderMPEG2_PPS;
packed_header_param_buffer.has_emulation_bytes = 0;
packed_header_param_buffer.bit_length = length_in_bits;
 
va_status = vaCreateBuffer(ctx->va_dpy,
ctx->context_id,
VAEncPackedHeaderParameterBufferType,
sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer,
&ctx->packed_pic_header_param_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
va_status = vaCreateBuffer(ctx->va_dpy,
ctx->context_id,
VAEncPackedHeaderDataBufferType,
(length_in_bits + 7) / 8, 1, packed_pic_buffer,
&ctx->packed_pic_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
free(packed_pic_buffer);
 
/* sequence parameter set */
VAEncSequenceParameterBufferMPEG2 *seq_param = &ctx->seq_param;
va_status = vaCreateBuffer(ctx->va_dpy,
ctx->context_id,
VAEncSequenceParameterBufferType,
sizeof(*seq_param),
1,
seq_param,
&ctx->seq_param_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");;
 
/* slice parameter */
mpeg2enc_update_slice_parameter(ctx, picture_type);
 
return 0;
}
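/*
 * Submit one frame: the parameter and packed-header buffers are rendered in
 * one call between vaBeginPicture()/vaEndPicture(), and the slice parameters
 * in a second vaRenderPicture() call.
 */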
 
static int
mpeg2enc_render_picture(struct mpeg2enc_context *ctx)
{
VAStatus va_status;
VABufferID va_buffers[16];
unsigned int num_va_buffers = 0;
 
va_buffers[num_va_buffers++] = ctx->seq_param_buf_id;
va_buffers[num_va_buffers++] = ctx->pic_param_buf_id;
 
if (ctx->packed_seq_header_param_buf_id != VA_INVALID_ID)
va_buffers[num_va_buffers++] = ctx->packed_seq_header_param_buf_id;
 
if (ctx->packed_seq_buf_id != VA_INVALID_ID)
va_buffers[num_va_buffers++] = ctx->packed_seq_buf_id;
 
if (ctx->packed_pic_header_param_buf_id != VA_INVALID_ID)
va_buffers[num_va_buffers++] = ctx->packed_pic_header_param_buf_id;
 
if (ctx->packed_pic_buf_id != VA_INVALID_ID)
va_buffers[num_va_buffers++] = ctx->packed_pic_buf_id;
 
va_status = vaBeginPicture(ctx->va_dpy,
ctx->context_id,
surface_ids[ctx->current_input_surface]);
CHECK_VASTATUS(va_status,"vaBeginPicture");
 
va_status = vaRenderPicture(ctx->va_dpy,
ctx->context_id,
va_buffers,
num_va_buffers);
CHECK_VASTATUS(va_status,"vaRenderPicture");
 
va_status = vaRenderPicture(ctx->va_dpy,
ctx->context_id,
&ctx->slice_param_buf_id[0],
ctx->num_slice_groups);
CHECK_VASTATUS(va_status,"vaRenderPicture");
 
va_status = vaEndPicture(ctx->va_dpy, ctx->context_id);
CHECK_VASTATUS(va_status,"vaEndPicture");
 
return 0;
}
 
static int
mpeg2enc_destroy_buffers(struct mpeg2enc_context *ctx, VABufferID *va_buffers, unsigned int num_va_buffers)
{
VAStatus va_status;
unsigned int i;
 
for (i = 0; i < num_va_buffers; i++) {
if (va_buffers[i] != VA_INVALID_ID) {
va_status = vaDestroyBuffer(ctx->va_dpy, va_buffers[i]);
CHECK_VASTATUS(va_status,"vaDestroyBuffer");
va_buffers[i] = VA_INVALID_ID;
}
}
 
return 0;
}
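/*
 * Rotate the reconstructed/reference surfaces for the next frame: after an
 * I or P picture the just-reconstructed surface becomes the L1 (if a B
 * picture follows) or L0 reference; after the last B picture before the next
 * reference, the old L1 moves down to L0. Then release all per-picture
 * buffers.
 */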
 
static void
end_picture(struct mpeg2enc_context *ctx, VAEncPictureType picture_type, int next_is_bpic)
{
VABufferID tempID;
 
/* Prepare for next picture */
tempID = surface_ids[SID_RECON_PICTURE];
 
if (picture_type != VAEncPictureTypeBidirectional) {
if (next_is_bpic) {
surface_ids[SID_RECON_PICTURE] = surface_ids[SID_REFERENCE_PICTURE_L1];
surface_ids[SID_REFERENCE_PICTURE_L1] = tempID;
} else {
surface_ids[SID_RECON_PICTURE] = surface_ids[SID_REFERENCE_PICTURE_L0];
surface_ids[SID_REFERENCE_PICTURE_L0] = tempID;
}
} else {
if (!next_is_bpic) {
surface_ids[SID_RECON_PICTURE] = surface_ids[SID_REFERENCE_PICTURE_L0];
surface_ids[SID_REFERENCE_PICTURE_L0] = surface_ids[SID_REFERENCE_PICTURE_L1];
surface_ids[SID_REFERENCE_PICTURE_L1] = tempID;
}
}
 
mpeg2enc_destroy_buffers(ctx, &ctx->seq_param_buf_id, 1);
mpeg2enc_destroy_buffers(ctx, &ctx->pic_param_buf_id, 1);
mpeg2enc_destroy_buffers(ctx, &ctx->packed_seq_header_param_buf_id, 1);
mpeg2enc_destroy_buffers(ctx, &ctx->packed_seq_buf_id, 1);
mpeg2enc_destroy_buffers(ctx, &ctx->packed_pic_header_param_buf_id, 1);
mpeg2enc_destroy_buffers(ctx, &ctx->packed_pic_buf_id, 1);
mpeg2enc_destroy_buffers(ctx, &ctx->slice_param_buf_id[0], ctx->num_slice_groups);
mpeg2enc_destroy_buffers(ctx, &ctx->codedbuf_buf_id, 1);
memset(ctx->slice_param, 0, sizeof(ctx->slice_param));
ctx->num_slice_groups = 0;
}
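/*
 * Fetch the encoded bits: sync on the input surface, map the coded buffer,
 * and write its first segment to the output file. If the driver reports a
 * slice overflow, double the coded-buffer size estimate and return -1 so the
 * caller re-creates the buffer and re-encodes the frame; otherwise adapt the
 * I and P/B size estimates to roughly 1.5x the last frame.
 */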
 
static int
store_coded_buffer(struct mpeg2enc_context *ctx, VAEncPictureType picture_type)
{
VACodedBufferSegment *coded_buffer_segment;
unsigned char *coded_mem;
int slice_data_length;
VAStatus va_status;
VASurfaceStatus surface_status;
size_t w_items;
 
va_status = vaSyncSurface(ctx->va_dpy, surface_ids[ctx->current_input_surface]);
CHECK_VASTATUS(va_status,"vaSyncSurface");
 
surface_status = 0;
va_status = vaQuerySurfaceStatus(ctx->va_dpy, surface_ids[ctx->current_input_surface], &surface_status);
CHECK_VASTATUS(va_status,"vaQuerySurfaceStatus");
 
va_status = vaMapBuffer(ctx->va_dpy, ctx->codedbuf_buf_id, (void **)(&coded_buffer_segment));
CHECK_VASTATUS(va_status,"vaMapBuffer");
coded_mem = coded_buffer_segment->buf;
 
if (coded_buffer_segment->status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK) {
if (picture_type == VAEncPictureTypeIntra)
ctx->codedbuf_i_size *= 2;
else
ctx->codedbuf_pb_size *= 2;
 
vaUnmapBuffer(ctx->va_dpy, ctx->codedbuf_buf_id);
return -1;
}
 
slice_data_length = coded_buffer_segment->size;
 
do {
w_items = fwrite(coded_mem, slice_data_length, 1, ctx->ofp);
} while (w_items != 1);
 
if (picture_type == VAEncPictureTypeIntra) {
if (ctx->codedbuf_i_size > slice_data_length * 3 / 2) {
ctx->codedbuf_i_size = slice_data_length * 3 / 2;
}
if (ctx->codedbuf_pb_size < slice_data_length) {
ctx->codedbuf_pb_size = slice_data_length;
}
} else {
if (ctx->codedbuf_pb_size > slice_data_length * 3 / 2) {
ctx->codedbuf_pb_size = slice_data_length * 3 / 2;
}
}
 
vaUnmapBuffer(ctx->va_dpy, ctx->codedbuf_buf_id);
 
return 0;
}
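/*
 * Encode one picture: kick off the asynchronous upload of the next frame's
 * YUV data, then loop "create coded buffer -> create picture parameter
 * buffer -> render -> store" until store_coded_buffer() succeeds (it fails
 * when the coded buffer was too small and has already enlarged the size
 * estimate).
 */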
 
static void
encode_picture(struct mpeg2enc_context *ctx,
int coded_order,
int display_order,
VAEncPictureType picture_type,
int next_is_bpic,
int next_display_order)
{
VAStatus va_status;
int ret = 0, codedbuf_size;
begin_picture(ctx, coded_order, display_order, picture_type);
 
if (1) {
/* upload YUV data to VA surface for next frame */
if (next_display_order >= ctx->num_pictures)
next_display_order = ctx->num_pictures - 1;
 
fseek(ctx->ifp, ctx->frame_size * next_display_order, SEEK_SET);
ctx->upload_thread_value = pthread_create(&ctx->upload_thread_id,
NULL,
upload_yuv_to_surface,
ctx);
}
 
do {
mpeg2enc_destroy_buffers(ctx, &ctx->codedbuf_buf_id, 1);
mpeg2enc_destroy_buffers(ctx, &ctx->pic_param_buf_id, 1);
 
 
if (VAEncPictureTypeIntra == picture_type) {
codedbuf_size = ctx->codedbuf_i_size;
} else {
codedbuf_size = ctx->codedbuf_pb_size;
}
 
/* coded buffer */
va_status = vaCreateBuffer(ctx->va_dpy,
ctx->context_id,
VAEncCodedBufferType,
codedbuf_size, 1, NULL,
&ctx->codedbuf_buf_id);
CHECK_VASTATUS(va_status,"vaCreateBuffer");
 
/* picture parameter set */
mpeg2enc_update_picture_parameter_buffer(ctx, picture_type, coded_order, display_order);
 
mpeg2enc_render_picture(ctx);
 
ret = store_coded_buffer(ctx, picture_type);
} while (ret);
 
end_picture(ctx, picture_type, next_is_bpic);
}
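/*
 * Decide the type and display order of the next picture to encode. Pictures
 * are emitted in coded order, so with ip_period = 2 the display sequence
 * I B B P B B P ... is encoded as I P B B P B B ..., and an I picture is
 * forced every intra_period coded frames.
 */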
 
static void
update_next_frame_info(struct mpeg2enc_context *ctx,
VAEncPictureType curr_type,
int curr_coded_order,
int curr_display_order)
{
if (((curr_coded_order + 1) % ctx->intra_period) == 0) {
ctx->next_type = VAEncPictureTypeIntra;
ctx->next_display_order = curr_coded_order + 1;
return;
}
 
if (curr_type == VAEncPictureTypeIntra) {
assert(curr_display_order == curr_coded_order);
ctx->next_type = VAEncPictureTypePredictive;
ctx->next_bframes = ctx->ip_period;
ctx->next_display_order = curr_display_order + ctx->next_bframes + 1;
} else if (curr_type == VAEncPictureTypePredictive) {
if (ctx->ip_period == 0) {
assert(curr_display_order == curr_coded_order);
ctx->next_type = VAEncPictureTypePredictive;
ctx->next_display_order = curr_display_order + 1;
} else {
ctx->next_type = VAEncPictureTypeBidirectional;
ctx->next_display_order = curr_display_order - ctx->next_bframes;
ctx->next_bframes--;
}
} else if (curr_type == VAEncPictureTypeBidirectional) {
if (ctx->next_bframes == 0) {
ctx->next_type = VAEncPictureTypePredictive;
ctx->next_bframes = ctx->ip_period;
ctx->next_display_order = curr_display_order + ctx->next_bframes + 2;
} else {
ctx->next_type = VAEncPictureTypeBidirectional;
ctx->next_display_order = curr_display_order + 1;
ctx->next_bframes--;
}
}
 
if (ctx->next_display_order >= ctx->num_pictures) {
int rtmp = ctx->next_display_order - (ctx->num_pictures - 1);
ctx->next_display_order = ctx->num_pictures - 1;
ctx->next_bframes -= rtmp;
}
}
 
static void
mpeg2enc_run(struct mpeg2enc_context *ctx)
{
int display_order = 0, coded_order = 0;
VAEncPictureType type;
 
ctx->new_sequence = 1;
ctx->new_gop_header = 1;
ctx->gop_header_in_display_order = display_order;
 
while (coded_order < ctx->num_pictures) {
type = ctx->next_type;
display_order = ctx->next_display_order;
/* follow the IPBxxBPBxxB mode */
update_next_frame_info(ctx, type, coded_order, display_order);
encode_picture(ctx,
coded_order,
display_order,
type,
ctx->next_type == VAEncPictureTypeBidirectional,
ctx->next_display_order);
 
/* update gop_header */
ctx->new_sequence = 0;
ctx->new_gop_header = ctx->next_type == VAEncPictureTypeIntra;
 
if (ctx->new_gop_header)
ctx->gop_header_in_display_order += ctx->intra_period;
 
coded_order++;
 
fprintf(stderr, "\r %d/%d ...", coded_order, ctx->num_pictures);
fflush(stderr);
}
}
 
/*
* end
*/
static void
mpeg2enc_release_va_resources(struct mpeg2enc_context *ctx)
{
vaDestroySurfaces(ctx->va_dpy, surface_ids, SID_NUMBER);
vaDestroyContext(ctx->va_dpy, ctx->context_id);
vaDestroyConfig(ctx->va_dpy, ctx->config_id);
vaTerminate(ctx->va_dpy);
va_close_display(ctx->va_dpy);
}
 
static void
mpeg2enc_end(struct mpeg2enc_context *ctx)
{
pthread_join(ctx->upload_thread_id, NULL);
mpeg2enc_release_va_resources(ctx);
}
 
int
main(int argc, char *argv[])
{
struct mpeg2enc_context ctx;
struct timeval tpstart, tpend;
float timeuse;
 
gettimeofday(&tpstart, NULL);
 
memset(&ctx, 0, sizeof(ctx));
parse_args(&ctx, argc, argv);
mpeg2enc_init(&ctx);
mpeg2enc_run(&ctx);
mpeg2enc_end(&ctx);
 
gettimeofday(&tpend, NULL);
timeuse = 1000000 * (tpend.tv_sec - tpstart.tv_sec) + tpend.tv_usec - tpstart.tv_usec;
timeuse /= 1000000;
fprintf(stderr, "\ndone!\n");
fprintf(stderr, "encode %d frames in %f secondes, FPS is %.1f\n", ctx.num_pictures, timeuse, ctx.num_pictures / timeuse);
 
mpeg2enc_exit(&ctx, 0);
 
return 0;
}