Subversion Repositories Kolibri OS

Compare Revisions

Rev 6133 → Rev 6132

/contrib/media/fplay/list.h
File deleted
/contrib/media/fplay/fplay.c
26,6 → 26,9
 
uint32_t win_width, win_height;
 
 
AVFrame *pFrame;
 
int have_sound = 0;
 
uint8_t *decoder_buffer;
144,15 → 147,6
printf("codec id %x name %s\n",vst.vCtx->codec_id, vst.vCodec->name);
printf("ctx->pix_fmt %d\n", vst.vCtx->pix_fmt);
 
INIT_LIST_HEAD(&vst.input_list);
INIT_LIST_HEAD(&vst.output_list);
mutex_init(&vst.q_video.lock);
mutex_init(&vst.q_audio.lock);
mutex_init(&vst.gpu_lock);
mutex_init(&vst.decoder_lock);
mutex_init(&vst.input_lock);
mutex_init(&vst.output_lock);
 
if(vst.vCodec == NULL)
{
printf("Unsupported codec with id %d for input stream %d\n",
172,6 → 166,10
 
printf("ctx->pix_fmt %d\n", vst.vCtx->pix_fmt);
 
mutex_init(&vst.q_video.lock);
mutex_init(&vst.q_audio.lock);
mutex_init(&vst.gpu_lock);
 
if (vst.aCtx->channels > 0)
vst.aCtx->request_channels = FFMIN(2, vst.aCtx->channels);
else
226,11 → 224,12
if(!init_video(&vst))
return 0;
 
mutex_lock_timeout(&vst.decoder_lock, 3000);
 
decoder(&vst);
 
// Free the YUV frame
av_free(pFrame);
 
 
//__asm__ __volatile__("int3");
 
while( threads_running &
242,7 → 241,7
 
mutex_destroy(&vst.q_video.lock);
mutex_destroy(&vst.q_audio.lock);
mutex_destroy(&vst.decoder_lock);
 
return 0;
}
 
335,7 → 334,8
}
decode_video(vst);
ret = decode_audio(vst->aCtx, &vst->q_audio);
}while(astream.count < resampler_size*2 && ret == 1);
}while(astream.count < resampler_size*2 &&
ret == 1);
 
sound_state = PREPARE;
decoder_state = PLAY;
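A note on the rev 6133 startup path shown above: init_video() takes vst.decoder_lock before spawning video_thread, main() then blocks on mutex_lock_timeout(&vst.decoder_lock, 3000), and video_thread releases the lock once create_render() has succeeded (or failed), so decoder() cannot start before the render window exists. The mutex_*/create_thread wrappers are fplay-specific, so the following is only a minimal standalone sketch of the same gating pattern using a POSIX semaphore (a plain pthread mutex must be unlocked by the thread that locked it, which makes a semaphore the closer portable equivalent):

/* Sketch of the decoder gate from the 6133 code; all names here are
 * illustrative, not fplay's. */
#include <pthread.h>
#include <semaphore.h>
#include <stdio.h>
#include <time.h>
#include <unistd.h>

static sem_t render_ready;

static void *video_thread(void *arg)
{
    (void)arg;
    sleep(1);                  /* stand-in for create_window()/create_render() */
    sem_post(&render_ready);   /* plays the role of mutex_unlock(&vst->decoder_lock) */
    return NULL;
}

int main(void)
{
    pthread_t tid;
    struct timespec ts;

    sem_init(&render_ready, 0, 0);                /* gate closed, like init_video() */
    pthread_create(&tid, NULL, video_thread, NULL);

    clock_gettime(CLOCK_REALTIME, &ts);
    ts.tv_sec += 3;                               /* mutex_lock_timeout(..., 3000)  */
    if (sem_timedwait(&render_ready, &ts) == 0)
        puts("render ready, entering decoder()");
    else
        puts("timed out waiting for video_thread");

    pthread_join(tid, NULL);
    sem_destroy(&render_ready);
    return 0;
}

Build with -pthread; sleep(1) merely stands in for window and render creation.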
/contrib/media/fplay/fplay.h
1,7 → 1,6
 
#include <libsync.h>
#include <pixlib3.h>
#include "list.h"
#include "pixlib3.h"
 
#define BLACK_MAGIC_SOUND
#define BLACK_MAGIC_VIDEO
19,15 → 18,10
 
typedef struct
{
struct list_head list;
enum AVPixelFormat format;
AVPicture picture;
planar_t* planar;
int is_hw_pic;
int index;
int planar;
double pts;
double pkt_pts;
double pkt_dts;
volatile int ready;
}vframe_t;
 
59,7 → 53,7
EMPTY, INIT }state;
enum win_state win_state;
 
void (*draw)(render_t *render, vframe_t *vframe);
void (*draw)(render_t *render, AVPicture *picture);
};
 
enum player_state
111,7 → 105,6
int put_packet(queue_t *q, AVPacket *pkt);
int get_packet(queue_t *q, AVPacket *pkt);
 
#define HWDEC_NUM_SURFACES 16
struct vstate
{
AVFormatContext *fCtx; /* format context */
126,14 → 119,10
queue_t q_audio; /* audio packets queue */
 
mutex_t gpu_lock; /* gpu access lock. libdrm not yet thread safe :( */
mutex_t decoder_lock;
 
mutex_t input_lock;
mutex_t output_lock;
struct list_head input_list;
struct list_head output_list;
 
vframe_t *decoder_frame;
vframe_t vframe[4]; /* decoder workset */
int vfx; /* index of decoded frame */
int dfx; /* index of renderd frame */
void *hwCtx; /* hardware context */
int hwdec; /* hardware decoder */
};
176,7 → 165,7
::"a"(68),"b"(14),"c"(event));
}
 
void va_create_planar(vst_t *vst, vframe_t *vframe);
void va_convert_picture(vst_t *vst, int width, int height, AVPicture *pic);
 
int init_fontlib();
char *get_moviefile();
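In the rev 6133 fplay.h above, vframe_t gains a struct list_head plus an index, and vstate replaces the fixed vframe[4] workset (indexed by vfx/dfx) with heap-allocated frames that migrate between input_list (free frames) and output_list (decoded frames, kept ordered by pkt_pts), guarded by input_lock/output_lock. A rough sketch of that recycling flow, assuming the deleted list.h provides the Linux-style intrusive-list macros this diff already uses (INIT_LIST_HEAD, list_add_tail, list_first_entry, list_del, list_empty); locking and pts-ordered insertion are left out:

/* Sketch only: relies on the project's list.h and uses simplified names,
 * not fplay's actual vframe_t handling. */
#include "list.h"

struct frame {
    struct list_head list;
    double pkt_pts;
    volatile int ready;
};

static struct list_head input_list;   /* empty frames waiting for the decoder    */
static struct list_head output_list;  /* decoded frames waiting for the renderer */

static void frames_init(void)
{
    INIT_LIST_HEAD(&input_list);
    INIT_LIST_HEAD(&output_list);
}

/* decoder side: mark a frame ready and queue it for display
   (the real code inserts by pkt_pts to keep the list ordered) */
static void queue_decoded(struct frame *f)
{
    f->ready = 1;
    list_add_tail(&f->list, &output_list);
}

/* render side: take the oldest decoded frame, draw it, then recycle it */
static struct frame *take_for_display(void)
{
    struct frame *f;

    if (list_empty(&output_list))
        return NULL;
    f = list_first_entry(&output_list, struct frame, list);
    list_del(&f->list);
    return f;
}

static void recycle(struct frame *f)
{
    f->ready = 0;
    list_add_tail(&f->list, &input_list);
}

Keeping two lists lets the decoder and the renderer each work from their own queue without sharing an index, which is why rev 6133 also drops vfx/dfx.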
/contrib/media/fplay/vaapi.c
17,7 → 17,7
{
enum AVCodecID av_codec;
int ff_profile;
VAProfile va_profile;
uint64_t va_profile;
};
 
 
39,7 → 39,7
static int drm_fd = 0;
static struct vaapi_context *v_context;
 
static VASurfaceID v_surface_id[HWDEC_NUM_SURFACES];
static VASurfaceID v_surface_id[4];
 
#define HAS_HEVC VA_CHECK_VERSION(0, 38, 0)
#define HAS_VP9 (VA_CHECK_VERSION(0, 38, 1) && defined(FF_PROFILE_VP9_0))
48,7 → 48,7
{AV_CODEC_ID_ ## av_codec_id, FF_PROFILE_ ## ff_profile, \
VAProfile ## vdp_profile}
 
static const struct hw_profile hw_profiles[] = {
static const struct hw_profile profiles[] = {
PE(MPEG2VIDEO, MPEG2_MAIN, MPEG2Main),
PE(MPEG2VIDEO, MPEG2_SIMPLE, MPEG2Simple),
PE(MPEG4, MPEG4_ADVANCED_SIMPLE, MPEG4AdvancedSimple),
75,8 → 75,8
 
int va_check_codec_support(enum AVCodecID id)
{
for (int n = 0; hw_profiles[n].av_codec; n++) {
if (hw_profiles[n].av_codec == id)
for (int n = 0; profiles[n].av_codec; n++) {
if (profiles[n].av_codec == id)
return 1;
}
return 0;
343,7 → 343,8
 
printf("vaCreateSurfaces %dx%d\n",picture_width,picture_height);
status = vaCreateSurfaces(vaapi->display, VA_RT_FORMAT_YUV420, picture_width, picture_height,
v_surface_id,HWDEC_NUM_SURFACES,NULL,0);
v_surface_id,4,NULL,0);
printf("v_surface_id_3 %x\n", v_surface_id[3]);
if (!vaapi_check_status(status, "vaCreateSurfaces()"))
{
FAIL();
378,7 → 379,7
status = vaCreateContext(vaapi->display, config_id,
picture_width, picture_height,
VA_PROGRESSIVE,
v_surface_id, HWDEC_NUM_SURFACES,
v_surface_id, 4,
&context_id);
if (!vaapi_check_status(status, "vaCreateContext()"))
{
396,29 → 397,41
static enum PixelFormat get_format(struct AVCodecContext *avctx,
const enum AVPixelFormat *fmt)
{
VAProfile profile = VAProfileNone;
int i, profile;
 
ENTER();
 
for (int i = 0; fmt[i] != PIX_FMT_NONE; i++)
{
enum AVCodecID codec = avctx->codec_id;
// for (int i = 0; fmt[i] != AV_PIX_FMT_NONE; i++)
// printf(" %s", av_get_pix_fmt_name(fmt[i]));
 
for (i = 0; fmt[i] != PIX_FMT_NONE; i++) {
printf("pixformat %x\n", fmt[i]);
if (fmt[i] != AV_PIX_FMT_VAAPI_VLD)
continue;
 
if (codec == AV_CODEC_ID_H264)
switch (avctx->codec_id)
{
if (profile == FF_PROFILE_H264_CONSTRAINED_BASELINE)
profile = FF_PROFILE_H264_MAIN;
};
 
for (int n = 0; hw_profiles[n].av_codec; n++)
{
if (hw_profiles[n].av_codec == codec &&
hw_profiles[n].ff_profile == avctx->profile)
{
profile = hw_profiles[n].va_profile;
case CODEC_ID_MPEG2VIDEO:
profile = VAProfileMPEG2Main;
break;
case CODEC_ID_MPEG4:
case CODEC_ID_H263:
profile = VAProfileMPEG4AdvancedSimple;
break;
case CODEC_ID_H264:
profile = VAProfileH264High;
break;
case CODEC_ID_WMV3:
profile = VAProfileVC1Main;
break;
case CODEC_ID_VC1:
profile = VAProfileVC1Advanced;
break;
default:
profile = -1;
break;
}
if (profile >= 0) {
if (vaapi_init_decoder(profile, VAEntrypointVLD, avctx->width, avctx->height) == 0)
{
avctx->hwaccel_context = v_context;
427,8 → 440,6
}
}
}
 
}
FAIL();
return PIX_FMT_NONE;
}
443,6 → 454,9
static void av_release_buffer(void *opaque, uint8_t *data)
{
struct av_surface surface = *(struct av_surface*)data;
// VDPAUContext *ctx = opaque;
 
// ctx->video_surface_destroy(surface);
av_freep(&data);
}
 
449,10 → 463,8
static int get_buffer2(AVCodecContext *avctx, AVFrame *pic, int flags)
{
vst_t *vst = (vst_t*)avctx->opaque;
void *surface;
void *surface = (void *)(uintptr_t)v_surface_id[vst->dfx];
 
surface = (void *)(uintptr_t)v_surface_id[vst->decoder_frame->index];
 
pic->data[3] = surface;
 
struct av_surface *avsurface;
481,17 → 493,21
 
if(vst->hwCtx != NULL)
{
for(int i = 0; i < HWDEC_NUM_SURFACES; i++)
for(int i = 0; i < 4; i++)
{
vframe_t *vframe = calloc(1, sizeof(*vframe));
int ret;
 
vframe->format = AV_PIX_FMT_NONE;
vframe->is_hw_pic = 1;
vframe->index = i;
vframe->pts = 0;
vframe->ready = 0;
list_add_tail(&vframe->list, &vst->input_list);
ret = avpicture_alloc(&vst->vframe[i].picture, AV_PIX_FMT_BGRA,
vst->vCtx->width, vst->vCtx->height);
if ( ret != 0 )
{
printf("Cannot alloc video buffer\n\r");
return ret;
};
vst->vframe[i].format = AV_PIX_FMT_BGRA;
vst->vframe[i].pts = 0;
vst->vframe[i].ready = 0;
};
 
vst->hwdec = 1;
vCtx->opaque = vst;
504,14 → 520,11
 
vst->hwdec = 0;
 
for(int i = 0; i < HWDEC_NUM_SURFACES; i++)
for(int i = 0; i < 4; i++)
{
vframe_t *vframe;
int ret;
 
vframe = calloc(1, sizeof(*vframe));
 
ret = avpicture_alloc(&vframe->picture, vst->vCtx->pix_fmt,
ret = avpicture_alloc(&vst->vframe[i].picture, vst->vCtx->pix_fmt,
vst->vCtx->width, vst->vCtx->height);
if ( ret != 0 )
{
518,11 → 531,9
printf("Cannot alloc video buffer\n\r");
return ret;
};
vframe->format = vst->vCtx->pix_fmt;
vframe->index = i;
vframe->pts = 0;
vframe->ready = 0;
list_add_tail(&vframe->list, &vst->input_list);
vst->vframe[i].format = vst->vCtx->pix_fmt;
vst->vframe[i].pts = 0;
vst->vframe[i].ready = 0;
};
 
return 0;
529,94 → 540,32
}
 
 
#define EGL_TEXTURE_Y_U_V_WL 0x31D7
#define EGL_TEXTURE_Y_UV_WL 0x31D8
#define EGL_TEXTURE_Y_XUXV_WL 0x31D9
struct SwsContext *vacvt_ctx;
 
enum wl_drm_format {
WL_DRM_FORMAT_C8 = 0x20203843,
WL_DRM_FORMAT_RGB332 = 0x38424752,
WL_DRM_FORMAT_BGR233 = 0x38524742,
WL_DRM_FORMAT_XRGB4444 = 0x32315258,
WL_DRM_FORMAT_XBGR4444 = 0x32314258,
WL_DRM_FORMAT_RGBX4444 = 0x32315852,
WL_DRM_FORMAT_BGRX4444 = 0x32315842,
WL_DRM_FORMAT_ARGB4444 = 0x32315241,
WL_DRM_FORMAT_ABGR4444 = 0x32314241,
WL_DRM_FORMAT_RGBA4444 = 0x32314152,
WL_DRM_FORMAT_BGRA4444 = 0x32314142,
WL_DRM_FORMAT_XRGB1555 = 0x35315258,
WL_DRM_FORMAT_XBGR1555 = 0x35314258,
WL_DRM_FORMAT_RGBX5551 = 0x35315852,
WL_DRM_FORMAT_BGRX5551 = 0x35315842,
WL_DRM_FORMAT_ARGB1555 = 0x35315241,
WL_DRM_FORMAT_ABGR1555 = 0x35314241,
WL_DRM_FORMAT_RGBA5551 = 0x35314152,
WL_DRM_FORMAT_BGRA5551 = 0x35314142,
WL_DRM_FORMAT_RGB565 = 0x36314752,
WL_DRM_FORMAT_BGR565 = 0x36314742,
WL_DRM_FORMAT_RGB888 = 0x34324752,
WL_DRM_FORMAT_BGR888 = 0x34324742,
WL_DRM_FORMAT_XRGB8888 = 0x34325258,
WL_DRM_FORMAT_XBGR8888 = 0x34324258,
WL_DRM_FORMAT_RGBX8888 = 0x34325852,
WL_DRM_FORMAT_BGRX8888 = 0x34325842,
WL_DRM_FORMAT_ARGB8888 = 0x34325241,
WL_DRM_FORMAT_ABGR8888 = 0x34324241,
WL_DRM_FORMAT_RGBA8888 = 0x34324152,
WL_DRM_FORMAT_BGRA8888 = 0x34324142,
WL_DRM_FORMAT_XRGB2101010 = 0x30335258,
WL_DRM_FORMAT_XBGR2101010 = 0x30334258,
WL_DRM_FORMAT_RGBX1010102 = 0x30335852,
WL_DRM_FORMAT_BGRX1010102 = 0x30335842,
WL_DRM_FORMAT_ARGB2101010 = 0x30335241,
WL_DRM_FORMAT_ABGR2101010 = 0x30334241,
WL_DRM_FORMAT_RGBA1010102 = 0x30334152,
WL_DRM_FORMAT_BGRA1010102 = 0x30334142,
WL_DRM_FORMAT_YUYV = 0x56595559,
WL_DRM_FORMAT_YVYU = 0x55595659,
WL_DRM_FORMAT_UYVY = 0x59565955,
WL_DRM_FORMAT_VYUY = 0x59555956,
WL_DRM_FORMAT_AYUV = 0x56555941,
WL_DRM_FORMAT_NV12 = 0x3231564e,
WL_DRM_FORMAT_NV21 = 0x3132564e,
WL_DRM_FORMAT_NV16 = 0x3631564e,
WL_DRM_FORMAT_NV61 = 0x3136564e,
WL_DRM_FORMAT_YUV410 = 0x39565559,
WL_DRM_FORMAT_YVU410 = 0x39555659,
WL_DRM_FORMAT_YUV411 = 0x31315559,
WL_DRM_FORMAT_YVU411 = 0x31315659,
WL_DRM_FORMAT_YUV420 = 0x32315559,
WL_DRM_FORMAT_YVU420 = 0x32315659,
WL_DRM_FORMAT_YUV422 = 0x36315559,
WL_DRM_FORMAT_YVU422 = 0x36315659,
WL_DRM_FORMAT_YUV444 = 0x34325559,
WL_DRM_FORMAT_YVU444 = 0x34325659,
};
 
void va_create_planar(vst_t *vst, vframe_t *vframe)
void va_convert_picture(vst_t *vst, int width, int height, AVPicture *pic)
{
struct vaapi_context* const vaapi = v_context;
VABufferInfo info = {0};
 
uint8_t *src_data[4];
int src_linesize[4];
VAImage vaimage;
VAStatus status;
planar_t *planar;
uint8_t *vdata;
struct vaapi_context* const vaapi = v_context;
 
vaSyncSurface(vaapi->display,v_surface_id[vframe->index]);
vaSyncSurface(vaapi->display,v_surface_id[vst->dfx]);
 
if(vframe->format != AV_PIX_FMT_NONE)
return;
 
ENTER();
 
status = vaDeriveImage(vaapi->display,v_surface_id[vframe->index],&vaimage);
status = vaDeriveImage(vaapi->display,v_surface_id[vst->dfx],&vaimage);
if (!vaapi_check_status(status, "vaDeriveImage()"))
{
FAIL();
return;
};
/*
 
static int once = 2;
 
if(once && vst->dfx == 0)
{
VABufferInfo info = {0};
 
printf("vaDeriveImage: %x fourcc: %x\n"
"offset0: %d pitch0: %d\n"
"offset1: %d pitch1: %d\n"
625,33 → 574,53
vaimage.offsets[0],vaimage.pitches[0],
vaimage.offsets[1],vaimage.pitches[1],
vaimage.offsets[2],vaimage.pitches[2]);
*/
 
info.mem_type = VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM;
status = vaAcquireBufferHandle(vaapi->display, vaimage.buf, &info);
if (!vaapi_check_status(status, "vaAcquireBufferHandle()"))
if (vaapi_check_status(status, "vaAcquireBufferHandle()"))
{
vaDestroyImage(vaapi->display, vaimage.image_id);
FAIL();
return;
};
/*
printf("vaAcquireBufferHandle: %x type: %x\n"
"mem type: %x mem size: %d\n",
info.handle, info.type, info.mem_type, info.mem_size);
*/
planar = pxCreatePlanar(info.handle, WL_DRM_FORMAT_NV12,
vaimage.width, vaimage.height,
vaimage.offsets[0],vaimage.pitches[0],
vaimage.offsets[1],vaimage.pitches[1],
vaimage.offsets[2],vaimage.pitches[2]);
if(planar != NULL)
 
vaReleaseBufferHandle(vaapi->display, vaimage.buf);
}
once--;
};
 
src_linesize[0] = vaimage.pitches[0];
src_linesize[1] = vaimage.pitches[1];
src_linesize[2] = vaimage.pitches[2];
src_linesize[3] = 0;
 
status = vaMapBuffer(vaapi->display,vaimage.buf,(void **)&vdata);
if (!vaapi_check_status(status, "vaMapBuffer()"))
{
printf("create planar image\n",planar);
vframe->planar = planar;
vframe->format = AV_PIX_FMT_NV12;
FAIL();
return;
};
 
vaReleaseBufferHandle(vaapi->display, vaimage.buf);
// printf("vdata: %x offset0: %d offset1: %d offset2: %d\n", vdata,
// vaimage.offsets[0],
// vaimage.offsets[1],
// vaimage.offsets[2]);
 
src_data[0] = vdata + vaimage.offsets[0];
src_data[1] = vdata + vaimage.offsets[1];
src_data[2] = vdata + vaimage.offsets[2];
src_data[3] = 0;
 
vacvt_ctx = sws_getCachedContext(vacvt_ctx, width, height, AV_PIX_FMT_NV12,
width, height, AV_PIX_FMT_BGRA,
SWS_FAST_BILINEAR, NULL, NULL, NULL);
if(vacvt_ctx == NULL)
{
printf("Cannot initialize the conversion context!\n");
return ;
};
 
sws_scale(vacvt_ctx, (const uint8_t* const *)src_data, src_linesize, 0, height, pic->data, pic->linesize);
 
vaUnmapBuffer (vaapi->display, vaimage.buf);
vaDestroyImage(vaapi->display, vaimage.image_id);
LEAVE();
}
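For context on the two paths above: rev 6132's va_convert_picture() maps the decoded VA surface (vaDeriveImage + vaMapBuffer) and converts its NV12 planes to BGRA in software with libswscale, whereas rev 6133's va_create_planar() exports the surface as a DRM handle (vaAcquireBufferHandle) and wraps it with pxCreatePlanar so it can be blitted without a copy. Below is a small standalone libswscale example of just the NV12 to BGRA step, independent of VA-API; it assumes a contiguous NV12 buffer whose UV plane directly follows the Y plane, while the real code takes the plane offsets from vaimage.offsets[]:

/* Convert one NV12 image to BGRA with libswscale (illustrative helper,
 * not part of fplay). */
#include <stddef.h>
#include <stdint.h>
#include <libswscale/swscale.h>

int nv12_to_bgra(const uint8_t *nv12, int pitch, int width, int height,
                 uint8_t *bgra, int bgra_pitch)
{
    /* plane 0: Y, pitch bytes per row; plane 1: interleaved UV right after it */
    const uint8_t *src_data[4] = { nv12, nv12 + (size_t)pitch * height, NULL, NULL };
    int src_linesize[4] = { pitch, pitch, 0, 0 };
    uint8_t *dst_data[4] = { bgra, NULL, NULL, NULL };
    int dst_linesize[4] = { bgra_pitch, 0, 0, 0 };

    struct SwsContext *ctx = sws_getContext(width, height, AV_PIX_FMT_NV12,
                                            width, height, AV_PIX_FMT_BGRA,
                                            SWS_FAST_BILINEAR, NULL, NULL, NULL);
    if (ctx == NULL)
        return -1;

    sws_scale(ctx, (const uint8_t *const *)src_data, src_linesize,
              0, height, dst_data, dst_linesize);
    sws_freeContext(ctx);
    return 0;
}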
/contrib/media/fplay/video.c
28,38 → 28,35
 
render_t *main_render;
 
int width;
int height;
 
AVRational video_time_base;
AVFrame *Frame;
 
void get_client_rect(rect_t *rc);
void run_render(window_t *win, void *render);
void window_update_layout(window_t *win);
int fini_winlib();
 
void flush_video(vst_t *vst)
{
vframe_t *vframe, *tmp;
int i;
 
mutex_lock(&vst->output_lock);
mutex_lock(&vst->input_lock);
 
list_for_each_entry_safe(vframe, tmp, &vst->output_list, list)
list_move_tail(&vframe->list, &vst->input_list);
 
list_for_each_entry(vframe, &vst->output_list, list)
for(i = 0; i < 4; i++)
{
vframe->pts = 0;
vframe->ready = 0;
}
 
mutex_unlock(&vst->input_lock);
mutex_unlock(&vst->output_lock);
 
vst->vframe[i].pts = 0;
vst->vframe[i].ready = 0;
};
vst->vfx = 0;
vst->dfx = 0;
frames_count = 0;
};
 
int init_video(vst_t *vst)
{
int i;
 
width = vst->vCtx->width;
height = vst->vCtx->height;
 
Frame = av_frame_alloc();
if ( Frame == NULL )
{
67,45 → 64,45
return 0;
};
 
mutex_lock(&vst->decoder_lock);
 
create_thread(video_thread, vst, 1024*1024);
 
delay(50);
return 1;
};
 
static double dts = 0.0;
 
int decode_video(vst_t* vst)
{
AVPacket pkt;
double pts;
int frameFinished;
double current_clock;
 
if(vst->decoder_frame == NULL)
{
mutex_lock(&vst->input_lock);
if(list_empty(&vst->input_list))
{
mutex_unlock(&vst->input_lock);
if(vst->vframe[vst->dfx].ready != 0 )
return -1;
}
vst->decoder_frame = list_first_entry(&vst->input_list, vframe_t, list);
list_del(&vst->decoder_frame->list);
mutex_unlock(&vst->input_lock);
 
vframe_t *vframe = vst->decoder_frame;
};
 
if( get_packet(&vst->q_video, &pkt) == 0 )
{
return 0;
};
 
/*
current_clock = -90.0 + get_master_clock();
 
if( pkt.dts == AV_NOPTS_VALUE &&
Frame->reordered_opaque != AV_NOPTS_VALUE)
pts = Frame->reordered_opaque;
else if(pkt.dts != AV_NOPTS_VALUE)
pts= pkt.dts;
else
pts= 0;
 
 
pts *= av_q2d(video_time_base)*1000.0;
*/
if( 1 /*pts > current_clock*/)
{
frameFinished = 0;
if(dts == 0)
dts = pkt.pts;
 
vst->vCtx->reordered_opaque = pkt.pts;
 
mutex_lock(&vst->gpu_lock);
 
if(avcodec_decode_video2(vst->vCtx, Frame, &frameFinished, &pkt) <= 0)
113,67 → 110,35
 
if(frameFinished)
{
vframe_t *vframe;
AVPicture *dst_pic;
 
pts = av_frame_get_best_effort_timestamp(Frame);
if( pkt.dts == AV_NOPTS_VALUE &&
Frame->reordered_opaque != AV_NOPTS_VALUE)
pts = Frame->reordered_opaque;
else if(pkt.dts != AV_NOPTS_VALUE)
pts = pkt.dts;
else
pts = 0;
 
pts *= av_q2d(video_time_base);
 
vframe = vst->decoder_frame;
dst_pic = &vframe->picture;
dst_pic = &vst->vframe[vst->dfx].picture;
 
if(vframe->is_hw_pic == 0)
if(vst->hwdec == 0)
av_image_copy(dst_pic->data, dst_pic->linesize,
(const uint8_t**)Frame->data,
Frame->linesize, vst->vCtx->pix_fmt, vst->vCtx->width, vst->vCtx->height);
else
va_create_planar(vst, vframe);
va_convert_picture(vst, vst->vCtx->width, vst->vCtx->height, dst_pic);
 
vframe->pts = pts*1000.0;
vframe->pkt_pts = pkt.pts*av_q2d(video_time_base)*1000.0;
vframe->pkt_dts = dts*av_q2d(video_time_base)*1000.0;
vframe->ready = 1;
 
 
mutex_lock(&vst->output_lock);
 
if(list_empty(&vst->output_list))
list_add_tail(&vframe->list, &vst->output_list);
else
{
vframe_t *cur;
 
cur = list_first_entry(&vst->output_list,vframe_t,list);
if(vframe->pkt_pts < cur->pkt_pts)
{
list_add_tail(&vframe->list, &vst->output_list);
}
else
{
list_for_each_entry_reverse(cur,&vst->output_list,list)
{
if(vframe->pkt_pts > cur->pkt_pts)
{
list_add(&vframe->list, &cur->list);
break;
};
};
};
};
mutex_unlock(&vst->output_lock);
 
 
// printf("decoded index: %d pts: %f pkt_pts %f pkt_dts %f\n",
// vst->dfx, vst->vframe[vst->dfx].pts,
// vst->vframe[vst->dfx].pkt_pts, vst->vframe[vst->dfx].pkt_dts);
 
vst->decoder_frame = NULL;
vst->vframe[vst->dfx].pts = pts*1000.0;
vst->vframe[vst->dfx].ready = 1;
vst->dfx = (vst->dfx + 1) & 3;
frames_count++;
dts = 0;
};
av_frame_unref(Frame);
mutex_unlock(&vst->gpu_lock);
 
};
av_free_packet(&pkt);
 
return 1;
392,6 → 357,8
return 0;
};
 
#define VERSION_A 1
 
void render_time(render_t *render)
{
progress_t *prg = main_render->win->panel.prg;
400,6 → 367,8
double ctime; /* milliseconds */
double fdelay; /* milliseconds */
 
//again:
 
if(player_state == CLOSED)
{
render->win->win_command = WIN_CLOSED;
421,38 → 390,51
return;
};
 
mutex_lock(&vst->output_lock);
if(list_empty(&vst->output_list))
 
#ifdef VERSION_A
if(vst->vframe[vst->vfx].ready == 1 )
{
mutex_unlock(&vst->output_lock);
delay(1);
}
else
{
vframe_t *vframe;
int sys_time;
 
vframe = list_first_entry(&vst->output_list, vframe_t, list);
list_del(&vframe->list);
mutex_unlock(&vst->output_lock);
 
ctime = get_master_clock();
fdelay = (vframe->pkt_pts - ctime);
fdelay = (vst->vframe[vst->vfx].pts - ctime);
 
// printf("pts %f time %f delay %f\n",
// frames[vfx].pts, ctime, fdelay);
 
if(fdelay > 15.0)
{
delay((int)fdelay/10);
// return;
};
#if 0
ctime = get_master_clock();
fdelay = (vst->vframe[vst->vfx].pts - ctime);
 
// printf("output index: %d pts: %f pkt_pts %f pkt_dts %f\n",
// vframe->index,vframe->pts,vframe->pkt_pts,vframe->pkt_dts);
// while(fdelay > 0)
// {
// yield();
// ctime = get_master_clock();
// fdelay = (frames[vfx].pts - ctime);
// }
 
main_render->draw(main_render, vframe);
// sys_time = get_tick_count();
 
// if(fdelay < 0)
// printf("systime %d pts %f time %f delay %f\n",
// sys_time*10, frames[vfx].pts, ctime, fdelay);
 
printf("pts %f time %f delay %f\n",
vst->vframe[vst->vfx].pts, ctime, fdelay);
printf("video cache %d audio cache %d\n", q_video.size/1024, q_audio.size/1024);
#endif
 
main_render->draw(main_render, &vst->vframe[vst->vfx].picture);
if(main_render->win->win_state != FULLSCREEN)
{
prg->current = vframe->pkt_pts * 1000;
lvl->current = vframe->index & 1 ? sound_level_1 : sound_level_0;
prg->current = vst->vframe[vst->vfx].pts*1000;
// printf("current %f\n", prg->current);
lvl->current = vst->vfx & 1 ? sound_level_1 : sound_level_0;
 
send_message(&prg->ctrl, PRG_PROGRESS, 0, 0);
 
461,15 → 443,72
}
 
frames_count--;
vframe->ready = 0;
vst->vframe[vst->vfx].ready = 0;
vst->vfx = (vst->vfx + 1) & 3;
}
else delay(1);
 
mutex_lock(&vst->input_lock);
list_add_tail(&vframe->list, &vst->input_list);
mutex_unlock(&vst->input_lock);
#else
 
if(vst->vframe[vst->vfx].ready == 1 )
{
ctime = get_master_clock();
fdelay = (vst->vframe[vst->vfx].pts - ctime);
 
// printf("pts %f time %f delay %f\n",
// frames[vfx].pts, ctime, fdelay);
 
if(fdelay < 0.0 )
{
int next_vfx;
fdelay = 0;
next_vfx = (vst->vfx+1) & 3;
if( vst->vframe[next_vfx].ready == 1 )
{
if(vst->vframe[next_vfx].pts <= ctime)
{
vst->vframe[vst->vfx].ready = 0; // skip this frame
vst->vfx = (vst->vfx + 1) & 3;
}
else
{
if( (vst->vframe[next_vfx].pts - ctime) <
( ctime - vst->vframe[vst->vfx].pts) )
{
vst->vframe[vst->vfx].ready = 0; // skip this frame
vst->vfx = (vst->vfx + 1) & 3;
fdelay = (vst->vframe[next_vfx].pts - ctime);
}
}
};
};
 
if(fdelay > 10.0)
{
int val = fdelay;
printf("pts %f time %f delay %d\n",
vst->vrame[vst->vfx].pts, ctime, val);
delay(val/10);
};
 
ctime = get_master_clock();
fdelay = (vst->vrame[vst->vfx].pts - ctime);
 
printf("pts %f time %f delay %f\n",
vst->vrame[vst->vfx].pts, ctime, fdelay);
 
main_render->draw(main_render, &vst->vrame[vfx].picture);
main_render->win->panel.prg->current = vst->vrame[vfx].pts;
// send_message(&render->win->panel.prg->ctrl, MSG_PAINT, 0, 0);
vst->vrame[vst->vfx].ready = 0;
vst->vfx = (vst->vfx + 1) & 3;
}
else yield();
#endif
 
}
 
 
extern char *movie_file;
 
int video_thread(void *param)
480,7 → 519,7
init_winlib();
 
MainWindow = create_window(movie_file,0,
10,10,vst->vCtx->width,vst->vCtx->height+CAPTION_HEIGHT+PANEL_HEIGHT,MainWindowProc);
10,10,width,height+CAPTION_HEIGHT+PANEL_HEIGHT,MainWindowProc);
 
MainWindow->panel.prg->max = stream_duration;
 
489,7 → 528,6
main_render = create_render(vst, MainWindow, HW_TEX_BLIT|HW_BIT_BLIT);
if( main_render == NULL)
{
mutex_unlock(&vst->decoder_lock);
printf("Cannot create render\n\r");
return 0;
};
499,8 → 537,6
render_draw_client(main_render);
player_state = PLAY;
 
mutex_unlock(&vst->decoder_lock);
 
run_render(MainWindow, main_render);
 
__sync_and_and_fetch(&threads_running,~VIDEO_THREAD);
512,8 → 548,8
};
 
 
void draw_hw_picture(render_t *render, vframe_t *vframe);
void draw_sw_picture(render_t *render, vframe_t *vframe);
void draw_hw_picture(render_t *render, AVPicture *picture);
void draw_sw_picture(render_t *render, AVPicture *picture);
 
render_t *create_render(vst_t *vst, window_t *win, uint32_t flags)
{
747,26 → 783,8
return;
};
 
static void render_hw_planar(render_t *render, vframe_t *vframe)
void draw_hw_picture(render_t *render, AVPicture *picture)
{
vst_t *vst = render->vst;
planar_t *planar = vframe->planar;
 
if(vframe->is_hw_pic != 0 && vframe->format != AV_PIX_FMT_NONE)
{
mutex_lock(&render->vst->gpu_lock);
 
pxBlitPlanar(planar, render->rcvideo.l,
CAPTION_HEIGHT+render->rcvideo.t,
render->rcvideo.r, render->rcvideo.b,0,0);
mutex_unlock(&render->vst->gpu_lock);
 
}
};
 
void draw_hw_picture(render_t *render, vframe_t *vframe)
{
AVPicture *picture;
int dst_width;
int dst_height;
bitmap_t *bitmap;
776,8 → 794,6
int linesize[4];
enum AVPixelFormat format;
 
vst_t *vst = render->vst;
 
if(render->win->win_state == MINIMIZED ||
render->win->win_state == ROLLED)
return;
793,16 → 809,9
dst_height = render->rcvideo.b;
};
 
if(vst->hwdec)
{
render_hw_planar(render, vframe);
return;
};
 
picture = &vframe->picture;
 
format = render->vst->hwdec == 0 ? render->ctx_format : AV_PIX_FMT_BGRA;
cvt_ctx = sws_getCachedContext(cvt_ctx, render->ctx_width, render->ctx_height, format,
format = render->vst->hwdec == 0 ? render->ctx_format : AV_PIX_FMT_BGRA;
cvt_ctx = sws_getCachedContext(cvt_ctx,
render->ctx_width, render->ctx_height, format,
dst_width, dst_height, AV_PIX_FMT_BGRA,
SWS_FAST_BILINEAR, NULL, NULL, NULL);
if(cvt_ctx == NULL)
820,6 → 829,7
return ;
}
 
// printf("sws_getCachedContext\n");
data[0] = bitmap_data;
data[1] = bitmap_data+1;
data[2] = bitmap_data+2;
864,9 → 874,8
render->target&= 1;
}
 
void draw_sw_picture(render_t *render, vframe_t *vframe)
void draw_sw_picture(render_t *render, AVPicture *picture)
{
AVPicture *picture;
uint8_t *bitmap_data;
uint32_t bitmap_pitch;
uint8_t *data[4];
876,8 → 885,6
render->win->win_state == ROLLED)
return;
 
picture = &vframe->picture;
 
cvt_ctx = sws_getCachedContext(cvt_ctx,
render->ctx_width, render->ctx_height,
render->ctx_format,
930,9 → 937,9
 
if(player_state == PAUSE)
{
// if(vst->vframe[vst->vfx].ready == 1 )
// main_render->draw(main_render, &vst->vframe[vst->vfx].picture);
// else
if(vst->vframe[vst->vfx].ready == 1 )
main_render->draw(main_render, &vst->vframe[vst->vfx].picture);
else
draw_bar(0, y, render->win_width,
render->rcvideo.b, 0);
}
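On the render_time() logic in both revisions above: the renderer paces output by comparing the next frame's timestamp with get_master_clock() (both in milliseconds) and sleeping roughly the difference before drawing; delay() appears to count 10 ms ticks, which is why the code divides the millisecond delay by 10. A tiny standalone sketch of that pacing decision, with usleep() standing in for delay():

#include <stdio.h>
#include <unistd.h>

/* frame_pts_ms: presentation time of the next decoded frame, in ms
   clock_ms:     current master clock, in ms                         */
static void pace_frame(double frame_pts_ms, double clock_ms)
{
    double fdelay = frame_pts_ms - clock_ms;    /* how early the frame is */

    if (fdelay > 15.0)
        usleep((useconds_t)(fdelay * 1000.0));  /* wait until it is due   */
    else if (fdelay < 0.0)
        printf("frame is %.1f ms late\n", -fdelay);

    /* draw the frame here, then clear its ready flag and advance the index */
}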
/contrib/media/fplay/audio.c
417,3 → 417,4
return -1;
 
};
 
/contrib/media/fplay/winlib/window.c
35,9 → 35,6
 
 
void adjust_frame(window_t *win);
void blit_panel(panel_t *panel);
void update_panel_size(window_t *win);
void update_caption_size(window_t *win);
 
//#include "timer.h"
ctrl_t *win_get_child(window_t *win, int x, int y)
/contrib/media/fplay/winlib/link.h
38,7 → 38,7
link_initialize(link);
}
 
static inline int llist_empty(link_t *head)
static inline int list_empty(link_t *head)
{
return head->next == head ? 1 : 0;
}