Subversion Repositories Kolibri OS

Compare Revisions

Rev 2348 → Rev 2349

/programs/media/Fplay/audio.c
14,33 → 14,13
 
static SNDBUF hBuff;
 
extern uint8_t *decoder_buffer;
 
extern volatile uint32_t status;
 
void audio_thread(void *param);
extern volatile uint32_t driver_lock;
 
void spinlock_lock(volatile uint32_t *val)
{
uint32_t tmp;
 
__asm__ __volatile__ (
"0:\n\t"
"mov %0, %1\n\t"
"testl %1, %1\n\t"
"jz 1f\n\t"
 
"movl $68, %%eax\n\t"
"movl $1, %%ebx\n\t"
"int $0x40\n\t"
"jmp 0b\n\t"
"1:\n\t"
"incl %1\n\t"
"xchgl %0, %1\n\t"
"testl %1, %1\n\t"
"jnz 0b\n"
: "+m" (*val), "=&r"(tmp)
::"eax","ebx" );
}
 
static int snd_format;
int sample_rate;
 
50,11 → 30,17
int version =-1;
char *errstr;
 
mutex_lock(&driver_lock);
 
if((err = InitSound(&version)) !=0 )
{
mutex_unlock(&driver_lock);
errstr = "Sound service not installed\n\r";
goto exit_whith_error;
}
};
 
mutex_unlock(&driver_lock);
 
printf("sound version 0x%x\n", version);
 
if( (SOUND_VERSION>(version&0xFFFF)) ||
66,8 → 52,6
 
snd_format = format;
 
asm volatile ( "xchgw %bx, %bx");
 
create_thread(audio_thread, 0, 163840);
 
return 1;
89,9 → 73,52
return tstamp - audio_delta;
};
 
int decode_audio(AVCodecContext *ctx, queue_t *qa)
{
AVPacket pkt;
AVPacket pkt_tmp;
 
void audio_thread(void *param)
uint8_t *audio_data;
int audio_size;
int len;
int data_size=0;
 
if( astream.count > AVCODEC_MAX_AUDIO_FRAME_SIZE*7)
return 1;
 
if( get_packet(qa, &pkt) == 0 )
return 0;
 
// __asm__("int3");
 
pkt_tmp = pkt;
 
while(pkt_tmp.size > 0)
{
data_size = AVCODEC_MAX_AUDIO_FRAME_SIZE;
 
len = avcodec_decode_audio3(ctx,(int16_t*)decoder_buffer,
&data_size, &pkt_tmp);
 
if(len >= 0)
{
pkt_tmp.data += len;
pkt_tmp.size -= len;
 
mutex_lock(&astream.lock);
memcpy(astream.buffer+astream.count, decoder_buffer, data_size);
astream.count += data_size;
mutex_unlock(&astream.lock);
}
else pkt_tmp.size = 0;
}
av_free_packet(&pkt);
return 1;
};
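
The decode_audio()/audio_thread() pair above shares PCM through the flat astream buffer: the decoder appends bytes at astream.count (and refuses to push once more than AVCODEC_MAX_AUDIO_FRAME_SIZE*7 bytes are queued), while the playback thread hands the first buffsize bytes to the sound driver and slides the remainder down. A minimal sketch of that hand-off; astream_push/astream_pop are illustrative names, not Fplay functions, and memmove is used here because the tail can overlap the destination:

/* Sketch only: producer/consumer hand-off over astream (names are illustrative). */
static void astream_push(const uint8_t *pcm, int size)        /* decode_audio() side */
{
    mutex_lock(&astream.lock);
    memcpy(astream.buffer + astream.count, pcm, size);         /* append decoded PCM */
    astream.count += size;
    mutex_unlock(&astream.lock);
}

static void astream_pop(SNDBUF hbuff, int buffsize)            /* audio_thread() side */
{
    mutex_lock(&astream.lock);
    SetBuffer(hbuff, astream.buffer, 0, buffsize);              /* hand bytes to the driver */
    astream.count -= buffsize;
    if (astream.count)                                          /* slide the unplayed tail down */
        memmove(astream.buffer, astream.buffer + buffsize, astream.count);
    mutex_unlock(&astream.lock);
}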
 
 
int audio_thread(void *param)
{
SND_EVENT evnt;
int buffsize;
int samples;
121,13 → 148,13
(status != 0) )
yield();
 
spinlock_lock(&astream.lock);
mutex_lock(&astream.lock);
{
SetBuffer(hBuff, astream.buffer, 0, buffsize);
astream.count -= buffsize;
if(astream.count)
memcpy(astream.buffer, astream.buffer+buffsize, astream.count);
spinlock_unlock(&astream.lock);
mutex_unlock(&astream.lock);
};
 
if((err = PlayBuffer(hBuff, 0)) !=0 )
163,13 → 190,13
 
offset = evnt.offset;
 
spinlock_lock(&astream.lock);
mutex_lock(&astream.lock);
{
SetBuffer(hBuff, astream.buffer, offset, buffsize);
astream.count -= buffsize;
if(astream.count)
memcpy(astream.buffer, astream.buffer+buffsize, astream.count);
spinlock_unlock(&astream.lock);
mutex_unlock(&astream.lock);
};
break;
};
221,20 → 248,23
if((too_late == 1) || (status == 0))
continue;
 
spinlock_lock(&astream.lock);
mutex_lock(&astream.lock);
SetBuffer(hBuff, astream.buffer, offset, buffsize);
astream.count -= buffsize;
if(astream.count)
memcpy(astream.buffer, astream.buffer+buffsize, astream.count);
spinlock_unlock(&astream.lock);
mutex_unlock(&astream.lock);
}
 
return;
StopBuffer(hBuff);
DestroyBuffer(hBuff);
 
return 0;
 
exit_whith_error:
 
printf(errstr);
return ;
return -1;
 
};
 
/programs/media/Fplay/fplay.c
9,6 → 9,8
#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <ctype.h>
 
#include "sound.h"
#include "fplay.h"
 
29,10 → 31,14
 
int have_sound = 0;
 
 
uint8_t *decoder_buffer;
extern int sample_rate;
char *movie_file;
 
 
queue_t q_video;
queue_t q_audio;
 
int main( int argc, char *argv[])
{
int i;
42,6 → 48,7
return -1;
}
 
movie_file = argv[1];
/* register all codecs, demux and protocols */
 
avcodec_register_all();
48,18 → 55,16
avdevice_register_all();
av_register_all();
 
 
// Open video file
if(av_open_input_file(&pFormatCtx, argv[1], NULL, 0, NULL)!=0)
if( avformat_open_input(&pFormatCtx, argv[1], NULL, NULL) < 0)
{
printf("Cannot open file %s\n\r", argv[1]);
return -1; // Couldn't open file
};
 
// __asm__ __volatile__("int3");
printf("%s\n\r", __FUNCTION__);
 
// Retrieve stream information
if(av_find_stream_info(pFormatCtx)<0)
if(avformat_find_stream_info(pFormatCtx, NULL)<0)
{
printf("Cannot find streams\n\r");
return -1;
74,7 → 79,7
audioStream=-1;
for(i=0; i < pFormatCtx->nb_streams; i++)
{
if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO
if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO
&& videoStream < 0)
{
videoStream=i;
81,7 → 86,7
video_time_base = pFormatCtx->streams[i]->time_base;
 
}
if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_AUDIO &&
if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO &&
audioStream < 0)
{
audioStream=i;
94,22 → 99,24
return -1; // Didn't find a video stream
}
 
// __asm__ __volatile__("int3");
 
// Get a pointer to the codec context for the video stream
pCodecCtx=pFormatCtx->streams[videoStream]->codec;
 
aCodecCtx=pFormatCtx->streams[audioStream]->codec;
 
// Find the decoder for the video stream
pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec==NULL) {
printf("Unsupported video codec!\n");
printf("Unsupported codec with id %d for input stream %d\n",
pCodecCtx->codec_id, videoStream);
return -1; // Codec not found
}
// Open codec
if(avcodec_open(pCodecCtx, pCodec) < 0)
 
if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
{
printf("Cannot open video codec\n\r");
printf("Error while opening codec for input stream %d\n",
videoStream);
return -1; // Could not open codec
};
 
122,7 → 129,7
 
if(aCodec)
{
if(avcodec_open(aCodecCtx, aCodec) >= 0 )
if(avcodec_open2(aCodecCtx, aCodec, NULL) >= 0 )
{
WAVEHEADER whdr;
int fmt;
143,7 → 150,7
 
if( init_audio(fmt) )
{
decoder_buffer = (uint8_t*)av_mallocz(AVCODEC_MAX_AUDIO_FRAME_SIZE);
decoder_buffer = (uint8_t*)av_mallocz(AVCODEC_MAX_AUDIO_FRAME_SIZE*2+64);
if( decoder_buffer != NULL )
{
astream.lock = 0;
167,20 → 174,14
if( !init_video(pCodecCtx))
return 0;
 
// Assign appropriate parts of buffer to image planes in pFrameRGB
// Note that pFrameRGB is an AVFrame, but AVFrame is a superset
// of AVPicture
 
// __asm__ __volatile__("int3");
 
decoder();
 
status = 0;
 
 
// Free the YUV frame
av_free(pFrame);
 
 
//__asm__ __volatile__("int3");
 
// Close the codec
196,61 → 197,59
 
void decoder()
{
int eof = 0;
AVPacket packet;
int ret;
 
while(av_read_frame(pFormatCtx, &packet) >=0 )
while( status != 0 && !eof)
{
int err;
 
// __asm__ __volatile__("int3");
 
if(q_video.size+q_audio.size < 16*1024*1024)
{
err = av_read_frame(pFormatCtx, &packet);
if( err < 0)
{
eof = 1;
if (err != AVERROR_EOF)
printf("av_read_frame: error %x\n", err);
continue;
}
if(packet.stream_index==videoStream)
{
decode_video(pCodecCtx, &packet);
put_packet(&q_video, &packet);
}
else if( (packet.stream_index == audioStream) &&
(have_sound != 0) )
{
uint8_t *audio_data;
int audio_size;
int len;
int data_size=0;
 
audio_data = packet.data;
audio_size = packet.size;
 
while(audio_size > 0)
put_packet(&q_audio, &packet);
}
else
{
data_size = AVCODEC_MAX_AUDIO_FRAME_SIZE;
 
len = avcodec_decode_audio2(aCodecCtx,(int16_t*)decoder_buffer,
&data_size, audio_data, audio_size);
 
if(len >= 0)
{
audio_data += len;
audio_size -= len;
 
while((astream.count + data_size) >
AVCODEC_MAX_AUDIO_FRAME_SIZE*8)
{
yield();
}
spinlock_lock(&astream.lock);
memcpy(astream.buffer+astream.count, decoder_buffer, data_size);
astream.count += data_size;
spinlock_unlock(&astream.lock);
}
else audio_size = 0;
}
}
// Free the packet that was allocated by av_read_frame
av_free_packet(&packet);
};
decode_video(pCodecCtx, &q_video);
decode_audio(aCodecCtx, &q_audio);
continue;
};
decode_video(pCodecCtx, &q_video);
decode_audio(aCodecCtx, &q_audio);
delay(1);
};
 
ret = 1;
 
__int64 _lseeki64(int fd, __int64 offset, int origin )
while(status != 0 && ret)
{
int off = offset;
return lseek(fd, off, origin);
}
ret = decode_video(pCodecCtx, &q_video);
ret |= decode_audio(aCodecCtx, &q_audio);
delay(1);
};
delay(50);
status = 0;
printf("status = 0\n");
delay(200);
};
 
 
 
/programs/media/Fplay/fplay.h
2,8 → 2,50
#define BLACK_MAGIC_SOUND
#define BLACK_MAGIC_VIDEO
 
typedef unsigned int color_t;
typedef unsigned int count_t;
 
typedef struct
{
int left;
int top;
int right;
int bottom;
}rect_t;
 
typedef struct
{
uint32_t width;
uint32_t height;
uint32_t pitch;
uint32_t handle;
uint8_t *data;
}bitmap_t;
 
typedef struct render render_t;
 
struct render
{
uint32_t caps;
uint32_t ctx_width;
uint32_t ctx_height;
uint32_t win_width;
uint32_t win_height;
 
bitmap_t bitmap[4];
uint32_t ctx_format;
int target;
enum{
EMPTY, INIT }state;
enum{
NORMAL, MINIMIZED, ROLLED
}win_state;
 
void (*draw)(render_t *render, AVPicture *picture);
};
 
typedef struct
{
volatile uint32_t lock;
char *buffer;
volatile uint32_t count;
19,20 → 61,54
unsigned int unused[2];
}SND_EVENT;
 
typedef struct
{
unsigned handle;
unsigned io_code;
void *input;
int inp_size;
void *output;
int out_size;
}ioctl_t;
 
typedef struct {
AVPacketList *first_pkt;
AVPacketList *last_pkt;
int size;
int count;
volatile uint32_t lock;
} queue_t;
 
int put_packet(queue_t *q, AVPacket *pkt);
int get_packet(queue_t *q, AVPacket *pkt);
 
 
extern astream_t astream;
extern AVRational video_time_base;
 
render_t *create_render(uint32_t width, uint32_t height,
uint32_t ctx_format, uint32_t flags);
 
int init_render(render_t *render, int width, int height);
void render_adjust_size(render_t *render);
 
int init_audio(int format);
int audio_thread(void *param);
 
int init_video(AVCodecContext *ctx);
int decode_video(AVCodecContext *ctx, AVPacket *pkt);
int video_thread(void *param);
 
int decode_video(AVCodecContext *ctx, queue_t *qv);
int decode_audio(AVCodecContext *ctx, queue_t *qa);
 
double get_master_clock();
 
 
int create_thread(void (*proc)(void *param), void *param, int stack_size);
int create_thread(int (*proc)(void *param), void *param, int stack_size);
 
void spinlock_lock(volatile uint32_t *val);
void mutex_lock(volatile uint32_t *val);
 
static inline void spinlock_unlock(volatile uint32_t *val)
static inline void mutex_unlock(volatile uint32_t *val)
{
*val = 0;
}
77,3 → 153,59
"int $0x40"
::"a"(5), "b"(time));
};
 
static inline void draw_bitmap(void *bitmap, int x, int y, int w, int h)
{
__asm__ __volatile__(
"int $0x40"
::"a"(7), "b"(bitmap),
"c"((w << 16) | h),
"d"((x << 16) | y));
}
 
static inline void BeginDraw(void)
{
__asm__ __volatile__(
"int $0x40" ::"a"(12),"b"(1));
};
 
static inline void EndDraw(void)
{
__asm__ __volatile__(
"int $0x40" ::"a"(12),"b"(2));
};
 
 
static inline void DrawWindow(int x, int y, int w, int h, char *name,
color_t workcolor, uint32_t style)
{
 
__asm__ __volatile__(
"int $0x40"
::"a"(0),
"b"((x << 16) | (w & 0xFFFF)),
"c"((y << 16) | (h & 0xFFFF)),
"d"((style << 24) | (workcolor & 0xFFFFFF)),
"D"(name));
};
 
static inline void get_proc_info(char *info)
{
__asm__ __volatile__(
"int $0x40"
::"a"(9), "b"(info), "c"(-1)
:"memory");
}
 
#define HW_BIT_BLIT (1<<0) /* BGRX blitter */
#define HW_TEX_BLIT (1<<1) /* stretch blit */
#define HW_VID_BLIT (1<<2) /* planar and packed video */
 
uint32_t InitPixlib(uint32_t flags);
 
int create_bitmap(bitmap_t *bitmap);
int resize_bitmap(bitmap_t *bitmap);
int blit_bitmap(bitmap_t *bitmap, int dst_x, int dst_y,
int w, int h);
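
The three pixlib entry points declared above are the whole hardware-blit interface the new render code in video.c relies on: query capabilities once, create one bitmap per target, write BGRX pixels into it, then blit it into the window client area. A condensed usage sketch; the 640×480 size is illustrative, and the 5,22 offset simply matches the client-area offset video.c uses:

/* Sketch only: intended call sequence for the pixlib interface declared above. */
bitmap_t bm = { .width = 640, .height = 480 };

uint32_t caps = InitPixlib(HW_BIT_BLIT | HW_TEX_BLIT);   /* 0 => software fallback only */
if (caps == 0)
    printf("no 2D acceleration, using system blitter\n");

if (create_bitmap(&bm) == 0)          /* fills bm.handle, bm.pitch and bm.data */
{
    /* ... write bm.height lines of BGRX pixels, bm.pitch bytes apart, into bm.data ... */
    blit_bitmap(&bm, 5, 22, bm.width, bm.height);
}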
 
 
/programs/media/Fplay/pixlib2.c
0,0 → 1,327
#include <stdint.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <stdio.h>
#include <fcntl.h>
#include "fplay.h"
 
#define DISPLAY_VERSION 0x0200 /* 2.00 */
 
#define SRV_GETVERSION 0
#define SRV_GET_CAPS 3
 
#define SRV_CREATE_SURFACE 10
#define SRV_BLIT_VIDEO 20
 
#define __ALIGN_MASK(x,mask) (((x)+(mask))&~(mask))
#define ALIGN(x,a) __ALIGN_MASK(x,(typeof(x))(a)-1)
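
ALIGN() rounds x up to the next multiple of the power-of-two a; pixlib2.c uses it below to pad the software framebuffer pitch to 16 bytes. A quick worked example with illustrative line widths:

/* ALIGN(x, a) == (x + a - 1) & ~(a - 1), for power-of-two a:
 *   719-pixel BGRX line: ALIGN(719*4, 16) = ALIGN(2876, 16) = 2880
 *   720-pixel BGRX line: ALIGN(720*4, 16) = ALIGN(2880, 16) = 2880 (already aligned)
 */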
 
//void InitPixlib(uint32_t flags);
 
static uint32_t service;
static uint32_t blit_caps;
 
typedef struct
{
uint32_t idx;
union
{
uint32_t opt[2];
struct {
uint32_t max_tex_width;
uint32_t max_tex_height;
}cap1;
};
}hwcaps_t;
 
static uint32_t get_service(char *name)
{
uint32_t retval = 0;
asm volatile ("int $0x40"
:"=a"(retval)
:"a"(68),"b"(16),"c"(name)
:"memory");
 
return retval;
};
 
static int call_service(ioctl_t *io)
{
int retval;
 
asm volatile("int $0x40"
:"=a"(retval)
:"a"(68),"b"(17),"c"(io)
:"memory","cc");
 
return retval;
};
 
#define BUFFER_SIZE(n) ((n)*sizeof(uint32_t))
 
 
 
uint32_t InitPixlib(uint32_t caps)
{
uint32_t api_version;
hwcaps_t hwcaps;
ioctl_t io;
 
// __asm__ __volatile__("int3");
 
service = get_service("DISPLAY");
if(service == 0)
goto fail;
 
io.handle = service;
io.io_code = SRV_GETVERSION;
io.input = NULL;
io.inp_size = 0;
io.output = &api_version;
io.out_size = BUFFER_SIZE(1);
 
if (call_service(&io)!=0)
goto fail;
 
if( (DISPLAY_VERSION > (api_version & 0xFFFF)) ||
(DISPLAY_VERSION < (api_version >> 16)))
goto fail;
 
/*
* Let's see what this service can do
*/
hwcaps.idx = 0;
 
io.handle = service;
io.io_code = SRV_GET_CAPS;
io.input = &hwcaps;
io.inp_size = sizeof(hwcaps);
io.output = NULL;
io.out_size = 0;
 
if (call_service(&io)!=0)
goto fail;
 
blit_caps = hwcaps.opt[0];
 
printf("\nDISPLAY service handle %x\n", service);
 
if( blit_caps )
printf("service caps %s%s%s\n",
(blit_caps & HW_BIT_BLIT) != 0 ?"HW_BIT_BLIT ":"",
(blit_caps & HW_TEX_BLIT) != 0 ?"HW_TEX_BLIT ":"",
(blit_caps & HW_VID_BLIT) != 0 ?"HW_VID_BLIT ":"");
 
blit_caps&= caps;
return blit_caps;
 
fail:
service = 0;
return 0;
};
 
 
int create_bitmap(bitmap_t *bitmap)
{
// __asm__ __volatile__("int3");
 
if( blit_caps & HW_BIT_BLIT )
{
struct __attribute__((packed)) /* SRV_CREATE_SURFACE */
{
uint32_t handle; // ignored
void *data; // ignored
 
uint32_t width;
uint32_t height;
uint32_t pitch; // ignored
 
uint32_t max_width;
uint32_t max_height;
uint32_t format; // reserved mbz
}io_10;
 
ioctl_t io;
int err;
 
// printf("create bitmap %d x %d\n",
// bitmap->width, bitmap->height);
 
io_10.width = bitmap->width;
io_10.height = bitmap->height;
io_10.max_width = 0;
io_10.max_height = 0;
io_10.format = 0;
 
io.handle = service;
io.io_code = SRV_CREATE_SURFACE;
io.input = &io_10;
io.inp_size = BUFFER_SIZE(8);
io.output = NULL;
io.out_size = 0;
 
err = call_service(&io);
if(err==0)
{
bitmap->handle = io_10.handle;
bitmap->pitch = io_10.pitch;
bitmap->data = io_10.data;
// printf("Create hardware surface %x pitch %d, buffer %x\n",
// bitmap->handle, bitmap->pitch, bitmap->data);
return 0;
};
return err;
};
 
uint32_t size;
uint32_t pitch;
uint8_t *buffer;
 
pitch = ALIGN(bitmap->width*4, 16);
size = pitch * bitmap->height;
 
buffer = (uint8_t*)user_alloc(size);
if( buffer )
{
bitmap->handle = 0;
bitmap->pitch = pitch;
bitmap->data = buffer;
return 0;
};
 
printf("Cannot alloc frame buffer\n\r");
 
return -1;
};
 
struct blit_call
{
int dstx;
int dsty;
int w;
int h;
 
int srcx;
int srcy;
int srcw;
int srch;
 
unsigned char *bitmap;
int stride;
};
 
int blit_bitmap(bitmap_t *bitmap, int dst_x, int dst_y,
int w, int h)
{
 
if( blit_caps & HW_BIT_BLIT )
{
 
/*
* Now you will experience the full power of the dark side...
*/
 
struct __attribute__((packed))
{
uint32_t handle;
int dst_x;
int dst_y;
int src_x;
int src_y;
uint32_t w;
uint32_t h;
}io_20;
 
ioctl_t io;
int err;
 
io_20.handle = bitmap->handle;
io_20.dst_x = dst_x;
io_20.dst_y = dst_y;
io_20.src_x = 0;
io_20.src_y = 0;
io_20.w = w;
io_20.h = h;
 
io.handle = service;
io.io_code = SRV_BLIT_VIDEO;
io.input = &io_20;
io.inp_size = BUFFER_SIZE(7);
io.output = NULL;
io.out_size = 0;
 
// printf("do blit %x pitch %d\n",bitmap->handle,
// bitmap->pitch);
err = call_service(&io);
if (err == 0)
{
//bitmap->data = NULL; Not now, Serge
// printf("blit done\n");
delay(1);
return 0;
};
return err;
};
 
volatile struct blit_call bc;
 
bc.dstx = dst_x;
bc.dsty = dst_y;
bc.w = w;
bc.h = h;
bc.srcx = 0;
bc.srcy = 0;
bc.srcw = w;
bc.srch = h;
bc.stride = bitmap->pitch;
bc.bitmap = bitmap->data;
 
__asm__ __volatile__(
"int $0x40"
::"a"(73),"b"(0),"c"(&bc));
};
 
 
static inline void* user_realloc(void *mem, size_t size)
{
void *val;
__asm__ __volatile__(
"int $0x40"
:"=eax"(val)
:"a"(68),"b"(12),"c"(size),"d"(mem)
:"memory");
 
return val;
}
 
int resize_bitmap(bitmap_t *bitmap)
{
// __asm__ __volatile__("int3");
 
if( blit_caps & HW_BIT_BLIT )
{
/* work in progress */
};
 
uint32_t size;
uint32_t pitch;
uint8_t *buffer;
 
pitch = ALIGN(bitmap->width*4, 16);
size = pitch * bitmap->height;
 
buffer = (uint8_t*)user_realloc(bitmap->data, size);
if( buffer )
{
bitmap->handle = 0;
bitmap->pitch = pitch;
bitmap->data = buffer;
return 0;
};
 
printf("Cannot realloc frame buffer\n\r");
 
return -1;
};
 
/programs/media/Fplay/sound.h
74,27 → 74,9
#define SND_SETTIMEBASE 18
#define SND_GETTIMESTAMP 19
 
 
#define PLAY_SYNC 0x80000000
 
typedef struct
{
unsigned int riff_id;
unsigned int riff_size;
unsigned int riff_format;
 
unsigned int fmt_id;
unsigned int fmt_size;
 
unsigned short int wFormatTag;
unsigned short int nChannels;
unsigned int nSamplesPerSec;
unsigned int nAvgBytesPerSec;
unsigned short int nBlockAlign;
unsigned short int wBitsPerSample;
unsigned int data_id;
unsigned int data_size;
} WAVEHEADER;
 
typedef unsigned int SNDBUF;
 
int _stdcall InitSound(int *version);
127,7 → 109,29
 
int _stdcall SetTimeBase(SNDBUF hBuff, double base);
int _stdcall GetTimeStamp(SNDBUF hBuff, double *stamp);
int _stdcall GetDevTime(int *stamp);
 
 
typedef struct
{
unsigned int riff_id;
unsigned int riff_size;
unsigned int riff_format;
 
unsigned int fmt_id;
unsigned int fmt_size;
 
unsigned short int wFormatTag;
unsigned short int nChannels;
unsigned int nSamplesPerSec;
unsigned int nAvgBytesPerSec;
unsigned short int nBlockAlign;
unsigned short int wBitsPerSample;
unsigned int data_id;
unsigned int data_size;
} WAVEHEADER;
 
 
unsigned int _stdcall test_wav(WAVEHEADER *hdr);
 
#ifdef __cplusplus
/programs/media/Fplay/utils.c
0,0 → 1,98
 
#include <stdint.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <stdio.h>
#include <fcntl.h>
#include "fplay.h"
 
extern uint32_t hw2d ;
 
void mutex_lock(volatile uint32_t *val)
{
uint32_t tmp;
 
__asm__ __volatile__ (
"0:\n\t"
"mov %0, %1\n\t"
"testl %1, %1\n\t"
"jz 1f\n\t"
 
"movl $68, %%eax\n\t"
"movl $1, %%ebx\n\t"
"int $0x40\n\t"
"jmp 0b\n\t"
"1:\n\t"
"incl %1\n\t"
"xchgl %0, %1\n\t"
"testl %1, %1\n\t"
"jnz 0b\n"
: "+m" (*val), "=&r"(tmp)
::"eax","ebx" );
}
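
mutex_lock() above is the spin-and-yield lock from audio.c, now shared project-wide: it atomically exchanges 1 into the lock word and, while the word is already set, issues KolibriOS system function 68/1 (yield) through int 0x40. A rough C-level sketch of the same idea, using a GCC atomic builtin instead of the hand-written xchgl and the yield() helper from fplay.h; it skips the read-only pre-check the assembly performs:

/* Sketch only: spin-and-yield lock equivalent in spirit to mutex_lock() above. */
static void mutex_lock_sketch(volatile uint32_t *val)
{
    /* __sync_lock_test_and_set() stores 1 and returns the previous value,
     * like the xchgl in the inline assembly. */
    while (__sync_lock_test_and_set(val, 1) != 0)
        yield();              /* lock is held: give up the CPU (int 0x40, eax=68, ebx=1) */
}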
 
 
int64_t _lseeki64(int fd, int64_t offset, int origin )
{
int off = offset;
return lseek(fd, off, origin);
}
 
 
int put_packet(queue_t *q, AVPacket *pkt)
{
AVPacketList *q_pkt;
 
/* duplicate the packet */
if (av_dup_packet(pkt) < 0)
return -1;
 
q_pkt = av_malloc(sizeof(AVPacketList));
if (!q_pkt)
return -1;
 
q_pkt->pkt = *pkt;
q_pkt->next = NULL;
 
mutex_lock(&q->lock);
 
if (!q->last_pkt)
q->first_pkt = q_pkt;
else
q->last_pkt->next = q_pkt;
 
q->last_pkt = q_pkt;
q->size += q_pkt->pkt.size + sizeof(*q_pkt);
q->count++;
 
mutex_unlock(&q->lock);
 
return 0;
}
 
int get_packet(queue_t *q, AVPacket *pkt)
{
AVPacketList *q_pkt;
int ret = 0;
 
mutex_lock(&q->lock);
 
q_pkt = q->first_pkt;
if (q_pkt)
{
q->first_pkt = q_pkt->next;
if (!q->first_pkt)
q->last_pkt = NULL;
 
q->count--;
q->size -= q_pkt->pkt.size + sizeof(*q_pkt);
*pkt = q_pkt->pkt;
av_free(q_pkt);
ret = 1;
};
 
mutex_unlock(&q->lock);
 
return ret;
}
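
put_packet()/get_packet() implement the FIFO packet queues this revision introduces: the demuxer loop in fplay.c appends compressed packets, the audio and video decoders drain them, and q->size lets decoder() cap the backlog at 16 MiB. A minimal single-threaded usage sketch; the local queue variable and the loop are illustrative, not Fplay code:

/* Sketch only: typical producer/consumer use of queue_t. */
queue_t q = {0};                      /* all fields, including q.lock, start at zero */
AVPacket in, out;

if (av_read_frame(pFormatCtx, &in) >= 0)
    put_packet(&q, &in);              /* duplicates the packet and links it at the tail */

while (get_packet(&q, &out))          /* returns 1 while packets remain, 0 when empty */
{
    /* ... feed 'out' to the decoder ... */
    av_free_packet(&out);             /* the consumer owns the packet after get_packet() */
}
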
/programs/media/Fplay/video.c
3,35 → 3,26
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
 
#include "sound.h"
#include "fplay.h"
 
void video_thread(void *param);
 
void draw_bitmap(void *bitmap, int x, int y, int w, int h)
{
__asm__ __volatile__(
"int $0x40"
::"a"(7), "b"(bitmap),
"c"((w << 16) | h),
"d"((x << 16) | y));
}
 
typedef struct
{
AVFrame *frame;
uint8_t *buffer;
AVPicture picture;
double pts;
volatile int ready;
}vframe_t;
 
vframe_t frames[8];
vframe_t frames[4];
 
struct SwsContext *cvt_ctx;
struct SwsContext *cvt_ctx = NULL;
 
int vfx = 0;
int dfx = 0;
 
render_t *render;
 
int width;
int height;
 
38,9 → 29,13
AVRational video_time_base;
AVFrame *Frame;
 
volatile uint32_t driver_lock;
 
void get_client_rect(rect_t *rc);
 
 
int init_video(AVCodecContext *ctx)
{
uint32_t size;
int i;
 
width = ctx->width;
48,141 → 43,112
 
printf("w = %d h = %d\n\r", width, height);
 
Frame = avcodec_alloc_frame();
if ( Frame == NULL )
// __asm__ __volatile__("int3");
 
render = create_render(ctx->width, ctx->height,
ctx->pix_fmt, HW_BIT_BLIT|HW_TEX_BLIT);
if( render == NULL)
{
printf("Cannot alloc video buffer\n\r");
printf("Cannot create render\n\r");
return 0;
};
 
cvt_ctx = sws_getContext(
ctx->width,
ctx->height,
ctx->pix_fmt,
ctx->width,
ctx->height,
PIX_FMT_BGR24,
SWS_BILINEAR,
NULL, NULL, NULL);
if(cvt_ctx == NULL)
Frame = avcodec_alloc_frame();
if ( Frame == NULL )
{
printf("Cannot initialize the conversion context!\n");
printf("Cannot alloc video frame\n\r");
return 0;
}
};
 
size = avpicture_get_size(PIX_FMT_RGB24, ctx->width, ctx->height);
 
for( i=0; i < 8; i++)
for( i=0; i < 4; i++)
{
AVFrame *frame;
int ret;
 
frame = avcodec_alloc_frame();
// printf("alloc picture %d %d %x\n",
// ctx->width, ctx->height, ctx->pix_fmt );
 
if( frame )
ret = avpicture_alloc(&frames[i].picture, ctx->pix_fmt,
ctx->width, ctx->height);
if ( ret != 0 )
{
uint8_t *buffer = (uint8_t*)av_malloc(size);
printf("Cannot alloc video buffer\n\r");
return 0;
};
 
if( buffer )
{
avpicture_fill((AVPicture *)frame, buffer, PIX_FMT_BGR24,
ctx->width, ctx->height);
 
frames[i].frame = frame;
frames[i].buffer = buffer;
frames[i].pts = 0;
frames[i].ready = 0;
continue;
};
};
printf("Cannot alloc frame buffer\n\r");
return 0;
};
 
create_thread(video_thread, 0, 163840);
create_thread(video_thread, ctx, 1024*1024);
 
delay(50);
return 1;
};
 
int frameFinished=0;
static int frame_count;
 
int decode_video(AVCodecContext *ctx, AVPacket *pkt)
int decode_video(AVCodecContext *ctx, queue_t *qv)
{
AVPacket pkt;
double pts;
AVPicture pict;
const uint8_t *data[4];
double av_time;
 
// __asm__("int3");
if(frames[dfx].ready != 0 )
return 1;
 
if(avcodec_decode_video(ctx, Frame, &frameFinished,
pkt->data, pkt->size) <= 0)
printf("decode error\n");
if( get_packet(qv, &pkt) == 0 )
return 0;
 
if( pkt->dts == AV_NOPTS_VALUE &&
frameFinished = 0;
 
ctx->reordered_opaque = pkt.pts;
 
if(avcodec_decode_video2(ctx, Frame, &frameFinished, &pkt) <= 0)
printf("video decoder error\n");
 
if(frameFinished)
{
AVPicture *dst_pic;
 
 
if( pkt.dts == AV_NOPTS_VALUE &&
Frame->reordered_opaque != AV_NOPTS_VALUE)
pts= Frame->reordered_opaque;
else if(pkt->dts != AV_NOPTS_VALUE)
pts= pkt->dts;
else if(pkt.dts != AV_NOPTS_VALUE)
pts= pkt.dts;
else
pts= 0;
 
pts *= av_q2d(video_time_base);
// pts = *(int64_t*)av_opt_ptr(avcodec_get_frame_class(),
// Frame, "best_effort_timestamp");
 
if(frameFinished)
{
while( frames[dfx].ready != 0 )
yield();
// if (pts == AV_NOPTS_VALUE)
// pts = 0;
 
pict.data[0] = frames[dfx].frame->data[0];
pict.data[1] = frames[dfx].frame->data[1];
pict.data[2] = frames[dfx].frame->data[2];
pict.data[3] = NULL;
pts *= av_q2d(video_time_base);
 
pict.linesize[0] = frames[dfx].frame->linesize[0];
pict.linesize[1] = frames[dfx].frame->linesize[1];
pict.linesize[2] = frames[dfx].frame->linesize[2];
pict.linesize[3] = 0;
dst_pic = &frames[dfx].picture;
 
data[0] = Frame->data[0];
data[1] = Frame->data[1];
data[2] = Frame->data[2];
data[3] = NULL;
av_image_copy(dst_pic->data, dst_pic->linesize, Frame->data,
Frame->linesize, ctx->pix_fmt, ctx->width, ctx->height);
 
sws_scale(cvt_ctx, data, Frame->linesize, 0, ctx->height,
pict.data, pict.linesize);
 
frames[dfx].pts = pts*1000.0;
frames[dfx].ready = 1;
 
dfx++;
dfx&= 7;
dfx&= 3;
};
av_free_packet(&pkt);
 
return 0;
return 1;
}
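
decode_video() above and video_thread() below communicate through the 4-slot frames[] ring: the decoder fills frames[dfx] and sets ready, the presenter displays frames[vfx] and clears it, and both indices wrap with & 3. A stripped-down sketch of that hand-off; producer_step/consumer_step are illustrative names, not Fplay functions:

/* Sketch only: single-producer/single-consumer hand-off over the frames[] ring. */
static void producer_step(void)            /* what decode_video() does per frame */
{
    if (frames[dfx].ready != 0)            /* slot still unread: keep the packet queued */
        return;
    /* ... decode into frames[dfx].picture, set frames[dfx].pts ... */
    frames[dfx].ready = 1;                 /* publish the frame */
    dfx = (dfx + 1) & 3;
}

static void consumer_step(void)            /* what video_thread() does per frame */
{
    if (frames[vfx].ready == 0)            /* nothing decoded yet */
        return;
    /* ... wait out frames[vfx].pts, then render->draw(render, &frames[vfx].picture) ... */
    frames[vfx].ready = 0;                 /* release the slot */
    vfx = (vfx + 1) & 3;
}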
 
extern volatile uint32_t status;
rect_t win_rect;
 
typedef unsigned int color_t;
typedef unsigned int count_t;
typedef unsigned int u32_t;
 
static void DrawWindow(int x, int y, int w, int h, char *name,
color_t workcolor, u32_t style)
int check_events()
{
 
__asm__ __volatile__(
"int $0x40"
::"a"(0),
"b"((x << 16) | (w & 0xFFFF)),
"c"((y << 16) | (h & 0xFFFF)),
"d"((style << 24) | (workcolor & 0xFFFFFF)),
"D"(name));
};
 
 
static int check_events()
{
int ev;
 
ev = check_os_event();
190,7 → 156,10
switch(ev)
{
case 1:
DrawWindow(10, 10, width+9, height+26, NULL, 0x000000,0x74);
render_adjust_size(render);
BeginDraw();
DrawWindow(0,0,0,0, NULL, 0x000000,0x73);
EndDraw();
break;
 
case 3:
202,15 → 171,18
}
 
 
extern char __cmdline[];
extern char *movie_file;
 
void video_thread(void *param)
int video_thread(void *param)
{
char *path;
rect_t rc;
AVCodecContext *ctx = param;
 
path = strrchr(__cmdline,'/')+1;
BeginDraw();
DrawWindow(10, 10, width+9, height+26, movie_file, 0x000000,0x73);
EndDraw();
 
DrawWindow(10, 10, width+9, height+26, path, 0x000000,0x74);
render_adjust_size(render);
 
while( status != 0)
{
223,14 → 195,15
{
ctime = get_master_clock();
fdelay = (frames[vfx].pts - ctime);
// printf("ctime %f pts %f delay %f\n",
// ctime, frames[vfx].pts, fdelay);
 
// printf("pts %f time %f delay %f\n",
// frames[vfx].pts, ctime, fdelay);
 
if(fdelay < 0.0 )
{
int next_vfx;
fdelay = 0;
next_vfx = (vfx+1) & 7;
next_vfx = (vfx+1) & 3;
if( frames[next_vfx].ready == 1 )
{
if(frames[next_vfx].pts <= ctime)
237,7 → 210,7
{
frames[vfx].ready = 0; // skip this frame
vfx++;
vfx&= 7;
vfx&= 3;
}
else
{
246,7 → 219,7
{
frames[vfx].ready = 0; // skip this frame
vfx++;
vfx&= 7;
vfx&= 3;
fdelay = (frames[next_vfx].pts - ctime);
}
}
258,10 → 231,12
delay( (uint32_t)(fdelay/10.0));
};
 
draw_bitmap(frames[vfx].buffer, 0, 0, width, height);
// blit_bitmap(&frames[vfx].bitmap, 5, 22, width, height);
// frames[vfx].frame->linesize[0]);
render->draw(render, &frames[vfx].picture);
frames[vfx].ready = 0;
vfx++;
vfx&= 7;
vfx&= 3;
}
else
{
268,5 → 243,247
yield();
};
};
return 0;
};
 
 
void draw_hw_picture(render_t *render, AVPicture *picture);
void draw_sw_picture(render_t *render, AVPicture *picture);
 
render_t *create_render(uint32_t width, uint32_t height,
uint32_t ctx_format, uint32_t flags)
{
render_t *ren;
 
ren = (render_t*)malloc(sizeof(*ren));
memset(ren, 0, sizeof(*ren));
render = ren;
 
render->ctx_width = width;
render->ctx_height = height;
render->ctx_format = ctx_format;
 
mutex_lock(&driver_lock);
render->caps = InitPixlib(flags);
mutex_unlock(&driver_lock);
 
if(render->caps==0)
{
printf("FPlay render engine: Hardware acceleration disabled\n");
render->draw = draw_sw_picture;
}
else
{
render->target = 0;
render->draw = draw_hw_picture;
};
 
render->state = EMPTY;
return render;
};
 
int render_set_size(render_t *render, int width, int height)
{
int i;
 
render->win_width = width;
render->win_height = height;
render->win_state = NORMAL;
 
// printf("%s %dx%d\n",__FUNCTION__, width, height);
 
if(render->state == EMPTY)
{
if(render->caps & HW_TEX_BLIT)
{
for( i=0; i < 4; i++)
{
render->bitmap[i].width = render->ctx_width;
render->bitmap[i].height = render->ctx_height;
 
if( create_bitmap(&render->bitmap[i]) != 0 )
{
status = 0;
/*
* Epic fail. Need exit_thread() here
*
*/
return 0;
};
}
}
else
{
render->bitmap[0].width = width;
render->bitmap[0].height = height;
 
if( create_bitmap(&render->bitmap[0]) != 0 )
return 0;
};
render->state = INIT;
return 0;
};
 
if(render->caps & HW_TEX_BLIT) /* hw scaler */
return 0;
 
render->bitmap[0].width = width;
render->bitmap[0].height = height;
resize_bitmap(&render->bitmap[0]);
 
return 0;
};
 
void render_adjust_size(render_t *render)
{
char proc_info[1024];
 
uint32_t right, bottom, new_w, new_h;
uint32_t s, sw, sh;
uint8_t state;
 
get_proc_info(proc_info);
 
right = *(uint32_t*)(proc_info+62)+1;
bottom = *(uint32_t*)(proc_info+66)+1;
state = *(uint8_t*)(proc_info+70);
 
if(state & 2)
{ render->win_state = MINIMIZED;
return;
}
if(state & 4)
{
render->win_state = ROLLED;
return;
};
 
render->win_state = NORMAL;
 
if( right == render->win_width &&
bottom == render->win_height)
return;
 
new_w = bottom*render->ctx_width/render->ctx_height;
new_h = right*render->ctx_height/render->ctx_width;
 
// printf("right %d bottom %d\n", right, bottom);
// printf("new_w %d new_h %d\n", new_w, new_h);
 
s = right * bottom;
sw = right * new_h;
sh = bottom * new_w;
 
if( abs(s-sw) > abs(s-sh))
new_h = bottom;
else
new_w = right;
 
if(new_w < 64)
{
new_w = 64;
new_h = 64*render->ctx_height/render->ctx_width;
};
__asm__ __volatile__(
"int $0x40"
::"a"(67), "b"(-1), "c"(-1),
"d"(new_w+9),"S"(new_h+26)
:"memory" );
render_set_size(render, new_w, new_h);
 
};
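
render_adjust_size() keeps the window aspect-correct: it reads the current client size from get_proc_info(), computes the width implied by the new height (new_w) and the height implied by the new width (new_h), keeps whichever choice is closer in area to what the user dragged, and resizes the window through system function 67. A worked example with illustrative numbers:

/* Worked example (illustrative): a 640x360 video, window client area dragged to 500x400.
 *   new_w = 400*640/360 = 711     new_h = 500*360/640 = 281
 *   s  = 500*400 = 200000
 *   sw = 500*281 = 140500   |s-sw| = 59500
 *   sh = 400*711 = 284400   |s-sh| = 84400
 * |s-sw| <= |s-sh|, so new_w is reset to 500 and the client area becomes 500x281
 * (16:9 preserved); the window itself is resized to 509x307 once the frame is added. */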
 
void draw_hw_picture(render_t *render, AVPicture *picture)
{
int dst_width, dst_height;
uint8_t *data[4];
int linesize[4];
 
if(render->win_state != NORMAL)
return;
 
if(render->caps & HW_TEX_BLIT)
{
dst_width = render->ctx_width;
dst_height = render->ctx_height;
}
else
{
dst_width = render->win_width;
dst_height = render->win_height;
};
 
cvt_ctx = sws_getCachedContext(cvt_ctx,
render->ctx_width, render->ctx_height, render->ctx_format,
dst_width, dst_height, PIX_FMT_BGRA,
SWS_FAST_BILINEAR, NULL, NULL, NULL);
if(cvt_ctx == NULL)
{
printf("Cannot initialize the conversion context!\n");
return ;
};
// printf("sws_getCachedContext\n");
data[0] = render->bitmap[render->target].data;
data[1] = render->bitmap[render->target].data+1;
data[2] = render->bitmap[render->target].data+2;
data[3] = render->bitmap[render->target].data+3;
 
linesize[0] = render->bitmap[render->target].pitch;
linesize[1] = render->bitmap[render->target].pitch;
linesize[2] = render->bitmap[render->target].pitch;
linesize[3] = render->bitmap[render->target].pitch;
 
sws_scale(cvt_ctx, (const uint8_t* const *)picture->data,
picture->linesize, 0, render->ctx_height, data, linesize);
// printf("sws_scale\n");
 
blit_bitmap(&render->bitmap[render->target], 5, 22,
render->win_width, render->win_height);
// printf("blit_bitmap\n");
 
delay(2);
render->target++;
render->target&= 3;
}
 
void draw_sw_picture(render_t *render, AVPicture *picture)
{
uint8_t *data[4];
int linesize[4];
 
if(render->win_state != NORMAL)
return;
 
cvt_ctx = sws_getCachedContext(cvt_ctx,
render->ctx_width, render->ctx_height,
render->ctx_format,
render->win_width, render->win_height,
PIX_FMT_BGRA, SWS_FAST_BILINEAR, NULL, NULL, NULL);
if(cvt_ctx == NULL)
{
printf("Cannot initialize the conversion context!\n");
return ;
}
 
data[0] = render->bitmap[0].data;
data[1] = render->bitmap[0].data+1;
data[2] = render->bitmap[0].data+2;
data[3] = render->bitmap[0].data+3;
 
 
linesize[0] = render->bitmap[0].pitch;
linesize[1] = render->bitmap[0].pitch;
linesize[2] = render->bitmap[0].pitch;
linesize[3] = render->bitmap[0].pitch;
 
sws_scale(cvt_ctx, (const uint8_t* const *)picture->data,
picture->linesize, 0, render->ctx_height, data, linesize);
 
blit_bitmap(&render->bitmap[0], 5, 22,
render->win_width, render->win_height);
}