Use add instead of lshift in mmxext vp8 idct
[libav.git] / ffplay.c
CommitLineData
01310af2 1/*
f05ef45c 2 * FFplay : Simple Media Player based on the FFmpeg libraries
01310af2
FB
3 * Copyright (c) 2003 Fabrice Bellard
4 *
b78e7197
DB
5 * This file is part of FFmpeg.
6 *
7 * FFmpeg is free software; you can redistribute it and/or
01310af2
FB
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
b78e7197 10 * version 2.1 of the License, or (at your option) any later version.
01310af2 11 *
b78e7197 12 * FFmpeg is distributed in the hope that it will be useful,
01310af2
FB
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
b78e7197 18 * License along with FFmpeg; if not, write to the Free Software
5509bffa 19 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
01310af2 20 */
364a9607 21
ba11257e 22#include "config.h"
8a3ceaf4 23#include <inttypes.h>
0f4e8165
RB
24#include <math.h>
25#include <limits.h>
245976da 26#include "libavutil/avstring.h"
718c7b18 27#include "libavutil/pixdesc.h"
245976da 28#include "libavformat/avformat.h"
245976da
DB
29#include "libavdevice/avdevice.h"
30#include "libswscale/swscale.h"
5a4476e2 31#include "libavcodec/audioconvert.h"
a7e6312b 32#include "libavcodec/colorspace.h"
e43d7a18 33#include "libavcodec/opt.h"
166621ab 34#include "libavcodec/avfft.h"
01310af2 35
917d2bb3
MN
36#if CONFIG_AVFILTER
37# include "libavfilter/avfilter.h"
38# include "libavfilter/avfiltergraph.h"
39# include "libavfilter/graphparser.h"
40#endif
41
01310af2
FB
42#include "cmdutils.h"
43
44#include <SDL.h>
45#include <SDL_thread.h>
46
2f30a81d 47#ifdef __MINGW32__
31319a8c
FB
48#undef main /* We don't want SDL to override our main() */
49#endif
50
d38c9e7a
MN
51#include <unistd.h>
52#include <assert.h>
53
64555bd9 54const char program_name[] = "FFplay";
ea9c581f 55const int program_birth_year = 2003;
4cfac5bc 56
638c9d91
FB
57//#define DEBUG_SYNC
58
79ee4683
MN
59#define MAX_QUEUE_SIZE (15 * 1024 * 1024)
60#define MIN_AUDIOQ_SIZE (20 * 16 * 1024)
61#define MIN_FRAMES 5
01310af2 62
638c9d91
FB
63/* SDL audio buffer size, in samples. Should be small to have precise
64 A/V sync as SDL does not have hardware buffer fullness info. */
65#define SDL_AUDIO_BUFFER_SIZE 1024
66
67/* no AV sync correction is done if below the AV sync threshold */
7e0140cb 68#define AV_SYNC_THRESHOLD 0.01
638c9d91
FB
69/* no AV correction is done if too big error */
70#define AV_NOSYNC_THRESHOLD 10.0
71
d38c9e7a
MN
72#define FRAME_SKIP_FACTOR 0.05
73
638c9d91
FB
74/* maximum audio speed change to get correct sync */
75#define SAMPLE_CORRECTION_PERCENT_MAX 10
76
77/* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */
78#define AUDIO_DIFF_AVG_NB 20
79
01310af2
FB
80/* NOTE: the size must be big enough to compensate the hardware audio buffersize size */
81#define SAMPLE_ARRAY_SIZE (2*65536)
82
03ae87a3
LA
83static int sws_flags = SWS_BICUBIC;
84
01310af2
FB
85typedef struct PacketQueue {
86 AVPacketList *first_pkt, *last_pkt;
87 int nb_packets;
88 int size;
89 int abort_request;
90 SDL_mutex *mutex;
91 SDL_cond *cond;
92} PacketQueue;
93
562f382c 94#define VIDEO_PICTURE_QUEUE_SIZE 2
72ce053b 95#define SUBPICTURE_QUEUE_SIZE 4
01310af2
FB
96
97typedef struct VideoPicture {
267e9dfa 98 double pts; ///<presentation time stamp for this picture
d38c9e7a 99 double target_clock; ///<av_gettime() time at which this should be displayed ideally
1a620dd7 100 int64_t pos; ///<byte position in file
01310af2
FB
101 SDL_Overlay *bmp;
102 int width, height; /* source height & width */
103 int allocated;
917d2bb3
MN
104 enum PixelFormat pix_fmt;
105
106#if CONFIG_AVFILTER
107 AVFilterPicRef *picref;
108#endif
01310af2
FB
109} VideoPicture;
110
72ce053b
IC
111typedef struct SubPicture {
112 double pts; /* presentation time stamp for this picture */
113 AVSubtitle sub;
114} SubPicture;
115
01310af2
FB
116enum {
117 AV_SYNC_AUDIO_MASTER, /* default choice */
118 AV_SYNC_VIDEO_MASTER,
638c9d91 119 AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */
01310af2
FB
120};
121
122typedef struct VideoState {
123 SDL_Thread *parse_tid;
124 SDL_Thread *video_tid;
d38c9e7a 125 SDL_Thread *refresh_tid;
638c9d91 126 AVInputFormat *iformat;
01310af2
FB
127 int no_background;
128 int abort_request;
129 int paused;
416e3508 130 int last_paused;
72ea344b 131 int seek_req;
3ba1438d 132 int seek_flags;
72ea344b 133 int64_t seek_pos;
4ed29207 134 int64_t seek_rel;
f5668147 135 int read_pause_return;
01310af2
FB
136 AVFormatContext *ic;
137 int dtg_active_format;
138
139 int audio_stream;
115329f1 140
01310af2 141 int av_sync_type;
638c9d91
FB
142 double external_clock; /* external clock base */
143 int64_t external_clock_time;
115329f1 144
638c9d91
FB
145 double audio_clock;
146 double audio_diff_cum; /* used for AV difference average computation */
147 double audio_diff_avg_coef;
148 double audio_diff_threshold;
149 int audio_diff_avg_count;
01310af2
FB
150 AVStream *audio_st;
151 PacketQueue audioq;
152 int audio_hw_buf_size;
153 /* samples output by the codec. we reserve more space for avsync
154 compensation */
c6727809
MR
155 DECLARE_ALIGNED(16,uint8_t,audio_buf1)[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
156 DECLARE_ALIGNED(16,uint8_t,audio_buf2)[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
5a4476e2 157 uint8_t *audio_buf;
7fea94ce 158 unsigned int audio_buf_size; /* in bytes */
01310af2 159 int audio_buf_index; /* in bytes */
bea18375 160 AVPacket audio_pkt_temp;
01310af2 161 AVPacket audio_pkt;
5a4476e2
PR
162 enum SampleFormat audio_src_fmt;
163 AVAudioConvert *reformat_ctx;
115329f1 164
01310af2
FB
165 int show_audio; /* if true, display audio samples */
166 int16_t sample_array[SAMPLE_ARRAY_SIZE];
167 int sample_array_index;
5e0257e3 168 int last_i_start;
166621ab 169 RDFTContext *rdft;
12eeda34
MN
170 int rdft_bits;
171 int xpos;
115329f1 172
72ce053b
IC
173 SDL_Thread *subtitle_tid;
174 int subtitle_stream;
175 int subtitle_stream_changed;
176 AVStream *subtitle_st;
177 PacketQueue subtitleq;
178 SubPicture subpq[SUBPICTURE_QUEUE_SIZE];
179 int subpq_size, subpq_rindex, subpq_windex;
180 SDL_mutex *subpq_mutex;
181 SDL_cond *subpq_cond;
115329f1 182
638c9d91
FB
183 double frame_timer;
184 double frame_last_pts;
185 double frame_last_delay;
115329f1 186 double video_clock; ///<pts of last decoded frame / predicted pts of next decoded frame
01310af2
FB
187 int video_stream;
188 AVStream *video_st;
189 PacketQueue videoq;
267e9dfa 190 double video_current_pts; ///<current displayed pts (different from video_clock if frame fifos are used)
68aefbe8 191 double video_current_pts_drift; ///<video_current_pts - time (av_gettime) at which we updated video_current_pts - used to have running video pts
1a620dd7 192 int64_t video_current_pos; ///<current displayed file pos
01310af2
FB
193 VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
194 int pictq_size, pictq_rindex, pictq_windex;
195 SDL_mutex *pictq_mutex;
196 SDL_cond *pictq_cond;
917d2bb3 197#if !CONFIG_AVFILTER
3ac56e28 198 struct SwsContext *img_convert_ctx;
917d2bb3 199#endif
115329f1 200
01310af2
FB
201 // QETimer *video_timer;
202 char filename[1024];
203 int width, height, xleft, ytop;
41db429d
MN
204
205 int64_t faulty_pts;
206 int64_t faulty_dts;
207 int64_t last_dts_for_fault_detection;
208 int64_t last_pts_for_fault_detection;
209
917d2bb3
MN
210#if CONFIG_AVFILTER
211 AVFilterContext *out_video_filter; ///<the last filter in the video chain
212#endif
d38c9e7a
MN
213
214 float skip_frames;
215 float skip_frames_index;
216 int refresh;
01310af2
FB
217} VideoState;
218
358061f6 219static void show_help(void);
638c9d91 220static int audio_write_get_buf_size(VideoState *is);
01310af2
FB
221
222/* options specified by the user */
223static AVInputFormat *file_iformat;
224static const char *input_filename;
076db5ed 225static const char *window_title;
01310af2
FB
226static int fs_screen_width;
227static int fs_screen_height;
fccb19e3
MN
228static int screen_width = 0;
229static int screen_height = 0;
e4b89522
LW
230static int frame_width = 0;
231static int frame_height = 0;
232static enum PixelFormat frame_pix_fmt = PIX_FMT_NONE;
01310af2
FB
233static int audio_disable;
234static int video_disable;
72415b2a
SS
235static int wanted_stream[AVMEDIA_TYPE_NB]={
236 [AVMEDIA_TYPE_AUDIO]=-1,
237 [AVMEDIA_TYPE_VIDEO]=-1,
238 [AVMEDIA_TYPE_SUBTITLE]=-1,
5b369983 239};
70a4764d 240static int seek_by_bytes=-1;
01310af2 241static int display_disable;
1e1a0b18 242static int show_status = 1;
638c9d91 243static int av_sync_type = AV_SYNC_AUDIO_MASTER;
72ea344b 244static int64_t start_time = AV_NOPTS_VALUE;
d834d63b 245static int64_t duration = AV_NOPTS_VALUE;
e26a8335 246static int debug = 0;
0c9bbaec 247static int debug_mv = 0;
bba04f1e 248static int step = 0;
c62c07d3 249static int thread_count = 1;
6387c3e6 250static int workaround_bugs = 1;
6fc5b059 251static int fast = 0;
30bc6613 252static int genpts = 0;
178fcca8
MN
253static int lowres = 0;
254static int idct = FF_IDCT_AUTO;
8c3eba7c
MN
255static enum AVDiscard skip_frame= AVDISCARD_DEFAULT;
256static enum AVDiscard skip_idct= AVDISCARD_DEFAULT;
257static enum AVDiscard skip_loop_filter= AVDISCARD_DEFAULT;
047599a4 258static int error_recognition = FF_ER_CAREFUL;
1b51e051 259static int error_concealment = 3;
41db429d 260static int decoder_reorder_pts= -1;
2d1653b0 261static int autoexit;
1922c0a7 262static int loop=1;
d38c9e7a 263static int framedrop=1;
2b3da32f
MN
264
265static int rdftspeed=20;
917d2bb3
MN
266#if CONFIG_AVFILTER
267static char *vfilters = NULL;
268#endif
01310af2
FB
269
270/* current context */
271static int is_full_screen;
272static VideoState *cur_stream;
5e0257e3 273static int64_t audio_callback_time;
01310af2 274
2c676c33 275static AVPacket flush_pkt;
39c6a118 276
01310af2
FB
277#define FF_ALLOC_EVENT (SDL_USEREVENT)
278#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
638c9d91 279#define FF_QUIT_EVENT (SDL_USEREVENT + 2)
01310af2 280
2c676c33 281static SDL_Surface *screen;
01310af2 282
515bd00e
MN
283static int packet_queue_put(PacketQueue *q, AVPacket *pkt);
284
01310af2
FB
285/* packet queue handling */
286static void packet_queue_init(PacketQueue *q)
287{
288 memset(q, 0, sizeof(PacketQueue));
289 q->mutex = SDL_CreateMutex();
290 q->cond = SDL_CreateCond();
515bd00e 291 packet_queue_put(q, &flush_pkt);
01310af2
FB
292}
293
72ea344b 294static void packet_queue_flush(PacketQueue *q)
01310af2
FB
295{
296 AVPacketList *pkt, *pkt1;
297
687fae2b 298 SDL_LockMutex(q->mutex);
01310af2
FB
299 for(pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
300 pkt1 = pkt->next;
301 av_free_packet(&pkt->pkt);
da6c4573 302 av_freep(&pkt);
01310af2 303 }
72ea344b
FB
304 q->last_pkt = NULL;
305 q->first_pkt = NULL;
306 q->nb_packets = 0;
307 q->size = 0;
687fae2b 308 SDL_UnlockMutex(q->mutex);
72ea344b
FB
309}
310
311static void packet_queue_end(PacketQueue *q)
312{
313 packet_queue_flush(q);
01310af2
FB
314 SDL_DestroyMutex(q->mutex);
315 SDL_DestroyCond(q->cond);
316}
317
318static int packet_queue_put(PacketQueue *q, AVPacket *pkt)
319{
320 AVPacketList *pkt1;
321
72ea344b 322 /* duplicate the packet */
39c6a118 323 if (pkt!=&flush_pkt && av_dup_packet(pkt) < 0)
72ea344b 324 return -1;
115329f1 325
01310af2
FB
326 pkt1 = av_malloc(sizeof(AVPacketList));
327 if (!pkt1)
328 return -1;
329 pkt1->pkt = *pkt;
330 pkt1->next = NULL;
331
72ea344b 332
01310af2
FB
333 SDL_LockMutex(q->mutex);
334
335 if (!q->last_pkt)
336
337 q->first_pkt = pkt1;
338 else
339 q->last_pkt->next = pkt1;
340 q->last_pkt = pkt1;
341 q->nb_packets++;
7b776589 342 q->size += pkt1->pkt.size + sizeof(*pkt1);
01310af2
FB
343 /* XXX: should duplicate packet data in DV case */
344 SDL_CondSignal(q->cond);
345
346 SDL_UnlockMutex(q->mutex);
347 return 0;
348}
349
350static void packet_queue_abort(PacketQueue *q)
351{
352 SDL_LockMutex(q->mutex);
353
354 q->abort_request = 1;
115329f1 355
01310af2
FB
356 SDL_CondSignal(q->cond);
357
358 SDL_UnlockMutex(q->mutex);
359}
360
361/* return < 0 if aborted, 0 if no packet and > 0 if packet. */
362static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
363{
364 AVPacketList *pkt1;
365 int ret;
366
367 SDL_LockMutex(q->mutex);
368
369 for(;;) {
370 if (q->abort_request) {
371 ret = -1;
372 break;
373 }
115329f1 374
01310af2
FB
375 pkt1 = q->first_pkt;
376 if (pkt1) {
377 q->first_pkt = pkt1->next;
378 if (!q->first_pkt)
379 q->last_pkt = NULL;
380 q->nb_packets--;
7b776589 381 q->size -= pkt1->pkt.size + sizeof(*pkt1);
01310af2
FB
382 *pkt = pkt1->pkt;
383 av_free(pkt1);
384 ret = 1;
385 break;
386 } else if (!block) {
387 ret = 0;
388 break;
389 } else {
390 SDL_CondWait(q->cond, q->mutex);
391 }
392 }
393 SDL_UnlockMutex(q->mutex);
394 return ret;
395}
396
115329f1 397static inline void fill_rectangle(SDL_Surface *screen,
01310af2
FB
398 int x, int y, int w, int h, int color)
399{
400 SDL_Rect rect;
401 rect.x = x;
402 rect.y = y;
403 rect.w = w;
404 rect.h = h;
405 SDL_FillRect(screen, &rect, color);
406}
407
408#if 0
409/* draw only the border of a rectangle */
410void fill_border(VideoState *s, int x, int y, int w, int h, int color)
411{
412 int w1, w2, h1, h2;
413
414 /* fill the background */
415 w1 = x;
416 if (w1 < 0)
417 w1 = 0;
418 w2 = s->width - (x + w);
419 if (w2 < 0)
420 w2 = 0;
421 h1 = y;
422 if (h1 < 0)
423 h1 = 0;
424 h2 = s->height - (y + h);
425 if (h2 < 0)
426 h2 = 0;
115329f1
DB
427 fill_rectangle(screen,
428 s->xleft, s->ytop,
429 w1, s->height,
01310af2 430 color);
115329f1
DB
431 fill_rectangle(screen,
432 s->xleft + s->width - w2, s->ytop,
433 w2, s->height,
01310af2 434 color);
115329f1
DB
435 fill_rectangle(screen,
436 s->xleft + w1, s->ytop,
437 s->width - w1 - w2, h1,
01310af2 438 color);
115329f1 439 fill_rectangle(screen,
01310af2
FB
440 s->xleft + w1, s->ytop + s->height - h2,
441 s->width - w1 - w2, h2,
442 color);
443}
444#endif
445
72ce053b
IC
446#define ALPHA_BLEND(a, oldp, newp, s)\
447((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s))
448
449#define RGBA_IN(r, g, b, a, s)\
450{\
451 unsigned int v = ((const uint32_t *)(s))[0];\
452 a = (v >> 24) & 0xff;\
453 r = (v >> 16) & 0xff;\
454 g = (v >> 8) & 0xff;\
455 b = v & 0xff;\
456}
457
458#define YUVA_IN(y, u, v, a, s, pal)\
459{\
57cf99f2 460 unsigned int val = ((const uint32_t *)(pal))[*(const uint8_t*)(s)];\
72ce053b
IC
461 a = (val >> 24) & 0xff;\
462 y = (val >> 16) & 0xff;\
463 u = (val >> 8) & 0xff;\
464 v = val & 0xff;\
465}
466
467#define YUVA_OUT(d, y, u, v, a)\
468{\
469 ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\
470}
471
472
473#define BPP 1
474
0a8cd696 475static void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh)
72ce053b
IC
476{
477 int wrap, wrap3, width2, skip2;
478 int y, u, v, a, u1, v1, a1, w, h;
479 uint8_t *lum, *cb, *cr;
480 const uint8_t *p;
481 const uint32_t *pal;
9cb5a11e
RD
482 int dstx, dsty, dstw, dsth;
483
7cf9c6ae
MN
484 dstw = av_clip(rect->w, 0, imgw);
485 dsth = av_clip(rect->h, 0, imgh);
486 dstx = av_clip(rect->x, 0, imgw - dstw);
487 dsty = av_clip(rect->y, 0, imgh - dsth);
9cb5a11e
RD
488 lum = dst->data[0] + dsty * dst->linesize[0];
489 cb = dst->data[1] + (dsty >> 1) * dst->linesize[1];
490 cr = dst->data[2] + (dsty >> 1) * dst->linesize[2];
491
f54b31b9 492 width2 = ((dstw + 1) >> 1) + (dstx & ~dstw & 1);
9cb5a11e 493 skip2 = dstx >> 1;
72ce053b 494 wrap = dst->linesize[0];
25b4c651
MN
495 wrap3 = rect->pict.linesize[0];
496 p = rect->pict.data[0];
497 pal = (const uint32_t *)rect->pict.data[1]; /* Now in YCrCb! */
115329f1 498
9cb5a11e
RD
499 if (dsty & 1) {
500 lum += dstx;
72ce053b
IC
501 cb += skip2;
502 cr += skip2;
115329f1 503
9cb5a11e 504 if (dstx & 1) {
72ce053b
IC
505 YUVA_IN(y, u, v, a, p, pal);
506 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
507 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
508 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
509 cb++;
510 cr++;
511 lum++;
512 p += BPP;
513 }
9cb5a11e 514 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
515 YUVA_IN(y, u, v, a, p, pal);
516 u1 = u;
517 v1 = v;
518 a1 = a;
519 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
520
521 YUVA_IN(y, u, v, a, p + BPP, pal);
522 u1 += u;
523 v1 += v;
524 a1 += a;
525 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
526 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
527 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
528 cb++;
529 cr++;
530 p += 2 * BPP;
531 lum += 2;
532 }
533 if (w) {
534 YUVA_IN(y, u, v, a, p, pal);
535 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
536 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
537 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
676ef505
BA
538 p++;
539 lum++;
72ce053b 540 }
4606a059
BA
541 p += wrap3 - dstw * BPP;
542 lum += wrap - dstw - dstx;
72ce053b
IC
543 cb += dst->linesize[1] - width2 - skip2;
544 cr += dst->linesize[2] - width2 - skip2;
545 }
9cb5a11e
RD
546 for(h = dsth - (dsty & 1); h >= 2; h -= 2) {
547 lum += dstx;
72ce053b
IC
548 cb += skip2;
549 cr += skip2;
115329f1 550
9cb5a11e 551 if (dstx & 1) {
72ce053b
IC
552 YUVA_IN(y, u, v, a, p, pal);
553 u1 = u;
554 v1 = v;
555 a1 = a;
556 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
557 p += wrap3;
558 lum += wrap;
559 YUVA_IN(y, u, v, a, p, pal);
560 u1 += u;
561 v1 += v;
562 a1 += a;
563 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
564 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
565 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
566 cb++;
567 cr++;
568 p += -wrap3 + BPP;
569 lum += -wrap + 1;
570 }
9cb5a11e 571 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
572 YUVA_IN(y, u, v, a, p, pal);
573 u1 = u;
574 v1 = v;
575 a1 = a;
576 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
577
f8ca63e8 578 YUVA_IN(y, u, v, a, p + BPP, pal);
72ce053b
IC
579 u1 += u;
580 v1 += v;
581 a1 += a;
582 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
583 p += wrap3;
584 lum += wrap;
585
586 YUVA_IN(y, u, v, a, p, pal);
587 u1 += u;
588 v1 += v;
589 a1 += a;
590 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
591
f8ca63e8 592 YUVA_IN(y, u, v, a, p + BPP, pal);
72ce053b
IC
593 u1 += u;
594 v1 += v;
595 a1 += a;
596 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
597
598 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
599 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);
600
601 cb++;
602 cr++;
603 p += -wrap3 + 2 * BPP;
604 lum += -wrap + 2;
605 }
606 if (w) {
607 YUVA_IN(y, u, v, a, p, pal);
608 u1 = u;
609 v1 = v;
610 a1 = a;
611 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
612 p += wrap3;
613 lum += wrap;
614 YUVA_IN(y, u, v, a, p, pal);
615 u1 += u;
616 v1 += v;
617 a1 += a;
618 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
619 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
620 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
621 cb++;
622 cr++;
623 p += -wrap3 + BPP;
624 lum += -wrap + 1;
625 }
9cb5a11e
RD
626 p += wrap3 + (wrap3 - dstw * BPP);
627 lum += wrap + (wrap - dstw - dstx);
72ce053b
IC
628 cb += dst->linesize[1] - width2 - skip2;
629 cr += dst->linesize[2] - width2 - skip2;
630 }
631 /* handle odd height */
632 if (h) {
9cb5a11e 633 lum += dstx;
72ce053b
IC
634 cb += skip2;
635 cr += skip2;
115329f1 636
9cb5a11e 637 if (dstx & 1) {
72ce053b
IC
638 YUVA_IN(y, u, v, a, p, pal);
639 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
640 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
641 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
642 cb++;
643 cr++;
644 lum++;
645 p += BPP;
646 }
9cb5a11e 647 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
648 YUVA_IN(y, u, v, a, p, pal);
649 u1 = u;
650 v1 = v;
651 a1 = a;
652 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
653
654 YUVA_IN(y, u, v, a, p + BPP, pal);
655 u1 += u;
656 v1 += v;
657 a1 += a;
658 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
659 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u, 1);
660 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v, 1);
661 cb++;
662 cr++;
663 p += 2 * BPP;
664 lum += 2;
665 }
666 if (w) {
667 YUVA_IN(y, u, v, a, p, pal);
668 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
669 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
670 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
671 }
672 }
673}
674
675static void free_subpicture(SubPicture *sp)
676{
677 int i;
115329f1 678
72ce053b
IC
679 for (i = 0; i < sp->sub.num_rects; i++)
680 {
25b4c651
MN
681 av_freep(&sp->sub.rects[i]->pict.data[0]);
682 av_freep(&sp->sub.rects[i]->pict.data[1]);
db4fac64 683 av_freep(&sp->sub.rects[i]);
72ce053b 684 }
115329f1 685
72ce053b 686 av_free(sp->sub.rects);
115329f1 687
72ce053b
IC
688 memset(&sp->sub, 0, sizeof(AVSubtitle));
689}
690
01310af2
FB
691static void video_image_display(VideoState *is)
692{
693 VideoPicture *vp;
72ce053b
IC
694 SubPicture *sp;
695 AVPicture pict;
01310af2
FB
696 float aspect_ratio;
697 int width, height, x, y;
698 SDL_Rect rect;
72ce053b 699 int i;
01310af2
FB
700
701 vp = &is->pictq[is->pictq_rindex];
702 if (vp->bmp) {
917d2bb3
MN
703#if CONFIG_AVFILTER
704 if (vp->picref->pixel_aspect.num == 0)
705 aspect_ratio = 0;
706 else
707 aspect_ratio = av_q2d(vp->picref->pixel_aspect);
708#else
709
01310af2 710 /* XXX: use variable in the frame */
c30a4489
AJ
711 if (is->video_st->sample_aspect_ratio.num)
712 aspect_ratio = av_q2d(is->video_st->sample_aspect_ratio);
713 else if (is->video_st->codec->sample_aspect_ratio.num)
714 aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio);
72ea344b 715 else
c30a4489 716 aspect_ratio = 0;
917d2bb3 717#endif
01310af2 718 if (aspect_ratio <= 0.0)
c30a4489 719 aspect_ratio = 1.0;
917d2bb3 720 aspect_ratio *= (float)vp->width / (float)vp->height;
01310af2
FB
721 /* if an active format is indicated, then it overrides the
722 mpeg format */
723#if 0
01f4895c
MN
724 if (is->video_st->codec->dtg_active_format != is->dtg_active_format) {
725 is->dtg_active_format = is->video_st->codec->dtg_active_format;
01310af2
FB
726 printf("dtg_active_format=%d\n", is->dtg_active_format);
727 }
728#endif
729#if 0
01f4895c 730 switch(is->video_st->codec->dtg_active_format) {
01310af2
FB
731 case FF_DTG_AFD_SAME:
732 default:
733 /* nothing to do */
734 break;
735 case FF_DTG_AFD_4_3:
736 aspect_ratio = 4.0 / 3.0;
737 break;
738 case FF_DTG_AFD_16_9:
739 aspect_ratio = 16.0 / 9.0;
740 break;
741 case FF_DTG_AFD_14_9:
742 aspect_ratio = 14.0 / 9.0;
743 break;
744 case FF_DTG_AFD_4_3_SP_14_9:
745 aspect_ratio = 14.0 / 9.0;
746 break;
747 case FF_DTG_AFD_16_9_SP_14_9:
748 aspect_ratio = 14.0 / 9.0;
749 break;
750 case FF_DTG_AFD_SP_4_3:
751 aspect_ratio = 4.0 / 3.0;
752 break;
753 }
754#endif
755
72ce053b
IC
756 if (is->subtitle_st)
757 {
758 if (is->subpq_size > 0)
759 {
760 sp = &is->subpq[is->subpq_rindex];
761
762 if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000))
763 {
764 SDL_LockYUVOverlay (vp->bmp);
765
766 pict.data[0] = vp->bmp->pixels[0];
767 pict.data[1] = vp->bmp->pixels[2];
768 pict.data[2] = vp->bmp->pixels[1];
769
770 pict.linesize[0] = vp->bmp->pitches[0];
771 pict.linesize[1] = vp->bmp->pitches[2];
772 pict.linesize[2] = vp->bmp->pitches[1];
773
774 for (i = 0; i < sp->sub.num_rects; i++)
db4fac64 775 blend_subrect(&pict, sp->sub.rects[i],
0a8cd696 776 vp->bmp->w, vp->bmp->h);
72ce053b
IC
777
778 SDL_UnlockYUVOverlay (vp->bmp);
779 }
780 }
781 }
782
783
01310af2
FB
784 /* XXX: we suppose the screen has a 1.0 pixel ratio */
785 height = is->height;
bb6c34e5 786 width = ((int)rint(height * aspect_ratio)) & ~1;
01310af2
FB
787 if (width > is->width) {
788 width = is->width;
bb6c34e5 789 height = ((int)rint(width / aspect_ratio)) & ~1;
01310af2
FB
790 }
791 x = (is->width - width) / 2;
792 y = (is->height - height) / 2;
793 if (!is->no_background) {
794 /* fill the background */
795 // fill_border(is, x, y, width, height, QERGB(0x00, 0x00, 0x00));
796 } else {
797 is->no_background = 0;
798 }
799 rect.x = is->xleft + x;
2f6547fb 800 rect.y = is->ytop + y;
01310af2
FB
801 rect.w = width;
802 rect.h = height;
803 SDL_DisplayYUVOverlay(vp->bmp, &rect);
804 } else {
805#if 0
115329f1
DB
806 fill_rectangle(screen,
807 is->xleft, is->ytop, is->width, is->height,
01310af2
FB
808 QERGB(0x00, 0x00, 0x00));
809#endif
810 }
811}
812
813static inline int compute_mod(int a, int b)
814{
815 a = a % b;
115329f1 816 if (a >= 0)
01310af2
FB
817 return a;
818 else
819 return a + b;
820}
821
822static void video_audio_display(VideoState *s)
823{
824 int i, i_start, x, y1, y, ys, delay, n, nb_display_channels;
825 int ch, channels, h, h2, bgcolor, fgcolor;
826 int16_t time_diff;
4c7c7645
MN
827 int rdft_bits, nb_freq;
828
829 for(rdft_bits=1; (1<<rdft_bits)<2*s->height; rdft_bits++)
830 ;
831 nb_freq= 1<<(rdft_bits-1);
115329f1 832
01310af2 833 /* compute display index : center on currently output samples */
01f4895c 834 channels = s->audio_st->codec->channels;
01310af2 835 nb_display_channels = channels;
5e0257e3 836 if (!s->paused) {
4c7c7645 837 int data_used= s->show_audio==1 ? s->width : (2*nb_freq);
5e0257e3
FB
838 n = 2 * channels;
839 delay = audio_write_get_buf_size(s);
840 delay /= n;
115329f1 841
5e0257e3
FB
842 /* to be more precise, we take into account the time spent since
843 the last buffer computation */
844 if (audio_callback_time) {
845 time_diff = av_gettime() - audio_callback_time;
122dcdcb 846 delay -= (time_diff * s->audio_st->codec->sample_rate) / 1000000;
5e0257e3 847 }
115329f1 848
122dcdcb 849 delay += 2*data_used;
4c7c7645
MN
850 if (delay < data_used)
851 delay = data_used;
ac50bcc8
MN
852
853 i_start= x = compute_mod(s->sample_array_index - delay * channels, SAMPLE_ARRAY_SIZE);
12eeda34 854 if(s->show_audio==1){
6c7165c7
JM
855 h= INT_MIN;
856 for(i=0; i<1000; i+=channels){
857 int idx= (SAMPLE_ARRAY_SIZE + x - i) % SAMPLE_ARRAY_SIZE;
858 int a= s->sample_array[idx];
859 int b= s->sample_array[(idx + 4*channels)%SAMPLE_ARRAY_SIZE];
860 int c= s->sample_array[(idx + 5*channels)%SAMPLE_ARRAY_SIZE];
861 int d= s->sample_array[(idx + 9*channels)%SAMPLE_ARRAY_SIZE];
862 int score= a-d;
863 if(h<score && (b^c)<0){
864 h= score;
865 i_start= idx;
866 }
ac50bcc8
MN
867 }
868 }
869
5e0257e3
FB
870 s->last_i_start = i_start;
871 } else {
872 i_start = s->last_i_start;
01310af2
FB
873 }
874
01310af2 875 bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
12eeda34 876 if(s->show_audio==1){
6c7165c7
JM
877 fill_rectangle(screen,
878 s->xleft, s->ytop, s->width, s->height,
879 bgcolor);
880
881 fgcolor = SDL_MapRGB(screen->format, 0xff, 0xff, 0xff);
882
883 /* total height for one channel */
884 h = s->height / nb_display_channels;
885 /* graph height / 2 */
886 h2 = (h * 9) / 20;
887 for(ch = 0;ch < nb_display_channels; ch++) {
888 i = i_start + ch;
889 y1 = s->ytop + ch * h + (h / 2); /* position of center line */
890 for(x = 0; x < s->width; x++) {
891 y = (s->sample_array[i] * h2) >> 15;
892 if (y < 0) {
893 y = -y;
894 ys = y1 - y;
895 } else {
896 ys = y1;
897 }
898 fill_rectangle(screen,
899 s->xleft + x, ys, 1, y,
900 fgcolor);
901 i += channels;
902 if (i >= SAMPLE_ARRAY_SIZE)
903 i -= SAMPLE_ARRAY_SIZE;
01310af2 904 }
01310af2 905 }
01310af2 906
6c7165c7 907 fgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0xff);
01310af2 908
6c7165c7
JM
909 for(ch = 1;ch < nb_display_channels; ch++) {
910 y = s->ytop + ch * h;
911 fill_rectangle(screen,
912 s->xleft, y, s->width, 1,
913 fgcolor);
914 }
915 SDL_UpdateRect(screen, s->xleft, s->ytop, s->width, s->height);
12eeda34 916 }else{
12eeda34 917 nb_display_channels= FFMIN(nb_display_channels, 2);
12eeda34 918 if(rdft_bits != s->rdft_bits){
166621ab
MR
919 av_rdft_end(s->rdft);
920 s->rdft = av_rdft_init(rdft_bits, DFT_R2C);
12eeda34
MN
921 s->rdft_bits= rdft_bits;
922 }
12eeda34
MN
923 {
924 FFTSample data[2][2*nb_freq];
925 for(ch = 0;ch < nb_display_channels; ch++) {
926 i = i_start + ch;
927 for(x = 0; x < 2*nb_freq; x++) {
928 double w= (x-nb_freq)*(1.0/nb_freq);
929 data[ch][x]= s->sample_array[i]*(1.0-w*w);
930 i += channels;
931 if (i >= SAMPLE_ARRAY_SIZE)
932 i -= SAMPLE_ARRAY_SIZE;
933 }
166621ab 934 av_rdft_calc(s->rdft, data[ch]);
12eeda34
MN
935 }
936 //least efficient way to do this, we should of course directly access it but its more than fast enough
092421cf 937 for(y=0; y<s->height; y++){
12eeda34
MN
938 double w= 1/sqrt(nb_freq);
939 int a= sqrt(w*sqrt(data[0][2*y+0]*data[0][2*y+0] + data[0][2*y+1]*data[0][2*y+1]));
940 int b= sqrt(w*sqrt(data[1][2*y+0]*data[1][2*y+0] + data[1][2*y+1]*data[1][2*y+1]));
941 a= FFMIN(a,255);
942 b= FFMIN(b,255);
943 fgcolor = SDL_MapRGB(screen->format, a, b, (a+b)/2);
944
945 fill_rectangle(screen,
946 s->xpos, s->height-y, 1, 1,
947 fgcolor);
948 }
949 }
950 SDL_UpdateRect(screen, s->xpos, s->ytop, 1, s->height);
951 s->xpos++;
952 if(s->xpos >= s->width)
953 s->xpos= s->xleft;
954 }
01310af2
FB
955}
956
990c8438
MN
957static int video_open(VideoState *is){
958 int flags = SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL;
959 int w,h;
960
fb84155b
MN
961 if(is_full_screen) flags |= SDL_FULLSCREEN;
962 else flags |= SDL_RESIZABLE;
963
990c8438
MN
964 if (is_full_screen && fs_screen_width) {
965 w = fs_screen_width;
966 h = fs_screen_height;
fb84155b
MN
967 } else if(!is_full_screen && screen_width){
968 w = screen_width;
969 h = screen_height;
917d2bb3
MN
970#if CONFIG_AVFILTER
971 }else if (is->out_video_filter && is->out_video_filter->inputs[0]){
972 w = is->out_video_filter->inputs[0]->w;
973 h = is->out_video_filter->inputs[0]->h;
974#else
fb84155b
MN
975 }else if (is->video_st && is->video_st->codec->width){
976 w = is->video_st->codec->width;
977 h = is->video_st->codec->height;
917d2bb3 978#endif
990c8438 979 } else {
fb84155b
MN
980 w = 640;
981 h = 480;
990c8438 982 }
d3d7b12e
MN
983 if(screen && is->width == screen->w && screen->w == w
984 && is->height== screen->h && screen->h == h)
985 return 0;
986
c97f5402 987#ifndef __APPLE__
990c8438
MN
988 screen = SDL_SetVideoMode(w, h, 0, flags);
989#else
990 /* setting bits_per_pixel = 0 or 32 causes blank video on OS X */
991 screen = SDL_SetVideoMode(w, h, 24, flags);
992#endif
993 if (!screen) {
994 fprintf(stderr, "SDL: could not set video mode - exiting\n");
995 return -1;
996 }
076db5ed
RK
997 if (!window_title)
998 window_title = input_filename;
999 SDL_WM_SetCaption(window_title, window_title);
990c8438
MN
1000
1001 is->width = screen->w;
1002 is->height = screen->h;
1003
1004 return 0;
1005}
8c982c5d 1006
01310af2
FB
1007/* display the current picture, if any */
1008static void video_display(VideoState *is)
1009{
8c982c5d
MN
1010 if(!screen)
1011 video_open(cur_stream);
115329f1 1012 if (is->audio_st && is->show_audio)
01310af2
FB
1013 video_audio_display(is);
1014 else if (is->video_st)
1015 video_image_display(is);
1016}
1017
d38c9e7a 1018static int refresh_thread(void *opaque)
01310af2 1019{
d38c9e7a
MN
1020 VideoState *is= opaque;
1021 while(!is->abort_request){
01310af2
FB
1022 SDL_Event event;
1023 event.type = FF_REFRESH_EVENT;
1024 event.user.data1 = opaque;
d38c9e7a
MN
1025 if(!is->refresh){
1026 is->refresh=1;
01310af2 1027 SDL_PushEvent(&event);
d38c9e7a 1028 }
2b3da32f 1029 usleep(is->audio_st && is->show_audio ? rdftspeed*1000 : 5000); //FIXME ideally we should wait the correct time but SDLs event passing is so slow it would be silly
d38c9e7a
MN
1030 }
1031 return 0;
01310af2
FB
1032}
1033
638c9d91
FB
1034/* get the current audio clock value */
1035static double get_audio_clock(VideoState *is)
1036{
1037 double pts;
1038 int hw_buf_size, bytes_per_sec;
1039 pts = is->audio_clock;
1040 hw_buf_size = audio_write_get_buf_size(is);
1041 bytes_per_sec = 0;
1042 if (is->audio_st) {
115329f1 1043 bytes_per_sec = is->audio_st->codec->sample_rate *
01f4895c 1044 2 * is->audio_st->codec->channels;
638c9d91
FB
1045 }
1046 if (bytes_per_sec)
1047 pts -= (double)hw_buf_size / bytes_per_sec;
1048 return pts;
1049}
1050
1051/* get the current video clock value */
1052static double get_video_clock(VideoState *is)
1053{
04108619 1054 if (is->paused) {
41a4cd0c 1055 return is->video_current_pts;
72ea344b 1056 } else {
68aefbe8 1057 return is->video_current_pts_drift + av_gettime() / 1000000.0;
72ea344b 1058 }
638c9d91
FB
1059}
1060
1061/* get the current external clock value */
1062static double get_external_clock(VideoState *is)
1063{
1064 int64_t ti;
1065 ti = av_gettime();
1066 return is->external_clock + ((ti - is->external_clock_time) * 1e-6);
1067}
1068
1069/* get the current master clock value */
1070static double get_master_clock(VideoState *is)
1071{
1072 double val;
1073
72ea344b
FB
1074 if (is->av_sync_type == AV_SYNC_VIDEO_MASTER) {
1075 if (is->video_st)
1076 val = get_video_clock(is);
1077 else
1078 val = get_audio_clock(is);
1079 } else if (is->av_sync_type == AV_SYNC_AUDIO_MASTER) {
1080 if (is->audio_st)
1081 val = get_audio_clock(is);
1082 else
1083 val = get_video_clock(is);
1084 } else {
638c9d91 1085 val = get_external_clock(is);
72ea344b 1086 }
638c9d91
FB
1087 return val;
1088}
1089
72ea344b 1090/* seek in the stream */
2ef46053 1091static void stream_seek(VideoState *is, int64_t pos, int64_t rel, int seek_by_bytes)
72ea344b 1092{
687fae2b
IW
1093 if (!is->seek_req) {
1094 is->seek_pos = pos;
4ed29207 1095 is->seek_rel = rel;
3890dd3a 1096 is->seek_flags &= ~AVSEEK_FLAG_BYTE;
94b594c6
SH
1097 if (seek_by_bytes)
1098 is->seek_flags |= AVSEEK_FLAG_BYTE;
687fae2b
IW
1099 is->seek_req = 1;
1100 }
72ea344b
FB
1101}
1102
1103/* pause or resume the video */
1104static void stream_pause(VideoState *is)
1105{
68aefbe8
MN
1106 if (is->paused) {
1107 is->frame_timer += av_gettime() / 1000000.0 + is->video_current_pts_drift - is->video_current_pts;
f5668147 1108 if(is->read_pause_return != AVERROR(ENOSYS)){
68aefbe8 1109 is->video_current_pts = is->video_current_pts_drift + av_gettime() / 1000000.0;
f5668147 1110 }
68aefbe8 1111 is->video_current_pts_drift = is->video_current_pts - av_gettime() / 1000000.0;
72ea344b 1112 }
68aefbe8 1113 is->paused = !is->paused;
72ea344b
FB
1114}
1115
d38c9e7a 1116static double compute_target_time(double frame_current_pts, VideoState *is)
49410784 1117{
d38c9e7a 1118 double delay, sync_threshold, diff;
49410784
TB
1119
1120 /* compute nominal delay */
1121 delay = frame_current_pts - is->frame_last_pts;
1122 if (delay <= 0 || delay >= 10.0) {
1123 /* if incorrect delay, use previous one */
1124 delay = is->frame_last_delay;
443658fd 1125 } else {
712de377 1126 is->frame_last_delay = delay;
443658fd 1127 }
49410784
TB
1128 is->frame_last_pts = frame_current_pts;
1129
1130 /* update delay to follow master synchronisation source */
1131 if (((is->av_sync_type == AV_SYNC_AUDIO_MASTER && is->audio_st) ||
1132 is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
1133 /* if video is slave, we try to correct big delays by
1134 duplicating or deleting a frame */
f04c6e35 1135 diff = get_video_clock(is) - get_master_clock(is);
49410784
TB
1136
1137 /* skip or repeat frame. We take into account the
1138 delay to compute the threshold. I still don't know
1139 if it is the best guess */
1140 sync_threshold = FFMAX(AV_SYNC_THRESHOLD, delay);
1141 if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
1142 if (diff <= -sync_threshold)
1143 delay = 0;
1144 else if (diff >= sync_threshold)
1145 delay = 2 * delay;
1146 }
1147 }
49410784 1148 is->frame_timer += delay;
eecc17a7
TB
1149#if defined(DEBUG_SYNC)
1150 printf("video: delay=%0.3f actual_delay=%0.3f pts=%0.3f A-V=%f\n",
1151 delay, actual_delay, frame_current_pts, -diff);
1152#endif
1153
d38c9e7a 1154 return is->frame_timer;
49410784
TB
1155}
1156
01310af2
FB
1157/* called to display each frame */
1158static void video_refresh_timer(void *opaque)
1159{
1160 VideoState *is = opaque;
1161 VideoPicture *vp;
638c9d91 1162
72ce053b 1163 SubPicture *sp, *sp2;
01310af2
FB
1164
1165 if (is->video_st) {
d38c9e7a 1166retry:
01310af2 1167 if (is->pictq_size == 0) {
d38c9e7a 1168 //nothing to do, no picture to display in the que
01310af2 1169 } else {
d38c9e7a
MN
1170 double time= av_gettime()/1000000.0;
1171 double next_target;
638c9d91 1172 /* dequeue the picture */
01310af2 1173 vp = &is->pictq[is->pictq_rindex];
638c9d91 1174
d38c9e7a
MN
1175 if(time < vp->target_clock)
1176 return;
638c9d91
FB
1177 /* update current video pts */
1178 is->video_current_pts = vp->pts;
d38c9e7a 1179 is->video_current_pts_drift = is->video_current_pts - time;
a3cc2160 1180 is->video_current_pos = vp->pos;
d38c9e7a
MN
1181 if(is->pictq_size > 1){
1182 VideoPicture *nextvp= &is->pictq[(is->pictq_rindex+1)%VIDEO_PICTURE_QUEUE_SIZE];
1183 assert(nextvp->target_clock >= vp->target_clock);
1184 next_target= nextvp->target_clock;
1185 }else{
1186 next_target= vp->target_clock + is->video_clock - vp->pts; //FIXME pass durations cleanly
1187 }
1188 if(framedrop && time > next_target){
1189 is->skip_frames *= 1.0 + FRAME_SKIP_FACTOR;
1190 if(is->pictq_size > 1 || time > next_target + 0.5){
1191 /* update queue size and signal for next picture */
1192 if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
1193 is->pictq_rindex = 0;
1194
1195 SDL_LockMutex(is->pictq_mutex);
1196 is->pictq_size--;
1197 SDL_CondSignal(is->pictq_cond);
1198 SDL_UnlockMutex(is->pictq_mutex);
1199 goto retry;
1200 }
1201 }
638c9d91 1202
72ce053b
IC
1203 if(is->subtitle_st) {
1204 if (is->subtitle_stream_changed) {
1205 SDL_LockMutex(is->subpq_mutex);
115329f1 1206
72ce053b
IC
1207 while (is->subpq_size) {
1208 free_subpicture(&is->subpq[is->subpq_rindex]);
115329f1 1209
72ce053b
IC
1210 /* update queue size and signal for next picture */
1211 if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1212 is->subpq_rindex = 0;
115329f1 1213
72ce053b
IC
1214 is->subpq_size--;
1215 }
1216 is->subtitle_stream_changed = 0;
1217
1218 SDL_CondSignal(is->subpq_cond);
1219 SDL_UnlockMutex(is->subpq_mutex);
1220 } else {
1221 if (is->subpq_size > 0) {
1222 sp = &is->subpq[is->subpq_rindex];
1223
1224 if (is->subpq_size > 1)
1225 sp2 = &is->subpq[(is->subpq_rindex + 1) % SUBPICTURE_QUEUE_SIZE];
1226 else
1227 sp2 = NULL;
1228
1229 if ((is->video_current_pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
1230 || (sp2 && is->video_current_pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
1231 {
1232 free_subpicture(sp);
1233
1234 /* update queue size and signal for next picture */
1235 if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1236 is->subpq_rindex = 0;
1237
1238 SDL_LockMutex(is->subpq_mutex);
1239 is->subpq_size--;
1240 SDL_CondSignal(is->subpq_cond);
1241 SDL_UnlockMutex(is->subpq_mutex);
1242 }
1243 }
1244 }
1245 }
1246
01310af2
FB
1247 /* display picture */
1248 video_display(is);
115329f1 1249
01310af2
FB
1250 /* update queue size and signal for next picture */
1251 if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
1252 is->pictq_rindex = 0;
115329f1 1253
01310af2
FB
1254 SDL_LockMutex(is->pictq_mutex);
1255 is->pictq_size--;
1256 SDL_CondSignal(is->pictq_cond);
1257 SDL_UnlockMutex(is->pictq_mutex);
1258 }
1259 } else if (is->audio_st) {
1260 /* draw the next audio frame */
1261
01310af2
FB
1262 /* if only audio stream, then display the audio bars (better
1263 than nothing, just to test the implementation */
115329f1 1264
01310af2
FB
1265 /* display picture */
1266 video_display(is);
01310af2
FB
1267 }
1268 if (show_status) {
1269 static int64_t last_time;
1270 int64_t cur_time;
72ce053b 1271 int aqsize, vqsize, sqsize;
638c9d91 1272 double av_diff;
115329f1 1273
01310af2 1274 cur_time = av_gettime();
1e1a0b18 1275 if (!last_time || (cur_time - last_time) >= 30000) {
01310af2
FB
1276 aqsize = 0;
1277 vqsize = 0;
72ce053b 1278 sqsize = 0;
01310af2
FB
1279 if (is->audio_st)
1280 aqsize = is->audioq.size;
1281 if (is->video_st)
1282 vqsize = is->videoq.size;
72ce053b
IC
1283 if (is->subtitle_st)
1284 sqsize = is->subtitleq.size;
638c9d91
FB
1285 av_diff = 0;
1286 if (is->audio_st && is->video_st)
1287 av_diff = get_audio_clock(is) - get_video_clock(is);
382f3a5b
MN
1288 printf("%7.2f A-V:%7.3f s:%3.1f aq=%5dKB vq=%5dKB sq=%5dB f=%"PRId64"/%"PRId64" \r",
1289 get_master_clock(is), av_diff, FFMAX(is->skip_frames-1, 0), aqsize / 1024, vqsize / 1024, sqsize, is->faulty_dts, is->faulty_pts);
01310af2
FB
1290 fflush(stdout);
1291 last_time = cur_time;
1292 }
1293 }
1294}
1295
1296/* allocate a picture (needs to do that in main thread to avoid
1297 potential locking problems */
1298static void alloc_picture(void *opaque)
1299{
1300 VideoState *is = opaque;
1301 VideoPicture *vp;
01310af2
FB
1302
1303 vp = &is->pictq[is->pictq_windex];
1304
1305 if (vp->bmp)
1306 SDL_FreeYUVOverlay(vp->bmp);
1307
917d2bb3
MN
1308#if CONFIG_AVFILTER
1309 if (vp->picref)
1310 avfilter_unref_pic(vp->picref);
1311 vp->picref = NULL;
1312
1313 vp->width = is->out_video_filter->inputs[0]->w;
1314 vp->height = is->out_video_filter->inputs[0]->h;
1315 vp->pix_fmt = is->out_video_filter->inputs[0]->format;
1316#else
1317 vp->width = is->video_st->codec->width;
1318 vp->height = is->video_st->codec->height;
1319 vp->pix_fmt = is->video_st->codec->pix_fmt;
1320#endif
1321
1322 vp->bmp = SDL_CreateYUVOverlay(vp->width, vp->height,
115329f1 1323 SDL_YV12_OVERLAY,
61890b02 1324 screen);
01310af2
FB
1325
1326 SDL_LockMutex(is->pictq_mutex);
1327 vp->allocated = 1;
1328 SDL_CondSignal(is->pictq_cond);
1329 SDL_UnlockMutex(is->pictq_mutex);
1330}
1331
267e9dfa
MN
1332/**
1333 *
1334 * @param pts the dts of the pkt / pts of the frame and guessed if not known
1335 */
1a620dd7 1336static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, int64_t pos)
01310af2
FB
1337{
1338 VideoPicture *vp;
1339 int dst_pix_fmt;
917d2bb3
MN
1340#if CONFIG_AVFILTER
1341 AVPicture pict_src;
1342#endif
01310af2
FB
1343 /* wait until we have space to put a new picture */
1344 SDL_LockMutex(is->pictq_mutex);
d38c9e7a
MN
1345
1346 if(is->pictq_size>=VIDEO_PICTURE_QUEUE_SIZE && !is->refresh)
1347 is->skip_frames= FFMAX(1.0 - FRAME_SKIP_FACTOR, is->skip_frames * (1.0-FRAME_SKIP_FACTOR));
1348
01310af2
FB
1349 while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE &&
1350 !is->videoq.abort_request) {
1351 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1352 }
1353 SDL_UnlockMutex(is->pictq_mutex);
115329f1 1354
01310af2
FB
1355 if (is->videoq.abort_request)
1356 return -1;
1357
1358 vp = &is->pictq[is->pictq_windex];
1359
1360 /* alloc or resize hardware picture buffer */
115329f1 1361 if (!vp->bmp ||
917d2bb3
MN
1362#if CONFIG_AVFILTER
1363 vp->width != is->out_video_filter->inputs[0]->w ||
1364 vp->height != is->out_video_filter->inputs[0]->h) {
1365#else
01f4895c
MN
1366 vp->width != is->video_st->codec->width ||
1367 vp->height != is->video_st->codec->height) {
917d2bb3 1368#endif
01310af2
FB
1369 SDL_Event event;
1370
1371 vp->allocated = 0;
1372
1373 /* the allocation must be done in the main thread to avoid
1374 locking problems */
1375 event.type = FF_ALLOC_EVENT;
1376 event.user.data1 = is;
1377 SDL_PushEvent(&event);
115329f1 1378
01310af2
FB
1379 /* wait until the picture is allocated */
1380 SDL_LockMutex(is->pictq_mutex);
1381 while (!vp->allocated && !is->videoq.abort_request) {
1382 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1383 }
1384 SDL_UnlockMutex(is->pictq_mutex);
1385
1386 if (is->videoq.abort_request)
1387 return -1;
1388 }
1389
638c9d91 1390 /* if the frame is not skipped, then display it */
01310af2 1391 if (vp->bmp) {
fbf1b885 1392 AVPicture pict;
917d2bb3
MN
1393#if CONFIG_AVFILTER
1394 if(vp->picref)
1395 avfilter_unref_pic(vp->picref);
1396 vp->picref = src_frame->opaque;
1397#endif
fbf1b885 1398
01310af2
FB
1399 /* get a pointer on the bitmap */
1400 SDL_LockYUVOverlay (vp->bmp);
1401
1402 dst_pix_fmt = PIX_FMT_YUV420P;
fbf1b885 1403 memset(&pict,0,sizeof(AVPicture));
01310af2
FB
1404 pict.data[0] = vp->bmp->pixels[0];
1405 pict.data[1] = vp->bmp->pixels[2];
1406 pict.data[2] = vp->bmp->pixels[1];
1407
1408 pict.linesize[0] = vp->bmp->pitches[0];
1409 pict.linesize[1] = vp->bmp->pitches[2];
1410 pict.linesize[2] = vp->bmp->pitches[1];
917d2bb3
MN
1411
1412#if CONFIG_AVFILTER
1413 pict_src.data[0] = src_frame->data[0];
1414 pict_src.data[1] = src_frame->data[1];
1415 pict_src.data[2] = src_frame->data[2];
1416
1417 pict_src.linesize[0] = src_frame->linesize[0];
1418 pict_src.linesize[1] = src_frame->linesize[1];
1419 pict_src.linesize[2] = src_frame->linesize[2];
1420
1421 //FIXME use direct rendering
1422 av_picture_copy(&pict, &pict_src,
1423 vp->pix_fmt, vp->width, vp->height);
1424#else
e43d7a18 1425 sws_flags = av_get_int(sws_opts, "sws_flags", NULL);
3ac56e28 1426 is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx,
917d2bb3 1427 vp->width, vp->height, vp->pix_fmt, vp->width, vp->height,
feb7bc67 1428 dst_pix_fmt, sws_flags, NULL, NULL, NULL);
3ac56e28 1429 if (is->img_convert_ctx == NULL) {
26ba8235
AB
1430 fprintf(stderr, "Cannot initialize the conversion context\n");
1431 exit(1);
1432 }
3ac56e28 1433 sws_scale(is->img_convert_ctx, src_frame->data, src_frame->linesize,
917d2bb3
MN
1434 0, vp->height, pict.data, pict.linesize);
1435#endif
01310af2
FB
1436 /* update the bitmap content */
1437 SDL_UnlockYUVOverlay(vp->bmp);
1438
638c9d91 1439 vp->pts = pts;
1a620dd7 1440 vp->pos = pos;
01310af2
FB
1441
1442 /* now we can update the picture count */
1443 if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE)
1444 is->pictq_windex = 0;
1445 SDL_LockMutex(is->pictq_mutex);
d38c9e7a
MN
1446 vp->target_clock= compute_target_time(vp->pts, is);
1447
01310af2
FB
1448 is->pictq_size++;
1449 SDL_UnlockMutex(is->pictq_mutex);
1450 }
638c9d91
FB
1451 return 0;
1452}
1453
115329f1
DB
1454/**
1455 * compute the exact PTS for the picture if it is omitted in the stream
267e9dfa
MN
1456 * @param pts1 the dts of the pkt / pts of the frame
1457 */
1a620dd7 1458static int output_picture2(VideoState *is, AVFrame *src_frame, double pts1, int64_t pos)
638c9d91
FB
1459{
1460 double frame_delay, pts;
115329f1 1461
638c9d91
FB
1462 pts = pts1;
1463
01310af2 1464 if (pts != 0) {
638c9d91 1465 /* update video clock with pts, if present */
01310af2
FB
1466 is->video_clock = pts;
1467 } else {
72ea344b
FB
1468 pts = is->video_clock;
1469 }
1470 /* update video clock for next frame */
01f4895c 1471 frame_delay = av_q2d(is->video_st->codec->time_base);
72ea344b
FB
1472 /* for MPEG2, the frame can be repeated, so we update the
1473 clock accordingly */
267e9dfa 1474 frame_delay += src_frame->repeat_pict * (frame_delay * 0.5);
72ea344b 1475 is->video_clock += frame_delay;
638c9d91
FB
1476
1477#if defined(DEBUG_SYNC) && 0
ff358eca
SS
1478 printf("frame_type=%c clock=%0.3f pts=%0.3f\n",
1479 av_get_pict_type_char(src_frame->pict_type), pts, pts1);
638c9d91 1480#endif
1a620dd7 1481 return queue_picture(is, src_frame, pts, pos);
01310af2
FB
1482}
1483
3966a574 1484static int get_video_frame(VideoState *is, AVFrame *frame, int64_t *pts, AVPacket *pkt)
01310af2 1485{
6c7d3ead 1486 int len1, got_picture, i;
01310af2 1487
01310af2 1488 if (packet_queue_get(&is->videoq, pkt, 1) < 0)
917d2bb3 1489 return -1;
39c6a118
MN
1490
1491 if(pkt->data == flush_pkt.data){
1492 avcodec_flush_buffers(is->video_st->codec);
6c7d3ead
MN
1493
1494 SDL_LockMutex(is->pictq_mutex);
1495 //Make sure there are no long delay timers (ideally we should just flush the que but thats harder)
1496 for(i=0; i<VIDEO_PICTURE_QUEUE_SIZE; i++){
d38c9e7a 1497 is->pictq[i].target_clock= 0;
6c7d3ead
MN
1498 }
1499 while (is->pictq_size && !is->videoq.abort_request) {
1500 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1501 }
1a620dd7 1502 is->video_current_pos= -1;
6c7d3ead
MN
1503 SDL_UnlockMutex(is->pictq_mutex);
1504
41db429d
MN
1505 is->last_dts_for_fault_detection=
1506 is->last_pts_for_fault_detection= INT64_MIN;
967030eb 1507 is->frame_last_pts= AV_NOPTS_VALUE;
f7119e42 1508 is->frame_last_delay = 0;
b25453bd 1509 is->frame_timer = (double)av_gettime() / 1000000.0;
d38c9e7a
MN
1510 is->skip_frames= 1;
1511 is->skip_frames_index= 0;
917d2bb3 1512 return 0;
39c6a118
MN
1513 }
1514
638c9d91
FB
1515 /* NOTE: ipts is the PTS of the _first_ picture beginning in
1516 this packet, if any */
7fb262b5 1517 is->video_st->codec->reordered_opaque= pkt->pts;
bea18375 1518 len1 = avcodec_decode_video2(is->video_st->codec,
620e8baf 1519 frame, &got_picture,
bea18375 1520 pkt);
620e8baf 1521
99e0b12b 1522 if (got_picture) {
df7d6e48
SS
1523 if(pkt->dts != AV_NOPTS_VALUE){
1524 is->faulty_dts += pkt->dts <= is->last_dts_for_fault_detection;
1525 is->last_dts_for_fault_detection= pkt->dts;
1526 }
1527 if(frame->reordered_opaque != AV_NOPTS_VALUE){
1528 is->faulty_pts += frame->reordered_opaque <= is->last_pts_for_fault_detection;
1529 is->last_pts_for_fault_detection= frame->reordered_opaque;
1530 }
99e0b12b 1531 }
41db429d
MN
1532
1533 if( ( decoder_reorder_pts==1
ecbed31c 1534 || (decoder_reorder_pts && is->faulty_pts<is->faulty_dts)
41db429d 1535 || pkt->dts == AV_NOPTS_VALUE)
7fb262b5 1536 && frame->reordered_opaque != AV_NOPTS_VALUE)
917d2bb3 1537 *pts= frame->reordered_opaque;
620e8baf 1538 else if(pkt->dts != AV_NOPTS_VALUE)
917d2bb3 1539 *pts= pkt->dts;
620e8baf 1540 else
917d2bb3
MN
1541 *pts= 0;
1542
fb966f99
MN
1543// if (len1 < 0)
1544// break;
d38c9e7a
MN
1545 if (got_picture){
1546 is->skip_frames_index += 1;
1547 if(is->skip_frames_index >= is->skip_frames){
1548 is->skip_frames_index -= FFMAX(is->skip_frames, 1.0);
1549 return 1;
1550 }
1551
1552 }
917d2bb3
MN
1553 return 0;
1554}
1555
1556#if CONFIG_AVFILTER
1557typedef struct {
1558 VideoState *is;
1559 AVFrame *frame;
dd0c789b 1560 int use_dr1;
917d2bb3
MN
1561} FilterPriv;
1562
dd0c789b
BB
1563static int input_get_buffer(AVCodecContext *codec, AVFrame *pic)
1564{
1565 AVFilterContext *ctx = codec->opaque;
1566 AVFilterPicRef *ref;
1567 int perms = AV_PERM_WRITE;
65929418 1568 int i, w, h, stride[4];
dd0c789b
BB
1569 unsigned edge;
1570
1571 if(pic->buffer_hints & FF_BUFFER_HINTS_VALID) {
1572 if(pic->buffer_hints & FF_BUFFER_HINTS_READABLE) perms |= AV_PERM_READ;
1573 if(pic->buffer_hints & FF_BUFFER_HINTS_PRESERVE) perms |= AV_PERM_PRESERVE;
1574 if(pic->buffer_hints & FF_BUFFER_HINTS_REUSABLE) perms |= AV_PERM_REUSE2;
1575 }
1576 if(pic->reference) perms |= AV_PERM_READ | AV_PERM_PRESERVE;
1577
1578 w = codec->width;
1579 h = codec->height;
1580 avcodec_align_dimensions2(codec, &w, &h, stride);
1581 edge = codec->flags & CODEC_FLAG_EMU_EDGE ? 0 : avcodec_get_edge_width();
1582 w += edge << 1;
1583 h += edge << 1;
1584
1585 if(!(ref = avfilter_get_video_buffer(ctx->outputs[0], perms, w, h)))
1586 return -1;
1587
1588 ref->w = codec->width;
1589 ref->h = codec->height;
65929418 1590 for(i = 0; i < 3; i ++) {
dd0c789b
BB
1591 unsigned hshift = i == 0 ? 0 : av_pix_fmt_descriptors[ref->pic->format].log2_chroma_w;
1592 unsigned vshift = i == 0 ? 0 : av_pix_fmt_descriptors[ref->pic->format].log2_chroma_h;
1593
3635c07b 1594 if (ref->data[i]) {
e53ca636 1595 ref->data[i] += (edge >> hshift) + ((edge * ref->linesize[i]) >> vshift);
3635c07b 1596 }
dd0c789b
BB
1597 pic->data[i] = ref->data[i];
1598 pic->linesize[i] = ref->linesize[i];
1599 }
1600 pic->opaque = ref;
1601 pic->age = INT_MAX;
1602 pic->type = FF_BUFFER_TYPE_USER;
867ab7fb 1603 pic->reordered_opaque = codec->reordered_opaque;
dd0c789b
BB
1604 return 0;
1605}
1606
1607static void input_release_buffer(AVCodecContext *codec, AVFrame *pic)
1608{
1609 memset(pic->data, 0, sizeof(pic->data));
1610 avfilter_unref_pic(pic->opaque);
1611}
1612
12bd3c1f
JM
1613static int input_reget_buffer(AVCodecContext *codec, AVFrame *pic)
1614{
1615 AVFilterPicRef *ref = pic->opaque;
1616
1617 if (pic->data[0] == NULL) {
1618 pic->buffer_hints |= FF_BUFFER_HINTS_READABLE;
1619 return codec->get_buffer(codec, pic);
1620 }
1621
1622 if ((codec->width != ref->w) || (codec->height != ref->h) ||
1623 (codec->pix_fmt != ref->pic->format)) {
1624 av_log(codec, AV_LOG_ERROR, "Picture properties changed.\n");
1625 return -1;
1626 }
1627
1628 pic->reordered_opaque = codec->reordered_opaque;
1629 return 0;
1630}
1631
917d2bb3
MN
1632static int input_init(AVFilterContext *ctx, const char *args, void *opaque)
1633{
1634 FilterPriv *priv = ctx->priv;
dd0c789b 1635 AVCodecContext *codec;
917d2bb3
MN
1636 if(!opaque) return -1;
1637
1638 priv->is = opaque;
dd0c789b
BB
1639 codec = priv->is->video_st->codec;
1640 codec->opaque = ctx;
1641 if(codec->codec->capabilities & CODEC_CAP_DR1) {
1642 priv->use_dr1 = 1;
1643 codec->get_buffer = input_get_buffer;
1644 codec->release_buffer = input_release_buffer;
12bd3c1f 1645 codec->reget_buffer = input_reget_buffer;
dd0c789b
BB
1646 }
1647
917d2bb3
MN
1648 priv->frame = avcodec_alloc_frame();
1649
1650 return 0;
1651}
1652
1653static void input_uninit(AVFilterContext *ctx)
1654{
1655 FilterPriv *priv = ctx->priv;
1656 av_free(priv->frame);
1657}
1658
1659static int input_request_frame(AVFilterLink *link)
1660{
1661 FilterPriv *priv = link->src->priv;
1662 AVFilterPicRef *picref;
3966a574 1663 int64_t pts = 0;
917d2bb3
MN
1664 AVPacket pkt;
1665 int ret;
1666
1667 while (!(ret = get_video_frame(priv->is, priv->frame, &pts, &pkt)))
1668 av_free_packet(&pkt);
1669 if (ret < 0)
1670 return -1;
1671
dd0c789b 1672 if(priv->use_dr1) {
c41c5b02 1673 picref = avfilter_ref_pic(priv->frame->opaque, ~0);
dd0c789b 1674 } else {
cf097cbc
BB
1675 picref = avfilter_get_video_buffer(link, AV_PERM_WRITE, link->w, link->h);
1676 av_picture_copy((AVPicture *)&picref->data, (AVPicture *)priv->frame,
1677 picref->pic->format, link->w, link->h);
dd0c789b 1678 }
917d2bb3
MN
1679 av_free_packet(&pkt);
1680
1681 picref->pts = pts;
bb409513 1682 picref->pos = pkt.pos;
917d2bb3 1683 picref->pixel_aspect = priv->is->video_st->codec->sample_aspect_ratio;
c41c5b02 1684 avfilter_start_frame(link, picref);
917d2bb3
MN
1685 avfilter_draw_slice(link, 0, link->h, 1);
1686 avfilter_end_frame(link);
917d2bb3
MN
1687
1688 return 0;
1689}
1690
1691static int input_query_formats(AVFilterContext *ctx)
1692{
1693 FilterPriv *priv = ctx->priv;
1694 enum PixelFormat pix_fmts[] = {
1695 priv->is->video_st->codec->pix_fmt, PIX_FMT_NONE
1696 };
1697
1698 avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
1699 return 0;
1700}
1701
1702static int input_config_props(AVFilterLink *link)
1703{
1704 FilterPriv *priv = link->src->priv;
1705 AVCodecContext *c = priv->is->video_st->codec;
1706
1707 link->w = c->width;
1708 link->h = c->height;
1709
1710 return 0;
1711}
1712
1713static AVFilter input_filter =
1714{
1715 .name = "ffplay_input",
1716
1717 .priv_size = sizeof(FilterPriv),
1718
1719 .init = input_init,
1720 .uninit = input_uninit,
1721
1722 .query_formats = input_query_formats,
1723
1724 .inputs = (AVFilterPad[]) {{ .name = NULL }},
1725 .outputs = (AVFilterPad[]) {{ .name = "default",
72415b2a 1726 .type = AVMEDIA_TYPE_VIDEO,
917d2bb3
MN
1727 .request_frame = input_request_frame,
1728 .config_props = input_config_props, },
1729 { .name = NULL }},
1730};
1731
1732static void output_end_frame(AVFilterLink *link)
1733{
1734}
1735
1736static int output_query_formats(AVFilterContext *ctx)
1737{
1738 enum PixelFormat pix_fmts[] = { PIX_FMT_YUV420P, PIX_FMT_NONE };
1739
1740 avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
1741 return 0;
1742}
1743
1744static int get_filtered_video_frame(AVFilterContext *ctx, AVFrame *frame,
bb409513 1745 int64_t *pts, int64_t *pos)
917d2bb3
MN
1746{
1747 AVFilterPicRef *pic;
1748
1749 if(avfilter_request_frame(ctx->inputs[0]))
1750 return -1;
1751 if(!(pic = ctx->inputs[0]->cur_pic))
1752 return -1;
1753 ctx->inputs[0]->cur_pic = NULL;
1754
1755 frame->opaque = pic;
1756 *pts = pic->pts;
bb409513 1757 *pos = pic->pos;
917d2bb3
MN
1758
1759 memcpy(frame->data, pic->data, sizeof(frame->data));
1760 memcpy(frame->linesize, pic->linesize, sizeof(frame->linesize));
1761
1762 return 1;
1763}
1764
1765static AVFilter output_filter =
1766{
1767 .name = "ffplay_output",
1768
1769 .query_formats = output_query_formats,
1770
1771 .inputs = (AVFilterPad[]) {{ .name = "default",
72415b2a 1772 .type = AVMEDIA_TYPE_VIDEO,
917d2bb3
MN
1773 .end_frame = output_end_frame,
1774 .min_perms = AV_PERM_READ, },
1775 { .name = NULL }},
1776 .outputs = (AVFilterPad[]) {{ .name = NULL }},
1777};
1778#endif /* CONFIG_AVFILTER */
1779
1780static int video_thread(void *arg)
1781{
1782 VideoState *is = arg;
1783 AVFrame *frame= avcodec_alloc_frame();
4903b5ca 1784 int64_t pts_int;
917d2bb3
MN
1785 double pts;
1786 int ret;
1787
1788#if CONFIG_AVFILTER
4903b5ca 1789 int64_t pos;
3f073fa2 1790 char sws_flags_str[128];
917d2bb3
MN
1791 AVFilterContext *filt_src = NULL, *filt_out = NULL;
1792 AVFilterGraph *graph = av_mallocz(sizeof(AVFilterGraph));
3f073fa2
SS
1793 snprintf(sws_flags_str, sizeof(sws_flags_str), "flags=%d", sws_flags);
1794 graph->scale_sws_opts = av_strdup(sws_flags_str);
917d2bb3
MN
1795
1796 if(!(filt_src = avfilter_open(&input_filter, "src"))) goto the_end;
1797 if(!(filt_out = avfilter_open(&output_filter, "out"))) goto the_end;
1798
1799 if(avfilter_init_filter(filt_src, NULL, is)) goto the_end;
1800 if(avfilter_init_filter(filt_out, NULL, frame)) goto the_end;
1801
1802
1803 if(vfilters) {
1804 AVFilterInOut *outputs = av_malloc(sizeof(AVFilterInOut));
1805 AVFilterInOut *inputs = av_malloc(sizeof(AVFilterInOut));
1806
1807 outputs->name = av_strdup("in");
1808 outputs->filter = filt_src;
1809 outputs->pad_idx = 0;
1810 outputs->next = NULL;
1811
1812 inputs->name = av_strdup("out");
1813 inputs->filter = filt_out;
1814 inputs->pad_idx = 0;
1815 inputs->next = NULL;
1816
1817 if (avfilter_graph_parse(graph, vfilters, inputs, outputs, NULL) < 0)
1818 goto the_end;
1819 av_freep(&vfilters);
1820 } else {
1821 if(avfilter_link(filt_src, 0, filt_out, 0) < 0) goto the_end;
1822 }
1823 avfilter_graph_add_filter(graph, filt_src);
1824 avfilter_graph_add_filter(graph, filt_out);
1825
1826 if(avfilter_graph_check_validity(graph, NULL)) goto the_end;
1827 if(avfilter_graph_config_formats(graph, NULL)) goto the_end;
1828 if(avfilter_graph_config_links(graph, NULL)) goto the_end;
1829
1830 is->out_video_filter = filt_out;
1831#endif
1832
1833 for(;;) {
1834#if !CONFIG_AVFILTER
1835 AVPacket pkt;
1836#endif
1837 while (is->paused && !is->videoq.abort_request)
1838 SDL_Delay(10);
1839#if CONFIG_AVFILTER
bb409513 1840 ret = get_filtered_video_frame(filt_out, frame, &pts_int, &pos);
917d2bb3
MN
1841#else
1842 ret = get_video_frame(is, frame, &pts_int, &pkt);
1843#endif
1844
1845 if (ret < 0) goto the_end;
1846
1847 if (!ret)
1848 continue;
1849
3966a574 1850 pts = pts_int*av_q2d(is->video_st->time_base);
917d2bb3
MN
1851
1852#if CONFIG_AVFILTER
bb409513 1853 ret = output_picture2(is, frame, pts, pos);
917d2bb3 1854#else
fca62599 1855 ret = output_picture2(is, frame, pts, pkt.pos);
917d2bb3
MN
1856 av_free_packet(&pkt);
1857#endif
1858 if (ret < 0)
1859 goto the_end;
1860
115329f1 1861 if (step)
bba04f1e
WH
1862 if (cur_stream)
1863 stream_pause(cur_stream);
01310af2
FB
1864 }
1865 the_end:
917d2bb3
MN
1866#if CONFIG_AVFILTER
1867 avfilter_graph_destroy(graph);
1868 av_freep(&graph);
1869#endif
c6b1edc9 1870 av_free(frame);
01310af2
FB
1871 return 0;
1872}
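/* Sketch (illustration only, not used by the player): how the pts_int ->
   seconds conversion in video_thread() works.  A stream time base is an
   AVRational giving seconds per tick as num/den, and av_q2d() turns it into
   a double; the 90 kHz time base and the pts value below are made-up
   example numbers. */
static double example_pts_to_seconds(void)
{
    AVRational time_base = { 1, 90000 }; /* e.g. a 90 kHz MPEG clock */
    int64_t pts_int = 450000;            /* hypothetical decoded timestamp */
    /* 450000 ticks * (1 / 90000) s per tick = 5.0 seconds */
    return pts_int * av_q2d(time_base);
}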
1873
72ce053b
IC
1874static int subtitle_thread(void *arg)
1875{
1876 VideoState *is = arg;
1877 SubPicture *sp;
1878 AVPacket pkt1, *pkt = &pkt1;
1879 int len1, got_subtitle;
1880 double pts;
1881 int i, j;
1882 int r, g, b, y, u, v, a;
1883
1884 for(;;) {
1885 while (is->paused && !is->subtitleq.abort_request) {
1886 SDL_Delay(10);
1887 }
1888 if (packet_queue_get(&is->subtitleq, pkt, 1) < 0)
1889 break;
115329f1 1890
39c6a118
MN
1891 if(pkt->data == flush_pkt.data){
1892 avcodec_flush_buffers(is->subtitle_st->codec);
1893 continue;
1894 }
72ce053b
IC
1895 SDL_LockMutex(is->subpq_mutex);
1896 while (is->subpq_size >= SUBPICTURE_QUEUE_SIZE &&
1897 !is->subtitleq.abort_request) {
1898 SDL_CondWait(is->subpq_cond, is->subpq_mutex);
1899 }
1900 SDL_UnlockMutex(is->subpq_mutex);
115329f1 1901
72ce053b
IC
1902 if (is->subtitleq.abort_request)
1903 goto the_end;
115329f1 1904
72ce053b
IC
1905 sp = &is->subpq[is->subpq_windex];
1906
1907 /* NOTE: pts is the PTS of the _first_ picture beginning in
1908 this packet, if any */
1909 pts = 0;
1910 if (pkt->pts != AV_NOPTS_VALUE)
1911 pts = av_q2d(is->subtitle_st->time_base)*pkt->pts;
1912
bea18375 1913 len1 = avcodec_decode_subtitle2(is->subtitle_st->codec,
115329f1 1914 &sp->sub, &got_subtitle,
bea18375 1915 pkt);
72ce053b
IC
1916// if (len1 < 0)
1917// break;
1918 if (got_subtitle && sp->sub.format == 0) {
1919 sp->pts = pts;
115329f1 1920
72ce053b
IC
1921 for (i = 0; i < sp->sub.num_rects; i++)
1922 {
db4fac64 1923 for (j = 0; j < sp->sub.rects[i]->nb_colors; j++)
72ce053b 1924 {
25b4c651 1925 RGBA_IN(r, g, b, a, (uint32_t*)sp->sub.rects[i]->pict.data[1] + j);
72ce053b
IC
1926 y = RGB_TO_Y_CCIR(r, g, b);
1927 u = RGB_TO_U_CCIR(r, g, b, 0);
1928 v = RGB_TO_V_CCIR(r, g, b, 0);
25b4c651 1929 YUVA_OUT((uint32_t*)sp->sub.rects[i]->pict.data[1] + j, y, u, v, a);
72ce053b
IC
1930 }
1931 }
1932
1933 /* now we can update the picture count */
1934 if (++is->subpq_windex == SUBPICTURE_QUEUE_SIZE)
1935 is->subpq_windex = 0;
1936 SDL_LockMutex(is->subpq_mutex);
1937 is->subpq_size++;
1938 SDL_UnlockMutex(is->subpq_mutex);
1939 }
1940 av_free_packet(pkt);
115329f1 1941// if (step)
72ce053b
IC
1942// if (cur_stream)
1943// stream_pause(cur_stream);
1944 }
1945 the_end:
1946 return 0;
1947}
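/* Sketch (illustration only): approximate floating point equivalents of the
   integer RGB_TO_Y_CCIR / RGB_TO_U_CCIR / RGB_TO_V_CCIR macros used in
   subtitle_thread() above.  They map full-range 8-bit RGB to CCIR 601
   limited-range YCbCr (Y in 16..235, Cb/Cr in 16..240); the coefficients
   are rounded here for readability. */
static void example_rgb_to_ycbcr_ccir(int r, int g, int b,
                                      int *y, int *u, int *v)
{
    *y = (int)( 16.0 + 0.257 * r + 0.504 * g + 0.098 * b + 0.5);
    *u = (int)(128.0 - 0.148 * r - 0.291 * g + 0.439 * b + 0.5);
    *v = (int)(128.0 + 0.439 * r - 0.368 * g - 0.071 * b + 0.5);
}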
1948
01310af2
FB
1949/* copy samples for viewing in editor window */
1950static void update_sample_display(VideoState *is, short *samples, int samples_size)
1951{
1952 int size, len, channels;
1953
01f4895c 1954 channels = is->audio_st->codec->channels;
01310af2
FB
1955
1956 size = samples_size / sizeof(short);
1957 while (size > 0) {
1958 len = SAMPLE_ARRAY_SIZE - is->sample_array_index;
1959 if (len > size)
1960 len = size;
1961 memcpy(is->sample_array + is->sample_array_index, samples, len * sizeof(short));
1962 samples += len;
1963 is->sample_array_index += len;
1964 if (is->sample_array_index >= SAMPLE_ARRAY_SIZE)
1965 is->sample_array_index = 0;
1966 size -= len;
1967 }
1968}
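/* Sketch (illustration only): the wrap-around above expressed as a generic
   ring-buffer write; update_sample_display() is the same idea specialised
   for the 16-bit waveform buffer.  Names here are hypothetical. */
static void example_ring_write(short *ring, int ring_size, int *windex,
                               const short *src, int count)
{
    while (count > 0) {
        int chunk = ring_size - *windex;      /* room left before the wrap */
        if (chunk > count)
            chunk = count;
        memcpy(ring + *windex, src, chunk * sizeof(*ring));
        src     += chunk;
        count   -= chunk;
        *windex  = (*windex + chunk) % ring_size;
    }
}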
1969
01310af2
FB
1970 /* return the new audio buffer size (samples can be added or deleted
1971 to get better sync if the video or the external clock is the master) */
115329f1 1972static int synchronize_audio(VideoState *is, short *samples,
638c9d91 1973 int samples_size1, double pts)
01310af2 1974{
638c9d91 1975 int n, samples_size;
01310af2 1976 double ref_clock;
115329f1 1977
01f4895c 1978 n = 2 * is->audio_st->codec->channels;
638c9d91 1979 samples_size = samples_size1;
01310af2 1980
01310af2 1981 /* if not master, then we try to remove or add samples to correct the clock */
01310af2 1982 if (((is->av_sync_type == AV_SYNC_VIDEO_MASTER && is->video_st) ||
638c9d91
FB
1983 is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
1984 double diff, avg_diff;
01310af2 1985 int wanted_size, min_size, max_size, nb_samples;
115329f1 1986
638c9d91
FB
1987 ref_clock = get_master_clock(is);
1988 diff = get_audio_clock(is) - ref_clock;
115329f1 1989
638c9d91
FB
1990 if (diff < AV_NOSYNC_THRESHOLD) {
1991 is->audio_diff_cum = diff + is->audio_diff_avg_coef * is->audio_diff_cum;
1992 if (is->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) {
1993 /* not enough measures to have a correct estimate */
1994 is->audio_diff_avg_count++;
1995 } else {
1996 /* estimate the A-V difference */
1997 avg_diff = is->audio_diff_cum * (1.0 - is->audio_diff_avg_coef);
1998
1999 if (fabs(avg_diff) >= is->audio_diff_threshold) {
01f4895c 2000 wanted_size = samples_size + ((int)(diff * is->audio_st->codec->sample_rate) * n);
638c9d91 2001 nb_samples = samples_size / n;
115329f1 2002
638c9d91
FB
2003 min_size = ((nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
2004 max_size = ((nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
2005 if (wanted_size < min_size)
2006 wanted_size = min_size;
2007 else if (wanted_size > max_size)
2008 wanted_size = max_size;
115329f1 2009
638c9d91
FB
2010 /* add or remove samples to correct the synchronization */
2011 if (wanted_size < samples_size) {
2012 /* remove samples */
2013 samples_size = wanted_size;
2014 } else if (wanted_size > samples_size) {
2015 uint8_t *samples_end, *q;
2016 int nb;
115329f1 2017
638c9d91
FB
2018 /* add samples */
2019 nb = (samples_size - wanted_size);
2020 samples_end = (uint8_t *)samples + samples_size - n;
2021 q = samples_end + n;
2022 while (nb > 0) {
2023 memcpy(q, samples_end, n);
2024 q += n;
2025 nb -= n;
2026 }
2027 samples_size = wanted_size;
2028 }
2029 }
2030#if 0
115329f1
DB
2031 printf("diff=%f adiff=%f sample_diff=%d apts=%0.3f vpts=%0.3f %f\n",
2032 diff, avg_diff, samples_size - samples_size1,
638c9d91
FB
2033 is->audio_clock, is->video_clock, is->audio_diff_threshold);
2034#endif
01310af2 2035 }
638c9d91
FB
2036 } else {
2037 /* difference is too big: may be due to initial PTS errors, so
2038 reset the A-V filter */
2039 is->audio_diff_avg_count = 0;
2040 is->audio_diff_cum = 0;
01310af2
FB
2041 }
2042 }
2043
01310af2
FB
2044 return samples_size;
2045}
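/* Worked example (illustration only) of the correction computed above,
   assuming 44100 Hz stereo S16 audio (n = 2 channels * 2 bytes = 4) and an
   audio clock running 50 ms ahead of the master clock.  All numbers are
   hypothetical. */
static int example_wanted_size(void)
{
    int    sample_rate  = 44100;
    int    n            = 4;       /* bytes per sample frame */
    int    samples_size = 8192;    /* bytes delivered by the decoder */
    double diff         = 0.050;   /* audio clock 50 ms ahead of master */

    /* 0.050 s * 44100 Hz = 2205 extra frames = 8820 extra bytes requested */
    int wanted_size = samples_size + (int)(diff * sample_rate) * n;
    int nb_samples  = samples_size / n;

    /* the change is clamped to +-SAMPLE_CORRECTION_PERCENT_MAX (10%) */
    int min_size = ((nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
    int max_size = ((nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
    if (wanted_size < min_size)
        wanted_size = min_size;
    else if (wanted_size > max_size)
        wanted_size = max_size;
    return wanted_size;            /* here: clamped to 9008 bytes (about +10%) */
}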
2046
2047 /* decode one audio frame and return its uncompressed size */
5a4476e2 2048static int audio_decode_frame(VideoState *is, double *pts_ptr)
01310af2 2049{
bea18375 2050 AVPacket *pkt_temp = &is->audio_pkt_temp;
01310af2 2051 AVPacket *pkt = &is->audio_pkt;
abdff646 2052 AVCodecContext *dec= is->audio_st->codec;
72ea344b 2053 int n, len1, data_size;
01310af2
FB
2054 double pts;
2055
2056 for(;;) {
72ea344b 2057 /* NOTE: the audio packet can contain several frames */
bea18375 2058 while (pkt_temp->size > 0) {
5a4476e2 2059 data_size = sizeof(is->audio_buf1);
bea18375 2060 len1 = avcodec_decode_audio3(dec,
5a4476e2 2061 (int16_t *)is->audio_buf1, &data_size,
bea18375 2062 pkt_temp);
72ea344b
FB
2063 if (len1 < 0) {
2064 /* if error, we skip the frame */
bea18375 2065 pkt_temp->size = 0;
01310af2 2066 break;
72ea344b 2067 }
115329f1 2068
bea18375
TB
2069 pkt_temp->data += len1;
2070 pkt_temp->size -= len1;
72ea344b
FB
2071 if (data_size <= 0)
2072 continue;
5a4476e2
PR
2073
2074 if (dec->sample_fmt != is->audio_src_fmt) {
2075 if (is->reformat_ctx)
2076 av_audio_convert_free(is->reformat_ctx);
2077 is->reformat_ctx= av_audio_convert_alloc(SAMPLE_FMT_S16, 1,
2078 dec->sample_fmt, 1, NULL, 0);
2079 if (!is->reformat_ctx) {
2080 fprintf(stderr, "Cannot convert %s sample format to %s sample format\n",
2081 avcodec_get_sample_fmt_name(dec->sample_fmt),
2082 avcodec_get_sample_fmt_name(SAMPLE_FMT_S16));
2083 break;
2084 }
2085 is->audio_src_fmt= dec->sample_fmt;
2086 }
2087
2088 if (is->reformat_ctx) {
2089 const void *ibuf[6]= {is->audio_buf1};
2090 void *obuf[6]= {is->audio_buf2};
2091 int istride[6]= {av_get_bits_per_sample_format(dec->sample_fmt)/8};
2092 int ostride[6]= {2};
2093 int len= data_size/istride[0];
2094 if (av_audio_convert(is->reformat_ctx, obuf, ostride, ibuf, istride, len)<0) {
2095 printf("av_audio_convert() failed\n");
2096 break;
2097 }
2098 is->audio_buf= is->audio_buf2;
2099 /* FIXME: existing code assumes that data_size equals framesize*channels*2;
2100 remove this legacy cruft */
2101 data_size= len*2;
2102 }else{
2103 is->audio_buf= is->audio_buf1;
2104 }
2105
72ea344b
FB
2106 /* if no pts, then compute it */
2107 pts = is->audio_clock;
2108 *pts_ptr = pts;
abdff646 2109 n = 2 * dec->channels;
115329f1 2110 is->audio_clock += (double)data_size /
abdff646 2111 (double)(n * dec->sample_rate);
638c9d91 2112#if defined(DEBUG_SYNC)
72ea344b
FB
2113 {
2114 static double last_clock;
2115 printf("audio: delay=%0.3f clock=%0.3f pts=%0.3f\n",
2116 is->audio_clock - last_clock,
2117 is->audio_clock, pts);
2118 last_clock = is->audio_clock;
01310af2 2119 }
72ea344b
FB
2120#endif
2121 return data_size;
01310af2
FB
2122 }
2123
72ea344b
FB
2124 /* free the current packet */
2125 if (pkt->data)
01310af2 2126 av_free_packet(pkt);
115329f1 2127
72ea344b
FB
2128 if (is->paused || is->audioq.abort_request) {
2129 return -1;
2130 }
115329f1 2131
01310af2
FB
2132 /* read next packet */
2133 if (packet_queue_get(&is->audioq, pkt, 1) < 0)
2134 return -1;
39c6a118 2135 if(pkt->data == flush_pkt.data){
abdff646 2136 avcodec_flush_buffers(dec);
39c6a118
MN
2137 continue;
2138 }
2139
bea18375
TB
2140 pkt_temp->data = pkt->data;
2141 pkt_temp->size = pkt->size;
115329f1 2142
72ea344b
FB
2143 /* update the audio clock with the pts, if available */
2144 if (pkt->pts != AV_NOPTS_VALUE) {
c0df9d75 2145 is->audio_clock = av_q2d(is->audio_st->time_base)*pkt->pts;
72ea344b 2146 }
01310af2
FB
2147 }
2148}
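/* Sketch (illustration only) of what the av_audio_convert() call above
   amounts to in one common case: interleaved signed 32-bit samples reduced
   to the signed 16-bit output SDL expects.  istride/ostride mirror the
   per-sample byte sizes passed to the converter; the helper name is
   hypothetical and the real conversion may also apply rounding. */
static int example_s32_to_s16(const int32_t *in, int16_t *out, int data_size)
{
    int istride = 4;                     /* bytes per S32 input sample */
    int ostride = 2;                     /* bytes per S16 output sample */
    int len = data_size / istride;       /* number of samples to convert */
    int i;

    for (i = 0; i < len; i++)
        out[i] = (int16_t)(in[i] >> 16); /* keep the 16 most significant bits */

    return len * ostride;                /* the new data_size, as above */
}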
2149
638c9d91
FB
2150 /* get the current audio output buffer size, in bytes. With SDL, we
2151 cannot have precise information on the hardware buffer fullness */
2152static int audio_write_get_buf_size(VideoState *is)
01310af2 2153{
b09b580b 2154 return is->audio_buf_size - is->audio_buf_index;
01310af2
FB
2155}
2156
2157
2158/* prepare a new audio buffer */
358061f6 2159static void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
01310af2
FB
2160{
2161 VideoState *is = opaque;
2162 int audio_size, len1;
2163 double pts;
2164
2165 audio_callback_time = av_gettime();
115329f1 2166
01310af2
FB
2167 while (len > 0) {
2168 if (is->audio_buf_index >= is->audio_buf_size) {
5a4476e2 2169 audio_size = audio_decode_frame(is, &pts);
01310af2
FB
2170 if (audio_size < 0) {
2171 /* if error, just output silence */
1a1078fa 2172 is->audio_buf = is->audio_buf1;
01310af2
FB
2173 is->audio_buf_size = 1024;
2174 memset(is->audio_buf, 0, is->audio_buf_size);
2175 } else {
2176 if (is->show_audio)
2177 update_sample_display(is, (int16_t *)is->audio_buf, audio_size);
115329f1 2178 audio_size = synchronize_audio(is, (int16_t *)is->audio_buf, audio_size,
01310af2
FB
2179 pts);
2180 is->audio_buf_size = audio_size;
2181 }
2182 is->audio_buf_index = 0;
2183 }
2184 len1 = is->audio_buf_size - is->audio_buf_index;
2185 if (len1 > len)
2186 len1 = len;
2187 memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);
2188 len -= len1;
2189 stream += len1;
2190 is->audio_buf_index += len1;
2191 }
2192}
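/* Sketch (illustration only) of the pull model behind sdl_audio_callback():
   SDL invokes the callback from its own audio thread whenever the device
   needs 'len' more bytes, and the callback must fill all of them (with
   silence if nothing is available).  A hypothetical tone generator for S16
   mono at 44.1 kHz makes the contract explicit: */
static void example_sine_callback(void *opaque, Uint8 *stream, int len)
{
    static double phase;
    const double two_pi = 6.283185307179586;
    int16_t *out = (int16_t *)stream;
    int i, nb = len / 2;                 /* S16 mono: 2 bytes per sample */

    for (i = 0; i < nb; i++) {
        out[i] = (int16_t)(10000 * sin(phase));
        phase += two_pi * 440.0 / 44100.0;   /* 440 Hz tone */
    }
}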
2193
01310af2
FB
2194/* open a given stream. Return 0 if OK */
2195static int stream_component_open(VideoState *is, int stream_index)
2196{
2197 AVFormatContext *ic = is->ic;
fe74099a 2198 AVCodecContext *avctx;
01310af2
FB
2199 AVCodec *codec;
2200 SDL_AudioSpec wanted_spec, spec;
2201
2202 if (stream_index < 0 || stream_index >= ic->nb_streams)
2203 return -1;
fe74099a 2204 avctx = ic->streams[stream_index]->codec;
115329f1 2205
01310af2 2206 /* prepare audio output */
72415b2a 2207 if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) {
fe74099a
SS
2208 if (avctx->channels > 0) {
2209 avctx->request_channels = FFMIN(2, avctx->channels);
94eadc8b 2210 } else {
fe74099a 2211 avctx->request_channels = 2;
638c9d91 2212 }
01310af2
FB
2213 }
2214
fe74099a
SS
2215 codec = avcodec_find_decoder(avctx->codec_id);
2216 avctx->debug_mv = debug_mv;
2217 avctx->debug = debug;
2218 avctx->workaround_bugs = workaround_bugs;
2219 avctx->lowres = lowres;
2220 if(lowres) avctx->flags |= CODEC_FLAG_EMU_EDGE;
2221 avctx->idct_algo= idct;
2222 if(fast) avctx->flags2 |= CODEC_FLAG2_FAST;
2223 avctx->skip_frame= skip_frame;
2224 avctx->skip_idct= skip_idct;
2225 avctx->skip_loop_filter= skip_loop_filter;
2226 avctx->error_recognition= error_recognition;
2227 avctx->error_concealment= error_concealment;
2228 avcodec_thread_init(avctx, thread_count);
2229
2230 set_context_opts(avctx, avcodec_opts[avctx->codec_type], 0);
e43d7a18 2231
01310af2 2232 if (!codec ||
fe74099a 2233 avcodec_open(avctx, codec) < 0)
01310af2 2234 return -1;
51b73087
JR
2235
2236 /* prepare audio output */
72415b2a 2237 if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) {
fe74099a 2238 wanted_spec.freq = avctx->sample_rate;
51b73087 2239 wanted_spec.format = AUDIO_S16SYS;
fe74099a 2240 wanted_spec.channels = avctx->channels;
51b73087
JR
2241 wanted_spec.silence = 0;
2242 wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
2243 wanted_spec.callback = sdl_audio_callback;
2244 wanted_spec.userdata = is;
2245 if (SDL_OpenAudio(&wanted_spec, &spec) < 0) {
2246 fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
2247 return -1;
2248 }
2249 is->audio_hw_buf_size = spec.size;
5a4476e2 2250 is->audio_src_fmt= SAMPLE_FMT_S16;
51b73087
JR
2251 }
2252
3f3fe38d 2253 ic->streams[stream_index]->discard = AVDISCARD_DEFAULT;
fe74099a 2254 switch(avctx->codec_type) {
72415b2a 2255 case AVMEDIA_TYPE_AUDIO:
01310af2
FB
2256 is->audio_stream = stream_index;
2257 is->audio_st = ic->streams[stream_index];
2258 is->audio_buf_size = 0;
2259 is->audio_buf_index = 0;
638c9d91
FB
2260
2261 /* init averaging filter */
2262 is->audio_diff_avg_coef = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
2263 is->audio_diff_avg_count = 0;
2264 /* since we do not have a precise enough measure of the audio fifo fullness,
2265 we correct audio sync only if the error is larger than this threshold */
fe74099a 2266 is->audio_diff_threshold = 2.0 * SDL_AUDIO_BUFFER_SIZE / avctx->sample_rate;
638c9d91 2267
01310af2
FB
2268 memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
2269 packet_queue_init(&is->audioq);
bb270c08 2270 SDL_PauseAudio(0);
01310af2 2271 break;
72415b2a 2272 case AVMEDIA_TYPE_VIDEO:
01310af2
FB
2273 is->video_stream = stream_index;
2274 is->video_st = ic->streams[stream_index];
2275
68aefbe8 2276// is->video_current_pts_time = av_gettime();
638c9d91 2277
01310af2
FB
2278 packet_queue_init(&is->videoq);
2279 is->video_tid = SDL_CreateThread(video_thread, is);
2280 break;
72415b2a 2281 case AVMEDIA_TYPE_SUBTITLE:
72ce053b
IC
2282 is->subtitle_stream = stream_index;
2283 is->subtitle_st = ic->streams[stream_index];
2284 packet_queue_init(&is->subtitleq);
115329f1 2285
72ce053b
IC
2286 is->subtitle_tid = SDL_CreateThread(subtitle_thread, is);
2287 break;
01310af2
FB
2288 default:
2289 break;
2290 }
2291 return 0;
2292}
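/* Note (illustration only) on the averaging filter initialised above:
   audio_diff_avg_coef = exp(log(0.01) / AUDIO_DIFF_AVG_NB) = 0.01^(1/20)
   ~= 0.794, so a difference measured k callbacks ago is weighted by coef^k
   and has decayed to 1% after AUDIO_DIFF_AVG_NB steps; multiplying by
   (1 - coef), as synchronize_audio() does, normalises the geometric series.
   The threshold set next to it is 2 * 1024 / 44100 ~= 46 ms for a 44.1 kHz
   stream. */
static double example_audio_diff_average(const double *diffs, int count)
{
    double coef = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
    double cum = 0;
    int i;

    for (i = 0; i < count; i++)
        cum = diffs[i] + coef * cum;   /* same recurrence as synchronize_audio() */

    return cum * (1.0 - coef);         /* weighted average of the recent diffs */
}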
2293
2294static void stream_component_close(VideoState *is, int stream_index)
2295{
2296 AVFormatContext *ic = is->ic;
fe74099a 2297 AVCodecContext *avctx;
115329f1 2298
72ce053b
IC
2299 if (stream_index < 0 || stream_index >= ic->nb_streams)
2300 return;
fe74099a 2301 avctx = ic->streams[stream_index]->codec;
01310af2 2302
fe74099a 2303 switch(avctx->codec_type) {
72415b2a 2304 case AVMEDIA_TYPE_AUDIO:
01310af2
FB
2305 packet_queue_abort(&is->audioq);
2306
2307 SDL_CloseAudio();
2308
2309 packet_queue_end(&is->audioq);
5a4476e2
PR
2310 if (is->reformat_ctx)
2311 av_audio_convert_free(is->reformat_ctx);
bc77fce6 2312 is->reformat_ctx = NULL;
01310af2 2313 break;
72415b2a 2314 case AVMEDIA_TYPE_VIDEO:
01310af2
FB
2315 packet_queue_abort(&is->videoq);
2316
2317 /* note: we also signal this condition to make sure we unblock the
2318 video thread in all cases */
2319 SDL_LockMutex(is->pictq_mutex);
2320 SDL_CondSignal(is->pictq_cond);
2321 SDL_UnlockMutex(is->pictq_mutex);
2322
2323 SDL_WaitThread(is->video_tid, NULL);
2324
2325 packet_queue_end(&is->videoq);
2326 break;
72415b2a 2327 case AVMEDIA_TYPE_SUBTITLE:
72ce053b 2328 packet_queue_abort(&is->subtitleq);
115329f1 2329
72ce053b
IC
2330 /* note: we also signal this condition to make sure we unblock the
2331 subtitle thread in all cases */
2332 SDL_LockMutex(is->subpq_mutex);
2333 is->subtitle_stream_changed = 1;
115329f1 2334
72ce053b
IC
2335 SDL_CondSignal(is->subpq_cond);
2336 SDL_UnlockMutex(is->subpq_mutex);
2337
2338 SDL_WaitThread(is->subtitle_tid, NULL);
2339
2340 packet_queue_end(&is->subtitleq);
2341 break;
01310af2
FB
2342 default:
2343 break;
2344 }
2345
3f3fe38d 2346 ic->streams[stream_index]->discard = AVDISCARD_ALL;
fe74099a
SS
2347 avcodec_close(avctx);
2348 switch(avctx->codec_type) {
72415b2a 2349 case AVMEDIA_TYPE_AUDIO:
01310af2
FB
2350 is->audio_st = NULL;
2351 is->audio_stream = -1;
2352 break;
72415b2a 2353 case AVMEDIA_TYPE_VIDEO:
01310af2
FB
2354 is->video_st = NULL;
2355 is->video_stream = -1;
2356 break;
72415b2a 2357 case AVMEDIA_TYPE_SUBTITLE:
72ce053b
IC
2358 is->subtitle_st = NULL;
2359 is->subtitle_stream = -1;
2360 break;
01310af2
FB
2361 default:
2362 break;
2363 }
2364}
2365
416e3508
FB
2366/* since we have only one decoding thread, we can use a global
2367 variable instead of a thread local variable */
2368static VideoState *global_video_state;
2369
2370static int decode_interrupt_cb(void)
2371{
2372 return (global_video_state && global_video_state->abort_request);
2373}
01310af2
FB
2374
2375/* this thread gets the stream from the disk or the network */
2376static int decode_thread(void *arg)
2377{
2378 VideoState *is = arg;
2379 AVFormatContext *ic;
6625a3de 2380 int err, i, ret;
72415b2a
SS
2381 int st_index[AVMEDIA_TYPE_NB];
2382 int st_count[AVMEDIA_TYPE_NB]={0};
2383 int st_best_packet_count[AVMEDIA_TYPE_NB];
01310af2 2384 AVPacket pkt1, *pkt = &pkt1;
61890b02 2385 AVFormatParameters params, *ap = &params;
75bb7b0a 2386 int eof=0;
d834d63b 2387 int pkt_in_play_range = 0;
01310af2 2388
6299a229
MN
2389 ic = avformat_alloc_context();
2390
6625a3de 2391 memset(st_index, -1, sizeof(st_index));
9f7490a0 2392 memset(st_best_packet_count, -1, sizeof(st_best_packet_count));
01310af2
FB
2393 is->video_stream = -1;
2394 is->audio_stream = -1;
72ce053b 2395 is->subtitle_stream = -1;
01310af2 2396
416e3508
FB
2397 global_video_state = is;
2398 url_set_interrupt_cb(decode_interrupt_cb);
2399
61890b02 2400 memset(ap, 0, sizeof(*ap));
115329f1 2401
6299a229 2402 ap->prealloced_context = 1;
e4b89522
LW
2403 ap->width = frame_width;
2404 ap->height= frame_height;
7e042912 2405 ap->time_base= (AVRational){1, 25};
e4b89522 2406 ap->pix_fmt = frame_pix_fmt;
7e042912 2407
6299a229
MN
2408 set_context_opts(ic, avformat_opts, AV_OPT_FLAG_DECODING_PARAM);
2409
61890b02 2410 err = av_open_input_file(&ic, is->filename, is->iformat, 0, ap);
638c9d91
FB
2411 if (err < 0) {
2412 print_error(is->filename, err);
2413 ret = -1;
2414 goto fail;
2415 }
01310af2 2416 is->ic = ic;
30bc6613
MN
2417
2418 if(genpts)
2419 ic->flags |= AVFMT_FLAG_GENPTS;
2420
24c07998
LA
2421 err = av_find_stream_info(ic);
2422 if (err < 0) {
2423 fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
2424 ret = -1;
2425 goto fail;
2426 }
899681cd
BA
2427 if(ic->pb)
2428 ic->pb->eof_reached= 0; //FIXME hack, ffplay maybe should not use url_feof() to test for the end
72ea344b 2429
70a4764d
MN
2430 if(seek_by_bytes<0)
2431 seek_by_bytes= !!(ic->iformat->flags & AVFMT_TS_DISCONT);
2432
72ea344b
FB
2433 /* if seeking requested, we execute it */
2434 if (start_time != AV_NOPTS_VALUE) {
2435 int64_t timestamp;
2436
2437 timestamp = start_time;
2438 /* add the stream start time */
2439 if (ic->start_time != AV_NOPTS_VALUE)
2440 timestamp += ic->start_time;
4ed29207 2441 ret = avformat_seek_file(ic, -1, INT64_MIN, timestamp, INT64_MAX, 0);
72ea344b 2442 if (ret < 0) {
115329f1 2443 fprintf(stderr, "%s: could not seek to position %0.3f\n",
72ea344b
FB
2444 is->filename, (double)timestamp / AV_TIME_BASE);
2445 }
2446 }
72ea344b 2447
01310af2 2448 for(i = 0; i < ic->nb_streams; i++) {
9f7490a0
MN
2449 AVStream *st= ic->streams[i];
2450 AVCodecContext *avctx = st->codec;
3f3fe38d 2451 ic->streams[i]->discard = AVDISCARD_ALL;
72415b2a 2452 if(avctx->codec_type >= (unsigned)AVMEDIA_TYPE_NB)
8ef94840 2453 continue;
256ab3ed
MN
2454 if(st_count[avctx->codec_type]++ != wanted_stream[avctx->codec_type] && wanted_stream[avctx->codec_type] >= 0)
2455 continue;
2456
9f7490a0
MN
2457 if(st_best_packet_count[avctx->codec_type] >= st->codec_info_nb_frames)
2458 continue;
2459 st_best_packet_count[avctx->codec_type]= st->codec_info_nb_frames;
2460
fe74099a 2461 switch(avctx->codec_type) {
72415b2a 2462 case AVMEDIA_TYPE_AUDIO:
256ab3ed 2463 if (!audio_disable)
72415b2a 2464 st_index[AVMEDIA_TYPE_AUDIO] = i;
01310af2 2465 break;
72415b2a
SS
2466 case AVMEDIA_TYPE_VIDEO:
2467 case AVMEDIA_TYPE_SUBTITLE:
256ab3ed
MN
2468 if (!video_disable)
2469 st_index[avctx->codec_type] = i;
16a59a7b 2470 break;
01310af2
FB
2471 default:
2472 break;
2473 }
2474 }
2475 if (show_status) {
2476 dump_format(ic, 0, is->filename, 0);
2477 }
2478
2479 /* open the streams */
72415b2a
SS
2480 if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) {
2481 stream_component_open(is, st_index[AVMEDIA_TYPE_AUDIO]);
01310af2
FB
2482 }
2483
077a8d61 2484 ret=-1;
72415b2a
SS
2485 if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {
2486 ret= stream_component_open(is, st_index[AVMEDIA_TYPE_VIDEO]);
077a8d61 2487 }
d38c9e7a 2488 is->refresh_tid = SDL_CreateThread(refresh_thread, is);
077a8d61 2489 if(ret<0) {
01310af2 2490 if (!display_disable)
bf8ae197 2491 is->show_audio = 2;
01310af2
FB
2492 }
2493
72415b2a
SS
2494 if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) {
2495 stream_component_open(is, st_index[AVMEDIA_TYPE_SUBTITLE]);
16a59a7b
BA
2496 }
2497
01310af2 2498 if (is->video_stream < 0 && is->audio_stream < 0) {
638c9d91
FB
2499 fprintf(stderr, "%s: could not open codecs\n", is->filename);
2500 ret = -1;
01310af2
FB
2501 goto fail;
2502 }
2503
2504 for(;;) {
2505 if (is->abort_request)
2506 break;
416e3508
FB
2507 if (is->paused != is->last_paused) {
2508 is->last_paused = is->paused;
72ea344b 2509 if (is->paused)
f5668147 2510 is->read_pause_return= av_read_pause(ic);
72ea344b
FB
2511 else
2512 av_read_play(ic);
416e3508 2513 }
2f642393
AJ
2514#if CONFIG_RTSP_DEMUXER
2515 if (is->paused && !strcmp(ic->iformat->name, "rtsp")) {
416e3508
FB
2516 /* wait 10 ms to avoid trying to get another packet */
2517 /* XXX: horrible */
2518 SDL_Delay(10);
2519 continue;
2520 }
400738b1 2521#endif
72ea344b 2522 if (is->seek_req) {
8e606cc8 2523 int64_t seek_target= is->seek_pos;
4ed29207
MN
2524 int64_t seek_min= is->seek_rel > 0 ? seek_target - is->seek_rel + 2: INT64_MIN;
2525 int64_t seek_max= is->seek_rel < 0 ? seek_target - is->seek_rel - 2: INT64_MAX;
2526//FIXME the +-2 is due to rounding not being done in the correct direction when generating
2527// the seek_pos/seek_rel variables
8e606cc8 2528
4ed29207 2529 ret = avformat_seek_file(is->ic, -1, seek_min, seek_target, seek_max, is->seek_flags);
72ea344b
FB
2530 if (ret < 0) {
2531 fprintf(stderr, "%s: error while seeking\n", is->ic->filename);
e6c0297f
MN
2532 }else{
2533 if (is->audio_stream >= 0) {
2534 packet_queue_flush(&is->audioq);
39c6a118 2535 packet_queue_put(&is->audioq, &flush_pkt);
e6c0297f 2536 }
72ce053b
IC
2537 if (is->subtitle_stream >= 0) {
2538 packet_queue_flush(&is->subtitleq);
39c6a118 2539 packet_queue_put(&is->subtitleq, &flush_pkt);
72ce053b 2540 }
e6c0297f
MN
2541 if (is->video_stream >= 0) {
2542 packet_queue_flush(&is->videoq);
39c6a118 2543 packet_queue_put(&is->videoq, &flush_pkt);
e6c0297f 2544 }
72ea344b
FB
2545 }
2546 is->seek_req = 0;
e45aeb38 2547 eof= 0;
72ea344b 2548 }
416e3508 2549
01310af2 2550 /* if the queue are full, no need to read more */
79ee4683
MN
2551 if ( is->audioq.size + is->videoq.size + is->subtitleq.size > MAX_QUEUE_SIZE
2552 || ( (is->audioq .size > MIN_AUDIOQ_SIZE || is->audio_stream<0)
2553 && (is->videoq .nb_packets > MIN_FRAMES || is->video_stream<0)
2554 && (is->subtitleq.nb_packets > MIN_FRAMES || is->subtitle_stream<0))) {
01310af2
FB
2555 /* wait 10 ms */
2556 SDL_Delay(10);
2557 continue;
2558 }
75bb7b0a 2559 if(url_feof(ic->pb) || eof) {
9dc41767 2560 if(is->video_stream >= 0){
26534fe8
MN
2561 av_init_packet(pkt);
2562 pkt->data=NULL;
2563 pkt->size=0;
2564 pkt->stream_index= is->video_stream;
2565 packet_queue_put(&is->videoq, pkt);
9dc41767 2566 }
b4083171 2567 SDL_Delay(10);
1922c0a7
RK
2568 if(is->audioq.size + is->videoq.size + is->subtitleq.size ==0){
2569 if(loop!=1 && (!loop || --loop)){
2570 stream_seek(cur_stream, start_time != AV_NOPTS_VALUE ? start_time : 0, 0, 0);
2571 }else if(autoexit){
2572 ret=AVERROR_EOF;
2573 goto fail;
2574 }
2d1653b0 2575 }
600a331c
MN
2576 continue;
2577 }
72ea344b 2578 ret = av_read_frame(ic, pkt);
01310af2 2579 if (ret < 0) {
75bb7b0a
MN
2580 if (ret == AVERROR_EOF)
2581 eof=1;
2582 if (url_ferror(ic->pb))
bb270c08 2583 break;
75bb7b0a
MN
2584 SDL_Delay(100); /* wait for user event */
2585 continue;
01310af2 2586 }
d834d63b
RK
2587 /* check if packet is in play range specified by user, then queue, otherwise discard */
2588 pkt_in_play_range = duration == AV_NOPTS_VALUE ||
2589 (pkt->pts - ic->streams[pkt->stream_index]->start_time) *
2590 av_q2d(ic->streams[pkt->stream_index]->time_base) -
2591 (double)(start_time != AV_NOPTS_VALUE ? start_time : 0)/1000000
2592 <= ((double)duration/1000000);
2593 if (pkt->stream_index == is->audio_stream && pkt_in_play_range) {
01310af2 2594 packet_queue_put(&is->audioq, pkt);
d834d63b 2595 } else if (pkt->stream_index == is->video_stream && pkt_in_play_range) {
01310af2 2596 packet_queue_put(&is->videoq, pkt);
d834d63b 2597 } else if (pkt->stream_index == is->subtitle_stream && pkt_in_play_range) {
72ce053b 2598 packet_queue_put(&is->subtitleq, pkt);
01310af2
FB
2599 } else {
2600 av_free_packet(pkt);
2601 }
2602 }
2603 /* wait until the end */
2604 while (!is->abort_request) {
2605 SDL_Delay(100);
2606 }
2607
638c9d91 2608 ret = 0;
01310af2 2609 fail:
416e3508
FB
2610 /* disable interrupting */
2611 global_video_state = NULL;
2612
01310af2
FB
2613 /* close each stream */
2614 if (is->audio_stream >= 0)
2615 stream_component_close(is, is->audio_stream);
2616 if (is->video_stream >= 0)
2617 stream_component_close(is, is->video_stream);
72ce053b
IC
2618 if (is->subtitle_stream >= 0)
2619 stream_component_close(is, is->subtitle_stream);
638c9d91
FB
2620 if (is->ic) {
2621 av_close_input_file(is->ic);
2622 is->ic = NULL; /* safety */
2623 }
416e3508
FB
2624 url_set_interrupt_cb(NULL);
2625
638c9d91
FB
2626 if (ret != 0) {
2627 SDL_Event event;
115329f1 2628
638c9d91
FB
2629 event.type = FF_QUIT_EVENT;
2630 event.user.data1 = is;
2631 SDL_PushEvent(&event);
2632 }
01310af2
FB
2633 return 0;
2634}
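/* Sketch (illustration only) of the play-range test used in decode_thread():
   packet timestamps are counted in stream time_base units while -ss/-t are
   in AV_TIME_BASE (microsecond) units, so both sides are converted to
   seconds before comparing.  The helper name is hypothetical and it simply
   restates the expression above. */
static int example_pkt_in_play_range(AVStream *st, int64_t pkt_pts,
                                     int64_t start_time_us, int64_t duration_us)
{
    double pkt_t   = (pkt_pts - st->start_time) * av_q2d(st->time_base);
    double start_t = start_time_us != AV_NOPTS_VALUE ? start_time_us / 1e6 : 0;

    return duration_us == AV_NOPTS_VALUE ||
           pkt_t - start_t <= duration_us / 1e6;
}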
2635
638c9d91 2636static VideoState *stream_open(const char *filename, AVInputFormat *iformat)
01310af2
FB
2637{
2638 VideoState *is;
2639
2640 is = av_mallocz(sizeof(VideoState));
2641 if (!is)
2642 return NULL;
f7d78f36 2643 av_strlcpy(is->filename, filename, sizeof(is->filename));
638c9d91 2644 is->iformat = iformat;
01310af2
FB
2645 is->ytop = 0;
2646 is->xleft = 0;
2647
2648 /* start video display */
2649 is->pictq_mutex = SDL_CreateMutex();
2650 is->pictq_cond = SDL_CreateCond();
115329f1 2651
72ce053b
IC
2652 is->subpq_mutex = SDL_CreateMutex();
2653 is->subpq_cond = SDL_CreateCond();
115329f1 2654
638c9d91 2655 is->av_sync_type = av_sync_type;
01310af2
FB
2656 is->parse_tid = SDL_CreateThread(decode_thread, is);
2657 if (!is->parse_tid) {
2658 av_free(is);
2659 return NULL;
2660 }
2661 return is;
2662}
2663
2664static void stream_close(VideoState *is)
2665{
2666 VideoPicture *vp;
2667 int i;
2668 /* XXX: use a special url_shutdown call to abort parse cleanly */
2669 is->abort_request = 1;
2670 SDL_WaitThread(is->parse_tid, NULL);
d38c9e7a 2671 SDL_WaitThread(is->refresh_tid, NULL);
01310af2
FB
2672
2673 /* free all pictures */
2674 for(i=0;i<VIDEO_PICTURE_QUEUE_SIZE; i++) {
2675 vp = &is->pictq[i];
917d2bb3
MN
2676#if CONFIG_AVFILTER
2677 if (vp->picref) {
2678 avfilter_unref_pic(vp->picref);
2679 vp->picref = NULL;
2680 }
2681#endif
01310af2
FB
2682 if (vp->bmp) {
2683 SDL_FreeYUVOverlay(vp->bmp);
2684 vp->bmp = NULL;
2685 }
2686 }
2687 SDL_DestroyMutex(is->pictq_mutex);
2688 SDL_DestroyCond(is->pictq_cond);
72ce053b
IC
2689 SDL_DestroyMutex(is->subpq_mutex);
2690 SDL_DestroyCond(is->subpq_cond);
917d2bb3 2691#if !CONFIG_AVFILTER
3ac56e28
MS
2692 if (is->img_convert_ctx)
2693 sws_freeContext(is->img_convert_ctx);
917d2bb3 2694#endif
7c5ab145 2695 av_free(is);
01310af2
FB
2696}
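/* Sketch (illustration only) of the bounded-queue pattern protected by the
   mutex/condition pairs created in stream_open() and destroyed above: a
   producer waits while the queue is full and the consumer signals after
   removing an entry, with an abort flag so shutdown can wake a blocked
   producer.  The queue type is hypothetical; only the locking pattern
   mirrors the pictq/subpq code. */
typedef struct ExampleQueue {
    int size, max_size, abort_request;
    SDL_mutex *mutex;
    SDL_cond *cond;
} ExampleQueue;

static void example_queue_wait_writable(ExampleQueue *q)
{
    SDL_LockMutex(q->mutex);
    while (q->size >= q->max_size && !q->abort_request)
        SDL_CondWait(q->cond, q->mutex); /* atomically unlocks, sleeps, relocks */
    SDL_UnlockMutex(q->mutex);
}

static void example_queue_signal_read(ExampleQueue *q)
{
    SDL_LockMutex(q->mutex);
    q->size--;
    SDL_CondSignal(q->cond);             /* wake one waiting producer */
    SDL_UnlockMutex(q->mutex);
}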
2697
7b49ce2e 2698static void stream_cycle_channel(VideoState *is, int codec_type)
638c9d91
FB
2699{
2700 AVFormatContext *ic = is->ic;
2701 int start_index, stream_index;
2702 AVStream *st;
2703
72415b2a 2704 if (codec_type == AVMEDIA_TYPE_VIDEO)
638c9d91 2705 start_index = is->video_stream;
72415b2a 2706 else if (codec_type == AVMEDIA_TYPE_AUDIO)
638c9d91 2707 start_index = is->audio_stream;
72ce053b
IC
2708 else
2709 start_index = is->subtitle_stream;
72415b2a 2710 if (start_index < (codec_type == AVMEDIA_TYPE_SUBTITLE ? -1 : 0))
638c9d91
FB
2711 return;
2712 stream_index = start_index;
2713 for(;;) {
2714 if (++stream_index >= is->ic->nb_streams)
72ce053b 2715 {
72415b2a 2716 if (codec_type == AVMEDIA_TYPE_SUBTITLE)
72ce053b
IC
2717 {
2718 stream_index = -1;
2719 goto the_end;
2720 } else
2721 stream_index = 0;
2722 }
638c9d91
FB
2723 if (stream_index == start_index)
2724 return;
2725 st = ic->streams[stream_index];
01f4895c 2726 if (st->codec->codec_type == codec_type) {
638c9d91
FB
2727 /* check that parameters are OK */
2728 switch(codec_type) {
72415b2a 2729 case AVMEDIA_TYPE_AUDIO:
01f4895c
MN
2730 if (st->codec->sample_rate != 0 &&
2731 st->codec->channels != 0)
638c9d91
FB
2732 goto the_end;
2733 break;
72415b2a
SS
2734 case AVMEDIA_TYPE_VIDEO:
2735 case AVMEDIA_TYPE_SUBTITLE:
638c9d91
FB
2736 goto the_end;
2737 default:
2738 break;
2739 }
2740 }
2741 }
2742 the_end:
2743 stream_component_close(is, start_index);
2744 stream_component_open(is, stream_index);
2745}
2746
2747
7b49ce2e 2748static void toggle_full_screen(void)
01310af2 2749{
01310af2 2750 is_full_screen = !is_full_screen;
29f3b38a
MR
2751 if (!fs_screen_width) {
2752 /* use default SDL method */
fb84155b 2753// SDL_WM_ToggleFullScreen(screen);
01310af2 2754 }
fb84155b 2755 video_open(cur_stream);
01310af2
FB
2756}
2757
7b49ce2e 2758static void toggle_pause(void)
01310af2
FB
2759{
2760 if (cur_stream)
2761 stream_pause(cur_stream);
bba04f1e
WH
2762 step = 0;
2763}
2764
7b49ce2e 2765static void step_to_next_frame(void)
bba04f1e
WH
2766{
2767 if (cur_stream) {
19cc524a 2768 /* if the stream is paused, unpause it, then step */
bba04f1e 2769 if (cur_stream->paused)
19cc524a 2770 stream_pause(cur_stream);
bba04f1e
WH
2771 }
2772 step = 1;
01310af2
FB
2773}
2774
7b49ce2e 2775static void do_exit(void)
01310af2 2776{
7c5ab145 2777 int i;
01310af2
FB
2778 if (cur_stream) {
2779 stream_close(cur_stream);
2780 cur_stream = NULL;
2781 }
72415b2a 2782 for (i = 0; i < AVMEDIA_TYPE_NB; i++)
7c5ab145
MS
2783 av_free(avcodec_opts[i]);
2784 av_free(avformat_opts);
2785 av_free(sws_opts);
917d2bb3
MN
2786#if CONFIG_AVFILTER
2787 avfilter_uninit();
2788#endif
01310af2
FB
2789 if (show_status)
2790 printf("\n");
2791 SDL_Quit();
2792 exit(0);
2793}
2794
7b49ce2e 2795static void toggle_audio_display(void)
01310af2
FB
2796{
2797 if (cur_stream) {
f5968788 2798 int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
12eeda34 2799 cur_stream->show_audio = (cur_stream->show_audio + 1) % 3;
f5968788
MN
2800 fill_rectangle(screen,
2801 cur_stream->xleft, cur_stream->ytop, cur_stream->width, cur_stream->height,
2802 bgcolor);
2803 SDL_UpdateRect(screen, cur_stream->xleft, cur_stream->ytop, cur_stream->width, cur_stream->height);
01310af2
FB
2804 }
2805}
2806
2807/* handle an event sent by the GUI */
7b49ce2e 2808static void event_loop(void)
01310af2
FB
2809{
2810 SDL_Event event;
a11d11aa 2811 double incr, pos, frac;
01310af2
FB
2812
2813 for(;;) {
d52ec002 2814 double x;
01310af2
FB
2815 SDL_WaitEvent(&event);
2816 switch(event.type) {
2817 case SDL_KEYDOWN:
2818 switch(event.key.keysym.sym) {
2819 case SDLK_ESCAPE:
2820 case SDLK_q:
2821 do_exit();
2822 break;
2823 case SDLK_f:
2824 toggle_full_screen();
2825 break;
2826 case SDLK_p:
2827 case SDLK_SPACE:
2828 toggle_pause();
2829 break;
bba04f1e
WH
2830 case SDLK_s: //S: Step to next frame
2831 step_to_next_frame();
2832 break;
01310af2 2833 case SDLK_a:
115329f1 2834 if (cur_stream)
72415b2a 2835 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_AUDIO);
638c9d91
FB
2836 break;
2837 case SDLK_v:
115329f1 2838 if (cur_stream)
72415b2a 2839 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_VIDEO);
638c9d91 2840 break;
72ce053b 2841 case SDLK_t:
115329f1 2842 if (cur_stream)
72415b2a 2843 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_SUBTITLE);
72ce053b 2844 break;
638c9d91 2845 case SDLK_w:
01310af2
FB
2846 toggle_audio_display();
2847 break;
72ea344b
FB
2848 case SDLK_LEFT:
2849 incr = -10.0;
2850 goto do_seek;
2851 case SDLK_RIGHT:
2852 incr = 10.0;
2853 goto do_seek;
2854 case SDLK_UP:
2855 incr = 60.0;
2856 goto do_seek;
2857 case SDLK_DOWN:
2858 incr = -60.0;
2859 do_seek:
2860 if (cur_stream) {
94b594c6 2861 if (seek_by_bytes) {
1a620dd7
MN
2862 if (cur_stream->video_stream >= 0 && cur_stream->video_current_pos>=0){
2863 pos= cur_stream->video_current_pos;
2864 }else if(cur_stream->audio_stream >= 0 && cur_stream->audio_pkt.pos>=0){
2865 pos= cur_stream->audio_pkt.pos;
2866 }else
2867 pos = url_ftell(cur_stream->ic->pb);
94b594c6 2868 if (cur_stream->ic->bit_rate)
566cd2cb 2869 incr *= cur_stream->ic->bit_rate / 8.0;
94b594c6
SH
2870 else
2871 incr *= 180000.0;
2872 pos += incr;
2ef46053 2873 stream_seek(cur_stream, pos, incr, 1);
94b594c6
SH
2874 } else {
2875 pos = get_master_clock(cur_stream);
2876 pos += incr;
2ef46053 2877 stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), (int64_t)(incr * AV_TIME_BASE), 0);
94b594c6 2878 }
72ea344b
FB
2879 }
2880 break;
01310af2
FB
2881 default:
2882 break;
2883 }
2884 break;
a11d11aa 2885 case SDL_MOUSEBUTTONDOWN:
d52ec002
MN
2886 case SDL_MOUSEMOTION:
2887 if(event.type ==SDL_MOUSEBUTTONDOWN){
2888 x= event.button.x;
2889 }else{
2890 if(event.motion.state != SDL_PRESSED)
2891 break;
2892 x= event.motion.x;
2893 }
bb270c08 2894 if (cur_stream) {
2ef46053
MN
2895 if(seek_by_bytes || cur_stream->ic->duration<=0){
2896 uint64_t size= url_fsize(cur_stream->ic->pb);
d52ec002 2897 stream_seek(cur_stream, size*x/cur_stream->width, 0, 1);
2ef46053 2898 }else{
6371c81a
MN
2899 int64_t ts;
2900 int ns, hh, mm, ss;
2901 int tns, thh, tmm, tss;
2902 tns = cur_stream->ic->duration/1000000LL;
2903 thh = tns/3600;
2904 tmm = (tns%3600)/60;
2905 tss = (tns%60);
d52ec002 2906 frac = x/cur_stream->width;
6371c81a
MN
2907 ns = frac*tns;
2908 hh = ns/3600;
2909 mm = (ns%3600)/60;
2910 ss = (ns%60);
2911 fprintf(stderr, "Seek to %2.0f%% (%2d:%02d:%02d) of total duration (%2d:%02d:%02d) \n", frac*100,
2912 hh, mm, ss, thh, tmm, tss);
2913 ts = frac*cur_stream->ic->duration;
2914 if (cur_stream->ic->start_time != AV_NOPTS_VALUE)
2915 ts += cur_stream->ic->start_time;
2916 stream_seek(cur_stream, ts, 0, 0);
2ef46053 2917 }
bb270c08
DB
2918 }
2919 break;
01310af2
FB
2920 case SDL_VIDEORESIZE:
2921 if (cur_stream) {
115329f1 2922 screen = SDL_SetVideoMode(event.resize.w, event.resize.h, 0,
01310af2 2923 SDL_HWSURFACE|SDL_RESIZABLE|SDL_ASYNCBLIT|SDL_HWACCEL);
c57d3469
MN
2924 screen_width = cur_stream->width = event.resize.w;
2925 screen_height= cur_stream->height= event.resize.h;
01310af2
FB
2926 }
2927 break;
2928 case SDL_QUIT:
638c9d91 2929 case FF_QUIT_EVENT:
01310af2
FB
2930 do_exit();
2931 break;
2932 case FF_ALLOC_EVENT:
fccb19e3 2933 video_open(event.user.data1);
01310af2
FB
2934 alloc_picture(event.user.data1);
2935 break;
2936 case FF_REFRESH_EVENT:
2937 video_refresh_timer(event.user.data1);
d38c9e7a 2938 cur_stream->refresh=0;
01310af2
FB
2939 break;
2940 default:
2941 break;
2942 }
2943 }
2944}
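/* Sketch (illustration only) of the two seek-target computations in
   event_loop(): keyboard seeks in byte mode scale the 10/60 second
   increment by the container bit rate, and a mouse click maps the window
   fraction onto the stream duration.  Helper names and the numbers in the
   comments are hypothetical. */
static int64_t example_byte_seek_target(int64_t cur_pos, double incr_seconds,
                                        int bit_rate)
{
    /* e.g. +10 s at 1 Mbit/s advances by 10 * 1000000 / 8 = 1250000 bytes */
    return cur_pos + (int64_t)(incr_seconds * bit_rate / 8.0);
}

static int64_t example_fraction_seek_target(double frac, int64_t duration,
                                            int64_t start_time)
{
    /* e.g. a click at 25% of the width of a 2 hour file seeks to 30 min */
    int64_t ts = (int64_t)(frac * duration);
    if (start_time != AV_NOPTS_VALUE)
        ts += start_time;
    return ts;
}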
2945
e4b89522
LW
2946static void opt_frame_size(const char *arg)
2947{
b33ece16 2948 if (av_parse_video_frame_size(&frame_width, &frame_height, arg) < 0) {
e4b89522
LW
2949 fprintf(stderr, "Incorrect frame size\n");
2950 exit(1);
2951 }
2952 if ((frame_width % 2) != 0 || (frame_height % 2) != 0) {
2953 fprintf(stderr, "Frame size must be a multiple of 2\n");
2954 exit(1);
2955 }
2956}
2957
a5b3b5f6 2958static int opt_width(const char *opt, const char *arg)
01310af2 2959{
a5b3b5f6
SS
2960 screen_width = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
2961 return 0;
01310af2
FB
2962}
2963
a5b3b5f6 2964static int opt_height(const char *opt, const char *arg)
01310af2 2965{
a5b3b5f6
SS
2966 screen_height = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
2967 return 0;
01310af2
FB
2968}
2969
2970static void opt_format(const char *arg)
2971{
2972 file_iformat = av_find_input_format(arg);
2973 if (!file_iformat) {
2974 fprintf(stderr, "Unknown input format: %s\n", arg);
2975 exit(1);
2976 }
2977}
61890b02 2978
e4b89522
LW
2979static void opt_frame_pix_fmt(const char *arg)
2980{
718c7b18 2981 frame_pix_fmt = av_get_pix_fmt(arg);
e4b89522
LW
2982}
2983
b81d6235 2984static int opt_sync(const char *opt, const char *arg)
638c9d91
FB
2985{
2986 if (!strcmp(arg, "audio"))
2987 av_sync_type = AV_SYNC_AUDIO_MASTER;
2988 else if (!strcmp(arg, "video"))
2989 av_sync_type = AV_SYNC_VIDEO_MASTER;
2990 else if (!strcmp(arg, "ext"))
2991 av_sync_type = AV_SYNC_EXTERNAL_CLOCK;
aab1b7e5 2992 else {
b81d6235 2993 fprintf(stderr, "Unknown value for %s: %s\n", opt, arg);
aab1b7e5
SS
2994 exit(1);
2995 }
b81d6235 2996 return 0;
638c9d91
FB
2997}
2998
e11bc2c6 2999static int opt_seek(const char *opt, const char *arg)
72ea344b 3000{
e11bc2c6
SS
3001 start_time = parse_time_or_die(opt, arg, 1);
3002 return 0;
72ea344b
FB
3003}
3004
d834d63b
RK
3005static int opt_duration(const char *opt, const char *arg)
3006{
3007 duration = parse_time_or_die(opt, arg, 1);
3008 return 0;
3009}
3010
a5b3b5f6 3011static int opt_debug(const char *opt, const char *arg)
e26a8335 3012{
a309073b 3013 av_log_set_level(99);
a5b3b5f6
SS
3014 debug = parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
3015 return 0;
e26a8335 3016}
115329f1 3017
a5b3b5f6 3018static int opt_vismv(const char *opt, const char *arg)
0c9bbaec 3019{
a5b3b5f6
SS
3020 debug_mv = parse_number_or_die(opt, arg, OPT_INT64, INT_MIN, INT_MAX);
3021 return 0;
0c9bbaec 3022}
c62c07d3 3023
a5b3b5f6 3024static int opt_thread_count(const char *opt, const char *arg)
c62c07d3 3025{
a5b3b5f6 3026 thread_count= parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
b250f9c6 3027#if !HAVE_THREADS
c62c07d3
MN
3028 fprintf(stderr, "Warning: not compiled with thread support, using thread emulation\n");
3029#endif
a5b3b5f6 3030 return 0;
c62c07d3 3031}
115329f1 3032
358061f6 3033static const OptionDef options[] = {
992f8eae 3034#include "cmdutils_common_opts.h"
a5b3b5f6
SS
3035 { "x", HAS_ARG | OPT_FUNC2, {(void*)opt_width}, "force displayed width", "width" },
3036 { "y", HAS_ARG | OPT_FUNC2, {(void*)opt_height}, "force displayed height", "height" },
e4b89522 3037 { "s", HAS_ARG | OPT_VIDEO, {(void*)opt_frame_size}, "set frame size (WxH or abbreviation)", "size" },
638c9d91 3038 { "fs", OPT_BOOL, {(void*)&is_full_screen}, "force full screen" },
01310af2
FB
3039 { "an", OPT_BOOL, {(void*)&audio_disable}, "disable audio" },
3040 { "vn", OPT_BOOL, {(void*)&video_disable}, "disable video" },
72415b2a
SS
3041 { "ast", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_AUDIO]}, "select desired audio stream", "stream_number" },
3042 { "vst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_VIDEO]}, "select desired video stream", "stream_number" },
3043 { "sst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_SUBTITLE]}, "select desired subtitle stream", "stream_number" },
e11bc2c6 3044 { "ss", HAS_ARG | OPT_FUNC2, {(void*)&opt_seek}, "seek to a given position in seconds", "pos" },
d834d63b 3045 { "t", HAS_ARG | OPT_FUNC2, {(void*)&opt_duration}, "play \"duration\" seconds of audio/video", "duration" },
674fe163 3046 { "bytes", OPT_INT | HAS_ARG, {(void*)&seek_by_bytes}, "seek by bytes 0=off 1=on -1=auto", "val" },
01310af2
FB
3047 { "nodisp", OPT_BOOL, {(void*)&display_disable}, "disable graphical display" },
3048 { "f", HAS_ARG, {(void*)opt_format}, "force format", "fmt" },
e4b89522 3049 { "pix_fmt", HAS_ARG | OPT_EXPERT | OPT_VIDEO, {(void*)opt_frame_pix_fmt}, "set pixel format", "format" },
98ae6acf 3050 { "stats", OPT_BOOL | OPT_EXPERT, {(void*)&show_status}, "show status", "" },
a5b3b5f6 3051 { "debug", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_debug}, "print specific debug info", "" },
6387c3e6 3052 { "bug", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&workaround_bugs}, "workaround bugs", "" },
a5b3b5f6 3053 { "vismv", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_vismv}, "visualize motion vectors", "" },
6fc5b059 3054 { "fast", OPT_BOOL | OPT_EXPERT, {(void*)&fast}, "non spec compliant optimizations", "" },
30bc6613 3055 { "genpts", OPT_BOOL | OPT_EXPERT, {(void*)&genpts}, "generate pts", "" },
59055363 3056 { "drp", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&decoder_reorder_pts}, "let decoder reorder pts 0=off 1=on -1=auto", ""},
178fcca8 3057 { "lowres", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&lowres}, "", "" },
8c3eba7c
MN
3058 { "skiploop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_loop_filter}, "", "" },
3059 { "skipframe", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_frame}, "", "" },
3060 { "skipidct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_idct}, "", "" },
178fcca8 3061 { "idct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&idct}, "set idct algo", "algo" },
047599a4 3062 { "er", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_recognition}, "set error detection threshold (0-4)", "threshold" },
1b51e051 3063 { "ec", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_concealment}, "set error concealment options", "bit_mask" },
b81d6235 3064 { "sync", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_sync}, "set audio-video sync. type (type=audio/video/ext)", "type" },
a5b3b5f6 3065 { "threads", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_thread_count}, "thread count", "count" },
2d1653b0 3066 { "autoexit", OPT_BOOL | OPT_EXPERT, {(void*)&autoexit}, "exit at the end", "" },
1922c0a7 3067 { "loop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&loop}, "set number of times the playback shall be looped", "loop count" },
d38c9e7a 3068 { "framedrop", OPT_BOOL | OPT_EXPERT, {(void*)&framedrop}, "drop frames when cpu is too slow", "" },
076db5ed 3069 { "window_title", OPT_STRING | HAS_ARG, {(void*)&window_title}, "set window title", "window title" },
917d2bb3 3070#if CONFIG_AVFILTER
09ed11e5 3071 { "vf", OPT_STRING | HAS_ARG, {(void*)&vfilters}, "video filters", "filter list" },
917d2bb3 3072#endif
2b3da32f 3073 { "rdftspeed", OPT_INT | HAS_ARG| OPT_AUDIO | OPT_EXPERT, {(void*)&rdftspeed}, "rdft speed", "msecs" },
e43d7a18 3074 { "default", OPT_FUNC2 | HAS_ARG | OPT_AUDIO | OPT_VIDEO | OPT_EXPERT, {(void*)opt_default}, "generic catch all option", "" },
01310af2
FB
3075 { NULL, },
3076};
3077
0c2a18cb 3078static void show_usage(void)
01310af2 3079{
27daa420
RP
3080 printf("Simple media player\n");
3081 printf("usage: ffplay [options] input_file\n");
01310af2 3082 printf("\n");
0c2a18cb
RP
3083}
3084
3085static void show_help(void)
3086{
3087 show_usage();
02d504a7
FB
3088 show_help_options(options, "Main options:\n",
3089 OPT_EXPERT, 0);
3090 show_help_options(options, "\nAdvanced options:\n",
3091 OPT_EXPERT, OPT_EXPERT);
01310af2
FB
3092 printf("\nWhile playing:\n"
3093 "q, ESC quit\n"
3094 "f toggle full screen\n"
3095 "p, SPC pause\n"
638c9d91
FB
3096 "a cycle audio channel\n"
3097 "v cycle video channel\n"
72ce053b 3098 "t cycle subtitle channel\n"
638c9d91 3099 "w show audio waves\n"
79f8b328 3100 "s activate frame-step mode\n"
72ea344b
FB
3101 "left/right seek backward/forward 10 seconds\n"
3102 "down/up seek backward/forward 1 minute\n"
a11d11aa 3103 "mouse click seek to percentage in file corresponding to fraction of width\n"
01310af2 3104 );
01310af2
FB
3105}
3106
358061f6 3107static void opt_input_file(const char *filename)
01310af2 3108{
07a70b38
SS
3109 if (input_filename) {
3110 fprintf(stderr, "Argument '%s' provided as input filename, but '%s' was already specified.\n",
3111 filename, input_filename);
3112 exit(1);
3113 }
e8d83e1c 3114 if (!strcmp(filename, "-"))
9fcfc0b7 3115 filename = "pipe:";
01310af2
FB
3116 input_filename = filename;
3117}
3118
3119/* Called from the main */
3120int main(int argc, char **argv)
3121{
e43d7a18 3122 int flags, i;
115329f1 3123
01310af2 3124 /* register all codecs, demux and protocols */
c721d803 3125 avcodec_register_all();
9b157b0c 3126#if CONFIG_AVDEVICE
c721d803 3127 avdevice_register_all();
9b157b0c 3128#endif
917d2bb3
MN
3129#if CONFIG_AVFILTER
3130 avfilter_register_all();
3131#endif
01310af2
FB
3132 av_register_all();
3133
72415b2a 3134 for(i=0; i<AVMEDIA_TYPE_NB; i++){
636f1c4c 3135 avcodec_opts[i]= avcodec_alloc_context2(i);
e43d7a18 3136 }
8e2fd8e1 3137 avformat_opts = avformat_alloc_context();
917d2bb3 3138#if !CONFIG_AVFILTER
e43d7a18 3139 sws_opts = sws_getContext(16,16,0, 16,16,0, sws_flags, NULL,NULL,NULL);
917d2bb3 3140#endif
e43d7a18 3141
ea9c581f 3142 show_banner();
4cfac5bc 3143
f5da5c93 3144 parse_options(argc, argv, options, opt_input_file);
01310af2 3145
aab1b7e5 3146 if (!input_filename) {
7f11e745 3147 show_usage();
7a7da6b4 3148 fprintf(stderr, "An input file must be specified\n");
7f11e745 3149 fprintf(stderr, "Use -h to get full help or, even better, run 'man ffplay'\n");
aab1b7e5
SS
3150 exit(1);
3151 }
01310af2
FB
3152
3153 if (display_disable) {
3154 video_disable = 1;
3155 }
31319a8c 3156 flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
c97f5402
DB
3157#if !defined(__MINGW32__) && !defined(__APPLE__)
3158 flags |= SDL_INIT_EVENTTHREAD; /* Not supported on Windows or Mac OS X */
31319a8c 3159#endif
01310af2 3160 if (SDL_Init (flags)) {
05ab0b76 3161 fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
01310af2
FB
3162 exit(1);
3163 }
3164
3165 if (!display_disable) {
b250f9c6 3166#if HAVE_SDL_VIDEO_SIZE
3ef17d62
MR
3167 const SDL_VideoInfo *vi = SDL_GetVideoInfo();
3168 fs_screen_width = vi->current_w;
3169 fs_screen_height = vi->current_h;
29f3b38a 3170#endif
01310af2
FB
3171 }
3172
3173 SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
01310af2
FB
3174 SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
3175 SDL_EventState(SDL_USEREVENT, SDL_IGNORE);
3176
39c6a118
MN
3177 av_init_packet(&flush_pkt);
3178 flush_pkt.data= "FLUSH";
3179
638c9d91 3180 cur_stream = stream_open(input_filename, file_iformat);
01310af2
FB
3181
3182 event_loop();
3183
3184 /* never returns */
3185
3186 return 0;
3187}