[libav.git] / ffplay.c
01310af2 1/*
f05ef45c 2 * FFplay : Simple Media Player based on the FFmpeg libraries
01310af2 3 * Copyright (c) 2003 Fabrice Bellard
4 *
b78e7197 5 * This file is part of FFmpeg.
6 *
7 * FFmpeg is free software; you can redistribute it and/or
01310af2 8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
b78e7197 10 * version 2.1 of the License, or (at your option) any later version.
01310af2 11 *
b78e7197 12 * FFmpeg is distributed in the hope that it will be useful,
01310af2 13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
b78e7197 18 * License along with FFmpeg; if not, write to the Free Software
5509bffa 19 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
01310af2 20 */
364a9607 21
ba11257e 22#include "config.h"
8a3ceaf4 23#include <inttypes.h>
0f4e8165 24#include <math.h>
25#include <limits.h>
245976da 26#include "libavutil/avstring.h"
718c7b18 27#include "libavutil/pixdesc.h"
245976da 28#include "libavformat/avformat.h"
245976da 29#include "libavdevice/avdevice.h"
30#include "libswscale/swscale.h"
5a4476e2 31#include "libavcodec/audioconvert.h"
a7e6312b 32#include "libavcodec/colorspace.h"
e43d7a18 33#include "libavcodec/opt.h"
166621ab 34#include "libavcodec/avfft.h"
01310af2 35
917d2bb3 36#if CONFIG_AVFILTER
37# include "libavfilter/avfilter.h"
38# include "libavfilter/avfiltergraph.h"
39# include "libavfilter/graphparser.h"
40#endif
41
01310af2 42#include "cmdutils.h"
43
44#include <SDL.h>
45#include <SDL_thread.h>
46
2f30a81d 47#ifdef __MINGW32__
31319a8c 48#undef main /* We don't want SDL to override our main() */
49#endif
50
d38c9e7a 51#include <unistd.h>
52#include <assert.h>
53
64555bd9 54const char program_name[] = "FFplay";
ea9c581f 55const int program_birth_year = 2003;
4cfac5bc 56
638c9d91 57//#define DEBUG_SYNC
58
79ee4683 59#define MAX_QUEUE_SIZE (15 * 1024 * 1024)
60#define MIN_AUDIOQ_SIZE (20 * 16 * 1024)
61#define MIN_FRAMES 5
01310af2 62
638c9d91 63/* SDL audio buffer size, in samples. Should be small to have precise
64 A/V sync as SDL does not have hardware buffer fullness info. */
65#define SDL_AUDIO_BUFFER_SIZE 1024
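/* For example, with a 44100 Hz stream this corresponds to roughly
   1024 / 44100 ~= 23 ms of audio per SDL callback; the actual latency scales
   with the sample rate the decoder reports. */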
66
67/* no AV sync correction is done if below the AV sync threshold */
7e0140cb 68#define AV_SYNC_THRESHOLD 0.01
638c9d91 69/* no AV correction is done if the error is too big */
70#define AV_NOSYNC_THRESHOLD 10.0
71
d38c9e7a 72#define FRAME_SKIP_FACTOR 0.05
73
638c9d91 74/* maximum audio speed change to get correct sync */
75#define SAMPLE_CORRECTION_PERCENT_MAX 10
76
77/* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */
78#define AUDIO_DIFF_AVG_NB 20
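/* How the constants above are used: compute_target_time() only drops or
   doubles a video frame delay when the clock difference reaches
   FFMAX(AV_SYNC_THRESHOLD, delay) while still being below AV_NOSYNC_THRESHOLD,
   and the audio_diff_* fields of VideoState average on the order of
   AUDIO_DIFF_AVG_NB successive A-V differences before any sample-level
   correction (bounded by SAMPLE_CORRECTION_PERCENT_MAX) is applied. */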
79
01310af2 80/* NOTE: the size must be big enough to compensate for the hardware audio buffer size */
81#define SAMPLE_ARRAY_SIZE (2*65536)
82
03ae87a3 83static int sws_flags = SWS_BICUBIC;
84
01310af2 85typedef struct PacketQueue {
86 AVPacketList *first_pkt, *last_pkt;
87 int nb_packets;
88 int size;
89 int abort_request;
90 SDL_mutex *mutex;
91 SDL_cond *cond;
92} PacketQueue;
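/* PacketQueue is a mutex/condvar protected FIFO of demuxed AVPackets: the
   read thread is the producer via packet_queue_put(), the audio, video and
   subtitle decoders are the consumers via packet_queue_get(), which may block
   until data arrives. "size" tracks the buffered byte count (compared against
   limits such as MAX_QUEUE_SIZE by the read thread), abort_request wakes up
   any blocked consumer, and the special flush_pkt entry queued on seeks tells
   a decoder to call avcodec_flush_buffers(). */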
93
562f382c 94#define VIDEO_PICTURE_QUEUE_SIZE 2
72ce053b 95#define SUBPICTURE_QUEUE_SIZE 4
01310af2 96
97typedef struct VideoPicture {
267e9dfa 98 double pts; ///<presentation time stamp for this picture
d38c9e7a 99 double target_clock; ///<av_gettime() time at which this should be displayed ideally
1a620dd7 100 int64_t pos; ///<byte position in file
01310af2 101 SDL_Overlay *bmp;
102 int width, height; /* source height & width */
103 int allocated;
917d2bb3 104 enum PixelFormat pix_fmt;
105
106#if CONFIG_AVFILTER
107 AVFilterPicRef *picref;
108#endif
01310af2 109} VideoPicture;
110
72ce053b 111typedef struct SubPicture {
112 double pts; /* presentation time stamp for this picture */
113 AVSubtitle sub;
114} SubPicture;
115
01310af2 116enum {
117 AV_SYNC_AUDIO_MASTER, /* default choice */
118 AV_SYNC_VIDEO_MASTER,
638c9d91 119 AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */
01310af2 120};
121
122typedef struct VideoState {
123 SDL_Thread *parse_tid;
124 SDL_Thread *video_tid;
d38c9e7a 125 SDL_Thread *refresh_tid;
638c9d91 126 AVInputFormat *iformat;
01310af2 127 int no_background;
128 int abort_request;
129 int paused;
416e3508 130 int last_paused;
72ea344b 131 int seek_req;
3ba1438d 132 int seek_flags;
72ea344b 133 int64_t seek_pos;
4ed29207 134 int64_t seek_rel;
f5668147 135 int read_pause_return;
01310af2 136 AVFormatContext *ic;
137 int dtg_active_format;
138
139 int audio_stream;
115329f1 140
01310af2 141 int av_sync_type;
638c9d91 142 double external_clock; /* external clock base */
143 int64_t external_clock_time;
115329f1 144
638c9d91 145 double audio_clock;
146 double audio_diff_cum; /* used for AV difference average computation */
147 double audio_diff_avg_coef;
148 double audio_diff_threshold;
149 int audio_diff_avg_count;
01310af2 150 AVStream *audio_st;
151 PacketQueue audioq;
152 int audio_hw_buf_size;
153 /* samples output by the codec. we reserve more space for avsync
154 compensation */
c6727809 155 DECLARE_ALIGNED(16,uint8_t,audio_buf1)[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
156 DECLARE_ALIGNED(16,uint8_t,audio_buf2)[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
5a4476e2 157 uint8_t *audio_buf;
7fea94ce 158 unsigned int audio_buf_size; /* in bytes */
01310af2 159 int audio_buf_index; /* in bytes */
bea18375 160 AVPacket audio_pkt_temp;
01310af2 161 AVPacket audio_pkt;
5a4476e2 162 enum SampleFormat audio_src_fmt;
163 AVAudioConvert *reformat_ctx;
115329f1 164
01310af2 165 int show_audio; /* if true, display audio samples */
166 int16_t sample_array[SAMPLE_ARRAY_SIZE];
167 int sample_array_index;
5e0257e3 168 int last_i_start;
166621ab 169 RDFTContext *rdft;
12eeda34 170 int rdft_bits;
171 int xpos;
115329f1 172
72ce053b 173 SDL_Thread *subtitle_tid;
174 int subtitle_stream;
175 int subtitle_stream_changed;
176 AVStream *subtitle_st;
177 PacketQueue subtitleq;
178 SubPicture subpq[SUBPICTURE_QUEUE_SIZE];
179 int subpq_size, subpq_rindex, subpq_windex;
180 SDL_mutex *subpq_mutex;
181 SDL_cond *subpq_cond;
115329f1 182
638c9d91 183 double frame_timer;
184 double frame_last_pts;
185 double frame_last_delay;
115329f1 186 double video_clock; ///<pts of last decoded frame / predicted pts of next decoded frame
01310af2 187 int video_stream;
188 AVStream *video_st;
189 PacketQueue videoq;
267e9dfa 190 double video_current_pts; ///<current displayed pts (different from video_clock if frame fifos are used)
68aefbe8 191 double video_current_pts_drift; ///<video_current_pts - time (av_gettime) at which we updated video_current_pts - used to have running video pts
1a620dd7 192 int64_t video_current_pos; ///<current displayed file pos
01310af2 193 VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
194 int pictq_size, pictq_rindex, pictq_windex;
195 SDL_mutex *pictq_mutex;
196 SDL_cond *pictq_cond;
917d2bb3 197#if !CONFIG_AVFILTER
3ac56e28 198 struct SwsContext *img_convert_ctx;
917d2bb3 199#endif
115329f1 200
01310af2 201 // QETimer *video_timer;
202 char filename[1024];
203 int width, height, xleft, ytop;
41db429d 204
205 int64_t faulty_pts;
206 int64_t faulty_dts;
207 int64_t last_dts_for_fault_detection;
208 int64_t last_pts_for_fault_detection;
209
917d2bb3 210#if CONFIG_AVFILTER
211 AVFilterContext *out_video_filter; ///<the last filter in the video chain
212#endif
d38c9e7a 213
214 float skip_frames;
215 float skip_frames_index;
216 int refresh;
01310af2 217} VideoState;
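/* One VideoState describes the file being played. The threads referenced
   above split the work roughly as follows: parse_tid demuxes packets into the
   three packet queues, video_tid decodes video into pictq, subtitle_tid
   decodes subtitles into subpq, refresh_tid periodically pushes
   FF_REFRESH_EVENT so the main loop redraws, and audio is decoded from the
   SDL audio callback (see audio_callback_time). pictq and subpq are small
   ring buffers bounded by VIDEO_PICTURE_QUEUE_SIZE / SUBPICTURE_QUEUE_SIZE
   and protected by their mutex/cond pairs. */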
218
358061f6 219static void show_help(void);
638c9d91 220static int audio_write_get_buf_size(VideoState *is);
01310af2 221
222/* options specified by the user */
223static AVInputFormat *file_iformat;
224static const char *input_filename;
076db5ed 225static const char *window_title;
01310af2 226static int fs_screen_width;
227static int fs_screen_height;
fccb19e3 228static int screen_width = 0;
229static int screen_height = 0;
e4b89522 230static int frame_width = 0;
231static int frame_height = 0;
232static enum PixelFormat frame_pix_fmt = PIX_FMT_NONE;
01310af2 233static int audio_disable;
234static int video_disable;
72415b2a 235static int wanted_stream[AVMEDIA_TYPE_NB]={
236 [AVMEDIA_TYPE_AUDIO]=-1,
237 [AVMEDIA_TYPE_VIDEO]=-1,
238 [AVMEDIA_TYPE_SUBTITLE]=-1,
5b369983 239};
70a4764d 240static int seek_by_bytes=-1;
01310af2 241static int display_disable;
1e1a0b18 242static int show_status = 1;
638c9d91 243static int av_sync_type = AV_SYNC_AUDIO_MASTER;
72ea344b 244static int64_t start_time = AV_NOPTS_VALUE;
d834d63b 245static int64_t duration = AV_NOPTS_VALUE;
e26a8335 246static int debug = 0;
0c9bbaec 247static int debug_mv = 0;
bba04f1e 248static int step = 0;
c62c07d3 249static int thread_count = 1;
6387c3e6 250static int workaround_bugs = 1;
6fc5b059 251static int fast = 0;
30bc6613 252static int genpts = 0;
178fcca8 253static int lowres = 0;
254static int idct = FF_IDCT_AUTO;
8c3eba7c 255static enum AVDiscard skip_frame= AVDISCARD_DEFAULT;
256static enum AVDiscard skip_idct= AVDISCARD_DEFAULT;
257static enum AVDiscard skip_loop_filter= AVDISCARD_DEFAULT;
047599a4 258static int error_recognition = FF_ER_CAREFUL;
1b51e051 259static int error_concealment = 3;
41db429d 260static int decoder_reorder_pts= -1;
2d1653b0 261static int autoexit;
1922c0a7 262static int loop=1;
d38c9e7a 263static int framedrop=1;
2b3da32f 264
265static int rdftspeed=20;
917d2bb3 266#if CONFIG_AVFILTER
267static char *vfilters = NULL;
268#endif
01310af2 269
270/* current context */
271static int is_full_screen;
272static VideoState *cur_stream;
5e0257e3 273static int64_t audio_callback_time;
01310af2 274
2c676c33 275static AVPacket flush_pkt;
39c6a118 276
01310af2 277#define FF_ALLOC_EVENT (SDL_USEREVENT)
278#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
638c9d91 279#define FF_QUIT_EVENT (SDL_USEREVENT + 2)
01310af2 280
2c676c33 281static SDL_Surface *screen;
01310af2 282
515bd00e 283static int packet_queue_put(PacketQueue *q, AVPacket *pkt);
284
01310af2 285/* packet queue handling */
286static void packet_queue_init(PacketQueue *q)
287{
288 memset(q, 0, sizeof(PacketQueue));
289 q->mutex = SDL_CreateMutex();
290 q->cond = SDL_CreateCond();
515bd00e 291 packet_queue_put(q, &flush_pkt);
01310af2 292}
293
72ea344b 294static void packet_queue_flush(PacketQueue *q)
01310af2 295{
296 AVPacketList *pkt, *pkt1;
297
687fae2b 298 SDL_LockMutex(q->mutex);
01310af2 299 for(pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
300 pkt1 = pkt->next;
301 av_free_packet(&pkt->pkt);
da6c4573 302 av_freep(&pkt);
01310af2 303 }
72ea344b 304 q->last_pkt = NULL;
305 q->first_pkt = NULL;
306 q->nb_packets = 0;
307 q->size = 0;
687fae2b 308 SDL_UnlockMutex(q->mutex);
72ea344b 309}
310
311static void packet_queue_end(PacketQueue *q)
312{
313 packet_queue_flush(q);
01310af2 314 SDL_DestroyMutex(q->mutex);
315 SDL_DestroyCond(q->cond);
316}
317
318static int packet_queue_put(PacketQueue *q, AVPacket *pkt)
319{
320 AVPacketList *pkt1;
321
72ea344b 322 /* duplicate the packet */
39c6a118 323 if (pkt!=&flush_pkt && av_dup_packet(pkt) < 0)
72ea344b 324 return -1;
115329f1 325
01310af2 326 pkt1 = av_malloc(sizeof(AVPacketList));
327 if (!pkt1)
328 return -1;
329 pkt1->pkt = *pkt;
330 pkt1->next = NULL;
331
72ea344b 332
01310af2 333 SDL_LockMutex(q->mutex);
334
335 if (!q->last_pkt)
336
337 q->first_pkt = pkt1;
338 else
339 q->last_pkt->next = pkt1;
340 q->last_pkt = pkt1;
341 q->nb_packets++;
7b776589 342 q->size += pkt1->pkt.size + sizeof(*pkt1);
01310af2 343 /* XXX: should duplicate packet data in DV case */
344 SDL_CondSignal(q->cond);
345
346 SDL_UnlockMutex(q->mutex);
347 return 0;
348}
349
350static void packet_queue_abort(PacketQueue *q)
351{
352 SDL_LockMutex(q->mutex);
353
354 q->abort_request = 1;
115329f1 355
01310af2 356 SDL_CondSignal(q->cond);
357
358 SDL_UnlockMutex(q->mutex);
359}
360
361/* return < 0 if aborted, 0 if no packet and > 0 if packet. */
362static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
363{
364 AVPacketList *pkt1;
365 int ret;
366
367 SDL_LockMutex(q->mutex);
368
369 for(;;) {
370 if (q->abort_request) {
371 ret = -1;
372 break;
373 }
115329f1 374
01310af2 375 pkt1 = q->first_pkt;
376 if (pkt1) {
377 q->first_pkt = pkt1->next;
378 if (!q->first_pkt)
379 q->last_pkt = NULL;
380 q->nb_packets--;
7b776589 381 q->size -= pkt1->pkt.size + sizeof(*pkt1);
01310af2 382 *pkt = pkt1->pkt;
383 av_free(pkt1);
384 ret = 1;
385 break;
386 } else if (!block) {
387 ret = 0;
388 break;
389 } else {
390 SDL_CondWait(q->cond, q->mutex);
391 }
392 }
393 SDL_UnlockMutex(q->mutex);
394 return ret;
395}
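/* Typical consumer pattern, as used by the decoder threads below:
 *
 *     AVPacket pkt;
 *     if (packet_queue_get(&is->videoq, &pkt, 1) < 0)
 *         return -1;                      // queue aborted, give up
 *     if (pkt.data == flush_pkt.data) {
 *         avcodec_flush_buffers(...);     // a seek happened, reset the decoder
 *     } else {
 *         // ... decode pkt ...
 *         av_free_packet(&pkt);
 *     }
 *
 * The "block" argument selects between waiting on q->cond and returning 0
 * immediately when the queue is empty. */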
396
115329f1 397static inline void fill_rectangle(SDL_Surface *screen,
01310af2 398 int x, int y, int w, int h, int color)
399{
400 SDL_Rect rect;
401 rect.x = x;
402 rect.y = y;
403 rect.w = w;
404 rect.h = h;
405 SDL_FillRect(screen, &rect, color);
406}
407
408#if 0
409/* draw only the border of a rectangle */
410void fill_border(VideoState *s, int x, int y, int w, int h, int color)
411{
412 int w1, w2, h1, h2;
413
414 /* fill the background */
415 w1 = x;
416 if (w1 < 0)
417 w1 = 0;
418 w2 = s->width - (x + w);
419 if (w2 < 0)
420 w2 = 0;
421 h1 = y;
422 if (h1 < 0)
423 h1 = 0;
424 h2 = s->height - (y + h);
425 if (h2 < 0)
426 h2 = 0;
115329f1
DB
427 fill_rectangle(screen,
428 s->xleft, s->ytop,
429 w1, s->height,
01310af2 430 color);
115329f1
DB
431 fill_rectangle(screen,
432 s->xleft + s->width - w2, s->ytop,
433 w2, s->height,
01310af2 434 color);
115329f1
DB
435 fill_rectangle(screen,
436 s->xleft + w1, s->ytop,
437 s->width - w1 - w2, h1,
01310af2 438 color);
115329f1 439 fill_rectangle(screen,
01310af2
FB
440 s->xleft + w1, s->ytop + s->height - h2,
441 s->width - w1 - w2, h2,
442 color);
443}
444#endif
445
72ce053b 446#define ALPHA_BLEND(a, oldp, newp, s)\
447((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s))
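/* ALPHA_BLEND mixes an existing sample "oldp" with a new sample "newp" using
   an 8-bit alpha value, in fixed point. The shift "s" lets the same macro
   blend a sum of two (s=1) or four (s=2) accumulated chroma samples at once,
   which is how blend_subrect() below averages U/V over the 2x2 luma block of
   a YUV420 picture; with s=0 it is the usual (old*(255-a) + new*a)/255 blend. */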
448
449#define RGBA_IN(r, g, b, a, s)\
450{\
451 unsigned int v = ((const uint32_t *)(s))[0];\
452 a = (v >> 24) & 0xff;\
453 r = (v >> 16) & 0xff;\
454 g = (v >> 8) & 0xff;\
455 b = v & 0xff;\
456}
457
458#define YUVA_IN(y, u, v, a, s, pal)\
459{\
57cf99f2 460 unsigned int val = ((const uint32_t *)(pal))[*(const uint8_t*)(s)];\
72ce053b
IC
461 a = (val >> 24) & 0xff;\
462 y = (val >> 16) & 0xff;\
463 u = (val >> 8) & 0xff;\
464 v = val & 0xff;\
465}
466
467#define YUVA_OUT(d, y, u, v, a)\
468{\
469 ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\
470}
471
472
473#define BPP 1
474
0a8cd696 475static void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh)
72ce053b
IC
476{
477 int wrap, wrap3, width2, skip2;
478 int y, u, v, a, u1, v1, a1, w, h;
479 uint8_t *lum, *cb, *cr;
480 const uint8_t *p;
481 const uint32_t *pal;
9cb5a11e
RD
482 int dstx, dsty, dstw, dsth;
483
7cf9c6ae
MN
484 dstw = av_clip(rect->w, 0, imgw);
485 dsth = av_clip(rect->h, 0, imgh);
486 dstx = av_clip(rect->x, 0, imgw - dstw);
487 dsty = av_clip(rect->y, 0, imgh - dsth);
9cb5a11e
RD
488 lum = dst->data[0] + dsty * dst->linesize[0];
489 cb = dst->data[1] + (dsty >> 1) * dst->linesize[1];
490 cr = dst->data[2] + (dsty >> 1) * dst->linesize[2];
491
f54b31b9 492 width2 = ((dstw + 1) >> 1) + (dstx & ~dstw & 1);
9cb5a11e 493 skip2 = dstx >> 1;
72ce053b 494 wrap = dst->linesize[0];
25b4c651
MN
495 wrap3 = rect->pict.linesize[0];
496 p = rect->pict.data[0];
497 pal = (const uint32_t *)rect->pict.data[1]; /* Now in YCrCb! */
115329f1 498
9cb5a11e
RD
499 if (dsty & 1) {
500 lum += dstx;
72ce053b
IC
501 cb += skip2;
502 cr += skip2;
115329f1 503
9cb5a11e 504 if (dstx & 1) {
72ce053b
IC
505 YUVA_IN(y, u, v, a, p, pal);
506 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
507 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
508 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
509 cb++;
510 cr++;
511 lum++;
512 p += BPP;
513 }
9cb5a11e 514 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
515 YUVA_IN(y, u, v, a, p, pal);
516 u1 = u;
517 v1 = v;
518 a1 = a;
519 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
520
521 YUVA_IN(y, u, v, a, p + BPP, pal);
522 u1 += u;
523 v1 += v;
524 a1 += a;
525 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
526 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
527 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
528 cb++;
529 cr++;
530 p += 2 * BPP;
531 lum += 2;
532 }
533 if (w) {
534 YUVA_IN(y, u, v, a, p, pal);
535 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
536 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
537 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
676ef505
BA
538 p++;
539 lum++;
72ce053b 540 }
4606a059
BA
541 p += wrap3 - dstw * BPP;
542 lum += wrap - dstw - dstx;
72ce053b
IC
543 cb += dst->linesize[1] - width2 - skip2;
544 cr += dst->linesize[2] - width2 - skip2;
545 }
9cb5a11e
RD
546 for(h = dsth - (dsty & 1); h >= 2; h -= 2) {
547 lum += dstx;
72ce053b
IC
548 cb += skip2;
549 cr += skip2;
115329f1 550
9cb5a11e 551 if (dstx & 1) {
72ce053b
IC
552 YUVA_IN(y, u, v, a, p, pal);
553 u1 = u;
554 v1 = v;
555 a1 = a;
556 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
557 p += wrap3;
558 lum += wrap;
559 YUVA_IN(y, u, v, a, p, pal);
560 u1 += u;
561 v1 += v;
562 a1 += a;
563 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
564 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
565 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
566 cb++;
567 cr++;
568 p += -wrap3 + BPP;
569 lum += -wrap + 1;
570 }
9cb5a11e 571 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
572 YUVA_IN(y, u, v, a, p, pal);
573 u1 = u;
574 v1 = v;
575 a1 = a;
576 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
577
f8ca63e8 578 YUVA_IN(y, u, v, a, p + BPP, pal);
72ce053b
IC
579 u1 += u;
580 v1 += v;
581 a1 += a;
582 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
583 p += wrap3;
584 lum += wrap;
585
586 YUVA_IN(y, u, v, a, p, pal);
587 u1 += u;
588 v1 += v;
589 a1 += a;
590 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
591
f8ca63e8 592 YUVA_IN(y, u, v, a, p + BPP, pal);
72ce053b
IC
593 u1 += u;
594 v1 += v;
595 a1 += a;
596 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
597
598 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
599 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);
600
601 cb++;
602 cr++;
603 p += -wrap3 + 2 * BPP;
604 lum += -wrap + 2;
605 }
606 if (w) {
607 YUVA_IN(y, u, v, a, p, pal);
608 u1 = u;
609 v1 = v;
610 a1 = a;
611 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
612 p += wrap3;
613 lum += wrap;
614 YUVA_IN(y, u, v, a, p, pal);
615 u1 += u;
616 v1 += v;
617 a1 += a;
618 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
619 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
620 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
621 cb++;
622 cr++;
623 p += -wrap3 + BPP;
624 lum += -wrap + 1;
625 }
9cb5a11e
RD
626 p += wrap3 + (wrap3 - dstw * BPP);
627 lum += wrap + (wrap - dstw - dstx);
72ce053b
IC
628 cb += dst->linesize[1] - width2 - skip2;
629 cr += dst->linesize[2] - width2 - skip2;
630 }
631 /* handle odd height */
632 if (h) {
9cb5a11e 633 lum += dstx;
72ce053b
IC
634 cb += skip2;
635 cr += skip2;
115329f1 636
9cb5a11e 637 if (dstx & 1) {
72ce053b
IC
638 YUVA_IN(y, u, v, a, p, pal);
639 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
640 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
641 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
642 cb++;
643 cr++;
644 lum++;
645 p += BPP;
646 }
9cb5a11e 647 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
648 YUVA_IN(y, u, v, a, p, pal);
649 u1 = u;
650 v1 = v;
651 a1 = a;
652 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
653
654 YUVA_IN(y, u, v, a, p + BPP, pal);
655 u1 += u;
656 v1 += v;
657 a1 += a;
658 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
659 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u, 1);
660 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v, 1);
661 cb++;
662 cr++;
663 p += 2 * BPP;
664 lum += 2;
665 }
666 if (w) {
667 YUVA_IN(y, u, v, a, p, pal);
668 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
669 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
670 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
671 }
672 }
673}
674
675static void free_subpicture(SubPicture *sp)
676{
677 int i;
115329f1 678
72ce053b
IC
679 for (i = 0; i < sp->sub.num_rects; i++)
680 {
25b4c651
MN
681 av_freep(&sp->sub.rects[i]->pict.data[0]);
682 av_freep(&sp->sub.rects[i]->pict.data[1]);
db4fac64 683 av_freep(&sp->sub.rects[i]);
72ce053b 684 }
115329f1 685
72ce053b 686 av_free(sp->sub.rects);
115329f1 687
72ce053b
IC
688 memset(&sp->sub, 0, sizeof(AVSubtitle));
689}
690
01310af2
FB
691static void video_image_display(VideoState *is)
692{
693 VideoPicture *vp;
72ce053b
IC
694 SubPicture *sp;
695 AVPicture pict;
01310af2
FB
696 float aspect_ratio;
697 int width, height, x, y;
698 SDL_Rect rect;
72ce053b 699 int i;
01310af2
FB
700
701 vp = &is->pictq[is->pictq_rindex];
702 if (vp->bmp) {
917d2bb3
MN
703#if CONFIG_AVFILTER
704 if (vp->picref->pixel_aspect.num == 0)
705 aspect_ratio = 0;
706 else
707 aspect_ratio = av_q2d(vp->picref->pixel_aspect);
708#else
709
01310af2 710 /* XXX: use variable in the frame */
c30a4489
AJ
711 if (is->video_st->sample_aspect_ratio.num)
712 aspect_ratio = av_q2d(is->video_st->sample_aspect_ratio);
713 else if (is->video_st->codec->sample_aspect_ratio.num)
714 aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio);
72ea344b 715 else
c30a4489 716 aspect_ratio = 0;
917d2bb3 717#endif
01310af2 718 if (aspect_ratio <= 0.0)
c30a4489 719 aspect_ratio = 1.0;
917d2bb3 720 aspect_ratio *= (float)vp->width / (float)vp->height;
01310af2
FB
721 /* if an active format is indicated, then it overrides the
722 mpeg format */
723#if 0
01f4895c
MN
724 if (is->video_st->codec->dtg_active_format != is->dtg_active_format) {
725 is->dtg_active_format = is->video_st->codec->dtg_active_format;
01310af2
FB
726 printf("dtg_active_format=%d\n", is->dtg_active_format);
727 }
728#endif
729#if 0
01f4895c 730 switch(is->video_st->codec->dtg_active_format) {
01310af2
FB
731 case FF_DTG_AFD_SAME:
732 default:
733 /* nothing to do */
734 break;
735 case FF_DTG_AFD_4_3:
736 aspect_ratio = 4.0 / 3.0;
737 break;
738 case FF_DTG_AFD_16_9:
739 aspect_ratio = 16.0 / 9.0;
740 break;
741 case FF_DTG_AFD_14_9:
742 aspect_ratio = 14.0 / 9.0;
743 break;
744 case FF_DTG_AFD_4_3_SP_14_9:
745 aspect_ratio = 14.0 / 9.0;
746 break;
747 case FF_DTG_AFD_16_9_SP_14_9:
748 aspect_ratio = 14.0 / 9.0;
749 break;
750 case FF_DTG_AFD_SP_4_3:
751 aspect_ratio = 4.0 / 3.0;
752 break;
753 }
754#endif
755
72ce053b
IC
756 if (is->subtitle_st)
757 {
758 if (is->subpq_size > 0)
759 {
760 sp = &is->subpq[is->subpq_rindex];
761
762 if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000))
763 {
764 SDL_LockYUVOverlay (vp->bmp);
765
766 pict.data[0] = vp->bmp->pixels[0];
767 pict.data[1] = vp->bmp->pixels[2];
768 pict.data[2] = vp->bmp->pixels[1];
769
770 pict.linesize[0] = vp->bmp->pitches[0];
771 pict.linesize[1] = vp->bmp->pitches[2];
772 pict.linesize[2] = vp->bmp->pitches[1];
773
774 for (i = 0; i < sp->sub.num_rects; i++)
db4fac64 775 blend_subrect(&pict, sp->sub.rects[i],
0a8cd696 776 vp->bmp->w, vp->bmp->h);
72ce053b
IC
777
778 SDL_UnlockYUVOverlay (vp->bmp);
779 }
780 }
781 }
782
783
01310af2
FB
784 /* XXX: we suppose the screen has a 1.0 pixel ratio */
785 height = is->height;
bb6c34e5 786 width = ((int)rint(height * aspect_ratio)) & ~1;
01310af2
FB
787 if (width > is->width) {
788 width = is->width;
bb6c34e5 789 height = ((int)rint(width / aspect_ratio)) & ~1;
01310af2
FB
790 }
791 x = (is->width - width) / 2;
792 y = (is->height - height) / 2;
793 if (!is->no_background) {
794 /* fill the background */
795 // fill_border(is, x, y, width, height, QERGB(0x00, 0x00, 0x00));
796 } else {
797 is->no_background = 0;
798 }
799 rect.x = is->xleft + x;
2f6547fb 800 rect.y = is->ytop + y;
01310af2
FB
801 rect.w = width;
802 rect.h = height;
803 SDL_DisplayYUVOverlay(vp->bmp, &rect);
804 } else {
805#if 0
115329f1
DB
806 fill_rectangle(screen,
807 is->xleft, is->ytop, is->width, is->height,
01310af2
FB
808 QERGB(0x00, 0x00, 0x00));
809#endif
810 }
811}
812
813static inline int compute_mod(int a, int b)
814{
815 a = a % b;
115329f1 816 if (a >= 0)
01310af2 817 return a;
818 else
819 return a + b;
820}
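/* compute_mod() is a mathematical modulo that always lands in [0, b): e.g.
   compute_mod(-3, SAMPLE_ARRAY_SIZE) wraps backwards to SAMPLE_ARRAY_SIZE - 3,
   which is what the circular sample_array indexing in video_audio_display()
   relies on. */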
821
822static void video_audio_display(VideoState *s)
823{
824 int i, i_start, x, y1, y, ys, delay, n, nb_display_channels;
825 int ch, channels, h, h2, bgcolor, fgcolor;
826 int16_t time_diff;
4c7c7645
MN
827 int rdft_bits, nb_freq;
828
829 for(rdft_bits=1; (1<<rdft_bits)<2*s->height; rdft_bits++)
830 ;
831 nb_freq= 1<<(rdft_bits-1);
115329f1 832
01310af2 833 /* compute display index : center on currently output samples */
01f4895c 834 channels = s->audio_st->codec->channels;
01310af2 835 nb_display_channels = channels;
5e0257e3 836 if (!s->paused) {
4c7c7645 837 int data_used= s->show_audio==1 ? s->width : (2*nb_freq);
5e0257e3
FB
838 n = 2 * channels;
839 delay = audio_write_get_buf_size(s);
840 delay /= n;
115329f1 841
5e0257e3
FB
842 /* to be more precise, we take into account the time spent since
843 the last buffer computation */
844 if (audio_callback_time) {
845 time_diff = av_gettime() - audio_callback_time;
122dcdcb 846 delay -= (time_diff * s->audio_st->codec->sample_rate) / 1000000;
5e0257e3 847 }
115329f1 848
122dcdcb 849 delay += 2*data_used;
4c7c7645
MN
850 if (delay < data_used)
851 delay = data_used;
ac50bcc8
MN
852
853 i_start= x = compute_mod(s->sample_array_index - delay * channels, SAMPLE_ARRAY_SIZE);
12eeda34 854 if(s->show_audio==1){
6c7165c7
JM
855 h= INT_MIN;
856 for(i=0; i<1000; i+=channels){
857 int idx= (SAMPLE_ARRAY_SIZE + x - i) % SAMPLE_ARRAY_SIZE;
858 int a= s->sample_array[idx];
859 int b= s->sample_array[(idx + 4*channels)%SAMPLE_ARRAY_SIZE];
860 int c= s->sample_array[(idx + 5*channels)%SAMPLE_ARRAY_SIZE];
861 int d= s->sample_array[(idx + 9*channels)%SAMPLE_ARRAY_SIZE];
862 int score= a-d;
863 if(h<score && (b^c)<0){
864 h= score;
865 i_start= idx;
866 }
ac50bcc8
MN
867 }
868 }
869
5e0257e3
FB
870 s->last_i_start = i_start;
871 } else {
872 i_start = s->last_i_start;
01310af2
FB
873 }
874
01310af2 875 bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
12eeda34 876 if(s->show_audio==1){
6c7165c7
JM
877 fill_rectangle(screen,
878 s->xleft, s->ytop, s->width, s->height,
879 bgcolor);
880
881 fgcolor = SDL_MapRGB(screen->format, 0xff, 0xff, 0xff);
882
883 /* total height for one channel */
884 h = s->height / nb_display_channels;
885 /* graph height / 2 */
886 h2 = (h * 9) / 20;
887 for(ch = 0;ch < nb_display_channels; ch++) {
888 i = i_start + ch;
889 y1 = s->ytop + ch * h + (h / 2); /* position of center line */
890 for(x = 0; x < s->width; x++) {
891 y = (s->sample_array[i] * h2) >> 15;
892 if (y < 0) {
893 y = -y;
894 ys = y1 - y;
895 } else {
896 ys = y1;
897 }
898 fill_rectangle(screen,
899 s->xleft + x, ys, 1, y,
900 fgcolor);
901 i += channels;
902 if (i >= SAMPLE_ARRAY_SIZE)
903 i -= SAMPLE_ARRAY_SIZE;
01310af2 904 }
01310af2 905 }
01310af2 906
6c7165c7 907 fgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0xff);
01310af2 908
6c7165c7
JM
909 for(ch = 1;ch < nb_display_channels; ch++) {
910 y = s->ytop + ch * h;
911 fill_rectangle(screen,
912 s->xleft, y, s->width, 1,
913 fgcolor);
914 }
915 SDL_UpdateRect(screen, s->xleft, s->ytop, s->width, s->height);
12eeda34 916 }else{
12eeda34 917 nb_display_channels= FFMIN(nb_display_channels, 2);
12eeda34 918 if(rdft_bits != s->rdft_bits){
166621ab
MR
919 av_rdft_end(s->rdft);
920 s->rdft = av_rdft_init(rdft_bits, DFT_R2C);
12eeda34
MN
921 s->rdft_bits= rdft_bits;
922 }
12eeda34
MN
923 {
924 FFTSample data[2][2*nb_freq];
925 for(ch = 0;ch < nb_display_channels; ch++) {
926 i = i_start + ch;
927 for(x = 0; x < 2*nb_freq; x++) {
928 double w= (x-nb_freq)*(1.0/nb_freq);
929 data[ch][x]= s->sample_array[i]*(1.0-w*w);
930 i += channels;
931 if (i >= SAMPLE_ARRAY_SIZE)
932 i -= SAMPLE_ARRAY_SIZE;
933 }
166621ab 934 av_rdft_calc(s->rdft, data[ch]);
12eeda34
MN
935 }
936 //least efficient way to do this, we should of course directly access it but it's more than fast enough
092421cf 937 for(y=0; y<s->height; y++){
12eeda34
MN
938 double w= 1/sqrt(nb_freq);
939 int a= sqrt(w*sqrt(data[0][2*y+0]*data[0][2*y+0] + data[0][2*y+1]*data[0][2*y+1]));
940 int b= sqrt(w*sqrt(data[1][2*y+0]*data[1][2*y+0] + data[1][2*y+1]*data[1][2*y+1]));
941 a= FFMIN(a,255);
942 b= FFMIN(b,255);
943 fgcolor = SDL_MapRGB(screen->format, a, b, (a+b)/2);
944
945 fill_rectangle(screen,
946 s->xpos, s->height-y, 1, 1,
947 fgcolor);
948 }
949 }
950 SDL_UpdateRect(screen, s->xpos, s->ytop, 1, s->height);
951 s->xpos++;
952 if(s->xpos >= s->width)
953 s->xpos= s->xleft;
954 }
01310af2 955}
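/* Summary of the two visualizations above: with show_audio == 1 the raw
   waveform of every channel is drawn across the window (the search over the
   last ~1000 samples tries to lock onto a consistent zero crossing so the
   trace does not jitter); otherwise an RDFT of the first two channels is
   computed with av_rdft_calc() and painted one column per refresh at
   x = s->xpos, producing a scrolling spectrogram. */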
956
990c8438
MN
957static int video_open(VideoState *is){
958 int flags = SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL;
959 int w,h;
960
fb84155b
MN
961 if(is_full_screen) flags |= SDL_FULLSCREEN;
962 else flags |= SDL_RESIZABLE;
963
990c8438
MN
964 if (is_full_screen && fs_screen_width) {
965 w = fs_screen_width;
966 h = fs_screen_height;
fb84155b
MN
967 } else if(!is_full_screen && screen_width){
968 w = screen_width;
969 h = screen_height;
917d2bb3
MN
970#if CONFIG_AVFILTER
971 }else if (is->out_video_filter && is->out_video_filter->inputs[0]){
972 w = is->out_video_filter->inputs[0]->w;
973 h = is->out_video_filter->inputs[0]->h;
974#else
fb84155b
MN
975 }else if (is->video_st && is->video_st->codec->width){
976 w = is->video_st->codec->width;
977 h = is->video_st->codec->height;
917d2bb3 978#endif
990c8438 979 } else {
fb84155b
MN
980 w = 640;
981 h = 480;
990c8438 982 }
d3d7b12e
MN
983 if(screen && is->width == screen->w && screen->w == w
984 && is->height== screen->h && screen->h == h)
985 return 0;
986
c97f5402 987#ifndef __APPLE__
990c8438
MN
988 screen = SDL_SetVideoMode(w, h, 0, flags);
989#else
990 /* setting bits_per_pixel = 0 or 32 causes blank video on OS X */
991 screen = SDL_SetVideoMode(w, h, 24, flags);
992#endif
993 if (!screen) {
994 fprintf(stderr, "SDL: could not set video mode - exiting\n");
995 return -1;
996 }
076db5ed
RK
997 if (!window_title)
998 window_title = input_filename;
999 SDL_WM_SetCaption(window_title, window_title);
990c8438
MN
1000
1001 is->width = screen->w;
1002 is->height = screen->h;
1003
1004 return 0;
1005}
8c982c5d 1006
01310af2
FB
1007/* display the current picture, if any */
1008static void video_display(VideoState *is)
1009{
8c982c5d
MN
1010 if(!screen)
1011 video_open(cur_stream);
115329f1 1012 if (is->audio_st && is->show_audio)
01310af2
FB
1013 video_audio_display(is);
1014 else if (is->video_st)
1015 video_image_display(is);
1016}
1017
d38c9e7a 1018static int refresh_thread(void *opaque)
01310af2 1019{
d38c9e7a
MN
1020 VideoState *is= opaque;
1021 while(!is->abort_request){
01310af2
FB
1022 SDL_Event event;
1023 event.type = FF_REFRESH_EVENT;
1024 event.user.data1 = opaque;
d38c9e7a
MN
1025 if(!is->refresh){
1026 is->refresh=1;
01310af2 1027 SDL_PushEvent(&event);
d38c9e7a 1028 }
2b3da32f 1029 usleep(is->audio_st && is->show_audio ? rdftspeed*1000 : 5000); //FIXME ideally we should wait the correct time but SDLs event passing is so slow it would be silly
d38c9e7a
MN
1030 }
1031 return 0;
01310af2 1032}
1033
638c9d91 1034/* get the current audio clock value */
1035static double get_audio_clock(VideoState *is)
1036{
1037 double pts;
1038 int hw_buf_size, bytes_per_sec;
1039 pts = is->audio_clock;
1040 hw_buf_size = audio_write_get_buf_size(is);
1041 bytes_per_sec = 0;
1042 if (is->audio_st) {
115329f1 1043 bytes_per_sec = is->audio_st->codec->sample_rate *
01f4895c 1044 2 * is->audio_st->codec->channels;
638c9d91
FB
1045 }
1046 if (bytes_per_sec)
1047 pts -= (double)hw_buf_size / bytes_per_sec;
1048 return pts;
1049}
1050
1051/* get the current video clock value */
1052static double get_video_clock(VideoState *is)
1053{
04108619 1054 if (is->paused) {
41a4cd0c 1055 return is->video_current_pts;
72ea344b 1056 } else {
68aefbe8 1057 return is->video_current_pts_drift + av_gettime() / 1000000.0;
72ea344b 1058 }
638c9d91
FB
1059}
1060
1061/* get the current external clock value */
1062static double get_external_clock(VideoState *is)
1063{
1064 int64_t ti;
1065 ti = av_gettime();
1066 return is->external_clock + ((ti - is->external_clock_time) * 1e-6);
1067}
1068
1069/* get the current master clock value */
1070static double get_master_clock(VideoState *is)
1071{
1072 double val;
1073
72ea344b
FB
1074 if (is->av_sync_type == AV_SYNC_VIDEO_MASTER) {
1075 if (is->video_st)
1076 val = get_video_clock(is);
1077 else
1078 val = get_audio_clock(is);
1079 } else if (is->av_sync_type == AV_SYNC_AUDIO_MASTER) {
1080 if (is->audio_st)
1081 val = get_audio_clock(is);
1082 else
1083 val = get_video_clock(is);
1084 } else {
638c9d91 1085 val = get_external_clock(is);
72ea344b 1086 }
638c9d91 1087 return val;
1088}
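/* get_master_clock() is the single reference used for A/V sync: depending on
   av_sync_type it returns the audio clock (the default), the video clock or
   the free-running external clock, falling back to whichever stream actually
   exists. The video clock is kept as a drift against av_gettime() so it keeps
   advancing smoothly between displayed frames (video_current_pts_drift). */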
1089
72ea344b 1090/* seek in the stream */
2ef46053 1091static void stream_seek(VideoState *is, int64_t pos, int64_t rel, int seek_by_bytes)
72ea344b 1092{
687fae2b
IW
1093 if (!is->seek_req) {
1094 is->seek_pos = pos;
4ed29207 1095 is->seek_rel = rel;
3890dd3a 1096 is->seek_flags &= ~AVSEEK_FLAG_BYTE;
94b594c6
SH
1097 if (seek_by_bytes)
1098 is->seek_flags |= AVSEEK_FLAG_BYTE;
687fae2b
IW
1099 is->seek_req = 1;
1100 }
72ea344b
FB
1101}
1102
1103/* pause or resume the video */
1104static void stream_pause(VideoState *is)
1105{
68aefbe8
MN
1106 if (is->paused) {
1107 is->frame_timer += av_gettime() / 1000000.0 + is->video_current_pts_drift - is->video_current_pts;
f5668147 1108 if(is->read_pause_return != AVERROR(ENOSYS)){
68aefbe8 1109 is->video_current_pts = is->video_current_pts_drift + av_gettime() / 1000000.0;
f5668147 1110 }
68aefbe8 1111 is->video_current_pts_drift = is->video_current_pts - av_gettime() / 1000000.0;
72ea344b 1112 }
68aefbe8 1113 is->paused = !is->paused;
72ea344b 1114}
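/* Note on un-pausing: frame_timer holds the absolute time at which the next
   picture is due, so when playback resumes it has to be pushed forward by the
   time spent paused; that is what the av_gettime()/video_current_pts_drift
   arithmetic above does before the paused flag is toggled. */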
1115
d38c9e7a 1116static double compute_target_time(double frame_current_pts, VideoState *is)
49410784 1117{
d38c9e7a 1118 double delay, sync_threshold, diff;
49410784
TB
1119
1120 /* compute nominal delay */
1121 delay = frame_current_pts - is->frame_last_pts;
1122 if (delay <= 0 || delay >= 10.0) {
1123 /* if incorrect delay, use previous one */
1124 delay = is->frame_last_delay;
443658fd 1125 } else {
712de377 1126 is->frame_last_delay = delay;
443658fd 1127 }
49410784
TB
1128 is->frame_last_pts = frame_current_pts;
1129
1130 /* update delay to follow master synchronisation source */
1131 if (((is->av_sync_type == AV_SYNC_AUDIO_MASTER && is->audio_st) ||
1132 is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
1133 /* if video is slave, we try to correct big delays by
1134 duplicating or deleting a frame */
f04c6e35 1135 diff = get_video_clock(is) - get_master_clock(is);
49410784
TB
1136
1137 /* skip or repeat frame. We take into account the
1138 delay to compute the threshold. I still don't know
1139 if it is the best guess */
1140 sync_threshold = FFMAX(AV_SYNC_THRESHOLD, delay);
1141 if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
1142 if (diff <= -sync_threshold)
1143 delay = 0;
1144 else if (diff >= sync_threshold)
1145 delay = 2 * delay;
1146 }
1147 }
49410784 1148 is->frame_timer += delay;
eecc17a7
TB
1149#if defined(DEBUG_SYNC)
1150 printf("video: delay=%0.3f actual_delay=%0.3f pts=%0.3f A-V=%f\n",
1151 delay, actual_delay, frame_current_pts, -diff);
1152#endif
1153
d38c9e7a 1154 return is->frame_timer;
49410784 1155}
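/* Example of the policy above: with a nominal frame delay of 40 ms, a video
   clock running 80 ms behind the master clock gives diff <= -sync_threshold,
   so the delay collapses to 0 and the frame is shown immediately (and may
   later be dropped by the framedrop logic in video_refresh_timer()); if video
   is ahead by the same amount the delay is doubled and the previous frame
   stays on screen one extra period. frame_timer accumulates these delays as
   an absolute target time in seconds. */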
1156
01310af2
FB
1157/* called to display each frame */
1158static void video_refresh_timer(void *opaque)
1159{
1160 VideoState *is = opaque;
1161 VideoPicture *vp;
638c9d91 1162
72ce053b 1163 SubPicture *sp, *sp2;
01310af2
FB
1164
1165 if (is->video_st) {
d38c9e7a 1166retry:
01310af2 1167 if (is->pictq_size == 0) {
d38c9e7a 1168 //nothing to do, no picture to display in the queue
01310af2 1169 } else {
d38c9e7a
MN
1170 double time= av_gettime()/1000000.0;
1171 double next_target;
638c9d91 1172 /* dequeue the picture */
01310af2 1173 vp = &is->pictq[is->pictq_rindex];
638c9d91 1174
d38c9e7a
MN
1175 if(time < vp->target_clock)
1176 return;
638c9d91
FB
1177 /* update current video pts */
1178 is->video_current_pts = vp->pts;
d38c9e7a 1179 is->video_current_pts_drift = is->video_current_pts - time;
a3cc2160 1180 is->video_current_pos = vp->pos;
d38c9e7a
MN
1181 if(is->pictq_size > 1){
1182 VideoPicture *nextvp= &is->pictq[(is->pictq_rindex+1)%VIDEO_PICTURE_QUEUE_SIZE];
1183 assert(nextvp->target_clock >= vp->target_clock);
1184 next_target= nextvp->target_clock;
1185 }else{
1186 next_target= vp->target_clock + is->video_clock - vp->pts; //FIXME pass durations cleanly
1187 }
1188 if(framedrop && time > next_target){
1189 is->skip_frames *= 1.0 + FRAME_SKIP_FACTOR;
1190 if(is->pictq_size > 1 || time > next_target + 0.5){
1191 /* update queue size and signal for next picture */
1192 if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
1193 is->pictq_rindex = 0;
1194
1195 SDL_LockMutex(is->pictq_mutex);
1196 is->pictq_size--;
1197 SDL_CondSignal(is->pictq_cond);
1198 SDL_UnlockMutex(is->pictq_mutex);
1199 goto retry;
1200 }
1201 }
638c9d91 1202
72ce053b
IC
1203 if(is->subtitle_st) {
1204 if (is->subtitle_stream_changed) {
1205 SDL_LockMutex(is->subpq_mutex);
115329f1 1206
72ce053b
IC
1207 while (is->subpq_size) {
1208 free_subpicture(&is->subpq[is->subpq_rindex]);
115329f1 1209
72ce053b
IC
1210 /* update queue size and signal for next picture */
1211 if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1212 is->subpq_rindex = 0;
115329f1 1213
72ce053b
IC
1214 is->subpq_size--;
1215 }
1216 is->subtitle_stream_changed = 0;
1217
1218 SDL_CondSignal(is->subpq_cond);
1219 SDL_UnlockMutex(is->subpq_mutex);
1220 } else {
1221 if (is->subpq_size > 0) {
1222 sp = &is->subpq[is->subpq_rindex];
1223
1224 if (is->subpq_size > 1)
1225 sp2 = &is->subpq[(is->subpq_rindex + 1) % SUBPICTURE_QUEUE_SIZE];
1226 else
1227 sp2 = NULL;
1228
1229 if ((is->video_current_pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
1230 || (sp2 && is->video_current_pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
1231 {
1232 free_subpicture(sp);
1233
1234 /* update queue size and signal for next picture */
1235 if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1236 is->subpq_rindex = 0;
1237
1238 SDL_LockMutex(is->subpq_mutex);
1239 is->subpq_size--;
1240 SDL_CondSignal(is->subpq_cond);
1241 SDL_UnlockMutex(is->subpq_mutex);
1242 }
1243 }
1244 }
1245 }
1246
01310af2
FB
1247 /* display picture */
1248 video_display(is);
115329f1 1249
01310af2
FB
1250 /* update queue size and signal for next picture */
1251 if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
1252 is->pictq_rindex = 0;
115329f1 1253
01310af2
FB
1254 SDL_LockMutex(is->pictq_mutex);
1255 is->pictq_size--;
1256 SDL_CondSignal(is->pictq_cond);
1257 SDL_UnlockMutex(is->pictq_mutex);
1258 }
1259 } else if (is->audio_st) {
1260 /* draw the next audio frame */
1261
01310af2
FB
1262 /* if only audio stream, then display the audio bars (better
1263 than nothing, just to test the implementation */
115329f1 1264
01310af2
FB
1265 /* display picture */
1266 video_display(is);
01310af2
FB
1267 }
1268 if (show_status) {
1269 static int64_t last_time;
1270 int64_t cur_time;
72ce053b 1271 int aqsize, vqsize, sqsize;
638c9d91 1272 double av_diff;
115329f1 1273
01310af2 1274 cur_time = av_gettime();
1e1a0b18 1275 if (!last_time || (cur_time - last_time) >= 30000) {
01310af2
FB
1276 aqsize = 0;
1277 vqsize = 0;
72ce053b 1278 sqsize = 0;
01310af2
FB
1279 if (is->audio_st)
1280 aqsize = is->audioq.size;
1281 if (is->video_st)
1282 vqsize = is->videoq.size;
72ce053b
IC
1283 if (is->subtitle_st)
1284 sqsize = is->subtitleq.size;
638c9d91
FB
1285 av_diff = 0;
1286 if (is->audio_st && is->video_st)
1287 av_diff = get_audio_clock(is) - get_video_clock(is);
382f3a5b
MN
1288 printf("%7.2f A-V:%7.3f s:%3.1f aq=%5dKB vq=%5dKB sq=%5dB f=%"PRId64"/%"PRId64" \r",
1289 get_master_clock(is), av_diff, FFMAX(is->skip_frames-1, 0), aqsize / 1024, vqsize / 1024, sqsize, is->faulty_dts, is->faulty_pts);
01310af2
FB
1290 fflush(stdout);
1291 last_time = cur_time;
1292 }
1293 }
1294}
1295
1296/* allocate a picture (needs to do that in main thread to avoid
1297 potential locking problems */
1298static void alloc_picture(void *opaque)
1299{
1300 VideoState *is = opaque;
1301 VideoPicture *vp;
01310af2
FB
1302
1303 vp = &is->pictq[is->pictq_windex];
1304
1305 if (vp->bmp)
1306 SDL_FreeYUVOverlay(vp->bmp);
1307
917d2bb3
MN
1308#if CONFIG_AVFILTER
1309 if (vp->picref)
1310 avfilter_unref_pic(vp->picref);
1311 vp->picref = NULL;
1312
1313 vp->width = is->out_video_filter->inputs[0]->w;
1314 vp->height = is->out_video_filter->inputs[0]->h;
1315 vp->pix_fmt = is->out_video_filter->inputs[0]->format;
1316#else
1317 vp->width = is->video_st->codec->width;
1318 vp->height = is->video_st->codec->height;
1319 vp->pix_fmt = is->video_st->codec->pix_fmt;
1320#endif
1321
1322 vp->bmp = SDL_CreateYUVOverlay(vp->width, vp->height,
115329f1 1323 SDL_YV12_OVERLAY,
61890b02 1324 screen);
01310af2
FB
1325
1326 SDL_LockMutex(is->pictq_mutex);
1327 vp->allocated = 1;
1328 SDL_CondSignal(is->pictq_cond);
1329 SDL_UnlockMutex(is->pictq_mutex);
1330}
1331
267e9dfa
MN
1332/**
1333 *
1334 * @param pts the dts of the pkt / pts of the frame and guessed if not known
1335 */
1a620dd7 1336static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, int64_t pos)
01310af2
FB
1337{
1338 VideoPicture *vp;
1339 int dst_pix_fmt;
917d2bb3
MN
1340#if CONFIG_AVFILTER
1341 AVPicture pict_src;
1342#endif
01310af2
FB
1343 /* wait until we have space to put a new picture */
1344 SDL_LockMutex(is->pictq_mutex);
d38c9e7a
MN
1345
1346 if(is->pictq_size>=VIDEO_PICTURE_QUEUE_SIZE && !is->refresh)
1347 is->skip_frames= FFMAX(1.0 - FRAME_SKIP_FACTOR, is->skip_frames * (1.0-FRAME_SKIP_FACTOR));
1348
01310af2
FB
1349 while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE &&
1350 !is->videoq.abort_request) {
1351 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1352 }
1353 SDL_UnlockMutex(is->pictq_mutex);
115329f1 1354
01310af2
FB
1355 if (is->videoq.abort_request)
1356 return -1;
1357
1358 vp = &is->pictq[is->pictq_windex];
1359
1360 /* alloc or resize hardware picture buffer */
115329f1 1361 if (!vp->bmp ||
917d2bb3
MN
1362#if CONFIG_AVFILTER
1363 vp->width != is->out_video_filter->inputs[0]->w ||
1364 vp->height != is->out_video_filter->inputs[0]->h) {
1365#else
01f4895c
MN
1366 vp->width != is->video_st->codec->width ||
1367 vp->height != is->video_st->codec->height) {
917d2bb3 1368#endif
01310af2
FB
1369 SDL_Event event;
1370
1371 vp->allocated = 0;
1372
1373 /* the allocation must be done in the main thread to avoid
1374 locking problems */
1375 event.type = FF_ALLOC_EVENT;
1376 event.user.data1 = is;
1377 SDL_PushEvent(&event);
115329f1 1378
01310af2
FB
1379 /* wait until the picture is allocated */
1380 SDL_LockMutex(is->pictq_mutex);
1381 while (!vp->allocated && !is->videoq.abort_request) {
1382 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1383 }
1384 SDL_UnlockMutex(is->pictq_mutex);
1385
1386 if (is->videoq.abort_request)
1387 return -1;
1388 }
1389
638c9d91 1390 /* if the frame is not skipped, then display it */
01310af2 1391 if (vp->bmp) {
fbf1b885 1392 AVPicture pict;
917d2bb3
MN
1393#if CONFIG_AVFILTER
1394 if(vp->picref)
1395 avfilter_unref_pic(vp->picref);
1396 vp->picref = src_frame->opaque;
1397#endif
fbf1b885 1398
01310af2
FB
1399 /* get a pointer on the bitmap */
1400 SDL_LockYUVOverlay (vp->bmp);
1401
1402 dst_pix_fmt = PIX_FMT_YUV420P;
fbf1b885 1403 memset(&pict,0,sizeof(AVPicture));
01310af2
FB
1404 pict.data[0] = vp->bmp->pixels[0];
1405 pict.data[1] = vp->bmp->pixels[2];
1406 pict.data[2] = vp->bmp->pixels[1];
1407
1408 pict.linesize[0] = vp->bmp->pitches[0];
1409 pict.linesize[1] = vp->bmp->pitches[2];
1410 pict.linesize[2] = vp->bmp->pitches[1];
917d2bb3
MN
1411
1412#if CONFIG_AVFILTER
1413 pict_src.data[0] = src_frame->data[0];
1414 pict_src.data[1] = src_frame->data[1];
1415 pict_src.data[2] = src_frame->data[2];
1416
1417 pict_src.linesize[0] = src_frame->linesize[0];
1418 pict_src.linesize[1] = src_frame->linesize[1];
1419 pict_src.linesize[2] = src_frame->linesize[2];
1420
1421 //FIXME use direct rendering
1422 av_picture_copy(&pict, &pict_src,
1423 vp->pix_fmt, vp->width, vp->height);
1424#else
e43d7a18 1425 sws_flags = av_get_int(sws_opts, "sws_flags", NULL);
3ac56e28 1426 is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx,
917d2bb3 1427 vp->width, vp->height, vp->pix_fmt, vp->width, vp->height,
feb7bc67 1428 dst_pix_fmt, sws_flags, NULL, NULL, NULL);
3ac56e28 1429 if (is->img_convert_ctx == NULL) {
26ba8235
AB
1430 fprintf(stderr, "Cannot initialize the conversion context\n");
1431 exit(1);
1432 }
3ac56e28 1433 sws_scale(is->img_convert_ctx, src_frame->data, src_frame->linesize,
917d2bb3
MN
1434 0, vp->height, pict.data, pict.linesize);
1435#endif
01310af2
FB
1436 /* update the bitmap content */
1437 SDL_UnlockYUVOverlay(vp->bmp);
1438
638c9d91 1439 vp->pts = pts;
1a620dd7 1440 vp->pos = pos;
01310af2
FB
1441
1442 /* now we can update the picture count */
1443 if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE)
1444 is->pictq_windex = 0;
1445 SDL_LockMutex(is->pictq_mutex);
d38c9e7a
MN
1446 vp->target_clock= compute_target_time(vp->pts, is);
1447
01310af2
FB
1448 is->pictq_size++;
1449 SDL_UnlockMutex(is->pictq_mutex);
1450 }
638c9d91
FB
1451 return 0;
1452}
1453
115329f1
DB
1454/**
1455 * compute the exact PTS for the picture if it is omitted in the stream
267e9dfa
MN
1456 * @param pts1 the dts of the pkt / pts of the frame
1457 */
1a620dd7 1458static int output_picture2(VideoState *is, AVFrame *src_frame, double pts1, int64_t pos)
638c9d91
FB
1459{
1460 double frame_delay, pts;
115329f1 1461
638c9d91
FB
1462 pts = pts1;
1463
01310af2 1464 if (pts != 0) {
638c9d91 1465 /* update video clock with pts, if present */
01310af2
FB
1466 is->video_clock = pts;
1467 } else {
72ea344b
FB
1468 pts = is->video_clock;
1469 }
1470 /* update video clock for next frame */
01f4895c 1471 frame_delay = av_q2d(is->video_st->codec->time_base);
72ea344b
FB
1472 /* for MPEG2, the frame can be repeated, so we update the
1473 clock accordingly */
267e9dfa 1474 frame_delay += src_frame->repeat_pict * (frame_delay * 0.5);
72ea344b 1475 is->video_clock += frame_delay;
638c9d91
FB
1476
1477#if defined(DEBUG_SYNC) && 0
ff358eca
SS
1478 printf("frame_type=%c clock=%0.3f pts=%0.3f\n",
1479 av_get_pict_type_char(src_frame->pict_type), pts, pts1);
638c9d91 1480#endif
1a620dd7 1481 return queue_picture(is, src_frame, pts, pos);
01310af2 1482}
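/* If the decoder gave no usable pts, the picture is stamped with the running
   video_clock instead, and video_clock is then advanced by one frame duration
   (the codec time_base, plus half a frame per repeat_pict for repeated
   fields), so subsequent untimed frames still receive monotonically
   increasing timestamps. */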
1483
3966a574 1484static int get_video_frame(VideoState *is, AVFrame *frame, int64_t *pts, AVPacket *pkt)
01310af2 1485{
6c7d3ead 1486 int len1, got_picture, i;
01310af2 1487
01310af2 1488 if (packet_queue_get(&is->videoq, pkt, 1) < 0)
917d2bb3 1489 return -1;
39c6a118
MN
1490
1491 if(pkt->data == flush_pkt.data){
1492 avcodec_flush_buffers(is->video_st->codec);
6c7d3ead
MN
1493
1494 SDL_LockMutex(is->pictq_mutex);
1495 //Make sure there are no long delay timers (ideally we should just flush the queue but that's harder)
1496 for(i=0; i<VIDEO_PICTURE_QUEUE_SIZE; i++){
d38c9e7a 1497 is->pictq[i].target_clock= 0;
6c7d3ead
MN
1498 }
1499 while (is->pictq_size && !is->videoq.abort_request) {
1500 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1501 }
1a620dd7 1502 is->video_current_pos= -1;
6c7d3ead
MN
1503 SDL_UnlockMutex(is->pictq_mutex);
1504
41db429d
MN
1505 is->last_dts_for_fault_detection=
1506 is->last_pts_for_fault_detection= INT64_MIN;
967030eb 1507 is->frame_last_pts= AV_NOPTS_VALUE;
f7119e42 1508 is->frame_last_delay = 0;
b25453bd 1509 is->frame_timer = (double)av_gettime() / 1000000.0;
d38c9e7a
MN
1510 is->skip_frames= 1;
1511 is->skip_frames_index= 0;
917d2bb3 1512 return 0;
39c6a118
MN
1513 }
1514
638c9d91
FB
1515 /* NOTE: ipts is the PTS of the _first_ picture beginning in
1516 this packet, if any */
7fb262b5 1517 is->video_st->codec->reordered_opaque= pkt->pts;
bea18375 1518 len1 = avcodec_decode_video2(is->video_st->codec,
620e8baf 1519 frame, &got_picture,
bea18375 1520 pkt);
620e8baf 1521
99e0b12b 1522 if (got_picture) {
df7d6e48
SS
1523 if(pkt->dts != AV_NOPTS_VALUE){
1524 is->faulty_dts += pkt->dts <= is->last_dts_for_fault_detection;
1525 is->last_dts_for_fault_detection= pkt->dts;
1526 }
1527 if(frame->reordered_opaque != AV_NOPTS_VALUE){
1528 is->faulty_pts += frame->reordered_opaque <= is->last_pts_for_fault_detection;
1529 is->last_pts_for_fault_detection= frame->reordered_opaque;
1530 }
99e0b12b 1531 }
41db429d
MN
1532
1533 if( ( decoder_reorder_pts==1
ecbed31c 1534 || (decoder_reorder_pts && is->faulty_pts<is->faulty_dts)
41db429d 1535 || pkt->dts == AV_NOPTS_VALUE)
7fb262b5 1536 && frame->reordered_opaque != AV_NOPTS_VALUE)
917d2bb3 1537 *pts= frame->reordered_opaque;
620e8baf 1538 else if(pkt->dts != AV_NOPTS_VALUE)
917d2bb3 1539 *pts= pkt->dts;
620e8baf 1540 else
917d2bb3
MN
1541 *pts= 0;
1542
fb966f99
MN
1543// if (len1 < 0)
1544// break;
d38c9e7a
MN
1545 if (got_picture){
1546 is->skip_frames_index += 1;
1547 if(is->skip_frames_index >= is->skip_frames){
1548 is->skip_frames_index -= FFMAX(is->skip_frames, 1.0);
1549 return 1;
1550 }
1551
1552 }
917d2bb3 1553 return 0;
1554}
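/* Timestamp selection above: the packet pts is carried through the decoder in
   reordered_opaque, and the faulty_pts / faulty_dts counters record how often
   each candidate timestamp went non-monotonic. With the default
   decoder_reorder_pts == -1 the code then trusts whichever of
   frame->reordered_opaque (pts) or pkt->dts has misbehaved less, falling back
   to 0 when neither is known. The function returns 1 only for frames that
   survive the skip_frames decimation used for frame dropping. */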
1555
1556#if CONFIG_AVFILTER
1557typedef struct {
1558 VideoState *is;
1559 AVFrame *frame;
dd0c789b 1560 int use_dr1;
917d2bb3
MN
1561} FilterPriv;
1562
dd0c789b
BB
1563static int input_get_buffer(AVCodecContext *codec, AVFrame *pic)
1564{
1565 AVFilterContext *ctx = codec->opaque;
1566 AVFilterPicRef *ref;
1567 int perms = AV_PERM_WRITE;
65929418 1568 int i, w, h, stride[4];
dd0c789b
BB
1569 unsigned edge;
1570
1571 if(pic->buffer_hints & FF_BUFFER_HINTS_VALID) {
1572 if(pic->buffer_hints & FF_BUFFER_HINTS_READABLE) perms |= AV_PERM_READ;
1573 if(pic->buffer_hints & FF_BUFFER_HINTS_PRESERVE) perms |= AV_PERM_PRESERVE;
1574 if(pic->buffer_hints & FF_BUFFER_HINTS_REUSABLE) perms |= AV_PERM_REUSE2;
1575 }
1576 if(pic->reference) perms |= AV_PERM_READ | AV_PERM_PRESERVE;
1577
1578 w = codec->width;
1579 h = codec->height;
1580 avcodec_align_dimensions2(codec, &w, &h, stride);
1581 edge = codec->flags & CODEC_FLAG_EMU_EDGE ? 0 : avcodec_get_edge_width();
1582 w += edge << 1;
1583 h += edge << 1;
1584
1585 if(!(ref = avfilter_get_video_buffer(ctx->outputs[0], perms, w, h)))
1586 return -1;
1587
1588 ref->w = codec->width;
1589 ref->h = codec->height;
65929418 1590 for(i = 0; i < 3; i ++) {
dd0c789b
BB
1591 unsigned hshift = i == 0 ? 0 : av_pix_fmt_descriptors[ref->pic->format].log2_chroma_w;
1592 unsigned vshift = i == 0 ? 0 : av_pix_fmt_descriptors[ref->pic->format].log2_chroma_h;
1593
3635c07b 1594 if (ref->data[i]) {
e53ca636 1595 ref->data[i] += (edge >> hshift) + ((edge * ref->linesize[i]) >> vshift);
3635c07b 1596 }
dd0c789b
BB
1597 pic->data[i] = ref->data[i];
1598 pic->linesize[i] = ref->linesize[i];
1599 }
1600 pic->opaque = ref;
1601 pic->age = INT_MAX;
1602 pic->type = FF_BUFFER_TYPE_USER;
1603 return 0;
1604}
1605
1606static void input_release_buffer(AVCodecContext *codec, AVFrame *pic)
1607{
1608 memset(pic->data, 0, sizeof(pic->data));
1609 avfilter_unref_pic(pic->opaque);
1610}
1611
917d2bb3
MN
1612static int input_init(AVFilterContext *ctx, const char *args, void *opaque)
1613{
1614 FilterPriv *priv = ctx->priv;
dd0c789b 1615 AVCodecContext *codec;
917d2bb3
MN
1616 if(!opaque) return -1;
1617
1618 priv->is = opaque;
dd0c789b
BB
1619 codec = priv->is->video_st->codec;
1620 codec->opaque = ctx;
1621 if(codec->codec->capabilities & CODEC_CAP_DR1) {
1622 priv->use_dr1 = 1;
1623 codec->get_buffer = input_get_buffer;
1624 codec->release_buffer = input_release_buffer;
1625 }
1626
917d2bb3
MN
1627 priv->frame = avcodec_alloc_frame();
1628
1629 return 0;
1630}
1631
1632static void input_uninit(AVFilterContext *ctx)
1633{
1634 FilterPriv *priv = ctx->priv;
1635 av_free(priv->frame);
1636}
1637
1638static int input_request_frame(AVFilterLink *link)
1639{
1640 FilterPriv *priv = link->src->priv;
1641 AVFilterPicRef *picref;
3966a574 1642 int64_t pts = 0;
917d2bb3
MN
1643 AVPacket pkt;
1644 int ret;
1645
1646 while (!(ret = get_video_frame(priv->is, priv->frame, &pts, &pkt)))
1647 av_free_packet(&pkt);
1648 if (ret < 0)
1649 return -1;
1650
dd0c789b 1651 if(priv->use_dr1) {
c41c5b02 1652 picref = avfilter_ref_pic(priv->frame->opaque, ~0);
dd0c789b 1653 } else {
cf097cbc
BB
1654 picref = avfilter_get_video_buffer(link, AV_PERM_WRITE, link->w, link->h);
1655 av_picture_copy((AVPicture *)&picref->data, (AVPicture *)priv->frame,
1656 picref->pic->format, link->w, link->h);
dd0c789b 1657 }
917d2bb3
MN
1658 av_free_packet(&pkt);
1659
1660 picref->pts = pts;
bb409513 1661 picref->pos = pkt.pos;
917d2bb3 1662 picref->pixel_aspect = priv->is->video_st->codec->sample_aspect_ratio;
c41c5b02 1663 avfilter_start_frame(link, picref);
917d2bb3
MN
1664 avfilter_draw_slice(link, 0, link->h, 1);
1665 avfilter_end_frame(link);
917d2bb3
MN
1666
1667 return 0;
1668}
1669
1670static int input_query_formats(AVFilterContext *ctx)
1671{
1672 FilterPriv *priv = ctx->priv;
1673 enum PixelFormat pix_fmts[] = {
1674 priv->is->video_st->codec->pix_fmt, PIX_FMT_NONE
1675 };
1676
1677 avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
1678 return 0;
1679}
1680
1681static int input_config_props(AVFilterLink *link)
1682{
1683 FilterPriv *priv = link->src->priv;
1684 AVCodecContext *c = priv->is->video_st->codec;
1685
1686 link->w = c->width;
1687 link->h = c->height;
1688
1689 return 0;
1690}
1691
1692static AVFilter input_filter =
1693{
1694 .name = "ffplay_input",
1695
1696 .priv_size = sizeof(FilterPriv),
1697
1698 .init = input_init,
1699 .uninit = input_uninit,
1700
1701 .query_formats = input_query_formats,
1702
1703 .inputs = (AVFilterPad[]) {{ .name = NULL }},
1704 .outputs = (AVFilterPad[]) {{ .name = "default",
72415b2a 1705 .type = AVMEDIA_TYPE_VIDEO,
917d2bb3
MN
1706 .request_frame = input_request_frame,
1707 .config_props = input_config_props, },
1708 { .name = NULL }},
1709};
1710
1711static void output_end_frame(AVFilterLink *link)
1712{
1713}
1714
1715static int output_query_formats(AVFilterContext *ctx)
1716{
1717 enum PixelFormat pix_fmts[] = { PIX_FMT_YUV420P, PIX_FMT_NONE };
1718
1719 avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
1720 return 0;
1721}
1722
1723static int get_filtered_video_frame(AVFilterContext *ctx, AVFrame *frame,
bb409513 1724 int64_t *pts, int64_t *pos)
917d2bb3
MN
1725{
1726 AVFilterPicRef *pic;
1727
1728 if(avfilter_request_frame(ctx->inputs[0]))
1729 return -1;
1730 if(!(pic = ctx->inputs[0]->cur_pic))
1731 return -1;
1732 ctx->inputs[0]->cur_pic = NULL;
1733
1734 frame->opaque = pic;
1735 *pts = pic->pts;
bb409513 1736 *pos = pic->pos;
917d2bb3
MN
1737
1738 memcpy(frame->data, pic->data, sizeof(frame->data));
1739 memcpy(frame->linesize, pic->linesize, sizeof(frame->linesize));
1740
1741 return 1;
1742}
1743
1744static AVFilter output_filter =
1745{
1746 .name = "ffplay_output",
1747
1748 .query_formats = output_query_formats,
1749
1750 .inputs = (AVFilterPad[]) {{ .name = "default",
72415b2a 1751 .type = AVMEDIA_TYPE_VIDEO,
917d2bb3
MN
1752 .end_frame = output_end_frame,
1753 .min_perms = AV_PERM_READ, },
1754 { .name = NULL }},
1755 .outputs = (AVFilterPad[]) {{ .name = NULL }},
1756};
1757#endif /* CONFIG_AVFILTER */
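/* With CONFIG_AVFILTER, video_thread() below builds a small filter graph
   instead of consuming decoded frames directly: the "ffplay_input" source
   wraps get_video_frame() (optionally rendering straight into filter-owned
   buffers via input_get_buffer()), the user-supplied -vf chain in "vfilters"
   is parsed in between, and "ffplay_output" is the sink from which
   get_filtered_video_frame() pulls the final frames, their pts and positions. */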
1758
1759static int video_thread(void *arg)
1760{
1761 VideoState *is = arg;
1762 AVFrame *frame= avcodec_alloc_frame();
4903b5ca 1763 int64_t pts_int;
917d2bb3
MN
1764 double pts;
1765 int ret;
1766
1767#if CONFIG_AVFILTER
4903b5ca 1768 int64_t pos;
3f073fa2 1769 char sws_flags_str[128];
917d2bb3
MN
1770 AVFilterContext *filt_src = NULL, *filt_out = NULL;
1771 AVFilterGraph *graph = av_mallocz(sizeof(AVFilterGraph));
3f073fa2
SS
1772 snprintf(sws_flags_str, sizeof(sws_flags_str), "flags=%d", sws_flags);
1773 graph->scale_sws_opts = av_strdup(sws_flags_str);
917d2bb3
MN
1774
1775 if(!(filt_src = avfilter_open(&input_filter, "src"))) goto the_end;
1776 if(!(filt_out = avfilter_open(&output_filter, "out"))) goto the_end;
1777
1778 if(avfilter_init_filter(filt_src, NULL, is)) goto the_end;
1779 if(avfilter_init_filter(filt_out, NULL, frame)) goto the_end;
1780
1781
1782 if(vfilters) {
1783 AVFilterInOut *outputs = av_malloc(sizeof(AVFilterInOut));
1784 AVFilterInOut *inputs = av_malloc(sizeof(AVFilterInOut));
1785
1786 outputs->name = av_strdup("in");
1787 outputs->filter = filt_src;
1788 outputs->pad_idx = 0;
1789 outputs->next = NULL;
1790
1791 inputs->name = av_strdup("out");
1792 inputs->filter = filt_out;
1793 inputs->pad_idx = 0;
1794 inputs->next = NULL;
1795
1796 if (avfilter_graph_parse(graph, vfilters, inputs, outputs, NULL) < 0)
1797 goto the_end;
1798 av_freep(&vfilters);
1799 } else {
1800 if(avfilter_link(filt_src, 0, filt_out, 0) < 0) goto the_end;
1801 }
1802 avfilter_graph_add_filter(graph, filt_src);
1803 avfilter_graph_add_filter(graph, filt_out);
1804
1805 if(avfilter_graph_check_validity(graph, NULL)) goto the_end;
1806 if(avfilter_graph_config_formats(graph, NULL)) goto the_end;
1807 if(avfilter_graph_config_links(graph, NULL)) goto the_end;
1808
1809 is->out_video_filter = filt_out;
1810#endif
1811
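    /* main loop: wait while paused, grab the next frame (from the filter
       graph or directly from the decoder), convert its pts to seconds and
       queue it for display; in frame-step mode pause again after each picture */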
1812 for(;;) {
1813#if !CONFIG_AVFILTER
1814 AVPacket pkt;
1815#endif
1816 while (is->paused && !is->videoq.abort_request)
1817 SDL_Delay(10);
1818#if CONFIG_AVFILTER
bb409513 1819 ret = get_filtered_video_frame(filt_out, frame, &pts_int, &pos);
917d2bb3
MN
1820#else
1821 ret = get_video_frame(is, frame, &pts_int, &pkt);
1822#endif
1823
1824 if (ret < 0) goto the_end;
1825
1826 if (!ret)
1827 continue;
1828
3966a574 1829 pts = pts_int*av_q2d(is->video_st->time_base);
917d2bb3
MN
1830
1831#if CONFIG_AVFILTER
bb409513 1832 ret = output_picture2(is, frame, pts, pos);
917d2bb3 1833#else
fca62599 1834 ret = output_picture2(is, frame, pts, pkt.pos);
917d2bb3
MN
1835 av_free_packet(&pkt);
1836#endif
1837 if (ret < 0)
1838 goto the_end;
1839
115329f1 1840 if (step)
bba04f1e
WH
1841 if (cur_stream)
1842 stream_pause(cur_stream);
01310af2
FB
1843 }
1844 the_end:
917d2bb3
MN
1845#if CONFIG_AVFILTER
1846 avfilter_graph_destroy(graph);
1847 av_freep(&graph);
1848#endif
c6b1edc9 1849 av_free(frame);
01310af2
FB
1850 return 0;
1851}
1852
72ce053b
IC
1853static int subtitle_thread(void *arg)
1854{
1855 VideoState *is = arg;
1856 SubPicture *sp;
1857 AVPacket pkt1, *pkt = &pkt1;
1858 int len1, got_subtitle;
1859 double pts;
1860 int i, j;
1861 int r, g, b, y, u, v, a;
1862
1863 for(;;) {
1864 while (is->paused && !is->subtitleq.abort_request) {
1865 SDL_Delay(10);
1866 }
1867 if (packet_queue_get(&is->subtitleq, pkt, 1) < 0)
1868 break;
115329f1 1869
39c6a118
MN
1870 if(pkt->data == flush_pkt.data){
1871 avcodec_flush_buffers(is->subtitle_st->codec);
1872 continue;
1873 }
72ce053b
IC
1874 SDL_LockMutex(is->subpq_mutex);
1875 while (is->subpq_size >= SUBPICTURE_QUEUE_SIZE &&
1876 !is->subtitleq.abort_request) {
1877 SDL_CondWait(is->subpq_cond, is->subpq_mutex);
1878 }
1879 SDL_UnlockMutex(is->subpq_mutex);
115329f1 1880
72ce053b
IC
1881 if (is->subtitleq.abort_request)
1882 goto the_end;
115329f1 1883
72ce053b
IC
1884 sp = &is->subpq[is->subpq_windex];
1885
1886 /* NOTE: pts is the PTS of the _first_ picture beginning in
1887 this packet, if any */
1888 pts = 0;
1889 if (pkt->pts != AV_NOPTS_VALUE)
1890 pts = av_q2d(is->subtitle_st->time_base)*pkt->pts;
1891
bea18375 1892 len1 = avcodec_decode_subtitle2(is->subtitle_st->codec,
115329f1 1893 &sp->sub, &got_subtitle,
bea18375 1894 pkt);
72ce053b
IC
1895// if (len1 < 0)
1896// break;
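        /* for bitmap subtitles (format 0), convert every palette entry from
           RGBA to CCIR-range YUVA in place so the video path can blend the
           subtitle directly in YUV */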
1897 if (got_subtitle && sp->sub.format == 0) {
1898 sp->pts = pts;
115329f1 1899
72ce053b
IC
1900 for (i = 0; i < sp->sub.num_rects; i++)
1901 {
db4fac64 1902 for (j = 0; j < sp->sub.rects[i]->nb_colors; j++)
72ce053b 1903 {
25b4c651 1904 RGBA_IN(r, g, b, a, (uint32_t*)sp->sub.rects[i]->pict.data[1] + j);
72ce053b
IC
1905 y = RGB_TO_Y_CCIR(r, g, b);
1906 u = RGB_TO_U_CCIR(r, g, b, 0);
1907 v = RGB_TO_V_CCIR(r, g, b, 0);
25b4c651 1908 YUVA_OUT((uint32_t*)sp->sub.rects[i]->pict.data[1] + j, y, u, v, a);
72ce053b
IC
1909 }
1910 }
1911
1912 /* now we can update the picture count */
1913 if (++is->subpq_windex == SUBPICTURE_QUEUE_SIZE)
1914 is->subpq_windex = 0;
1915 SDL_LockMutex(is->subpq_mutex);
1916 is->subpq_size++;
1917 SDL_UnlockMutex(is->subpq_mutex);
1918 }
1919 av_free_packet(pkt);
115329f1 1920// if (step)
72ce053b
IC
1921// if (cur_stream)
1922// stream_pause(cur_stream);
1923 }
1924 the_end:
1925 return 0;
1926}
1927
01310af2
FB
1928/* copy samples for viewing in editor window */
1929static void update_sample_display(VideoState *is, short *samples, int samples_size)
1930{
1931 int size, len, channels;
1932
01f4895c 1933 channels = is->audio_st->codec->channels;
01310af2
FB
1934
1935 size = samples_size / sizeof(short);
1936 while (size > 0) {
1937 len = SAMPLE_ARRAY_SIZE - is->sample_array_index;
1938 if (len > size)
1939 len = size;
1940 memcpy(is->sample_array + is->sample_array_index, samples, len * sizeof(short));
1941 samples += len;
1942 is->sample_array_index += len;
1943 if (is->sample_array_index >= SAMPLE_ARRAY_SIZE)
1944 is->sample_array_index = 0;
1945 size -= len;
1946 }
1947}
1948
01310af2
FB
1949/* return the new audio buffer size (samples can be added or deleted
1950 to get better sync if the master clock is the video or an external clock) */
115329f1 1951static int synchronize_audio(VideoState *is, short *samples,
638c9d91 1952 int samples_size1, double pts)
01310af2 1953{
638c9d91 1954 int n, samples_size;
01310af2 1955 double ref_clock;
115329f1 1956
01f4895c 1957 n = 2 * is->audio_st->codec->channels;
638c9d91 1958 samples_size = samples_size1;
01310af2 1959
01310af2 1960 /* if not master, then we try to remove or add samples to correct the clock */
01310af2 1961 if (((is->av_sync_type == AV_SYNC_VIDEO_MASTER && is->video_st) ||
638c9d91
FB
1962 is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
1963 double diff, avg_diff;
01310af2 1964 int wanted_size, min_size, max_size, nb_samples;
115329f1 1965
638c9d91
FB
1966 ref_clock = get_master_clock(is);
1967 diff = get_audio_clock(is) - ref_clock;
115329f1 1968
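        /* keep an exponentially weighted running sum of the A-V difference:
           audio_diff_cum accumulates diff with weight audio_diff_avg_coef, so
           the average is cum * (1 - coef); the estimate is only trusted after
           AUDIO_DIFF_AVG_NB measures, and the correction is clamped to
           SAMPLE_CORRECTION_PERCENT_MAX percent of the buffer */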
638c9d91
FB
1969 if (diff < AV_NOSYNC_THRESHOLD) {
1970 is->audio_diff_cum = diff + is->audio_diff_avg_coef * is->audio_diff_cum;
1971 if (is->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) {
1972 /* not enough measures to have a correct estimate */
1973 is->audio_diff_avg_count++;
1974 } else {
1975 /* estimate the A-V difference */
1976 avg_diff = is->audio_diff_cum * (1.0 - is->audio_diff_avg_coef);
1977
1978 if (fabs(avg_diff) >= is->audio_diff_threshold) {
01f4895c 1979 wanted_size = samples_size + ((int)(diff * is->audio_st->codec->sample_rate) * n);
638c9d91 1980 nb_samples = samples_size / n;
115329f1 1981
638c9d91
FB
1982 min_size = ((nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
1983 max_size = ((nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
1984 if (wanted_size < min_size)
1985 wanted_size = min_size;
1986 else if (wanted_size > max_size)
1987 wanted_size = max_size;
115329f1 1988
638c9d91
FB
1989 /* add or remove samples to correct the synchronization */
1990 if (wanted_size < samples_size) {
1991 /* remove samples */
1992 samples_size = wanted_size;
1993 } else if (wanted_size > samples_size) {
1994 uint8_t *samples_end, *q;
1995 int nb;
115329f1 1996
638c9d91
FB
1997 /* add samples */
1998 nb = (wanted_size - samples_size);
1999 samples_end = (uint8_t *)samples + samples_size - n;
2000 q = samples_end + n;
2001 while (nb > 0) {
2002 memcpy(q, samples_end, n);
2003 q += n;
2004 nb -= n;
2005 }
2006 samples_size = wanted_size;
2007 }
2008 }
2009#if 0
115329f1
DB
2010 printf("diff=%f adiff=%f sample_diff=%d apts=%0.3f vpts=%0.3f %f\n",
2011 diff, avg_diff, samples_size - samples_size1,
638c9d91
FB
2012 is->audio_clock, is->video_clock, is->audio_diff_threshold);
2013#endif
01310af2 2014 }
638c9d91
FB
2015 } else {
2016 /* too big difference : may be initial PTS errors, so
2017 reset A-V filter */
2018 is->audio_diff_avg_count = 0;
2019 is->audio_diff_cum = 0;
01310af2
FB
2020 }
2021 }
2022
01310af2
FB
2023 return samples_size;
2024}
2025
2026/* decode one audio frame and return its uncompressed size */
5a4476e2 2027static int audio_decode_frame(VideoState *is, double *pts_ptr)
01310af2 2028{
bea18375 2029 AVPacket *pkt_temp = &is->audio_pkt_temp;
01310af2 2030 AVPacket *pkt = &is->audio_pkt;
abdff646 2031 AVCodecContext *dec= is->audio_st->codec;
72ea344b 2032 int n, len1, data_size;
01310af2
FB
2033 double pts;
2034
2035 for(;;) {
72ea344b 2036 /* NOTE: the audio packet can contain several frames */
bea18375 2037 while (pkt_temp->size > 0) {
5a4476e2 2038 data_size = sizeof(is->audio_buf1);
bea18375 2039 len1 = avcodec_decode_audio3(dec,
5a4476e2 2040 (int16_t *)is->audio_buf1, &data_size,
bea18375 2041 pkt_temp);
72ea344b
FB
2042 if (len1 < 0) {
2043 /* if error, we skip the frame */
bea18375 2044 pkt_temp->size = 0;
01310af2 2045 break;
72ea344b 2046 }
115329f1 2047
bea18375
TB
2048 pkt_temp->data += len1;
2049 pkt_temp->size -= len1;
72ea344b
FB
2050 if (data_size <= 0)
2051 continue;
5a4476e2
PR
2052
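            /* if the decoder's output sample format is not signed 16 bit,
               (re)create an audio convert context and convert into
               audio_buf2; otherwise the decoded data in audio_buf1 is used
               as is */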
2053 if (dec->sample_fmt != is->audio_src_fmt) {
2054 if (is->reformat_ctx)
2055 av_audio_convert_free(is->reformat_ctx);
2056 is->reformat_ctx= av_audio_convert_alloc(SAMPLE_FMT_S16, 1,
2057 dec->sample_fmt, 1, NULL, 0);
2058 if (!is->reformat_ctx) {
2059 fprintf(stderr, "Cannot convert %s sample format to %s sample format\n",
2060 avcodec_get_sample_fmt_name(dec->sample_fmt),
2061 avcodec_get_sample_fmt_name(SAMPLE_FMT_S16));
2062 break;
2063 }
2064 is->audio_src_fmt= dec->sample_fmt;
2065 }
2066
2067 if (is->reformat_ctx) {
2068 const void *ibuf[6]= {is->audio_buf1};
2069 void *obuf[6]= {is->audio_buf2};
2070 int istride[6]= {av_get_bits_per_sample_format(dec->sample_fmt)/8};
2071 int ostride[6]= {2};
2072 int len= data_size/istride[0];
2073 if (av_audio_convert(is->reformat_ctx, obuf, ostride, ibuf, istride, len)<0) {
2074 printf("av_audio_convert() failed\n");
2075 break;
2076 }
2077 is->audio_buf= is->audio_buf2;
2078 /* FIXME: existing code assumes that data_size equals framesize*channels*2
2079 remove this legacy cruft */
2080 data_size= len*2;
2081 }else{
2082 is->audio_buf= is->audio_buf1;
2083 }
2084
72ea344b
FB
2085 /* if no pts, then compute it */
2086 pts = is->audio_clock;
2087 *pts_ptr = pts;
abdff646 2088 n = 2 * dec->channels;
115329f1 2089 is->audio_clock += (double)data_size /
abdff646 2090 (double)(n * dec->sample_rate);
638c9d91 2091#if defined(DEBUG_SYNC)
72ea344b
FB
2092 {
2093 static double last_clock;
2094 printf("audio: delay=%0.3f clock=%0.3f pts=%0.3f\n",
2095 is->audio_clock - last_clock,
2096 is->audio_clock, pts);
2097 last_clock = is->audio_clock;
01310af2 2098 }
72ea344b
FB
2099#endif
2100 return data_size;
01310af2
FB
2101 }
2102
72ea344b
FB
2103 /* free the current packet */
2104 if (pkt->data)
01310af2 2105 av_free_packet(pkt);
115329f1 2106
72ea344b
FB
2107 if (is->paused || is->audioq.abort_request) {
2108 return -1;
2109 }
115329f1 2110
01310af2
FB
2111 /* read next packet */
2112 if (packet_queue_get(&is->audioq, pkt, 1) < 0)
2113 return -1;
39c6a118 2114 if(pkt->data == flush_pkt.data){
abdff646 2115 avcodec_flush_buffers(dec);
39c6a118
MN
2116 continue;
2117 }
2118
bea18375
TB
2119 pkt_temp->data = pkt->data;
2120 pkt_temp->size = pkt->size;
115329f1 2121
72ea344b
FB
2122 /* update the audio clock with the pts, if available */
2123 if (pkt->pts != AV_NOPTS_VALUE) {
c0df9d75 2124 is->audio_clock = av_q2d(is->audio_st->time_base)*pkt->pts;
72ea344b 2125 }
01310af2
FB
2126 }
2127}
2128
638c9d91
FB
2129/* get the amount of buffered audio output data, in bytes. With SDL, we
2130 cannot have precise information on the hardware buffer fullness */
2131static int audio_write_get_buf_size(VideoState *is)
01310af2 2132{
b09b580b 2133 return is->audio_buf_size - is->audio_buf_index;
01310af2
FB
2134}
2135
2136
2137/* prepare a new audio buffer */
358061f6 2138static void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
01310af2
FB
2139{
2140 VideoState *is = opaque;
2141 int audio_size, len1;
2142 double pts;
2143
2144 audio_callback_time = av_gettime();
115329f1 2145
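    /* SDL asks for len bytes: refill audio_buf by decoding as needed
       (outputting silence on error), apply the A-V sync correction, then copy
       slices of the buffer into the SDL stream until the request is satisfied */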
01310af2
FB
2146 while (len > 0) {
2147 if (is->audio_buf_index >= is->audio_buf_size) {
5a4476e2 2148 audio_size = audio_decode_frame(is, &pts);
01310af2
FB
2149 if (audio_size < 0) {
2150 /* if error, just output silence */
1a1078fa 2151 is->audio_buf = is->audio_buf1;
01310af2
FB
2152 is->audio_buf_size = 1024;
2153 memset(is->audio_buf, 0, is->audio_buf_size);
2154 } else {
2155 if (is->show_audio)
2156 update_sample_display(is, (int16_t *)is->audio_buf, audio_size);
115329f1 2157 audio_size = synchronize_audio(is, (int16_t *)is->audio_buf, audio_size,
01310af2
FB
2158 pts);
2159 is->audio_buf_size = audio_size;
2160 }
2161 is->audio_buf_index = 0;
2162 }
2163 len1 = is->audio_buf_size - is->audio_buf_index;
2164 if (len1 > len)
2165 len1 = len;
2166 memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);
2167 len -= len1;
2168 stream += len1;
2169 is->audio_buf_index += len1;
2170 }
2171}
2172
01310af2
FB
2173/* open a given stream. Return 0 if OK */
2174static int stream_component_open(VideoState *is, int stream_index)
2175{
2176 AVFormatContext *ic = is->ic;
fe74099a 2177 AVCodecContext *avctx;
01310af2
FB
2178 AVCodec *codec;
2179 SDL_AudioSpec wanted_spec, spec;
2180
2181 if (stream_index < 0 || stream_index >= ic->nb_streams)
2182 return -1;
fe74099a 2183 avctx = ic->streams[stream_index]->codec;
115329f1 2184
01310af2 2185 /* prepare audio output */
72415b2a 2186 if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) {
fe74099a
SS
2187 if (avctx->channels > 0) {
2188 avctx->request_channels = FFMIN(2, avctx->channels);
94eadc8b 2189 } else {
fe74099a 2190 avctx->request_channels = 2;
638c9d91 2191 }
01310af2
FB
2192 }
2193
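    /* find the decoder and propagate the command line decoding options
       (lowres, idct, skip_*, error handling, thread count, ...) to the codec
       context before opening it */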
fe74099a
SS
2194 codec = avcodec_find_decoder(avctx->codec_id);
2195 avctx->debug_mv = debug_mv;
2196 avctx->debug = debug;
2197 avctx->workaround_bugs = workaround_bugs;
2198 avctx->lowres = lowres;
2199 if(lowres) avctx->flags |= CODEC_FLAG_EMU_EDGE;
2200 avctx->idct_algo= idct;
2201 if(fast) avctx->flags2 |= CODEC_FLAG2_FAST;
2202 avctx->skip_frame= skip_frame;
2203 avctx->skip_idct= skip_idct;
2204 avctx->skip_loop_filter= skip_loop_filter;
2205 avctx->error_recognition= error_recognition;
2206 avctx->error_concealment= error_concealment;
2207 avcodec_thread_init(avctx, thread_count);
2208
2209 set_context_opts(avctx, avcodec_opts[avctx->codec_type], 0);
e43d7a18 2210
01310af2 2211 if (!codec ||
fe74099a 2212 avcodec_open(avctx, codec) < 0)
01310af2 2213 return -1;
51b73087
JR
2214
2215 /* prepare audio output */
72415b2a 2216 if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) {
fe74099a 2217 wanted_spec.freq = avctx->sample_rate;
51b73087 2218 wanted_spec.format = AUDIO_S16SYS;
fe74099a 2219 wanted_spec.channels = avctx->channels;
51b73087
JR
2220 wanted_spec.silence = 0;
2221 wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
2222 wanted_spec.callback = sdl_audio_callback;
2223 wanted_spec.userdata = is;
2224 if (SDL_OpenAudio(&wanted_spec, &spec) < 0) {
2225 fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
2226 return -1;
2227 }
2228 is->audio_hw_buf_size = spec.size;
5a4476e2 2229 is->audio_src_fmt= SAMPLE_FMT_S16;
51b73087
JR
2230 }
2231
3f3fe38d 2232 ic->streams[stream_index]->discard = AVDISCARD_DEFAULT;
fe74099a 2233 switch(avctx->codec_type) {
72415b2a 2234 case AVMEDIA_TYPE_AUDIO:
01310af2
FB
2235 is->audio_stream = stream_index;
2236 is->audio_st = ic->streams[stream_index];
2237 is->audio_buf_size = 0;
2238 is->audio_buf_index = 0;
638c9d91
FB
2239
2240 /* init averaging filter */
2241 is->audio_diff_avg_coef = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
2242 is->audio_diff_avg_count = 0;
2243 /* since we do not have a precise enough estimate of the audio FIFO
2244 fullness, we correct audio sync only above this threshold */
fe74099a 2245 is->audio_diff_threshold = 2.0 * SDL_AUDIO_BUFFER_SIZE / avctx->sample_rate;
638c9d91 2246
01310af2
FB
2247 memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
2248 packet_queue_init(&is->audioq);
bb270c08 2249 SDL_PauseAudio(0);
01310af2 2250 break;
72415b2a 2251 case AVMEDIA_TYPE_VIDEO:
01310af2
FB
2252 is->video_stream = stream_index;
2253 is->video_st = ic->streams[stream_index];
2254
68aefbe8 2255// is->video_current_pts_time = av_gettime();
638c9d91 2256
01310af2
FB
2257 packet_queue_init(&is->videoq);
2258 is->video_tid = SDL_CreateThread(video_thread, is);
2259 break;
72415b2a 2260 case AVMEDIA_TYPE_SUBTITLE:
72ce053b
IC
2261 is->subtitle_stream = stream_index;
2262 is->subtitle_st = ic->streams[stream_index];
2263 packet_queue_init(&is->subtitleq);
115329f1 2264
72ce053b
IC
2265 is->subtitle_tid = SDL_CreateThread(subtitle_thread, is);
2266 break;
01310af2
FB
2267 default:
2268 break;
2269 }
2270 return 0;
2271}
2272
2273static void stream_component_close(VideoState *is, int stream_index)
2274{
2275 AVFormatContext *ic = is->ic;
fe74099a 2276 AVCodecContext *avctx;
115329f1 2277
72ce053b
IC
2278 if (stream_index < 0 || stream_index >= ic->nb_streams)
2279 return;
fe74099a 2280 avctx = ic->streams[stream_index]->codec;
01310af2 2281
fe74099a 2282 switch(avctx->codec_type) {
72415b2a 2283 case AVMEDIA_TYPE_AUDIO:
01310af2
FB
2284 packet_queue_abort(&is->audioq);
2285
2286 SDL_CloseAudio();
2287
2288 packet_queue_end(&is->audioq);
5a4476e2
PR
2289 if (is->reformat_ctx)
2290 av_audio_convert_free(is->reformat_ctx);
bc77fce6 2291 is->reformat_ctx = NULL;
01310af2 2292 break;
72415b2a 2293 case AVMEDIA_TYPE_VIDEO:
01310af2
FB
2294 packet_queue_abort(&is->videoq);
2295
2296 /* note: we also signal this mutex to make sure we deblock the
2297 video thread in all cases */
2298 SDL_LockMutex(is->pictq_mutex);
2299 SDL_CondSignal(is->pictq_cond);
2300 SDL_UnlockMutex(is->pictq_mutex);
2301
2302 SDL_WaitThread(is->video_tid, NULL);
2303
2304 packet_queue_end(&is->videoq);
2305 break;
72415b2a 2306 case AVMEDIA_TYPE_SUBTITLE:
72ce053b 2307 packet_queue_abort(&is->subtitleq);
115329f1 2308
72ce053b
IC
2309 /* note: we also signal this mutex to make sure we deblock the
2310 subtitle thread in all cases */
2311 SDL_LockMutex(is->subpq_mutex);
2312 is->subtitle_stream_changed = 1;
115329f1 2313
72ce053b
IC
2314 SDL_CondSignal(is->subpq_cond);
2315 SDL_UnlockMutex(is->subpq_mutex);
2316
2317 SDL_WaitThread(is->subtitle_tid, NULL);
2318
2319 packet_queue_end(&is->subtitleq);
2320 break;
01310af2
FB
2321 default:
2322 break;
2323 }
2324
3f3fe38d 2325 ic->streams[stream_index]->discard = AVDISCARD_ALL;
fe74099a
SS
2326 avcodec_close(avctx);
2327 switch(avctx->codec_type) {
72415b2a 2328 case AVMEDIA_TYPE_AUDIO:
01310af2
FB
2329 is->audio_st = NULL;
2330 is->audio_stream = -1;
2331 break;
72415b2a 2332 case AVMEDIA_TYPE_VIDEO:
01310af2
FB
2333 is->video_st = NULL;
2334 is->video_stream = -1;
2335 break;
72415b2a 2336 case AVMEDIA_TYPE_SUBTITLE:
72ce053b
IC
2337 is->subtitle_st = NULL;
2338 is->subtitle_stream = -1;
2339 break;
01310af2
FB
2340 default:
2341 break;
2342 }
2343}
2344
416e3508
FB
2345/* since we have only one decoding thread, we can use a global
2346 variable instead of a thread local variable */
2347static VideoState *global_video_state;
2348
2349static int decode_interrupt_cb(void)
2350{
2351 return (global_video_state && global_video_state->abort_request);
2352}
01310af2
FB
2353
2354/* this thread gets the stream from the disk or the network */
2355static int decode_thread(void *arg)
2356{
2357 VideoState *is = arg;
2358 AVFormatContext *ic;
6625a3de 2359 int err, i, ret;
72415b2a
SS
2360 int st_index[AVMEDIA_TYPE_NB];
2361 int st_count[AVMEDIA_TYPE_NB]={0};
2362 int st_best_packet_count[AVMEDIA_TYPE_NB];
01310af2 2363 AVPacket pkt1, *pkt = &pkt1;
61890b02 2364 AVFormatParameters params, *ap = &params;
75bb7b0a 2365 int eof=0;
d834d63b 2366 int pkt_in_play_range = 0;
01310af2 2367
6299a229
MN
2368 ic = avformat_alloc_context();
2369
6625a3de 2370 memset(st_index, -1, sizeof(st_index));
9f7490a0 2371 memset(st_best_packet_count, -1, sizeof(st_best_packet_count));
01310af2
FB
2372 is->video_stream = -1;
2373 is->audio_stream = -1;
72ce053b 2374 is->subtitle_stream = -1;
01310af2 2375
416e3508
FB
2376 global_video_state = is;
2377 url_set_interrupt_cb(decode_interrupt_cb);
2378
61890b02 2379 memset(ap, 0, sizeof(*ap));
115329f1 2380
6299a229 2381 ap->prealloced_context = 1;
e4b89522
LW
2382 ap->width = frame_width;
2383 ap->height= frame_height;
7e042912 2384 ap->time_base= (AVRational){1, 25};
e4b89522 2385 ap->pix_fmt = frame_pix_fmt;
7e042912 2386
6299a229
MN
2387 set_context_opts(ic, avformat_opts, AV_OPT_FLAG_DECODING_PARAM);
2388
61890b02 2389 err = av_open_input_file(&ic, is->filename, is->iformat, 0, ap);
638c9d91
FB
2390 if (err < 0) {
2391 print_error(is->filename, err);
2392 ret = -1;
2393 goto fail;
2394 }
01310af2 2395 is->ic = ic;
30bc6613
MN
2396
2397 if(genpts)
2398 ic->flags |= AVFMT_FLAG_GENPTS;
2399
24c07998
LA
2400 err = av_find_stream_info(ic);
2401 if (err < 0) {
2402 fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
2403 ret = -1;
2404 goto fail;
2405 }
899681cd
BA
2406 if(ic->pb)
2407 ic->pb->eof_reached= 0; //FIXME hack, ffplay maybe should not use url_feof() to test for the end
72ea344b 2408
70a4764d
MN
2409 if(seek_by_bytes<0)
2410 seek_by_bytes= !!(ic->iformat->flags & AVFMT_TS_DISCONT);
2411
72ea344b
FB
2412 /* if seeking requested, we execute it */
2413 if (start_time != AV_NOPTS_VALUE) {
2414 int64_t timestamp;
2415
2416 timestamp = start_time;
2417 /* add the stream start time */
2418 if (ic->start_time != AV_NOPTS_VALUE)
2419 timestamp += ic->start_time;
4ed29207 2420 ret = avformat_seek_file(ic, -1, INT64_MIN, timestamp, INT64_MAX, 0);
72ea344b 2421 if (ret < 0) {
115329f1 2422 fprintf(stderr, "%s: could not seek to position %0.3f\n",
72ea344b
FB
2423 is->filename, (double)timestamp / AV_TIME_BASE);
2424 }
2425 }
72ea344b 2426
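    /* pick, for each media type, the candidate stream that produced the most
       packets during probing (st_best_packet_count), honouring -ast/-vst/-sst
       and the audio/video disable flags; all other streams are discarded */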
01310af2 2427 for(i = 0; i < ic->nb_streams; i++) {
9f7490a0
MN
2428 AVStream *st= ic->streams[i];
2429 AVCodecContext *avctx = st->codec;
3f3fe38d 2430 ic->streams[i]->discard = AVDISCARD_ALL;
72415b2a 2431 if(avctx->codec_type >= (unsigned)AVMEDIA_TYPE_NB)
8ef94840 2432 continue;
256ab3ed
MN
2433 if(st_count[avctx->codec_type]++ != wanted_stream[avctx->codec_type] && wanted_stream[avctx->codec_type] >= 0)
2434 continue;
2435
9f7490a0
MN
2436 if(st_best_packet_count[avctx->codec_type] >= st->codec_info_nb_frames)
2437 continue;
2438 st_best_packet_count[avctx->codec_type]= st->codec_info_nb_frames;
2439
fe74099a 2440 switch(avctx->codec_type) {
72415b2a 2441 case AVMEDIA_TYPE_AUDIO:
256ab3ed 2442 if (!audio_disable)
72415b2a 2443 st_index[AVMEDIA_TYPE_AUDIO] = i;
01310af2 2444 break;
72415b2a
SS
2445 case AVMEDIA_TYPE_VIDEO:
2446 case AVMEDIA_TYPE_SUBTITLE:
256ab3ed
MN
2447 if (!video_disable)
2448 st_index[avctx->codec_type] = i;
16a59a7b 2449 break;
01310af2
FB
2450 default:
2451 break;
2452 }
2453 }
2454 if (show_status) {
2455 dump_format(ic, 0, is->filename, 0);
2456 }
2457
2458 /* open the streams */
72415b2a
SS
2459 if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) {
2460 stream_component_open(is, st_index[AVMEDIA_TYPE_AUDIO]);
01310af2
FB
2461 }
2462
077a8d61 2463 ret=-1;
72415b2a
SS
2464 if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {
2465 ret= stream_component_open(is, st_index[AVMEDIA_TYPE_VIDEO]);
077a8d61 2466 }
d38c9e7a 2467 is->refresh_tid = SDL_CreateThread(refresh_thread, is);
077a8d61 2468 if(ret<0) {
01310af2 2469 if (!display_disable)
bf8ae197 2470 is->show_audio = 2;
01310af2
FB
2471 }
2472
72415b2a
SS
2473 if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) {
2474 stream_component_open(is, st_index[AVMEDIA_TYPE_SUBTITLE]);
16a59a7b
BA
2475 }
2476
01310af2 2477 if (is->video_stream < 0 && is->audio_stream < 0) {
638c9d91
FB
2478 fprintf(stderr, "%s: could not open codecs\n", is->filename);
2479 ret = -1;
01310af2
FB
2480 goto fail;
2481 }
2482
2483 for(;;) {
2484 if (is->abort_request)
2485 break;
416e3508
FB
2486 if (is->paused != is->last_paused) {
2487 is->last_paused = is->paused;
72ea344b 2488 if (is->paused)
f5668147 2489 is->read_pause_return= av_read_pause(ic);
72ea344b
FB
2490 else
2491 av_read_play(ic);
416e3508 2492 }
2f642393
AJ
2493#if CONFIG_RTSP_DEMUXER
2494 if (is->paused && !strcmp(ic->iformat->name, "rtsp")) {
416e3508
FB
2495 /* wait 10 ms to avoid trying to get another packet */
2496 /* XXX: horrible */
2497 SDL_Delay(10);
2498 continue;
2499 }
400738b1 2500#endif
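        /* execute a pending seek request, then flush the packet queues and
           push flush_pkt so each decoding thread calls avcodec_flush_buffers() */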
72ea344b 2501 if (is->seek_req) {
8e606cc8 2502 int64_t seek_target= is->seek_pos;
4ed29207
MN
2503 int64_t seek_min= is->seek_rel > 0 ? seek_target - is->seek_rel + 2: INT64_MIN;
2504 int64_t seek_max= is->seek_rel < 0 ? seek_target - is->seek_rel - 2: INT64_MAX;
2505//FIXME the +-2 is due to rounding not being done in the correct direction in the generation
2506// of the seek_pos/seek_rel variables
8e606cc8 2507
4ed29207 2508 ret = avformat_seek_file(is->ic, -1, seek_min, seek_target, seek_max, is->seek_flags);
72ea344b
FB
2509 if (ret < 0) {
2510 fprintf(stderr, "%s: error while seeking\n", is->ic->filename);
e6c0297f
MN
2511 }else{
2512 if (is->audio_stream >= 0) {
2513 packet_queue_flush(&is->audioq);
39c6a118 2514 packet_queue_put(&is->audioq, &flush_pkt);
e6c0297f 2515 }
72ce053b
IC
2516 if (is->subtitle_stream >= 0) {
2517 packet_queue_flush(&is->subtitleq);
39c6a118 2518 packet_queue_put(&is->subtitleq, &flush_pkt);
72ce053b 2519 }
e6c0297f
MN
2520 if (is->video_stream >= 0) {
2521 packet_queue_flush(&is->videoq);
39c6a118 2522 packet_queue_put(&is->videoq, &flush_pkt);
e6c0297f 2523 }
72ea344b
FB
2524 }
2525 is->seek_req = 0;
e45aeb38 2526 eof= 0;
72ea344b 2527 }
416e3508 2528
01310af2 2529 /* if the queues are full, no need to read more */
79ee4683
MN
2530 if ( is->audioq.size + is->videoq.size + is->subtitleq.size > MAX_QUEUE_SIZE
2531 || ( (is->audioq .size > MIN_AUDIOQ_SIZE || is->audio_stream<0)
2532 && (is->videoq .nb_packets > MIN_FRAMES || is->video_stream<0)
2533 && (is->subtitleq.nb_packets > MIN_FRAMES || is->subtitle_stream<0))) {
01310af2
FB
2534 /* wait 10 ms */
2535 SDL_Delay(10);
2536 continue;
2537 }
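        /* on end of file, queue an empty video packet to flush the decoder's
           buffered frames, then either restart playback (-loop) or quit
           (-autoexit) once all queues have drained */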
75bb7b0a 2538 if(url_feof(ic->pb) || eof) {
9dc41767 2539 if(is->video_stream >= 0){
26534fe8
MN
2540 av_init_packet(pkt);
2541 pkt->data=NULL;
2542 pkt->size=0;
2543 pkt->stream_index= is->video_stream;
2544 packet_queue_put(&is->videoq, pkt);
9dc41767 2545 }
b4083171 2546 SDL_Delay(10);
1922c0a7
RK
2547 if(is->audioq.size + is->videoq.size + is->subtitleq.size ==0){
2548 if(loop!=1 && (!loop || --loop)){
2549 stream_seek(cur_stream, start_time != AV_NOPTS_VALUE ? start_time : 0, 0, 0);
2550 }else if(autoexit){
2551 ret=AVERROR_EOF;
2552 goto fail;
2553 }
2d1653b0 2554 }
600a331c
MN
2555 continue;
2556 }
72ea344b 2557 ret = av_read_frame(ic, pkt);
01310af2 2558 if (ret < 0) {
75bb7b0a
MN
2559 if (ret == AVERROR_EOF)
2560 eof=1;
2561 if (url_ferror(ic->pb))
bb270c08 2562 break;
75bb7b0a
MN
2563 SDL_Delay(100); /* wait for user event */
2564 continue;
01310af2 2565 }
d834d63b
RK
2566 /* check if packet is in play range specified by user, then queue, otherwise discard */
2567 pkt_in_play_range = duration == AV_NOPTS_VALUE ||
2568 (pkt->pts - ic->streams[pkt->stream_index]->start_time) *
2569 av_q2d(ic->streams[pkt->stream_index]->time_base) -
2570 (double)(start_time != AV_NOPTS_VALUE ? start_time : 0)/1000000
2571 <= ((double)duration/1000000);
2572 if (pkt->stream_index == is->audio_stream && pkt_in_play_range) {
01310af2 2573 packet_queue_put(&is->audioq, pkt);
d834d63b 2574 } else if (pkt->stream_index == is->video_stream && pkt_in_play_range) {
01310af2 2575 packet_queue_put(&is->videoq, pkt);
d834d63b 2576 } else if (pkt->stream_index == is->subtitle_stream && pkt_in_play_range) {
72ce053b 2577 packet_queue_put(&is->subtitleq, pkt);
01310af2
FB
2578 } else {
2579 av_free_packet(pkt);
2580 }
2581 }
2582 /* wait until the end */
2583 while (!is->abort_request) {
2584 SDL_Delay(100);
2585 }
2586
638c9d91 2587 ret = 0;
01310af2 2588 fail:
416e3508
FB
2589 /* disable interrupting */
2590 global_video_state = NULL;
2591
01310af2
FB
2592 /* close each stream */
2593 if (is->audio_stream >= 0)
2594 stream_component_close(is, is->audio_stream);
2595 if (is->video_stream >= 0)
2596 stream_component_close(is, is->video_stream);
72ce053b
IC
2597 if (is->subtitle_stream >= 0)
2598 stream_component_close(is, is->subtitle_stream);
638c9d91
FB
2599 if (is->ic) {
2600 av_close_input_file(is->ic);
2601 is->ic = NULL; /* safety */
2602 }
416e3508
FB
2603 url_set_interrupt_cb(NULL);
2604
638c9d91
FB
2605 if (ret != 0) {
2606 SDL_Event event;
115329f1 2607
638c9d91
FB
2608 event.type = FF_QUIT_EVENT;
2609 event.user.data1 = is;
2610 SDL_PushEvent(&event);
2611 }
01310af2
FB
2612 return 0;
2613}
2614
638c9d91 2615static VideoState *stream_open(const char *filename, AVInputFormat *iformat)
01310af2
FB
2616{
2617 VideoState *is;
2618
2619 is = av_mallocz(sizeof(VideoState));
2620 if (!is)
2621 return NULL;
f7d78f36 2622 av_strlcpy(is->filename, filename, sizeof(is->filename));
638c9d91 2623 is->iformat = iformat;
01310af2
FB
2624 is->ytop = 0;
2625 is->xleft = 0;
2626
2627 /* start video display */
2628 is->pictq_mutex = SDL_CreateMutex();
2629 is->pictq_cond = SDL_CreateCond();
115329f1 2630
72ce053b
IC
2631 is->subpq_mutex = SDL_CreateMutex();
2632 is->subpq_cond = SDL_CreateCond();
115329f1 2633
638c9d91 2634 is->av_sync_type = av_sync_type;
01310af2
FB
2635 is->parse_tid = SDL_CreateThread(decode_thread, is);
2636 if (!is->parse_tid) {
2637 av_free(is);
2638 return NULL;
2639 }
2640 return is;
2641}
2642
2643static void stream_close(VideoState *is)
2644{
2645 VideoPicture *vp;
2646 int i;
2647 /* XXX: use a special url_shutdown call to abort parse cleanly */
2648 is->abort_request = 1;
2649 SDL_WaitThread(is->parse_tid, NULL);
d38c9e7a 2650 SDL_WaitThread(is->refresh_tid, NULL);
01310af2
FB
2651
2652 /* free all pictures */
2653 for(i=0;i<VIDEO_PICTURE_QUEUE_SIZE; i++) {
2654 vp = &is->pictq[i];
917d2bb3
MN
2655#if CONFIG_AVFILTER
2656 if (vp->picref) {
2657 avfilter_unref_pic(vp->picref);
2658 vp->picref = NULL;
2659 }
2660#endif
01310af2
FB
2661 if (vp->bmp) {
2662 SDL_FreeYUVOverlay(vp->bmp);
2663 vp->bmp = NULL;
2664 }
2665 }
2666 SDL_DestroyMutex(is->pictq_mutex);
2667 SDL_DestroyCond(is->pictq_cond);
72ce053b
IC
2668 SDL_DestroyMutex(is->subpq_mutex);
2669 SDL_DestroyCond(is->subpq_cond);
917d2bb3 2670#if !CONFIG_AVFILTER
3ac56e28
MS
2671 if (is->img_convert_ctx)
2672 sws_freeContext(is->img_convert_ctx);
917d2bb3 2673#endif
7c5ab145 2674 av_free(is);
01310af2
FB
2675}
2676
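/* switch to the next usable stream of the given type, wrapping around the
   stream list (and allowing "no stream" for subtitles) */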
7b49ce2e 2677static void stream_cycle_channel(VideoState *is, int codec_type)
638c9d91
FB
2678{
2679 AVFormatContext *ic = is->ic;
2680 int start_index, stream_index;
2681 AVStream *st;
2682
72415b2a 2683 if (codec_type == AVMEDIA_TYPE_VIDEO)
638c9d91 2684 start_index = is->video_stream;
72415b2a 2685 else if (codec_type == AVMEDIA_TYPE_AUDIO)
638c9d91 2686 start_index = is->audio_stream;
72ce053b
IC
2687 else
2688 start_index = is->subtitle_stream;
72415b2a 2689 if (start_index < (codec_type == AVMEDIA_TYPE_SUBTITLE ? -1 : 0))
638c9d91
FB
2690 return;
2691 stream_index = start_index;
2692 for(;;) {
2693 if (++stream_index >= is->ic->nb_streams)
72ce053b 2694 {
72415b2a 2695 if (codec_type == AVMEDIA_TYPE_SUBTITLE)
72ce053b
IC
2696 {
2697 stream_index = -1;
2698 goto the_end;
2699 } else
2700 stream_index = 0;
2701 }
638c9d91
FB
2702 if (stream_index == start_index)
2703 return;
2704 st = ic->streams[stream_index];
01f4895c 2705 if (st->codec->codec_type == codec_type) {
638c9d91
FB
2706 /* check that parameters are OK */
2707 switch(codec_type) {
72415b2a 2708 case AVMEDIA_TYPE_AUDIO:
01f4895c
MN
2709 if (st->codec->sample_rate != 0 &&
2710 st->codec->channels != 0)
638c9d91
FB
2711 goto the_end;
2712 break;
72415b2a
SS
2713 case AVMEDIA_TYPE_VIDEO:
2714 case AVMEDIA_TYPE_SUBTITLE:
638c9d91
FB
2715 goto the_end;
2716 default:
2717 break;
2718 }
2719 }
2720 }
2721 the_end:
2722 stream_component_close(is, start_index);
2723 stream_component_open(is, stream_index);
2724}
2725
2726
7b49ce2e 2727static void toggle_full_screen(void)
01310af2 2728{
01310af2 2729 is_full_screen = !is_full_screen;
29f3b38a
MR
2730 if (!fs_screen_width) {
2731 /* use default SDL method */
fb84155b 2732// SDL_WM_ToggleFullScreen(screen);
01310af2 2733 }
fb84155b 2734 video_open(cur_stream);
01310af2
FB
2735}
2736
7b49ce2e 2737static void toggle_pause(void)
01310af2
FB
2738{
2739 if (cur_stream)
2740 stream_pause(cur_stream);
bba04f1e
WH
2741 step = 0;
2742}
2743
7b49ce2e 2744static void step_to_next_frame(void)
bba04f1e
WH
2745{
2746 if (cur_stream) {
19cc524a 2747 /* if the stream is paused, unpause it, then step */
bba04f1e 2748 if (cur_stream->paused)
19cc524a 2749 stream_pause(cur_stream);
bba04f1e
WH
2750 }
2751 step = 1;
01310af2
FB
2752}
2753
7b49ce2e 2754static void do_exit(void)
01310af2 2755{
7c5ab145 2756 int i;
01310af2
FB
2757 if (cur_stream) {
2758 stream_close(cur_stream);
2759 cur_stream = NULL;
2760 }
72415b2a 2761 for (i = 0; i < AVMEDIA_TYPE_NB; i++)
7c5ab145
MS
2762 av_free(avcodec_opts[i]);
2763 av_free(avformat_opts);
2764 av_free(sws_opts);
917d2bb3
MN
2765#if CONFIG_AVFILTER
2766 avfilter_uninit();
2767#endif
01310af2
FB
2768 if (show_status)
2769 printf("\n");
2770 SDL_Quit();
2771 exit(0);
2772}
2773
7b49ce2e 2774static void toggle_audio_display(void)
01310af2
FB
2775{
2776 if (cur_stream) {
f5968788 2777 int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
12eeda34 2778 cur_stream->show_audio = (cur_stream->show_audio + 1) % 3;
f5968788
MN
2779 fill_rectangle(screen,
2780 cur_stream->xleft, cur_stream->ytop, cur_stream->width, cur_stream->height,
2781 bgcolor);
2782 SDL_UpdateRect(screen, cur_stream->xleft, cur_stream->ytop, cur_stream->width, cur_stream->height);
01310af2
FB
2783 }
2784}
2785
2786/* handle an event sent by the GUI */
7b49ce2e 2787static void event_loop(void)
01310af2
FB
2788{
2789 SDL_Event event;
a11d11aa 2790 double incr, pos, frac;
01310af2
FB
2791
2792 for(;;) {
d52ec002 2793 double x;
01310af2
FB
2794 SDL_WaitEvent(&event);
2795 switch(event.type) {
2796 case SDL_KEYDOWN:
2797 switch(event.key.keysym.sym) {
2798 case SDLK_ESCAPE:
2799 case SDLK_q:
2800 do_exit();
2801 break;
2802 case SDLK_f:
2803 toggle_full_screen();
2804 break;
2805 case SDLK_p:
2806 case SDLK_SPACE:
2807 toggle_pause();
2808 break;
bba04f1e
WH
2809 case SDLK_s: //S: Step to next frame
2810 step_to_next_frame();
2811 break;
01310af2 2812 case SDLK_a:
115329f1 2813 if (cur_stream)
72415b2a 2814 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_AUDIO);
638c9d91
FB
2815 break;
2816 case SDLK_v:
115329f1 2817 if (cur_stream)
72415b2a 2818 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_VIDEO);
638c9d91 2819 break;
72ce053b 2820 case SDLK_t:
115329f1 2821 if (cur_stream)
72415b2a 2822 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_SUBTITLE);
72ce053b 2823 break;
638c9d91 2824 case SDLK_w:
01310af2
FB
2825 toggle_audio_display();
2826 break;
72ea344b
FB
2827 case SDLK_LEFT:
2828 incr = -10.0;
2829 goto do_seek;
2830 case SDLK_RIGHT:
2831 incr = 10.0;
2832 goto do_seek;
2833 case SDLK_UP:
2834 incr = 60.0;
2835 goto do_seek;
2836 case SDLK_DOWN:
2837 incr = -60.0;
2838 do_seek:
2839 if (cur_stream) {
94b594c6 2840 if (seek_by_bytes) {
1a620dd7
MN
2841 if (cur_stream->video_stream >= 0 && cur_stream->video_current_pos>=0){
2842 pos= cur_stream->video_current_pos;
2843 }else if(cur_stream->audio_stream >= 0 && cur_stream->audio_pkt.pos>=0){
2844 pos= cur_stream->audio_pkt.pos;
2845 }else
2846 pos = url_ftell(cur_stream->ic->pb);
94b594c6 2847 if (cur_stream->ic->bit_rate)
566cd2cb 2848 incr *= cur_stream->ic->bit_rate / 8.0;
94b594c6
SH
2849 else
2850 incr *= 180000.0;
2851 pos += incr;
2ef46053 2852 stream_seek(cur_stream, pos, incr, 1);
94b594c6
SH
2853 } else {
2854 pos = get_master_clock(cur_stream);
2855 pos += incr;
2ef46053 2856 stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), (int64_t)(incr * AV_TIME_BASE), 0);
94b594c6 2857 }
72ea344b
FB
2858 }
2859 break;
01310af2
FB
2860 default:
2861 break;
2862 }
2863 break;
a11d11aa 2864 case SDL_MOUSEBUTTONDOWN:
d52ec002
MN
2865 case SDL_MOUSEMOTION:
2866 if(event.type ==SDL_MOUSEBUTTONDOWN){
2867 x= event.button.x;
2868 }else{
2869 if(event.motion.state != SDL_PRESSED)
2870 break;
2871 x= event.motion.x;
2872 }
bb270c08 2873 if (cur_stream) {
2ef46053
MN
2874 if(seek_by_bytes || cur_stream->ic->duration<=0){
2875 uint64_t size= url_fsize(cur_stream->ic->pb);
d52ec002 2876 stream_seek(cur_stream, size*x/cur_stream->width, 0, 1);
2ef46053 2877 }else{
6371c81a
MN
2878 int64_t ts;
2879 int ns, hh, mm, ss;
2880 int tns, thh, tmm, tss;
2881 tns = cur_stream->ic->duration/1000000LL;
2882 thh = tns/3600;
2883 tmm = (tns%3600)/60;
2884 tss = (tns%60);
d52ec002 2885 frac = x/cur_stream->width;
6371c81a
MN
2886 ns = frac*tns;
2887 hh = ns/3600;
2888 mm = (ns%3600)/60;
2889 ss = (ns%60);
2890 fprintf(stderr, "Seek to %2.0f%% (%2d:%02d:%02d) of total duration (%2d:%02d:%02d) \n", frac*100,
2891 hh, mm, ss, thh, tmm, tss);
2892 ts = frac*cur_stream->ic->duration;
2893 if (cur_stream->ic->start_time != AV_NOPTS_VALUE)
2894 ts += cur_stream->ic->start_time;
2895 stream_seek(cur_stream, ts, 0, 0);
2ef46053 2896 }
bb270c08
DB
2897 }
2898 break;
01310af2
FB
2899 case SDL_VIDEORESIZE:
2900 if (cur_stream) {
115329f1 2901 screen = SDL_SetVideoMode(event.resize.w, event.resize.h, 0,
01310af2 2902 SDL_HWSURFACE|SDL_RESIZABLE|SDL_ASYNCBLIT|SDL_HWACCEL);
c57d3469
MN
2903 screen_width = cur_stream->width = event.resize.w;
2904 screen_height= cur_stream->height= event.resize.h;
01310af2
FB
2905 }
2906 break;
2907 case SDL_QUIT:
638c9d91 2908 case FF_QUIT_EVENT:
01310af2
FB
2909 do_exit();
2910 break;
2911 case FF_ALLOC_EVENT:
fccb19e3 2912 video_open(event.user.data1);
01310af2
FB
2913 alloc_picture(event.user.data1);
2914 break;
2915 case FF_REFRESH_EVENT:
2916 video_refresh_timer(event.user.data1);
d38c9e7a 2917 cur_stream->refresh=0;
01310af2
FB
2918 break;
2919 default:
2920 break;
2921 }
2922 }
2923}
2924
e4b89522
LW
2925static void opt_frame_size(const char *arg)
2926{
b33ece16 2927 if (av_parse_video_frame_size(&frame_width, &frame_height, arg) < 0) {
e4b89522
LW
2928 fprintf(stderr, "Incorrect frame size\n");
2929 exit(1);
2930 }
2931 if ((frame_width % 2) != 0 || (frame_height % 2) != 0) {
2932 fprintf(stderr, "Frame size must be a multiple of 2\n");
2933 exit(1);
2934 }
2935}
2936
a5b3b5f6 2937static int opt_width(const char *opt, const char *arg)
01310af2 2938{
a5b3b5f6
SS
2939 screen_width = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
2940 return 0;
01310af2
FB
2941}
2942
a5b3b5f6 2943static int opt_height(const char *opt, const char *arg)
01310af2 2944{
a5b3b5f6
SS
2945 screen_height = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
2946 return 0;
01310af2
FB
2947}
2948
2949static void opt_format(const char *arg)
2950{
2951 file_iformat = av_find_input_format(arg);
2952 if (!file_iformat) {
2953 fprintf(stderr, "Unknown input format: %s\n", arg);
2954 exit(1);
2955 }
2956}
61890b02 2957
e4b89522
LW
2958static void opt_frame_pix_fmt(const char *arg)
2959{
718c7b18 2960 frame_pix_fmt = av_get_pix_fmt(arg);
e4b89522
LW
2961}
2962
b81d6235 2963static int opt_sync(const char *opt, const char *arg)
638c9d91
FB
2964{
2965 if (!strcmp(arg, "audio"))
2966 av_sync_type = AV_SYNC_AUDIO_MASTER;
2967 else if (!strcmp(arg, "video"))
2968 av_sync_type = AV_SYNC_VIDEO_MASTER;
2969 else if (!strcmp(arg, "ext"))
2970 av_sync_type = AV_SYNC_EXTERNAL_CLOCK;
aab1b7e5 2971 else {
b81d6235 2972 fprintf(stderr, "Unknown value for %s: %s\n", opt, arg);
aab1b7e5
SS
2973 exit(1);
2974 }
b81d6235 2975 return 0;
638c9d91
FB
2976}
2977
e11bc2c6 2978static int opt_seek(const char *opt, const char *arg)
72ea344b 2979{
e11bc2c6
SS
2980 start_time = parse_time_or_die(opt, arg, 1);
2981 return 0;
72ea344b
FB
2982}
2983
d834d63b
RK
2984static int opt_duration(const char *opt, const char *arg)
2985{
2986 duration = parse_time_or_die(opt, arg, 1);
2987 return 0;
2988}
2989
a5b3b5f6 2990static int opt_debug(const char *opt, const char *arg)
e26a8335 2991{
a309073b 2992 av_log_set_level(99);
a5b3b5f6
SS
2993 debug = parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
2994 return 0;
e26a8335 2995}
115329f1 2996
a5b3b5f6 2997static int opt_vismv(const char *opt, const char *arg)
0c9bbaec 2998{
a5b3b5f6
SS
2999 debug_mv = parse_number_or_die(opt, arg, OPT_INT64, INT_MIN, INT_MAX);
3000 return 0;
0c9bbaec 3001}
c62c07d3 3002
a5b3b5f6 3003static int opt_thread_count(const char *opt, const char *arg)
c62c07d3 3004{
a5b3b5f6 3005 thread_count= parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
b250f9c6 3006#if !HAVE_THREADS
c62c07d3
MN
3007 fprintf(stderr, "Warning: not compiled with thread support, using thread emulation\n");
3008#endif
a5b3b5f6 3009 return 0;
c62c07d3 3010}
115329f1 3011
358061f6 3012static const OptionDef options[] = {
992f8eae 3013#include "cmdutils_common_opts.h"
a5b3b5f6
SS
3014 { "x", HAS_ARG | OPT_FUNC2, {(void*)opt_width}, "force displayed width", "width" },
3015 { "y", HAS_ARG | OPT_FUNC2, {(void*)opt_height}, "force displayed height", "height" },
e4b89522 3016 { "s", HAS_ARG | OPT_VIDEO, {(void*)opt_frame_size}, "set frame size (WxH or abbreviation)", "size" },
638c9d91 3017 { "fs", OPT_BOOL, {(void*)&is_full_screen}, "force full screen" },
01310af2
FB
3018 { "an", OPT_BOOL, {(void*)&audio_disable}, "disable audio" },
3019 { "vn", OPT_BOOL, {(void*)&video_disable}, "disable video" },
72415b2a
SS
3020 { "ast", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_AUDIO]}, "select desired audio stream", "stream_number" },
3021 { "vst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_VIDEO]}, "select desired video stream", "stream_number" },
3022 { "sst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_SUBTITLE]}, "select desired subtitle stream", "stream_number" },
e11bc2c6 3023 { "ss", HAS_ARG | OPT_FUNC2, {(void*)&opt_seek}, "seek to a given position in seconds", "pos" },
d834d63b 3024 { "t", HAS_ARG | OPT_FUNC2, {(void*)&opt_duration}, "play \"duration\" seconds of audio/video", "duration" },
674fe163 3025 { "bytes", OPT_INT | HAS_ARG, {(void*)&seek_by_bytes}, "seek by bytes 0=off 1=on -1=auto", "val" },
01310af2
FB
3026 { "nodisp", OPT_BOOL, {(void*)&display_disable}, "disable graphical display" },
3027 { "f", HAS_ARG, {(void*)opt_format}, "force format", "fmt" },
e4b89522 3028 { "pix_fmt", HAS_ARG | OPT_EXPERT | OPT_VIDEO, {(void*)opt_frame_pix_fmt}, "set pixel format", "format" },
98ae6acf 3029 { "stats", OPT_BOOL | OPT_EXPERT, {(void*)&show_status}, "show status", "" },
a5b3b5f6 3030 { "debug", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_debug}, "print specific debug info", "" },
6387c3e6 3031 { "bug", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&workaround_bugs}, "workaround bugs", "" },
a5b3b5f6 3032 { "vismv", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_vismv}, "visualize motion vectors", "" },
6fc5b059 3033 { "fast", OPT_BOOL | OPT_EXPERT, {(void*)&fast}, "non spec compliant optimizations", "" },
30bc6613 3034 { "genpts", OPT_BOOL | OPT_EXPERT, {(void*)&genpts}, "generate pts", "" },
59055363 3035 { "drp", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&decoder_reorder_pts}, "let decoder reorder pts 0=off 1=on -1=auto", ""},
178fcca8 3036 { "lowres", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&lowres}, "", "" },
8c3eba7c
MN
3037 { "skiploop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_loop_filter}, "", "" },
3038 { "skipframe", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_frame}, "", "" },
3039 { "skipidct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_idct}, "", "" },
178fcca8 3040 { "idct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&idct}, "set idct algo", "algo" },
047599a4 3041 { "er", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_recognition}, "set error detection threshold (0-4)", "threshold" },
1b51e051 3042 { "ec", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_concealment}, "set error concealment options", "bit_mask" },
b81d6235 3043 { "sync", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_sync}, "set audio-video sync. type (type=audio/video/ext)", "type" },
a5b3b5f6 3044 { "threads", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_thread_count}, "thread count", "count" },
2d1653b0 3045 { "autoexit", OPT_BOOL | OPT_EXPERT, {(void*)&autoexit}, "exit at the end", "" },
1922c0a7 3046 { "loop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&loop}, "set number of times the playback shall be looped", "loop count" },
d38c9e7a 3047 { "framedrop", OPT_BOOL | OPT_EXPERT, {(void*)&framedrop}, "drop frames when cpu is too slow", "" },
076db5ed 3048 { "window_title", OPT_STRING | HAS_ARG, {(void*)&window_title}, "set window title", "window title" },
917d2bb3 3049#if CONFIG_AVFILTER
09ed11e5 3050 { "vf", OPT_STRING | HAS_ARG, {(void*)&vfilters}, "video filters", "filter list" },
917d2bb3 3051#endif
2b3da32f 3052 { "rdftspeed", OPT_INT | HAS_ARG| OPT_AUDIO | OPT_EXPERT, {(void*)&rdftspeed}, "rdft speed", "msecs" },
e43d7a18 3053 { "default", OPT_FUNC2 | HAS_ARG | OPT_AUDIO | OPT_VIDEO | OPT_EXPERT, {(void*)opt_default}, "generic catch all option", "" },
01310af2
FB
3054 { NULL, },
3055};
3056
0c2a18cb 3057static void show_usage(void)
01310af2 3058{
27daa420
RP
3059 printf("Simple media player\n");
3060 printf("usage: ffplay [options] input_file\n");
01310af2 3061 printf("\n");
0c2a18cb
RP
3062}
3063
3064static void show_help(void)
3065{
3066 show_usage();
02d504a7
FB
3067 show_help_options(options, "Main options:\n",
3068 OPT_EXPERT, 0);
3069 show_help_options(options, "\nAdvanced options:\n",
3070 OPT_EXPERT, OPT_EXPERT);
01310af2
FB
3071 printf("\nWhile playing:\n"
3072 "q, ESC quit\n"
3073 "f toggle full screen\n"
3074 "p, SPC pause\n"
638c9d91
FB
3075 "a cycle audio channel\n"
3076 "v cycle video channel\n"
72ce053b 3077 "t cycle subtitle channel\n"
638c9d91 3078 "w show audio waves\n"
79f8b328 3079 "s activate frame-step mode\n"
72ea344b
FB
3080 "left/right seek backward/forward 10 seconds\n"
3081 "down/up seek backward/forward 1 minute\n"
a11d11aa 3082 "mouse click seek to percentage in file corresponding to fraction of width\n"
01310af2 3083 );
01310af2
FB
3084}
3085
358061f6 3086static void opt_input_file(const char *filename)
01310af2 3087{
07a70b38
SS
3088 if (input_filename) {
3089 fprintf(stderr, "Argument '%s' provided as input filename, but '%s' was already specified.\n",
3090 filename, input_filename);
3091 exit(1);
3092 }
e8d83e1c 3093 if (!strcmp(filename, "-"))
9fcfc0b7 3094 filename = "pipe:";
01310af2
FB
3095 input_filename = filename;
3096}
3097
3098/* Called from the main */
3099int main(int argc, char **argv)
3100{
e43d7a18 3101 int flags, i;
115329f1 3102
01310af2 3103 /* register all codecs, demux and protocols */
c721d803 3104 avcodec_register_all();
9b157b0c 3105#if CONFIG_AVDEVICE
c721d803 3106 avdevice_register_all();
9b157b0c 3107#endif
917d2bb3
MN
3108#if CONFIG_AVFILTER
3109 avfilter_register_all();
3110#endif
01310af2
FB
3111 av_register_all();
3112
72415b2a 3113 for(i=0; i<AVMEDIA_TYPE_NB; i++){
636f1c4c 3114 avcodec_opts[i]= avcodec_alloc_context2(i);
e43d7a18 3115 }
8e2fd8e1 3116 avformat_opts = avformat_alloc_context();
917d2bb3 3117#if !CONFIG_AVFILTER
e43d7a18 3118 sws_opts = sws_getContext(16,16,0, 16,16,0, sws_flags, NULL,NULL,NULL);
917d2bb3 3119#endif
e43d7a18 3120
ea9c581f 3121 show_banner();
4cfac5bc 3122
f5da5c93 3123 parse_options(argc, argv, options, opt_input_file);
01310af2 3124
aab1b7e5 3125 if (!input_filename) {
7f11e745 3126 show_usage();
7a7da6b4 3127 fprintf(stderr, "An input file must be specified\n");
7f11e745 3128 fprintf(stderr, "Use -h to get full help or, even better, run 'man ffplay'\n");
aab1b7e5
SS
3129 exit(1);
3130 }
01310af2
FB
3131
3132 if (display_disable) {
3133 video_disable = 1;
3134 }
31319a8c 3135 flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
c97f5402
DB
3136#if !defined(__MINGW32__) && !defined(__APPLE__)
3137 flags |= SDL_INIT_EVENTTHREAD; /* Not supported on Windows or Mac OS X */
31319a8c 3138#endif
01310af2 3139 if (SDL_Init (flags)) {
05ab0b76 3140 fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
01310af2
FB
3141 exit(1);
3142 }
3143
3144 if (!display_disable) {
b250f9c6 3145#if HAVE_SDL_VIDEO_SIZE
3ef17d62
MR
3146 const SDL_VideoInfo *vi = SDL_GetVideoInfo();
3147 fs_screen_width = vi->current_w;
3148 fs_screen_height = vi->current_h;
29f3b38a 3149#endif
01310af2
FB
3150 }
3151
3152 SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
01310af2
FB
3153 SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
3154 SDL_EventState(SDL_USEREVENT, SDL_IGNORE);
3155
39c6a118
MN
3156 av_init_packet(&flush_pkt);
3157 flush_pkt.data= "FLUSH";
3158
638c9d91 3159 cur_stream = stream_open(input_filename, file_iformat);
01310af2
FB
3160
3161 event_loop();
3162
3163 /* never returns */
3164
3165 return 0;
3166}