01310af2 1/*
f05ef45c 2 * FFplay : Simple Media Player based on the FFmpeg libraries
01310af2
FB
3 * Copyright (c) 2003 Fabrice Bellard
4 *
b78e7197
DB
5 * This file is part of FFmpeg.
6 *
7 * FFmpeg is free software; you can redistribute it and/or
01310af2
FB
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
b78e7197 10 * version 2.1 of the License, or (at your option) any later version.
01310af2 11 *
b78e7197 12 * FFmpeg is distributed in the hope that it will be useful,
01310af2
FB
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
b78e7197 18 * License along with FFmpeg; if not, write to the Free Software
5509bffa 19 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
01310af2 20 */
364a9607 21
ba11257e 22#include "config.h"
8a3ceaf4 23#include <inttypes.h>
0f4e8165
RB
24#include <math.h>
25#include <limits.h>
245976da 26#include "libavutil/avstring.h"
2b4abbd6 27#include "libavutil/colorspace.h"
718c7b18 28#include "libavutil/pixdesc.h"
245976da 29#include "libavformat/avformat.h"
245976da
DB
30#include "libavdevice/avdevice.h"
31#include "libswscale/swscale.h"
5a4476e2 32#include "libavcodec/audioconvert.h"
e43d7a18 33#include "libavcodec/opt.h"
166621ab 34#include "libavcodec/avfft.h"
01310af2 35
917d2bb3
MN
36#if CONFIG_AVFILTER
37# include "libavfilter/avfilter.h"
38# include "libavfilter/avfiltergraph.h"
39# include "libavfilter/graphparser.h"
40#endif
41
01310af2
FB
42#include "cmdutils.h"
43
44#include <SDL.h>
45#include <SDL_thread.h>
46
2f30a81d 47#ifdef __MINGW32__
31319a8c
FB
48#undef main /* We don't want SDL to override our main() */
49#endif
50
d38c9e7a
MN
51#include <unistd.h>
52#include <assert.h>
53
64555bd9 54const char program_name[] = "FFplay";
ea9c581f 55const int program_birth_year = 2003;
4cfac5bc 56
638c9d91
FB
57//#define DEBUG_SYNC
58
79ee4683
MN
59#define MAX_QUEUE_SIZE (15 * 1024 * 1024)
60#define MIN_AUDIOQ_SIZE (20 * 16 * 1024)
61#define MIN_FRAMES 5
01310af2 62
638c9d91
FB
63/* SDL audio buffer size, in samples. Should be small to have precise
64 A/V sync as SDL does not have hardware buffer fullness info. */
65#define SDL_AUDIO_BUFFER_SIZE 1024
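/* For reference: at 44100 Hz stereo S16 this is 1024/44100 ~= 23 ms of audio
   per callback, which bounds how precise the callback-driven audio clock can be. */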
66
67/* no AV sync correction is done if below the AV sync threshold */
7e0140cb 68#define AV_SYNC_THRESHOLD 0.01
638c9d91
FB
69/* no AV correction is done if too big error */
70#define AV_NOSYNC_THRESHOLD 10.0
71
d38c9e7a
MN
72#define FRAME_SKIP_FACTOR 0.05
73
638c9d91
FB
74/* maximum audio speed change to get correct sync */
75#define SAMPLE_CORRECTION_PERCENT_MAX 10
76
77/* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */
78#define AUDIO_DIFF_AVG_NB 20
79
01310af2
FB
 80 /* NOTE: the size must be big enough to compensate for the hardware audio buffer size */
81#define SAMPLE_ARRAY_SIZE (2*65536)
82
03ae87a3
LA
83static int sws_flags = SWS_BICUBIC;
84
01310af2
FB
85typedef struct PacketQueue {
86 AVPacketList *first_pkt, *last_pkt;
87 int nb_packets;
88 int size;
89 int abort_request;
90 SDL_mutex *mutex;
91 SDL_cond *cond;
92} PacketQueue;
93
562f382c 94#define VIDEO_PICTURE_QUEUE_SIZE 2
72ce053b 95#define SUBPICTURE_QUEUE_SIZE 4
01310af2
FB
96
97typedef struct VideoPicture {
267e9dfa 98 double pts; ///<presentation time stamp for this picture
d38c9e7a 99 double target_clock; ///<av_gettime() time at which this should be displayed ideally
1a620dd7 100 int64_t pos; ///<byte position in file
01310af2
FB
101 SDL_Overlay *bmp;
102 int width, height; /* source height & width */
103 int allocated;
917d2bb3
MN
104 enum PixelFormat pix_fmt;
105
106#if CONFIG_AVFILTER
107 AVFilterPicRef *picref;
108#endif
01310af2
FB
109} VideoPicture;
110
72ce053b
IC
111typedef struct SubPicture {
112 double pts; /* presentation time stamp for this picture */
113 AVSubtitle sub;
114} SubPicture;
115
01310af2
FB
116enum {
117 AV_SYNC_AUDIO_MASTER, /* default choice */
118 AV_SYNC_VIDEO_MASTER,
638c9d91 119 AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */
01310af2
FB
120};
121
122typedef struct VideoState {
123 SDL_Thread *parse_tid;
124 SDL_Thread *video_tid;
d38c9e7a 125 SDL_Thread *refresh_tid;
638c9d91 126 AVInputFormat *iformat;
01310af2
FB
127 int no_background;
128 int abort_request;
129 int paused;
416e3508 130 int last_paused;
72ea344b 131 int seek_req;
3ba1438d 132 int seek_flags;
72ea344b 133 int64_t seek_pos;
4ed29207 134 int64_t seek_rel;
f5668147 135 int read_pause_return;
01310af2
FB
136 AVFormatContext *ic;
137 int dtg_active_format;
138
139 int audio_stream;
115329f1 140
01310af2 141 int av_sync_type;
638c9d91
FB
142 double external_clock; /* external clock base */
143 int64_t external_clock_time;
115329f1 144
638c9d91
FB
145 double audio_clock;
146 double audio_diff_cum; /* used for AV difference average computation */
147 double audio_diff_avg_coef;
148 double audio_diff_threshold;
149 int audio_diff_avg_count;
01310af2
FB
150 AVStream *audio_st;
151 PacketQueue audioq;
152 int audio_hw_buf_size;
 153 /* samples output by the codec. We reserve more space for A/V sync
 154 compensation */
c6727809
MR
155 DECLARE_ALIGNED(16,uint8_t,audio_buf1)[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
156 DECLARE_ALIGNED(16,uint8_t,audio_buf2)[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
5a4476e2 157 uint8_t *audio_buf;
7fea94ce 158 unsigned int audio_buf_size; /* in bytes */
01310af2 159 int audio_buf_index; /* in bytes */
bea18375 160 AVPacket audio_pkt_temp;
01310af2 161 AVPacket audio_pkt;
5a4476e2
PR
162 enum SampleFormat audio_src_fmt;
163 AVAudioConvert *reformat_ctx;
115329f1 164
01310af2
FB
165 int show_audio; /* if true, display audio samples */
166 int16_t sample_array[SAMPLE_ARRAY_SIZE];
167 int sample_array_index;
5e0257e3 168 int last_i_start;
166621ab 169 RDFTContext *rdft;
12eeda34 170 int rdft_bits;
7dbbf6a1 171 FFTSample *rdft_data;
12eeda34 172 int xpos;
115329f1 173
72ce053b
IC
174 SDL_Thread *subtitle_tid;
175 int subtitle_stream;
176 int subtitle_stream_changed;
177 AVStream *subtitle_st;
178 PacketQueue subtitleq;
179 SubPicture subpq[SUBPICTURE_QUEUE_SIZE];
180 int subpq_size, subpq_rindex, subpq_windex;
181 SDL_mutex *subpq_mutex;
182 SDL_cond *subpq_cond;
115329f1 183
638c9d91
FB
184 double frame_timer;
185 double frame_last_pts;
186 double frame_last_delay;
115329f1 187 double video_clock; ///<pts of last decoded frame / predicted pts of next decoded frame
01310af2
FB
188 int video_stream;
189 AVStream *video_st;
190 PacketQueue videoq;
267e9dfa 191 double video_current_pts; ///<current displayed pts (different from video_clock if frame fifos are used)
68aefbe8 192 double video_current_pts_drift; ///<video_current_pts - time (av_gettime) at which we updated video_current_pts - used to have running video pts
1a620dd7 193 int64_t video_current_pos; ///<current displayed file pos
01310af2
FB
194 VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
195 int pictq_size, pictq_rindex, pictq_windex;
196 SDL_mutex *pictq_mutex;
197 SDL_cond *pictq_cond;
917d2bb3 198#if !CONFIG_AVFILTER
3ac56e28 199 struct SwsContext *img_convert_ctx;
917d2bb3 200#endif
115329f1 201
01310af2
FB
202 // QETimer *video_timer;
203 char filename[1024];
204 int width, height, xleft, ytop;
41db429d
MN
205
206 int64_t faulty_pts;
207 int64_t faulty_dts;
208 int64_t last_dts_for_fault_detection;
209 int64_t last_pts_for_fault_detection;
210
917d2bb3
MN
211#if CONFIG_AVFILTER
212 AVFilterContext *out_video_filter; ///<the last filter in the video chain
213#endif
d38c9e7a
MN
214
215 float skip_frames;
216 float skip_frames_index;
217 int refresh;
01310af2
FB
218} VideoState;
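/* One VideoState ties the player together: the parse (demux) thread fills the
   audio/video/subtitle packet queues, the video and subtitle threads decode
   from them, the refresh thread paces FF_REFRESH_EVENTs for display, and the
   SDL audio callback (installed later in the file) drains decoded samples
   while updating audio_clock. */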
219
358061f6 220static void show_help(void);
638c9d91 221static int audio_write_get_buf_size(VideoState *is);
01310af2
FB
222
223/* options specified by the user */
224static AVInputFormat *file_iformat;
225static const char *input_filename;
076db5ed 226static const char *window_title;
01310af2
FB
227static int fs_screen_width;
228static int fs_screen_height;
fccb19e3
MN
229static int screen_width = 0;
230static int screen_height = 0;
e4b89522
LW
231static int frame_width = 0;
232static int frame_height = 0;
233static enum PixelFormat frame_pix_fmt = PIX_FMT_NONE;
01310af2
FB
234static int audio_disable;
235static int video_disable;
72415b2a
SS
236static int wanted_stream[AVMEDIA_TYPE_NB]={
237 [AVMEDIA_TYPE_AUDIO]=-1,
238 [AVMEDIA_TYPE_VIDEO]=-1,
239 [AVMEDIA_TYPE_SUBTITLE]=-1,
5b369983 240};
70a4764d 241static int seek_by_bytes=-1;
01310af2 242static int display_disable;
1e1a0b18 243static int show_status = 1;
638c9d91 244static int av_sync_type = AV_SYNC_AUDIO_MASTER;
72ea344b 245static int64_t start_time = AV_NOPTS_VALUE;
d834d63b 246static int64_t duration = AV_NOPTS_VALUE;
e26a8335 247static int debug = 0;
0c9bbaec 248static int debug_mv = 0;
bba04f1e 249static int step = 0;
c62c07d3 250static int thread_count = 1;
6387c3e6 251static int workaround_bugs = 1;
6fc5b059 252static int fast = 0;
30bc6613 253static int genpts = 0;
178fcca8
MN
254static int lowres = 0;
255static int idct = FF_IDCT_AUTO;
8c3eba7c
MN
256static enum AVDiscard skip_frame= AVDISCARD_DEFAULT;
257static enum AVDiscard skip_idct= AVDISCARD_DEFAULT;
258static enum AVDiscard skip_loop_filter= AVDISCARD_DEFAULT;
047599a4 259static int error_recognition = FF_ER_CAREFUL;
1b51e051 260static int error_concealment = 3;
41db429d 261static int decoder_reorder_pts= -1;
2d1653b0 262static int autoexit;
066ce8c9
AS
263static int exit_on_keydown;
264static int exit_on_mousedown;
1922c0a7 265static int loop=1;
d38c9e7a 266static int framedrop=1;
2b3da32f
MN
267
268static int rdftspeed=20;
917d2bb3
MN
269#if CONFIG_AVFILTER
270static char *vfilters = NULL;
271#endif
01310af2
FB
272
273/* current context */
274static int is_full_screen;
275static VideoState *cur_stream;
5e0257e3 276static int64_t audio_callback_time;
01310af2 277
2c676c33 278static AVPacket flush_pkt;
39c6a118 279
01310af2
FB
280#define FF_ALLOC_EVENT (SDL_USEREVENT)
281#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
638c9d91 282#define FF_QUIT_EVENT (SDL_USEREVENT + 2)
01310af2 283
2c676c33 284static SDL_Surface *screen;
01310af2 285
515bd00e
MN
286static int packet_queue_put(PacketQueue *q, AVPacket *pkt);
287
01310af2
FB
288/* packet queue handling */
289static void packet_queue_init(PacketQueue *q)
290{
291 memset(q, 0, sizeof(PacketQueue));
292 q->mutex = SDL_CreateMutex();
293 q->cond = SDL_CreateCond();
515bd00e 294 packet_queue_put(q, &flush_pkt);
01310af2
FB
295}
296
72ea344b 297static void packet_queue_flush(PacketQueue *q)
01310af2
FB
298{
299 AVPacketList *pkt, *pkt1;
300
687fae2b 301 SDL_LockMutex(q->mutex);
01310af2
FB
302 for(pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
303 pkt1 = pkt->next;
304 av_free_packet(&pkt->pkt);
da6c4573 305 av_freep(&pkt);
01310af2 306 }
72ea344b
FB
307 q->last_pkt = NULL;
308 q->first_pkt = NULL;
309 q->nb_packets = 0;
310 q->size = 0;
687fae2b 311 SDL_UnlockMutex(q->mutex);
72ea344b
FB
312}
313
314static void packet_queue_end(PacketQueue *q)
315{
316 packet_queue_flush(q);
01310af2
FB
317 SDL_DestroyMutex(q->mutex);
318 SDL_DestroyCond(q->cond);
319}
320
321static int packet_queue_put(PacketQueue *q, AVPacket *pkt)
322{
323 AVPacketList *pkt1;
324
72ea344b 325 /* duplicate the packet */
39c6a118 326 if (pkt!=&flush_pkt && av_dup_packet(pkt) < 0)
72ea344b 327 return -1;
115329f1 328
01310af2
FB
329 pkt1 = av_malloc(sizeof(AVPacketList));
330 if (!pkt1)
331 return -1;
332 pkt1->pkt = *pkt;
333 pkt1->next = NULL;
334
72ea344b 335
01310af2
FB
336 SDL_LockMutex(q->mutex);
337
338 if (!q->last_pkt)
339
340 q->first_pkt = pkt1;
341 else
342 q->last_pkt->next = pkt1;
343 q->last_pkt = pkt1;
344 q->nb_packets++;
7b776589 345 q->size += pkt1->pkt.size + sizeof(*pkt1);
01310af2
FB
346 /* XXX: should duplicate packet data in DV case */
347 SDL_CondSignal(q->cond);
348
349 SDL_UnlockMutex(q->mutex);
350 return 0;
351}
352
353static void packet_queue_abort(PacketQueue *q)
354{
355 SDL_LockMutex(q->mutex);
356
357 q->abort_request = 1;
115329f1 358
01310af2
FB
359 SDL_CondSignal(q->cond);
360
361 SDL_UnlockMutex(q->mutex);
362}
363
364/* return < 0 if aborted, 0 if no packet and > 0 if packet. */
365static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
366{
367 AVPacketList *pkt1;
368 int ret;
369
370 SDL_LockMutex(q->mutex);
371
372 for(;;) {
373 if (q->abort_request) {
374 ret = -1;
375 break;
376 }
115329f1 377
01310af2
FB
378 pkt1 = q->first_pkt;
379 if (pkt1) {
380 q->first_pkt = pkt1->next;
381 if (!q->first_pkt)
382 q->last_pkt = NULL;
383 q->nb_packets--;
7b776589 384 q->size -= pkt1->pkt.size + sizeof(*pkt1);
01310af2
FB
385 *pkt = pkt1->pkt;
386 av_free(pkt1);
387 ret = 1;
388 break;
389 } else if (!block) {
390 ret = 0;
391 break;
392 } else {
393 SDL_CondWait(q->cond, q->mutex);
394 }
395 }
396 SDL_UnlockMutex(q->mutex);
397 return ret;
398}
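/* Illustrative consumer-side sketch (matches how the decode threads below use
 * the queue): block until a packet or an abort arrives, and remember that the
 * returned packet may be the global flush_pkt sentinel used to flush codecs.
 *
 *     AVPacket pkt;
 *     if (packet_queue_get(&is->videoq, &pkt, 1) < 0)
 *         return -1;                      // queue aborted, stop decoding
 *     if (pkt.data == flush_pkt.data)
 *         avcodec_flush_buffers(is->video_st->codec);
 *     ...decode pkt...
 *     av_free_packet(&pkt);
 */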
399
115329f1 400static inline void fill_rectangle(SDL_Surface *screen,
01310af2
FB
401 int x, int y, int w, int h, int color)
402{
403 SDL_Rect rect;
404 rect.x = x;
405 rect.y = y;
406 rect.w = w;
407 rect.h = h;
408 SDL_FillRect(screen, &rect, color);
409}
410
411#if 0
412/* draw only the border of a rectangle */
413void fill_border(VideoState *s, int x, int y, int w, int h, int color)
414{
415 int w1, w2, h1, h2;
416
417 /* fill the background */
418 w1 = x;
419 if (w1 < 0)
420 w1 = 0;
421 w2 = s->width - (x + w);
422 if (w2 < 0)
423 w2 = 0;
424 h1 = y;
425 if (h1 < 0)
426 h1 = 0;
427 h2 = s->height - (y + h);
428 if (h2 < 0)
429 h2 = 0;
115329f1
DB
430 fill_rectangle(screen,
431 s->xleft, s->ytop,
432 w1, s->height,
01310af2 433 color);
115329f1
DB
434 fill_rectangle(screen,
435 s->xleft + s->width - w2, s->ytop,
436 w2, s->height,
01310af2 437 color);
115329f1
DB
438 fill_rectangle(screen,
439 s->xleft + w1, s->ytop,
440 s->width - w1 - w2, h1,
01310af2 441 color);
115329f1 442 fill_rectangle(screen,
01310af2
FB
443 s->xleft + w1, s->ytop + s->height - h2,
444 s->width - w1 - w2, h2,
445 color);
446}
447#endif
448
72ce053b
IC
449#define ALPHA_BLEND(a, oldp, newp, s)\
450((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s))
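/* With s=0 this is a plain alpha blend: a=0 keeps oldp, a=255 gives newp, and
   a=128 lands roughly halfway. The shift s compensates when newp is the sum of
   2^s subsampled chroma values, as in blend_subrect() below. */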
451
452#define RGBA_IN(r, g, b, a, s)\
453{\
454 unsigned int v = ((const uint32_t *)(s))[0];\
455 a = (v >> 24) & 0xff;\
456 r = (v >> 16) & 0xff;\
457 g = (v >> 8) & 0xff;\
458 b = v & 0xff;\
459}
460
461#define YUVA_IN(y, u, v, a, s, pal)\
462{\
57cf99f2 463 unsigned int val = ((const uint32_t *)(pal))[*(const uint8_t*)(s)];\
72ce053b
IC
464 a = (val >> 24) & 0xff;\
465 y = (val >> 16) & 0xff;\
466 u = (val >> 8) & 0xff;\
467 v = val & 0xff;\
468}
469
470#define YUVA_OUT(d, y, u, v, a)\
471{\
472 ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\
473}
474
475
476#define BPP 1
477
0a8cd696 478static void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh)
72ce053b
IC
479{
480 int wrap, wrap3, width2, skip2;
481 int y, u, v, a, u1, v1, a1, w, h;
482 uint8_t *lum, *cb, *cr;
483 const uint8_t *p;
484 const uint32_t *pal;
9cb5a11e
RD
485 int dstx, dsty, dstw, dsth;
486
7cf9c6ae
MN
487 dstw = av_clip(rect->w, 0, imgw);
488 dsth = av_clip(rect->h, 0, imgh);
489 dstx = av_clip(rect->x, 0, imgw - dstw);
490 dsty = av_clip(rect->y, 0, imgh - dsth);
9cb5a11e
RD
491 lum = dst->data[0] + dsty * dst->linesize[0];
492 cb = dst->data[1] + (dsty >> 1) * dst->linesize[1];
493 cr = dst->data[2] + (dsty >> 1) * dst->linesize[2];
494
f54b31b9 495 width2 = ((dstw + 1) >> 1) + (dstx & ~dstw & 1);
9cb5a11e 496 skip2 = dstx >> 1;
72ce053b 497 wrap = dst->linesize[0];
25b4c651
MN
498 wrap3 = rect->pict.linesize[0];
499 p = rect->pict.data[0];
500 pal = (const uint32_t *)rect->pict.data[1]; /* Now in YCrCb! */
115329f1 501
9cb5a11e
RD
502 if (dsty & 1) {
503 lum += dstx;
72ce053b
IC
504 cb += skip2;
505 cr += skip2;
115329f1 506
9cb5a11e 507 if (dstx & 1) {
72ce053b
IC
508 YUVA_IN(y, u, v, a, p, pal);
509 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
510 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
511 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
512 cb++;
513 cr++;
514 lum++;
515 p += BPP;
516 }
9cb5a11e 517 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
518 YUVA_IN(y, u, v, a, p, pal);
519 u1 = u;
520 v1 = v;
521 a1 = a;
522 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
523
524 YUVA_IN(y, u, v, a, p + BPP, pal);
525 u1 += u;
526 v1 += v;
527 a1 += a;
528 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
529 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
530 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
531 cb++;
532 cr++;
533 p += 2 * BPP;
534 lum += 2;
535 }
536 if (w) {
537 YUVA_IN(y, u, v, a, p, pal);
538 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
539 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
540 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
676ef505
BA
541 p++;
542 lum++;
72ce053b 543 }
4606a059
BA
544 p += wrap3 - dstw * BPP;
545 lum += wrap - dstw - dstx;
72ce053b
IC
546 cb += dst->linesize[1] - width2 - skip2;
547 cr += dst->linesize[2] - width2 - skip2;
548 }
9cb5a11e
RD
549 for(h = dsth - (dsty & 1); h >= 2; h -= 2) {
550 lum += dstx;
72ce053b
IC
551 cb += skip2;
552 cr += skip2;
115329f1 553
9cb5a11e 554 if (dstx & 1) {
72ce053b
IC
555 YUVA_IN(y, u, v, a, p, pal);
556 u1 = u;
557 v1 = v;
558 a1 = a;
559 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
560 p += wrap3;
561 lum += wrap;
562 YUVA_IN(y, u, v, a, p, pal);
563 u1 += u;
564 v1 += v;
565 a1 += a;
566 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
567 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
568 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
569 cb++;
570 cr++;
571 p += -wrap3 + BPP;
572 lum += -wrap + 1;
573 }
9cb5a11e 574 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
575 YUVA_IN(y, u, v, a, p, pal);
576 u1 = u;
577 v1 = v;
578 a1 = a;
579 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
580
f8ca63e8 581 YUVA_IN(y, u, v, a, p + BPP, pal);
72ce053b
IC
582 u1 += u;
583 v1 += v;
584 a1 += a;
585 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
586 p += wrap3;
587 lum += wrap;
588
589 YUVA_IN(y, u, v, a, p, pal);
590 u1 += u;
591 v1 += v;
592 a1 += a;
593 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
594
f8ca63e8 595 YUVA_IN(y, u, v, a, p + BPP, pal);
72ce053b
IC
596 u1 += u;
597 v1 += v;
598 a1 += a;
599 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
600
601 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
602 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);
603
604 cb++;
605 cr++;
606 p += -wrap3 + 2 * BPP;
607 lum += -wrap + 2;
608 }
609 if (w) {
610 YUVA_IN(y, u, v, a, p, pal);
611 u1 = u;
612 v1 = v;
613 a1 = a;
614 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
615 p += wrap3;
616 lum += wrap;
617 YUVA_IN(y, u, v, a, p, pal);
618 u1 += u;
619 v1 += v;
620 a1 += a;
621 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
622 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
623 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
624 cb++;
625 cr++;
626 p += -wrap3 + BPP;
627 lum += -wrap + 1;
628 }
9cb5a11e
RD
629 p += wrap3 + (wrap3 - dstw * BPP);
630 lum += wrap + (wrap - dstw - dstx);
72ce053b
IC
631 cb += dst->linesize[1] - width2 - skip2;
632 cr += dst->linesize[2] - width2 - skip2;
633 }
634 /* handle odd height */
635 if (h) {
9cb5a11e 636 lum += dstx;
72ce053b
IC
637 cb += skip2;
638 cr += skip2;
115329f1 639
9cb5a11e 640 if (dstx & 1) {
72ce053b
IC
641 YUVA_IN(y, u, v, a, p, pal);
642 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
643 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
644 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
645 cb++;
646 cr++;
647 lum++;
648 p += BPP;
649 }
9cb5a11e 650 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
651 YUVA_IN(y, u, v, a, p, pal);
652 u1 = u;
653 v1 = v;
654 a1 = a;
655 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
656
657 YUVA_IN(y, u, v, a, p + BPP, pal);
658 u1 += u;
659 v1 += v;
660 a1 += a;
661 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
662 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u, 1);
663 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v, 1);
664 cb++;
665 cr++;
666 p += 2 * BPP;
667 lum += 2;
668 }
669 if (w) {
670 YUVA_IN(y, u, v, a, p, pal);
671 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
672 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
673 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
674 }
675 }
676}
677
678static void free_subpicture(SubPicture *sp)
679{
e1d7c883 680 avsubtitle_free(&sp->sub);
72ce053b
IC
681}
682
01310af2
FB
683static void video_image_display(VideoState *is)
684{
685 VideoPicture *vp;
72ce053b
IC
686 SubPicture *sp;
687 AVPicture pict;
01310af2
FB
688 float aspect_ratio;
689 int width, height, x, y;
690 SDL_Rect rect;
72ce053b 691 int i;
01310af2
FB
692
693 vp = &is->pictq[is->pictq_rindex];
694 if (vp->bmp) {
917d2bb3
MN
695#if CONFIG_AVFILTER
696 if (vp->picref->pixel_aspect.num == 0)
697 aspect_ratio = 0;
698 else
699 aspect_ratio = av_q2d(vp->picref->pixel_aspect);
700#else
701
01310af2 702 /* XXX: use variable in the frame */
c30a4489
AJ
703 if (is->video_st->sample_aspect_ratio.num)
704 aspect_ratio = av_q2d(is->video_st->sample_aspect_ratio);
705 else if (is->video_st->codec->sample_aspect_ratio.num)
706 aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio);
72ea344b 707 else
c30a4489 708 aspect_ratio = 0;
917d2bb3 709#endif
01310af2 710 if (aspect_ratio <= 0.0)
c30a4489 711 aspect_ratio = 1.0;
917d2bb3 712 aspect_ratio *= (float)vp->width / (float)vp->height;
01310af2
FB
713 /* if an active format is indicated, then it overrides the
714 mpeg format */
715#if 0
01f4895c
MN
716 if (is->video_st->codec->dtg_active_format != is->dtg_active_format) {
717 is->dtg_active_format = is->video_st->codec->dtg_active_format;
01310af2
FB
718 printf("dtg_active_format=%d\n", is->dtg_active_format);
719 }
720#endif
721#if 0
01f4895c 722 switch(is->video_st->codec->dtg_active_format) {
01310af2
FB
723 case FF_DTG_AFD_SAME:
724 default:
725 /* nothing to do */
726 break;
727 case FF_DTG_AFD_4_3:
728 aspect_ratio = 4.0 / 3.0;
729 break;
730 case FF_DTG_AFD_16_9:
731 aspect_ratio = 16.0 / 9.0;
732 break;
733 case FF_DTG_AFD_14_9:
734 aspect_ratio = 14.0 / 9.0;
735 break;
736 case FF_DTG_AFD_4_3_SP_14_9:
737 aspect_ratio = 14.0 / 9.0;
738 break;
739 case FF_DTG_AFD_16_9_SP_14_9:
740 aspect_ratio = 14.0 / 9.0;
741 break;
742 case FF_DTG_AFD_SP_4_3:
743 aspect_ratio = 4.0 / 3.0;
744 break;
745 }
746#endif
747
72ce053b
IC
748 if (is->subtitle_st)
749 {
750 if (is->subpq_size > 0)
751 {
752 sp = &is->subpq[is->subpq_rindex];
753
754 if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000))
755 {
756 SDL_LockYUVOverlay (vp->bmp);
757
758 pict.data[0] = vp->bmp->pixels[0];
759 pict.data[1] = vp->bmp->pixels[2];
760 pict.data[2] = vp->bmp->pixels[1];
761
762 pict.linesize[0] = vp->bmp->pitches[0];
763 pict.linesize[1] = vp->bmp->pitches[2];
764 pict.linesize[2] = vp->bmp->pitches[1];
765
766 for (i = 0; i < sp->sub.num_rects; i++)
db4fac64 767 blend_subrect(&pict, sp->sub.rects[i],
0a8cd696 768 vp->bmp->w, vp->bmp->h);
72ce053b
IC
769
770 SDL_UnlockYUVOverlay (vp->bmp);
771 }
772 }
773 }
774
775
01310af2
FB
 776 /* XXX: we assume the screen has a 1.0 pixel ratio */
777 height = is->height;
bb6c34e5 778 width = ((int)rint(height * aspect_ratio)) & ~1;
01310af2
FB
779 if (width > is->width) {
780 width = is->width;
bb6c34e5 781 height = ((int)rint(width / aspect_ratio)) & ~1;
01310af2
FB
782 }
783 x = (is->width - width) / 2;
784 y = (is->height - height) / 2;
785 if (!is->no_background) {
786 /* fill the background */
787 // fill_border(is, x, y, width, height, QERGB(0x00, 0x00, 0x00));
788 } else {
789 is->no_background = 0;
790 }
791 rect.x = is->xleft + x;
2f6547fb 792 rect.y = is->ytop + y;
01310af2
FB
793 rect.w = width;
794 rect.h = height;
795 SDL_DisplayYUVOverlay(vp->bmp, &rect);
796 } else {
797#if 0
115329f1
DB
798 fill_rectangle(screen,
799 is->xleft, is->ytop, is->width, is->height,
01310af2
FB
800 QERGB(0x00, 0x00, 0x00));
801#endif
802 }
803}
804
805static inline int compute_mod(int a, int b)
806{
807 a = a % b;
115329f1 808 if (a >= 0)
01310af2
FB
809 return a;
810 else
811 return a + b;
812}
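/* Unlike C's %, the result is always in [0, b): e.g. compute_mod(-3, 8) == 5.
   Used to wrap indices into the circular sample_array. */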
813
814static void video_audio_display(VideoState *s)
815{
816 int i, i_start, x, y1, y, ys, delay, n, nb_display_channels;
817 int ch, channels, h, h2, bgcolor, fgcolor;
818 int16_t time_diff;
4c7c7645
MN
819 int rdft_bits, nb_freq;
820
821 for(rdft_bits=1; (1<<rdft_bits)<2*s->height; rdft_bits++)
822 ;
823 nb_freq= 1<<(rdft_bits-1);
115329f1 824
01310af2 825 /* compute display index : center on currently output samples */
01f4895c 826 channels = s->audio_st->codec->channels;
01310af2 827 nb_display_channels = channels;
5e0257e3 828 if (!s->paused) {
4c7c7645 829 int data_used= s->show_audio==1 ? s->width : (2*nb_freq);
5e0257e3
FB
830 n = 2 * channels;
831 delay = audio_write_get_buf_size(s);
832 delay /= n;
115329f1 833
5e0257e3
FB
834 /* to be more precise, we take into account the time spent since
835 the last buffer computation */
836 if (audio_callback_time) {
837 time_diff = av_gettime() - audio_callback_time;
122dcdcb 838 delay -= (time_diff * s->audio_st->codec->sample_rate) / 1000000;
5e0257e3 839 }
115329f1 840
122dcdcb 841 delay += 2*data_used;
4c7c7645
MN
842 if (delay < data_used)
843 delay = data_used;
ac50bcc8
MN
844
845 i_start= x = compute_mod(s->sample_array_index - delay * channels, SAMPLE_ARRAY_SIZE);
12eeda34 846 if(s->show_audio==1){
6c7165c7
JM
847 h= INT_MIN;
848 for(i=0; i<1000; i+=channels){
849 int idx= (SAMPLE_ARRAY_SIZE + x - i) % SAMPLE_ARRAY_SIZE;
850 int a= s->sample_array[idx];
851 int b= s->sample_array[(idx + 4*channels)%SAMPLE_ARRAY_SIZE];
852 int c= s->sample_array[(idx + 5*channels)%SAMPLE_ARRAY_SIZE];
853 int d= s->sample_array[(idx + 9*channels)%SAMPLE_ARRAY_SIZE];
854 int score= a-d;
855 if(h<score && (b^c)<0){
856 h= score;
857 i_start= idx;
858 }
ac50bcc8
MN
859 }
860 }
861
5e0257e3
FB
862 s->last_i_start = i_start;
863 } else {
864 i_start = s->last_i_start;
01310af2
FB
865 }
866
01310af2 867 bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
12eeda34 868 if(s->show_audio==1){
6c7165c7
JM
869 fill_rectangle(screen,
870 s->xleft, s->ytop, s->width, s->height,
871 bgcolor);
872
873 fgcolor = SDL_MapRGB(screen->format, 0xff, 0xff, 0xff);
874
875 /* total height for one channel */
876 h = s->height / nb_display_channels;
877 /* graph height / 2 */
878 h2 = (h * 9) / 20;
879 for(ch = 0;ch < nb_display_channels; ch++) {
880 i = i_start + ch;
881 y1 = s->ytop + ch * h + (h / 2); /* position of center line */
882 for(x = 0; x < s->width; x++) {
883 y = (s->sample_array[i] * h2) >> 15;
884 if (y < 0) {
885 y = -y;
886 ys = y1 - y;
887 } else {
888 ys = y1;
889 }
890 fill_rectangle(screen,
891 s->xleft + x, ys, 1, y,
892 fgcolor);
893 i += channels;
894 if (i >= SAMPLE_ARRAY_SIZE)
895 i -= SAMPLE_ARRAY_SIZE;
01310af2 896 }
01310af2 897 }
01310af2 898
6c7165c7 899 fgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0xff);
01310af2 900
6c7165c7
JM
901 for(ch = 1;ch < nb_display_channels; ch++) {
902 y = s->ytop + ch * h;
903 fill_rectangle(screen,
904 s->xleft, y, s->width, 1,
905 fgcolor);
906 }
907 SDL_UpdateRect(screen, s->xleft, s->ytop, s->width, s->height);
12eeda34 908 }else{
12eeda34 909 nb_display_channels= FFMIN(nb_display_channels, 2);
12eeda34 910 if(rdft_bits != s->rdft_bits){
166621ab 911 av_rdft_end(s->rdft);
7dbbf6a1 912 av_free(s->rdft_data);
166621ab 913 s->rdft = av_rdft_init(rdft_bits, DFT_R2C);
12eeda34 914 s->rdft_bits= rdft_bits;
7dbbf6a1 915 s->rdft_data= av_malloc(4*nb_freq*sizeof(*s->rdft_data));
12eeda34 916 }
12eeda34 917 {
7dbbf6a1 918 FFTSample *data[2];
12eeda34 919 for(ch = 0;ch < nb_display_channels; ch++) {
7dbbf6a1 920 data[ch] = s->rdft_data + 2*nb_freq*ch;
12eeda34
MN
921 i = i_start + ch;
922 for(x = 0; x < 2*nb_freq; x++) {
923 double w= (x-nb_freq)*(1.0/nb_freq);
924 data[ch][x]= s->sample_array[i]*(1.0-w*w);
925 i += channels;
926 if (i >= SAMPLE_ARRAY_SIZE)
927 i -= SAMPLE_ARRAY_SIZE;
928 }
166621ab 929 av_rdft_calc(s->rdft, data[ch]);
12eeda34
MN
930 }
 931 //Least efficient way to do this; we should of course access it directly, but it's more than fast enough
092421cf 932 for(y=0; y<s->height; y++){
12eeda34
MN
933 double w= 1/sqrt(nb_freq);
934 int a= sqrt(w*sqrt(data[0][2*y+0]*data[0][2*y+0] + data[0][2*y+1]*data[0][2*y+1]));
00f72577
JM
935 int b= (nb_display_channels == 2 ) ? sqrt(w*sqrt(data[1][2*y+0]*data[1][2*y+0]
936 + data[1][2*y+1]*data[1][2*y+1])) : a;
12eeda34
MN
937 a= FFMIN(a,255);
938 b= FFMIN(b,255);
939 fgcolor = SDL_MapRGB(screen->format, a, b, (a+b)/2);
940
941 fill_rectangle(screen,
942 s->xpos, s->height-y, 1, 1,
943 fgcolor);
944 }
945 }
946 SDL_UpdateRect(screen, s->xpos, s->ytop, 1, s->height);
947 s->xpos++;
948 if(s->xpos >= s->width)
949 s->xpos= s->xleft;
950 }
01310af2
FB
951}
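/* show_audio == 1 draws the raw waveform of each channel; any other non-zero
   value draws one RDFT (spectrum) column per refresh, scrolling with s->xpos. */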
952
990c8438
MN
953static int video_open(VideoState *is){
954 int flags = SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL;
955 int w,h;
956
fb84155b
MN
957 if(is_full_screen) flags |= SDL_FULLSCREEN;
958 else flags |= SDL_RESIZABLE;
959
990c8438
MN
960 if (is_full_screen && fs_screen_width) {
961 w = fs_screen_width;
962 h = fs_screen_height;
fb84155b
MN
963 } else if(!is_full_screen && screen_width){
964 w = screen_width;
965 h = screen_height;
917d2bb3
MN
966#if CONFIG_AVFILTER
967 }else if (is->out_video_filter && is->out_video_filter->inputs[0]){
968 w = is->out_video_filter->inputs[0]->w;
969 h = is->out_video_filter->inputs[0]->h;
970#else
fb84155b
MN
971 }else if (is->video_st && is->video_st->codec->width){
972 w = is->video_st->codec->width;
973 h = is->video_st->codec->height;
917d2bb3 974#endif
990c8438 975 } else {
fb84155b
MN
976 w = 640;
977 h = 480;
990c8438 978 }
d3d7b12e
MN
979 if(screen && is->width == screen->w && screen->w == w
980 && is->height== screen->h && screen->h == h)
981 return 0;
982
c97f5402 983#ifndef __APPLE__
990c8438
MN
984 screen = SDL_SetVideoMode(w, h, 0, flags);
985#else
986 /* setting bits_per_pixel = 0 or 32 causes blank video on OS X */
987 screen = SDL_SetVideoMode(w, h, 24, flags);
988#endif
989 if (!screen) {
990 fprintf(stderr, "SDL: could not set video mode - exiting\n");
991 return -1;
992 }
076db5ed
RK
993 if (!window_title)
994 window_title = input_filename;
995 SDL_WM_SetCaption(window_title, window_title);
990c8438
MN
996
997 is->width = screen->w;
998 is->height = screen->h;
999
1000 return 0;
1001}
8c982c5d 1002
01310af2
FB
1003/* display the current picture, if any */
1004static void video_display(VideoState *is)
1005{
8c982c5d
MN
1006 if(!screen)
1007 video_open(cur_stream);
115329f1 1008 if (is->audio_st && is->show_audio)
01310af2
FB
1009 video_audio_display(is);
1010 else if (is->video_st)
1011 video_image_display(is);
1012}
1013
d38c9e7a 1014static int refresh_thread(void *opaque)
01310af2 1015{
d38c9e7a
MN
1016 VideoState *is= opaque;
1017 while(!is->abort_request){
01310af2
FB
1018 SDL_Event event;
1019 event.type = FF_REFRESH_EVENT;
1020 event.user.data1 = opaque;
d38c9e7a
MN
1021 if(!is->refresh){
1022 is->refresh=1;
01310af2 1023 SDL_PushEvent(&event);
d38c9e7a 1024 }
2b3da32f 1025 usleep(is->audio_st && is->show_audio ? rdftspeed*1000 : 5000); //FIXME ideally we should wait the correct time, but SDL's event passing is so slow it would be silly
d38c9e7a
MN
1026 }
1027 return 0;
01310af2
FB
1028}
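/* A new FF_REFRESH_EVENT is pushed only while is->refresh is clear, so events
   cannot pile up faster than the event loop handles them; the poll interval is
   5 ms for video, or rdftspeed ms (default 20) when only the audio
   visualization is displayed. */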
1029
638c9d91
FB
1030/* get the current audio clock value */
1031static double get_audio_clock(VideoState *is)
1032{
1033 double pts;
1034 int hw_buf_size, bytes_per_sec;
1035 pts = is->audio_clock;
1036 hw_buf_size = audio_write_get_buf_size(is);
1037 bytes_per_sec = 0;
1038 if (is->audio_st) {
115329f1 1039 bytes_per_sec = is->audio_st->codec->sample_rate *
01f4895c 1040 2 * is->audio_st->codec->channels;
638c9d91
FB
1041 }
1042 if (bytes_per_sec)
1043 pts -= (double)hw_buf_size / bytes_per_sec;
1044 return pts;
1045}
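/* i.e. the clock is the pts of the last decoded audio minus the playing time
   still sitting in the output buffer. Example: 8192 buffered bytes at
   44100 Hz stereo S16 (176400 bytes/s) pull the clock back by ~46 ms. */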
1046
1047/* get the current video clock value */
1048static double get_video_clock(VideoState *is)
1049{
04108619 1050 if (is->paused) {
41a4cd0c 1051 return is->video_current_pts;
72ea344b 1052 } else {
68aefbe8 1053 return is->video_current_pts_drift + av_gettime() / 1000000.0;
72ea344b 1054 }
638c9d91
FB
1055}
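/* video_current_pts_drift stores (pts - wall clock) captured when the frame
   was displayed, so drift + av_gettime() keeps the clock advancing between
   refreshes without touching the stored pts. */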
1056
1057/* get the current external clock value */
1058static double get_external_clock(VideoState *is)
1059{
1060 int64_t ti;
1061 ti = av_gettime();
1062 return is->external_clock + ((ti - is->external_clock_time) * 1e-6);
1063}
1064
1065/* get the current master clock value */
1066static double get_master_clock(VideoState *is)
1067{
1068 double val;
1069
72ea344b
FB
1070 if (is->av_sync_type == AV_SYNC_VIDEO_MASTER) {
1071 if (is->video_st)
1072 val = get_video_clock(is);
1073 else
1074 val = get_audio_clock(is);
1075 } else if (is->av_sync_type == AV_SYNC_AUDIO_MASTER) {
1076 if (is->audio_st)
1077 val = get_audio_clock(is);
1078 else
1079 val = get_video_clock(is);
1080 } else {
638c9d91 1081 val = get_external_clock(is);
72ea344b 1082 }
638c9d91
FB
1083 return val;
1084}
1085
72ea344b 1086/* seek in the stream */
2ef46053 1087static void stream_seek(VideoState *is, int64_t pos, int64_t rel, int seek_by_bytes)
72ea344b 1088{
687fae2b
IW
1089 if (!is->seek_req) {
1090 is->seek_pos = pos;
4ed29207 1091 is->seek_rel = rel;
3890dd3a 1092 is->seek_flags &= ~AVSEEK_FLAG_BYTE;
94b594c6
SH
1093 if (seek_by_bytes)
1094 is->seek_flags |= AVSEEK_FLAG_BYTE;
687fae2b
IW
1095 is->seek_req = 1;
1096 }
72ea344b
FB
1097}
1098
1099/* pause or resume the video */
1100static void stream_pause(VideoState *is)
1101{
68aefbe8
MN
1102 if (is->paused) {
1103 is->frame_timer += av_gettime() / 1000000.0 + is->video_current_pts_drift - is->video_current_pts;
f5668147 1104 if(is->read_pause_return != AVERROR(ENOSYS)){
68aefbe8 1105 is->video_current_pts = is->video_current_pts_drift + av_gettime() / 1000000.0;
f5668147 1106 }
68aefbe8 1107 is->video_current_pts_drift = is->video_current_pts - av_gettime() / 1000000.0;
72ea344b 1108 }
68aefbe8 1109 is->paused = !is->paused;
72ea344b
FB
1110}
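/* On resume, frame_timer is advanced by the time spent paused so the next
   frame is not counted as late, and video_current_pts_drift is rebased to the
   new wall clock for the same reason. */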
1111
d38c9e7a 1112static double compute_target_time(double frame_current_pts, VideoState *is)
49410784 1113{
d38c9e7a 1114 double delay, sync_threshold, diff;
49410784
TB
1115
1116 /* compute nominal delay */
1117 delay = frame_current_pts - is->frame_last_pts;
1118 if (delay <= 0 || delay >= 10.0) {
1119 /* if incorrect delay, use previous one */
1120 delay = is->frame_last_delay;
443658fd 1121 } else {
712de377 1122 is->frame_last_delay = delay;
443658fd 1123 }
49410784
TB
1124 is->frame_last_pts = frame_current_pts;
1125
1126 /* update delay to follow master synchronisation source */
1127 if (((is->av_sync_type == AV_SYNC_AUDIO_MASTER && is->audio_st) ||
1128 is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
1129 /* if video is slave, we try to correct big delays by
1130 duplicating or deleting a frame */
f04c6e35 1131 diff = get_video_clock(is) - get_master_clock(is);
49410784
TB
1132
1133 /* skip or repeat frame. We take into account the
1134 delay to compute the threshold. I still don't know
1135 if it is the best guess */
1136 sync_threshold = FFMAX(AV_SYNC_THRESHOLD, delay);
1137 if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
1138 if (diff <= -sync_threshold)
1139 delay = 0;
1140 else if (diff >= sync_threshold)
1141 delay = 2 * delay;
1142 }
1143 }
49410784 1144 is->frame_timer += delay;
eecc17a7
TB
1145#if defined(DEBUG_SYNC)
1146 printf("video: delay=%0.3f actual_delay=%0.3f pts=%0.3f A-V=%f\n",
1147 delay, actual_delay, frame_current_pts, -diff);
1148#endif
1149
d38c9e7a 1150 return is->frame_timer;
49410784
TB
1151}
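/* Example: at 25 fps the nominal delay is 40 ms, so sync_threshold is 40 ms;
   if the video clock lags the master clock by 100 ms, diff <= -sync_threshold
   and delay drops to 0 (display as soon as possible); if video leads by
   100 ms, delay doubles to 80 ms to let the master catch up. */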
1152
01310af2
FB
1153/* called to display each frame */
1154static void video_refresh_timer(void *opaque)
1155{
1156 VideoState *is = opaque;
1157 VideoPicture *vp;
638c9d91 1158
72ce053b 1159 SubPicture *sp, *sp2;
01310af2
FB
1160
1161 if (is->video_st) {
d38c9e7a 1162retry:
01310af2 1163 if (is->pictq_size == 0) {
d38c9e7a 1164 //nothing to do, no picture to display in the queue
01310af2 1165 } else {
d38c9e7a
MN
1166 double time= av_gettime()/1000000.0;
1167 double next_target;
638c9d91 1168 /* dequeue the picture */
01310af2 1169 vp = &is->pictq[is->pictq_rindex];
638c9d91 1170
d38c9e7a
MN
1171 if(time < vp->target_clock)
1172 return;
638c9d91
FB
1173 /* update current video pts */
1174 is->video_current_pts = vp->pts;
d38c9e7a 1175 is->video_current_pts_drift = is->video_current_pts - time;
a3cc2160 1176 is->video_current_pos = vp->pos;
d38c9e7a
MN
1177 if(is->pictq_size > 1){
1178 VideoPicture *nextvp= &is->pictq[(is->pictq_rindex+1)%VIDEO_PICTURE_QUEUE_SIZE];
1179 assert(nextvp->target_clock >= vp->target_clock);
1180 next_target= nextvp->target_clock;
1181 }else{
1182 next_target= vp->target_clock + is->video_clock - vp->pts; //FIXME pass durations cleanly
1183 }
1184 if(framedrop && time > next_target){
1185 is->skip_frames *= 1.0 + FRAME_SKIP_FACTOR;
1186 if(is->pictq_size > 1 || time > next_target + 0.5){
1187 /* update queue size and signal for next picture */
1188 if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
1189 is->pictq_rindex = 0;
1190
1191 SDL_LockMutex(is->pictq_mutex);
1192 is->pictq_size--;
1193 SDL_CondSignal(is->pictq_cond);
1194 SDL_UnlockMutex(is->pictq_mutex);
1195 goto retry;
1196 }
1197 }
638c9d91 1198
72ce053b
IC
1199 if(is->subtitle_st) {
1200 if (is->subtitle_stream_changed) {
1201 SDL_LockMutex(is->subpq_mutex);
115329f1 1202
72ce053b
IC
1203 while (is->subpq_size) {
1204 free_subpicture(&is->subpq[is->subpq_rindex]);
115329f1 1205
72ce053b
IC
1206 /* update queue size and signal for next picture */
1207 if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1208 is->subpq_rindex = 0;
115329f1 1209
72ce053b
IC
1210 is->subpq_size--;
1211 }
1212 is->subtitle_stream_changed = 0;
1213
1214 SDL_CondSignal(is->subpq_cond);
1215 SDL_UnlockMutex(is->subpq_mutex);
1216 } else {
1217 if (is->subpq_size > 0) {
1218 sp = &is->subpq[is->subpq_rindex];
1219
1220 if (is->subpq_size > 1)
1221 sp2 = &is->subpq[(is->subpq_rindex + 1) % SUBPICTURE_QUEUE_SIZE];
1222 else
1223 sp2 = NULL;
1224
1225 if ((is->video_current_pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
1226 || (sp2 && is->video_current_pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
1227 {
1228 free_subpicture(sp);
1229
1230 /* update queue size and signal for next picture */
1231 if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1232 is->subpq_rindex = 0;
1233
1234 SDL_LockMutex(is->subpq_mutex);
1235 is->subpq_size--;
1236 SDL_CondSignal(is->subpq_cond);
1237 SDL_UnlockMutex(is->subpq_mutex);
1238 }
1239 }
1240 }
1241 }
1242
01310af2
FB
1243 /* display picture */
1244 video_display(is);
115329f1 1245
01310af2
FB
1246 /* update queue size and signal for next picture */
1247 if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
1248 is->pictq_rindex = 0;
115329f1 1249
01310af2
FB
1250 SDL_LockMutex(is->pictq_mutex);
1251 is->pictq_size--;
1252 SDL_CondSignal(is->pictq_cond);
1253 SDL_UnlockMutex(is->pictq_mutex);
1254 }
1255 } else if (is->audio_st) {
1256 /* draw the next audio frame */
1257
01310af2
FB
1258 /* if only audio stream, then display the audio bars (better
1259 than nothing, just to test the implementation */
115329f1 1260
01310af2
FB
1261 /* display picture */
1262 video_display(is);
01310af2
FB
1263 }
1264 if (show_status) {
1265 static int64_t last_time;
1266 int64_t cur_time;
72ce053b 1267 int aqsize, vqsize, sqsize;
638c9d91 1268 double av_diff;
115329f1 1269
01310af2 1270 cur_time = av_gettime();
1e1a0b18 1271 if (!last_time || (cur_time - last_time) >= 30000) {
01310af2
FB
1272 aqsize = 0;
1273 vqsize = 0;
72ce053b 1274 sqsize = 0;
01310af2
FB
1275 if (is->audio_st)
1276 aqsize = is->audioq.size;
1277 if (is->video_st)
1278 vqsize = is->videoq.size;
72ce053b
IC
1279 if (is->subtitle_st)
1280 sqsize = is->subtitleq.size;
638c9d91
FB
1281 av_diff = 0;
1282 if (is->audio_st && is->video_st)
1283 av_diff = get_audio_clock(is) - get_video_clock(is);
382f3a5b
MN
1284 printf("%7.2f A-V:%7.3f s:%3.1f aq=%5dKB vq=%5dKB sq=%5dB f=%"PRId64"/%"PRId64" \r",
1285 get_master_clock(is), av_diff, FFMAX(is->skip_frames-1, 0), aqsize / 1024, vqsize / 1024, sqsize, is->faulty_dts, is->faulty_pts);
01310af2
FB
1286 fflush(stdout);
1287 last_time = cur_time;
1288 }
1289 }
1290}
1291
1292/* allocate a picture (needs to do that in main thread to avoid
 1293 potential locking problems) */
1294static void alloc_picture(void *opaque)
1295{
1296 VideoState *is = opaque;
1297 VideoPicture *vp;
01310af2
FB
1298
1299 vp = &is->pictq[is->pictq_windex];
1300
1301 if (vp->bmp)
1302 SDL_FreeYUVOverlay(vp->bmp);
1303
917d2bb3
MN
1304#if CONFIG_AVFILTER
1305 if (vp->picref)
1306 avfilter_unref_pic(vp->picref);
1307 vp->picref = NULL;
1308
1309 vp->width = is->out_video_filter->inputs[0]->w;
1310 vp->height = is->out_video_filter->inputs[0]->h;
1311 vp->pix_fmt = is->out_video_filter->inputs[0]->format;
1312#else
1313 vp->width = is->video_st->codec->width;
1314 vp->height = is->video_st->codec->height;
1315 vp->pix_fmt = is->video_st->codec->pix_fmt;
1316#endif
1317
1318 vp->bmp = SDL_CreateYUVOverlay(vp->width, vp->height,
115329f1 1319 SDL_YV12_OVERLAY,
61890b02 1320 screen);
01310af2
FB
1321
1322 SDL_LockMutex(is->pictq_mutex);
1323 vp->allocated = 1;
1324 SDL_CondSignal(is->pictq_cond);
1325 SDL_UnlockMutex(is->pictq_mutex);
1326}
1327
267e9dfa
MN
1328/**
1329 *
1330 * @param pts the dts of the pkt / pts of the frame and guessed if not known
1331 */
1a620dd7 1332static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, int64_t pos)
01310af2
FB
1333{
1334 VideoPicture *vp;
1335 int dst_pix_fmt;
917d2bb3
MN
1336#if CONFIG_AVFILTER
1337 AVPicture pict_src;
1338#endif
01310af2
FB
1339 /* wait until we have space to put a new picture */
1340 SDL_LockMutex(is->pictq_mutex);
d38c9e7a
MN
1341
1342 if(is->pictq_size>=VIDEO_PICTURE_QUEUE_SIZE && !is->refresh)
1343 is->skip_frames= FFMAX(1.0 - FRAME_SKIP_FACTOR, is->skip_frames * (1.0-FRAME_SKIP_FACTOR));
1344
01310af2
FB
1345 while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE &&
1346 !is->videoq.abort_request) {
1347 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1348 }
1349 SDL_UnlockMutex(is->pictq_mutex);
115329f1 1350
01310af2
FB
1351 if (is->videoq.abort_request)
1352 return -1;
1353
1354 vp = &is->pictq[is->pictq_windex];
1355
1356 /* alloc or resize hardware picture buffer */
115329f1 1357 if (!vp->bmp ||
917d2bb3
MN
1358#if CONFIG_AVFILTER
1359 vp->width != is->out_video_filter->inputs[0]->w ||
1360 vp->height != is->out_video_filter->inputs[0]->h) {
1361#else
01f4895c
MN
1362 vp->width != is->video_st->codec->width ||
1363 vp->height != is->video_st->codec->height) {
917d2bb3 1364#endif
01310af2
FB
1365 SDL_Event event;
1366
1367 vp->allocated = 0;
1368
1369 /* the allocation must be done in the main thread to avoid
1370 locking problems */
1371 event.type = FF_ALLOC_EVENT;
1372 event.user.data1 = is;
1373 SDL_PushEvent(&event);
115329f1 1374
01310af2
FB
1375 /* wait until the picture is allocated */
1376 SDL_LockMutex(is->pictq_mutex);
1377 while (!vp->allocated && !is->videoq.abort_request) {
1378 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1379 }
1380 SDL_UnlockMutex(is->pictq_mutex);
1381
1382 if (is->videoq.abort_request)
1383 return -1;
1384 }
1385
638c9d91 1386 /* if the frame is not skipped, then display it */
01310af2 1387 if (vp->bmp) {
fbf1b885 1388 AVPicture pict;
917d2bb3
MN
1389#if CONFIG_AVFILTER
1390 if(vp->picref)
1391 avfilter_unref_pic(vp->picref);
1392 vp->picref = src_frame->opaque;
1393#endif
fbf1b885 1394
01310af2
FB
1395 /* get a pointer on the bitmap */
1396 SDL_LockYUVOverlay (vp->bmp);
1397
1398 dst_pix_fmt = PIX_FMT_YUV420P;
fbf1b885 1399 memset(&pict,0,sizeof(AVPicture));
01310af2
FB
1400 pict.data[0] = vp->bmp->pixels[0];
1401 pict.data[1] = vp->bmp->pixels[2];
1402 pict.data[2] = vp->bmp->pixels[1];
1403
1404 pict.linesize[0] = vp->bmp->pitches[0];
1405 pict.linesize[1] = vp->bmp->pitches[2];
1406 pict.linesize[2] = vp->bmp->pitches[1];
917d2bb3
MN
1407
1408#if CONFIG_AVFILTER
1409 pict_src.data[0] = src_frame->data[0];
1410 pict_src.data[1] = src_frame->data[1];
1411 pict_src.data[2] = src_frame->data[2];
1412
1413 pict_src.linesize[0] = src_frame->linesize[0];
1414 pict_src.linesize[1] = src_frame->linesize[1];
1415 pict_src.linesize[2] = src_frame->linesize[2];
1416
1417 //FIXME use direct rendering
1418 av_picture_copy(&pict, &pict_src,
1419 vp->pix_fmt, vp->width, vp->height);
1420#else
e43d7a18 1421 sws_flags = av_get_int(sws_opts, "sws_flags", NULL);
3ac56e28 1422 is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx,
917d2bb3 1423 vp->width, vp->height, vp->pix_fmt, vp->width, vp->height,
feb7bc67 1424 dst_pix_fmt, sws_flags, NULL, NULL, NULL);
3ac56e28 1425 if (is->img_convert_ctx == NULL) {
26ba8235
AB
1426 fprintf(stderr, "Cannot initialize the conversion context\n");
1427 exit(1);
1428 }
3ac56e28 1429 sws_scale(is->img_convert_ctx, src_frame->data, src_frame->linesize,
917d2bb3
MN
1430 0, vp->height, pict.data, pict.linesize);
1431#endif
01310af2
FB
1432 /* update the bitmap content */
1433 SDL_UnlockYUVOverlay(vp->bmp);
1434
638c9d91 1435 vp->pts = pts;
1a620dd7 1436 vp->pos = pos;
01310af2
FB
1437
1438 /* now we can update the picture count */
1439 if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE)
1440 is->pictq_windex = 0;
1441 SDL_LockMutex(is->pictq_mutex);
d38c9e7a
MN
1442 vp->target_clock= compute_target_time(vp->pts, is);
1443
01310af2
FB
1444 is->pictq_size++;
1445 SDL_UnlockMutex(is->pictq_mutex);
1446 }
638c9d91
FB
1447 return 0;
1448}
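/* Note the handshake above: SDL overlays must be (re)allocated in the main
   thread, so the video thread pushes FF_ALLOC_EVENT and waits on pictq_cond
   until alloc_picture() marks vp->allocated. */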
1449
115329f1
DB
1450/**
1451 * compute the exact PTS for the picture if it is omitted in the stream
267e9dfa
MN
1452 * @param pts1 the dts of the pkt / pts of the frame
1453 */
1a620dd7 1454static int output_picture2(VideoState *is, AVFrame *src_frame, double pts1, int64_t pos)
638c9d91
FB
1455{
1456 double frame_delay, pts;
115329f1 1457
638c9d91
FB
1458 pts = pts1;
1459
01310af2 1460 if (pts != 0) {
638c9d91 1461 /* update video clock with pts, if present */
01310af2
FB
1462 is->video_clock = pts;
1463 } else {
72ea344b
FB
1464 pts = is->video_clock;
1465 }
1466 /* update video clock for next frame */
01f4895c 1467 frame_delay = av_q2d(is->video_st->codec->time_base);
72ea344b
FB
1468 /* for MPEG2, the frame can be repeated, so we update the
1469 clock accordingly */
267e9dfa 1470 frame_delay += src_frame->repeat_pict * (frame_delay * 0.5);
72ea344b 1471 is->video_clock += frame_delay;
638c9d91
FB
1472
1473#if defined(DEBUG_SYNC) && 0
ff358eca
SS
1474 printf("frame_type=%c clock=%0.3f pts=%0.3f\n",
1475 av_get_pict_type_char(src_frame->pict_type), pts, pts1);
638c9d91 1476#endif
1a620dd7 1477 return queue_picture(is, src_frame, pts, pos);
01310af2
FB
1478}
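/* Example: with a 25 fps time_base of 1/25, frame_delay is 40 ms; a frame
   with repeat_pict == 1 (soft telecine) advances video_clock by an extra
   20 ms, matching the repeated field's display time. */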
1479
3966a574 1480static int get_video_frame(VideoState *is, AVFrame *frame, int64_t *pts, AVPacket *pkt)
01310af2 1481{
6c7d3ead 1482 int len1, got_picture, i;
01310af2 1483
01310af2 1484 if (packet_queue_get(&is->videoq, pkt, 1) < 0)
917d2bb3 1485 return -1;
39c6a118
MN
1486
1487 if(pkt->data == flush_pkt.data){
1488 avcodec_flush_buffers(is->video_st->codec);
6c7d3ead
MN
1489
1490 SDL_LockMutex(is->pictq_mutex);
 1491 //Make sure there are no long delay timers (ideally we should just flush the queue but that's harder)
1492 for(i=0; i<VIDEO_PICTURE_QUEUE_SIZE; i++){
d38c9e7a 1493 is->pictq[i].target_clock= 0;
6c7d3ead
MN
1494 }
1495 while (is->pictq_size && !is->videoq.abort_request) {
1496 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1497 }
1a620dd7 1498 is->video_current_pos= -1;
6c7d3ead
MN
1499 SDL_UnlockMutex(is->pictq_mutex);
1500
41db429d
MN
1501 is->last_dts_for_fault_detection=
1502 is->last_pts_for_fault_detection= INT64_MIN;
967030eb 1503 is->frame_last_pts= AV_NOPTS_VALUE;
f7119e42 1504 is->frame_last_delay = 0;
b25453bd 1505 is->frame_timer = (double)av_gettime() / 1000000.0;
d38c9e7a
MN
1506 is->skip_frames= 1;
1507 is->skip_frames_index= 0;
917d2bb3 1508 return 0;
39c6a118
MN
1509 }
1510
638c9d91
FB
1511 /* NOTE: ipts is the PTS of the _first_ picture beginning in
1512 this packet, if any */
7fb262b5 1513 is->video_st->codec->reordered_opaque= pkt->pts;
bea18375 1514 len1 = avcodec_decode_video2(is->video_st->codec,
620e8baf 1515 frame, &got_picture,
bea18375 1516 pkt);
620e8baf 1517
99e0b12b 1518 if (got_picture) {
df7d6e48
SS
1519 if(pkt->dts != AV_NOPTS_VALUE){
1520 is->faulty_dts += pkt->dts <= is->last_dts_for_fault_detection;
1521 is->last_dts_for_fault_detection= pkt->dts;
1522 }
1523 if(frame->reordered_opaque != AV_NOPTS_VALUE){
1524 is->faulty_pts += frame->reordered_opaque <= is->last_pts_for_fault_detection;
1525 is->last_pts_for_fault_detection= frame->reordered_opaque;
1526 }
99e0b12b 1527 }
41db429d
MN
1528
1529 if( ( decoder_reorder_pts==1
ecbed31c 1530 || (decoder_reorder_pts && is->faulty_pts<is->faulty_dts)
41db429d 1531 || pkt->dts == AV_NOPTS_VALUE)
7fb262b5 1532 && frame->reordered_opaque != AV_NOPTS_VALUE)
917d2bb3 1533 *pts= frame->reordered_opaque;
620e8baf 1534 else if(pkt->dts != AV_NOPTS_VALUE)
917d2bb3 1535 *pts= pkt->dts;
620e8baf 1536 else
917d2bb3
MN
1537 *pts= 0;
1538
fb966f99
MN
1539// if (len1 < 0)
1540// break;
d38c9e7a
MN
1541 if (got_picture){
1542 is->skip_frames_index += 1;
1543 if(is->skip_frames_index >= is->skip_frames){
1544 is->skip_frames_index -= FFMAX(is->skip_frames, 1.0);
1545 return 1;
1546 }
1547
1548 }
917d2bb3
MN
1549 return 0;
1550}
1551
1552#if CONFIG_AVFILTER
1553typedef struct {
1554 VideoState *is;
1555 AVFrame *frame;
dd0c789b 1556 int use_dr1;
917d2bb3
MN
1557} FilterPriv;
1558
dd0c789b
BB
1559static int input_get_buffer(AVCodecContext *codec, AVFrame *pic)
1560{
1561 AVFilterContext *ctx = codec->opaque;
1562 AVFilterPicRef *ref;
1563 int perms = AV_PERM_WRITE;
65929418 1564 int i, w, h, stride[4];
dd0c789b
BB
1565 unsigned edge;
1566
1567 if(pic->buffer_hints & FF_BUFFER_HINTS_VALID) {
1568 if(pic->buffer_hints & FF_BUFFER_HINTS_READABLE) perms |= AV_PERM_READ;
1569 if(pic->buffer_hints & FF_BUFFER_HINTS_PRESERVE) perms |= AV_PERM_PRESERVE;
1570 if(pic->buffer_hints & FF_BUFFER_HINTS_REUSABLE) perms |= AV_PERM_REUSE2;
1571 }
1572 if(pic->reference) perms |= AV_PERM_READ | AV_PERM_PRESERVE;
1573
1574 w = codec->width;
1575 h = codec->height;
1576 avcodec_align_dimensions2(codec, &w, &h, stride);
1577 edge = codec->flags & CODEC_FLAG_EMU_EDGE ? 0 : avcodec_get_edge_width();
1578 w += edge << 1;
1579 h += edge << 1;
1580
1581 if(!(ref = avfilter_get_video_buffer(ctx->outputs[0], perms, w, h)))
1582 return -1;
1583
1584 ref->w = codec->width;
1585 ref->h = codec->height;
cfb7e6e6
JM
1586 for(i = 0; i < 4; i ++) {
1587 unsigned hshift = (i == 1 || i == 2) ? av_pix_fmt_descriptors[ref->pic->format].log2_chroma_w : 0;
1588 unsigned vshift = (i == 1 || i == 2) ? av_pix_fmt_descriptors[ref->pic->format].log2_chroma_h : 0;
dd0c789b 1589
3635c07b 1590 if (ref->data[i]) {
e53ca636 1591 ref->data[i] += (edge >> hshift) + ((edge * ref->linesize[i]) >> vshift);
3635c07b 1592 }
dd0c789b
BB
1593 pic->data[i] = ref->data[i];
1594 pic->linesize[i] = ref->linesize[i];
1595 }
1596 pic->opaque = ref;
1597 pic->age = INT_MAX;
1598 pic->type = FF_BUFFER_TYPE_USER;
867ab7fb 1599 pic->reordered_opaque = codec->reordered_opaque;
dd0c789b
BB
1600 return 0;
1601}
1602
1603static void input_release_buffer(AVCodecContext *codec, AVFrame *pic)
1604{
1605 memset(pic->data, 0, sizeof(pic->data));
1606 avfilter_unref_pic(pic->opaque);
1607}
1608
12bd3c1f
JM
1609static int input_reget_buffer(AVCodecContext *codec, AVFrame *pic)
1610{
1611 AVFilterPicRef *ref = pic->opaque;
1612
1613 if (pic->data[0] == NULL) {
1614 pic->buffer_hints |= FF_BUFFER_HINTS_READABLE;
1615 return codec->get_buffer(codec, pic);
1616 }
1617
1618 if ((codec->width != ref->w) || (codec->height != ref->h) ||
1619 (codec->pix_fmt != ref->pic->format)) {
1620 av_log(codec, AV_LOG_ERROR, "Picture properties changed.\n");
1621 return -1;
1622 }
1623
1624 pic->reordered_opaque = codec->reordered_opaque;
1625 return 0;
1626}
1627
917d2bb3
MN
1628static int input_init(AVFilterContext *ctx, const char *args, void *opaque)
1629{
1630 FilterPriv *priv = ctx->priv;
dd0c789b 1631 AVCodecContext *codec;
917d2bb3
MN
1632 if(!opaque) return -1;
1633
1634 priv->is = opaque;
dd0c789b
BB
1635 codec = priv->is->video_st->codec;
1636 codec->opaque = ctx;
1637 if(codec->codec->capabilities & CODEC_CAP_DR1) {
1638 priv->use_dr1 = 1;
1639 codec->get_buffer = input_get_buffer;
1640 codec->release_buffer = input_release_buffer;
12bd3c1f 1641 codec->reget_buffer = input_reget_buffer;
dd0c789b
BB
1642 }
1643
917d2bb3
MN
1644 priv->frame = avcodec_alloc_frame();
1645
1646 return 0;
1647}
1648
1649static void input_uninit(AVFilterContext *ctx)
1650{
1651 FilterPriv *priv = ctx->priv;
1652 av_free(priv->frame);
1653}
1654
1655static int input_request_frame(AVFilterLink *link)
1656{
1657 FilterPriv *priv = link->src->priv;
1658 AVFilterPicRef *picref;
3966a574 1659 int64_t pts = 0;
917d2bb3
MN
1660 AVPacket pkt;
1661 int ret;
1662
1663 while (!(ret = get_video_frame(priv->is, priv->frame, &pts, &pkt)))
1664 av_free_packet(&pkt);
1665 if (ret < 0)
1666 return -1;
1667
dd0c789b 1668 if(priv->use_dr1) {
c41c5b02 1669 picref = avfilter_ref_pic(priv->frame->opaque, ~0);
dd0c789b 1670 } else {
cf097cbc
BB
1671 picref = avfilter_get_video_buffer(link, AV_PERM_WRITE, link->w, link->h);
1672 av_picture_copy((AVPicture *)&picref->data, (AVPicture *)priv->frame,
1673 picref->pic->format, link->w, link->h);
dd0c789b 1674 }
917d2bb3
MN
1675 av_free_packet(&pkt);
1676
1677 picref->pts = pts;
bb409513 1678 picref->pos = pkt.pos;
917d2bb3 1679 picref->pixel_aspect = priv->is->video_st->codec->sample_aspect_ratio;
c41c5b02 1680 avfilter_start_frame(link, picref);
917d2bb3
MN
1681 avfilter_draw_slice(link, 0, link->h, 1);
1682 avfilter_end_frame(link);
917d2bb3
MN
1683
1684 return 0;
1685}
1686
1687static int input_query_formats(AVFilterContext *ctx)
1688{
1689 FilterPriv *priv = ctx->priv;
1690 enum PixelFormat pix_fmts[] = {
1691 priv->is->video_st->codec->pix_fmt, PIX_FMT_NONE
1692 };
1693
1694 avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
1695 return 0;
1696}
1697
1698static int input_config_props(AVFilterLink *link)
1699{
1700 FilterPriv *priv = link->src->priv;
1701 AVCodecContext *c = priv->is->video_st->codec;
1702
1703 link->w = c->width;
1704 link->h = c->height;
1705
1706 return 0;
1707}
1708
1709static AVFilter input_filter =
1710{
1711 .name = "ffplay_input",
1712
1713 .priv_size = sizeof(FilterPriv),
1714
1715 .init = input_init,
1716 .uninit = input_uninit,
1717
1718 .query_formats = input_query_formats,
1719
1720 .inputs = (AVFilterPad[]) {{ .name = NULL }},
1721 .outputs = (AVFilterPad[]) {{ .name = "default",
72415b2a 1722 .type = AVMEDIA_TYPE_VIDEO,
917d2bb3
MN
1723 .request_frame = input_request_frame,
1724 .config_props = input_config_props, },
1725 { .name = NULL }},
1726};
1727
1728static void output_end_frame(AVFilterLink *link)
1729{
1730}
1731
1732static int output_query_formats(AVFilterContext *ctx)
1733{
1734 enum PixelFormat pix_fmts[] = { PIX_FMT_YUV420P, PIX_FMT_NONE };
1735
1736 avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
1737 return 0;
1738}
1739
1740static int get_filtered_video_frame(AVFilterContext *ctx, AVFrame *frame,
bb409513 1741 int64_t *pts, int64_t *pos)
917d2bb3
MN
1742{
1743 AVFilterPicRef *pic;
1744
1745 if(avfilter_request_frame(ctx->inputs[0]))
1746 return -1;
1747 if(!(pic = ctx->inputs[0]->cur_pic))
1748 return -1;
1749 ctx->inputs[0]->cur_pic = NULL;
1750
1751 frame->opaque = pic;
1752 *pts = pic->pts;
bb409513 1753 *pos = pic->pos;
917d2bb3
MN
1754
1755 memcpy(frame->data, pic->data, sizeof(frame->data));
1756 memcpy(frame->linesize, pic->linesize, sizeof(frame->linesize));
1757
1758 return 1;
1759}
1760
1761static AVFilter output_filter =
1762{
1763 .name = "ffplay_output",
1764
1765 .query_formats = output_query_formats,
1766
1767 .inputs = (AVFilterPad[]) {{ .name = "default",
72415b2a 1768 .type = AVMEDIA_TYPE_VIDEO,
917d2bb3
MN
1769 .end_frame = output_end_frame,
1770 .min_perms = AV_PERM_READ, },
1771 { .name = NULL }},
1772 .outputs = (AVFilterPad[]) {{ .name = NULL }},
1773};
1774#endif /* CONFIG_AVFILTER */
1775
1776static int video_thread(void *arg)
1777{
1778 VideoState *is = arg;
1779 AVFrame *frame= avcodec_alloc_frame();
4903b5ca 1780 int64_t pts_int;
917d2bb3
MN
1781 double pts;
1782 int ret;
1783
1784#if CONFIG_AVFILTER
4903b5ca 1785 int64_t pos;
3f073fa2 1786 char sws_flags_str[128];
917d2bb3
MN
1787 AVFilterContext *filt_src = NULL, *filt_out = NULL;
1788 AVFilterGraph *graph = av_mallocz(sizeof(AVFilterGraph));
3f073fa2
SS
1789 snprintf(sws_flags_str, sizeof(sws_flags_str), "flags=%d", sws_flags);
1790 graph->scale_sws_opts = av_strdup(sws_flags_str);
917d2bb3
MN
1791
1792 if(!(filt_src = avfilter_open(&input_filter, "src"))) goto the_end;
1793 if(!(filt_out = avfilter_open(&output_filter, "out"))) goto the_end;
1794
1795 if(avfilter_init_filter(filt_src, NULL, is)) goto the_end;
1796 if(avfilter_init_filter(filt_out, NULL, frame)) goto the_end;
1797
1798
1799 if(vfilters) {
1800 AVFilterInOut *outputs = av_malloc(sizeof(AVFilterInOut));
1801 AVFilterInOut *inputs = av_malloc(sizeof(AVFilterInOut));
1802
1803 outputs->name = av_strdup("in");
1804 outputs->filter = filt_src;
1805 outputs->pad_idx = 0;
1806 outputs->next = NULL;
1807
1808 inputs->name = av_strdup("out");
1809 inputs->filter = filt_out;
1810 inputs->pad_idx = 0;
1811 inputs->next = NULL;
1812
1813 if (avfilter_graph_parse(graph, vfilters, inputs, outputs, NULL) < 0)
1814 goto the_end;
1815 av_freep(&vfilters);
1816 } else {
1817         if (avfilter_link(filt_src, 0, filt_out, 0) < 0) goto the_end;
1818 }
1819 avfilter_graph_add_filter(graph, filt_src);
1820 avfilter_graph_add_filter(graph, filt_out);
1821
1822     if (avfilter_graph_check_validity(graph, NULL)) goto the_end;
1823     if (avfilter_graph_config_formats(graph, NULL)) goto the_end;
1824     if (avfilter_graph_config_links(graph, NULL)) goto the_end;
1825
1826 is->out_video_filter = filt_out;
1827#endif
1828
1829 for(;;) {
1830#if !CONFIG_AVFILTER
1831 AVPacket pkt;
1832#endif
1833 while (is->paused && !is->videoq.abort_request)
1834 SDL_Delay(10);
1835#if CONFIG_AVFILTER
bb409513 1836 ret = get_filtered_video_frame(filt_out, frame, &pts_int, &pos);
917d2bb3
MN
1837#else
1838 ret = get_video_frame(is, frame, &pts_int, &pkt);
1839#endif
1840
1841 if (ret < 0) goto the_end;
1842
1843 if (!ret)
1844 continue;
1845
3966a574 1846 pts = pts_int*av_q2d(is->video_st->time_base);
917d2bb3
MN
1847
1848#if CONFIG_AVFILTER
bb409513 1849 ret = output_picture2(is, frame, pts, pos);
917d2bb3 1850#else
fca62599 1851 ret = output_picture2(is, frame, pts, pkt.pos);
917d2bb3
MN
1852 av_free_packet(&pkt);
1853#endif
1854 if (ret < 0)
1855 goto the_end;
1856
115329f1 1857 if (step)
bba04f1e
WH
1858 if (cur_stream)
1859 stream_pause(cur_stream);
01310af2
FB
1860 }
1861 the_end:
917d2bb3
MN
1862#if CONFIG_AVFILTER
1863 avfilter_graph_destroy(graph);
1864 av_freep(&graph);
1865#endif
c6b1edc9 1866 av_free(frame);
01310af2
FB
1867 return 0;
1868}
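/* Editor's note (illustrative arithmetic, assumed stream parameters): video_thread()
 * converts the integer pts_int, expressed in stream time_base units, to seconds with
 * pts = pts_int * av_q2d(time_base). For a 90 kHz time base (time_base = 1/90000)
 * and pts_int = 450000 this gives pts = 450000 / 90000.0 = 5.0 seconds. */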
1869
72ce053b
IC
1870static int subtitle_thread(void *arg)
1871{
1872 VideoState *is = arg;
1873 SubPicture *sp;
1874 AVPacket pkt1, *pkt = &pkt1;
1875 int len1, got_subtitle;
1876 double pts;
1877 int i, j;
1878 int r, g, b, y, u, v, a;
1879
1880 for(;;) {
1881 while (is->paused && !is->subtitleq.abort_request) {
1882 SDL_Delay(10);
1883 }
1884 if (packet_queue_get(&is->subtitleq, pkt, 1) < 0)
1885 break;
115329f1 1886
39c6a118
MN
1887         if (pkt->data == flush_pkt.data){
1888 avcodec_flush_buffers(is->subtitle_st->codec);
1889 continue;
1890 }
72ce053b
IC
1891 SDL_LockMutex(is->subpq_mutex);
1892 while (is->subpq_size >= SUBPICTURE_QUEUE_SIZE &&
1893 !is->subtitleq.abort_request) {
1894 SDL_CondWait(is->subpq_cond, is->subpq_mutex);
1895 }
1896 SDL_UnlockMutex(is->subpq_mutex);
115329f1 1897
72ce053b
IC
1898 if (is->subtitleq.abort_request)
1899 goto the_end;
115329f1 1900
72ce053b
IC
1901 sp = &is->subpq[is->subpq_windex];
1902
1903         /* NOTE: pts is the PTS of the _first_ subtitle beginning in
1904            this packet, if any */
1905 pts = 0;
1906 if (pkt->pts != AV_NOPTS_VALUE)
1907 pts = av_q2d(is->subtitle_st->time_base)*pkt->pts;
1908
bea18375 1909 len1 = avcodec_decode_subtitle2(is->subtitle_st->codec,
115329f1 1910 &sp->sub, &got_subtitle,
bea18375 1911 pkt);
72ce053b
IC
1912// if (len1 < 0)
1913// break;
1914 if (got_subtitle && sp->sub.format == 0) {
1915 sp->pts = pts;
115329f1 1916
72ce053b
IC
1917 for (i = 0; i < sp->sub.num_rects; i++)
1918 {
db4fac64 1919 for (j = 0; j < sp->sub.rects[i]->nb_colors; j++)
72ce053b 1920 {
25b4c651 1921 RGBA_IN(r, g, b, a, (uint32_t*)sp->sub.rects[i]->pict.data[1] + j);
72ce053b
IC
1922 y = RGB_TO_Y_CCIR(r, g, b);
1923 u = RGB_TO_U_CCIR(r, g, b, 0);
1924 v = RGB_TO_V_CCIR(r, g, b, 0);
25b4c651 1925 YUVA_OUT((uint32_t*)sp->sub.rects[i]->pict.data[1] + j, y, u, v, a);
72ce053b
IC
1926 }
1927 }
1928
1929 /* now we can update the picture count */
1930 if (++is->subpq_windex == SUBPICTURE_QUEUE_SIZE)
1931 is->subpq_windex = 0;
1932 SDL_LockMutex(is->subpq_mutex);
1933 is->subpq_size++;
1934 SDL_UnlockMutex(is->subpq_mutex);
1935 }
1936 av_free_packet(pkt);
115329f1 1937// if (step)
72ce053b
IC
1938// if (cur_stream)
1939// stream_pause(cur_stream);
1940 }
1941 the_end:
1942 return 0;
1943}
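/* Editor's note (worked example; the exact values depend on the CCIR 601
 * limited-range macros used above): subtitle palette entries arrive as RGBA and
 * are rewritten in place as YUVA so later blending can stay in YUV. For an opaque
 * white entry r = g = b = 255, RGB_TO_Y_CCIR() yields y = 235 and the chroma
 * macros yield u = v = 128 (neutral), while the alpha byte passes through
 * unchanged via YUVA_OUT(). */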
1944
01310af2
FB
1945/* copy samples for viewing in editor window */
1946static void update_sample_display(VideoState *is, short *samples, int samples_size)
1947{
1948 int size, len, channels;
1949
01f4895c 1950 channels = is->audio_st->codec->channels;
01310af2
FB
1951
1952 size = samples_size / sizeof(short);
1953 while (size > 0) {
1954 len = SAMPLE_ARRAY_SIZE - is->sample_array_index;
1955 if (len > size)
1956 len = size;
1957 memcpy(is->sample_array + is->sample_array_index, samples, len * sizeof(short));
1958 samples += len;
1959 is->sample_array_index += len;
1960 if (is->sample_array_index >= SAMPLE_ARRAY_SIZE)
1961 is->sample_array_index = 0;
1962 size -= len;
1963 }
1964}
1965
01310af2
FB
1966/* return the new audio buffer size (samples can be added or deleted
1967   to get better sync if the video or external clock is the master) */
115329f1 1968static int synchronize_audio(VideoState *is, short *samples,
638c9d91 1969 int samples_size1, double pts)
01310af2 1970{
638c9d91 1971 int n, samples_size;
01310af2 1972 double ref_clock;
115329f1 1973
01f4895c 1974 n = 2 * is->audio_st->codec->channels;
638c9d91 1975 samples_size = samples_size1;
01310af2 1976
01310af2 1977 /* if not master, then we try to remove or add samples to correct the clock */
01310af2 1978 if (((is->av_sync_type == AV_SYNC_VIDEO_MASTER && is->video_st) ||
638c9d91
FB
1979 is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
1980 double diff, avg_diff;
01310af2 1981 int wanted_size, min_size, max_size, nb_samples;
115329f1 1982
638c9d91
FB
1983 ref_clock = get_master_clock(is);
1984 diff = get_audio_clock(is) - ref_clock;
115329f1 1985
638c9d91
FB
1986 if (diff < AV_NOSYNC_THRESHOLD) {
1987 is->audio_diff_cum = diff + is->audio_diff_avg_coef * is->audio_diff_cum;
1988 if (is->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) {
1989 /* not enough measures to have a correct estimate */
1990 is->audio_diff_avg_count++;
1991 } else {
1992 /* estimate the A-V difference */
1993 avg_diff = is->audio_diff_cum * (1.0 - is->audio_diff_avg_coef);
1994
1995 if (fabs(avg_diff) >= is->audio_diff_threshold) {
01f4895c 1996 wanted_size = samples_size + ((int)(diff * is->audio_st->codec->sample_rate) * n);
638c9d91 1997 nb_samples = samples_size / n;
115329f1 1998
638c9d91
FB
1999 min_size = ((nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
2000 max_size = ((nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
2001 if (wanted_size < min_size)
2002 wanted_size = min_size;
2003 else if (wanted_size > max_size)
2004 wanted_size = max_size;
115329f1 2005
638c9d91
FB
2006             /* add or remove samples to correct the sync */
2007 if (wanted_size < samples_size) {
2008 /* remove samples */
2009 samples_size = wanted_size;
2010 } else if (wanted_size > samples_size) {
2011 uint8_t *samples_end, *q;
2012 int nb;
115329f1 2013
638c9d91
FB
2014 /* add samples */
2015                     nb = (wanted_size - samples_size); /* number of bytes to add */
2016 samples_end = (uint8_t *)samples + samples_size - n;
2017 q = samples_end + n;
2018 while (nb > 0) {
2019 memcpy(q, samples_end, n);
2020 q += n;
2021 nb -= n;
2022 }
2023 samples_size = wanted_size;
2024 }
2025 }
2026#if 0
115329f1
DB
2027 printf("diff=%f adiff=%f sample_diff=%d apts=%0.3f vpts=%0.3f %f\n",
2028 diff, avg_diff, samples_size - samples_size1,
638c9d91
FB
2029 is->audio_clock, is->video_clock, is->audio_diff_threshold);
2030#endif
01310af2 2031 }
638c9d91
FB
2032 } else {
2033             /* the difference is too big: it may be due to initial PTS errors, so
2034                reset the A-V filter */
2035 is->audio_diff_avg_count = 0;
2036 is->audio_diff_cum = 0;
01310af2
FB
2037 }
2038 }
2039
01310af2
FB
2040 return samples_size;
2041}
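/* Editor's note (worked example with assumed numbers): for 44.1 kHz stereo S16,
 * n = 2 * 2 = 4 bytes per sample frame. With samples_size = 8192 bytes (2048
 * frames) and a smoothed A-V difference of +0.050 s above the threshold,
 * wanted_size = 8192 + (int)(0.050 * 44100) * 4 = 8192 + 8820 = 17012 bytes,
 * which the SAMPLE_CORRECTION_PERCENT_MAX clamp reduces to
 * max_size = ((2048 * 110) / 100) * 4 = 9008 bytes, so at most about 10% of the
 * buffer is duplicated (or dropped, for a negative difference) per call. */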
2042
2043/* decode one audio frame and return its uncompressed size */
5a4476e2 2044static int audio_decode_frame(VideoState *is, double *pts_ptr)
01310af2 2045{
bea18375 2046 AVPacket *pkt_temp = &is->audio_pkt_temp;
01310af2 2047 AVPacket *pkt = &is->audio_pkt;
abdff646 2048 AVCodecContext *dec= is->audio_st->codec;
72ea344b 2049 int n, len1, data_size;
01310af2
FB
2050 double pts;
2051
2052 for(;;) {
72ea344b 2053 /* NOTE: the audio packet can contain several frames */
bea18375 2054 while (pkt_temp->size > 0) {
5a4476e2 2055 data_size = sizeof(is->audio_buf1);
bea18375 2056 len1 = avcodec_decode_audio3(dec,
5a4476e2 2057 (int16_t *)is->audio_buf1, &data_size,
bea18375 2058 pkt_temp);
72ea344b
FB
2059 if (len1 < 0) {
2060 /* if error, we skip the frame */
bea18375 2061 pkt_temp->size = 0;
01310af2 2062 break;
72ea344b 2063 }
115329f1 2064
bea18375
TB
2065 pkt_temp->data += len1;
2066 pkt_temp->size -= len1;
72ea344b
FB
2067 if (data_size <= 0)
2068 continue;
5a4476e2
PR
2069
2070 if (dec->sample_fmt != is->audio_src_fmt) {
2071 if (is->reformat_ctx)
2072 av_audio_convert_free(is->reformat_ctx);
2073 is->reformat_ctx= av_audio_convert_alloc(SAMPLE_FMT_S16, 1,
2074 dec->sample_fmt, 1, NULL, 0);
2075 if (!is->reformat_ctx) {
2076 fprintf(stderr, "Cannot convert %s sample format to %s sample format\n",
2077 avcodec_get_sample_fmt_name(dec->sample_fmt),
2078 avcodec_get_sample_fmt_name(SAMPLE_FMT_S16));
2079 break;
2080 }
2081 is->audio_src_fmt= dec->sample_fmt;
2082 }
2083
2084 if (is->reformat_ctx) {
2085 const void *ibuf[6]= {is->audio_buf1};
2086 void *obuf[6]= {is->audio_buf2};
2087 int istride[6]= {av_get_bits_per_sample_format(dec->sample_fmt)/8};
2088 int ostride[6]= {2};
2089 int len= data_size/istride[0];
2090 if (av_audio_convert(is->reformat_ctx, obuf, ostride, ibuf, istride, len)<0) {
2091 printf("av_audio_convert() failed\n");
2092 break;
2093 }
2094 is->audio_buf= is->audio_buf2;
2095             /* FIXME: existing code assumes that data_size equals framesize*channels*2;
2096                remove this legacy cruft */
2097 data_size= len*2;
2098 }else{
2099 is->audio_buf= is->audio_buf1;
2100 }
2101
72ea344b
FB
2102 /* if no pts, then compute it */
2103 pts = is->audio_clock;
2104 *pts_ptr = pts;
abdff646 2105 n = 2 * dec->channels;
115329f1 2106 is->audio_clock += (double)data_size /
abdff646 2107 (double)(n * dec->sample_rate);
638c9d91 2108#if defined(DEBUG_SYNC)
72ea344b
FB
2109 {
2110 static double last_clock;
2111 printf("audio: delay=%0.3f clock=%0.3f pts=%0.3f\n",
2112 is->audio_clock - last_clock,
2113 is->audio_clock, pts);
2114 last_clock = is->audio_clock;
01310af2 2115 }
72ea344b
FB
2116#endif
2117 return data_size;
01310af2
FB
2118 }
2119
72ea344b
FB
2120 /* free the current packet */
2121 if (pkt->data)
01310af2 2122 av_free_packet(pkt);
115329f1 2123
72ea344b
FB
2124 if (is->paused || is->audioq.abort_request) {
2125 return -1;
2126 }
115329f1 2127
01310af2
FB
2128 /* read next packet */
2129 if (packet_queue_get(&is->audioq, pkt, 1) < 0)
2130 return -1;
39c6a118 2131         if (pkt->data == flush_pkt.data){
abdff646 2132 avcodec_flush_buffers(dec);
39c6a118
MN
2133 continue;
2134 }
2135
bea18375
TB
2136 pkt_temp->data = pkt->data;
2137 pkt_temp->size = pkt->size;
115329f1 2138
72ea344b
FB
2139         /* update the audio clock with the pts, if available */
2140 if (pkt->pts != AV_NOPTS_VALUE) {
c0df9d75 2141 is->audio_clock = av_q2d(is->audio_st->time_base)*pkt->pts;
72ea344b 2142 }
01310af2
FB
2143 }
2144}
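/* Editor's note (illustrative arithmetic, assumed stream parameters): the audio
 * clock above advances by data_size / (2 * channels * sample_rate) seconds per
 * decoded chunk. For 4096 bytes of stereo S16 at 44100 Hz this is
 * 4096 / (4 * 44100) ~= 0.0232 s, i.e. each chunk moves the clock forward by
 * roughly 23 ms. */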
2145
638c9d91
FB
2146/* get the current remaining audio output buffer size, in bytes. With SDL, we
2147   cannot have precise information */
2148static int audio_write_get_buf_size(VideoState *is)
01310af2 2149{
b09b580b 2150 return is->audio_buf_size - is->audio_buf_index;
01310af2
FB
2151}
2152
2153
2154/* prepare a new audio buffer */
358061f6 2155static void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
01310af2
FB
2156{
2157 VideoState *is = opaque;
2158 int audio_size, len1;
2159 double pts;
2160
2161 audio_callback_time = av_gettime();
115329f1 2162
01310af2
FB
2163 while (len > 0) {
2164 if (is->audio_buf_index >= is->audio_buf_size) {
5a4476e2 2165 audio_size = audio_decode_frame(is, &pts);
01310af2
FB
2166 if (audio_size < 0) {
2167 /* if error, just output silence */
1a1078fa 2168 is->audio_buf = is->audio_buf1;
01310af2
FB
2169 is->audio_buf_size = 1024;
2170 memset(is->audio_buf, 0, is->audio_buf_size);
2171 } else {
2172 if (is->show_audio)
2173 update_sample_display(is, (int16_t *)is->audio_buf, audio_size);
115329f1 2174 audio_size = synchronize_audio(is, (int16_t *)is->audio_buf, audio_size,
01310af2
FB
2175 pts);
2176 is->audio_buf_size = audio_size;
2177 }
2178 is->audio_buf_index = 0;
2179 }
2180 len1 = is->audio_buf_size - is->audio_buf_index;
2181 if (len1 > len)
2182 len1 = len;
2183 memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);
2184 len -= len1;
2185 stream += len1;
2186 is->audio_buf_index += len1;
2187 }
2188}
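/* Editor's note (sketch of the control flow above, no new behaviour implied):
 * SDL invokes sdl_audio_callback() whenever it needs exactly `len` more bytes,
 * and the while loop always satisfies the full request. When
 * audio_decode_frame() fails, 1024 bytes of zeroed samples are substituted, so
 * the device keeps playing silence instead of underrunning; audio_callback_time
 * records when this refill started, which the audio clock code can use to
 * estimate how much of the buffered data has already been consumed. */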
2189
01310af2
FB
2190/* open a given stream. Return 0 if OK */
2191static int stream_component_open(VideoState *is, int stream_index)
2192{
2193 AVFormatContext *ic = is->ic;
fe74099a 2194 AVCodecContext *avctx;
01310af2
FB
2195 AVCodec *codec;
2196 SDL_AudioSpec wanted_spec, spec;
2197
2198 if (stream_index < 0 || stream_index >= ic->nb_streams)
2199 return -1;
fe74099a 2200 avctx = ic->streams[stream_index]->codec;
115329f1 2201
01310af2 2202 /* prepare audio output */
72415b2a 2203 if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) {
fe74099a
SS
2204 if (avctx->channels > 0) {
2205 avctx->request_channels = FFMIN(2, avctx->channels);
94eadc8b 2206 } else {
fe74099a 2207 avctx->request_channels = 2;
638c9d91 2208 }
01310af2
FB
2209 }
2210
fe74099a
SS
2211 codec = avcodec_find_decoder(avctx->codec_id);
2212 avctx->debug_mv = debug_mv;
2213 avctx->debug = debug;
2214 avctx->workaround_bugs = workaround_bugs;
2215 avctx->lowres = lowres;
2216     if (lowres) avctx->flags |= CODEC_FLAG_EMU_EDGE;
2217 avctx->idct_algo= idct;
2218     if (fast) avctx->flags2 |= CODEC_FLAG2_FAST;
2219 avctx->skip_frame= skip_frame;
2220 avctx->skip_idct= skip_idct;
2221 avctx->skip_loop_filter= skip_loop_filter;
2222 avctx->error_recognition= error_recognition;
2223 avctx->error_concealment= error_concealment;
2224 avcodec_thread_init(avctx, thread_count);
2225
2226 set_context_opts(avctx, avcodec_opts[avctx->codec_type], 0);
e43d7a18 2227
01310af2 2228 if (!codec ||
fe74099a 2229 avcodec_open(avctx, codec) < 0)
01310af2 2230 return -1;
51b73087
JR
2231
2232 /* prepare audio output */
72415b2a 2233 if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) {
fe74099a 2234 wanted_spec.freq = avctx->sample_rate;
51b73087 2235 wanted_spec.format = AUDIO_S16SYS;
fe74099a 2236 wanted_spec.channels = avctx->channels;
51b73087
JR
2237 wanted_spec.silence = 0;
2238 wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
2239 wanted_spec.callback = sdl_audio_callback;
2240 wanted_spec.userdata = is;
2241 if (SDL_OpenAudio(&wanted_spec, &spec) < 0) {
2242 fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
2243 return -1;
2244 }
2245 is->audio_hw_buf_size = spec.size;
5a4476e2 2246 is->audio_src_fmt= SAMPLE_FMT_S16;
51b73087
JR
2247 }
2248
3f3fe38d 2249 ic->streams[stream_index]->discard = AVDISCARD_DEFAULT;
fe74099a 2250 switch(avctx->codec_type) {
72415b2a 2251 case AVMEDIA_TYPE_AUDIO:
01310af2
FB
2252 is->audio_stream = stream_index;
2253 is->audio_st = ic->streams[stream_index];
2254 is->audio_buf_size = 0;
2255 is->audio_buf_index = 0;
638c9d91
FB
2256
2257 /* init averaging filter */
2258 is->audio_diff_avg_coef = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
2259 is->audio_diff_avg_count = 0;
2260         /* since we do not have a precise enough measure of the audio fifo fullness,
2261            we correct audio sync only if the error is larger than this threshold */
fe74099a 2262 is->audio_diff_threshold = 2.0 * SDL_AUDIO_BUFFER_SIZE / avctx->sample_rate;
638c9d91 2263
01310af2
FB
2264 memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
2265 packet_queue_init(&is->audioq);
bb270c08 2266 SDL_PauseAudio(0);
01310af2 2267 break;
72415b2a 2268 case AVMEDIA_TYPE_VIDEO:
01310af2
FB
2269 is->video_stream = stream_index;
2270 is->video_st = ic->streams[stream_index];
2271
68aefbe8 2272// is->video_current_pts_time = av_gettime();
638c9d91 2273
01310af2
FB
2274 packet_queue_init(&is->videoq);
2275 is->video_tid = SDL_CreateThread(video_thread, is);
2276 break;
72415b2a 2277 case AVMEDIA_TYPE_SUBTITLE:
72ce053b
IC
2278 is->subtitle_stream = stream_index;
2279 is->subtitle_st = ic->streams[stream_index];
2280 packet_queue_init(&is->subtitleq);
115329f1 2281
72ce053b
IC
2282 is->subtitle_tid = SDL_CreateThread(subtitle_thread, is);
2283 break;
01310af2
FB
2284 default:
2285 break;
2286 }
2287 return 0;
2288}
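/* Editor's note (worked example with assumed hardware parameters): with
 * SDL_AUDIO_BUFFER_SIZE = 1024 samples and a stereo 44.1 kHz stream, a successful
 * SDL_OpenAudio() typically reports spec.size = 1024 samples * 2 channels *
 * 2 bytes = 4096 bytes for audio_hw_buf_size, and the sync threshold becomes
 * audio_diff_threshold = 2.0 * 1024 / 44100 ~= 0.046 s, i.e. corrections are only
 * applied once the A-V drift exceeds roughly two SDL buffers' worth of audio. */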
2289
2290static void stream_component_close(VideoState *is, int stream_index)
2291{
2292 AVFormatContext *ic = is->ic;
fe74099a 2293 AVCodecContext *avctx;
115329f1 2294
72ce053b
IC
2295 if (stream_index < 0 || stream_index >= ic->nb_streams)
2296 return;
fe74099a 2297 avctx = ic->streams[stream_index]->codec;
01310af2 2298
fe74099a 2299 switch(avctx->codec_type) {
72415b2a 2300 case AVMEDIA_TYPE_AUDIO:
01310af2
FB
2301 packet_queue_abort(&is->audioq);
2302
2303 SDL_CloseAudio();
2304
2305 packet_queue_end(&is->audioq);
5a4476e2
PR
2306 if (is->reformat_ctx)
2307 av_audio_convert_free(is->reformat_ctx);
bc77fce6 2308 is->reformat_ctx = NULL;
01310af2 2309 break;
72415b2a 2310 case AVMEDIA_TYPE_VIDEO:
01310af2
FB
2311 packet_queue_abort(&is->videoq);
2312
2313         /* note: we also signal this mutex to make sure we unblock the
2314            video thread in all cases */
2315 SDL_LockMutex(is->pictq_mutex);
2316 SDL_CondSignal(is->pictq_cond);
2317 SDL_UnlockMutex(is->pictq_mutex);
2318
2319 SDL_WaitThread(is->video_tid, NULL);
2320
2321 packet_queue_end(&is->videoq);
2322 break;
72415b2a 2323 case AVMEDIA_TYPE_SUBTITLE:
72ce053b 2324 packet_queue_abort(&is->subtitleq);
115329f1 2325
72ce053b
IC
2326         /* note: we also signal this mutex to make sure we unblock the
2327            subtitle thread in all cases */
2328 SDL_LockMutex(is->subpq_mutex);
2329 is->subtitle_stream_changed = 1;
115329f1 2330
72ce053b
IC
2331 SDL_CondSignal(is->subpq_cond);
2332 SDL_UnlockMutex(is->subpq_mutex);
2333
2334 SDL_WaitThread(is->subtitle_tid, NULL);
2335
2336 packet_queue_end(&is->subtitleq);
2337 break;
01310af2
FB
2338 default:
2339 break;
2340 }
2341
3f3fe38d 2342 ic->streams[stream_index]->discard = AVDISCARD_ALL;
fe74099a
SS
2343 avcodec_close(avctx);
2344 switch(avctx->codec_type) {
72415b2a 2345 case AVMEDIA_TYPE_AUDIO:
01310af2
FB
2346 is->audio_st = NULL;
2347 is->audio_stream = -1;
2348 break;
72415b2a 2349 case AVMEDIA_TYPE_VIDEO:
01310af2
FB
2350 is->video_st = NULL;
2351 is->video_stream = -1;
2352 break;
72415b2a 2353 case AVMEDIA_TYPE_SUBTITLE:
72ce053b
IC
2354 is->subtitle_st = NULL;
2355 is->subtitle_stream = -1;
2356 break;
01310af2
FB
2357 default:
2358 break;
2359 }
2360}
2361
416e3508
FB
2362/* since we have only one decoding thread, we can use a global
2363 variable instead of a thread local variable */
2364static VideoState *global_video_state;
2365
2366static int decode_interrupt_cb(void)
2367{
2368 return (global_video_state && global_video_state->abort_request);
2369}
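/* Editor's note (sketch of how the callback is used, as wired up below): once
 * decode_thread() installs it with url_set_interrupt_cb(decode_interrupt_cb),
 * blocking I/O inside libavformat polls this function, so setting
 * is->abort_request = 1 (as stream_close() does) lets a stalled
 * av_open_input_file() or av_read_frame() on a slow network source return early
 * instead of hanging the decode thread. */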
01310af2
FB
2370
2371/* this thread gets the stream from the disk or the network */
2372static int decode_thread(void *arg)
2373{
2374 VideoState *is = arg;
2375 AVFormatContext *ic;
6625a3de 2376 int err, i, ret;
72415b2a
SS
2377 int st_index[AVMEDIA_TYPE_NB];
2378 int st_count[AVMEDIA_TYPE_NB]={0};
2379 int st_best_packet_count[AVMEDIA_TYPE_NB];
01310af2 2380 AVPacket pkt1, *pkt = &pkt1;
61890b02 2381 AVFormatParameters params, *ap = &params;
75bb7b0a 2382 int eof=0;
d834d63b 2383 int pkt_in_play_range = 0;
01310af2 2384
6299a229
MN
2385 ic = avformat_alloc_context();
2386
6625a3de 2387 memset(st_index, -1, sizeof(st_index));
9f7490a0 2388 memset(st_best_packet_count, -1, sizeof(st_best_packet_count));
01310af2
FB
2389 is->video_stream = -1;
2390 is->audio_stream = -1;
72ce053b 2391 is->subtitle_stream = -1;
01310af2 2392
416e3508
FB
2393 global_video_state = is;
2394 url_set_interrupt_cb(decode_interrupt_cb);
2395
61890b02 2396 memset(ap, 0, sizeof(*ap));
115329f1 2397
6299a229 2398 ap->prealloced_context = 1;
e4b89522
LW
2399 ap->width = frame_width;
2400 ap->height= frame_height;
7e042912 2401 ap->time_base= (AVRational){1, 25};
e4b89522 2402 ap->pix_fmt = frame_pix_fmt;
7e042912 2403
6299a229
MN
2404 set_context_opts(ic, avformat_opts, AV_OPT_FLAG_DECODING_PARAM);
2405
61890b02 2406 err = av_open_input_file(&ic, is->filename, is->iformat, 0, ap);
638c9d91
FB
2407 if (err < 0) {
2408 print_error(is->filename, err);
2409 ret = -1;
2410 goto fail;
2411 }
01310af2 2412 is->ic = ic;
30bc6613
MN
2413
2414     if (genpts)
2415 ic->flags |= AVFMT_FLAG_GENPTS;
2416
24c07998
LA
2417 err = av_find_stream_info(ic);
2418 if (err < 0) {
2419 fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
2420 ret = -1;
2421 goto fail;
2422 }
899681cd
BA
2423     if (ic->pb)
2424 ic->pb->eof_reached= 0; //FIXME hack, ffplay maybe should not use url_feof() to test for the end
72ea344b 2425
70a4764d
MN
2426     if (seek_by_bytes<0)
2427 seek_by_bytes= !!(ic->iformat->flags & AVFMT_TS_DISCONT);
2428
72ea344b
FB
2429 /* if seeking requested, we execute it */
2430 if (start_time != AV_NOPTS_VALUE) {
2431 int64_t timestamp;
2432
2433 timestamp = start_time;
2434 /* add the stream start time */
2435 if (ic->start_time != AV_NOPTS_VALUE)
2436 timestamp += ic->start_time;
4ed29207 2437 ret = avformat_seek_file(ic, -1, INT64_MIN, timestamp, INT64_MAX, 0);
72ea344b 2438 if (ret < 0) {
115329f1 2439 fprintf(stderr, "%s: could not seek to position %0.3f\n",
72ea344b
FB
2440 is->filename, (double)timestamp / AV_TIME_BASE);
2441 }
2442 }
72ea344b 2443
01310af2 2444 for(i = 0; i < ic->nb_streams; i++) {
9f7490a0
MN
2445 AVStream *st= ic->streams[i];
2446 AVCodecContext *avctx = st->codec;
3f3fe38d 2447 ic->streams[i]->discard = AVDISCARD_ALL;
72415b2a 2448 if(avctx->codec_type >= (unsigned)AVMEDIA_TYPE_NB)
8ef94840 2449 continue;
256ab3ed
MN
2450         if (st_count[avctx->codec_type]++ != wanted_stream[avctx->codec_type] && wanted_stream[avctx->codec_type] >= 0)
2451 continue;
2452
9f7490a0
MN
2453         if (st_best_packet_count[avctx->codec_type] >= st->codec_info_nb_frames)
2454 continue;
2455 st_best_packet_count[avctx->codec_type]= st->codec_info_nb_frames;
2456
fe74099a 2457 switch(avctx->codec_type) {
72415b2a 2458 case AVMEDIA_TYPE_AUDIO:
256ab3ed 2459 if (!audio_disable)
72415b2a 2460 st_index[AVMEDIA_TYPE_AUDIO] = i;
01310af2 2461 break;
72415b2a
SS
2462 case AVMEDIA_TYPE_VIDEO:
2463 case AVMEDIA_TYPE_SUBTITLE:
256ab3ed
MN
2464 if (!video_disable)
2465 st_index[avctx->codec_type] = i;
16a59a7b 2466 break;
01310af2
FB
2467 default:
2468 break;
2469 }
2470 }
2471 if (show_status) {
2472 dump_format(ic, 0, is->filename, 0);
2473 }
2474
2475 /* open the streams */
72415b2a
SS
2476 if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) {
2477 stream_component_open(is, st_index[AVMEDIA_TYPE_AUDIO]);
01310af2
FB
2478 }
2479
077a8d61 2480 ret=-1;
72415b2a
SS
2481 if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {
2482 ret= stream_component_open(is, st_index[AVMEDIA_TYPE_VIDEO]);
077a8d61 2483 }
d38c9e7a 2484 is->refresh_tid = SDL_CreateThread(refresh_thread, is);
077a8d61 2485     if (ret<0) {
01310af2 2486 if (!display_disable)
bf8ae197 2487 is->show_audio = 2;
01310af2
FB
2488 }
2489
72415b2a
SS
2490 if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) {
2491 stream_component_open(is, st_index[AVMEDIA_TYPE_SUBTITLE]);
16a59a7b
BA
2492 }
2493
01310af2 2494 if (is->video_stream < 0 && is->audio_stream < 0) {
638c9d91
FB
2495 fprintf(stderr, "%s: could not open codecs\n", is->filename);
2496 ret = -1;
01310af2
FB
2497 goto fail;
2498 }
2499
2500 for(;;) {
2501 if (is->abort_request)
2502 break;
416e3508
FB
2503 if (is->paused != is->last_paused) {
2504 is->last_paused = is->paused;
72ea344b 2505 if (is->paused)
f5668147 2506 is->read_pause_return= av_read_pause(ic);
72ea344b
FB
2507 else
2508 av_read_play(ic);
416e3508 2509 }
2f642393
AJ
2510#if CONFIG_RTSP_DEMUXER
2511 if (is->paused && !strcmp(ic->iformat->name, "rtsp")) {
416e3508
FB
2512 /* wait 10 ms to avoid trying to get another packet */
2513 /* XXX: horrible */
2514 SDL_Delay(10);
2515 continue;
2516 }
400738b1 2517#endif
72ea344b 2518 if (is->seek_req) {
8e606cc8 2519 int64_t seek_target= is->seek_pos;
4ed29207
MN
2520 int64_t seek_min= is->seek_rel > 0 ? seek_target - is->seek_rel + 2: INT64_MIN;
2521 int64_t seek_max= is->seek_rel < 0 ? seek_target - is->seek_rel - 2: INT64_MAX;
2522//FIXME the +-2 is due to rounding not being done in the correct direction when
2523//      the seek_pos/seek_rel variables are generated
8e606cc8 2524
4ed29207 2525 ret = avformat_seek_file(is->ic, -1, seek_min, seek_target, seek_max, is->seek_flags);
72ea344b
FB
2526 if (ret < 0) {
2527 fprintf(stderr, "%s: error while seeking\n", is->ic->filename);
e6c0297f
MN
2528 }else{
2529 if (is->audio_stream >= 0) {
2530 packet_queue_flush(&is->audioq);
39c6a118 2531 packet_queue_put(&is->audioq, &flush_pkt);
e6c0297f 2532 }
72ce053b
IC
2533 if (is->subtitle_stream >= 0) {
2534 packet_queue_flush(&is->subtitleq);
39c6a118 2535 packet_queue_put(&is->subtitleq, &flush_pkt);
72ce053b 2536 }
e6c0297f
MN
2537 if (is->video_stream >= 0) {
2538 packet_queue_flush(&is->videoq);
39c6a118 2539 packet_queue_put(&is->videoq, &flush_pkt);
e6c0297f 2540 }
72ea344b
FB
2541 }
2542 is->seek_req = 0;
e45aeb38 2543 eof= 0;
72ea344b 2544 }
416e3508 2545
01310af2 2546 /* if the queue are full, no need to read more */
79ee4683
MN
2547 if ( is->audioq.size + is->videoq.size + is->subtitleq.size > MAX_QUEUE_SIZE
2548 || ( (is->audioq .size > MIN_AUDIOQ_SIZE || is->audio_stream<0)
2549 && (is->videoq .nb_packets > MIN_FRAMES || is->video_stream<0)
2550 && (is->subtitleq.nb_packets > MIN_FRAMES || is->subtitle_stream<0))) {
01310af2
FB
2551 /* wait 10 ms */
2552 SDL_Delay(10);
2553 continue;
2554 }
75bb7b0a 2555         if (url_feof(ic->pb) || eof) {
9dc41767 2556             if (is->video_stream >= 0){
26534fe8
MN
2557 av_init_packet(pkt);
2558 pkt->data=NULL;
2559 pkt->size=0;
2560 pkt->stream_index= is->video_stream;
2561 packet_queue_put(&is->videoq, pkt);
9dc41767 2562 }
b4083171 2563 SDL_Delay(10);
1922c0a7
RK
2564             if (is->audioq.size + is->videoq.size + is->subtitleq.size ==0){
2565                 if (loop!=1 && (!loop || --loop)){
2566 stream_seek(cur_stream, start_time != AV_NOPTS_VALUE ? start_time : 0, 0, 0);
2567                 }else if (autoexit){
2568 ret=AVERROR_EOF;
2569 goto fail;
2570 }
2d1653b0 2571 }
600a331c
MN
2572 continue;
2573 }
72ea344b 2574 ret = av_read_frame(ic, pkt);
01310af2 2575 if (ret < 0) {
75bb7b0a
MN
2576 if (ret == AVERROR_EOF)
2577 eof=1;
2578 if (url_ferror(ic->pb))
bb270c08 2579 break;
75bb7b0a
MN
2580 SDL_Delay(100); /* wait for user event */
2581 continue;
01310af2 2582 }
d834d63b
RK
2583 /* check if packet is in play range specified by user, then queue, otherwise discard */
2584 pkt_in_play_range = duration == AV_NOPTS_VALUE ||
2585 (pkt->pts - ic->streams[pkt->stream_index]->start_time) *
2586 av_q2d(ic->streams[pkt->stream_index]->time_base) -
2587 (double)(start_time != AV_NOPTS_VALUE ? start_time : 0)/1000000
2588 <= ((double)duration/1000000);
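        /* Editor's note (illustrative arithmetic, hypothetical options): with
           `-ss 60 -t 10`, start_time and duration are 60 s and 10 s in
           AV_TIME_BASE (microsecond) units. A packet whose pts lies 65 s after
           the stream's start_time gives 65 - 60 = 5 <= 10, so it is queued,
           while one at 75 s gives 15 > 10 and falls through to av_free_packet(). */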
2589 if (pkt->stream_index == is->audio_stream && pkt_in_play_range) {
01310af2 2590 packet_queue_put(&is->audioq, pkt);
d834d63b 2591 } else if (pkt->stream_index == is->video_stream && pkt_in_play_range) {
01310af2 2592 packet_queue_put(&is->videoq, pkt);
d834d63b 2593 } else if (pkt->stream_index == is->subtitle_stream && pkt_in_play_range) {
72ce053b 2594 packet_queue_put(&is->subtitleq, pkt);
01310af2
FB
2595 } else {
2596 av_free_packet(pkt);
2597 }
2598 }
2599 /* wait until the end */
2600 while (!is->abort_request) {
2601 SDL_Delay(100);
2602 }
2603
638c9d91 2604 ret = 0;
01310af2 2605 fail:
416e3508
FB
2606 /* disable interrupting */
2607 global_video_state = NULL;
2608
01310af2
FB
2609 /* close each stream */
2610 if (is->audio_stream >= 0)
2611 stream_component_close(is, is->audio_stream);
2612 if (is->video_stream >= 0)
2613 stream_component_close(is, is->video_stream);
72ce053b
IC
2614 if (is->subtitle_stream >= 0)
2615 stream_component_close(is, is->subtitle_stream);
638c9d91
FB
2616 if (is->ic) {
2617 av_close_input_file(is->ic);
2618 is->ic = NULL; /* safety */
2619 }
416e3508
FB
2620 url_set_interrupt_cb(NULL);
2621
638c9d91
FB
2622 if (ret != 0) {
2623 SDL_Event event;
115329f1 2624
638c9d91
FB
2625 event.type = FF_QUIT_EVENT;
2626 event.user.data1 = is;
2627 SDL_PushEvent(&event);
2628 }
01310af2
FB
2629 return 0;
2630}
2631
638c9d91 2632static VideoState *stream_open(const char *filename, AVInputFormat *iformat)
01310af2
FB
2633{
2634 VideoState *is;
2635
2636 is = av_mallocz(sizeof(VideoState));
2637 if (!is)
2638 return NULL;
f7d78f36 2639 av_strlcpy(is->filename, filename, sizeof(is->filename));
638c9d91 2640 is->iformat = iformat;
01310af2
FB
2641 is->ytop = 0;
2642 is->xleft = 0;
2643
2644 /* start video display */
2645 is->pictq_mutex = SDL_CreateMutex();
2646 is->pictq_cond = SDL_CreateCond();
115329f1 2647
72ce053b
IC
2648 is->subpq_mutex = SDL_CreateMutex();
2649 is->subpq_cond = SDL_CreateCond();
115329f1 2650
638c9d91 2651 is->av_sync_type = av_sync_type;
01310af2
FB
2652 is->parse_tid = SDL_CreateThread(decode_thread, is);
2653 if (!is->parse_tid) {
2654 av_free(is);
2655 return NULL;
2656 }
2657 return is;
2658}
2659
2660static void stream_close(VideoState *is)
2661{
2662 VideoPicture *vp;
2663 int i;
2664 /* XXX: use a special url_shutdown call to abort parse cleanly */
2665 is->abort_request = 1;
2666 SDL_WaitThread(is->parse_tid, NULL);
d38c9e7a 2667 SDL_WaitThread(is->refresh_tid, NULL);
01310af2
FB
2668
2669 /* free all pictures */
2670 for(i=0;i<VIDEO_PICTURE_QUEUE_SIZE; i++) {
2671 vp = &is->pictq[i];
917d2bb3
MN
2672#if CONFIG_AVFILTER
2673 if (vp->picref) {
2674 avfilter_unref_pic(vp->picref);
2675 vp->picref = NULL;
2676 }
2677#endif
01310af2
FB
2678 if (vp->bmp) {
2679 SDL_FreeYUVOverlay(vp->bmp);
2680 vp->bmp = NULL;
2681 }
2682 }
2683 SDL_DestroyMutex(is->pictq_mutex);
2684 SDL_DestroyCond(is->pictq_cond);
72ce053b
IC
2685 SDL_DestroyMutex(is->subpq_mutex);
2686 SDL_DestroyCond(is->subpq_cond);
917d2bb3 2687#if !CONFIG_AVFILTER
3ac56e28
MS
2688 if (is->img_convert_ctx)
2689 sws_freeContext(is->img_convert_ctx);
917d2bb3 2690#endif
7c5ab145 2691 av_free(is);
01310af2
FB
2692}
2693
7b49ce2e 2694static void stream_cycle_channel(VideoState *is, int codec_type)
638c9d91
FB
2695{
2696 AVFormatContext *ic = is->ic;
2697 int start_index, stream_index;
2698 AVStream *st;
2699
72415b2a 2700 if (codec_type == AVMEDIA_TYPE_VIDEO)
638c9d91 2701 start_index = is->video_stream;
72415b2a 2702 else if (codec_type == AVMEDIA_TYPE_AUDIO)
638c9d91 2703 start_index = is->audio_stream;
72ce053b
IC
2704 else
2705 start_index = is->subtitle_stream;
72415b2a 2706 if (start_index < (codec_type == AVMEDIA_TYPE_SUBTITLE ? -1 : 0))
638c9d91
FB
2707 return;
2708 stream_index = start_index;
2709 for(;;) {
2710 if (++stream_index >= is->ic->nb_streams)
72ce053b 2711 {
72415b2a 2712 if (codec_type == AVMEDIA_TYPE_SUBTITLE)
72ce053b
IC
2713 {
2714 stream_index = -1;
2715 goto the_end;
2716 } else
2717 stream_index = 0;
2718 }
638c9d91
FB
2719 if (stream_index == start_index)
2720 return;
2721 st = ic->streams[stream_index];
01f4895c 2722 if (st->codec->codec_type == codec_type) {
638c9d91
FB
2723 /* check that parameters are OK */
2724 switch(codec_type) {
72415b2a 2725 case AVMEDIA_TYPE_AUDIO:
01f4895c
MN
2726 if (st->codec->sample_rate != 0 &&
2727 st->codec->channels != 0)
638c9d91
FB
2728 goto the_end;
2729 break;
72415b2a
SS
2730 case AVMEDIA_TYPE_VIDEO:
2731 case AVMEDIA_TYPE_SUBTITLE:
638c9d91
FB
2732 goto the_end;
2733 default:
2734 break;
2735 }
2736 }
2737 }
2738 the_end:
2739 stream_component_close(is, start_index);
2740 stream_component_open(is, stream_index);
2741}
2742
2743
7b49ce2e 2744static void toggle_full_screen(void)
01310af2 2745{
01310af2 2746 is_full_screen = !is_full_screen;
29f3b38a
MR
2747 if (!fs_screen_width) {
2748 /* use default SDL method */
fb84155b 2749// SDL_WM_ToggleFullScreen(screen);
01310af2 2750 }
fb84155b 2751 video_open(cur_stream);
01310af2
FB
2752}
2753
7b49ce2e 2754static void toggle_pause(void)
01310af2
FB
2755{
2756 if (cur_stream)
2757 stream_pause(cur_stream);
bba04f1e
WH
2758 step = 0;
2759}
2760
7b49ce2e 2761static void step_to_next_frame(void)
bba04f1e
WH
2762{
2763 if (cur_stream) {
19cc524a 2764         /* if the stream is paused, unpause it, then step */
bba04f1e 2765 if (cur_stream->paused)
19cc524a 2766 stream_pause(cur_stream);
bba04f1e
WH
2767 }
2768 step = 1;
01310af2
FB
2769}
2770
7b49ce2e 2771static void do_exit(void)
01310af2 2772{
7c5ab145 2773 int i;
01310af2
FB
2774 if (cur_stream) {
2775 stream_close(cur_stream);
2776 cur_stream = NULL;
2777 }
72415b2a 2778 for (i = 0; i < AVMEDIA_TYPE_NB; i++)
7c5ab145
MS
2779 av_free(avcodec_opts[i]);
2780 av_free(avformat_opts);
2781 av_free(sws_opts);
917d2bb3
MN
2782#if CONFIG_AVFILTER
2783 avfilter_uninit();
2784#endif
01310af2
FB
2785 if (show_status)
2786 printf("\n");
2787 SDL_Quit();
2788 exit(0);
2789}
2790
7b49ce2e 2791static void toggle_audio_display(void)
01310af2
FB
2792{
2793 if (cur_stream) {
f5968788 2794 int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
12eeda34 2795 cur_stream->show_audio = (cur_stream->show_audio + 1) % 3;
f5968788
MN
2796 fill_rectangle(screen,
2797 cur_stream->xleft, cur_stream->ytop, cur_stream->width, cur_stream->height,
2798 bgcolor);
2799 SDL_UpdateRect(screen, cur_stream->xleft, cur_stream->ytop, cur_stream->width, cur_stream->height);
01310af2
FB
2800 }
2801}
2802
2803/* handle an event sent by the GUI */
7b49ce2e 2804static void event_loop(void)
01310af2
FB
2805{
2806 SDL_Event event;
a11d11aa 2807 double incr, pos, frac;
01310af2
FB
2808
2809 for(;;) {
d52ec002 2810 double x;
01310af2
FB
2811 SDL_WaitEvent(&event);
2812 switch(event.type) {
2813 case SDL_KEYDOWN:
066ce8c9
AS
2814 if (exit_on_keydown) {
2815 do_exit();
2816 break;
2817 }
01310af2
FB
2818 switch(event.key.keysym.sym) {
2819 case SDLK_ESCAPE:
2820 case SDLK_q:
2821 do_exit();
2822 break;
2823 case SDLK_f:
2824 toggle_full_screen();
2825 break;
2826 case SDLK_p:
2827 case SDLK_SPACE:
2828 toggle_pause();
2829 break;
bba04f1e
WH
2830 case SDLK_s: //S: Step to next frame
2831 step_to_next_frame();
2832 break;
01310af2 2833 case SDLK_a:
115329f1 2834 if (cur_stream)
72415b2a 2835 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_AUDIO);
638c9d91
FB
2836 break;
2837 case SDLK_v:
115329f1 2838 if (cur_stream)
72415b2a 2839 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_VIDEO);
638c9d91 2840 break;
72ce053b 2841 case SDLK_t:
115329f1 2842 if (cur_stream)
72415b2a 2843 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_SUBTITLE);
72ce053b 2844 break;
638c9d91 2845 case SDLK_w:
01310af2
FB
2846 toggle_audio_display();
2847 break;
72ea344b
FB
2848 case SDLK_LEFT:
2849 incr = -10.0;
2850 goto do_seek;
2851 case SDLK_RIGHT:
2852 incr = 10.0;
2853 goto do_seek;
2854 case SDLK_UP:
2855 incr = 60.0;
2856 goto do_seek;
2857 case SDLK_DOWN:
2858 incr = -60.0;
2859 do_seek:
2860 if (cur_stream) {
94b594c6 2861 if (seek_by_bytes) {
1a620dd7
MN
2862 if (cur_stream->video_stream >= 0 && cur_stream->video_current_pos>=0){
2863 pos= cur_stream->video_current_pos;
2864                     }else if (cur_stream->audio_stream >= 0 && cur_stream->audio_pkt.pos>=0){
2865 pos= cur_stream->audio_pkt.pos;
2866 }else
2867 pos = url_ftell(cur_stream->ic->pb);
94b594c6 2868 if (cur_stream->ic->bit_rate)
566cd2cb 2869 incr *= cur_stream->ic->bit_rate / 8.0;
94b594c6
SH
2870 else
2871 incr *= 180000.0;
2872 pos += incr;
2ef46053 2873 stream_seek(cur_stream, pos, incr, 1);
94b594c6
SH
2874 } else {
2875 pos = get_master_clock(cur_stream);
2876 pos += incr;
2ef46053 2877 stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), (int64_t)(incr * AV_TIME_BASE), 0);
94b594c6 2878 }
72ea344b
FB
2879 }
2880 break;
01310af2
FB
2881 default:
2882 break;
2883 }
2884 break;
a11d11aa 2885 case SDL_MOUSEBUTTONDOWN:
066ce8c9
AS
2886 if (exit_on_mousedown) {
2887 do_exit();
2888 break;
2889 }
d52ec002
MN
2890 case SDL_MOUSEMOTION:
2891             if (event.type == SDL_MOUSEBUTTONDOWN){
2892 x= event.button.x;
2893 }else{
2894                 if (event.motion.state != SDL_PRESSED)
2895 break;
2896 x= event.motion.x;
2897 }
bb270c08 2898 if (cur_stream) {
2ef46053
MN
2899                 if (seek_by_bytes || cur_stream->ic->duration<=0){
2900 uint64_t size= url_fsize(cur_stream->ic->pb);
d52ec002 2901 stream_seek(cur_stream, size*x/cur_stream->width, 0, 1);
2ef46053 2902 }else{
6371c81a
MN
2903 int64_t ts;
2904 int ns, hh, mm, ss;
2905 int tns, thh, tmm, tss;
2906 tns = cur_stream->ic->duration/1000000LL;
2907 thh = tns/3600;
2908 tmm = (tns%3600)/60;
2909 tss = (tns%60);
d52ec002 2910 frac = x/cur_stream->width;
6371c81a
MN
2911 ns = frac*tns;
2912 hh = ns/3600;
2913 mm = (ns%3600)/60;
2914 ss = (ns%60);
2915 fprintf(stderr, "Seek to %2.0f%% (%2d:%02d:%02d) of total duration (%2d:%02d:%02d) \n", frac*100,
2916 hh, mm, ss, thh, tmm, tss);
2917 ts = frac*cur_stream->ic->duration;
2918 if (cur_stream->ic->start_time != AV_NOPTS_VALUE)
2919 ts += cur_stream->ic->start_time;
2920 stream_seek(cur_stream, ts, 0, 0);
2ef46053 2921 }
bb270c08
DB
2922 }
2923 break;
01310af2
FB
2924 case SDL_VIDEORESIZE:
2925 if (cur_stream) {
115329f1 2926 screen = SDL_SetVideoMode(event.resize.w, event.resize.h, 0,
01310af2 2927 SDL_HWSURFACE|SDL_RESIZABLE|SDL_ASYNCBLIT|SDL_HWACCEL);
c57d3469
MN
2928 screen_width = cur_stream->width = event.resize.w;
2929 screen_height= cur_stream->height= event.resize.h;
01310af2
FB
2930 }
2931 break;
2932 case SDL_QUIT:
638c9d91 2933 case FF_QUIT_EVENT:
01310af2
FB
2934 do_exit();
2935 break;
2936 case FF_ALLOC_EVENT:
fccb19e3 2937 video_open(event.user.data1);
01310af2
FB
2938 alloc_picture(event.user.data1);
2939 break;
2940 case FF_REFRESH_EVENT:
2941 video_refresh_timer(event.user.data1);
d38c9e7a 2942 cur_stream->refresh=0;
01310af2
FB
2943 break;
2944 default:
2945 break;
2946 }
2947 }
2948}
2949
e4b89522
LW
2950static void opt_frame_size(const char *arg)
2951{
b33ece16 2952 if (av_parse_video_frame_size(&frame_width, &frame_height, arg) < 0) {
e4b89522
LW
2953 fprintf(stderr, "Incorrect frame size\n");
2954 exit(1);
2955 }
2956 if ((frame_width % 2) != 0 || (frame_height % 2) != 0) {
2957 fprintf(stderr, "Frame size must be a multiple of 2\n");
2958 exit(1);
2959 }
2960}
2961
a5b3b5f6 2962static int opt_width(const char *opt, const char *arg)
01310af2 2963{
a5b3b5f6
SS
2964 screen_width = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
2965 return 0;
01310af2
FB
2966}
2967
a5b3b5f6 2968static int opt_height(const char *opt, const char *arg)
01310af2 2969{
a5b3b5f6
SS
2970 screen_height = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
2971 return 0;
01310af2
FB
2972}
2973
2974static void opt_format(const char *arg)
2975{
2976 file_iformat = av_find_input_format(arg);
2977 if (!file_iformat) {
2978 fprintf(stderr, "Unknown input format: %s\n", arg);
2979 exit(1);
2980 }
2981}
61890b02 2982
e4b89522
LW
2983static void opt_frame_pix_fmt(const char *arg)
2984{
718c7b18 2985 frame_pix_fmt = av_get_pix_fmt(arg);
e4b89522
LW
2986}
2987
b81d6235 2988static int opt_sync(const char *opt, const char *arg)
638c9d91
FB
2989{
2990 if (!strcmp(arg, "audio"))
2991 av_sync_type = AV_SYNC_AUDIO_MASTER;
2992 else if (!strcmp(arg, "video"))
2993 av_sync_type = AV_SYNC_VIDEO_MASTER;
2994 else if (!strcmp(arg, "ext"))
2995 av_sync_type = AV_SYNC_EXTERNAL_CLOCK;
aab1b7e5 2996 else {
b81d6235 2997 fprintf(stderr, "Unknown value for %s: %s\n", opt, arg);
aab1b7e5
SS
2998 exit(1);
2999 }
b81d6235 3000 return 0;
638c9d91
FB
3001}
3002
e11bc2c6 3003static int opt_seek(const char *opt, const char *arg)
72ea344b 3004{
e11bc2c6
SS
3005 start_time = parse_time_or_die(opt, arg, 1);
3006 return 0;
72ea344b
FB
3007}
3008
d834d63b
RK
3009static int opt_duration(const char *opt, const char *arg)
3010{
3011 duration = parse_time_or_die(opt, arg, 1);
3012 return 0;
3013}
3014
a5b3b5f6 3015static int opt_debug(const char *opt, const char *arg)
e26a8335 3016{
a309073b 3017 av_log_set_level(99);
a5b3b5f6
SS
3018 debug = parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
3019 return 0;
e26a8335 3020}
115329f1 3021
a5b3b5f6 3022static int opt_vismv(const char *opt, const char *arg)
0c9bbaec 3023{
a5b3b5f6
SS
3024 debug_mv = parse_number_or_die(opt, arg, OPT_INT64, INT_MIN, INT_MAX);
3025 return 0;
0c9bbaec 3026}
c62c07d3 3027
a5b3b5f6 3028static int opt_thread_count(const char *opt, const char *arg)
c62c07d3 3029{
a5b3b5f6 3030 thread_count= parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
b250f9c6 3031#if !HAVE_THREADS
c62c07d3
MN
3032 fprintf(stderr, "Warning: not compiled with thread support, using thread emulation\n");
3033#endif
a5b3b5f6 3034 return 0;
c62c07d3 3035}
115329f1 3036
358061f6 3037static const OptionDef options[] = {
992f8eae 3038#include "cmdutils_common_opts.h"
a5b3b5f6
SS
3039 { "x", HAS_ARG | OPT_FUNC2, {(void*)opt_width}, "force displayed width", "width" },
3040 { "y", HAS_ARG | OPT_FUNC2, {(void*)opt_height}, "force displayed height", "height" },
e4b89522 3041 { "s", HAS_ARG | OPT_VIDEO, {(void*)opt_frame_size}, "set frame size (WxH or abbreviation)", "size" },
638c9d91 3042 { "fs", OPT_BOOL, {(void*)&is_full_screen}, "force full screen" },
01310af2
FB
3043 { "an", OPT_BOOL, {(void*)&audio_disable}, "disable audio" },
3044 { "vn", OPT_BOOL, {(void*)&video_disable}, "disable video" },
72415b2a
SS
3045 { "ast", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_AUDIO]}, "select desired audio stream", "stream_number" },
3046 { "vst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_VIDEO]}, "select desired video stream", "stream_number" },
3047 { "sst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_SUBTITLE]}, "select desired subtitle stream", "stream_number" },
e11bc2c6 3048 { "ss", HAS_ARG | OPT_FUNC2, {(void*)&opt_seek}, "seek to a given position in seconds", "pos" },
d834d63b 3049 { "t", HAS_ARG | OPT_FUNC2, {(void*)&opt_duration}, "play \"duration\" seconds of audio/video", "duration" },
674fe163 3050 { "bytes", OPT_INT | HAS_ARG, {(void*)&seek_by_bytes}, "seek by bytes 0=off 1=on -1=auto", "val" },
01310af2
FB
3051 { "nodisp", OPT_BOOL, {(void*)&display_disable}, "disable graphical display" },
3052 { "f", HAS_ARG, {(void*)opt_format}, "force format", "fmt" },
e4b89522 3053 { "pix_fmt", HAS_ARG | OPT_EXPERT | OPT_VIDEO, {(void*)opt_frame_pix_fmt}, "set pixel format", "format" },
98ae6acf 3054 { "stats", OPT_BOOL | OPT_EXPERT, {(void*)&show_status}, "show status", "" },
a5b3b5f6 3055 { "debug", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_debug}, "print specific debug info", "" },
6387c3e6 3056 { "bug", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&workaround_bugs}, "workaround bugs", "" },
a5b3b5f6 3057 { "vismv", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_vismv}, "visualize motion vectors", "" },
6fc5b059 3058 { "fast", OPT_BOOL | OPT_EXPERT, {(void*)&fast}, "non spec compliant optimizations", "" },
30bc6613 3059 { "genpts", OPT_BOOL | OPT_EXPERT, {(void*)&genpts}, "generate pts", "" },
59055363 3060 { "drp", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&decoder_reorder_pts}, "let decoder reorder pts 0=off 1=on -1=auto", ""},
178fcca8 3061 { "lowres", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&lowres}, "", "" },
8c3eba7c
MN
3062 { "skiploop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_loop_filter}, "", "" },
3063 { "skipframe", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_frame}, "", "" },
3064 { "skipidct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_idct}, "", "" },
178fcca8 3065 { "idct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&idct}, "set idct algo", "algo" },
047599a4 3066 { "er", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_recognition}, "set error detection threshold (0-4)", "threshold" },
1b51e051 3067 { "ec", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_concealment}, "set error concealment options", "bit_mask" },
b81d6235 3068 { "sync", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_sync}, "set audio-video sync. type (type=audio/video/ext)", "type" },
a5b3b5f6 3069 { "threads", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_thread_count}, "thread count", "count" },
2d1653b0 3070 { "autoexit", OPT_BOOL | OPT_EXPERT, {(void*)&autoexit}, "exit at the end", "" },
066ce8c9
AS
3071 { "exitonkeydown", OPT_BOOL | OPT_EXPERT, {(void*)&exit_on_keydown}, "exit on key down", "" },
3072 { "exitonmousedown", OPT_BOOL | OPT_EXPERT, {(void*)&exit_on_mousedown}, "exit on mouse down", "" },
1922c0a7 3073 { "loop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&loop}, "set number of times the playback shall be looped", "loop count" },
d38c9e7a 3074 { "framedrop", OPT_BOOL | OPT_EXPERT, {(void*)&framedrop}, "drop frames when cpu is too slow", "" },
076db5ed 3075 { "window_title", OPT_STRING | HAS_ARG, {(void*)&window_title}, "set window title", "window title" },
917d2bb3 3076#if CONFIG_AVFILTER
09ed11e5 3077 { "vf", OPT_STRING | HAS_ARG, {(void*)&vfilters}, "video filters", "filter list" },
917d2bb3 3078#endif
2b3da32f 3079 { "rdftspeed", OPT_INT | HAS_ARG| OPT_AUDIO | OPT_EXPERT, {(void*)&rdftspeed}, "rdft speed", "msecs" },
e43d7a18 3080 { "default", OPT_FUNC2 | HAS_ARG | OPT_AUDIO | OPT_VIDEO | OPT_EXPERT, {(void*)opt_default}, "generic catch all option", "" },
01310af2
FB
3081 { NULL, },
3082};
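/* Editor's note (illustrative invocations only; they assume a build with the
 * corresponding features enabled and map directly onto the option table above):
 *
 *   ffplay -ss 30 -t 10 input.mkv            play 10 seconds starting at 0:30
 *   ffplay -an -vf scale=640:360 input.mkv   video only, scaled (CONFIG_AVFILTER)
 *   ffplay -sync ext -loop 0 input.mp3       external clock master, loop forever
 */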
3083
0c2a18cb 3084static void show_usage(void)
01310af2 3085{
27daa420
RP
3086 printf("Simple media player\n");
3087 printf("usage: ffplay [options] input_file\n");
01310af2 3088 printf("\n");
0c2a18cb
RP
3089}
3090
3091static void show_help(void)
3092{
3093 show_usage();
02d504a7
FB
3094 show_help_options(options, "Main options:\n",
3095 OPT_EXPERT, 0);
3096 show_help_options(options, "\nAdvanced options:\n",
3097 OPT_EXPERT, OPT_EXPERT);
01310af2
FB
3098 printf("\nWhile playing:\n"
3099 "q, ESC quit\n"
3100 "f toggle full screen\n"
3101 "p, SPC pause\n"
638c9d91
FB
3102 "a cycle audio channel\n"
3103 "v cycle video channel\n"
72ce053b 3104 "t cycle subtitle channel\n"
638c9d91 3105 "w show audio waves\n"
79f8b328 3106 "s activate frame-step mode\n"
72ea344b
FB
3107 "left/right seek backward/forward 10 seconds\n"
3108 "down/up seek backward/forward 1 minute\n"
a11d11aa 3109 "mouse click seek to percentage in file corresponding to fraction of width\n"
01310af2 3110 );
01310af2
FB
3111}
3112
358061f6 3113static void opt_input_file(const char *filename)
01310af2 3114{
07a70b38
SS
3115 if (input_filename) {
3116 fprintf(stderr, "Argument '%s' provided as input filename, but '%s' was already specified.\n",
3117 filename, input_filename);
3118 exit(1);
3119 }
e8d83e1c 3120 if (!strcmp(filename, "-"))
9fcfc0b7 3121 filename = "pipe:";
01310af2
FB
3122 input_filename = filename;
3123}
3124
3125/* Called from the main */
3126int main(int argc, char **argv)
3127{
e43d7a18 3128 int flags, i;
115329f1 3129
01310af2 3130 /* register all codecs, demux and protocols */
c721d803 3131 avcodec_register_all();
9b157b0c 3132#if CONFIG_AVDEVICE
c721d803 3133 avdevice_register_all();
9b157b0c 3134#endif
917d2bb3
MN
3135#if CONFIG_AVFILTER
3136 avfilter_register_all();
3137#endif
01310af2
FB
3138 av_register_all();
3139
72415b2a 3140 for(i=0; i<AVMEDIA_TYPE_NB; i++){
636f1c4c 3141 avcodec_opts[i]= avcodec_alloc_context2(i);
e43d7a18 3142 }
8e2fd8e1 3143 avformat_opts = avformat_alloc_context();
917d2bb3 3144#if !CONFIG_AVFILTER
e43d7a18 3145 sws_opts = sws_getContext(16,16,0, 16,16,0, sws_flags, NULL,NULL,NULL);
917d2bb3 3146#endif
e43d7a18 3147
ea9c581f 3148 show_banner();
4cfac5bc 3149
f5da5c93 3150 parse_options(argc, argv, options, opt_input_file);
01310af2 3151
aab1b7e5 3152 if (!input_filename) {
7f11e745 3153 show_usage();
7a7da6b4 3154 fprintf(stderr, "An input file must be specified\n");
7f11e745 3155 fprintf(stderr, "Use -h to get full help or, even better, run 'man ffplay'\n");
aab1b7e5
SS
3156 exit(1);
3157 }
01310af2
FB
3158
3159 if (display_disable) {
3160 video_disable = 1;
3161 }
31319a8c 3162 flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
c97f5402
DB
3163#if !defined(__MINGW32__) && !defined(__APPLE__)
3164 flags |= SDL_INIT_EVENTTHREAD; /* Not supported on Windows or Mac OS X */
31319a8c 3165#endif
01310af2 3166 if (SDL_Init (flags)) {
05ab0b76 3167 fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
01310af2
FB
3168 exit(1);
3169 }
3170
3171 if (!display_disable) {
b250f9c6 3172#if HAVE_SDL_VIDEO_SIZE
3ef17d62
MR
3173 const SDL_VideoInfo *vi = SDL_GetVideoInfo();
3174 fs_screen_width = vi->current_w;
3175 fs_screen_height = vi->current_h;
29f3b38a 3176#endif
01310af2
FB
3177 }
3178
3179 SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
01310af2
FB
3180 SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
3181 SDL_EventState(SDL_USEREVENT, SDL_IGNORE);
3182
39c6a118
MN
3183 av_init_packet(&flush_pkt);
3184 flush_pkt.data= "FLUSH";
3185
638c9d91 3186 cur_stream = stream_open(input_filename, file_iformat);
01310af2
FB
3187
3188 event_loop();
3189
3190 /* never returns */
3191
3192 return 0;
3193}