01310af2 1/*
f05ef45c 2 * FFplay: Simple Media Player based on the FFmpeg libraries
01310af2
FB
3 * Copyright (c) 2003 Fabrice Bellard
4 *
b78e7197
DB
5 * This file is part of FFmpeg.
6 *
7 * FFmpeg is free software; you can redistribute it and/or
01310af2
FB
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
b78e7197 10 * version 2.1 of the License, or (at your option) any later version.
01310af2 11 *
b78e7197 12 * FFmpeg is distributed in the hope that it will be useful,
01310af2
FB
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
b78e7197 18 * License along with FFmpeg; if not, write to the Free Software
5509bffa 19 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
01310af2 20 */
364a9607 21
ba11257e 22#include "config.h"
8a3ceaf4 23#include <inttypes.h>
0f4e8165
RB
24#include <math.h>
25#include <limits.h>
245976da 26#include "libavutil/avstring.h"
2b4abbd6 27#include "libavutil/colorspace.h"
718c7b18 28#include "libavutil/pixdesc.h"
126b638e 29#include "libavcore/parseutils.h"
245976da 30#include "libavformat/avformat.h"
245976da
DB
31#include "libavdevice/avdevice.h"
32#include "libswscale/swscale.h"
5a4476e2 33#include "libavcodec/audioconvert.h"
e43d7a18 34#include "libavcodec/opt.h"
166621ab 35#include "libavcodec/avfft.h"
01310af2 36
917d2bb3
MN
37#if CONFIG_AVFILTER
38# include "libavfilter/avfilter.h"
39# include "libavfilter/avfiltergraph.h"
40# include "libavfilter/graphparser.h"
41#endif
42
01310af2
FB
43#include "cmdutils.h"
44
45#include <SDL.h>
46#include <SDL_thread.h>
47
2f30a81d 48#ifdef __MINGW32__
31319a8c
FB
49#undef main /* We don't want SDL to override our main() */
50#endif
51
d38c9e7a
MN
52#include <unistd.h>
53#include <assert.h>
54
64555bd9 55const char program_name[] = "FFplay";
ea9c581f 56const int program_birth_year = 2003;
4cfac5bc 57
638c9d91
FB
58//#define DEBUG_SYNC
59
79ee4683
MN
60#define MAX_QUEUE_SIZE (15 * 1024 * 1024)
61#define MIN_AUDIOQ_SIZE (20 * 16 * 1024)
62#define MIN_FRAMES 5
01310af2 63
638c9d91
FB
64/* SDL audio buffer size, in samples. Should be small to have precise
65 A/V sync as SDL does not have hardware buffer fullness info. */
66#define SDL_AUDIO_BUFFER_SIZE 1024
67
68/* no AV sync correction is done if below the AV sync threshold */
7e0140cb 69#define AV_SYNC_THRESHOLD 0.01
638c9d91
FB
70/* no AV correction is done if the error is too big */
71#define AV_NOSYNC_THRESHOLD 10.0
72
d38c9e7a
MN
73#define FRAME_SKIP_FACTOR 0.05
74
638c9d91
FB
75/* maximum audio speed change to get correct sync */
76#define SAMPLE_CORRECTION_PERCENT_MAX 10
77
78/* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */
79#define AUDIO_DIFF_AVG_NB 20
80
01310af2
FB
81/* NOTE: the size must be big enough to compensate for the hardware audio buffer size */
82#define SAMPLE_ARRAY_SIZE (2*65536)
83
03ae87a3
LA
84static int sws_flags = SWS_BICUBIC;
85
01310af2
FB
86typedef struct PacketQueue {
87 AVPacketList *first_pkt, *last_pkt;
88 int nb_packets;
89 int size;
90 int abort_request;
91 SDL_mutex *mutex;
92 SDL_cond *cond;
93} PacketQueue;
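/* The packet queue is the hand-off point between the demux thread and the
   decoder threads: a singly linked AVPacketList protected by an SDL mutex,
   with a condition variable to wake readers blocked in packet_queue_get().
   q->size counts the buffered bytes (packet payload plus list overhead). */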
94
562f382c 95#define VIDEO_PICTURE_QUEUE_SIZE 2
72ce053b 96#define SUBPICTURE_QUEUE_SIZE 4
01310af2
FB
97
98typedef struct VideoPicture {
267e9dfa 99 double pts; ///<presentation time stamp for this picture
d38c9e7a 100 double target_clock; ///<av_gettime() time at which this should be displayed ideally
1a620dd7 101 int64_t pos; ///<byte position in file
01310af2
FB
102 SDL_Overlay *bmp;
103 int width, height; /* source height & width */
104 int allocated;
917d2bb3
MN
105 enum PixelFormat pix_fmt;
106
107#if CONFIG_AVFILTER
108 AVFilterPicRef *picref;
109#endif
01310af2
FB
110} VideoPicture;
111
72ce053b
IC
112typedef struct SubPicture {
113 double pts; /* presentation time stamp for this picture */
114 AVSubtitle sub;
115} SubPicture;
116
01310af2
FB
117enum {
118 AV_SYNC_AUDIO_MASTER, /* default choice */
119 AV_SYNC_VIDEO_MASTER,
638c9d91 120 AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */
01310af2
FB
121};
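/* av_sync_type selects which of these clocks acts as the master; the other
   stream is then nudged toward it (see get_master_clock() and
   compute_target_time() below). Audio master is the default. */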
122
123typedef struct VideoState {
124 SDL_Thread *parse_tid;
125 SDL_Thread *video_tid;
d38c9e7a 126 SDL_Thread *refresh_tid;
638c9d91 127 AVInputFormat *iformat;
01310af2
FB
128 int no_background;
129 int abort_request;
130 int paused;
416e3508 131 int last_paused;
72ea344b 132 int seek_req;
3ba1438d 133 int seek_flags;
72ea344b 134 int64_t seek_pos;
4ed29207 135 int64_t seek_rel;
f5668147 136 int read_pause_return;
01310af2
FB
137 AVFormatContext *ic;
138 int dtg_active_format;
139
140 int audio_stream;
115329f1 141
01310af2 142 int av_sync_type;
638c9d91
FB
143 double external_clock; /* external clock base */
144 int64_t external_clock_time;
115329f1 145
638c9d91
FB
146 double audio_clock;
147 double audio_diff_cum; /* used for AV difference average computation */
148 double audio_diff_avg_coef;
149 double audio_diff_threshold;
150 int audio_diff_avg_count;
01310af2
FB
151 AVStream *audio_st;
152 PacketQueue audioq;
153 int audio_hw_buf_size;
154 /* samples output by the codec. We reserve more space for A/V sync
155 compensation */
c6727809
MR
156 DECLARE_ALIGNED(16,uint8_t,audio_buf1)[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
157 DECLARE_ALIGNED(16,uint8_t,audio_buf2)[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
5a4476e2 158 uint8_t *audio_buf;
7fea94ce 159 unsigned int audio_buf_size; /* in bytes */
01310af2 160 int audio_buf_index; /* in bytes */
bea18375 161 AVPacket audio_pkt_temp;
01310af2 162 AVPacket audio_pkt;
5a4476e2
PR
163 enum SampleFormat audio_src_fmt;
164 AVAudioConvert *reformat_ctx;
115329f1 165
01310af2
FB
166 int show_audio; /* if true, display audio samples */
167 int16_t sample_array[SAMPLE_ARRAY_SIZE];
168 int sample_array_index;
5e0257e3 169 int last_i_start;
166621ab 170 RDFTContext *rdft;
12eeda34 171 int rdft_bits;
7dbbf6a1 172 FFTSample *rdft_data;
12eeda34 173 int xpos;
115329f1 174
72ce053b
IC
175 SDL_Thread *subtitle_tid;
176 int subtitle_stream;
177 int subtitle_stream_changed;
178 AVStream *subtitle_st;
179 PacketQueue subtitleq;
180 SubPicture subpq[SUBPICTURE_QUEUE_SIZE];
181 int subpq_size, subpq_rindex, subpq_windex;
182 SDL_mutex *subpq_mutex;
183 SDL_cond *subpq_cond;
115329f1 184
638c9d91
FB
185 double frame_timer;
186 double frame_last_pts;
187 double frame_last_delay;
115329f1 188 double video_clock; ///<pts of last decoded frame / predicted pts of next decoded frame
01310af2
FB
189 int video_stream;
190 AVStream *video_st;
191 PacketQueue videoq;
267e9dfa 192 double video_current_pts; ///<current displayed pts (different from video_clock if frame fifos are used)
68aefbe8 193 double video_current_pts_drift; ///<video_current_pts - time (av_gettime) at which we updated video_current_pts - used to have running video pts
1a620dd7 194 int64_t video_current_pos; ///<current displayed file pos
01310af2
FB
195 VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
196 int pictq_size, pictq_rindex, pictq_windex;
197 SDL_mutex *pictq_mutex;
198 SDL_cond *pictq_cond;
917d2bb3 199#if !CONFIG_AVFILTER
3ac56e28 200 struct SwsContext *img_convert_ctx;
917d2bb3 201#endif
115329f1 202
01310af2
FB
203 // QETimer *video_timer;
204 char filename[1024];
205 int width, height, xleft, ytop;
41db429d
MN
206
207 int64_t faulty_pts;
208 int64_t faulty_dts;
209 int64_t last_dts_for_fault_detection;
210 int64_t last_pts_for_fault_detection;
211
917d2bb3
MN
212#if CONFIG_AVFILTER
213 AVFilterContext *out_video_filter; ///<the last filter in the video chain
214#endif
d38c9e7a
MN
215
216 float skip_frames;
217 float skip_frames_index;
218 int refresh;
01310af2
FB
219} VideoState;
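/* A single VideoState describes one open file: the parse (demux), video,
   subtitle and refresh threads, the packet queues that feed the decoders,
   the decoded picture and subpicture ring buffers, and the clocks used for
   A/V synchronization. */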
220
358061f6 221static void show_help(void);
638c9d91 222static int audio_write_get_buf_size(VideoState *is);
01310af2
FB
223
224/* options specified by the user */
225static AVInputFormat *file_iformat;
226static const char *input_filename;
076db5ed 227static const char *window_title;
01310af2
FB
228static int fs_screen_width;
229static int fs_screen_height;
fccb19e3
MN
230static int screen_width = 0;
231static int screen_height = 0;
e4b89522
LW
232static int frame_width = 0;
233static int frame_height = 0;
234static enum PixelFormat frame_pix_fmt = PIX_FMT_NONE;
01310af2
FB
235static int audio_disable;
236static int video_disable;
72415b2a
SS
237static int wanted_stream[AVMEDIA_TYPE_NB]={
238 [AVMEDIA_TYPE_AUDIO]=-1,
239 [AVMEDIA_TYPE_VIDEO]=-1,
240 [AVMEDIA_TYPE_SUBTITLE]=-1,
5b369983 241};
70a4764d 242static int seek_by_bytes=-1;
01310af2 243static int display_disable;
1e1a0b18 244static int show_status = 1;
638c9d91 245static int av_sync_type = AV_SYNC_AUDIO_MASTER;
72ea344b 246static int64_t start_time = AV_NOPTS_VALUE;
d834d63b 247static int64_t duration = AV_NOPTS_VALUE;
e26a8335 248static int debug = 0;
0c9bbaec 249static int debug_mv = 0;
bba04f1e 250static int step = 0;
c62c07d3 251static int thread_count = 1;
6387c3e6 252static int workaround_bugs = 1;
6fc5b059 253static int fast = 0;
30bc6613 254static int genpts = 0;
178fcca8
MN
255static int lowres = 0;
256static int idct = FF_IDCT_AUTO;
8c3eba7c
MN
257static enum AVDiscard skip_frame= AVDISCARD_DEFAULT;
258static enum AVDiscard skip_idct= AVDISCARD_DEFAULT;
259static enum AVDiscard skip_loop_filter= AVDISCARD_DEFAULT;
047599a4 260static int error_recognition = FF_ER_CAREFUL;
1b51e051 261static int error_concealment = 3;
41db429d 262static int decoder_reorder_pts= -1;
2d1653b0 263static int autoexit;
066ce8c9
AS
264static int exit_on_keydown;
265static int exit_on_mousedown;
1922c0a7 266static int loop=1;
d38c9e7a 267static int framedrop=1;
2b3da32f
MN
268
269static int rdftspeed=20;
917d2bb3
MN
270#if CONFIG_AVFILTER
271static char *vfilters = NULL;
272#endif
01310af2
FB
273
274/* current context */
275static int is_full_screen;
276static VideoState *cur_stream;
5e0257e3 277static int64_t audio_callback_time;
01310af2 278
2c676c33 279static AVPacket flush_pkt;
39c6a118 280
01310af2
FB
281#define FF_ALLOC_EVENT (SDL_USEREVENT)
282#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
638c9d91 283#define FF_QUIT_EVENT (SDL_USEREVENT + 2)
01310af2 284
2c676c33 285static SDL_Surface *screen;
01310af2 286
515bd00e
MN
287static int packet_queue_put(PacketQueue *q, AVPacket *pkt);
288
01310af2
FB
289/* packet queue handling */
290static void packet_queue_init(PacketQueue *q)
291{
292 memset(q, 0, sizeof(PacketQueue));
293 q->mutex = SDL_CreateMutex();
294 q->cond = SDL_CreateCond();
515bd00e 295 packet_queue_put(q, &flush_pkt);
01310af2
FB
296}
297
72ea344b 298static void packet_queue_flush(PacketQueue *q)
01310af2
FB
299{
300 AVPacketList *pkt, *pkt1;
301
687fae2b 302 SDL_LockMutex(q->mutex);
01310af2
FB
303 for(pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
304 pkt1 = pkt->next;
305 av_free_packet(&pkt->pkt);
da6c4573 306 av_freep(&pkt);
01310af2 307 }
72ea344b
FB
308 q->last_pkt = NULL;
309 q->first_pkt = NULL;
310 q->nb_packets = 0;
311 q->size = 0;
687fae2b 312 SDL_UnlockMutex(q->mutex);
72ea344b
FB
313}
314
315static void packet_queue_end(PacketQueue *q)
316{
317 packet_queue_flush(q);
01310af2
FB
318 SDL_DestroyMutex(q->mutex);
319 SDL_DestroyCond(q->cond);
320}
321
322static int packet_queue_put(PacketQueue *q, AVPacket *pkt)
323{
324 AVPacketList *pkt1;
325
72ea344b 326 /* duplicate the packet */
39c6a118 327 if (pkt!=&flush_pkt && av_dup_packet(pkt) < 0)
72ea344b 328 return -1;
115329f1 329
01310af2
FB
330 pkt1 = av_malloc(sizeof(AVPacketList));
331 if (!pkt1)
332 return -1;
333 pkt1->pkt = *pkt;
334 pkt1->next = NULL;
335
72ea344b 336
01310af2
FB
337 SDL_LockMutex(q->mutex);
338
339 if (!q->last_pkt)
340
341 q->first_pkt = pkt1;
342 else
343 q->last_pkt->next = pkt1;
344 q->last_pkt = pkt1;
345 q->nb_packets++;
7b776589 346 q->size += pkt1->pkt.size + sizeof(*pkt1);
01310af2
FB
347 /* XXX: should duplicate packet data in DV case */
348 SDL_CondSignal(q->cond);
349
350 SDL_UnlockMutex(q->mutex);
351 return 0;
352}
353
354static void packet_queue_abort(PacketQueue *q)
355{
356 SDL_LockMutex(q->mutex);
357
358 q->abort_request = 1;
115329f1 359
01310af2
FB
360 SDL_CondSignal(q->cond);
361
362 SDL_UnlockMutex(q->mutex);
363}
364
365/* return < 0 if aborted, 0 if no packet and > 0 if packet. */
366static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
367{
368 AVPacketList *pkt1;
369 int ret;
370
371 SDL_LockMutex(q->mutex);
372
373 for(;;) {
374 if (q->abort_request) {
375 ret = -1;
376 break;
377 }
115329f1 378
01310af2
FB
379 pkt1 = q->first_pkt;
380 if (pkt1) {
381 q->first_pkt = pkt1->next;
382 if (!q->first_pkt)
383 q->last_pkt = NULL;
384 q->nb_packets--;
7b776589 385 q->size -= pkt1->pkt.size + sizeof(*pkt1);
01310af2
FB
386 *pkt = pkt1->pkt;
387 av_free(pkt1);
388 ret = 1;
389 break;
390 } else if (!block) {
391 ret = 0;
392 break;
393 } else {
394 SDL_CondWait(q->cond, q->mutex);
395 }
396 }
397 SDL_UnlockMutex(q->mutex);
398 return ret;
399}
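/* Note that packet_queue_init() pre-loads the queue with flush_pkt; the
   decoder threads compare pkt->data against flush_pkt.data and call
   avcodec_flush_buffers() when they see it, which is how seeks propagate
   to the codecs. */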
400
115329f1 401static inline void fill_rectangle(SDL_Surface *screen,
01310af2
FB
402 int x, int y, int w, int h, int color)
403{
404 SDL_Rect rect;
405 rect.x = x;
406 rect.y = y;
407 rect.w = w;
408 rect.h = h;
409 SDL_FillRect(screen, &rect, color);
410}
411
412#if 0
413/* draw only the border of a rectangle */
414void fill_border(VideoState *s, int x, int y, int w, int h, int color)
415{
416 int w1, w2, h1, h2;
417
418 /* fill the background */
419 w1 = x;
420 if (w1 < 0)
421 w1 = 0;
422 w2 = s->width - (x + w);
423 if (w2 < 0)
424 w2 = 0;
425 h1 = y;
426 if (h1 < 0)
427 h1 = 0;
428 h2 = s->height - (y + h);
429 if (h2 < 0)
430 h2 = 0;
115329f1
DB
431 fill_rectangle(screen,
432 s->xleft, s->ytop,
433 w1, s->height,
01310af2 434 color);
115329f1
DB
435 fill_rectangle(screen,
436 s->xleft + s->width - w2, s->ytop,
437 w2, s->height,
01310af2 438 color);
115329f1
DB
439 fill_rectangle(screen,
440 s->xleft + w1, s->ytop,
441 s->width - w1 - w2, h1,
01310af2 442 color);
115329f1 443 fill_rectangle(screen,
01310af2
FB
444 s->xleft + w1, s->ytop + s->height - h2,
445 s->width - w1 - w2, h2,
446 color);
447}
448#endif
449
72ce053b
IC
450#define ALPHA_BLEND(a, oldp, newp, s)\
451((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s))
452
453#define RGBA_IN(r, g, b, a, s)\
454{\
455 unsigned int v = ((const uint32_t *)(s))[0];\
456 a = (v >> 24) & 0xff;\
457 r = (v >> 16) & 0xff;\
458 g = (v >> 8) & 0xff;\
459 b = v & 0xff;\
460}
461
462#define YUVA_IN(y, u, v, a, s, pal)\
463{\
57cf99f2 464 unsigned int val = ((const uint32_t *)(pal))[*(const uint8_t*)(s)];\
72ce053b
IC
465 a = (val >> 24) & 0xff;\
466 y = (val >> 16) & 0xff;\
467 u = (val >> 8) & 0xff;\
468 v = val & 0xff;\
469}
470
471#define YUVA_OUT(d, y, u, v, a)\
472{\
473 ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\
474}
475
476
477#define BPP 1
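/* ALPHA_BLEND(a, oldp, newp, s) is a weighted average of the old and new
   pixel values with alpha a in [0,255]; the s scale bits let newp be a sum
   of 2^s subsampled chroma samples (oldp is shifted up to match). For
   example, with s = 0, ALPHA_BLEND(128, 0, 255, 0) == 128, i.e. a 50% blend. */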
478
0a8cd696 479static void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh)
72ce053b
IC
480{
481 int wrap, wrap3, width2, skip2;
482 int y, u, v, a, u1, v1, a1, w, h;
483 uint8_t *lum, *cb, *cr;
484 const uint8_t *p;
485 const uint32_t *pal;
9cb5a11e
RD
486 int dstx, dsty, dstw, dsth;
487
7cf9c6ae
MN
488 dstw = av_clip(rect->w, 0, imgw);
489 dsth = av_clip(rect->h, 0, imgh);
490 dstx = av_clip(rect->x, 0, imgw - dstw);
491 dsty = av_clip(rect->y, 0, imgh - dsth);
9cb5a11e
RD
492 lum = dst->data[0] + dsty * dst->linesize[0];
493 cb = dst->data[1] + (dsty >> 1) * dst->linesize[1];
494 cr = dst->data[2] + (dsty >> 1) * dst->linesize[2];
495
f54b31b9 496 width2 = ((dstw + 1) >> 1) + (dstx & ~dstw & 1);
9cb5a11e 497 skip2 = dstx >> 1;
72ce053b 498 wrap = dst->linesize[0];
25b4c651
MN
499 wrap3 = rect->pict.linesize[0];
500 p = rect->pict.data[0];
501 pal = (const uint32_t *)rect->pict.data[1]; /* Now in YCrCb! */
115329f1 502
9cb5a11e
RD
503 if (dsty & 1) {
504 lum += dstx;
72ce053b
IC
505 cb += skip2;
506 cr += skip2;
115329f1 507
9cb5a11e 508 if (dstx & 1) {
72ce053b
IC
509 YUVA_IN(y, u, v, a, p, pal);
510 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
511 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
512 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
513 cb++;
514 cr++;
515 lum++;
516 p += BPP;
517 }
9cb5a11e 518 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
519 YUVA_IN(y, u, v, a, p, pal);
520 u1 = u;
521 v1 = v;
522 a1 = a;
523 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
524
525 YUVA_IN(y, u, v, a, p + BPP, pal);
526 u1 += u;
527 v1 += v;
528 a1 += a;
529 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
530 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
531 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
532 cb++;
533 cr++;
534 p += 2 * BPP;
535 lum += 2;
536 }
537 if (w) {
538 YUVA_IN(y, u, v, a, p, pal);
539 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
540 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
541 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
676ef505
BA
542 p++;
543 lum++;
72ce053b 544 }
4606a059
BA
545 p += wrap3 - dstw * BPP;
546 lum += wrap - dstw - dstx;
72ce053b
IC
547 cb += dst->linesize[1] - width2 - skip2;
548 cr += dst->linesize[2] - width2 - skip2;
549 }
9cb5a11e
RD
550 for(h = dsth - (dsty & 1); h >= 2; h -= 2) {
551 lum += dstx;
72ce053b
IC
552 cb += skip2;
553 cr += skip2;
115329f1 554
9cb5a11e 555 if (dstx & 1) {
72ce053b
IC
556 YUVA_IN(y, u, v, a, p, pal);
557 u1 = u;
558 v1 = v;
559 a1 = a;
560 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
561 p += wrap3;
562 lum += wrap;
563 YUVA_IN(y, u, v, a, p, pal);
564 u1 += u;
565 v1 += v;
566 a1 += a;
567 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
568 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
569 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
570 cb++;
571 cr++;
572 p += -wrap3 + BPP;
573 lum += -wrap + 1;
574 }
9cb5a11e 575 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
576 YUVA_IN(y, u, v, a, p, pal);
577 u1 = u;
578 v1 = v;
579 a1 = a;
580 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
581
f8ca63e8 582 YUVA_IN(y, u, v, a, p + BPP, pal);
72ce053b
IC
583 u1 += u;
584 v1 += v;
585 a1 += a;
586 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
587 p += wrap3;
588 lum += wrap;
589
590 YUVA_IN(y, u, v, a, p, pal);
591 u1 += u;
592 v1 += v;
593 a1 += a;
594 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
595
f8ca63e8 596 YUVA_IN(y, u, v, a, p + BPP, pal);
72ce053b
IC
597 u1 += u;
598 v1 += v;
599 a1 += a;
600 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
601
602 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
603 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);
604
605 cb++;
606 cr++;
607 p += -wrap3 + 2 * BPP;
608 lum += -wrap + 2;
609 }
610 if (w) {
611 YUVA_IN(y, u, v, a, p, pal);
612 u1 = u;
613 v1 = v;
614 a1 = a;
615 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
616 p += wrap3;
617 lum += wrap;
618 YUVA_IN(y, u, v, a, p, pal);
619 u1 += u;
620 v1 += v;
621 a1 += a;
622 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
623 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
624 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
625 cb++;
626 cr++;
627 p += -wrap3 + BPP;
628 lum += -wrap + 1;
629 }
9cb5a11e
RD
630 p += wrap3 + (wrap3 - dstw * BPP);
631 lum += wrap + (wrap - dstw - dstx);
72ce053b
IC
632 cb += dst->linesize[1] - width2 - skip2;
633 cr += dst->linesize[2] - width2 - skip2;
634 }
635 /* handle odd height */
636 if (h) {
9cb5a11e 637 lum += dstx;
72ce053b
IC
638 cb += skip2;
639 cr += skip2;
115329f1 640
9cb5a11e 641 if (dstx & 1) {
72ce053b
IC
642 YUVA_IN(y, u, v, a, p, pal);
643 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
644 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
645 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
646 cb++;
647 cr++;
648 lum++;
649 p += BPP;
650 }
9cb5a11e 651 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
652 YUVA_IN(y, u, v, a, p, pal);
653 u1 = u;
654 v1 = v;
655 a1 = a;
656 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
657
658 YUVA_IN(y, u, v, a, p + BPP, pal);
659 u1 += u;
660 v1 += v;
661 a1 += a;
662 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
663 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u, 1);
664 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v, 1);
665 cb++;
666 cr++;
667 p += 2 * BPP;
668 lum += 2;
669 }
670 if (w) {
671 YUVA_IN(y, u, v, a, p, pal);
672 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
673 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
674 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
675 }
676 }
677}
678
679static void free_subpicture(SubPicture *sp)
680{
e1d7c883 681 avsubtitle_free(&sp->sub);
72ce053b
IC
682}
683
01310af2
FB
684static void video_image_display(VideoState *is)
685{
686 VideoPicture *vp;
72ce053b
IC
687 SubPicture *sp;
688 AVPicture pict;
01310af2
FB
689 float aspect_ratio;
690 int width, height, x, y;
691 SDL_Rect rect;
72ce053b 692 int i;
01310af2
FB
693
694 vp = &is->pictq[is->pictq_rindex];
695 if (vp->bmp) {
917d2bb3
MN
696#if CONFIG_AVFILTER
697 if (vp->picref->pixel_aspect.num == 0)
698 aspect_ratio = 0;
699 else
700 aspect_ratio = av_q2d(vp->picref->pixel_aspect);
701#else
702
01310af2 703 /* XXX: use variable in the frame */
c30a4489
AJ
704 if (is->video_st->sample_aspect_ratio.num)
705 aspect_ratio = av_q2d(is->video_st->sample_aspect_ratio);
706 else if (is->video_st->codec->sample_aspect_ratio.num)
707 aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio);
72ea344b 708 else
c30a4489 709 aspect_ratio = 0;
917d2bb3 710#endif
01310af2 711 if (aspect_ratio <= 0.0)
c30a4489 712 aspect_ratio = 1.0;
917d2bb3 713 aspect_ratio *= (float)vp->width / (float)vp->height;
01310af2
FB
714 /* if an active format is indicated, then it overrides the
715 mpeg format */
716#if 0
01f4895c
MN
717 if (is->video_st->codec->dtg_active_format != is->dtg_active_format) {
718 is->dtg_active_format = is->video_st->codec->dtg_active_format;
01310af2
FB
719 printf("dtg_active_format=%d\n", is->dtg_active_format);
720 }
721#endif
722#if 0
01f4895c 723 switch(is->video_st->codec->dtg_active_format) {
01310af2
FB
724 case FF_DTG_AFD_SAME:
725 default:
726 /* nothing to do */
727 break;
728 case FF_DTG_AFD_4_3:
729 aspect_ratio = 4.0 / 3.0;
730 break;
731 case FF_DTG_AFD_16_9:
732 aspect_ratio = 16.0 / 9.0;
733 break;
734 case FF_DTG_AFD_14_9:
735 aspect_ratio = 14.0 / 9.0;
736 break;
737 case FF_DTG_AFD_4_3_SP_14_9:
738 aspect_ratio = 14.0 / 9.0;
739 break;
740 case FF_DTG_AFD_16_9_SP_14_9:
741 aspect_ratio = 14.0 / 9.0;
742 break;
743 case FF_DTG_AFD_SP_4_3:
744 aspect_ratio = 4.0 / 3.0;
745 break;
746 }
747#endif
748
72ce053b
IC
749 if (is->subtitle_st)
750 {
751 if (is->subpq_size > 0)
752 {
753 sp = &is->subpq[is->subpq_rindex];
754
755 if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000))
756 {
757 SDL_LockYUVOverlay (vp->bmp);
758
759 pict.data[0] = vp->bmp->pixels[0];
760 pict.data[1] = vp->bmp->pixels[2];
761 pict.data[2] = vp->bmp->pixels[1];
762
763 pict.linesize[0] = vp->bmp->pitches[0];
764 pict.linesize[1] = vp->bmp->pitches[2];
765 pict.linesize[2] = vp->bmp->pitches[1];
766
767 for (i = 0; i < sp->sub.num_rects; i++)
db4fac64 768 blend_subrect(&pict, sp->sub.rects[i],
0a8cd696 769 vp->bmp->w, vp->bmp->h);
72ce053b
IC
770
771 SDL_UnlockYUVOverlay (vp->bmp);
772 }
773 }
774 }
775
776
01310af2
FB
777 /* XXX: we assume the screen has a 1.0 pixel aspect ratio */
778 height = is->height;
bb6c34e5 779 width = ((int)rint(height * aspect_ratio)) & ~1;
01310af2
FB
780 if (width > is->width) {
781 width = is->width;
bb6c34e5 782 height = ((int)rint(width / aspect_ratio)) & ~1;
01310af2
FB
783 }
784 x = (is->width - width) / 2;
785 y = (is->height - height) / 2;
786 if (!is->no_background) {
787 /* fill the background */
788 // fill_border(is, x, y, width, height, QERGB(0x00, 0x00, 0x00));
789 } else {
790 is->no_background = 0;
791 }
792 rect.x = is->xleft + x;
2f6547fb 793 rect.y = is->ytop + y;
01310af2
FB
794 rect.w = width;
795 rect.h = height;
796 SDL_DisplayYUVOverlay(vp->bmp, &rect);
797 } else {
798#if 0
115329f1
DB
799 fill_rectangle(screen,
800 is->xleft, is->ytop, is->width, is->height,
01310af2
FB
801 QERGB(0x00, 0x00, 0x00));
802#endif
803 }
804}
805
806static inline int compute_mod(int a, int b)
807{
808 a = a % b;
115329f1 809 if (a >= 0)
01310af2
FB
810 return a;
811 else
812 return a + b;
813}
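/* compute_mod() is a mathematical modulus that is always non-negative,
   unlike C's %: e.g. compute_mod(-3, SAMPLE_ARRAY_SIZE) yields
   SAMPLE_ARRAY_SIZE - 3. It is used to wrap indices into sample_array. */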
814
815static void video_audio_display(VideoState *s)
816{
817 int i, i_start, x, y1, y, ys, delay, n, nb_display_channels;
818 int ch, channels, h, h2, bgcolor, fgcolor;
819 int64_t time_diff;
4c7c7645
MN
820 int rdft_bits, nb_freq;
821
822 for(rdft_bits=1; (1<<rdft_bits)<2*s->height; rdft_bits++)
823 ;
824 nb_freq= 1<<(rdft_bits-1);
115329f1 825
01310af2 826 /* compute display index : center on currently output samples */
01f4895c 827 channels = s->audio_st->codec->channels;
01310af2 828 nb_display_channels = channels;
5e0257e3 829 if (!s->paused) {
4c7c7645 830 int data_used= s->show_audio==1 ? s->width : (2*nb_freq);
5e0257e3
FB
831 n = 2 * channels;
832 delay = audio_write_get_buf_size(s);
833 delay /= n;
115329f1 834
5e0257e3
FB
835 /* to be more precise, we take into account the time spent since
836 the last buffer computation */
837 if (audio_callback_time) {
838 time_diff = av_gettime() - audio_callback_time;
122dcdcb 839 delay -= (time_diff * s->audio_st->codec->sample_rate) / 1000000;
5e0257e3 840 }
115329f1 841
122dcdcb 842 delay += 2*data_used;
4c7c7645
MN
843 if (delay < data_used)
844 delay = data_used;
ac50bcc8
MN
845
846 i_start= x = compute_mod(s->sample_array_index - delay * channels, SAMPLE_ARRAY_SIZE);
12eeda34 847 if(s->show_audio==1){
6c7165c7
JM
848 h= INT_MIN;
849 for(i=0; i<1000; i+=channels){
850 int idx= (SAMPLE_ARRAY_SIZE + x - i) % SAMPLE_ARRAY_SIZE;
851 int a= s->sample_array[idx];
852 int b= s->sample_array[(idx + 4*channels)%SAMPLE_ARRAY_SIZE];
853 int c= s->sample_array[(idx + 5*channels)%SAMPLE_ARRAY_SIZE];
854 int d= s->sample_array[(idx + 9*channels)%SAMPLE_ARRAY_SIZE];
855 int score= a-d;
856 if(h<score && (b^c)<0){
857 h= score;
858 i_start= idx;
859 }
ac50bcc8
MN
860 }
861 }
862
5e0257e3
FB
863 s->last_i_start = i_start;
864 } else {
865 i_start = s->last_i_start;
01310af2
FB
866 }
867
01310af2 868 bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
12eeda34 869 if(s->show_audio==1){
6c7165c7
JM
870 fill_rectangle(screen,
871 s->xleft, s->ytop, s->width, s->height,
872 bgcolor);
873
874 fgcolor = SDL_MapRGB(screen->format, 0xff, 0xff, 0xff);
875
876 /* total height for one channel */
877 h = s->height / nb_display_channels;
878 /* graph height / 2 */
879 h2 = (h * 9) / 20;
880 for(ch = 0;ch < nb_display_channels; ch++) {
881 i = i_start + ch;
882 y1 = s->ytop + ch * h + (h / 2); /* position of center line */
883 for(x = 0; x < s->width; x++) {
884 y = (s->sample_array[i] * h2) >> 15;
885 if (y < 0) {
886 y = -y;
887 ys = y1 - y;
888 } else {
889 ys = y1;
890 }
891 fill_rectangle(screen,
892 s->xleft + x, ys, 1, y,
893 fgcolor);
894 i += channels;
895 if (i >= SAMPLE_ARRAY_SIZE)
896 i -= SAMPLE_ARRAY_SIZE;
01310af2 897 }
01310af2 898 }
01310af2 899
6c7165c7 900 fgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0xff);
01310af2 901
6c7165c7
JM
902 for(ch = 1;ch < nb_display_channels; ch++) {
903 y = s->ytop + ch * h;
904 fill_rectangle(screen,
905 s->xleft, y, s->width, 1,
906 fgcolor);
907 }
908 SDL_UpdateRect(screen, s->xleft, s->ytop, s->width, s->height);
12eeda34 909 }else{
12eeda34 910 nb_display_channels= FFMIN(nb_display_channels, 2);
12eeda34 911 if(rdft_bits != s->rdft_bits){
166621ab 912 av_rdft_end(s->rdft);
7dbbf6a1 913 av_free(s->rdft_data);
166621ab 914 s->rdft = av_rdft_init(rdft_bits, DFT_R2C);
12eeda34 915 s->rdft_bits= rdft_bits;
7dbbf6a1 916 s->rdft_data= av_malloc(4*nb_freq*sizeof(*s->rdft_data));
12eeda34 917 }
12eeda34 918 {
7dbbf6a1 919 FFTSample *data[2];
12eeda34 920 for(ch = 0;ch < nb_display_channels; ch++) {
7dbbf6a1 921 data[ch] = s->rdft_data + 2*nb_freq*ch;
12eeda34
MN
922 i = i_start + ch;
923 for(x = 0; x < 2*nb_freq; x++) {
924 double w= (x-nb_freq)*(1.0/nb_freq);
925 data[ch][x]= s->sample_array[i]*(1.0-w*w);
926 i += channels;
927 if (i >= SAMPLE_ARRAY_SIZE)
928 i -= SAMPLE_ARRAY_SIZE;
929 }
166621ab 930 av_rdft_calc(s->rdft, data[ch]);
12eeda34
MN
931 }
932 // least efficient way to do this; we should of course access it directly, but it's more than fast enough
092421cf 933 for(y=0; y<s->height; y++){
12eeda34
MN
934 double w= 1/sqrt(nb_freq);
935 int a= sqrt(w*sqrt(data[0][2*y+0]*data[0][2*y+0] + data[0][2*y+1]*data[0][2*y+1]));
00f72577
JM
936 int b= (nb_display_channels == 2 ) ? sqrt(w*sqrt(data[1][2*y+0]*data[1][2*y+0]
937 + data[1][2*y+1]*data[1][2*y+1])) : a;
12eeda34
MN
938 a= FFMIN(a,255);
939 b= FFMIN(b,255);
940 fgcolor = SDL_MapRGB(screen->format, a, b, (a+b)/2);
941
942 fill_rectangle(screen,
943 s->xpos, s->height-y, 1, 1,
944 fgcolor);
945 }
946 }
947 SDL_UpdateRect(screen, s->xpos, s->ytop, 1, s->height);
948 s->xpos++;
949 if(s->xpos >= s->width)
950 s->xpos= s->xleft;
951 }
01310af2
FB
952}
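/* Two visualizations are implemented above: with show_audio == 1 the raw
   waveform of every channel is drawn; otherwise up to two channels are
   windowed with (1 - w*w), run through av_rdft_calc() and painted as a
   scrolling spectrogram, one column (s->xpos) per refresh. */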
953
990c8438
MN
954static int video_open(VideoState *is){
955 int flags = SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL;
956 int w,h;
957
fb84155b
MN
958 if(is_full_screen) flags |= SDL_FULLSCREEN;
959 else flags |= SDL_RESIZABLE;
960
990c8438
MN
961 if (is_full_screen && fs_screen_width) {
962 w = fs_screen_width;
963 h = fs_screen_height;
fb84155b
MN
964 } else if(!is_full_screen && screen_width){
965 w = screen_width;
966 h = screen_height;
917d2bb3
MN
967#if CONFIG_AVFILTER
968 }else if (is->out_video_filter && is->out_video_filter->inputs[0]){
969 w = is->out_video_filter->inputs[0]->w;
970 h = is->out_video_filter->inputs[0]->h;
971#else
fb84155b
MN
972 }else if (is->video_st && is->video_st->codec->width){
973 w = is->video_st->codec->width;
974 h = is->video_st->codec->height;
917d2bb3 975#endif
990c8438 976 } else {
fb84155b
MN
977 w = 640;
978 h = 480;
990c8438 979 }
d3d7b12e
MN
980 if(screen && is->width == screen->w && screen->w == w
981 && is->height== screen->h && screen->h == h)
982 return 0;
983
c97f5402 984#ifndef __APPLE__
990c8438
MN
985 screen = SDL_SetVideoMode(w, h, 0, flags);
986#else
987 /* setting bits_per_pixel = 0 or 32 causes blank video on OS X */
988 screen = SDL_SetVideoMode(w, h, 24, flags);
989#endif
990 if (!screen) {
991 fprintf(stderr, "SDL: could not set video mode - exiting\n");
992 return -1;
993 }
076db5ed
RK
994 if (!window_title)
995 window_title = input_filename;
996 SDL_WM_SetCaption(window_title, window_title);
990c8438
MN
997
998 is->width = screen->w;
999 is->height = screen->h;
1000
1001 return 0;
1002}
8c982c5d 1003
01310af2
FB
1004/* display the current picture, if any */
1005static void video_display(VideoState *is)
1006{
8c982c5d
MN
1007 if(!screen)
1008 video_open(cur_stream);
115329f1 1009 if (is->audio_st && is->show_audio)
01310af2
FB
1010 video_audio_display(is);
1011 else if (is->video_st)
1012 video_image_display(is);
1013}
1014
d38c9e7a 1015static int refresh_thread(void *opaque)
01310af2 1016{
d38c9e7a
MN
1017 VideoState *is= opaque;
1018 while(!is->abort_request){
01310af2
FB
1019 SDL_Event event;
1020 event.type = FF_REFRESH_EVENT;
1021 event.user.data1 = opaque;
d38c9e7a
MN
1022 if(!is->refresh){
1023 is->refresh=1;
01310af2 1024 SDL_PushEvent(&event);
d38c9e7a 1025 }
2b3da32f 1026 usleep(is->audio_st && is->show_audio ? rdftspeed*1000 : 5000); //FIXME ideally we should wait the correct time, but SDL's event passing is so slow it would be silly
d38c9e7a
MN
1027 }
1028 return 0;
01310af2
FB
1029}
1030
638c9d91
FB
1031/* get the current audio clock value */
1032static double get_audio_clock(VideoState *is)
1033{
1034 double pts;
1035 int hw_buf_size, bytes_per_sec;
1036 pts = is->audio_clock;
1037 hw_buf_size = audio_write_get_buf_size(is);
1038 bytes_per_sec = 0;
1039 if (is->audio_st) {
115329f1 1040 bytes_per_sec = is->audio_st->codec->sample_rate *
01f4895c 1041 2 * is->audio_st->codec->channels;
638c9d91
FB
1042 }
1043 if (bytes_per_sec)
1044 pts -= (double)hw_buf_size / bytes_per_sec;
1045 return pts;
1046}
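/* The returned value is is->audio_clock minus the time represented by the
   audio still waiting in the output buffers: hw_buf_size bytes at
   sample_rate * channels * 2 bytes (S16) per second. The result is roughly
   the PTS of the sample currently being heard. */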
1047
1048/* get the current video clock value */
1049static double get_video_clock(VideoState *is)
1050{
04108619 1051 if (is->paused) {
41a4cd0c 1052 return is->video_current_pts;
72ea344b 1053 } else {
68aefbe8 1054 return is->video_current_pts_drift + av_gettime() / 1000000.0;
72ea344b 1055 }
638c9d91
FB
1056}
1057
1058/* get the current external clock value */
1059static double get_external_clock(VideoState *is)
1060{
1061 int64_t ti;
1062 ti = av_gettime();
1063 return is->external_clock + ((ti - is->external_clock_time) * 1e-6);
1064}
1065
1066/* get the current master clock value */
1067static double get_master_clock(VideoState *is)
1068{
1069 double val;
1070
72ea344b
FB
1071 if (is->av_sync_type == AV_SYNC_VIDEO_MASTER) {
1072 if (is->video_st)
1073 val = get_video_clock(is);
1074 else
1075 val = get_audio_clock(is);
1076 } else if (is->av_sync_type == AV_SYNC_AUDIO_MASTER) {
1077 if (is->audio_st)
1078 val = get_audio_clock(is);
1079 else
1080 val = get_video_clock(is);
1081 } else {
638c9d91 1082 val = get_external_clock(is);
72ea344b 1083 }
638c9d91
FB
1084 return val;
1085}
1086
72ea344b 1087/* seek in the stream */
2ef46053 1088static void stream_seek(VideoState *is, int64_t pos, int64_t rel, int seek_by_bytes)
72ea344b 1089{
687fae2b
IW
1090 if (!is->seek_req) {
1091 is->seek_pos = pos;
4ed29207 1092 is->seek_rel = rel;
3890dd3a 1093 is->seek_flags &= ~AVSEEK_FLAG_BYTE;
94b594c6
SH
1094 if (seek_by_bytes)
1095 is->seek_flags |= AVSEEK_FLAG_BYTE;
687fae2b
IW
1096 is->seek_req = 1;
1097 }
72ea344b
FB
1098}
1099
1100/* pause or resume the video */
1101static void stream_pause(VideoState *is)
1102{
68aefbe8
MN
1103 if (is->paused) {
1104 is->frame_timer += av_gettime() / 1000000.0 + is->video_current_pts_drift - is->video_current_pts;
f5668147 1105 if(is->read_pause_return != AVERROR(ENOSYS)){
68aefbe8 1106 is->video_current_pts = is->video_current_pts_drift + av_gettime() / 1000000.0;
f5668147 1107 }
68aefbe8 1108 is->video_current_pts_drift = is->video_current_pts - av_gettime() / 1000000.0;
72ea344b 1109 }
68aefbe8 1110 is->paused = !is->paused;
72ea344b
FB
1111}
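/* On resume (is->paused is still set at this point), frame_timer is pushed
   forward by the wall-clock time elapsed since the video clock was last
   updated, which is roughly the time spent paused, so playback does not try
   to catch up; video_current_pts_drift is then re-anchored to the current
   time before the flag is toggled. */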
1112
d38c9e7a 1113static double compute_target_time(double frame_current_pts, VideoState *is)
49410784 1114{
d38c9e7a 1115 double delay, sync_threshold, diff;
49410784
TB
1116
1117 /* compute nominal delay */
1118 delay = frame_current_pts - is->frame_last_pts;
1119 if (delay <= 0 || delay >= 10.0) {
1120 /* if incorrect delay, use previous one */
1121 delay = is->frame_last_delay;
443658fd 1122 } else {
712de377 1123 is->frame_last_delay = delay;
443658fd 1124 }
49410784
TB
1125 is->frame_last_pts = frame_current_pts;
1126
1127 /* update delay to follow master synchronisation source */
1128 if (((is->av_sync_type == AV_SYNC_AUDIO_MASTER && is->audio_st) ||
1129 is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
1130 /* if video is slave, we try to correct big delays by
1131 duplicating or deleting a frame */
f04c6e35 1132 diff = get_video_clock(is) - get_master_clock(is);
49410784
TB
1133
1134 /* skip or repeat frame. We take into account the
1135 delay to compute the threshold. I still don't know
1136 if it is the best guess */
1137 sync_threshold = FFMAX(AV_SYNC_THRESHOLD, delay);
1138 if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
1139 if (diff <= -sync_threshold)
1140 delay = 0;
1141 else if (diff >= sync_threshold)
1142 delay = 2 * delay;
1143 }
1144 }
49410784 1145 is->frame_timer += delay;
eecc17a7
TB
1146#if defined(DEBUG_SYNC)
1147 printf("video: delay=%0.3f actual_delay=%0.3f pts=%0.3f A-V=%f\n",
1148 delay, actual_delay, frame_current_pts, -diff);
1149#endif
1150
d38c9e7a 1151 return is->frame_timer;
49410784
TB
1152}
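/* Summary of the sync rule above: the nominal delay is the PTS difference
   to the previous frame; when video is not the master clock, a frame is
   shown immediately (delay = 0) if video lags the master by more than
   sync_threshold, or held twice as long if it leads by more than that.
   The returned frame_timer is the absolute time at which to display. */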
1153
01310af2
FB
1154/* called to display each frame */
1155static void video_refresh_timer(void *opaque)
1156{
1157 VideoState *is = opaque;
1158 VideoPicture *vp;
638c9d91 1159
72ce053b 1160 SubPicture *sp, *sp2;
01310af2
FB
1161
1162 if (is->video_st) {
d38c9e7a 1163retry:
01310af2 1164 if (is->pictq_size == 0) {
d38c9e7a 1165 // nothing to do, no picture to display in the queue
01310af2 1166 } else {
d38c9e7a
MN
1167 double time= av_gettime()/1000000.0;
1168 double next_target;
638c9d91 1169 /* dequeue the picture */
01310af2 1170 vp = &is->pictq[is->pictq_rindex];
638c9d91 1171
d38c9e7a
MN
1172 if(time < vp->target_clock)
1173 return;
638c9d91
FB
1174 /* update current video pts */
1175 is->video_current_pts = vp->pts;
d38c9e7a 1176 is->video_current_pts_drift = is->video_current_pts - time;
a3cc2160 1177 is->video_current_pos = vp->pos;
d38c9e7a
MN
1178 if(is->pictq_size > 1){
1179 VideoPicture *nextvp= &is->pictq[(is->pictq_rindex+1)%VIDEO_PICTURE_QUEUE_SIZE];
1180 assert(nextvp->target_clock >= vp->target_clock);
1181 next_target= nextvp->target_clock;
1182 }else{
1183 next_target= vp->target_clock + is->video_clock - vp->pts; //FIXME pass durations cleanly
1184 }
1185 if(framedrop && time > next_target){
1186 is->skip_frames *= 1.0 + FRAME_SKIP_FACTOR;
1187 if(is->pictq_size > 1 || time > next_target + 0.5){
1188 /* update queue size and signal for next picture */
1189 if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
1190 is->pictq_rindex = 0;
1191
1192 SDL_LockMutex(is->pictq_mutex);
1193 is->pictq_size--;
1194 SDL_CondSignal(is->pictq_cond);
1195 SDL_UnlockMutex(is->pictq_mutex);
1196 goto retry;
1197 }
1198 }
638c9d91 1199
72ce053b
IC
1200 if(is->subtitle_st) {
1201 if (is->subtitle_stream_changed) {
1202 SDL_LockMutex(is->subpq_mutex);
115329f1 1203
72ce053b
IC
1204 while (is->subpq_size) {
1205 free_subpicture(&is->subpq[is->subpq_rindex]);
115329f1 1206
72ce053b
IC
1207 /* update queue size and signal for next picture */
1208 if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1209 is->subpq_rindex = 0;
115329f1 1210
72ce053b
IC
1211 is->subpq_size--;
1212 }
1213 is->subtitle_stream_changed = 0;
1214
1215 SDL_CondSignal(is->subpq_cond);
1216 SDL_UnlockMutex(is->subpq_mutex);
1217 } else {
1218 if (is->subpq_size > 0) {
1219 sp = &is->subpq[is->subpq_rindex];
1220
1221 if (is->subpq_size > 1)
1222 sp2 = &is->subpq[(is->subpq_rindex + 1) % SUBPICTURE_QUEUE_SIZE];
1223 else
1224 sp2 = NULL;
1225
1226 if ((is->video_current_pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
1227 || (sp2 && is->video_current_pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
1228 {
1229 free_subpicture(sp);
1230
1231 /* update queue size and signal for next picture */
1232 if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1233 is->subpq_rindex = 0;
1234
1235 SDL_LockMutex(is->subpq_mutex);
1236 is->subpq_size--;
1237 SDL_CondSignal(is->subpq_cond);
1238 SDL_UnlockMutex(is->subpq_mutex);
1239 }
1240 }
1241 }
1242 }
1243
01310af2
FB
1244 /* display picture */
1245 video_display(is);
115329f1 1246
01310af2
FB
1247 /* update queue size and signal for next picture */
1248 if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
1249 is->pictq_rindex = 0;
115329f1 1250
01310af2
FB
1251 SDL_LockMutex(is->pictq_mutex);
1252 is->pictq_size--;
1253 SDL_CondSignal(is->pictq_cond);
1254 SDL_UnlockMutex(is->pictq_mutex);
1255 }
1256 } else if (is->audio_st) {
1257 /* draw the next audio frame */
1258
01310af2
FB
1259 /* if only audio stream, then display the audio bars (better
1260 than nothing, just to test the implementation) */
115329f1 1261
01310af2
FB
1262 /* display picture */
1263 video_display(is);
01310af2
FB
1264 }
1265 if (show_status) {
1266 static int64_t last_time;
1267 int64_t cur_time;
72ce053b 1268 int aqsize, vqsize, sqsize;
638c9d91 1269 double av_diff;
115329f1 1270
01310af2 1271 cur_time = av_gettime();
1e1a0b18 1272 if (!last_time || (cur_time - last_time) >= 30000) {
01310af2
FB
1273 aqsize = 0;
1274 vqsize = 0;
72ce053b 1275 sqsize = 0;
01310af2
FB
1276 if (is->audio_st)
1277 aqsize = is->audioq.size;
1278 if (is->video_st)
1279 vqsize = is->videoq.size;
72ce053b
IC
1280 if (is->subtitle_st)
1281 sqsize = is->subtitleq.size;
638c9d91
FB
1282 av_diff = 0;
1283 if (is->audio_st && is->video_st)
1284 av_diff = get_audio_clock(is) - get_video_clock(is);
382f3a5b
MN
1285 printf("%7.2f A-V:%7.3f s:%3.1f aq=%5dKB vq=%5dKB sq=%5dB f=%"PRId64"/%"PRId64" \r",
1286 get_master_clock(is), av_diff, FFMAX(is->skip_frames-1, 0), aqsize / 1024, vqsize / 1024, sqsize, is->faulty_dts, is->faulty_pts);
01310af2
FB
1287 fflush(stdout);
1288 last_time = cur_time;
1289 }
1290 }
1291}
1292
1293/* allocate a picture (this must be done in the main thread to avoid
1294 potential locking problems) */
1295static void alloc_picture(void *opaque)
1296{
1297 VideoState *is = opaque;
1298 VideoPicture *vp;
01310af2
FB
1299
1300 vp = &is->pictq[is->pictq_windex];
1301
1302 if (vp->bmp)
1303 SDL_FreeYUVOverlay(vp->bmp);
1304
917d2bb3
MN
1305#if CONFIG_AVFILTER
1306 if (vp->picref)
1307 avfilter_unref_pic(vp->picref);
1308 vp->picref = NULL;
1309
1310 vp->width = is->out_video_filter->inputs[0]->w;
1311 vp->height = is->out_video_filter->inputs[0]->h;
1312 vp->pix_fmt = is->out_video_filter->inputs[0]->format;
1313#else
1314 vp->width = is->video_st->codec->width;
1315 vp->height = is->video_st->codec->height;
1316 vp->pix_fmt = is->video_st->codec->pix_fmt;
1317#endif
1318
1319 vp->bmp = SDL_CreateYUVOverlay(vp->width, vp->height,
115329f1 1320 SDL_YV12_OVERLAY,
61890b02 1321 screen);
01310af2
FB
1322
1323 SDL_LockMutex(is->pictq_mutex);
1324 vp->allocated = 1;
1325 SDL_CondSignal(is->pictq_cond);
1326 SDL_UnlockMutex(is->pictq_mutex);
1327}
1328
267e9dfa
MN
1329/**
1330 * Queue the given decoded frame for display.
1331 * @param pts the dts of the pkt / pts of the frame, guessed if not known
1332 */
1a620dd7 1333static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, int64_t pos)
01310af2
FB
1334{
1335 VideoPicture *vp;
1336 int dst_pix_fmt;
917d2bb3
MN
1337#if CONFIG_AVFILTER
1338 AVPicture pict_src;
1339#endif
01310af2
FB
1340 /* wait until we have space to put a new picture */
1341 SDL_LockMutex(is->pictq_mutex);
d38c9e7a
MN
1342
1343 if(is->pictq_size>=VIDEO_PICTURE_QUEUE_SIZE && !is->refresh)
1344 is->skip_frames= FFMAX(1.0 - FRAME_SKIP_FACTOR, is->skip_frames * (1.0-FRAME_SKIP_FACTOR));
1345
01310af2
FB
1346 while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE &&
1347 !is->videoq.abort_request) {
1348 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1349 }
1350 SDL_UnlockMutex(is->pictq_mutex);
115329f1 1351
01310af2
FB
1352 if (is->videoq.abort_request)
1353 return -1;
1354
1355 vp = &is->pictq[is->pictq_windex];
1356
1357 /* alloc or resize hardware picture buffer */
115329f1 1358 if (!vp->bmp ||
917d2bb3
MN
1359#if CONFIG_AVFILTER
1360 vp->width != is->out_video_filter->inputs[0]->w ||
1361 vp->height != is->out_video_filter->inputs[0]->h) {
1362#else
01f4895c
MN
1363 vp->width != is->video_st->codec->width ||
1364 vp->height != is->video_st->codec->height) {
917d2bb3 1365#endif
01310af2
FB
1366 SDL_Event event;
1367
1368 vp->allocated = 0;
1369
1370 /* the allocation must be done in the main thread to avoid
1371 locking problems */
1372 event.type = FF_ALLOC_EVENT;
1373 event.user.data1 = is;
1374 SDL_PushEvent(&event);
115329f1 1375
01310af2
FB
1376 /* wait until the picture is allocated */
1377 SDL_LockMutex(is->pictq_mutex);
1378 while (!vp->allocated && !is->videoq.abort_request) {
1379 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1380 }
1381 SDL_UnlockMutex(is->pictq_mutex);
1382
1383 if (is->videoq.abort_request)
1384 return -1;
1385 }
1386
638c9d91 1387 /* if the frame is not skipped, then display it */
01310af2 1388 if (vp->bmp) {
fbf1b885 1389 AVPicture pict;
917d2bb3
MN
1390#if CONFIG_AVFILTER
1391 if(vp->picref)
1392 avfilter_unref_pic(vp->picref);
1393 vp->picref = src_frame->opaque;
1394#endif
fbf1b885 1395
01310af2
FB
1396 /* get a pointer on the bitmap */
1397 SDL_LockYUVOverlay (vp->bmp);
1398
1399 dst_pix_fmt = PIX_FMT_YUV420P;
fbf1b885 1400 memset(&pict,0,sizeof(AVPicture));
01310af2
FB
1401 pict.data[0] = vp->bmp->pixels[0];
1402 pict.data[1] = vp->bmp->pixels[2];
1403 pict.data[2] = vp->bmp->pixels[1];
1404
1405 pict.linesize[0] = vp->bmp->pitches[0];
1406 pict.linesize[1] = vp->bmp->pitches[2];
1407 pict.linesize[2] = vp->bmp->pitches[1];
917d2bb3
MN
1408
1409#if CONFIG_AVFILTER
1410 pict_src.data[0] = src_frame->data[0];
1411 pict_src.data[1] = src_frame->data[1];
1412 pict_src.data[2] = src_frame->data[2];
1413
1414 pict_src.linesize[0] = src_frame->linesize[0];
1415 pict_src.linesize[1] = src_frame->linesize[1];
1416 pict_src.linesize[2] = src_frame->linesize[2];
1417
1418 //FIXME use direct rendering
1419 av_picture_copy(&pict, &pict_src,
1420 vp->pix_fmt, vp->width, vp->height);
1421#else
e43d7a18 1422 sws_flags = av_get_int(sws_opts, "sws_flags", NULL);
3ac56e28 1423 is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx,
917d2bb3 1424 vp->width, vp->height, vp->pix_fmt, vp->width, vp->height,
feb7bc67 1425 dst_pix_fmt, sws_flags, NULL, NULL, NULL);
3ac56e28 1426 if (is->img_convert_ctx == NULL) {
26ba8235
AB
1427 fprintf(stderr, "Cannot initialize the conversion context\n");
1428 exit(1);
1429 }
3ac56e28 1430 sws_scale(is->img_convert_ctx, src_frame->data, src_frame->linesize,
917d2bb3
MN
1431 0, vp->height, pict.data, pict.linesize);
1432#endif
01310af2
FB
1433 /* update the bitmap content */
1434 SDL_UnlockYUVOverlay(vp->bmp);
1435
638c9d91 1436 vp->pts = pts;
1a620dd7 1437 vp->pos = pos;
01310af2
FB
1438
1439 /* now we can update the picture count */
1440 if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE)
1441 is->pictq_windex = 0;
1442 SDL_LockMutex(is->pictq_mutex);
d38c9e7a
MN
1443 vp->target_clock= compute_target_time(vp->pts, is);
1444
01310af2
FB
1445 is->pictq_size++;
1446 SDL_UnlockMutex(is->pictq_mutex);
1447 }
638c9d91
FB
1448 return 0;
1449}
1450
115329f1
DB
1451/**
1452 * compute the exact PTS for the picture if it is omitted in the stream
267e9dfa
MN
1453 * @param pts1 the dts of the pkt / pts of the frame
1454 */
1a620dd7 1455static int output_picture2(VideoState *is, AVFrame *src_frame, double pts1, int64_t pos)
638c9d91
FB
1456{
1457 double frame_delay, pts;
115329f1 1458
638c9d91
FB
1459 pts = pts1;
1460
01310af2 1461 if (pts != 0) {
638c9d91 1462 /* update video clock with pts, if present */
01310af2
FB
1463 is->video_clock = pts;
1464 } else {
72ea344b
FB
1465 pts = is->video_clock;
1466 }
1467 /* update video clock for next frame */
01f4895c 1468 frame_delay = av_q2d(is->video_st->codec->time_base);
72ea344b
FB
1469 /* for MPEG2, the frame can be repeated, so we update the
1470 clock accordingly */
267e9dfa 1471 frame_delay += src_frame->repeat_pict * (frame_delay * 0.5);
72ea344b 1472 is->video_clock += frame_delay;
638c9d91
FB
1473
1474#if defined(DEBUG_SYNC) && 0
ff358eca
SS
1475 printf("frame_type=%c clock=%0.3f pts=%0.3f\n",
1476 av_get_pict_type_char(src_frame->pict_type), pts, pts1);
638c9d91 1477#endif
1a620dd7 1478 return queue_picture(is, src_frame, pts, pos);
01310af2
FB
1479}
1480
3966a574 1481static int get_video_frame(VideoState *is, AVFrame *frame, int64_t *pts, AVPacket *pkt)
01310af2 1482{
6c7d3ead 1483 int len1, got_picture, i;
01310af2 1484
01310af2 1485 if (packet_queue_get(&is->videoq, pkt, 1) < 0)
917d2bb3 1486 return -1;
39c6a118
MN
1487
1488 if(pkt->data == flush_pkt.data){
1489 avcodec_flush_buffers(is->video_st->codec);
6c7d3ead
MN
1490
1491 SDL_LockMutex(is->pictq_mutex);
1492 // Make sure there are no long delay timers (ideally we should just flush the queue, but that's harder)
1493 for(i=0; i<VIDEO_PICTURE_QUEUE_SIZE; i++){
d38c9e7a 1494 is->pictq[i].target_clock= 0;
6c7d3ead
MN
1495 }
1496 while (is->pictq_size && !is->videoq.abort_request) {
1497 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1498 }
1a620dd7 1499 is->video_current_pos= -1;
6c7d3ead
MN
1500 SDL_UnlockMutex(is->pictq_mutex);
1501
41db429d
MN
1502 is->last_dts_for_fault_detection=
1503 is->last_pts_for_fault_detection= INT64_MIN;
967030eb 1504 is->frame_last_pts= AV_NOPTS_VALUE;
f7119e42 1505 is->frame_last_delay = 0;
b25453bd 1506 is->frame_timer = (double)av_gettime() / 1000000.0;
d38c9e7a
MN
1507 is->skip_frames= 1;
1508 is->skip_frames_index= 0;
917d2bb3 1509 return 0;
39c6a118
MN
1510 }
1511
638c9d91
FB
1512 /* NOTE: ipts is the PTS of the _first_ picture beginning in
1513 this packet, if any */
7fb262b5 1514 is->video_st->codec->reordered_opaque= pkt->pts;
bea18375 1515 len1 = avcodec_decode_video2(is->video_st->codec,
620e8baf 1516 frame, &got_picture,
bea18375 1517 pkt);
620e8baf 1518
99e0b12b 1519 if (got_picture) {
df7d6e48
SS
1520 if(pkt->dts != AV_NOPTS_VALUE){
1521 is->faulty_dts += pkt->dts <= is->last_dts_for_fault_detection;
1522 is->last_dts_for_fault_detection= pkt->dts;
1523 }
1524 if(frame->reordered_opaque != AV_NOPTS_VALUE){
1525 is->faulty_pts += frame->reordered_opaque <= is->last_pts_for_fault_detection;
1526 is->last_pts_for_fault_detection= frame->reordered_opaque;
1527 }
99e0b12b 1528 }
41db429d
MN
1529
1530 if( ( decoder_reorder_pts==1
ecbed31c 1531 || (decoder_reorder_pts && is->faulty_pts<is->faulty_dts)
41db429d 1532 || pkt->dts == AV_NOPTS_VALUE)
7fb262b5 1533 && frame->reordered_opaque != AV_NOPTS_VALUE)
917d2bb3 1534 *pts= frame->reordered_opaque;
620e8baf 1535 else if(pkt->dts != AV_NOPTS_VALUE)
917d2bb3 1536 *pts= pkt->dts;
620e8baf 1537 else
917d2bb3
MN
1538 *pts= 0;
1539
fb966f99
MN
1540// if (len1 < 0)
1541// break;
d38c9e7a
MN
1542 if (got_picture){
1543 is->skip_frames_index += 1;
1544 if(is->skip_frames_index >= is->skip_frames){
1545 is->skip_frames_index -= FFMAX(is->skip_frames, 1.0);
1546 return 1;
1547 }
1548
1549 }
917d2bb3
MN
1550 return 0;
1551}
1552
1553#if CONFIG_AVFILTER
1554typedef struct {
1555 VideoState *is;
1556 AVFrame *frame;
dd0c789b 1557 int use_dr1;
917d2bb3
MN
1558} FilterPriv;
1559
dd0c789b
BB
1560static int input_get_buffer(AVCodecContext *codec, AVFrame *pic)
1561{
1562 AVFilterContext *ctx = codec->opaque;
1563 AVFilterPicRef *ref;
1564 int perms = AV_PERM_WRITE;
65929418 1565 int i, w, h, stride[4];
dd0c789b
BB
1566 unsigned edge;
1567
1568 if(pic->buffer_hints & FF_BUFFER_HINTS_VALID) {
1569 if(pic->buffer_hints & FF_BUFFER_HINTS_READABLE) perms |= AV_PERM_READ;
1570 if(pic->buffer_hints & FF_BUFFER_HINTS_PRESERVE) perms |= AV_PERM_PRESERVE;
1571 if(pic->buffer_hints & FF_BUFFER_HINTS_REUSABLE) perms |= AV_PERM_REUSE2;
1572 }
1573 if(pic->reference) perms |= AV_PERM_READ | AV_PERM_PRESERVE;
1574
1575 w = codec->width;
1576 h = codec->height;
1577 avcodec_align_dimensions2(codec, &w, &h, stride);
1578 edge = codec->flags & CODEC_FLAG_EMU_EDGE ? 0 : avcodec_get_edge_width();
1579 w += edge << 1;
1580 h += edge << 1;
1581
1582 if(!(ref = avfilter_get_video_buffer(ctx->outputs[0], perms, w, h)))
1583 return -1;
1584
1585 ref->w = codec->width;
1586 ref->h = codec->height;
cfb7e6e6 1587 for(i = 0; i < 4; i ++) {
d54e0948
HM
1588 unsigned hshift = (i == 1 || i == 2) ? av_pix_fmt_descriptors[ref->format].log2_chroma_w : 0;
1589 unsigned vshift = (i == 1 || i == 2) ? av_pix_fmt_descriptors[ref->format].log2_chroma_h : 0;
dd0c789b 1590
3635c07b 1591 if (ref->data[i]) {
e53ca636 1592 ref->data[i] += (edge >> hshift) + ((edge * ref->linesize[i]) >> vshift);
3635c07b 1593 }
dd0c789b
BB
1594 pic->data[i] = ref->data[i];
1595 pic->linesize[i] = ref->linesize[i];
1596 }
1597 pic->opaque = ref;
1598 pic->age = INT_MAX;
1599 pic->type = FF_BUFFER_TYPE_USER;
867ab7fb 1600 pic->reordered_opaque = codec->reordered_opaque;
dd0c789b
BB
1601 return 0;
1602}
1603
1604static void input_release_buffer(AVCodecContext *codec, AVFrame *pic)
1605{
1606 memset(pic->data, 0, sizeof(pic->data));
1607 avfilter_unref_pic(pic->opaque);
1608}
1609
12bd3c1f
JM
1610static int input_reget_buffer(AVCodecContext *codec, AVFrame *pic)
1611{
1612 AVFilterPicRef *ref = pic->opaque;
1613
1614 if (pic->data[0] == NULL) {
1615 pic->buffer_hints |= FF_BUFFER_HINTS_READABLE;
1616 return codec->get_buffer(codec, pic);
1617 }
1618
1619 if ((codec->width != ref->w) || (codec->height != ref->h) ||
d54e0948 1620 (codec->pix_fmt != ref->format)) {
12bd3c1f
JM
1621 av_log(codec, AV_LOG_ERROR, "Picture properties changed.\n");
1622 return -1;
1623 }
1624
1625 pic->reordered_opaque = codec->reordered_opaque;
1626 return 0;
1627}
1628
917d2bb3
MN
1629static int input_init(AVFilterContext *ctx, const char *args, void *opaque)
1630{
1631 FilterPriv *priv = ctx->priv;
dd0c789b 1632 AVCodecContext *codec;
917d2bb3
MN
1633 if(!opaque) return -1;
1634
1635 priv->is = opaque;
dd0c789b
BB
1636 codec = priv->is->video_st->codec;
1637 codec->opaque = ctx;
1638 if(codec->codec->capabilities & CODEC_CAP_DR1) {
1639 priv->use_dr1 = 1;
1640 codec->get_buffer = input_get_buffer;
1641 codec->release_buffer = input_release_buffer;
12bd3c1f 1642 codec->reget_buffer = input_reget_buffer;
dd0c789b
BB
1643 }
1644
917d2bb3
MN
1645 priv->frame = avcodec_alloc_frame();
1646
1647 return 0;
1648}
1649
1650static void input_uninit(AVFilterContext *ctx)
1651{
1652 FilterPriv *priv = ctx->priv;
1653 av_free(priv->frame);
1654}
1655
1656static int input_request_frame(AVFilterLink *link)
1657{
1658 FilterPriv *priv = link->src->priv;
1659 AVFilterPicRef *picref;
3966a574 1660 int64_t pts = 0;
917d2bb3
MN
1661 AVPacket pkt;
1662 int ret;
1663
1664 while (!(ret = get_video_frame(priv->is, priv->frame, &pts, &pkt)))
1665 av_free_packet(&pkt);
1666 if (ret < 0)
1667 return -1;
1668
dd0c789b 1669 if(priv->use_dr1) {
c41c5b02 1670 picref = avfilter_ref_pic(priv->frame->opaque, ~0);
dd0c789b 1671 } else {
cf097cbc
BB
1672 picref = avfilter_get_video_buffer(link, AV_PERM_WRITE, link->w, link->h);
1673 av_picture_copy((AVPicture *)&picref->data, (AVPicture *)priv->frame,
d54e0948 1674 picref->format, link->w, link->h);
dd0c789b 1675 }
917d2bb3
MN
1676 av_free_packet(&pkt);
1677
1678 picref->pts = pts;
bb409513 1679 picref->pos = pkt.pos;
917d2bb3 1680 picref->pixel_aspect = priv->is->video_st->codec->sample_aspect_ratio;
c41c5b02 1681 avfilter_start_frame(link, picref);
917d2bb3
MN
1682 avfilter_draw_slice(link, 0, link->h, 1);
1683 avfilter_end_frame(link);
917d2bb3
MN
1684
1685 return 0;
1686}
1687
1688static int input_query_formats(AVFilterContext *ctx)
1689{
1690 FilterPriv *priv = ctx->priv;
1691 enum PixelFormat pix_fmts[] = {
1692 priv->is->video_st->codec->pix_fmt, PIX_FMT_NONE
1693 };
1694
1695 avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
1696 return 0;
1697}
1698
1699static int input_config_props(AVFilterLink *link)
1700{
1701 FilterPriv *priv = link->src->priv;
1702 AVCodecContext *c = priv->is->video_st->codec;
1703
1704 link->w = c->width;
1705 link->h = c->height;
1706
1707 return 0;
1708}
1709
1710static AVFilter input_filter =
1711{
1712 .name = "ffplay_input",
1713
1714 .priv_size = sizeof(FilterPriv),
1715
1716 .init = input_init,
1717 .uninit = input_uninit,
1718
1719 .query_formats = input_query_formats,
1720
1721 .inputs = (AVFilterPad[]) {{ .name = NULL }},
1722 .outputs = (AVFilterPad[]) {{ .name = "default",
72415b2a 1723 .type = AVMEDIA_TYPE_VIDEO,
917d2bb3
MN
1724 .request_frame = input_request_frame,
1725 .config_props = input_config_props, },
1726 { .name = NULL }},
1727};
1728
1729static void output_end_frame(AVFilterLink *link)
1730{
1731}
1732
1733static int output_query_formats(AVFilterContext *ctx)
1734{
1735 enum PixelFormat pix_fmts[] = { PIX_FMT_YUV420P, PIX_FMT_NONE };
1736
1737 avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
1738 return 0;
1739}
1740
1741static int get_filtered_video_frame(AVFilterContext *ctx, AVFrame *frame,
bb409513 1742 int64_t *pts, int64_t *pos)
917d2bb3
MN
1743{
1744 AVFilterPicRef *pic;
1745
1746 if(avfilter_request_frame(ctx->inputs[0]))
1747 return -1;
1748 if(!(pic = ctx->inputs[0]->cur_pic))
1749 return -1;
1750 ctx->inputs[0]->cur_pic = NULL;
1751
1752 frame->opaque = pic;
1753 *pts = pic->pts;
bb409513 1754 *pos = pic->pos;
917d2bb3
MN
1755
1756 memcpy(frame->data, pic->data, sizeof(frame->data));
1757 memcpy(frame->linesize, pic->linesize, sizeof(frame->linesize));
1758
1759 return 1;
1760}
1761
1762static AVFilter output_filter =
1763{
1764 .name = "ffplay_output",
1765
1766 .query_formats = output_query_formats,
1767
1768 .inputs = (AVFilterPad[]) {{ .name = "default",
72415b2a 1769 .type = AVMEDIA_TYPE_VIDEO,
917d2bb3
MN
1770 .end_frame = output_end_frame,
1771 .min_perms = AV_PERM_READ, },
1772 { .name = NULL }},
1773 .outputs = (AVFilterPad[]) {{ .name = NULL }},
1774};
1775#endif /* CONFIG_AVFILTER */
1776
1777static int video_thread(void *arg)
1778{
1779 VideoState *is = arg;
1780 AVFrame *frame= avcodec_alloc_frame();
4903b5ca 1781 int64_t pts_int;
917d2bb3
MN
1782 double pts;
1783 int ret;
1784
1785#if CONFIG_AVFILTER
4903b5ca 1786 int64_t pos;
3f073fa2 1787 char sws_flags_str[128];
917d2bb3
MN
1788 AVFilterContext *filt_src = NULL, *filt_out = NULL;
1789 AVFilterGraph *graph = av_mallocz(sizeof(AVFilterGraph));
3f073fa2
SS
1790 snprintf(sws_flags_str, sizeof(sws_flags_str), "flags=%d", sws_flags);
1791 graph->scale_sws_opts = av_strdup(sws_flags_str);
917d2bb3
MN
1792
1793 if(!(filt_src = avfilter_open(&input_filter, "src"))) goto the_end;
1794 if(!(filt_out = avfilter_open(&output_filter, "out"))) goto the_end;
1795
1796 if(avfilter_init_filter(filt_src, NULL, is)) goto the_end;
1797 if(avfilter_init_filter(filt_out, NULL, frame)) goto the_end;
1798
1799
1800 if(vfilters) {
1801 AVFilterInOut *outputs = av_malloc(sizeof(AVFilterInOut));
1802 AVFilterInOut *inputs = av_malloc(sizeof(AVFilterInOut));
1803
1804 outputs->name = av_strdup("in");
1805 outputs->filter = filt_src;
1806 outputs->pad_idx = 0;
1807 outputs->next = NULL;
1808
1809 inputs->name = av_strdup("out");
1810 inputs->filter = filt_out;
1811 inputs->pad_idx = 0;
1812 inputs->next = NULL;
1813
1814 if (avfilter_graph_parse(graph, vfilters, inputs, outputs, NULL) < 0)
1815 goto the_end;
1816 av_freep(&vfilters);
1817 } else {
1818 if(avfilter_link(filt_src, 0, filt_out, 0) < 0) goto the_end;
1819 }
1820 avfilter_graph_add_filter(graph, filt_src);
1821 avfilter_graph_add_filter(graph, filt_out);
1822
1823 if(avfilter_graph_check_validity(graph, NULL)) goto the_end;
1824 if(avfilter_graph_config_formats(graph, NULL)) goto the_end;
1825 if(avfilter_graph_config_links(graph, NULL)) goto the_end;
1826
1827 is->out_video_filter = filt_out;
1828#endif
1829
1830 for(;;) {
1831#if !CONFIG_AVFILTER
1832 AVPacket pkt;
1833#endif
1834 while (is->paused && !is->videoq.abort_request)
1835 SDL_Delay(10);
1836#if CONFIG_AVFILTER
bb409513 1837 ret = get_filtered_video_frame(filt_out, frame, &pts_int, &pos);
917d2bb3
MN
1838#else
1839 ret = get_video_frame(is, frame, &pts_int, &pkt);
1840#endif
1841
1842 if (ret < 0) goto the_end;
1843
1844 if (!ret)
1845 continue;
1846
3966a574 1847 pts = pts_int*av_q2d(is->video_st->time_base);
917d2bb3
MN
1848
1849#if CONFIG_AVFILTER
bb409513 1850 ret = output_picture2(is, frame, pts, pos);
917d2bb3 1851#else
fca62599 1852 ret = output_picture2(is, frame, pts, pkt.pos);
917d2bb3
MN
1853 av_free_packet(&pkt);
1854#endif
1855 if (ret < 0)
1856 goto the_end;
1857
115329f1 1858 if (step)
bba04f1e
WH
1859 if (cur_stream)
1860 stream_pause(cur_stream);
01310af2
FB
1861 }
1862 the_end:
917d2bb3
MN
1863#if CONFIG_AVFILTER
1864 avfilter_graph_destroy(graph);
1865 av_freep(&graph);
1866#endif
c6b1edc9 1867 av_free(frame);
01310af2
FB
1868 return 0;
1869}
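
/*
 * Illustrative note, not part of the original file: pts_int above is expressed
 * in stream time_base units and is converted to seconds with av_q2d(), i.e.
 * seconds = pts_int * time_base.num / time_base.den.  Minimal stand-alone
 * sketch; ExampleRational is a stand-in for AVRational and the 90 kHz clock
 * is only an example, not taken from this file.
 */
#include <stdint.h>

typedef struct ExampleRational { int num, den; } ExampleRational;

static double example_pts_to_seconds(int64_t pts_int, ExampleRational tb)
{
    return pts_int * (double)tb.num / (double)tb.den;
}

/* example_pts_to_seconds(180000, (ExampleRational){1, 90000}) == 2.0 seconds */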
1870
72ce053b
IC
1871static int subtitle_thread(void *arg)
1872{
1873 VideoState *is = arg;
1874 SubPicture *sp;
1875 AVPacket pkt1, *pkt = &pkt1;
1876 int len1, got_subtitle;
1877 double pts;
1878 int i, j;
1879 int r, g, b, y, u, v, a;
1880
1881 for(;;) {
1882 while (is->paused && !is->subtitleq.abort_request) {
1883 SDL_Delay(10);
1884 }
1885 if (packet_queue_get(&is->subtitleq, pkt, 1) < 0)
1886 break;
115329f1 1887
39c6a118
MN
1888 if(pkt->data == flush_pkt.data){
1889 avcodec_flush_buffers(is->subtitle_st->codec);
1890 continue;
1891 }
72ce053b
IC
1892 SDL_LockMutex(is->subpq_mutex);
1893 while (is->subpq_size >= SUBPICTURE_QUEUE_SIZE &&
1894 !is->subtitleq.abort_request) {
1895 SDL_CondWait(is->subpq_cond, is->subpq_mutex);
1896 }
1897 SDL_UnlockMutex(is->subpq_mutex);
115329f1 1898
72ce053b
IC
1899 if (is->subtitleq.abort_request)
1900 goto the_end;
115329f1 1901
72ce053b
IC
1902 sp = &is->subpq[is->subpq_windex];
1903
1904 /* NOTE: pts is the PTS of the _first_ subtitle beginning in
1905 this packet, if any */
1906 pts = 0;
1907 if (pkt->pts != AV_NOPTS_VALUE)
1908 pts = av_q2d(is->subtitle_st->time_base)*pkt->pts;
1909
bea18375 1910 len1 = avcodec_decode_subtitle2(is->subtitle_st->codec,
115329f1 1911 &sp->sub, &got_subtitle,
bea18375 1912 pkt);
72ce053b
IC
1913// if (len1 < 0)
1914// break;
1915 if (got_subtitle && sp->sub.format == 0) {
1916 sp->pts = pts;
115329f1 1917
72ce053b
IC
1918 for (i = 0; i < sp->sub.num_rects; i++)
1919 {
db4fac64 1920 for (j = 0; j < sp->sub.rects[i]->nb_colors; j++)
72ce053b 1921 {
25b4c651 1922 RGBA_IN(r, g, b, a, (uint32_t*)sp->sub.rects[i]->pict.data[1] + j);
72ce053b
IC
1923 y = RGB_TO_Y_CCIR(r, g, b);
1924 u = RGB_TO_U_CCIR(r, g, b, 0);
1925 v = RGB_TO_V_CCIR(r, g, b, 0);
25b4c651 1926 YUVA_OUT((uint32_t*)sp->sub.rects[i]->pict.data[1] + j, y, u, v, a);
72ce053b
IC
1927 }
1928 }
1929
1930 /* now we can update the picture count */
1931 if (++is->subpq_windex == SUBPICTURE_QUEUE_SIZE)
1932 is->subpq_windex = 0;
1933 SDL_LockMutex(is->subpq_mutex);
1934 is->subpq_size++;
1935 SDL_UnlockMutex(is->subpq_mutex);
1936 }
1937 av_free_packet(pkt);
115329f1 1938// if (step)
72ce053b
IC
1939// if (cur_stream)
1940// stream_pause(cur_stream);
1941 }
1942 the_end:
1943 return 0;
1944}
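
/*
 * Illustrative sketch, not part of the original file: the palette loop in
 * subtitle_thread() rewrites every RGBA palette entry of a bitmap subtitle as
 * YUVA so the subtitle can later be blended onto the YUV overlay.  The float
 * version below shows the BT.601 studio-range mapping that the RGB_TO_*_CCIR
 * macros implement; the real macros use fixed-point arithmetic, so results
 * may differ by one code value.
 */
static void example_rgb_to_ycbcr601(int r, int g, int b,
                                    int *y, int *cb, int *cr)
{
    /* BT.601 coefficients scaled to the 16..235 / 16..240 studio ranges */
    *y  = (int)( 16.0 + 0.257 * r + 0.504 * g + 0.098 * b + 0.5);
    *cb = (int)(128.0 - 0.148 * r - 0.291 * g + 0.439 * b + 0.5);
    *cr = (int)(128.0 + 0.439 * r - 0.368 * g - 0.071 * b + 0.5);
}

/* pure white (255,255,255) maps to approximately Y=235, Cb=Cr=128 */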
1945
01310af2
FB
1946 /* copy samples for viewing in the audio visualization window */
1947static void update_sample_display(VideoState *is, short *samples, int samples_size)
1948{
1949 int size, len, channels;
1950
01f4895c 1951 channels = is->audio_st->codec->channels;
01310af2
FB
1952
1953 size = samples_size / sizeof(short);
1954 while (size > 0) {
1955 len = SAMPLE_ARRAY_SIZE - is->sample_array_index;
1956 if (len > size)
1957 len = size;
1958 memcpy(is->sample_array + is->sample_array_index, samples, len * sizeof(short));
1959 samples += len;
1960 is->sample_array_index += len;
1961 if (is->sample_array_index >= SAMPLE_ARRAY_SIZE)
1962 is->sample_array_index = 0;
1963 size -= len;
1964 }
1965}
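
/*
 * Illustrative note, not part of the original file: sample_array works as a
 * fixed-size ring buffer; the write index wraps to 0 at SAMPLE_ARRAY_SIZE so
 * the audio display always sees the most recent samples.  Minimal stand-alone
 * sketch of such a wrap-around write (sizes and names are examples only):
 */
#include <string.h>

#define EXAMPLE_RING_SIZE 8

static short example_ring[EXAMPLE_RING_SIZE];
static int   example_ring_index;

static void example_ring_write(const short *src, int count)
{
    while (count > 0) {
        int len = EXAMPLE_RING_SIZE - example_ring_index;   /* room before wrap */
        if (len > count)
            len = count;
        memcpy(example_ring + example_ring_index, src, len * sizeof(short));
        src   += len;
        count -= len;
        example_ring_index = (example_ring_index + len) % EXAMPLE_RING_SIZE;
    }
}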
1966
01310af2
FB
1967 /* return the new audio buffer size (samples can be added or deleted
1968 to get better sync if video or the external clock is the master) */
115329f1 1969static int synchronize_audio(VideoState *is, short *samples,
638c9d91 1970 int samples_size1, double pts)
01310af2 1971{
638c9d91 1972 int n, samples_size;
01310af2 1973 double ref_clock;
115329f1 1974
01f4895c 1975 n = 2 * is->audio_st->codec->channels;
638c9d91 1976 samples_size = samples_size1;
01310af2 1977
01310af2 1978 /* if not master, then we try to remove or add samples to correct the clock */
01310af2 1979 if (((is->av_sync_type == AV_SYNC_VIDEO_MASTER && is->video_st) ||
638c9d91
FB
1980 is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
1981 double diff, avg_diff;
01310af2 1982 int wanted_size, min_size, max_size, nb_samples;
115329f1 1983
638c9d91
FB
1984 ref_clock = get_master_clock(is);
1985 diff = get_audio_clock(is) - ref_clock;
115329f1 1986
638c9d91
FB
1987 if (diff < AV_NOSYNC_THRESHOLD) {
1988 is->audio_diff_cum = diff + is->audio_diff_avg_coef * is->audio_diff_cum;
1989 if (is->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) {
1990 /* not enough measures to have a correct estimate */
1991 is->audio_diff_avg_count++;
1992 } else {
1993 /* estimate the A-V difference */
1994 avg_diff = is->audio_diff_cum * (1.0 - is->audio_diff_avg_coef);
1995
1996 if (fabs(avg_diff) >= is->audio_diff_threshold) {
01f4895c 1997 wanted_size = samples_size + ((int)(diff * is->audio_st->codec->sample_rate) * n);
638c9d91 1998 nb_samples = samples_size / n;
115329f1 1999
638c9d91
FB
2000 min_size = ((nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
2001 max_size = ((nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
2002 if (wanted_size < min_size)
2003 wanted_size = min_size;
2004 else if (wanted_size > max_size)
2005 wanted_size = max_size;
115329f1 2006
638c9d91
FB
2007 /* add or remove samples to correct the sync */
2008 if (wanted_size < samples_size) {
2009 /* remove samples */
2010 samples_size = wanted_size;
2011 } else if (wanted_size > samples_size) {
2012 uint8_t *samples_end, *q;
2013 int nb;
115329f1 2014
638c9d91
FB
2015 /* add samples */
2016 nb = (wanted_size - samples_size); /* number of bytes to add; must be positive here */
2017 samples_end = (uint8_t *)samples + samples_size - n;
2018 q = samples_end + n;
2019 while (nb > 0) {
2020 memcpy(q, samples_end, n);
2021 q += n;
2022 nb -= n;
2023 }
2024 samples_size = wanted_size;
2025 }
2026 }
2027#if 0
115329f1
DB
2028 printf("diff=%f adiff=%f sample_diff=%d apts=%0.3f vpts=%0.3f %f\n",
2029 diff, avg_diff, samples_size - samples_size1,
638c9d91
FB
2030 is->audio_clock, is->video_clock, is->audio_diff_threshold);
2031#endif
01310af2 2032 }
638c9d91
FB
2033 } else {
2034 /* difference is too big: may be due to initial PTS errors, so
2035 reset the A-V filter */
2036 is->audio_diff_avg_count = 0;
2037 is->audio_diff_cum = 0;
01310af2
FB
2038 }
2039 }
2040
01310af2
FB
2041 return samples_size;
2042}
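
/*
 * Illustrative sketch, not part of the original file: when video (or the
 * external clock) is the master, the audio error "diff" is folded into an
 * exponentially weighted sum, audio_diff_cum = diff + coef * audio_diff_cum,
 * and the smoothed error is audio_diff_cum * (1 - coef) with
 * coef = exp(log(0.01) / AUDIO_DIFF_AVG_NB).  Only then is the buffer
 * stretched or shrunk, clamped to +/- SAMPLE_CORRECTION_PERCENT_MAX percent.
 * Stand-alone version of that arithmetic (the warm-up counter
 * audio_diff_avg_count is omitted; names are local to the example):
 */
#include <math.h>

static double example_diff_cum;                           /* weighted error sum */
static const double example_coef = 0.7943282347242815;    /* exp(log(0.01)/20)  */

static int example_correct_size(int samples_size, int bytes_per_frame,
                                int sample_rate, double diff, double threshold)
{
    int wanted = samples_size;

    example_diff_cum = diff + example_coef * example_diff_cum;
    double avg = example_diff_cum * (1.0 - example_coef);  /* smoothed error */

    if (fabs(avg) >= threshold) {
        /* resize by the raw error, as synchronize_audio() does */
        wanted = samples_size + (int)(diff * sample_rate) * bytes_per_frame;
        int min_size = samples_size * (100 - 10) / 100;    /* -10% */
        int max_size = samples_size * (100 + 10) / 100;    /* +10% */
        if (wanted < min_size) wanted = min_size;
        if (wanted > max_size) wanted = max_size;
    }
    return wanted;
}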
2043
2044 /* decode one audio frame and return its uncompressed size */
5a4476e2 2045static int audio_decode_frame(VideoState *is, double *pts_ptr)
01310af2 2046{
bea18375 2047 AVPacket *pkt_temp = &is->audio_pkt_temp;
01310af2 2048 AVPacket *pkt = &is->audio_pkt;
abdff646 2049 AVCodecContext *dec= is->audio_st->codec;
72ea344b 2050 int n, len1, data_size;
01310af2
FB
2051 double pts;
2052
2053 for(;;) {
72ea344b 2054 /* NOTE: the audio packet can contain several frames */
bea18375 2055 while (pkt_temp->size > 0) {
5a4476e2 2056 data_size = sizeof(is->audio_buf1);
bea18375 2057 len1 = avcodec_decode_audio3(dec,
5a4476e2 2058 (int16_t *)is->audio_buf1, &data_size,
bea18375 2059 pkt_temp);
72ea344b
FB
2060 if (len1 < 0) {
2061 /* if error, we skip the frame */
bea18375 2062 pkt_temp->size = 0;
01310af2 2063 break;
72ea344b 2064 }
115329f1 2065
bea18375
TB
2066 pkt_temp->data += len1;
2067 pkt_temp->size -= len1;
72ea344b
FB
2068 if (data_size <= 0)
2069 continue;
5a4476e2
PR
2070
2071 if (dec->sample_fmt != is->audio_src_fmt) {
2072 if (is->reformat_ctx)
2073 av_audio_convert_free(is->reformat_ctx);
2074 is->reformat_ctx= av_audio_convert_alloc(SAMPLE_FMT_S16, 1,
2075 dec->sample_fmt, 1, NULL, 0);
2076 if (!is->reformat_ctx) {
2077 fprintf(stderr, "Cannot convert %s sample format to %s sample format\n",
2078 avcodec_get_sample_fmt_name(dec->sample_fmt),
2079 avcodec_get_sample_fmt_name(SAMPLE_FMT_S16));
2080 break;
2081 }
2082 is->audio_src_fmt= dec->sample_fmt;
2083 }
2084
2085 if (is->reformat_ctx) {
2086 const void *ibuf[6]= {is->audio_buf1};
2087 void *obuf[6]= {is->audio_buf2};
2088 int istride[6]= {av_get_bits_per_sample_format(dec->sample_fmt)/8};
2089 int ostride[6]= {2};
2090 int len= data_size/istride[0];
2091 if (av_audio_convert(is->reformat_ctx, obuf, ostride, ibuf, istride, len)<0) {
2092 printf("av_audio_convert() failed\n");
2093 break;
2094 }
2095 is->audio_buf= is->audio_buf2;
2096 /* FIXME: existing code assumes that data_size equals framesize*channels*2;
2097 remove this legacy cruft */
2098 data_size= len*2;
2099 }else{
2100 is->audio_buf= is->audio_buf1;
2101 }
2102
72ea344b
FB
2103 /* if no pts, then compute it */
2104 pts = is->audio_clock;
2105 *pts_ptr = pts;
abdff646 2106 n = 2 * dec->channels;
115329f1 2107 is->audio_clock += (double)data_size /
abdff646 2108 (double)(n * dec->sample_rate);
638c9d91 2109#if defined(DEBUG_SYNC)
72ea344b
FB
2110 {
2111 static double last_clock;
2112 printf("audio: delay=%0.3f clock=%0.3f pts=%0.3f\n",
2113 is->audio_clock - last_clock,
2114 is->audio_clock, pts);
2115 last_clock = is->audio_clock;
01310af2 2116 }
72ea344b
FB
2117#endif
2118 return data_size;
01310af2
FB
2119 }
2120
72ea344b
FB
2121 /* free the current packet */
2122 if (pkt->data)
01310af2 2123 av_free_packet(pkt);
115329f1 2124
72ea344b
FB
2125 if (is->paused || is->audioq.abort_request) {
2126 return -1;
2127 }
115329f1 2128
01310af2
FB
2129 /* read next packet */
2130 if (packet_queue_get(&is->audioq, pkt, 1) < 0)
2131 return -1;
39c6a118 2132 if(pkt->data == flush_pkt.data){
abdff646 2133 avcodec_flush_buffers(dec);
39c6a118
MN
2134 continue;
2135 }
2136
bea18375
TB
2137 pkt_temp->data = pkt->data;
2138 pkt_temp->size = pkt->size;
115329f1 2139
72ea344b
FB
2140 /* update the audio clock with the pts, if available */
2141 if (pkt->pts != AV_NOPTS_VALUE) {
c0df9d75 2142 is->audio_clock = av_q2d(is->audio_st->time_base)*pkt->pts;
72ea344b 2143 }
01310af2
FB
2144 }
2145}
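
/*
 * Illustrative note, not part of the original file: after conversion to 16-bit
 * samples, every decoded chunk advances is->audio_clock by its duration,
 * data_size / (2 * channels * sample_rate) seconds.  Minimal stand-alone
 * helper plus a worked number (the values are examples only):
 */
static double example_chunk_duration(int data_size, int channels, int sample_rate)
{
    int bytes_per_second = 2 * channels * sample_rate;   /* 2 bytes per sample */
    return (double)data_size / bytes_per_second;
}

/* example_chunk_duration(4096, 2, 44100) ~= 0.0232, i.e. about 23 ms of audio */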
2146
638c9d91
FB
2147 /* get the amount of audio data currently buffered for output, in bytes. With SDL, we
2148 cannot get precise hardware buffer fullness information */
2149static int audio_write_get_buf_size(VideoState *is)
01310af2 2150{
b09b580b 2151 return is->audio_buf_size - is->audio_buf_index;
01310af2
FB
2152}
2153
2154
2155/* prepare a new audio buffer */
358061f6 2156static void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
01310af2
FB
2157{
2158 VideoState *is = opaque;
2159 int audio_size, len1;
2160 double pts;
2161
2162 audio_callback_time = av_gettime();
115329f1 2163
01310af2
FB
2164 while (len > 0) {
2165 if (is->audio_buf_index >= is->audio_buf_size) {
5a4476e2 2166 audio_size = audio_decode_frame(is, &pts);
01310af2
FB
2167 if (audio_size < 0) {
2168 /* if error, just output silence */
1a1078fa 2169 is->audio_buf = is->audio_buf1;
01310af2
FB
2170 is->audio_buf_size = 1024;
2171 memset(is->audio_buf, 0, is->audio_buf_size);
2172 } else {
2173 if (is->show_audio)
2174 update_sample_display(is, (int16_t *)is->audio_buf, audio_size);
115329f1 2175 audio_size = synchronize_audio(is, (int16_t *)is->audio_buf, audio_size,
01310af2
FB
2176 pts);
2177 is->audio_buf_size = audio_size;
2178 }
2179 is->audio_buf_index = 0;
2180 }
2181 len1 = is->audio_buf_size - is->audio_buf_index;
2182 if (len1 > len)
2183 len1 = len;
2184 memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);
2185 len -= len1;
2186 stream += len1;
2187 is->audio_buf_index += len1;
2188 }
2189}
2190
01310af2
FB
2191/* open a given stream. Return 0 if OK */
2192static int stream_component_open(VideoState *is, int stream_index)
2193{
2194 AVFormatContext *ic = is->ic;
fe74099a 2195 AVCodecContext *avctx;
01310af2
FB
2196 AVCodec *codec;
2197 SDL_AudioSpec wanted_spec, spec;
2198
2199 if (stream_index < 0 || stream_index >= ic->nb_streams)
2200 return -1;
fe74099a 2201 avctx = ic->streams[stream_index]->codec;
115329f1 2202
01310af2 2203 /* request at most 2 decoded channels for audio output */
72415b2a 2204 if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) {
fe74099a
SS
2205 if (avctx->channels > 0) {
2206 avctx->request_channels = FFMIN(2, avctx->channels);
94eadc8b 2207 } else {
fe74099a 2208 avctx->request_channels = 2;
638c9d91 2209 }
01310af2
FB
2210 }
2211
fe74099a
SS
2212 codec = avcodec_find_decoder(avctx->codec_id);
2213 avctx->debug_mv = debug_mv;
2214 avctx->debug = debug;
2215 avctx->workaround_bugs = workaround_bugs;
2216 avctx->lowres = lowres;
2217 if(lowres) avctx->flags |= CODEC_FLAG_EMU_EDGE;
2218 avctx->idct_algo= idct;
2219 if(fast) avctx->flags2 |= CODEC_FLAG2_FAST;
2220 avctx->skip_frame= skip_frame;
2221 avctx->skip_idct= skip_idct;
2222 avctx->skip_loop_filter= skip_loop_filter;
2223 avctx->error_recognition= error_recognition;
2224 avctx->error_concealment= error_concealment;
2225 avcodec_thread_init(avctx, thread_count);
2226
2227 set_context_opts(avctx, avcodec_opts[avctx->codec_type], 0);
e43d7a18 2228
01310af2 2229 if (!codec ||
fe74099a 2230 avcodec_open(avctx, codec) < 0)
01310af2 2231 return -1;
51b73087
JR
2232
2233 /* prepare audio output */
72415b2a 2234 if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) {
fe74099a 2235 wanted_spec.freq = avctx->sample_rate;
51b73087 2236 wanted_spec.format = AUDIO_S16SYS;
fe74099a 2237 wanted_spec.channels = avctx->channels;
51b73087
JR
2238 wanted_spec.silence = 0;
2239 wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
2240 wanted_spec.callback = sdl_audio_callback;
2241 wanted_spec.userdata = is;
2242 if (SDL_OpenAudio(&wanted_spec, &spec) < 0) {
2243 fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
2244 return -1;
2245 }
2246 is->audio_hw_buf_size = spec.size;
5a4476e2 2247 is->audio_src_fmt= SAMPLE_FMT_S16;
51b73087
JR
2248 }
2249
3f3fe38d 2250 ic->streams[stream_index]->discard = AVDISCARD_DEFAULT;
fe74099a 2251 switch(avctx->codec_type) {
72415b2a 2252 case AVMEDIA_TYPE_AUDIO:
01310af2
FB
2253 is->audio_stream = stream_index;
2254 is->audio_st = ic->streams[stream_index];
2255 is->audio_buf_size = 0;
2256 is->audio_buf_index = 0;
638c9d91
FB
2257
2258 /* init averaging filter */
2259 is->audio_diff_avg_coef = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
2260 is->audio_diff_avg_count = 0;
2261 /* since we do not have a precise enough audio FIFO fullness measure,
2262 we correct audio sync only if the error is larger than this threshold */
fe74099a 2263 is->audio_diff_threshold = 2.0 * SDL_AUDIO_BUFFER_SIZE / avctx->sample_rate;
638c9d91 2264
01310af2
FB
2265 memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
2266 packet_queue_init(&is->audioq);
bb270c08 2267 SDL_PauseAudio(0);
01310af2 2268 break;
72415b2a 2269 case AVMEDIA_TYPE_VIDEO:
01310af2
FB
2270 is->video_stream = stream_index;
2271 is->video_st = ic->streams[stream_index];
2272
68aefbe8 2273// is->video_current_pts_time = av_gettime();
638c9d91 2274
01310af2
FB
2275 packet_queue_init(&is->videoq);
2276 is->video_tid = SDL_CreateThread(video_thread, is);
2277 break;
72415b2a 2278 case AVMEDIA_TYPE_SUBTITLE:
72ce053b
IC
2279 is->subtitle_stream = stream_index;
2280 is->subtitle_st = ic->streams[stream_index];
2281 packet_queue_init(&is->subtitleq);
115329f1 2282
72ce053b
IC
2283 is->subtitle_tid = SDL_CreateThread(subtitle_thread, is);
2284 break;
01310af2
FB
2285 default:
2286 break;
2287 }
2288 return 0;
2289}
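
/*
 * Illustrative note, not part of the original file: with the constants used
 * above (SDL_AUDIO_BUFFER_SIZE = 1024 samples, AUDIO_DIFF_AVG_NB = 20), the
 * audio-sync parameters chosen in stream_component_open() work out as below;
 * 44100 Hz is only an example sample rate.
 */
#include <math.h>

static void example_sync_parameters(int sample_rate)
{
    /* correct audio only when the smoothed error exceeds two SDL buffers */
    double diff_threshold = 2.0 * 1024 / sample_rate;     /* ~46 ms at 44100 Hz */

    /* weights decay so a 20-measurement-old error contributes only 1% */
    double avg_coef = exp(log(0.01) / 20);                /* ~0.794 */

    (void)diff_threshold;
    (void)avg_coef;
}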
2290
2291static void stream_component_close(VideoState *is, int stream_index)
2292{
2293 AVFormatContext *ic = is->ic;
fe74099a 2294 AVCodecContext *avctx;
115329f1 2295
72ce053b
IC
2296 if (stream_index < 0 || stream_index >= ic->nb_streams)
2297 return;
fe74099a 2298 avctx = ic->streams[stream_index]->codec;
01310af2 2299
fe74099a 2300 switch(avctx->codec_type) {
72415b2a 2301 case AVMEDIA_TYPE_AUDIO:
01310af2
FB
2302 packet_queue_abort(&is->audioq);
2303
2304 SDL_CloseAudio();
2305
2306 packet_queue_end(&is->audioq);
5a4476e2
PR
2307 if (is->reformat_ctx)
2308 av_audio_convert_free(is->reformat_ctx);
bc77fce6 2309 is->reformat_ctx = NULL;
01310af2 2310 break;
72415b2a 2311 case AVMEDIA_TYPE_VIDEO:
01310af2
FB
2312 packet_queue_abort(&is->videoq);
2313
2314 /* note: we also signal this mutex to make sure we unblock the
2315 video thread in all cases */
2316 SDL_LockMutex(is->pictq_mutex);
2317 SDL_CondSignal(is->pictq_cond);
2318 SDL_UnlockMutex(is->pictq_mutex);
2319
2320 SDL_WaitThread(is->video_tid, NULL);
2321
2322 packet_queue_end(&is->videoq);
2323 break;
72415b2a 2324 case AVMEDIA_TYPE_SUBTITLE:
72ce053b 2325 packet_queue_abort(&is->subtitleq);
115329f1 2326
72ce053b
IC
2327 /* note: we also signal this mutex to make sure we unblock the
2328 subtitle thread in all cases */
2329 SDL_LockMutex(is->subpq_mutex);
2330 is->subtitle_stream_changed = 1;
115329f1 2331
72ce053b
IC
2332 SDL_CondSignal(is->subpq_cond);
2333 SDL_UnlockMutex(is->subpq_mutex);
2334
2335 SDL_WaitThread(is->subtitle_tid, NULL);
2336
2337 packet_queue_end(&is->subtitleq);
2338 break;
01310af2
FB
2339 default:
2340 break;
2341 }
2342
3f3fe38d 2343 ic->streams[stream_index]->discard = AVDISCARD_ALL;
fe74099a
SS
2344 avcodec_close(avctx);
2345 switch(avctx->codec_type) {
72415b2a 2346 case AVMEDIA_TYPE_AUDIO:
01310af2
FB
2347 is->audio_st = NULL;
2348 is->audio_stream = -1;
2349 break;
72415b2a 2350 case AVMEDIA_TYPE_VIDEO:
01310af2
FB
2351 is->video_st = NULL;
2352 is->video_stream = -1;
2353 break;
72415b2a 2354 case AVMEDIA_TYPE_SUBTITLE:
72ce053b
IC
2355 is->subtitle_st = NULL;
2356 is->subtitle_stream = -1;
2357 break;
01310af2
FB
2358 default:
2359 break;
2360 }
2361}
2362
416e3508
FB
2363/* since we have only one decoding thread, we can use a global
2364 variable instead of a thread local variable */
2365static VideoState *global_video_state;
2366
2367static int decode_interrupt_cb(void)
2368{
2369 return (global_video_state && global_video_state->abort_request);
2370}
01310af2
FB
2371
2372/* this thread gets the stream from the disk or the network */
2373static int decode_thread(void *arg)
2374{
2375 VideoState *is = arg;
2376 AVFormatContext *ic;
6625a3de 2377 int err, i, ret;
72415b2a
SS
2378 int st_index[AVMEDIA_TYPE_NB];
2379 int st_count[AVMEDIA_TYPE_NB]={0};
2380 int st_best_packet_count[AVMEDIA_TYPE_NB];
01310af2 2381 AVPacket pkt1, *pkt = &pkt1;
61890b02 2382 AVFormatParameters params, *ap = &params;
75bb7b0a 2383 int eof=0;
d834d63b 2384 int pkt_in_play_range = 0;
01310af2 2385
6299a229
MN
2386 ic = avformat_alloc_context();
2387
6625a3de 2388 memset(st_index, -1, sizeof(st_index));
9f7490a0 2389 memset(st_best_packet_count, -1, sizeof(st_best_packet_count));
01310af2
FB
2390 is->video_stream = -1;
2391 is->audio_stream = -1;
72ce053b 2392 is->subtitle_stream = -1;
01310af2 2393
416e3508
FB
2394 global_video_state = is;
2395 url_set_interrupt_cb(decode_interrupt_cb);
2396
61890b02 2397 memset(ap, 0, sizeof(*ap));
115329f1 2398
6299a229 2399 ap->prealloced_context = 1;
e4b89522
LW
2400 ap->width = frame_width;
2401 ap->height= frame_height;
7e042912 2402 ap->time_base= (AVRational){1, 25};
e4b89522 2403 ap->pix_fmt = frame_pix_fmt;
7e042912 2404
6299a229
MN
2405 set_context_opts(ic, avformat_opts, AV_OPT_FLAG_DECODING_PARAM);
2406
61890b02 2407 err = av_open_input_file(&ic, is->filename, is->iformat, 0, ap);
638c9d91
FB
2408 if (err < 0) {
2409 print_error(is->filename, err);
2410 ret = -1;
2411 goto fail;
2412 }
01310af2 2413 is->ic = ic;
30bc6613
MN
2414
2415 if(genpts)
2416 ic->flags |= AVFMT_FLAG_GENPTS;
2417
24c07998
LA
2418 err = av_find_stream_info(ic);
2419 if (err < 0) {
2420 fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
2421 ret = -1;
2422 goto fail;
2423 }
899681cd
BA
2424 if(ic->pb)
2425 ic->pb->eof_reached= 0; //FIXME hack, ffplay maybe should not use url_feof() to test for the end
72ea344b 2426
70a4764d
MN
2427 if(seek_by_bytes<0)
2428 seek_by_bytes= !!(ic->iformat->flags & AVFMT_TS_DISCONT);
2429
72ea344b
FB
2430 /* if seeking requested, we execute it */
2431 if (start_time != AV_NOPTS_VALUE) {
2432 int64_t timestamp;
2433
2434 timestamp = start_time;
2435 /* add the stream start time */
2436 if (ic->start_time != AV_NOPTS_VALUE)
2437 timestamp += ic->start_time;
4ed29207 2438 ret = avformat_seek_file(ic, -1, INT64_MIN, timestamp, INT64_MAX, 0);
72ea344b 2439 if (ret < 0) {
115329f1 2440 fprintf(stderr, "%s: could not seek to position %0.3f\n",
72ea344b
FB
2441 is->filename, (double)timestamp / AV_TIME_BASE);
2442 }
2443 }
72ea344b 2444
01310af2 2445 for(i = 0; i < ic->nb_streams; i++) {
9f7490a0
MN
2446 AVStream *st= ic->streams[i];
2447 AVCodecContext *avctx = st->codec;
3f3fe38d 2448 ic->streams[i]->discard = AVDISCARD_ALL;
72415b2a 2449 if(avctx->codec_type >= (unsigned)AVMEDIA_TYPE_NB)
8ef94840 2450 continue;
256ab3ed
MN
2451 if(st_count[avctx->codec_type]++ != wanted_stream[avctx->codec_type] && wanted_stream[avctx->codec_type] >= 0)
2452 continue;
2453
9f7490a0
MN
2454 if(st_best_packet_count[avctx->codec_type] >= st->codec_info_nb_frames)
2455 continue;
2456 st_best_packet_count[avctx->codec_type]= st->codec_info_nb_frames;
2457
fe74099a 2458 switch(avctx->codec_type) {
72415b2a 2459 case AVMEDIA_TYPE_AUDIO:
256ab3ed 2460 if (!audio_disable)
72415b2a 2461 st_index[AVMEDIA_TYPE_AUDIO] = i;
01310af2 2462 break;
72415b2a
SS
2463 case AVMEDIA_TYPE_VIDEO:
2464 case AVMEDIA_TYPE_SUBTITLE:
256ab3ed
MN
2465 if (!video_disable)
2466 st_index[avctx->codec_type] = i;
16a59a7b 2467 break;
01310af2
FB
2468 default:
2469 break;
2470 }
2471 }
2472 if (show_status) {
2473 dump_format(ic, 0, is->filename, 0);
2474 }
2475
2476 /* open the streams */
72415b2a
SS
2477 if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) {
2478 stream_component_open(is, st_index[AVMEDIA_TYPE_AUDIO]);
01310af2
FB
2479 }
2480
077a8d61 2481 ret=-1;
72415b2a
SS
2482 if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {
2483 ret= stream_component_open(is, st_index[AVMEDIA_TYPE_VIDEO]);
077a8d61 2484 }
d38c9e7a 2485 is->refresh_tid = SDL_CreateThread(refresh_thread, is);
077a8d61 2486 if(ret<0) {
01310af2 2487 if (!display_disable)
bf8ae197 2488 is->show_audio = 2;
01310af2
FB
2489 }
2490
72415b2a
SS
2491 if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) {
2492 stream_component_open(is, st_index[AVMEDIA_TYPE_SUBTITLE]);
16a59a7b
BA
2493 }
2494
01310af2 2495 if (is->video_stream < 0 && is->audio_stream < 0) {
638c9d91
FB
2496 fprintf(stderr, "%s: could not open codecs\n", is->filename);
2497 ret = -1;
01310af2
FB
2498 goto fail;
2499 }
2500
2501 for(;;) {
2502 if (is->abort_request)
2503 break;
416e3508
FB
2504 if (is->paused != is->last_paused) {
2505 is->last_paused = is->paused;
72ea344b 2506 if (is->paused)
f5668147 2507 is->read_pause_return= av_read_pause(ic);
72ea344b
FB
2508 else
2509 av_read_play(ic);
416e3508 2510 }
2f642393
AJ
2511#if CONFIG_RTSP_DEMUXER
2512 if (is->paused && !strcmp(ic->iformat->name, "rtsp")) {
416e3508
FB
2513 /* wait 10 ms to avoid trying to get another packet */
2514 /* XXX: horrible */
2515 SDL_Delay(10);
2516 continue;
2517 }
400738b1 2518#endif
72ea344b 2519 if (is->seek_req) {
8e606cc8 2520 int64_t seek_target= is->seek_pos;
4ed29207
MN
2521 int64_t seek_min= is->seek_rel > 0 ? seek_target - is->seek_rel + 2: INT64_MIN;
2522 int64_t seek_max= is->seek_rel < 0 ? seek_target - is->seek_rel - 2: INT64_MAX;
2523//FIXME the +-2 is because rounding is not done in the correct direction when generating
2524// the seek_pos/seek_rel variables
8e606cc8 2525
4ed29207 2526 ret = avformat_seek_file(is->ic, -1, seek_min, seek_target, seek_max, is->seek_flags);
72ea344b
FB
2527 if (ret < 0) {
2528 fprintf(stderr, "%s: error while seeking\n", is->ic->filename);
e6c0297f
MN
2529 }else{
2530 if (is->audio_stream >= 0) {
2531 packet_queue_flush(&is->audioq);
39c6a118 2532 packet_queue_put(&is->audioq, &flush_pkt);
e6c0297f 2533 }
72ce053b
IC
2534 if (is->subtitle_stream >= 0) {
2535 packet_queue_flush(&is->subtitleq);
39c6a118 2536 packet_queue_put(&is->subtitleq, &flush_pkt);
72ce053b 2537 }
e6c0297f
MN
2538 if (is->video_stream >= 0) {
2539 packet_queue_flush(&is->videoq);
39c6a118 2540 packet_queue_put(&is->videoq, &flush_pkt);
e6c0297f 2541 }
72ea344b
FB
2542 }
2543 is->seek_req = 0;
e45aeb38 2544 eof= 0;
72ea344b 2545 }
416e3508 2546
01310af2 2547 /* if the queue are full, no need to read more */
79ee4683
MN
2548 if ( is->audioq.size + is->videoq.size + is->subtitleq.size > MAX_QUEUE_SIZE
2549 || ( (is->audioq .size > MIN_AUDIOQ_SIZE || is->audio_stream<0)
2550 && (is->videoq .nb_packets > MIN_FRAMES || is->video_stream<0)
2551 && (is->subtitleq.nb_packets > MIN_FRAMES || is->subtitle_stream<0))) {
01310af2
FB
2552 /* wait 10 ms */
2553 SDL_Delay(10);
2554 continue;
2555 }
75bb7b0a 2556 if(url_feof(ic->pb) || eof) {
9dc41767 2557 if(is->video_stream >= 0){
26534fe8
MN
2558 av_init_packet(pkt);
2559 pkt->data=NULL;
2560 pkt->size=0;
2561 pkt->stream_index= is->video_stream;
2562 packet_queue_put(&is->videoq, pkt);
9dc41767 2563 }
b4083171 2564 SDL_Delay(10);
1922c0a7
RK
2565 if(is->audioq.size + is->videoq.size + is->subtitleq.size ==0){
2566 if(loop!=1 && (!loop || --loop)){
2567 stream_seek(cur_stream, start_time != AV_NOPTS_VALUE ? start_time : 0, 0, 0);
2568 }else if(autoexit){
2569 ret=AVERROR_EOF;
2570 goto fail;
2571 }
2d1653b0 2572 }
600a331c
MN
2573 continue;
2574 }
72ea344b 2575 ret = av_read_frame(ic, pkt);
01310af2 2576 if (ret < 0) {
75bb7b0a
MN
2577 if (ret == AVERROR_EOF)
2578 eof=1;
2579 if (url_ferror(ic->pb))
bb270c08 2580 break;
75bb7b0a
MN
2581 SDL_Delay(100); /* wait for user event */
2582 continue;
01310af2 2583 }
d834d63b
RK
2584 /* check if packet is in play range specified by user, then queue, otherwise discard */
2585 pkt_in_play_range = duration == AV_NOPTS_VALUE ||
2586 (pkt->pts - ic->streams[pkt->stream_index]->start_time) *
2587 av_q2d(ic->streams[pkt->stream_index]->time_base) -
2588 (double)(start_time != AV_NOPTS_VALUE ? start_time : 0)/1000000
2589 <= ((double)duration/1000000);
2590 if (pkt->stream_index == is->audio_stream && pkt_in_play_range) {
01310af2 2591 packet_queue_put(&is->audioq, pkt);
d834d63b 2592 } else if (pkt->stream_index == is->video_stream && pkt_in_play_range) {
01310af2 2593 packet_queue_put(&is->videoq, pkt);
d834d63b 2594 } else if (pkt->stream_index == is->subtitle_stream && pkt_in_play_range) {
72ce053b 2595 packet_queue_put(&is->subtitleq, pkt);
01310af2
FB
2596 } else {
2597 av_free_packet(pkt);
2598 }
2599 }
2600 /* wait until the end */
2601 while (!is->abort_request) {
2602 SDL_Delay(100);
2603 }
2604
638c9d91 2605 ret = 0;
01310af2 2606 fail:
416e3508
FB
2607 /* disable interrupting */
2608 global_video_state = NULL;
2609
01310af2
FB
2610 /* close each stream */
2611 if (is->audio_stream >= 0)
2612 stream_component_close(is, is->audio_stream);
2613 if (is->video_stream >= 0)
2614 stream_component_close(is, is->video_stream);
72ce053b
IC
2615 if (is->subtitle_stream >= 0)
2616 stream_component_close(is, is->subtitle_stream);
638c9d91
FB
2617 if (is->ic) {
2618 av_close_input_file(is->ic);
2619 is->ic = NULL; /* safety */
2620 }
416e3508
FB
2621 url_set_interrupt_cb(NULL);
2622
638c9d91
FB
2623 if (ret != 0) {
2624 SDL_Event event;
115329f1 2625
638c9d91
FB
2626 event.type = FF_QUIT_EVENT;
2627 event.user.data1 = is;
2628 SDL_PushEvent(&event);
2629 }
01310af2
FB
2630 return 0;
2631}
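
/*
 * Illustrative sketch, not part of the original file: the pkt_in_play_range
 * test above, written out as a stand-alone predicate.  All times are in
 * seconds here, whereas the real code keeps start_time/duration in
 * AV_TIME_BASE units and rescales pkt->pts with the stream time_base; the
 * parameter names are local to this example.
 */
static int example_in_play_range(double pkt_time_from_stream_start,
                                 double seek_start, double duration,
                                 int duration_given)
{
    if (!duration_given)          /* no -t option: every packet is in range */
        return 1;
    return pkt_time_from_stream_start - seek_start <= duration;
}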
2632
638c9d91 2633static VideoState *stream_open(const char *filename, AVInputFormat *iformat)
01310af2
FB
2634{
2635 VideoState *is;
2636
2637 is = av_mallocz(sizeof(VideoState));
2638 if (!is)
2639 return NULL;
f7d78f36 2640 av_strlcpy(is->filename, filename, sizeof(is->filename));
638c9d91 2641 is->iformat = iformat;
01310af2
FB
2642 is->ytop = 0;
2643 is->xleft = 0;
2644
2645 /* start video display */
2646 is->pictq_mutex = SDL_CreateMutex();
2647 is->pictq_cond = SDL_CreateCond();
115329f1 2648
72ce053b
IC
2649 is->subpq_mutex = SDL_CreateMutex();
2650 is->subpq_cond = SDL_CreateCond();
115329f1 2651
638c9d91 2652 is->av_sync_type = av_sync_type;
01310af2
FB
2653 is->parse_tid = SDL_CreateThread(decode_thread, is);
2654 if (!is->parse_tid) {
2655 av_free(is);
2656 return NULL;
2657 }
2658 return is;
2659}
2660
2661static void stream_close(VideoState *is)
2662{
2663 VideoPicture *vp;
2664 int i;
2665 /* XXX: use a special url_shutdown call to abort parse cleanly */
2666 is->abort_request = 1;
2667 SDL_WaitThread(is->parse_tid, NULL);
d38c9e7a 2668 SDL_WaitThread(is->refresh_tid, NULL);
01310af2
FB
2669
2670 /* free all pictures */
2671 for(i=0;i<VIDEO_PICTURE_QUEUE_SIZE; i++) {
2672 vp = &is->pictq[i];
917d2bb3
MN
2673#if CONFIG_AVFILTER
2674 if (vp->picref) {
2675 avfilter_unref_pic(vp->picref);
2676 vp->picref = NULL;
2677 }
2678#endif
01310af2
FB
2679 if (vp->bmp) {
2680 SDL_FreeYUVOverlay(vp->bmp);
2681 vp->bmp = NULL;
2682 }
2683 }
2684 SDL_DestroyMutex(is->pictq_mutex);
2685 SDL_DestroyCond(is->pictq_cond);
72ce053b
IC
2686 SDL_DestroyMutex(is->subpq_mutex);
2687 SDL_DestroyCond(is->subpq_cond);
917d2bb3 2688#if !CONFIG_AVFILTER
3ac56e28
MS
2689 if (is->img_convert_ctx)
2690 sws_freeContext(is->img_convert_ctx);
917d2bb3 2691#endif
7c5ab145 2692 av_free(is);
01310af2
FB
2693}
2694
7b49ce2e 2695static void stream_cycle_channel(VideoState *is, int codec_type)
638c9d91
FB
2696{
2697 AVFormatContext *ic = is->ic;
2698 int start_index, stream_index;
2699 AVStream *st;
2700
72415b2a 2701 if (codec_type == AVMEDIA_TYPE_VIDEO)
638c9d91 2702 start_index = is->video_stream;
72415b2a 2703 else if (codec_type == AVMEDIA_TYPE_AUDIO)
638c9d91 2704 start_index = is->audio_stream;
72ce053b
IC
2705 else
2706 start_index = is->subtitle_stream;
72415b2a 2707 if (start_index < (codec_type == AVMEDIA_TYPE_SUBTITLE ? -1 : 0))
638c9d91
FB
2708 return;
2709 stream_index = start_index;
2710 for(;;) {
2711 if (++stream_index >= is->ic->nb_streams)
72ce053b 2712 {
72415b2a 2713 if (codec_type == AVMEDIA_TYPE_SUBTITLE)
72ce053b
IC
2714 {
2715 stream_index = -1;
2716 goto the_end;
2717 } else
2718 stream_index = 0;
2719 }
638c9d91
FB
2720 if (stream_index == start_index)
2721 return;
2722 st = ic->streams[stream_index];
01f4895c 2723 if (st->codec->codec_type == codec_type) {
638c9d91
FB
2724 /* check that parameters are OK */
2725 switch(codec_type) {
72415b2a 2726 case AVMEDIA_TYPE_AUDIO:
01f4895c
MN
2727 if (st->codec->sample_rate != 0 &&
2728 st->codec->channels != 0)
638c9d91
FB
2729 goto the_end;
2730 break;
72415b2a
SS
2731 case AVMEDIA_TYPE_VIDEO:
2732 case AVMEDIA_TYPE_SUBTITLE:
638c9d91
FB
2733 goto the_end;
2734 default:
2735 break;
2736 }
2737 }
2738 }
2739 the_end:
2740 stream_component_close(is, start_index);
2741 stream_component_open(is, stream_index);
2742}
2743
2744
7b49ce2e 2745static void toggle_full_screen(void)
01310af2 2746{
01310af2 2747 is_full_screen = !is_full_screen;
29f3b38a
MR
2748 if (!fs_screen_width) {
2749 /* use default SDL method */
fb84155b 2750// SDL_WM_ToggleFullScreen(screen);
01310af2 2751 }
fb84155b 2752 video_open(cur_stream);
01310af2
FB
2753}
2754
7b49ce2e 2755static void toggle_pause(void)
01310af2
FB
2756{
2757 if (cur_stream)
2758 stream_pause(cur_stream);
bba04f1e
WH
2759 step = 0;
2760}
2761
7b49ce2e 2762static void step_to_next_frame(void)
bba04f1e
WH
2763{
2764 if (cur_stream) {
19cc524a 2765 /* if the stream is paused unpause it, then step */
bba04f1e 2766 if (cur_stream->paused)
19cc524a 2767 stream_pause(cur_stream);
bba04f1e
WH
2768 }
2769 step = 1;
01310af2
FB
2770}
2771
7b49ce2e 2772static void do_exit(void)
01310af2 2773{
7c5ab145 2774 int i;
01310af2
FB
2775 if (cur_stream) {
2776 stream_close(cur_stream);
2777 cur_stream = NULL;
2778 }
72415b2a 2779 for (i = 0; i < AVMEDIA_TYPE_NB; i++)
7c5ab145
MS
2780 av_free(avcodec_opts[i]);
2781 av_free(avformat_opts);
2782 av_free(sws_opts);
917d2bb3
MN
2783#if CONFIG_AVFILTER
2784 avfilter_uninit();
2785#endif
01310af2
FB
2786 if (show_status)
2787 printf("\n");
2788 SDL_Quit();
2789 exit(0);
2790}
2791
7b49ce2e 2792static void toggle_audio_display(void)
01310af2
FB
2793{
2794 if (cur_stream) {
f5968788 2795 int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
12eeda34 2796 cur_stream->show_audio = (cur_stream->show_audio + 1) % 3;
f5968788
MN
2797 fill_rectangle(screen,
2798 cur_stream->xleft, cur_stream->ytop, cur_stream->width, cur_stream->height,
2799 bgcolor);
2800 SDL_UpdateRect(screen, cur_stream->xleft, cur_stream->ytop, cur_stream->width, cur_stream->height);
01310af2
FB
2801 }
2802}
2803
2804/* handle an event sent by the GUI */
7b49ce2e 2805static void event_loop(void)
01310af2
FB
2806{
2807 SDL_Event event;
a11d11aa 2808 double incr, pos, frac;
01310af2
FB
2809
2810 for(;;) {
d52ec002 2811 double x;
01310af2
FB
2812 SDL_WaitEvent(&event);
2813 switch(event.type) {
2814 case SDL_KEYDOWN:
066ce8c9
AS
2815 if (exit_on_keydown) {
2816 do_exit();
2817 break;
2818 }
01310af2
FB
2819 switch(event.key.keysym.sym) {
2820 case SDLK_ESCAPE:
2821 case SDLK_q:
2822 do_exit();
2823 break;
2824 case SDLK_f:
2825 toggle_full_screen();
2826 break;
2827 case SDLK_p:
2828 case SDLK_SPACE:
2829 toggle_pause();
2830 break;
bba04f1e
WH
2831 case SDLK_s: //S: Step to next frame
2832 step_to_next_frame();
2833 break;
01310af2 2834 case SDLK_a:
115329f1 2835 if (cur_stream)
72415b2a 2836 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_AUDIO);
638c9d91
FB
2837 break;
2838 case SDLK_v:
115329f1 2839 if (cur_stream)
72415b2a 2840 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_VIDEO);
638c9d91 2841 break;
72ce053b 2842 case SDLK_t:
115329f1 2843 if (cur_stream)
72415b2a 2844 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_SUBTITLE);
72ce053b 2845 break;
638c9d91 2846 case SDLK_w:
01310af2
FB
2847 toggle_audio_display();
2848 break;
72ea344b
FB
2849 case SDLK_LEFT:
2850 incr = -10.0;
2851 goto do_seek;
2852 case SDLK_RIGHT:
2853 incr = 10.0;
2854 goto do_seek;
2855 case SDLK_UP:
2856 incr = 60.0;
2857 goto do_seek;
2858 case SDLK_DOWN:
2859 incr = -60.0;
2860 do_seek:
2861 if (cur_stream) {
94b594c6 2862 if (seek_by_bytes) {
1a620dd7
MN
2863 if (cur_stream->video_stream >= 0 && cur_stream->video_current_pos>=0){
2864 pos= cur_stream->video_current_pos;
2865 }else if(cur_stream->audio_stream >= 0 && cur_stream->audio_pkt.pos>=0){
2866 pos= cur_stream->audio_pkt.pos;
2867 }else
2868 pos = url_ftell(cur_stream->ic->pb);
94b594c6 2869 if (cur_stream->ic->bit_rate)
566cd2cb 2870 incr *= cur_stream->ic->bit_rate / 8.0;
94b594c6
SH
2871 else
2872 incr *= 180000.0;
2873 pos += incr;
2ef46053 2874 stream_seek(cur_stream, pos, incr, 1);
94b594c6
SH
2875 } else {
2876 pos = get_master_clock(cur_stream);
2877 pos += incr;
2ef46053 2878 stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), (int64_t)(incr * AV_TIME_BASE), 0);
94b594c6 2879 }
72ea344b
FB
2880 }
2881 break;
01310af2
FB
2882 default:
2883 break;
2884 }
2885 break;
a11d11aa 2886 case SDL_MOUSEBUTTONDOWN:
066ce8c9
AS
2887 if (exit_on_mousedown) {
2888 do_exit();
2889 break;
2890 }
d52ec002
MN
2891 case SDL_MOUSEMOTION:
2892 if(event.type ==SDL_MOUSEBUTTONDOWN){
2893 x= event.button.x;
2894 }else{
2895 if(event.motion.state != SDL_PRESSED)
2896 break;
2897 x= event.motion.x;
2898 }
bb270c08 2899 if (cur_stream) {
2ef46053
MN
2900 if(seek_by_bytes || cur_stream->ic->duration<=0){
2901 uint64_t size= url_fsize(cur_stream->ic->pb);
d52ec002 2902 stream_seek(cur_stream, size*x/cur_stream->width, 0, 1);
2ef46053 2903 }else{
6371c81a
MN
2904 int64_t ts;
2905 int ns, hh, mm, ss;
2906 int tns, thh, tmm, tss;
2907 tns = cur_stream->ic->duration/1000000LL;
2908 thh = tns/3600;
2909 tmm = (tns%3600)/60;
2910 tss = (tns%60);
d52ec002 2911 frac = x/cur_stream->width;
6371c81a
MN
2912 ns = frac*tns;
2913 hh = ns/3600;
2914 mm = (ns%3600)/60;
2915 ss = (ns%60);
2916 fprintf(stderr, "Seek to %2.0f%% (%2d:%02d:%02d) of total duration (%2d:%02d:%02d) \n", frac*100,
2917 hh, mm, ss, thh, tmm, tss);
2918 ts = frac*cur_stream->ic->duration;
2919 if (cur_stream->ic->start_time != AV_NOPTS_VALUE)
2920 ts += cur_stream->ic->start_time;
2921 stream_seek(cur_stream, ts, 0, 0);
2ef46053 2922 }
bb270c08
DB
2923 }
2924 break;
01310af2
FB
2925 case SDL_VIDEORESIZE:
2926 if (cur_stream) {
115329f1 2927 screen = SDL_SetVideoMode(event.resize.w, event.resize.h, 0,
01310af2 2928 SDL_HWSURFACE|SDL_RESIZABLE|SDL_ASYNCBLIT|SDL_HWACCEL);
c57d3469
MN
2929 screen_width = cur_stream->width = event.resize.w;
2930 screen_height= cur_stream->height= event.resize.h;
01310af2
FB
2931 }
2932 break;
2933 case SDL_QUIT:
638c9d91 2934 case FF_QUIT_EVENT:
01310af2
FB
2935 do_exit();
2936 break;
2937 case FF_ALLOC_EVENT:
fccb19e3 2938 video_open(event.user.data1);
01310af2
FB
2939 alloc_picture(event.user.data1);
2940 break;
2941 case FF_REFRESH_EVENT:
2942 video_refresh_timer(event.user.data1);
d38c9e7a 2943 cur_stream->refresh=0;
01310af2
FB
2944 break;
2945 default:
2946 break;
2947 }
2948 }
2949}
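
/*
 * Illustrative sketch, not part of the original file: the mouse handler above
 * maps a click at horizontal pixel x to a timestamp as frac * duration plus
 * the container start time (or to the same byte fraction of the file when
 * seeking by bytes).  Stand-alone version for the timestamp case; times are
 * int64_t in AV_TIME_BASE (microsecond) units, and the real code skips
 * start_time when it is AV_NOPTS_VALUE.
 */
#include <stdint.h>

static int64_t example_click_to_timestamp(int x, int window_width,
                                          int64_t duration, int64_t start_time)
{
    double frac = (double)x / window_width;   /* 0.0 at the left edge, 1.0 at the right */
    return start_time + (int64_t)(frac * duration);
}

/* a click at 25% of the window width in a two-hour file seeks to start_time + 30 min */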
2950
e4b89522
LW
2951static void opt_frame_size(const char *arg)
2952{
126b638e 2953 if (av_parse_video_size(&frame_width, &frame_height, arg) < 0) {
e4b89522
LW
2954 fprintf(stderr, "Incorrect frame size\n");
2955 exit(1);
2956 }
2957 if ((frame_width % 2) != 0 || (frame_height % 2) != 0) {
2958 fprintf(stderr, "Frame size must be a multiple of 2\n");
2959 exit(1);
2960 }
2961}
2962
a5b3b5f6 2963static int opt_width(const char *opt, const char *arg)
01310af2 2964{
a5b3b5f6
SS
2965 screen_width = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
2966 return 0;
01310af2
FB
2967}
2968
a5b3b5f6 2969static int opt_height(const char *opt, const char *arg)
01310af2 2970{
a5b3b5f6
SS
2971 screen_height = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
2972 return 0;
01310af2
FB
2973}
2974
2975static void opt_format(const char *arg)
2976{
2977 file_iformat = av_find_input_format(arg);
2978 if (!file_iformat) {
2979 fprintf(stderr, "Unknown input format: %s\n", arg);
2980 exit(1);
2981 }
2982}
61890b02 2983
e4b89522
LW
2984static void opt_frame_pix_fmt(const char *arg)
2985{
718c7b18 2986 frame_pix_fmt = av_get_pix_fmt(arg);
e4b89522
LW
2987}
2988
b81d6235 2989static int opt_sync(const char *opt, const char *arg)
638c9d91
FB
2990{
2991 if (!strcmp(arg, "audio"))
2992 av_sync_type = AV_SYNC_AUDIO_MASTER;
2993 else if (!strcmp(arg, "video"))
2994 av_sync_type = AV_SYNC_VIDEO_MASTER;
2995 else if (!strcmp(arg, "ext"))
2996 av_sync_type = AV_SYNC_EXTERNAL_CLOCK;
aab1b7e5 2997 else {
b81d6235 2998 fprintf(stderr, "Unknown value for %s: %s\n", opt, arg);
aab1b7e5
SS
2999 exit(1);
3000 }
b81d6235 3001 return 0;
638c9d91
FB
3002}
3003
e11bc2c6 3004static int opt_seek(const char *opt, const char *arg)
72ea344b 3005{
e11bc2c6
SS
3006 start_time = parse_time_or_die(opt, arg, 1);
3007 return 0;
72ea344b
FB
3008}
3009
d834d63b
RK
3010static int opt_duration(const char *opt, const char *arg)
3011{
3012 duration = parse_time_or_die(opt, arg, 1);
3013 return 0;
3014}
3015
a5b3b5f6 3016static int opt_debug(const char *opt, const char *arg)
e26a8335 3017{
a309073b 3018 av_log_set_level(99);
a5b3b5f6
SS
3019 debug = parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
3020 return 0;
e26a8335 3021}
115329f1 3022
a5b3b5f6 3023static int opt_vismv(const char *opt, const char *arg)
0c9bbaec 3024{
a5b3b5f6
SS
3025 debug_mv = parse_number_or_die(opt, arg, OPT_INT64, INT_MIN, INT_MAX);
3026 return 0;
0c9bbaec 3027}
c62c07d3 3028
a5b3b5f6 3029static int opt_thread_count(const char *opt, const char *arg)
c62c07d3 3030{
a5b3b5f6 3031 thread_count= parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
b250f9c6 3032#if !HAVE_THREADS
c62c07d3
MN
3033 fprintf(stderr, "Warning: not compiled with thread support, using thread emulation\n");
3034#endif
a5b3b5f6 3035 return 0;
c62c07d3 3036}
115329f1 3037
358061f6 3038static const OptionDef options[] = {
992f8eae 3039#include "cmdutils_common_opts.h"
a5b3b5f6
SS
3040 { "x", HAS_ARG | OPT_FUNC2, {(void*)opt_width}, "force displayed width", "width" },
3041 { "y", HAS_ARG | OPT_FUNC2, {(void*)opt_height}, "force displayed height", "height" },
e4b89522 3042 { "s", HAS_ARG | OPT_VIDEO, {(void*)opt_frame_size}, "set frame size (WxH or abbreviation)", "size" },
638c9d91 3043 { "fs", OPT_BOOL, {(void*)&is_full_screen}, "force full screen" },
01310af2
FB
3044 { "an", OPT_BOOL, {(void*)&audio_disable}, "disable audio" },
3045 { "vn", OPT_BOOL, {(void*)&video_disable}, "disable video" },
72415b2a
SS
3046 { "ast", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_AUDIO]}, "select desired audio stream", "stream_number" },
3047 { "vst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_VIDEO]}, "select desired video stream", "stream_number" },
3048 { "sst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_SUBTITLE]}, "select desired subtitle stream", "stream_number" },
e11bc2c6 3049 { "ss", HAS_ARG | OPT_FUNC2, {(void*)&opt_seek}, "seek to a given position in seconds", "pos" },
d834d63b 3050 { "t", HAS_ARG | OPT_FUNC2, {(void*)&opt_duration}, "play \"duration\" seconds of audio/video", "duration" },
674fe163 3051 { "bytes", OPT_INT | HAS_ARG, {(void*)&seek_by_bytes}, "seek by bytes 0=off 1=on -1=auto", "val" },
01310af2
FB
3052 { "nodisp", OPT_BOOL, {(void*)&display_disable}, "disable graphical display" },
3053 { "f", HAS_ARG, {(void*)opt_format}, "force format", "fmt" },
e4b89522 3054 { "pix_fmt", HAS_ARG | OPT_EXPERT | OPT_VIDEO, {(void*)opt_frame_pix_fmt}, "set pixel format", "format" },
98ae6acf 3055 { "stats", OPT_BOOL | OPT_EXPERT, {(void*)&show_status}, "show status", "" },
a5b3b5f6 3056 { "debug", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_debug}, "print specific debug info", "" },
6387c3e6 3057 { "bug", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&workaround_bugs}, "workaround bugs", "" },
a5b3b5f6 3058 { "vismv", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_vismv}, "visualize motion vectors", "" },
6fc5b059 3059 { "fast", OPT_BOOL | OPT_EXPERT, {(void*)&fast}, "non spec compliant optimizations", "" },
30bc6613 3060 { "genpts", OPT_BOOL | OPT_EXPERT, {(void*)&genpts}, "generate pts", "" },
59055363 3061 { "drp", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&decoder_reorder_pts}, "let decoder reorder pts 0=off 1=on -1=auto", ""},
178fcca8 3062 { "lowres", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&lowres}, "", "" },
8c3eba7c
MN
3063 { "skiploop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_loop_filter}, "", "" },
3064 { "skipframe", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_frame}, "", "" },
3065 { "skipidct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_idct}, "", "" },
178fcca8 3066 { "idct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&idct}, "set idct algo", "algo" },
047599a4 3067 { "er", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_recognition}, "set error detection threshold (0-4)", "threshold" },
1b51e051 3068 { "ec", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_concealment}, "set error concealment options", "bit_mask" },
b81d6235 3069 { "sync", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_sync}, "set audio-video sync. type (type=audio/video/ext)", "type" },
a5b3b5f6 3070 { "threads", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_thread_count}, "thread count", "count" },
2d1653b0 3071 { "autoexit", OPT_BOOL | OPT_EXPERT, {(void*)&autoexit}, "exit at the end", "" },
066ce8c9
AS
3072 { "exitonkeydown", OPT_BOOL | OPT_EXPERT, {(void*)&exit_on_keydown}, "exit on key down", "" },
3073 { "exitonmousedown", OPT_BOOL | OPT_EXPERT, {(void*)&exit_on_mousedown}, "exit on mouse down", "" },
1922c0a7 3074 { "loop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&loop}, "set number of times the playback shall be looped", "loop count" },
d38c9e7a 3075 { "framedrop", OPT_BOOL | OPT_EXPERT, {(void*)&framedrop}, "drop frames when cpu is too slow", "" },
076db5ed 3076 { "window_title", OPT_STRING | HAS_ARG, {(void*)&window_title}, "set window title", "window title" },
917d2bb3 3077#if CONFIG_AVFILTER
09ed11e5 3078 { "vf", OPT_STRING | HAS_ARG, {(void*)&vfilters}, "video filters", "filter list" },
917d2bb3 3079#endif
2b3da32f 3080 { "rdftspeed", OPT_INT | HAS_ARG| OPT_AUDIO | OPT_EXPERT, {(void*)&rdftspeed}, "rdft speed", "msecs" },
e43d7a18 3081 { "default", OPT_FUNC2 | HAS_ARG | OPT_AUDIO | OPT_VIDEO | OPT_EXPERT, {(void*)opt_default}, "generic catch all option", "" },
01310af2
FB
3082 { NULL, },
3083};
3084
0c2a18cb 3085static void show_usage(void)
01310af2 3086{
27daa420
RP
3087 printf("Simple media player\n");
3088 printf("usage: ffplay [options] input_file\n");
01310af2 3089 printf("\n");
0c2a18cb
RP
3090}
3091
3092static void show_help(void)
3093{
3094 show_usage();
02d504a7
FB
3095 show_help_options(options, "Main options:\n",
3096 OPT_EXPERT, 0);
3097 show_help_options(options, "\nAdvanced options:\n",
3098 OPT_EXPERT, OPT_EXPERT);
01310af2
FB
3099 printf("\nWhile playing:\n"
3100 "q, ESC quit\n"
3101 "f toggle full screen\n"
3102 "p, SPC pause\n"
638c9d91
FB
3103 "a cycle audio channel\n"
3104 "v cycle video channel\n"
72ce053b 3105 "t cycle subtitle channel\n"
638c9d91 3106 "w show audio waves\n"
79f8b328 3107 "s activate frame-step mode\n"
72ea344b
FB
3108 "left/right seek backward/forward 10 seconds\n"
3109 "down/up seek backward/forward 1 minute\n"
a11d11aa 3110 "mouse click seek to percentage in file corresponding to fraction of width\n"
01310af2 3111 );
01310af2
FB
3112}
3113
358061f6 3114static void opt_input_file(const char *filename)
01310af2 3115{
07a70b38
SS
3116 if (input_filename) {
3117 fprintf(stderr, "Argument '%s' provided as input filename, but '%s' was already specified.\n",
3118 filename, input_filename);
3119 exit(1);
3120 }
e8d83e1c 3121 if (!strcmp(filename, "-"))
9fcfc0b7 3122 filename = "pipe:";
01310af2
FB
3123 input_filename = filename;
3124}
3125
3126/* Called from the main */
3127int main(int argc, char **argv)
3128{
e43d7a18 3129 int flags, i;
115329f1 3130
01310af2 3131 /* register all codecs, demux and protocols */
c721d803 3132 avcodec_register_all();
9b157b0c 3133#if CONFIG_AVDEVICE
c721d803 3134 avdevice_register_all();
9b157b0c 3135#endif
917d2bb3
MN
3136#if CONFIG_AVFILTER
3137 avfilter_register_all();
3138#endif
01310af2
FB
3139 av_register_all();
3140
72415b2a 3141 for(i=0; i<AVMEDIA_TYPE_NB; i++){
636f1c4c 3142 avcodec_opts[i]= avcodec_alloc_context2(i);
e43d7a18 3143 }
8e2fd8e1 3144 avformat_opts = avformat_alloc_context();
917d2bb3 3145#if !CONFIG_AVFILTER
e43d7a18 3146 sws_opts = sws_getContext(16,16,0, 16,16,0, sws_flags, NULL,NULL,NULL);
917d2bb3 3147#endif
e43d7a18 3148
ea9c581f 3149 show_banner();
4cfac5bc 3150
f5da5c93 3151 parse_options(argc, argv, options, opt_input_file);
01310af2 3152
aab1b7e5 3153 if (!input_filename) {
7f11e745 3154 show_usage();
7a7da6b4 3155 fprintf(stderr, "An input file must be specified\n");
7f11e745 3156 fprintf(stderr, "Use -h to get full help or, even better, run 'man ffplay'\n");
aab1b7e5
SS
3157 exit(1);
3158 }
01310af2
FB
3159
3160 if (display_disable) {
3161 video_disable = 1;
3162 }
31319a8c 3163 flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
c97f5402
DB
3164#if !defined(__MINGW32__) && !defined(__APPLE__)
3165 flags |= SDL_INIT_EVENTTHREAD; /* Not supported on Windows or Mac OS X */
31319a8c 3166#endif
01310af2 3167 if (SDL_Init (flags)) {
05ab0b76 3168 fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
01310af2
FB
3169 exit(1);
3170 }
3171
3172 if (!display_disable) {
b250f9c6 3173#if HAVE_SDL_VIDEO_SIZE
3ef17d62
MR
3174 const SDL_VideoInfo *vi = SDL_GetVideoInfo();
3175 fs_screen_width = vi->current_w;
3176 fs_screen_height = vi->current_h;
29f3b38a 3177#endif
01310af2
FB
3178 }
3179
3180 SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
01310af2
FB
3181 SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
3182 SDL_EventState(SDL_USEREVENT, SDL_IGNORE);
3183
39c6a118
MN
3184 av_init_packet(&flush_pkt);
3185 flush_pkt.data= "FLUSH";
3186
638c9d91 3187 cur_stream = stream_open(input_filename, file_iformat);
01310af2
FB
3188
3189 event_loop();
3190
3191 /* never returns */
3192
3193 return 0;
3194}