rtsp: Check if the rtp stream actually has an RTPDemuxContext
[libav.git] / ffplay.c
/*
 * FFplay : Simple Media Player based on the FFmpeg libraries
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#define _XOPEN_SOURCE 600

#include "config.h"
#include <inttypes.h>
#include <math.h>
#include <limits.h>
#include "libavutil/avstring.h"
#include "libavutil/colorspace.h"
#include "libavutil/pixdesc.h"
#include "libavcore/imgutils.h"
#include "libavcore/parseutils.h"
#include "libavcore/samplefmt.h"
#include "libavformat/avformat.h"
#include "libavdevice/avdevice.h"
#include "libswscale/swscale.h"
#include "libavcodec/audioconvert.h"
#include "libavcodec/opt.h"
#include "libavcodec/avfft.h"

#if CONFIG_AVFILTER
# include "libavfilter/avfilter.h"
# include "libavfilter/avfiltergraph.h"
#endif

#include "cmdutils.h"

#include <SDL.h>
#include <SDL_thread.h>

#ifdef __MINGW32__
#undef main /* We don't want SDL to override our main() */
#endif

#include <unistd.h>
#include <assert.h>

const char program_name[] = "FFplay";
const int program_birth_year = 2003;

//#define DEBUG_SYNC

#define MAX_QUEUE_SIZE (15 * 1024 * 1024)
#define MIN_AUDIOQ_SIZE (20 * 16 * 1024)
#define MIN_FRAMES 5

/* SDL audio buffer size, in samples. Should be small to have precise
   A/V sync as SDL does not have hardware buffer fullness info. */
#define SDL_AUDIO_BUFFER_SIZE 1024

/* no AV sync correction is done if below the AV sync threshold */
#define AV_SYNC_THRESHOLD 0.01
/* no AV correction is done if too big error */
#define AV_NOSYNC_THRESHOLD 10.0

#define FRAME_SKIP_FACTOR 0.05

/* maximum audio speed change to get correct sync */
#define SAMPLE_CORRECTION_PERCENT_MAX 10

/* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */
#define AUDIO_DIFF_AVG_NB 20

/* NOTE: the size must be big enough to compensate the hardware audio buffersize size */
#define SAMPLE_ARRAY_SIZE (2*65536)

static int sws_flags = SWS_BICUBIC;

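/* Thread-safe FIFO of demuxed packets, shared between the demuxer (parse)
   thread and the audio/video/subtitle decoder threads. */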
typedef struct PacketQueue {
    AVPacketList *first_pkt, *last_pkt;
    int nb_packets;
    int size;
    int abort_request;
    SDL_mutex *mutex;
    SDL_cond *cond;
} PacketQueue;

#define VIDEO_PICTURE_QUEUE_SIZE 2
#define SUBPICTURE_QUEUE_SIZE 4

typedef struct VideoPicture {
    double pts;          ///<presentation time stamp for this picture
    double target_clock; ///<av_gettime() time at which this should be displayed ideally
    int64_t pos;         ///<byte position in file
    SDL_Overlay *bmp;
    int width, height; /* source height & width */
    int allocated;
    enum PixelFormat pix_fmt;

#if CONFIG_AVFILTER
    AVFilterBufferRef *picref;
#endif
} VideoPicture;

typedef struct SubPicture {
    double pts; /* presentation time stamp for this picture */
    AVSubtitle sub;
} SubPicture;

enum {
    AV_SYNC_AUDIO_MASTER, /* default choice */
    AV_SYNC_VIDEO_MASTER,
    AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */
};

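/* All state for one opened media file: demuxer context, per-stream packet
   queues, decoded picture/subtitle queues and the clocks used for A/V sync. */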
typedef struct VideoState {
    SDL_Thread *parse_tid;
    SDL_Thread *video_tid;
    SDL_Thread *refresh_tid;
    AVInputFormat *iformat;
    int no_background;
    int abort_request;
    int paused;
    int last_paused;
    int seek_req;
    int seek_flags;
    int64_t seek_pos;
    int64_t seek_rel;
    int read_pause_return;
    AVFormatContext *ic;
    int dtg_active_format;

    int audio_stream;

    int av_sync_type;
    double external_clock; /* external clock base */
    int64_t external_clock_time;

    double audio_clock;
    double audio_diff_cum; /* used for AV difference average computation */
    double audio_diff_avg_coef;
    double audio_diff_threshold;
    int audio_diff_avg_count;
    AVStream *audio_st;
    PacketQueue audioq;
    int audio_hw_buf_size;
    /* samples output by the codec. we reserve more space for avsync
       compensation */
    DECLARE_ALIGNED(16,uint8_t,audio_buf1)[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
    DECLARE_ALIGNED(16,uint8_t,audio_buf2)[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
    uint8_t *audio_buf;
    unsigned int audio_buf_size; /* in bytes */
    int audio_buf_index; /* in bytes */
    AVPacket audio_pkt_temp;
    AVPacket audio_pkt;
    enum AVSampleFormat audio_src_fmt;
    AVAudioConvert *reformat_ctx;

    int show_audio; /* if true, display audio samples */
    int16_t sample_array[SAMPLE_ARRAY_SIZE];
    int sample_array_index;
    int last_i_start;
    RDFTContext *rdft;
    int rdft_bits;
    FFTSample *rdft_data;
    int xpos;

    SDL_Thread *subtitle_tid;
    int subtitle_stream;
    int subtitle_stream_changed;
    AVStream *subtitle_st;
    PacketQueue subtitleq;
    SubPicture subpq[SUBPICTURE_QUEUE_SIZE];
    int subpq_size, subpq_rindex, subpq_windex;
    SDL_mutex *subpq_mutex;
    SDL_cond *subpq_cond;

    double frame_timer;
    double frame_last_pts;
    double frame_last_delay;
    double video_clock;             ///<pts of last decoded frame / predicted pts of next decoded frame
    int video_stream;
    AVStream *video_st;
    PacketQueue videoq;
    double video_current_pts;       ///<current displayed pts (different from video_clock if frame fifos are used)
    double video_current_pts_drift; ///<video_current_pts - time (av_gettime) at which we updated video_current_pts - used to have running video pts
    int64_t video_current_pos;      ///<current displayed file pos
    VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
    int pictq_size, pictq_rindex, pictq_windex;
    SDL_mutex *pictq_mutex;
    SDL_cond *pictq_cond;
#if !CONFIG_AVFILTER
    struct SwsContext *img_convert_ctx;
#endif

    //    QETimer *video_timer;
    char filename[1024];
    int width, height, xleft, ytop;

    PtsCorrectionContext pts_ctx;

#if CONFIG_AVFILTER
    AVFilterContext *out_video_filter; ///<the last filter in the video chain
#endif

    float skip_frames;
    float skip_frames_index;
    int refresh;
} VideoState;

static void show_help(void);
static int audio_write_get_buf_size(VideoState *is);

/* options specified by the user */
static AVInputFormat *file_iformat;
static const char *input_filename;
static const char *window_title;
static int fs_screen_width;
static int fs_screen_height;
static int screen_width = 0;
static int screen_height = 0;
static int frame_width = 0;
static int frame_height = 0;
static enum PixelFormat frame_pix_fmt = PIX_FMT_NONE;
static int audio_disable;
static int video_disable;
static int wanted_stream[AVMEDIA_TYPE_NB]={
    [AVMEDIA_TYPE_AUDIO]=-1,
    [AVMEDIA_TYPE_VIDEO]=-1,
    [AVMEDIA_TYPE_SUBTITLE]=-1,
};
static int seek_by_bytes=-1;
static int display_disable;
static int show_status = 1;
static int av_sync_type = AV_SYNC_AUDIO_MASTER;
static int64_t start_time = AV_NOPTS_VALUE;
static int64_t duration = AV_NOPTS_VALUE;
static int debug = 0;
static int debug_mv = 0;
static int step = 0;
static int thread_count = 1;
static int workaround_bugs = 1;
static int fast = 0;
static int genpts = 0;
static int lowres = 0;
static int idct = FF_IDCT_AUTO;
static enum AVDiscard skip_frame= AVDISCARD_DEFAULT;
static enum AVDiscard skip_idct= AVDISCARD_DEFAULT;
static enum AVDiscard skip_loop_filter= AVDISCARD_DEFAULT;
static int error_recognition = FF_ER_CAREFUL;
static int error_concealment = 3;
static int decoder_reorder_pts= -1;
static int autoexit;
static int exit_on_keydown;
static int exit_on_mousedown;
static int loop=1;
static int framedrop=1;

static int rdftspeed=20;
#if CONFIG_AVFILTER
static char *vfilters = NULL;
#endif

/* current context */
static int is_full_screen;
static VideoState *cur_stream;
static int64_t audio_callback_time;

static AVPacket flush_pkt;

#define FF_ALLOC_EVENT   (SDL_USEREVENT)
#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
#define FF_QUIT_EVENT    (SDL_USEREVENT + 2)

static SDL_Surface *screen;

static int packet_queue_put(PacketQueue *q, AVPacket *pkt);

/* packet queue handling */
static void packet_queue_init(PacketQueue *q)
{
    memset(q, 0, sizeof(PacketQueue));
    q->mutex = SDL_CreateMutex();
    q->cond = SDL_CreateCond();
    packet_queue_put(q, &flush_pkt);
}

static void packet_queue_flush(PacketQueue *q)
{
    AVPacketList *pkt, *pkt1;

    SDL_LockMutex(q->mutex);
    for(pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
        pkt1 = pkt->next;
        av_free_packet(&pkt->pkt);
        av_freep(&pkt);
    }
    q->last_pkt = NULL;
    q->first_pkt = NULL;
    q->nb_packets = 0;
    q->size = 0;
    SDL_UnlockMutex(q->mutex);
}

static void packet_queue_end(PacketQueue *q)
{
    packet_queue_flush(q);
    SDL_DestroyMutex(q->mutex);
    SDL_DestroyCond(q->cond);
}

static int packet_queue_put(PacketQueue *q, AVPacket *pkt)
{
    AVPacketList *pkt1;

    /* duplicate the packet */
    if (pkt!=&flush_pkt && av_dup_packet(pkt) < 0)
        return -1;

    pkt1 = av_malloc(sizeof(AVPacketList));
    if (!pkt1)
        return -1;
    pkt1->pkt = *pkt;
    pkt1->next = NULL;


    SDL_LockMutex(q->mutex);

    if (!q->last_pkt)

        q->first_pkt = pkt1;
    else
        q->last_pkt->next = pkt1;
    q->last_pkt = pkt1;
    q->nb_packets++;
    q->size += pkt1->pkt.size + sizeof(*pkt1);
    /* XXX: should duplicate packet data in DV case */
    SDL_CondSignal(q->cond);

    SDL_UnlockMutex(q->mutex);
    return 0;
}

static void packet_queue_abort(PacketQueue *q)
{
    SDL_LockMutex(q->mutex);

    q->abort_request = 1;

    SDL_CondSignal(q->cond);

    SDL_UnlockMutex(q->mutex);
}

/* return < 0 if aborted, 0 if no packet and > 0 if packet. */
static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
{
    AVPacketList *pkt1;
    int ret;

    SDL_LockMutex(q->mutex);

    for(;;) {
        if (q->abort_request) {
            ret = -1;
            break;
        }

        pkt1 = q->first_pkt;
        if (pkt1) {
            q->first_pkt = pkt1->next;
            if (!q->first_pkt)
                q->last_pkt = NULL;
            q->nb_packets--;
            q->size -= pkt1->pkt.size + sizeof(*pkt1);
            *pkt = pkt1->pkt;
            av_free(pkt1);
            ret = 1;
            break;
        } else if (!block) {
            ret = 0;
            break;
        } else {
            SDL_CondWait(q->cond, q->mutex);
        }
    }
    SDL_UnlockMutex(q->mutex);
    return ret;
}

static inline void fill_rectangle(SDL_Surface *screen,
                                  int x, int y, int w, int h, int color)
{
    SDL_Rect rect;
    rect.x = x;
    rect.y = y;
    rect.w = w;
    rect.h = h;
    SDL_FillRect(screen, &rect, color);
}

#if 0
/* draw only the border of a rectangle */
void fill_border(VideoState *s, int x, int y, int w, int h, int color)
{
    int w1, w2, h1, h2;

    /* fill the background */
    w1 = x;
    if (w1 < 0)
        w1 = 0;
    w2 = s->width - (x + w);
    if (w2 < 0)
        w2 = 0;
    h1 = y;
    if (h1 < 0)
        h1 = 0;
    h2 = s->height - (y + h);
    if (h2 < 0)
        h2 = 0;
    fill_rectangle(screen,
                   s->xleft, s->ytop,
                   w1, s->height,
                   color);
    fill_rectangle(screen,
                   s->xleft + s->width - w2, s->ytop,
                   w2, s->height,
                   color);
    fill_rectangle(screen,
                   s->xleft + w1, s->ytop,
                   s->width - w1 - w2, h1,
                   color);
    fill_rectangle(screen,
                   s->xleft + w1, s->ytop + s->height - h2,
                   s->width - w1 - w2, h2,
                   color);
}
#endif

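/* Pixel helpers used by blend_subrect() below to alpha-blend palettized
   (YUVA) subtitle rectangles into the YV12 video overlay. */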
#define ALPHA_BLEND(a, oldp, newp, s)\
((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s))

#define RGBA_IN(r, g, b, a, s)\
{\
    unsigned int v = ((const uint32_t *)(s))[0];\
    a = (v >> 24) & 0xff;\
    r = (v >> 16) & 0xff;\
    g = (v >> 8) & 0xff;\
    b = v & 0xff;\
}

#define YUVA_IN(y, u, v, a, s, pal)\
{\
    unsigned int val = ((const uint32_t *)(pal))[*(const uint8_t*)(s)];\
    a = (val >> 24) & 0xff;\
    y = (val >> 16) & 0xff;\
    u = (val >> 8) & 0xff;\
    v = val & 0xff;\
}

#define YUVA_OUT(d, y, u, v, a)\
{\
    ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\
}


#define BPP 1

static void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh)
{
    int wrap, wrap3, width2, skip2;
    int y, u, v, a, u1, v1, a1, w, h;
    uint8_t *lum, *cb, *cr;
    const uint8_t *p;
    const uint32_t *pal;
    int dstx, dsty, dstw, dsth;

    dstw = av_clip(rect->w, 0, imgw);
    dsth = av_clip(rect->h, 0, imgh);
    dstx = av_clip(rect->x, 0, imgw - dstw);
    dsty = av_clip(rect->y, 0, imgh - dsth);
    lum = dst->data[0] + dsty * dst->linesize[0];
    cb = dst->data[1] + (dsty >> 1) * dst->linesize[1];
    cr = dst->data[2] + (dsty >> 1) * dst->linesize[2];

    width2 = ((dstw + 1) >> 1) + (dstx & ~dstw & 1);
    skip2 = dstx >> 1;
    wrap = dst->linesize[0];
    wrap3 = rect->pict.linesize[0];
    p = rect->pict.data[0];
    pal = (const uint32_t *)rect->pict.data[1];  /* Now in YCrCb! */

    if (dsty & 1) {
        lum += dstx;
        cb += skip2;
        cr += skip2;

        if (dstx & 1) {
            YUVA_IN(y, u, v, a, p, pal);
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
            cb++;
            cr++;
            lum++;
            p += BPP;
        }
        for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
            YUVA_IN(y, u, v, a, p, pal);
            u1 = u;
            v1 = v;
            a1 = a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);

            YUVA_IN(y, u, v, a, p + BPP, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
            cb++;
            cr++;
            p += 2 * BPP;
            lum += 2;
        }
        if (w) {
            YUVA_IN(y, u, v, a, p, pal);
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
            p++;
            lum++;
        }
        p += wrap3 - dstw * BPP;
        lum += wrap - dstw - dstx;
        cb += dst->linesize[1] - width2 - skip2;
        cr += dst->linesize[2] - width2 - skip2;
    }
    for(h = dsth - (dsty & 1); h >= 2; h -= 2) {
        lum += dstx;
        cb += skip2;
        cr += skip2;

        if (dstx & 1) {
            YUVA_IN(y, u, v, a, p, pal);
            u1 = u;
            v1 = v;
            a1 = a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            p += wrap3;
            lum += wrap;
            YUVA_IN(y, u, v, a, p, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
            cb++;
            cr++;
            p += -wrap3 + BPP;
            lum += -wrap + 1;
        }
        for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
            YUVA_IN(y, u, v, a, p, pal);
            u1 = u;
            v1 = v;
            a1 = a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);

            YUVA_IN(y, u, v, a, p + BPP, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
            p += wrap3;
            lum += wrap;

            YUVA_IN(y, u, v, a, p, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);

            YUVA_IN(y, u, v, a, p + BPP, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);

            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);

            cb++;
            cr++;
            p += -wrap3 + 2 * BPP;
            lum += -wrap + 2;
        }
        if (w) {
            YUVA_IN(y, u, v, a, p, pal);
            u1 = u;
            v1 = v;
            a1 = a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            p += wrap3;
            lum += wrap;
            YUVA_IN(y, u, v, a, p, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
            cb++;
            cr++;
            p += -wrap3 + BPP;
            lum += -wrap + 1;
        }
        p += wrap3 + (wrap3 - dstw * BPP);
        lum += wrap + (wrap - dstw - dstx);
        cb += dst->linesize[1] - width2 - skip2;
        cr += dst->linesize[2] - width2 - skip2;
    }
    /* handle odd height */
    if (h) {
        lum += dstx;
        cb += skip2;
        cr += skip2;

        if (dstx & 1) {
            YUVA_IN(y, u, v, a, p, pal);
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
            cb++;
            cr++;
            lum++;
            p += BPP;
        }
        for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
            YUVA_IN(y, u, v, a, p, pal);
            u1 = u;
            v1 = v;
            a1 = a;
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);

            YUVA_IN(y, u, v, a, p + BPP, pal);
            u1 += u;
            v1 += v;
            a1 += a;
            lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
            cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u, 1);
            cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v, 1);
            cb++;
            cr++;
            p += 2 * BPP;
            lum += 2;
        }
        if (w) {
            YUVA_IN(y, u, v, a, p, pal);
            lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
            cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
            cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
        }
    }
}

static void free_subpicture(SubPicture *sp)
{
    avsubtitle_free(&sp->sub);
}

static void video_image_display(VideoState *is)
{
    VideoPicture *vp;
    SubPicture *sp;
    AVPicture pict;
    float aspect_ratio;
    int width, height, x, y;
    SDL_Rect rect;
    int i;

    vp = &is->pictq[is->pictq_rindex];
    if (vp->bmp) {
#if CONFIG_AVFILTER
        if (vp->picref->video->pixel_aspect.num == 0)
            aspect_ratio = 0;
        else
            aspect_ratio = av_q2d(vp->picref->video->pixel_aspect);
#else

        /* XXX: use variable in the frame */
        if (is->video_st->sample_aspect_ratio.num)
            aspect_ratio = av_q2d(is->video_st->sample_aspect_ratio);
        else if (is->video_st->codec->sample_aspect_ratio.num)
            aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio);
        else
            aspect_ratio = 0;
#endif
        if (aspect_ratio <= 0.0)
            aspect_ratio = 1.0;
        aspect_ratio *= (float)vp->width / (float)vp->height;
        /* if an active format is indicated, then it overrides the
           mpeg format */
#if 0
        if (is->video_st->codec->dtg_active_format != is->dtg_active_format) {
            is->dtg_active_format = is->video_st->codec->dtg_active_format;
            printf("dtg_active_format=%d\n", is->dtg_active_format);
        }
#endif
#if 0
        switch(is->video_st->codec->dtg_active_format) {
        case FF_DTG_AFD_SAME:
        default:
            /* nothing to do */
            break;
        case FF_DTG_AFD_4_3:
            aspect_ratio = 4.0 / 3.0;
            break;
        case FF_DTG_AFD_16_9:
            aspect_ratio = 16.0 / 9.0;
            break;
        case FF_DTG_AFD_14_9:
            aspect_ratio = 14.0 / 9.0;
            break;
        case FF_DTG_AFD_4_3_SP_14_9:
            aspect_ratio = 14.0 / 9.0;
            break;
        case FF_DTG_AFD_16_9_SP_14_9:
            aspect_ratio = 14.0 / 9.0;
            break;
        case FF_DTG_AFD_SP_4_3:
            aspect_ratio = 4.0 / 3.0;
            break;
        }
#endif

        if (is->subtitle_st)
        {
            if (is->subpq_size > 0)
            {
                sp = &is->subpq[is->subpq_rindex];

                if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000))
                {
                    SDL_LockYUVOverlay (vp->bmp);

                    pict.data[0] = vp->bmp->pixels[0];
                    pict.data[1] = vp->bmp->pixels[2];
                    pict.data[2] = vp->bmp->pixels[1];

                    pict.linesize[0] = vp->bmp->pitches[0];
                    pict.linesize[1] = vp->bmp->pitches[2];
                    pict.linesize[2] = vp->bmp->pitches[1];

                    for (i = 0; i < sp->sub.num_rects; i++)
                        blend_subrect(&pict, sp->sub.rects[i],
                                      vp->bmp->w, vp->bmp->h);

                    SDL_UnlockYUVOverlay (vp->bmp);
                }
            }
        }


        /* XXX: we suppose the screen has a 1.0 pixel ratio */
        height = is->height;
        width = ((int)rint(height * aspect_ratio)) & ~1;
        if (width > is->width) {
            width = is->width;
            height = ((int)rint(width / aspect_ratio)) & ~1;
        }
        x = (is->width - width) / 2;
        y = (is->height - height) / 2;
        if (!is->no_background) {
            /* fill the background */
            //  fill_border(is, x, y, width, height, QERGB(0x00, 0x00, 0x00));
        } else {
            is->no_background = 0;
        }
        rect.x = is->xleft + x;
        rect.y = is->ytop  + y;
        rect.w = width;
        rect.h = height;
        SDL_DisplayYUVOverlay(vp->bmp, &rect);
    } else {
#if 0
        fill_rectangle(screen,
                       is->xleft, is->ytop, is->width, is->height,
                       QERGB(0x00, 0x00, 0x00));
#endif
    }
}

static inline int compute_mod(int a, int b)
{
    a = a % b;
    if (a >= 0)
        return a;
    else
        return a + b;
}

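/* Audio-only visualization: draws either a per-channel waveform
   (show_audio == 1) or an RDFT-based spectrum of the most recent samples. */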
static void video_audio_display(VideoState *s)
{
    int i, i_start, x, y1, y, ys, delay, n, nb_display_channels;
    int ch, channels, h, h2, bgcolor, fgcolor;
    int16_t time_diff;
    int rdft_bits, nb_freq;

    for(rdft_bits=1; (1<<rdft_bits)<2*s->height; rdft_bits++)
        ;
    nb_freq= 1<<(rdft_bits-1);

    /* compute display index : center on currently output samples */
    channels = s->audio_st->codec->channels;
    nb_display_channels = channels;
    if (!s->paused) {
        int data_used= s->show_audio==1 ? s->width : (2*nb_freq);
        n = 2 * channels;
        delay = audio_write_get_buf_size(s);
        delay /= n;

        /* to be more precise, we take into account the time spent since
           the last buffer computation */
        if (audio_callback_time) {
            time_diff = av_gettime() - audio_callback_time;
            delay -= (time_diff * s->audio_st->codec->sample_rate) / 1000000;
        }

        delay += 2*data_used;
        if (delay < data_used)
            delay = data_used;

        i_start= x = compute_mod(s->sample_array_index - delay * channels, SAMPLE_ARRAY_SIZE);
        if(s->show_audio==1){
            h= INT_MIN;
            for(i=0; i<1000; i+=channels){
                int idx= (SAMPLE_ARRAY_SIZE + x - i) % SAMPLE_ARRAY_SIZE;
                int a= s->sample_array[idx];
                int b= s->sample_array[(idx + 4*channels)%SAMPLE_ARRAY_SIZE];
                int c= s->sample_array[(idx + 5*channels)%SAMPLE_ARRAY_SIZE];
                int d= s->sample_array[(idx + 9*channels)%SAMPLE_ARRAY_SIZE];
                int score= a-d;
                if(h<score && (b^c)<0){
                    h= score;
                    i_start= idx;
                }
            }
        }

        s->last_i_start = i_start;
    } else {
        i_start = s->last_i_start;
    }

    bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
    if(s->show_audio==1){
        fill_rectangle(screen,
                       s->xleft, s->ytop, s->width, s->height,
                       bgcolor);

        fgcolor = SDL_MapRGB(screen->format, 0xff, 0xff, 0xff);

        /* total height for one channel */
        h = s->height / nb_display_channels;
        /* graph height / 2 */
        h2 = (h * 9) / 20;
        for(ch = 0;ch < nb_display_channels; ch++) {
            i = i_start + ch;
            y1 = s->ytop + ch * h + (h / 2); /* position of center line */
            for(x = 0; x < s->width; x++) {
                y = (s->sample_array[i] * h2) >> 15;
                if (y < 0) {
                    y = -y;
                    ys = y1 - y;
                } else {
                    ys = y1;
                }
                fill_rectangle(screen,
                               s->xleft + x, ys, 1, y,
                               fgcolor);
                i += channels;
                if (i >= SAMPLE_ARRAY_SIZE)
                    i -= SAMPLE_ARRAY_SIZE;
            }
        }

        fgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0xff);

        for(ch = 1;ch < nb_display_channels; ch++) {
            y = s->ytop + ch * h;
            fill_rectangle(screen,
                           s->xleft, y, s->width, 1,
                           fgcolor);
        }
        SDL_UpdateRect(screen, s->xleft, s->ytop, s->width, s->height);
    }else{
        nb_display_channels= FFMIN(nb_display_channels, 2);
        if(rdft_bits != s->rdft_bits){
            av_rdft_end(s->rdft);
            av_free(s->rdft_data);
            s->rdft = av_rdft_init(rdft_bits, DFT_R2C);
            s->rdft_bits= rdft_bits;
            s->rdft_data= av_malloc(4*nb_freq*sizeof(*s->rdft_data));
        }
        {
            FFTSample *data[2];
            for(ch = 0;ch < nb_display_channels; ch++) {
                data[ch] = s->rdft_data + 2*nb_freq*ch;
                i = i_start + ch;
                for(x = 0; x < 2*nb_freq; x++) {
                    double w= (x-nb_freq)*(1.0/nb_freq);
                    data[ch][x]= s->sample_array[i]*(1.0-w*w);
                    i += channels;
                    if (i >= SAMPLE_ARRAY_SIZE)
                        i -= SAMPLE_ARRAY_SIZE;
                }
                av_rdft_calc(s->rdft, data[ch]);
            }
            //least efficient way to do this, we should of course directly access it but its more than fast enough
            for(y=0; y<s->height; y++){
                double w= 1/sqrt(nb_freq);
                int a= sqrt(w*sqrt(data[0][2*y+0]*data[0][2*y+0] + data[0][2*y+1]*data[0][2*y+1]));
                int b= (nb_display_channels == 2 ) ? sqrt(w*sqrt(data[1][2*y+0]*data[1][2*y+0]
                       + data[1][2*y+1]*data[1][2*y+1])) : a;
                a= FFMIN(a,255);
                b= FFMIN(b,255);
                fgcolor = SDL_MapRGB(screen->format, a, b, (a+b)/2);

                fill_rectangle(screen,
                            s->xpos, s->height-y, 1, 1,
                            fgcolor);
            }
        }
        SDL_UpdateRect(screen, s->xpos, s->ytop, 1, s->height);
        s->xpos++;
        if(s->xpos >= s->width)
            s->xpos= s->xleft;
    }
}

static int video_open(VideoState *is){
    int flags = SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL;
    int w,h;

    if(is_full_screen) flags |= SDL_FULLSCREEN;
    else               flags |= SDL_RESIZABLE;

    if (is_full_screen && fs_screen_width) {
        w = fs_screen_width;
        h = fs_screen_height;
    } else if(!is_full_screen && screen_width){
        w = screen_width;
        h = screen_height;
#if CONFIG_AVFILTER
    }else if (is->out_video_filter && is->out_video_filter->inputs[0]){
        w = is->out_video_filter->inputs[0]->w;
        h = is->out_video_filter->inputs[0]->h;
#else
    }else if (is->video_st && is->video_st->codec->width){
        w = is->video_st->codec->width;
        h = is->video_st->codec->height;
#endif
    } else {
        w = 640;
        h = 480;
    }
    if(screen && is->width == screen->w && screen->w == w
       && is->height== screen->h && screen->h == h)
        return 0;

#ifndef __APPLE__
    screen = SDL_SetVideoMode(w, h, 0, flags);
#else
    /* setting bits_per_pixel = 0 or 32 causes blank video on OS X */
    screen = SDL_SetVideoMode(w, h, 24, flags);
#endif
    if (!screen) {
        fprintf(stderr, "SDL: could not set video mode - exiting\n");
        return -1;
    }
    if (!window_title)
        window_title = input_filename;
    SDL_WM_SetCaption(window_title, window_title);

    is->width = screen->w;
    is->height = screen->h;

    return 0;
}

/* display the current picture, if any */
static void video_display(VideoState *is)
{
    if(!screen)
        video_open(cur_stream);
    if (is->audio_st && is->show_audio)
        video_audio_display(is);
    else if (is->video_st)
        video_image_display(is);
}

static int refresh_thread(void *opaque)
{
    VideoState *is= opaque;
    while(!is->abort_request){
        SDL_Event event;
        event.type = FF_REFRESH_EVENT;
        event.user.data1 = opaque;
        if(!is->refresh){
            is->refresh=1;
            SDL_PushEvent(&event);
        }
        usleep(is->audio_st && is->show_audio ? rdftspeed*1000 : 5000); //FIXME ideally we should wait the correct time but SDLs event passing is so slow it would be silly
    }
    return 0;
}

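/* The audio, video and external clocks below all return a position in
   seconds; get_master_clock() selects whichever one is the sync master. */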
/* get the current audio clock value */
static double get_audio_clock(VideoState *is)
{
    double pts;
    int hw_buf_size, bytes_per_sec;
    pts = is->audio_clock;
    hw_buf_size = audio_write_get_buf_size(is);
    bytes_per_sec = 0;
    if (is->audio_st) {
        bytes_per_sec = is->audio_st->codec->sample_rate *
            2 * is->audio_st->codec->channels;
    }
    if (bytes_per_sec)
        pts -= (double)hw_buf_size / bytes_per_sec;
    return pts;
}

/* get the current video clock value */
static double get_video_clock(VideoState *is)
{
    if (is->paused) {
        return is->video_current_pts;
    } else {
        return is->video_current_pts_drift + av_gettime() / 1000000.0;
    }
}

/* get the current external clock value */
static double get_external_clock(VideoState *is)
{
    int64_t ti;
    ti = av_gettime();
    return is->external_clock + ((ti - is->external_clock_time) * 1e-6);
}

/* get the current master clock value */
static double get_master_clock(VideoState *is)
{
    double val;

    if (is->av_sync_type == AV_SYNC_VIDEO_MASTER) {
        if (is->video_st)
            val = get_video_clock(is);
        else
            val = get_audio_clock(is);
    } else if (is->av_sync_type == AV_SYNC_AUDIO_MASTER) {
        if (is->audio_st)
            val = get_audio_clock(is);
        else
            val = get_video_clock(is);
    } else {
        val = get_external_clock(is);
    }
    return val;
}

/* seek in the stream */
static void stream_seek(VideoState *is, int64_t pos, int64_t rel, int seek_by_bytes)
{
    if (!is->seek_req) {
        is->seek_pos = pos;
        is->seek_rel = rel;
        is->seek_flags &= ~AVSEEK_FLAG_BYTE;
        if (seek_by_bytes)
            is->seek_flags |= AVSEEK_FLAG_BYTE;
        is->seek_req = 1;
    }
}

/* pause or resume the video */
static void stream_pause(VideoState *is)
{
    if (is->paused) {
        is->frame_timer += av_gettime() / 1000000.0 + is->video_current_pts_drift - is->video_current_pts;
        if(is->read_pause_return != AVERROR(ENOSYS)){
            is->video_current_pts = is->video_current_pts_drift + av_gettime() / 1000000.0;
        }
        is->video_current_pts_drift = is->video_current_pts - av_gettime() / 1000000.0;
    }
    is->paused = !is->paused;
}

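/* Compute the absolute time (frame_timer) at which the given frame should be
   displayed, stretching or dropping the nominal delay to follow the master
   clock when video is not the sync master. */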
static double compute_target_time(double frame_current_pts, VideoState *is)
{
    double delay, sync_threshold, diff;

    /* compute nominal delay */
    delay = frame_current_pts - is->frame_last_pts;
    if (delay <= 0 || delay >= 10.0) {
        /* if incorrect delay, use previous one */
        delay = is->frame_last_delay;
    } else {
        is->frame_last_delay = delay;
    }
    is->frame_last_pts = frame_current_pts;

    /* update delay to follow master synchronisation source */
    if (((is->av_sync_type == AV_SYNC_AUDIO_MASTER && is->audio_st) ||
         is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
        /* if video is slave, we try to correct big delays by
           duplicating or deleting a frame */
        diff = get_video_clock(is) - get_master_clock(is);

        /* skip or repeat frame. We take into account the
           delay to compute the threshold. I still don't know
           if it is the best guess */
        sync_threshold = FFMAX(AV_SYNC_THRESHOLD, delay);
        if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
            if (diff <= -sync_threshold)
                delay = 0;
            else if (diff >= sync_threshold)
                delay = 2 * delay;
        }
    }
    is->frame_timer += delay;
#if defined(DEBUG_SYNC)
    printf("video: delay=%0.3f actual_delay=%0.3f pts=%0.3f A-V=%f\n",
            delay, actual_delay, frame_current_pts, -diff);
#endif

    return is->frame_timer;
}

/* called to display each frame */
static void video_refresh_timer(void *opaque)
{
    VideoState *is = opaque;
    VideoPicture *vp;

    SubPicture *sp, *sp2;

    if (is->video_st) {
retry:
        if (is->pictq_size == 0) {
            //nothing to do, no picture to display in the que
        } else {
            double time= av_gettime()/1000000.0;
            double next_target;
            /* dequeue the picture */
            vp = &is->pictq[is->pictq_rindex];

            if(time < vp->target_clock)
                return;
            /* update current video pts */
            is->video_current_pts = vp->pts;
            is->video_current_pts_drift = is->video_current_pts - time;
            is->video_current_pos = vp->pos;
            if(is->pictq_size > 1){
                VideoPicture *nextvp= &is->pictq[(is->pictq_rindex+1)%VIDEO_PICTURE_QUEUE_SIZE];
                assert(nextvp->target_clock >= vp->target_clock);
                next_target= nextvp->target_clock;
            }else{
                next_target= vp->target_clock + is->video_clock - vp->pts; //FIXME pass durations cleanly
            }
            if(framedrop && time > next_target){
                is->skip_frames *= 1.0 + FRAME_SKIP_FACTOR;
                if(is->pictq_size > 1 || time > next_target + 0.5){
                    /* update queue size and signal for next picture */
                    if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
                        is->pictq_rindex = 0;

                    SDL_LockMutex(is->pictq_mutex);
                    is->pictq_size--;
                    SDL_CondSignal(is->pictq_cond);
                    SDL_UnlockMutex(is->pictq_mutex);
                    goto retry;
                }
            }

            if(is->subtitle_st) {
                if (is->subtitle_stream_changed) {
                    SDL_LockMutex(is->subpq_mutex);

                    while (is->subpq_size) {
                        free_subpicture(&is->subpq[is->subpq_rindex]);

                        /* update queue size and signal for next picture */
                        if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
                            is->subpq_rindex = 0;

                        is->subpq_size--;
                    }
                    is->subtitle_stream_changed = 0;

                    SDL_CondSignal(is->subpq_cond);
                    SDL_UnlockMutex(is->subpq_mutex);
                } else {
                    if (is->subpq_size > 0) {
                        sp = &is->subpq[is->subpq_rindex];

                        if (is->subpq_size > 1)
                            sp2 = &is->subpq[(is->subpq_rindex + 1) % SUBPICTURE_QUEUE_SIZE];
                        else
                            sp2 = NULL;

                        if ((is->video_current_pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
                                || (sp2 && is->video_current_pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
                        {
                            free_subpicture(sp);

                            /* update queue size and signal for next picture */
                            if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
                                is->subpq_rindex = 0;

                            SDL_LockMutex(is->subpq_mutex);
                            is->subpq_size--;
                            SDL_CondSignal(is->subpq_cond);
                            SDL_UnlockMutex(is->subpq_mutex);
                        }
                    }
                }
            }

            /* display picture */
            video_display(is);

            /* update queue size and signal for next picture */
            if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
                is->pictq_rindex = 0;

            SDL_LockMutex(is->pictq_mutex);
            is->pictq_size--;
            SDL_CondSignal(is->pictq_cond);
            SDL_UnlockMutex(is->pictq_mutex);
        }
    } else if (is->audio_st) {
        /* draw the next audio frame */

        /* if only audio stream, then display the audio bars (better
           than nothing, just to test the implementation */

        /* display picture */
        video_display(is);
    }
    if (show_status) {
        static int64_t last_time;
        int64_t cur_time;
        int aqsize, vqsize, sqsize;
        double av_diff;

        cur_time = av_gettime();
        if (!last_time || (cur_time - last_time) >= 30000) {
            aqsize = 0;
            vqsize = 0;
            sqsize = 0;
            if (is->audio_st)
                aqsize = is->audioq.size;
            if (is->video_st)
                vqsize = is->videoq.size;
            if (is->subtitle_st)
                sqsize = is->subtitleq.size;
            av_diff = 0;
            if (is->audio_st && is->video_st)
                av_diff = get_audio_clock(is) - get_video_clock(is);
            printf("%7.2f A-V:%7.3f s:%3.1f aq=%5dKB vq=%5dKB sq=%5dB f=%"PRId64"/%"PRId64" \r",
                   get_master_clock(is), av_diff, FFMAX(is->skip_frames-1, 0), aqsize / 1024, vqsize / 1024, sqsize, is->pts_ctx.num_faulty_dts, is->pts_ctx.num_faulty_pts);
            fflush(stdout);
            last_time = cur_time;
        }
    }
}

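/* Tear down a VideoState: stop the parse and refresh threads and release the
   SDL overlays and synchronization primitives of the picture queue. */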
static void stream_close(VideoState *is)
{
    VideoPicture *vp;
    int i;
    /* XXX: use a special url_shutdown call to abort parse cleanly */
    is->abort_request = 1;
    SDL_WaitThread(is->parse_tid, NULL);
    SDL_WaitThread(is->refresh_tid, NULL);

    /* free all pictures */
    for(i=0;i<VIDEO_PICTURE_QUEUE_SIZE; i++) {
        vp = &is->pictq[i];
#if CONFIG_AVFILTER
        if (vp->picref) {
            avfilter_unref_buffer(vp->picref);
            vp->picref = NULL;
        }
#endif
        if (vp->bmp) {
            SDL_FreeYUVOverlay(vp->bmp);
            vp->bmp = NULL;
        }
    }
    SDL_DestroyMutex(is->pictq_mutex);
    SDL_DestroyCond(is->pictq_cond);
    SDL_DestroyMutex(is->subpq_mutex);
    SDL_DestroyCond(is->subpq_cond);
#if !CONFIG_AVFILTER
    if (is->img_convert_ctx)
        sws_freeContext(is->img_convert_ctx);
#endif
    av_free(is);
}

static void do_exit(void)
{
    if (cur_stream) {
        stream_close(cur_stream);
        cur_stream = NULL;
    }
    uninit_opts();
#if CONFIG_AVFILTER
    avfilter_uninit();
#endif
    if (show_status)
        printf("\n");
    SDL_Quit();
    av_log(NULL, AV_LOG_QUIET, "");
    exit(0);
}

/* allocate a picture (needs to do that in main thread to avoid
   potential locking problems */
static void alloc_picture(void *opaque)
{
    VideoState *is = opaque;
    VideoPicture *vp;

    vp = &is->pictq[is->pictq_windex];

    if (vp->bmp)
        SDL_FreeYUVOverlay(vp->bmp);

#if CONFIG_AVFILTER
    if (vp->picref)
        avfilter_unref_buffer(vp->picref);
    vp->picref = NULL;

    vp->width   = is->out_video_filter->inputs[0]->w;
    vp->height  = is->out_video_filter->inputs[0]->h;
    vp->pix_fmt = is->out_video_filter->inputs[0]->format;
#else
    vp->width   = is->video_st->codec->width;
    vp->height  = is->video_st->codec->height;
    vp->pix_fmt = is->video_st->codec->pix_fmt;
#endif

    vp->bmp = SDL_CreateYUVOverlay(vp->width, vp->height,
                                   SDL_YV12_OVERLAY,
                                   screen);
    if (!vp->bmp || vp->bmp->pitches[0] < vp->width) {
        /* SDL allocates a buffer smaller than requested if the video
         * overlay hardware is unable to support the requested size. */
        fprintf(stderr, "Error: the video system does not support an image\n"
                        "size of %dx%d pixels. Try using -lowres or -vf \"scale=w:h\"\n"
                        "to reduce the image size.\n", vp->width, vp->height );
        do_exit();
    }

    SDL_LockMutex(is->pictq_mutex);
    vp->allocated = 1;
    SDL_CondSignal(is->pictq_cond);
    SDL_UnlockMutex(is->pictq_mutex);
}

/**
 *
 * @param pts the dts of the pkt / pts of the frame and guessed if not known
 */
static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, int64_t pos)
{
    VideoPicture *vp;
    int dst_pix_fmt;
#if CONFIG_AVFILTER
    AVPicture pict_src;
#endif
    /* wait until we have space to put a new picture */
    SDL_LockMutex(is->pictq_mutex);

    if(is->pictq_size>=VIDEO_PICTURE_QUEUE_SIZE && !is->refresh)
        is->skip_frames= FFMAX(1.0 - FRAME_SKIP_FACTOR, is->skip_frames * (1.0-FRAME_SKIP_FACTOR));

    while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE &&
           !is->videoq.abort_request) {
        SDL_CondWait(is->pictq_cond, is->pictq_mutex);
    }
    SDL_UnlockMutex(is->pictq_mutex);

    if (is->videoq.abort_request)
        return -1;

    vp = &is->pictq[is->pictq_windex];

    /* alloc or resize hardware picture buffer */
    if (!vp->bmp ||
#if CONFIG_AVFILTER
        vp->width  != is->out_video_filter->inputs[0]->w ||
        vp->height != is->out_video_filter->inputs[0]->h) {
#else
        vp->width != is->video_st->codec->width ||
        vp->height != is->video_st->codec->height) {
#endif
        SDL_Event event;

        vp->allocated = 0;

        /* the allocation must be done in the main thread to avoid
           locking problems */
        event.type = FF_ALLOC_EVENT;
        event.user.data1 = is;
        SDL_PushEvent(&event);

        /* wait until the picture is allocated */
        SDL_LockMutex(is->pictq_mutex);
        while (!vp->allocated && !is->videoq.abort_request) {
            SDL_CondWait(is->pictq_cond, is->pictq_mutex);
        }
        SDL_UnlockMutex(is->pictq_mutex);

        if (is->videoq.abort_request)
            return -1;
    }

    /* if the frame is not skipped, then display it */
    if (vp->bmp) {
        AVPicture pict;
#if CONFIG_AVFILTER
        if(vp->picref)
            avfilter_unref_buffer(vp->picref);
        vp->picref = src_frame->opaque;
#endif

        /* get a pointer on the bitmap */
        SDL_LockYUVOverlay (vp->bmp);

        dst_pix_fmt = PIX_FMT_YUV420P;
        memset(&pict,0,sizeof(AVPicture));
        pict.data[0] = vp->bmp->pixels[0];
        pict.data[1] = vp->bmp->pixels[2];
        pict.data[2] = vp->bmp->pixels[1];

        pict.linesize[0] = vp->bmp->pitches[0];
        pict.linesize[1] = vp->bmp->pitches[2];
        pict.linesize[2] = vp->bmp->pitches[1];

#if CONFIG_AVFILTER
        pict_src.data[0] = src_frame->data[0];
        pict_src.data[1] = src_frame->data[1];
        pict_src.data[2] = src_frame->data[2];

        pict_src.linesize[0] = src_frame->linesize[0];
        pict_src.linesize[1] = src_frame->linesize[1];
        pict_src.linesize[2] = src_frame->linesize[2];

        //FIXME use direct rendering
        av_picture_copy(&pict, &pict_src,
                        vp->pix_fmt, vp->width, vp->height);
#else
        sws_flags = av_get_int(sws_opts, "sws_flags", NULL);
        is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx,
            vp->width, vp->height, vp->pix_fmt, vp->width, vp->height,
            dst_pix_fmt, sws_flags, NULL, NULL, NULL);
        if (is->img_convert_ctx == NULL) {
            fprintf(stderr, "Cannot initialize the conversion context\n");
            exit(1);
        }
        sws_scale(is->img_convert_ctx, src_frame->data, src_frame->linesize,
                  0, vp->height, pict.data, pict.linesize);
#endif
        /* update the bitmap content */
        SDL_UnlockYUVOverlay(vp->bmp);

        vp->pts = pts;
        vp->pos = pos;

        /* now we can update the picture count */
        if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE)
            is->pictq_windex = 0;
        SDL_LockMutex(is->pictq_mutex);
        vp->target_clock= compute_target_time(vp->pts, is);

        is->pictq_size++;
        SDL_UnlockMutex(is->pictq_mutex);
    }
    return 0;
}

/**
 * compute the exact PTS for the picture if it is omitted in the stream
 * @param pts1 the dts of the pkt / pts of the frame
 */
static int output_picture2(VideoState *is, AVFrame *src_frame, double pts1, int64_t pos)
{
    double frame_delay, pts;

    pts = pts1;

    if (pts != 0) {
        /* update video clock with pts, if present */
        is->video_clock = pts;
    } else {
        pts = is->video_clock;
    }
    /* update video clock for next frame */
    frame_delay = av_q2d(is->video_st->codec->time_base);
    /* for MPEG2, the frame can be repeated, so we update the
       clock accordingly */
    frame_delay += src_frame->repeat_pict * (frame_delay * 0.5);
    is->video_clock += frame_delay;

#if defined(DEBUG_SYNC) && 0
    printf("frame_type=%c clock=%0.3f pts=%0.3f\n",
           av_get_pict_type_char(src_frame->pict_type), pts, pts1);
#endif
    return queue_picture(is, src_frame, pts, pos);
}

static int get_video_frame(VideoState *is, AVFrame *frame, int64_t *pts, AVPacket *pkt)
{
    int len1, got_picture, i;

    if (packet_queue_get(&is->videoq, pkt, 1) < 0)
        return -1;

    if(pkt->data == flush_pkt.data){
        avcodec_flush_buffers(is->video_st->codec);

        SDL_LockMutex(is->pictq_mutex);
        //Make sure there are no long delay timers (ideally we should just flush the que but thats harder)
        for(i=0; i<VIDEO_PICTURE_QUEUE_SIZE; i++){
            is->pictq[i].target_clock= 0;
        }
        while (is->pictq_size && !is->videoq.abort_request) {
            SDL_CondWait(is->pictq_cond, is->pictq_mutex);
        }
        is->video_current_pos= -1;
        SDL_UnlockMutex(is->pictq_mutex);

        init_pts_correction(&is->pts_ctx);
        is->frame_last_pts= AV_NOPTS_VALUE;
        is->frame_last_delay = 0;
        is->frame_timer = (double)av_gettime() / 1000000.0;
        is->skip_frames= 1;
        is->skip_frames_index= 0;
        return 0;
    }

    /* NOTE: ipts is the PTS of the _first_ picture beginning in
       this packet, if any */
    is->video_st->codec->reordered_opaque= pkt->pts;
    len1 = avcodec_decode_video2(is->video_st->codec,
                                 frame, &got_picture,
                                 pkt);

    if (got_picture) {
        if (decoder_reorder_pts == -1) {
            *pts = guess_correct_pts(&is->pts_ctx, frame->reordered_opaque, pkt->dts);
        } else if (decoder_reorder_pts) {
            *pts = frame->reordered_opaque;
        } else {
            *pts = pkt->dts;
        }

        if (*pts == AV_NOPTS_VALUE) {
            *pts = 0;
        }
    }

//            if (len1 < 0)
//                break;
    if (got_picture){
        is->skip_frames_index += 1;
        if(is->skip_frames_index >= is->skip_frames){
            is->skip_frames_index -= FFMAX(is->skip_frames, 1.0);
            return 1;
        }

    }
    return 0;
}

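/* With CONFIG_AVFILTER, decoded video is fed through a small custom source
   filter ("ffplay_input") so an optional -vf filter graph can process frames
   before they are queued for display. */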
#if CONFIG_AVFILTER
typedef struct {
    VideoState *is;
    AVFrame *frame;
    int use_dr1;
} FilterPriv;

static int input_get_buffer(AVCodecContext *codec, AVFrame *pic)
{
    AVFilterContext *ctx = codec->opaque;
    AVFilterBufferRef  *ref;
    int perms = AV_PERM_WRITE;
    int i, w, h, stride[4];
    unsigned edge;

    if(pic->buffer_hints & FF_BUFFER_HINTS_VALID) {
        if(pic->buffer_hints & FF_BUFFER_HINTS_READABLE) perms |= AV_PERM_READ;
        if(pic->buffer_hints & FF_BUFFER_HINTS_PRESERVE) perms |= AV_PERM_PRESERVE;
        if(pic->buffer_hints & FF_BUFFER_HINTS_REUSABLE) perms |= AV_PERM_REUSE2;
    }
    if(pic->reference) perms |= AV_PERM_READ | AV_PERM_PRESERVE;

    w = codec->width;
    h = codec->height;
    avcodec_align_dimensions2(codec, &w, &h, stride);
    edge = codec->flags & CODEC_FLAG_EMU_EDGE ? 0 : avcodec_get_edge_width();
    w += edge << 1;
    h += edge << 1;

    if(!(ref = avfilter_get_video_buffer(ctx->outputs[0], perms, w, h)))
        return -1;

    ref->video->w = codec->width;
    ref->video->h = codec->height;
    for(i = 0; i < 4; i ++) {
        unsigned hshift = (i == 1 || i == 2) ? av_pix_fmt_descriptors[ref->format].log2_chroma_w : 0;
        unsigned vshift = (i == 1 || i == 2) ? av_pix_fmt_descriptors[ref->format].log2_chroma_h : 0;

        if (ref->data[i]) {
            ref->data[i]    += (edge >> hshift) + ((edge * ref->linesize[i]) >> vshift);
        }
        pic->data[i]     = ref->data[i];
        pic->linesize[i] = ref->linesize[i];
    }
    pic->opaque = ref;
    pic->age    = INT_MAX;
    pic->type   = FF_BUFFER_TYPE_USER;
    pic->reordered_opaque = codec->reordered_opaque;
    return 0;
}

static void input_release_buffer(AVCodecContext *codec, AVFrame *pic)
{
    memset(pic->data, 0, sizeof(pic->data));
    avfilter_unref_buffer(pic->opaque);
}

static int input_reget_buffer(AVCodecContext *codec, AVFrame *pic)
{
    AVFilterBufferRef *ref = pic->opaque;

    if (pic->data[0] == NULL) {
        pic->buffer_hints |= FF_BUFFER_HINTS_READABLE;
        return codec->get_buffer(codec, pic);
    }

    if ((codec->width != ref->video->w) || (codec->height != ref->video->h) ||
        (codec->pix_fmt != ref->format)) {
        av_log(codec, AV_LOG_ERROR, "Picture properties changed.\n");
        return -1;
    }

    pic->reordered_opaque = codec->reordered_opaque;
    return 0;
}

static int input_init(AVFilterContext *ctx, const char *args, void *opaque)
{
    FilterPriv *priv = ctx->priv;
    AVCodecContext *codec;
    if(!opaque) return -1;

    priv->is = opaque;
    codec    = priv->is->video_st->codec;
    codec->opaque = ctx;
    if(codec->codec->capabilities & CODEC_CAP_DR1) {
        priv->use_dr1 = 1;
        codec->get_buffer     = input_get_buffer;
        codec->release_buffer = input_release_buffer;
        codec->reget_buffer   = input_reget_buffer;
    }

    priv->frame = avcodec_alloc_frame();

    return 0;
}

static void input_uninit(AVFilterContext *ctx)
{
    FilterPriv *priv = ctx->priv;
    av_free(priv->frame);
}

static int input_request_frame(AVFilterLink *link)
{
    FilterPriv *priv = link->src->priv;
    AVFilterBufferRef *picref;
    int64_t pts = 0;
    AVPacket pkt;
    int ret;

    while (!(ret = get_video_frame(priv->is, priv->frame, &pts, &pkt)))
        av_free_packet(&pkt);
    if (ret < 0)
        return -1;

    if(priv->use_dr1) {
        picref = avfilter_ref_buffer(priv->frame->opaque, ~0);
    } else {
        picref = avfilter_get_video_buffer(link, AV_PERM_WRITE, link->w, link->h);
        av_image_copy(picref->data, picref->linesize,
                      priv->frame->data, priv->frame->linesize,
                      picref->format, link->w, link->h);
    }
    av_free_packet(&pkt);

    picref->pts = pts;
    picref->pos = pkt.pos;
    picref->video->pixel_aspect = priv->is->video_st->codec->sample_aspect_ratio;
    avfilter_start_frame(link, picref);
    avfilter_draw_slice(link, 0, link->h, 1);
    avfilter_end_frame(link);

    return 0;
}

static int input_query_formats(AVFilterContext *ctx)
{
    FilterPriv *priv = ctx->priv;
    enum PixelFormat pix_fmts[] = {
        priv->is->video_st->codec->pix_fmt, PIX_FMT_NONE
    };

    avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
    return 0;
}

static int input_config_props(AVFilterLink *link)
{
    FilterPriv *priv  = link->src->priv;
    AVCodecContext *c = priv->is->video_st->codec;

    link->w = c->width;
    link->h = c->height;
    link->time_base = priv->is->video_st->time_base;

    return 0;
}

static AVFilter input_filter =
{
    .name      = "ffplay_input",

    .priv_size = sizeof(FilterPriv),

    .init      = input_init,
    .uninit    = input_uninit,

    .query_formats = input_query_formats,

    .inputs    = (AVFilterPad[]) {{ .name = NULL }},
    .outputs   = (AVFilterPad[]) {{ .name = "default",
                                    .type = AVMEDIA_TYPE_VIDEO,
                                    .request_frame = input_request_frame,
                                    .config_props  = input_config_props, },
                                  { .name = NULL }},
};

#endif  /* CONFIG_AVFILTER */

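/* Video decoder thread: pulls packets from videoq, decodes them (optionally
   through the avfilter graph) and hands timed pictures to queue_picture(). */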
static int video_thread(void *arg)
{
    VideoState *is = arg;
    AVFrame *frame= avcodec_alloc_frame();
    int64_t pts_int;
    double pts;
    int ret;

#if CONFIG_AVFILTER
    int64_t pos;
    char sws_flags_str[128];
    FFSinkContext ffsink_ctx = { .pix_fmt = PIX_FMT_YUV420P };
    AVFilterContext *filt_src = NULL, *filt_out = NULL;
    AVFilterGraph *graph = avfilter_graph_alloc();
    snprintf(sws_flags_str, sizeof(sws_flags_str), "flags=%d", sws_flags);
    graph->scale_sws_opts = av_strdup(sws_flags_str);

    if (avfilter_graph_create_filter(&filt_src, &input_filter, "src",
                                     NULL, is, graph) < 0)
        goto the_end;
    if (avfilter_graph_create_filter(&filt_out, &ffsink, "out",
                                     NULL, &ffsink_ctx, graph) < 0)
        goto the_end;

    if(vfilters) {
        AVFilterInOut *outputs = av_malloc(sizeof(AVFilterInOut));
        AVFilterInOut *inputs  = av_malloc(sizeof(AVFilterInOut));

        outputs->name    = av_strdup("in");
        outputs->filter_ctx = filt_src;
        outputs->pad_idx = 0;
        outputs->next    = NULL;

        inputs->name    = av_strdup("out");
        inputs->filter_ctx = filt_out;
        inputs->pad_idx = 0;
        inputs->next    = NULL;

        if (avfilter_graph_parse(graph, vfilters, inputs, outputs, NULL) < 0)
            goto the_end;
        av_freep(&vfilters);
    } else {
        if(avfilter_link(filt_src, 0, filt_out, 0) < 0) goto the_end;
    }

    if (avfilter_graph_config(graph, NULL) < 0)
        goto the_end;

    is->out_video_filter = filt_out;
#endif

    for(;;) {
#if !CONFIG_AVFILTER
        AVPacket pkt;
#else
        AVFilterBufferRef *picref;
        AVRational tb;
#endif
        while (is->paused && !is->videoq.abort_request)
            SDL_Delay(10);
#if CONFIG_AVFILTER
        ret = get_filtered_video_frame(filt_out, frame, &picref, &tb);
        if (picref) {
            pts_int = picref->pts;
            pos     = picref->pos;
            frame->opaque = picref;
        }

        if (av_cmp_q(tb, is->video_st->time_base)) {
            int64_t pts1 = pts_int;
            pts_int = av_rescale_q(pts_int, tb, is->video_st->time_base);
            av_log(NULL, AV_LOG_DEBUG, "video_thread(): "
                   "tb:%d/%d pts:%"PRId64" -> tb:%d/%d pts:%"PRId64"\n",
                   tb.num, tb.den, pts1,
                   is->video_st->time_base.num, is->video_st->time_base.den, pts_int);
        }
#else
        ret = get_video_frame(is, frame, &pts_int, &pkt);
#endif

        if (ret < 0) goto the_end;

        if (!ret)
            continue;

        pts = pts_int*av_q2d(is->video_st->time_base);

#if CONFIG_AVFILTER
        ret = output_picture2(is, frame, pts, pos);
#else
        ret = output_picture2(is, frame, pts, pkt.pos);
        av_free_packet(&pkt);
#endif
        if (ret < 0)
            goto the_end;

        if (step)
            if (cur_stream)
                stream_pause(cur_stream);
    }
 the_end:
#if CONFIG_AVFILTER
    avfilter_graph_free(graph);
    av_freep(&graph);
#endif
    av_free(frame);
    return 0;
}

72ce053b
IC
1893static int subtitle_thread(void *arg)
1894{
1895 VideoState *is = arg;
1896 SubPicture *sp;
1897 AVPacket pkt1, *pkt = &pkt1;
1898 int len1, got_subtitle;
1899 double pts;
1900 int i, j;
1901 int r, g, b, y, u, v, a;
1902
1903 for(;;) {
1904 while (is->paused && !is->subtitleq.abort_request) {
1905 SDL_Delay(10);
1906 }
1907 if (packet_queue_get(&is->subtitleq, pkt, 1) < 0)
1908 break;
115329f1 1909
39c6a118
MN
1910 if(pkt->data == flush_pkt.data){
1911 avcodec_flush_buffers(is->subtitle_st->codec);
1912 continue;
1913 }
72ce053b
IC
1914 SDL_LockMutex(is->subpq_mutex);
1915 while (is->subpq_size >= SUBPICTURE_QUEUE_SIZE &&
1916 !is->subtitleq.abort_request) {
1917 SDL_CondWait(is->subpq_cond, is->subpq_mutex);
1918 }
1919 SDL_UnlockMutex(is->subpq_mutex);
115329f1 1920
72ce053b
IC
1921 if (is->subtitleq.abort_request)
1922 goto the_end;
115329f1 1923
72ce053b
IC
1924 sp = &is->subpq[is->subpq_windex];
1925
1926 /* NOTE: ipts is the PTS of the _first_ picture beginning in
1927 this packet, if any */
1928 pts = 0;
1929 if (pkt->pts != AV_NOPTS_VALUE)
1930 pts = av_q2d(is->subtitle_st->time_base)*pkt->pts;
1931
bea18375 1932 len1 = avcodec_decode_subtitle2(is->subtitle_st->codec,
115329f1 1933 &sp->sub, &got_subtitle,
bea18375 1934 pkt);
72ce053b
IC
1935// if (len1 < 0)
1936// break;
1937 if (got_subtitle && sp->sub.format == 0) {
1938 sp->pts = pts;
115329f1 1939
72ce053b
IC
1940 for (i = 0; i < sp->sub.num_rects; i++)
1941 {
db4fac64 1942 for (j = 0; j < sp->sub.rects[i]->nb_colors; j++)
72ce053b 1943 {
25b4c651 1944 RGBA_IN(r, g, b, a, (uint32_t*)sp->sub.rects[i]->pict.data[1] + j);
72ce053b
IC
1945 y = RGB_TO_Y_CCIR(r, g, b);
1946 u = RGB_TO_U_CCIR(r, g, b, 0);
1947 v = RGB_TO_V_CCIR(r, g, b, 0);
25b4c651 1948 YUVA_OUT((uint32_t*)sp->sub.rects[i]->pict.data[1] + j, y, u, v, a);
72ce053b
IC
1949 }
1950 }
1951
1952 /* now we can update the picture count */
1953 if (++is->subpq_windex == SUBPICTURE_QUEUE_SIZE)
1954 is->subpq_windex = 0;
1955 SDL_LockMutex(is->subpq_mutex);
1956 is->subpq_size++;
1957 SDL_UnlockMutex(is->subpq_mutex);
1958 }
1959 av_free_packet(pkt);
115329f1 1960// if (step)
72ce053b
IC
1961// if (cur_stream)
1962// stream_pause(cur_stream);
1963 }
1964 the_end:
1965 return 0;
1966}
1967
01310af2
FB
 1968/* copy samples for viewing in the audio display window */
1969static void update_sample_display(VideoState *is, short *samples, int samples_size)
1970{
1971 int size, len, channels;
1972
01f4895c 1973 channels = is->audio_st->codec->channels;
01310af2
FB
1974
1975 size = samples_size / sizeof(short);
1976 while (size > 0) {
1977 len = SAMPLE_ARRAY_SIZE - is->sample_array_index;
1978 if (len > size)
1979 len = size;
1980 memcpy(is->sample_array + is->sample_array_index, samples, len * sizeof(short));
1981 samples += len;
1982 is->sample_array_index += len;
1983 if (is->sample_array_index >= SAMPLE_ARRAY_SIZE)
1984 is->sample_array_index = 0;
1985 size -= len;
1986 }
1987}
1988
01310af2
FB
 1989/* return the new audio buffer size (samples can be added or deleted
 1990 to get better sync if the video or external clock is the master) */
115329f1 1991static int synchronize_audio(VideoState *is, short *samples,
638c9d91 1992 int samples_size1, double pts)
01310af2 1993{
638c9d91 1994 int n, samples_size;
01310af2 1995 double ref_clock;
115329f1 1996
01f4895c 1997 n = 2 * is->audio_st->codec->channels;
638c9d91 1998 samples_size = samples_size1;
01310af2 1999
01310af2 2000 /* if not master, then we try to remove or add samples to correct the clock */
01310af2 2001 if (((is->av_sync_type == AV_SYNC_VIDEO_MASTER && is->video_st) ||
638c9d91
FB
2002 is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
2003 double diff, avg_diff;
01310af2 2004 int wanted_size, min_size, max_size, nb_samples;
115329f1 2005
638c9d91
FB
2006 ref_clock = get_master_clock(is);
2007 diff = get_audio_clock(is) - ref_clock;
115329f1 2008
638c9d91
FB
2009 if (diff < AV_NOSYNC_THRESHOLD) {
2010 is->audio_diff_cum = diff + is->audio_diff_avg_coef * is->audio_diff_cum;
2011 if (is->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) {
2012 /* not enough measures to have a correct estimate */
2013 is->audio_diff_avg_count++;
2014 } else {
2015 /* estimate the A-V difference */
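 /* audio_diff_cum is an exponentially weighted sum (ratio audio_diff_avg_coef), so scaling by (1 - coef) yields an average dominated by the last AUDIO_DIFF_AVG_NB measurements */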
2016 avg_diff = is->audio_diff_cum * (1.0 - is->audio_diff_avg_coef);
2017
2018 if (fabs(avg_diff) >= is->audio_diff_threshold) {
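 /* convert the clock error from seconds to bytes: diff * sample_rate gives samples, times n bytes per sample frame */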
01f4895c 2019 wanted_size = samples_size + ((int)(diff * is->audio_st->codec->sample_rate) * n);
638c9d91 2020 nb_samples = samples_size / n;
115329f1 2021
638c9d91
FB
2022 min_size = ((nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
2023 max_size = ((nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
2024 if (wanted_size < min_size)
2025 wanted_size = min_size;
2026 else if (wanted_size > max_size)
2027 wanted_size = max_size;
115329f1 2028
638c9d91
FB
 2029 /* add or remove samples to correct the sync */
2030 if (wanted_size < samples_size) {
2031 /* remove samples */
2032 samples_size = wanted_size;
2033 } else if (wanted_size > samples_size) {
2034 uint8_t *samples_end, *q;
2035 int nb;
115329f1 2036
638c9d91
FB
2037 /* add samples */
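 /* stretch the buffer by repeating the last sample frame until the wanted size is reached */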
 2038 nb = (wanted_size - samples_size);
2039 samples_end = (uint8_t *)samples + samples_size - n;
2040 q = samples_end + n;
2041 while (nb > 0) {
2042 memcpy(q, samples_end, n);
2043 q += n;
2044 nb -= n;
2045 }
2046 samples_size = wanted_size;
2047 }
2048 }
2049#if 0
115329f1
DB
2050 printf("diff=%f adiff=%f sample_diff=%d apts=%0.3f vpts=%0.3f %f\n",
2051 diff, avg_diff, samples_size - samples_size1,
638c9d91
FB
2052 is->audio_clock, is->video_clock, is->audio_diff_threshold);
2053#endif
01310af2 2054 }
638c9d91
FB
2055 } else {
2056 /* too big difference : may be initial PTS errors, so
2057 reset A-V filter */
2058 is->audio_diff_avg_count = 0;
2059 is->audio_diff_cum = 0;
01310af2
FB
2060 }
2061 }
2062
01310af2
FB
2063 return samples_size;
2064}
2065
 2066/* decode one audio frame and return its uncompressed size */
5a4476e2 2067static int audio_decode_frame(VideoState *is, double *pts_ptr)
01310af2 2068{
bea18375 2069 AVPacket *pkt_temp = &is->audio_pkt_temp;
01310af2 2070 AVPacket *pkt = &is->audio_pkt;
abdff646 2071 AVCodecContext *dec= is->audio_st->codec;
72ea344b 2072 int n, len1, data_size;
01310af2
FB
2073 double pts;
2074
2075 for(;;) {
72ea344b 2076 /* NOTE: the audio packet can contain several frames */
bea18375 2077 while (pkt_temp->size > 0) {
5a4476e2 2078 data_size = sizeof(is->audio_buf1);
bea18375 2079 len1 = avcodec_decode_audio3(dec,
5a4476e2 2080 (int16_t *)is->audio_buf1, &data_size,
bea18375 2081 pkt_temp);
72ea344b
FB
2082 if (len1 < 0) {
2083 /* if error, we skip the frame */
bea18375 2084 pkt_temp->size = 0;
01310af2 2085 break;
72ea344b 2086 }
115329f1 2087
bea18375
TB
2088 pkt_temp->data += len1;
2089 pkt_temp->size -= len1;
72ea344b
FB
2090 if (data_size <= 0)
2091 continue;
5a4476e2
PR
2092
2093 if (dec->sample_fmt != is->audio_src_fmt) {
2094 if (is->reformat_ctx)
2095 av_audio_convert_free(is->reformat_ctx);
5d6e4c16 2096 is->reformat_ctx= av_audio_convert_alloc(AV_SAMPLE_FMT_S16, 1,
5a4476e2
PR
2097 dec->sample_fmt, 1, NULL, 0);
2098 if (!is->reformat_ctx) {
2099 fprintf(stderr, "Cannot convert %s sample format to %s sample format\n",
ba7d6e79 2100 av_get_sample_fmt_name(dec->sample_fmt),
5d6e4c16 2101 av_get_sample_fmt_name(AV_SAMPLE_FMT_S16));
5a4476e2
PR
2102 break;
2103 }
2104 is->audio_src_fmt= dec->sample_fmt;
2105 }
2106
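 /* the SDL device was opened for signed 16-bit samples, so convert whenever the decoder outputs a different sample format */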
2107 if (is->reformat_ctx) {
2108 const void *ibuf[6]= {is->audio_buf1};
2109 void *obuf[6]= {is->audio_buf2};
ba7d6e79 2110 int istride[6]= {av_get_bits_per_sample_fmt(dec->sample_fmt)/8};
5a4476e2
PR
2111 int ostride[6]= {2};
2112 int len= data_size/istride[0];
2113 if (av_audio_convert(is->reformat_ctx, obuf, ostride, ibuf, istride, len)<0) {
2114 printf("av_audio_convert() failed\n");
2115 break;
2116 }
2117 is->audio_buf= is->audio_buf2;
 2118 /* FIXME: existing code assumes that data_size equals framesize*channels*2
2119 remove this legacy cruft */
2120 data_size= len*2;
2121 }else{
2122 is->audio_buf= is->audio_buf1;
2123 }
2124
72ea344b
FB
2125 /* if no pts, then compute it */
2126 pts = is->audio_clock;
2127 *pts_ptr = pts;
abdff646 2128 n = 2 * dec->channels;
115329f1 2129 is->audio_clock += (double)data_size /
abdff646 2130 (double)(n * dec->sample_rate);
638c9d91 2131#if defined(DEBUG_SYNC)
72ea344b
FB
2132 {
2133 static double last_clock;
2134 printf("audio: delay=%0.3f clock=%0.3f pts=%0.3f\n",
2135 is->audio_clock - last_clock,
2136 is->audio_clock, pts);
2137 last_clock = is->audio_clock;
01310af2 2138 }
72ea344b
FB
2139#endif
2140 return data_size;
01310af2
FB
2141 }
2142
72ea344b
FB
2143 /* free the current packet */
2144 if (pkt->data)
01310af2 2145 av_free_packet(pkt);
115329f1 2146
72ea344b
FB
2147 if (is->paused || is->audioq.abort_request) {
2148 return -1;
2149 }
115329f1 2150
01310af2
FB
2151 /* read next packet */
2152 if (packet_queue_get(&is->audioq, pkt, 1) < 0)
2153 return -1;
39c6a118 2154 if(pkt->data == flush_pkt.data){
abdff646 2155 avcodec_flush_buffers(dec);
39c6a118
MN
2156 continue;
2157 }
2158
bea18375
TB
2159 pkt_temp->data = pkt->data;
2160 pkt_temp->size = pkt->size;
115329f1 2161
72ea344b
FB
 2162 /* update the audio clock with the pts, if available */
2163 if (pkt->pts != AV_NOPTS_VALUE) {
c0df9d75 2164 is->audio_clock = av_q2d(is->audio_st->time_base)*pkt->pts;
72ea344b 2165 }
01310af2
FB
2166 }
2167}
2168
638c9d91
FB
 2169/* get the current audio output buffer size, in samples. With SDL, we
 2170 cannot get precise information */
2171static int audio_write_get_buf_size(VideoState *is)
01310af2 2172{
b09b580b 2173 return is->audio_buf_size - is->audio_buf_index;
01310af2
FB
2174}
2175
2176
2177/* prepare a new audio buffer */
358061f6 2178static void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
01310af2
FB
2179{
2180 VideoState *is = opaque;
2181 int audio_size, len1;
2182 double pts;
2183
2184 audio_callback_time = av_gettime();
115329f1 2185
01310af2
FB
2186 while (len > 0) {
2187 if (is->audio_buf_index >= is->audio_buf_size) {
5a4476e2 2188 audio_size = audio_decode_frame(is, &pts);
01310af2
FB
2189 if (audio_size < 0) {
2190 /* if error, just output silence */
1a1078fa 2191 is->audio_buf = is->audio_buf1;
01310af2
FB
2192 is->audio_buf_size = 1024;
2193 memset(is->audio_buf, 0, is->audio_buf_size);
2194 } else {
2195 if (is->show_audio)
2196 update_sample_display(is, (int16_t *)is->audio_buf, audio_size);
115329f1 2197 audio_size = synchronize_audio(is, (int16_t *)is->audio_buf, audio_size,
01310af2
FB
2198 pts);
2199 is->audio_buf_size = audio_size;
2200 }
2201 is->audio_buf_index = 0;
2202 }
2203 len1 = is->audio_buf_size - is->audio_buf_index;
2204 if (len1 > len)
2205 len1 = len;
2206 memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);
2207 len -= len1;
2208 stream += len1;
2209 is->audio_buf_index += len1;
2210 }
2211}
2212
01310af2
FB
2213/* open a given stream. Return 0 if OK */
2214static int stream_component_open(VideoState *is, int stream_index)
2215{
2216 AVFormatContext *ic = is->ic;
fe74099a 2217 AVCodecContext *avctx;
01310af2
FB
2218 AVCodec *codec;
2219 SDL_AudioSpec wanted_spec, spec;
2220
2221 if (stream_index < 0 || stream_index >= ic->nb_streams)
2222 return -1;
fe74099a 2223 avctx = ic->streams[stream_index]->codec;
115329f1 2224
01310af2 2225 /* prepare audio output */
72415b2a 2226 if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) {
fe74099a
SS
2227 if (avctx->channels > 0) {
2228 avctx->request_channels = FFMIN(2, avctx->channels);
94eadc8b 2229 } else {
fe74099a 2230 avctx->request_channels = 2;
638c9d91 2231 }
01310af2
FB
2232 }
2233
fe74099a
SS
2234 codec = avcodec_find_decoder(avctx->codec_id);
2235 avctx->debug_mv = debug_mv;
2236 avctx->debug = debug;
2237 avctx->workaround_bugs = workaround_bugs;
2238 avctx->lowres = lowres;
2239 if(lowres) avctx->flags |= CODEC_FLAG_EMU_EDGE;
2240 avctx->idct_algo= idct;
2241 if(fast) avctx->flags2 |= CODEC_FLAG2_FAST;
2242 avctx->skip_frame= skip_frame;
2243 avctx->skip_idct= skip_idct;
2244 avctx->skip_loop_filter= skip_loop_filter;
2245 avctx->error_recognition= error_recognition;
2246 avctx->error_concealment= error_concealment;
2247 avcodec_thread_init(avctx, thread_count);
2248
0093ebc2 2249 set_context_opts(avctx, avcodec_opts[avctx->codec_type], 0, codec);
e43d7a18 2250
01310af2 2251 if (!codec ||
fe74099a 2252 avcodec_open(avctx, codec) < 0)
01310af2 2253 return -1;
51b73087
JR
2254
2255 /* prepare audio output */
72415b2a 2256 if (avctx->codec_type == AVMEDIA_TYPE_AUDIO) {
fe74099a 2257 wanted_spec.freq = avctx->sample_rate;
51b73087 2258 wanted_spec.format = AUDIO_S16SYS;
fe74099a 2259 wanted_spec.channels = avctx->channels;
51b73087
JR
2260 wanted_spec.silence = 0;
2261 wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
2262 wanted_spec.callback = sdl_audio_callback;
2263 wanted_spec.userdata = is;
2264 if (SDL_OpenAudio(&wanted_spec, &spec) < 0) {
2265 fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
2266 return -1;
2267 }
2268 is->audio_hw_buf_size = spec.size;
5d6e4c16 2269 is->audio_src_fmt= AV_SAMPLE_FMT_S16;
51b73087
JR
2270 }
2271
3f3fe38d 2272 ic->streams[stream_index]->discard = AVDISCARD_DEFAULT;
fe74099a 2273 switch(avctx->codec_type) {
72415b2a 2274 case AVMEDIA_TYPE_AUDIO:
01310af2
FB
2275 is->audio_stream = stream_index;
2276 is->audio_st = ic->streams[stream_index];
2277 is->audio_buf_size = 0;
2278 is->audio_buf_index = 0;
638c9d91
FB
2279
2280 /* init averaging filter */
2281 is->audio_diff_avg_coef = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
2282 is->audio_diff_avg_count = 0;
 2283 /* since we do not have a precise enough measure of the audio fifo fullness,
 2284 we correct audio sync only if the error is larger than this threshold */
fe74099a 2285 is->audio_diff_threshold = 2.0 * SDL_AUDIO_BUFFER_SIZE / avctx->sample_rate;
638c9d91 2286
01310af2
FB
2287 memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
2288 packet_queue_init(&is->audioq);
bb270c08 2289 SDL_PauseAudio(0);
01310af2 2290 break;
72415b2a 2291 case AVMEDIA_TYPE_VIDEO:
01310af2
FB
2292 is->video_stream = stream_index;
2293 is->video_st = ic->streams[stream_index];
2294
68aefbe8 2295// is->video_current_pts_time = av_gettime();
638c9d91 2296
01310af2
FB
2297 packet_queue_init(&is->videoq);
2298 is->video_tid = SDL_CreateThread(video_thread, is);
2299 break;
72415b2a 2300 case AVMEDIA_TYPE_SUBTITLE:
72ce053b
IC
2301 is->subtitle_stream = stream_index;
2302 is->subtitle_st = ic->streams[stream_index];
2303 packet_queue_init(&is->subtitleq);
115329f1 2304
72ce053b
IC
2305 is->subtitle_tid = SDL_CreateThread(subtitle_thread, is);
2306 break;
01310af2
FB
2307 default:
2308 break;
2309 }
2310 return 0;
2311}
2312
2313static void stream_component_close(VideoState *is, int stream_index)
2314{
2315 AVFormatContext *ic = is->ic;
fe74099a 2316 AVCodecContext *avctx;
115329f1 2317
72ce053b
IC
2318 if (stream_index < 0 || stream_index >= ic->nb_streams)
2319 return;
fe74099a 2320 avctx = ic->streams[stream_index]->codec;
01310af2 2321
fe74099a 2322 switch(avctx->codec_type) {
72415b2a 2323 case AVMEDIA_TYPE_AUDIO:
01310af2
FB
2324 packet_queue_abort(&is->audioq);
2325
2326 SDL_CloseAudio();
2327
2328 packet_queue_end(&is->audioq);
5a4476e2
PR
2329 if (is->reformat_ctx)
2330 av_audio_convert_free(is->reformat_ctx);
bc77fce6 2331 is->reformat_ctx = NULL;
01310af2 2332 break;
72415b2a 2333 case AVMEDIA_TYPE_VIDEO:
01310af2
FB
2334 packet_queue_abort(&is->videoq);
2335
 2336 /* note: we also signal this mutex to make sure we unblock the
2337 video thread in all cases */
2338 SDL_LockMutex(is->pictq_mutex);
2339 SDL_CondSignal(is->pictq_cond);
2340 SDL_UnlockMutex(is->pictq_mutex);
2341
2342 SDL_WaitThread(is->video_tid, NULL);
2343
2344 packet_queue_end(&is->videoq);
2345 break;
72415b2a 2346 case AVMEDIA_TYPE_SUBTITLE:
72ce053b 2347 packet_queue_abort(&is->subtitleq);
115329f1 2348
72ce053b
IC
 2349 /* note: we also signal this mutex to make sure we unblock the
 2350 subtitle thread in all cases */
2351 SDL_LockMutex(is->subpq_mutex);
2352 is->subtitle_stream_changed = 1;
115329f1 2353
72ce053b
IC
2354 SDL_CondSignal(is->subpq_cond);
2355 SDL_UnlockMutex(is->subpq_mutex);
2356
2357 SDL_WaitThread(is->subtitle_tid, NULL);
2358
2359 packet_queue_end(&is->subtitleq);
2360 break;
01310af2
FB
2361 default:
2362 break;
2363 }
2364
3f3fe38d 2365 ic->streams[stream_index]->discard = AVDISCARD_ALL;
fe74099a
SS
2366 avcodec_close(avctx);
2367 switch(avctx->codec_type) {
72415b2a 2368 case AVMEDIA_TYPE_AUDIO:
01310af2
FB
2369 is->audio_st = NULL;
2370 is->audio_stream = -1;
2371 break;
72415b2a 2372 case AVMEDIA_TYPE_VIDEO:
01310af2
FB
2373 is->video_st = NULL;
2374 is->video_stream = -1;
2375 break;
72415b2a 2376 case AVMEDIA_TYPE_SUBTITLE:
72ce053b
IC
2377 is->subtitle_st = NULL;
2378 is->subtitle_stream = -1;
2379 break;
01310af2
FB
2380 default:
2381 break;
2382 }
2383}
2384
416e3508
FB
2385/* since we have only one decoding thread, we can use a global
2386 variable instead of a thread local variable */
2387static VideoState *global_video_state;
2388
2389static int decode_interrupt_cb(void)
2390{
2391 return (global_video_state && global_video_state->abort_request);
2392}
01310af2
FB
2393
2394/* this thread gets the stream from the disk or the network */
2395static int decode_thread(void *arg)
2396{
2397 VideoState *is = arg;
2398 AVFormatContext *ic;
6625a3de 2399 int err, i, ret;
72415b2a 2400 int st_index[AVMEDIA_TYPE_NB];
01310af2 2401 AVPacket pkt1, *pkt = &pkt1;
61890b02 2402 AVFormatParameters params, *ap = &params;
75bb7b0a 2403 int eof=0;
d834d63b 2404 int pkt_in_play_range = 0;
01310af2 2405
6299a229
MN
2406 ic = avformat_alloc_context();
2407
6625a3de 2408 memset(st_index, -1, sizeof(st_index));
01310af2
FB
2409 is->video_stream = -1;
2410 is->audio_stream = -1;
72ce053b 2411 is->subtitle_stream = -1;
01310af2 2412
416e3508
FB
2413 global_video_state = is;
2414 url_set_interrupt_cb(decode_interrupt_cb);
2415
61890b02 2416 memset(ap, 0, sizeof(*ap));
115329f1 2417
6299a229 2418 ap->prealloced_context = 1;
e4b89522
LW
2419 ap->width = frame_width;
2420 ap->height= frame_height;
7e042912 2421 ap->time_base= (AVRational){1, 25};
e4b89522 2422 ap->pix_fmt = frame_pix_fmt;
7e042912 2423
0093ebc2 2424 set_context_opts(ic, avformat_opts, AV_OPT_FLAG_DECODING_PARAM, NULL);
6299a229 2425
61890b02 2426 err = av_open_input_file(&ic, is->filename, is->iformat, 0, ap);
638c9d91
FB
2427 if (err < 0) {
2428 print_error(is->filename, err);
2429 ret = -1;
2430 goto fail;
2431 }
01310af2 2432 is->ic = ic;
30bc6613
MN
2433
2434 if(genpts)
2435 ic->flags |= AVFMT_FLAG_GENPTS;
2436
24c07998
LA
2437 err = av_find_stream_info(ic);
2438 if (err < 0) {
2439 fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
2440 ret = -1;
2441 goto fail;
2442 }
899681cd
BA
2443 if(ic->pb)
2444 ic->pb->eof_reached= 0; //FIXME hack, ffplay maybe should not use url_feof() to test for the end
72ea344b 2445
70a4764d
MN
2446 if(seek_by_bytes<0)
2447 seek_by_bytes= !!(ic->iformat->flags & AVFMT_TS_DISCONT);
2448
72ea344b
FB
2449 /* if seeking requested, we execute it */
2450 if (start_time != AV_NOPTS_VALUE) {
2451 int64_t timestamp;
2452
2453 timestamp = start_time;
2454 /* add the stream start time */
2455 if (ic->start_time != AV_NOPTS_VALUE)
2456 timestamp += ic->start_time;
4ed29207 2457 ret = avformat_seek_file(ic, -1, INT64_MIN, timestamp, INT64_MAX, 0);
72ea344b 2458 if (ret < 0) {
115329f1 2459 fprintf(stderr, "%s: could not seek to position %0.3f\n",
72ea344b
FB
2460 is->filename, (double)timestamp / AV_TIME_BASE);
2461 }
2462 }
72ea344b 2463
406f0f1b 2464 for (i = 0; i < ic->nb_streams; i++)
3f3fe38d 2465 ic->streams[i]->discard = AVDISCARD_ALL;
406f0f1b
NG
 2466 if (!video_disable)
 2467 st_index[AVMEDIA_TYPE_VIDEO] =
 2468 av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO,
 2469 wanted_stream[AVMEDIA_TYPE_VIDEO], -1, NULL, 0);
 2470 if (!audio_disable)
 2471 st_index[AVMEDIA_TYPE_AUDIO] =
 2472 av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO,
 2473 wanted_stream[AVMEDIA_TYPE_AUDIO],
 2474 st_index[AVMEDIA_TYPE_VIDEO],
 2475 NULL, 0);
 2476 if (!video_disable)
 2477 st_index[AVMEDIA_TYPE_SUBTITLE] =
 2478 av_find_best_stream(ic, AVMEDIA_TYPE_SUBTITLE,
 2479 wanted_stream[AVMEDIA_TYPE_SUBTITLE],
 2480 (st_index[AVMEDIA_TYPE_AUDIO] >= 0 ?
 2481 st_index[AVMEDIA_TYPE_AUDIO] :
 2482 st_index[AVMEDIA_TYPE_VIDEO]),
01310af2
FB
 2483 NULL, 0);
2484 if (show_status) {
2485 dump_format(ic, 0, is->filename, 0);
2486 }
2487
2488 /* open the streams */
72415b2a
SS
2489 if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) {
2490 stream_component_open(is, st_index[AVMEDIA_TYPE_AUDIO]);
01310af2
FB
2491 }
2492
077a8d61 2493 ret=-1;
72415b2a
SS
2494 if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) {
2495 ret= stream_component_open(is, st_index[AVMEDIA_TYPE_VIDEO]);
077a8d61 2496 }
d38c9e7a 2497 is->refresh_tid = SDL_CreateThread(refresh_thread, is);
077a8d61 2498 if(ret<0) {
01310af2 2499 if (!display_disable)
bf8ae197 2500 is->show_audio = 2;
01310af2
FB
2501 }
2502
72415b2a
SS
2503 if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) {
2504 stream_component_open(is, st_index[AVMEDIA_TYPE_SUBTITLE]);
16a59a7b
BA
2505 }
2506
01310af2 2507 if (is->video_stream < 0 && is->audio_stream < 0) {
638c9d91
FB
2508 fprintf(stderr, "%s: could not open codecs\n", is->filename);
2509 ret = -1;
01310af2
FB
2510 goto fail;
2511 }
2512
2513 for(;;) {
2514 if (is->abort_request)
2515 break;
416e3508
FB
2516 if (is->paused != is->last_paused) {
2517 is->last_paused = is->paused;
72ea344b 2518 if (is->paused)
f5668147 2519 is->read_pause_return= av_read_pause(ic);
72ea344b
FB
2520 else
2521 av_read_play(ic);
416e3508 2522 }
2f642393
AJ
2523#if CONFIG_RTSP_DEMUXER
2524 if (is->paused && !strcmp(ic->iformat->name, "rtsp")) {
416e3508
FB
2525 /* wait 10 ms to avoid trying to get another packet */
2526 /* XXX: horrible */
2527 SDL_Delay(10);
2528 continue;
2529 }
400738b1 2530#endif
72ea344b 2531 if (is->seek_req) {
8e606cc8 2532 int64_t seek_target= is->seek_pos;
4ed29207
MN
2533 int64_t seek_min= is->seek_rel > 0 ? seek_target - is->seek_rel + 2: INT64_MIN;
2534 int64_t seek_max= is->seek_rel < 0 ? seek_target - is->seek_rel - 2: INT64_MAX;
 2535//FIXME the +-2 is due to rounding not being done in the correct direction when generating
 2536// the seek_pos/seek_rel variables
8e606cc8 2537
4ed29207 2538 ret = avformat_seek_file(is->ic, -1, seek_min, seek_target, seek_max, is->seek_flags);
72ea344b
FB
2539 if (ret < 0) {
2540 fprintf(stderr, "%s: error while seeking\n", is->ic->filename);
e6c0297f
MN
2541 }else{
2542 if (is->audio_stream >= 0) {
2543 packet_queue_flush(&is->audioq);
39c6a118 2544 packet_queue_put(&is->audioq, &flush_pkt);
e6c0297f 2545 }
72ce053b
IC
2546 if (is->subtitle_stream >= 0) {
2547 packet_queue_flush(&is->subtitleq);
39c6a118 2548 packet_queue_put(&is->subtitleq, &flush_pkt);
72ce053b 2549 }
e6c0297f
MN
2550 if (is->video_stream >= 0) {
2551 packet_queue_flush(&is->videoq);
39c6a118 2552 packet_queue_put(&is->videoq, &flush_pkt);
e6c0297f 2553 }
72ea344b
FB
2554 }
2555 is->seek_req = 0;
e45aeb38 2556 eof= 0;
72ea344b 2557 }
416e3508 2558
01310af2 2559 /* if the queues are full, no need to read more */
79ee4683
MN
2560 if ( is->audioq.size + is->videoq.size + is->subtitleq.size > MAX_QUEUE_SIZE
2561 || ( (is->audioq .size > MIN_AUDIOQ_SIZE || is->audio_stream<0)
2562 && (is->videoq .nb_packets > MIN_FRAMES || is->video_stream<0)
2563 && (is->subtitleq.nb_packets > MIN_FRAMES || is->subtitle_stream<0))) {
01310af2
FB
2564 /* wait 10 ms */
2565 SDL_Delay(10);
2566 continue;
2567 }
27d97fde 2568 if(eof) {
9dc41767 2569 if(is->video_stream >= 0){
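 /* queue an empty packet so the video decoder flushes out its remaining delayed frames */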
26534fe8
MN
2570 av_init_packet(pkt);
2571 pkt->data=NULL;
2572 pkt->size=0;
2573 pkt->stream_index= is->video_stream;
2574 packet_queue_put(&is->videoq, pkt);
9dc41767 2575 }
b4083171 2576 SDL_Delay(10);
1922c0a7
RK
2577 if(is->audioq.size + is->videoq.size + is->subtitleq.size ==0){
2578 if(loop!=1 && (!loop || --loop)){
2579 stream_seek(cur_stream, start_time != AV_NOPTS_VALUE ? start_time : 0, 0, 0);
2580 }else if(autoexit){
2581 ret=AVERROR_EOF;
2582 goto fail;
2583 }
2d1653b0 2584 }
600a331c
MN
2585 continue;
2586 }
72ea344b 2587 ret = av_read_frame(ic, pkt);
01310af2 2588 if (ret < 0) {
27d97fde 2589 if (ret == AVERROR_EOF || url_feof(ic->pb))
75bb7b0a
MN
2590 eof=1;
2591 if (url_ferror(ic->pb))
bb270c08 2592 break;
75bb7b0a
MN
2593 SDL_Delay(100); /* wait for user event */
2594 continue;
01310af2 2595 }
d834d63b
RK
2596 /* check if packet is in play range specified by user, then queue, otherwise discard */
2597 pkt_in_play_range = duration == AV_NOPTS_VALUE ||
2598 (pkt->pts - ic->streams[pkt->stream_index]->start_time) *
2599 av_q2d(ic->streams[pkt->stream_index]->time_base) -
2600 (double)(start_time != AV_NOPTS_VALUE ? start_time : 0)/1000000
2601 <= ((double)duration/1000000);
2602 if (pkt->stream_index == is->audio_stream && pkt_in_play_range) {
01310af2 2603 packet_queue_put(&is->audioq, pkt);
d834d63b 2604 } else if (pkt->stream_index == is->video_stream && pkt_in_play_range) {
01310af2 2605 packet_queue_put(&is->videoq, pkt);
d834d63b 2606 } else if (pkt->stream_index == is->subtitle_stream && pkt_in_play_range) {
72ce053b 2607 packet_queue_put(&is->subtitleq, pkt);
01310af2
FB
2608 } else {
2609 av_free_packet(pkt);
2610 }
2611 }
2612 /* wait until the end */
2613 while (!is->abort_request) {
2614 SDL_Delay(100);
2615 }
2616
638c9d91 2617 ret = 0;
01310af2 2618 fail:
416e3508
FB
2619 /* disable interrupting */
2620 global_video_state = NULL;
2621
01310af2
FB
2622 /* close each stream */
2623 if (is->audio_stream >= 0)
2624 stream_component_close(is, is->audio_stream);
2625 if (is->video_stream >= 0)
2626 stream_component_close(is, is->video_stream);
72ce053b
IC
2627 if (is->subtitle_stream >= 0)
2628 stream_component_close(is, is->subtitle_stream);
638c9d91
FB
2629 if (is->ic) {
2630 av_close_input_file(is->ic);
2631 is->ic = NULL; /* safety */
2632 }
416e3508
FB
2633 url_set_interrupt_cb(NULL);
2634
638c9d91
FB
2635 if (ret != 0) {
2636 SDL_Event event;
115329f1 2637
638c9d91
FB
2638 event.type = FF_QUIT_EVENT;
2639 event.user.data1 = is;
2640 SDL_PushEvent(&event);
2641 }
01310af2
FB
2642 return 0;
2643}
2644
638c9d91 2645static VideoState *stream_open(const char *filename, AVInputFormat *iformat)
01310af2
FB
2646{
2647 VideoState *is;
2648
2649 is = av_mallocz(sizeof(VideoState));
2650 if (!is)
2651 return NULL;
f7d78f36 2652 av_strlcpy(is->filename, filename, sizeof(is->filename));
638c9d91 2653 is->iformat = iformat;
01310af2
FB
2654 is->ytop = 0;
2655 is->xleft = 0;
2656
2657 /* start video display */
2658 is->pictq_mutex = SDL_CreateMutex();
2659 is->pictq_cond = SDL_CreateCond();
115329f1 2660
72ce053b
IC
2661 is->subpq_mutex = SDL_CreateMutex();
2662 is->subpq_cond = SDL_CreateCond();
115329f1 2663
638c9d91 2664 is->av_sync_type = av_sync_type;
01310af2
FB
2665 is->parse_tid = SDL_CreateThread(decode_thread, is);
2666 if (!is->parse_tid) {
2667 av_free(is);
2668 return NULL;
2669 }
2670 return is;
2671}
2672
7b49ce2e 2673static void stream_cycle_channel(VideoState *is, int codec_type)
638c9d91
FB
2674{
2675 AVFormatContext *ic = is->ic;
2676 int start_index, stream_index;
2677 AVStream *st;
2678
72415b2a 2679 if (codec_type == AVMEDIA_TYPE_VIDEO)
638c9d91 2680 start_index = is->video_stream;
72415b2a 2681 else if (codec_type == AVMEDIA_TYPE_AUDIO)
638c9d91 2682 start_index = is->audio_stream;
72ce053b
IC
2683 else
2684 start_index = is->subtitle_stream;
72415b2a 2685 if (start_index < (codec_type == AVMEDIA_TYPE_SUBTITLE ? -1 : 0))
638c9d91
FB
2686 return;
2687 stream_index = start_index;
2688 for(;;) {
2689 if (++stream_index >= is->ic->nb_streams)
72ce053b 2690 {
72415b2a 2691 if (codec_type == AVMEDIA_TYPE_SUBTITLE)
72ce053b
IC
2692 {
2693 stream_index = -1;
2694 goto the_end;
2695 } else
2696 stream_index = 0;
2697 }
638c9d91
FB
2698 if (stream_index == start_index)
2699 return;
2700 st = ic->streams[stream_index];
01f4895c 2701 if (st->codec->codec_type == codec_type) {
638c9d91
FB
2702 /* check that parameters are OK */
2703 switch(codec_type) {
72415b2a 2704 case AVMEDIA_TYPE_AUDIO:
01f4895c
MN
2705 if (st->codec->sample_rate != 0 &&
2706 st->codec->channels != 0)
638c9d91
FB
2707 goto the_end;
2708 break;
72415b2a
SS
2709 case AVMEDIA_TYPE_VIDEO:
2710 case AVMEDIA_TYPE_SUBTITLE:
638c9d91
FB
2711 goto the_end;
2712 default:
2713 break;
2714 }
2715 }
2716 }
2717 the_end:
2718 stream_component_close(is, start_index);
2719 stream_component_open(is, stream_index);
2720}
2721
2722
7b49ce2e 2723static void toggle_full_screen(void)
01310af2 2724{
01310af2 2725 is_full_screen = !is_full_screen;
29f3b38a
MR
2726 if (!fs_screen_width) {
2727 /* use default SDL method */
fb84155b 2728// SDL_WM_ToggleFullScreen(screen);
01310af2 2729 }
fb84155b 2730 video_open(cur_stream);
01310af2
FB
2731}
2732
7b49ce2e 2733static void toggle_pause(void)
01310af2
FB
2734{
2735 if (cur_stream)
2736 stream_pause(cur_stream);
bba04f1e
WH
2737 step = 0;
2738}
2739
7b49ce2e 2740static void step_to_next_frame(void)
bba04f1e
WH
2741{
2742 if (cur_stream) {
19cc524a 2743 /* if the stream is paused unpause it, then step */
bba04f1e 2744 if (cur_stream->paused)
19cc524a 2745 stream_pause(cur_stream);
bba04f1e
WH
2746 }
2747 step = 1;
01310af2
FB
2748}
2749
7b49ce2e 2750static void toggle_audio_display(void)
01310af2
FB
2751{
2752 if (cur_stream) {
f5968788 2753 int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
12eeda34 2754 cur_stream->show_audio = (cur_stream->show_audio + 1) % 3;
f5968788
MN
2755 fill_rectangle(screen,
2756 cur_stream->xleft, cur_stream->ytop, cur_stream->width, cur_stream->height,
2757 bgcolor);
2758 SDL_UpdateRect(screen, cur_stream->xleft, cur_stream->ytop, cur_stream->width, cur_stream->height);
01310af2
FB
2759 }
2760}
2761
2762/* handle an event sent by the GUI */
7b49ce2e 2763static void event_loop(void)
01310af2
FB
2764{
2765 SDL_Event event;
a11d11aa 2766 double incr, pos, frac;
01310af2
FB
2767
2768 for(;;) {
d52ec002 2769 double x;
01310af2
FB
2770 SDL_WaitEvent(&event);
2771 switch(event.type) {
2772 case SDL_KEYDOWN:
066ce8c9
AS
2773 if (exit_on_keydown) {
2774 do_exit();
2775 break;
2776 }
01310af2
FB
2777 switch(event.key.keysym.sym) {
2778 case SDLK_ESCAPE:
2779 case SDLK_q:
2780 do_exit();
2781 break;
2782 case SDLK_f:
2783 toggle_full_screen();
2784 break;
2785 case SDLK_p:
2786 case SDLK_SPACE:
2787 toggle_pause();
2788 break;
bba04f1e
WH
2789 case SDLK_s: //S: Step to next frame
2790 step_to_next_frame();
2791 break;
01310af2 2792 case SDLK_a:
115329f1 2793 if (cur_stream)
72415b2a 2794 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_AUDIO);
638c9d91
FB
2795 break;
2796 case SDLK_v:
115329f1 2797 if (cur_stream)
72415b2a 2798 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_VIDEO);
638c9d91 2799 break;
72ce053b 2800 case SDLK_t:
115329f1 2801 if (cur_stream)
72415b2a 2802 stream_cycle_channel(cur_stream, AVMEDIA_TYPE_SUBTITLE);
72ce053b 2803 break;
638c9d91 2804 case SDLK_w:
01310af2
FB
2805 toggle_audio_display();
2806 break;
72ea344b
FB
2807 case SDLK_LEFT:
2808 incr = -10.0;
2809 goto do_seek;
2810 case SDLK_RIGHT:
2811 incr = 10.0;
2812 goto do_seek;
2813 case SDLK_UP:
2814 incr = 60.0;
2815 goto do_seek;
2816 case SDLK_DOWN:
2817 incr = -60.0;
2818 do_seek:
2819 if (cur_stream) {
94b594c6 2820 if (seek_by_bytes) {
1a620dd7
MN
2821 if (cur_stream->video_stream >= 0 && cur_stream->video_current_pos>=0){
2822 pos= cur_stream->video_current_pos;
2823 }else if(cur_stream->audio_stream >= 0 && cur_stream->audio_pkt.pos>=0){
2824 pos= cur_stream->audio_pkt.pos;
2825 }else
2826 pos = url_ftell(cur_stream->ic->pb);
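 /* translate the time increment (seconds) into a byte offset using the container bit rate, with a rough 180 kB/s fallback */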
94b594c6 2827 if (cur_stream->ic->bit_rate)
566cd2cb 2828 incr *= cur_stream->ic->bit_rate / 8.0;
94b594c6
SH
2829 else
2830 incr *= 180000.0;
2831 pos += incr;
2ef46053 2832 stream_seek(cur_stream, pos, incr, 1);
94b594c6
SH
2833 } else {
2834 pos = get_master_clock(cur_stream);
2835 pos += incr;
2ef46053 2836 stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), (int64_t)(incr * AV_TIME_BASE), 0);
94b594c6 2837 }
72ea344b
FB
2838 }
2839 break;
01310af2
FB
2840 default:
2841 break;
2842 }
2843 break;
a11d11aa 2844 case SDL_MOUSEBUTTONDOWN:
066ce8c9
AS
2845 if (exit_on_mousedown) {
2846 do_exit();
2847 break;
2848 }
d52ec002
MN
2849 case SDL_MOUSEMOTION:
2850 if(event.type ==SDL_MOUSEBUTTONDOWN){
2851 x= event.button.x;
2852 }else{
2853 if(event.motion.state != SDL_PRESSED)
2854 break;
2855 x= event.motion.x;
2856 }
bb270c08 2857 if (cur_stream) {
2ef46053
MN
2858 if(seek_by_bytes || cur_stream->ic->duration<=0){
2859 uint64_t size= url_fsize(cur_stream->ic->pb);
d52ec002 2860 stream_seek(cur_stream, size*x/cur_stream->width, 0, 1);
2ef46053 2861 }else{
6371c81a
MN
2862 int64_t ts;
2863 int ns, hh, mm, ss;
2864 int tns, thh, tmm, tss;
2865 tns = cur_stream->ic->duration/1000000LL;
2866 thh = tns/3600;
2867 tmm = (tns%3600)/60;
2868 tss = (tns%60);
d52ec002 2869 frac = x/cur_stream->width;
6371c81a
MN
2870 ns = frac*tns;
2871 hh = ns/3600;
2872 mm = (ns%3600)/60;
2873 ss = (ns%60);
2874 fprintf(stderr, "Seek to %2.0f%% (%2d:%02d:%02d) of total duration (%2d:%02d:%02d) \n", frac*100,
2875 hh, mm, ss, thh, tmm, tss);
2876 ts = frac*cur_stream->ic->duration;
2877 if (cur_stream->ic->start_time != AV_NOPTS_VALUE)
2878 ts += cur_stream->ic->start_time;
2879 stream_seek(cur_stream, ts, 0, 0);
2ef46053 2880 }
bb270c08
DB
2881 }
2882 break;
01310af2
FB
2883 case SDL_VIDEORESIZE:
2884 if (cur_stream) {
115329f1 2885 screen = SDL_SetVideoMode(event.resize.w, event.resize.h, 0,
01310af2 2886 SDL_HWSURFACE|SDL_RESIZABLE|SDL_ASYNCBLIT|SDL_HWACCEL);
c57d3469
MN
2887 screen_width = cur_stream->width = event.resize.w;
2888 screen_height= cur_stream->height= event.resize.h;
01310af2
FB
2889 }
2890 break;
2891 case SDL_QUIT:
638c9d91 2892 case FF_QUIT_EVENT:
01310af2
FB
2893 do_exit();
2894 break;
2895 case FF_ALLOC_EVENT:
fccb19e3 2896 video_open(event.user.data1);
01310af2
FB
2897 alloc_picture(event.user.data1);
2898 break;
2899 case FF_REFRESH_EVENT:
2900 video_refresh_timer(event.user.data1);
d38c9e7a 2901 cur_stream->refresh=0;
01310af2
FB
2902 break;
2903 default:
2904 break;
2905 }
2906 }
2907}
2908
e4b89522
LW
2909static void opt_frame_size(const char *arg)
2910{
126b638e 2911 if (av_parse_video_size(&frame_width, &frame_height, arg) < 0) {
e4b89522
LW
2912 fprintf(stderr, "Incorrect frame size\n");
2913 exit(1);
2914 }
2915 if ((frame_width % 2) != 0 || (frame_height % 2) != 0) {
2916 fprintf(stderr, "Frame size must be a multiple of 2\n");
2917 exit(1);
2918 }
2919}
2920
a5b3b5f6 2921static int opt_width(const char *opt, const char *arg)
01310af2 2922{
a5b3b5f6
SS
2923 screen_width = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
2924 return 0;
01310af2
FB
2925}
2926
a5b3b5f6 2927static int opt_height(const char *opt, const char *arg)
01310af2 2928{
a5b3b5f6
SS
2929 screen_height = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
2930 return 0;
01310af2
FB
2931}
2932
2933static void opt_format(const char *arg)
2934{
2935 file_iformat = av_find_input_format(arg);
2936 if (!file_iformat) {
2937 fprintf(stderr, "Unknown input format: %s\n", arg);
2938 exit(1);
2939 }
2940}
61890b02 2941
e4b89522
LW
2942static void opt_frame_pix_fmt(const char *arg)
2943{
718c7b18 2944 frame_pix_fmt = av_get_pix_fmt(arg);
e4b89522
LW
2945}
2946
b81d6235 2947static int opt_sync(const char *opt, const char *arg)
638c9d91
FB
2948{
2949 if (!strcmp(arg, "audio"))
2950 av_sync_type = AV_SYNC_AUDIO_MASTER;
2951 else if (!strcmp(arg, "video"))
2952 av_sync_type = AV_SYNC_VIDEO_MASTER;
2953 else if (!strcmp(arg, "ext"))
2954 av_sync_type = AV_SYNC_EXTERNAL_CLOCK;
aab1b7e5 2955 else {
b81d6235 2956 fprintf(stderr, "Unknown value for %s: %s\n", opt, arg);
aab1b7e5
SS
2957 exit(1);
2958 }
b81d6235 2959 return 0;
638c9d91
FB
2960}
2961
e11bc2c6 2962static int opt_seek(const char *opt, const char *arg)
72ea344b 2963{
e11bc2c6
SS
2964 start_time = parse_time_or_die(opt, arg, 1);
2965 return 0;
72ea344b
FB
2966}
2967
d834d63b
RK
2968static int opt_duration(const char *opt, const char *arg)
2969{
2970 duration = parse_time_or_die(opt, arg, 1);
2971 return 0;
2972}
2973
a5b3b5f6 2974static int opt_debug(const char *opt, const char *arg)
e26a8335 2975{
a309073b 2976 av_log_set_level(99);
a5b3b5f6
SS
2977 debug = parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
2978 return 0;
e26a8335 2979}
115329f1 2980
a5b3b5f6 2981static int opt_vismv(const char *opt, const char *arg)
0c9bbaec 2982{
a5b3b5f6
SS
2983 debug_mv = parse_number_or_die(opt, arg, OPT_INT64, INT_MIN, INT_MAX);
2984 return 0;
0c9bbaec 2985}
c62c07d3 2986
a5b3b5f6 2987static int opt_thread_count(const char *opt, const char *arg)
c62c07d3 2988{
a5b3b5f6 2989 thread_count= parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
b250f9c6 2990#if !HAVE_THREADS
c62c07d3
MN
2991 fprintf(stderr, "Warning: not compiled with thread support, using thread emulation\n");
2992#endif
a5b3b5f6 2993 return 0;
c62c07d3 2994}
115329f1 2995
358061f6 2996static const OptionDef options[] = {
992f8eae 2997#include "cmdutils_common_opts.h"
a5b3b5f6
SS
2998 { "x", HAS_ARG | OPT_FUNC2, {(void*)opt_width}, "force displayed width", "width" },
2999 { "y", HAS_ARG | OPT_FUNC2, {(void*)opt_height}, "force displayed height", "height" },
e4b89522 3000 { "s", HAS_ARG | OPT_VIDEO, {(void*)opt_frame_size}, "set frame size (WxH or abbreviation)", "size" },
638c9d91 3001 { "fs", OPT_BOOL, {(void*)&is_full_screen}, "force full screen" },
01310af2
FB
3002 { "an", OPT_BOOL, {(void*)&audio_disable}, "disable audio" },
3003 { "vn", OPT_BOOL, {(void*)&video_disable}, "disable video" },
72415b2a
SS
3004 { "ast", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_AUDIO]}, "select desired audio stream", "stream_number" },
3005 { "vst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_VIDEO]}, "select desired video stream", "stream_number" },
3006 { "sst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[AVMEDIA_TYPE_SUBTITLE]}, "select desired subtitle stream", "stream_number" },
e11bc2c6 3007 { "ss", HAS_ARG | OPT_FUNC2, {(void*)&opt_seek}, "seek to a given position in seconds", "pos" },
d834d63b 3008 { "t", HAS_ARG | OPT_FUNC2, {(void*)&opt_duration}, "play \"duration\" seconds of audio/video", "duration" },
674fe163 3009 { "bytes", OPT_INT | HAS_ARG, {(void*)&seek_by_bytes}, "seek by bytes 0=off 1=on -1=auto", "val" },
01310af2
FB
3010 { "nodisp", OPT_BOOL, {(void*)&display_disable}, "disable graphical display" },
3011 { "f", HAS_ARG, {(void*)opt_format}, "force format", "fmt" },
e4b89522 3012 { "pix_fmt", HAS_ARG | OPT_EXPERT | OPT_VIDEO, {(void*)opt_frame_pix_fmt}, "set pixel format", "format" },
98ae6acf 3013 { "stats", OPT_BOOL | OPT_EXPERT, {(void*)&show_status}, "show status", "" },
a5b3b5f6 3014 { "debug", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_debug}, "print specific debug info", "" },
6387c3e6 3015 { "bug", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&workaround_bugs}, "workaround bugs", "" },
a5b3b5f6 3016 { "vismv", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_vismv}, "visualize motion vectors", "" },
6fc5b059 3017 { "fast", OPT_BOOL | OPT_EXPERT, {(void*)&fast}, "non spec compliant optimizations", "" },
30bc6613 3018 { "genpts", OPT_BOOL | OPT_EXPERT, {(void*)&genpts}, "generate pts", "" },
59055363 3019 { "drp", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&decoder_reorder_pts}, "let decoder reorder pts 0=off 1=on -1=auto", ""},
178fcca8 3020 { "lowres", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&lowres}, "", "" },
8c3eba7c
MN
3021 { "skiploop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_loop_filter}, "", "" },
3022 { "skipframe", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_frame}, "", "" },
3023 { "skipidct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_idct}, "", "" },
178fcca8 3024 { "idct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&idct}, "set idct algo", "algo" },
047599a4 3025 { "er", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_recognition}, "set error detection threshold (0-4)", "threshold" },
1b51e051 3026 { "ec", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_concealment}, "set error concealment options", "bit_mask" },
b81d6235 3027 { "sync", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_sync}, "set audio-video sync. type (type=audio/video/ext)", "type" },
a5b3b5f6 3028 { "threads", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_thread_count}, "thread count", "count" },
2d1653b0 3029 { "autoexit", OPT_BOOL | OPT_EXPERT, {(void*)&autoexit}, "exit at the end", "" },
066ce8c9
AS
3030 { "exitonkeydown", OPT_BOOL | OPT_EXPERT, {(void*)&exit_on_keydown}, "exit on key down", "" },
3031 { "exitonmousedown", OPT_BOOL | OPT_EXPERT, {(void*)&exit_on_mousedown}, "exit on mouse down", "" },
1922c0a7 3032 { "loop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&loop}, "set number of times the playback shall be looped", "loop count" },
d38c9e7a 3033 { "framedrop", OPT_BOOL | OPT_EXPERT, {(void*)&framedrop}, "drop frames when cpu is too slow", "" },
076db5ed 3034 { "window_title", OPT_STRING | HAS_ARG, {(void*)&window_title}, "set window title", "window title" },
917d2bb3 3035#if CONFIG_AVFILTER
09ed11e5 3036 { "vf", OPT_STRING | HAS_ARG, {(void*)&vfilters}, "video filters", "filter list" },
917d2bb3 3037#endif
2b3da32f 3038 { "rdftspeed", OPT_INT | HAS_ARG| OPT_AUDIO | OPT_EXPERT, {(void*)&rdftspeed}, "rdft speed", "msecs" },
e43d7a18 3039 { "default", OPT_FUNC2 | HAS_ARG | OPT_AUDIO | OPT_VIDEO | OPT_EXPERT, {(void*)opt_default}, "generic catch all option", "" },
01310af2
FB
3040 { NULL, },
3041};
3042
0c2a18cb 3043static void show_usage(void)
01310af2 3044{
27daa420
RP
3045 printf("Simple media player\n");
3046 printf("usage: ffplay [options] input_file\n");
01310af2 3047 printf("\n");
0c2a18cb
RP
3048}
3049
3050static void show_help(void)
3051{
f66eb58e 3052 av_log_set_callback(log_callback_help);
0c2a18cb 3053 show_usage();
02d504a7
FB
3054 show_help_options(options, "Main options:\n",
3055 OPT_EXPERT, 0);
3056 show_help_options(options, "\nAdvanced options:\n",
3057 OPT_EXPERT, OPT_EXPERT);
f66eb58e
SS
3058 printf("\n");
3059 av_opt_show2(avcodec_opts[0], NULL,
3060 AV_OPT_FLAG_DECODING_PARAM, 0);
3061 printf("\n");
3062 av_opt_show2(avformat_opts, NULL,
3063 AV_OPT_FLAG_DECODING_PARAM, 0);
3064#if !CONFIG_AVFILTER
3065 printf("\n");
3066 av_opt_show2(sws_opts, NULL,
3067 AV_OPT_FLAG_ENCODING_PARAM, 0);
3068#endif
01310af2
FB
3069 printf("\nWhile playing:\n"
3070 "q, ESC quit\n"
3071 "f toggle full screen\n"
3072 "p, SPC pause\n"
638c9d91
FB
3073 "a cycle audio channel\n"
3074 "v cycle video channel\n"
72ce053b 3075 "t cycle subtitle channel\n"
638c9d91 3076 "w show audio waves\n"
79f8b328 3077 "s activate frame-step mode\n"
72ea344b
FB
3078 "left/right seek backward/forward 10 seconds\n"
3079 "down/up seek backward/forward 1 minute\n"
a11d11aa 3080 "mouse click seek to percentage in file corresponding to fraction of width\n"
01310af2 3081 );
01310af2
FB
3082}
3083
358061f6 3084static void opt_input_file(const char *filename)
01310af2 3085{
07a70b38
SS
3086 if (input_filename) {
3087 fprintf(stderr, "Argument '%s' provided as input filename, but '%s' was already specified.\n",
3088 filename, input_filename);
3089 exit(1);
3090 }
e8d83e1c 3091 if (!strcmp(filename, "-"))
9fcfc0b7 3092 filename = "pipe:";
01310af2
FB
3093 input_filename = filename;
3094}
3095
3096/* Called from the main */
3097int main(int argc, char **argv)
3098{
a5c33faa 3099 int flags;
115329f1 3100
6b6bca64
MN
3101 av_log_set_flags(AV_LOG_SKIP_REPEATED);
3102
01310af2 3103 /* register all codecs, demux and protocols */
c721d803 3104 avcodec_register_all();
9b157b0c 3105#if CONFIG_AVDEVICE
c721d803 3106 avdevice_register_all();
9b157b0c 3107#endif
917d2bb3
MN
3108#if CONFIG_AVFILTER
3109 avfilter_register_all();
3110#endif
01310af2
FB
3111 av_register_all();
3112
a5c33faa 3113 init_opts();
e43d7a18 3114
ea9c581f 3115 show_banner();
4cfac5bc 3116
f5da5c93 3117 parse_options(argc, argv, options, opt_input_file);
01310af2 3118
aab1b7e5 3119 if (!input_filename) {
7f11e745 3120 show_usage();
7a7da6b4 3121 fprintf(stderr, "An input file must be specified\n");
7f11e745 3122 fprintf(stderr, "Use -h to get full help or, even better, run 'man ffplay'\n");
aab1b7e5
SS
3123 exit(1);
3124 }
01310af2
FB
3125
3126 if (display_disable) {
3127 video_disable = 1;
3128 }
31319a8c 3129 flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
c97f5402
DB
3130#if !defined(__MINGW32__) && !defined(__APPLE__)
3131 flags |= SDL_INIT_EVENTTHREAD; /* Not supported on Windows or Mac OS X */
31319a8c 3132#endif
01310af2 3133 if (SDL_Init (flags)) {
05ab0b76 3134 fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
01310af2
FB
3135 exit(1);
3136 }
3137
3138 if (!display_disable) {
b250f9c6 3139#if HAVE_SDL_VIDEO_SIZE
3ef17d62
MR
3140 const SDL_VideoInfo *vi = SDL_GetVideoInfo();
3141 fs_screen_width = vi->current_w;
3142 fs_screen_height = vi->current_h;
29f3b38a 3143#endif
01310af2
FB
3144 }
3145
3146 SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
01310af2
FB
3147 SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
3148 SDL_EventState(SDL_USEREVENT, SDL_IGNORE);
3149
39c6a118
MN
3150 av_init_packet(&flush_pkt);
3151 flush_pkt.data= "FLUSH";
3152
638c9d91 3153 cur_stream = stream_open(input_filename, file_iformat);
01310af2
FB
3154
3155 event_loop();
3156
3157 /* never returns */
3158
3159 return 0;
3160}