/*
 * Linux video grab interface
 * Copyright (c) 2000,2001 Fabrice Bellard.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
#include "avformat.h"
#include <linux/videodev.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <time.h>

typedef struct {
    int fd;
    int frame_format; /* see VIDEO_PALETTE_xxx */
    int use_mmap;
    int width, height;
    int frame_rate;
    INT64 time_frame;
    int frame_size;
} VideoData;

const char *v4l_device = "/dev/video";

/* XXX: move all that to the context */

static struct video_capability video_cap;
static UINT8 *video_buf;
static struct video_mbuf gb_buffers;
static struct video_mmap gb_buf;
static struct video_audio audio, audio_saved;
static int gb_frame = 0;
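/* Open the V4L device, negotiate a capture palette (mmap- or read-based
 * access) and fill in the raw video stream parameters. */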
static int grab_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
    VideoData *s = s1->priv_data;
    AVStream *st;
    int width, height;
    int video_fd, frame_size;
    int ret, frame_rate;
    int desired_palette;

    if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
        return -1;

    width = ap->width;
    height = ap->height;
    frame_rate = ap->frame_rate;

    st = av_new_stream(s1, 0);
    if (!st)
        return -ENOMEM;

    s->width = width;
    s->height = height;
    s->frame_rate = frame_rate;

    video_fd = open(v4l_device, O_RDWR);
    if (video_fd < 0) {
        perror(v4l_device);
        goto fail;
    }

    if (ioctl(video_fd,VIDIOCGCAP,&video_cap) < 0) {
        perror("VIDIOCGCAP");
        goto fail;
    }

    if (!(video_cap.type & VID_TYPE_CAPTURE)) {
        fprintf(stderr, "Fatal: grab device does not handle capture\n");
        goto fail;
    }

    desired_palette = -1;
    if (st->codec.pix_fmt == PIX_FMT_YUV420P) {
        desired_palette = VIDEO_PALETTE_YUV420P;
    } else if (st->codec.pix_fmt == PIX_FMT_YUV422) {
        desired_palette = VIDEO_PALETTE_YUV422;
    } else if (st->codec.pix_fmt == PIX_FMT_BGR24) {
        desired_palette = VIDEO_PALETTE_RGB24;
    }

    /* unmute audio */
    ioctl(video_fd, VIDIOCGAUDIO, &audio);
    memcpy(&audio_saved, &audio, sizeof(audio));
    audio.flags &= ~VIDEO_AUDIO_MUTE;
    ioctl(video_fd, VIDIOCSAUDIO, &audio);

    ret = ioctl(video_fd,VIDIOCGMBUF,&gb_buffers);
    if (ret < 0) {
        /* try to use read based access */
        struct video_window win;
        struct video_picture pict;
        int val;

        win.x = 0;
        win.y = 0;
        win.width = width;
        win.height = height;
        win.chromakey = -1;
        win.flags = 0;

        ioctl(video_fd, VIDIOCSWIN, &win);

        ioctl(video_fd, VIDIOCGPICT, &pict);
#if 0
        printf("v4l: colour=%d hue=%d brightness=%d contrast=%d whiteness=%d\n",
               pict.colour,
               pict.hue,
               pict.brightness,
               pict.contrast,
               pict.whiteness);
#endif
        /* try to choose a suitable video format */
        pict.palette = desired_palette;
        if (desired_palette == -1 || (ret = ioctl(video_fd, VIDIOCSPICT, &pict)) < 0) {
            pict.palette=VIDEO_PALETTE_YUV420P;
            ret = ioctl(video_fd, VIDIOCSPICT, &pict);
            if (ret < 0) {
                pict.palette=VIDEO_PALETTE_YUV422;
                ret = ioctl(video_fd, VIDIOCSPICT, &pict);
                if (ret < 0) {
                    pict.palette=VIDEO_PALETTE_RGB24;
                    ret = ioctl(video_fd, VIDIOCSPICT, &pict);
                    if (ret < 0)
                        goto fail1;
                }
            }
        }

        s->frame_format = pict.palette;

        val = 1;
        ioctl(video_fd, VIDIOCCAPTURE, &val);

        s->time_frame = av_gettime();
        s->use_mmap = 0;
    } else {
        video_buf = mmap(0,gb_buffers.size,PROT_READ|PROT_WRITE,MAP_SHARED,video_fd,0);
        if ((unsigned char*)-1 == video_buf) {
            perror("mmap");
            goto fail;
        }
        gb_frame = 0;
        s->time_frame = av_gettime();

        /* start to grab the first frame */
        gb_buf.frame = gb_frame % gb_buffers.frames;
        gb_buf.height = height;
        gb_buf.width = width;
        gb_buf.format = desired_palette;

        if (desired_palette == -1 || (ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf)) < 0) {
            gb_buf.format = VIDEO_PALETTE_YUV420P;

            ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
            if (ret < 0 && errno != EAGAIN) {
                /* try YUV422 */
                gb_buf.format = VIDEO_PALETTE_YUV422;

                ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
                if (ret < 0 && errno != EAGAIN) {
                    /* try RGB24 */
                    gb_buf.format = VIDEO_PALETTE_RGB24;
                    ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
                }
            }
        }
        if (ret < 0) {
            if (errno != EAGAIN) {
            fail1:
                fprintf(stderr, "Fatal: grab device does not support suitable format\n");
            } else {
                fprintf(stderr,"Fatal: grab device does not receive any video signal\n");
            }
            goto fail;
        }
        s->frame_format = gb_buf.format;
        s->use_mmap = 1;
    }

    switch(s->frame_format) {
    case VIDEO_PALETTE_YUV420P:
        frame_size = (width * height * 3) / 2;
        st->codec.pix_fmt = PIX_FMT_YUV420P;
        break;
    case VIDEO_PALETTE_YUV422:
        frame_size = width * height * 2;
        st->codec.pix_fmt = PIX_FMT_YUV422;
        break;
    case VIDEO_PALETTE_RGB24:
        frame_size = width * height * 3;
        st->codec.pix_fmt = PIX_FMT_BGR24; /* NOTE: v4l uses BGR24, not RGB24 ! */
        break;
    default:
        goto fail;
    }
    s->fd = video_fd;
    s->frame_size = frame_size;

    st->codec.codec_type = CODEC_TYPE_VIDEO;
    st->codec.codec_id = CODEC_ID_RAWVIDEO;
    st->codec.width = width;
    st->codec.height = height;
    st->codec.frame_rate = frame_rate;

    return 0;
 fail:
    if (video_fd >= 0)
        close(video_fd);
    av_free(st);
    return -EIO;
}
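/* Queue the next mmap'ed capture buffer, wait for the current one to
 * complete (VIDIOCSYNC) and copy the finished frame into buf. */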
static int v4l_mm_read_picture(VideoData *s, UINT8 *buf)
{
    UINT8 *ptr;

    /* Setup to capture the next frame */
    gb_buf.frame = (gb_frame + 1) % gb_buffers.frames;
    if (ioctl(s->fd, VIDIOCMCAPTURE, &gb_buf) < 0) {
        if (errno == EAGAIN)
            fprintf(stderr,"Cannot Sync\n");
        else
            perror("VIDIOCMCAPTURE");
        return -EIO;
    }

    while (ioctl(s->fd, VIDIOCSYNC, &gb_frame) < 0 &&
           (errno == EAGAIN || errno == EINTR));

    ptr = video_buf + gb_buffers.offsets[gb_frame];
    memcpy(buf, ptr, s->frame_size);

    /* This is now the grabbing frame */
    gb_frame = gb_buf.frame;

    return s->frame_size;
}
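/* Pace the capture to the requested frame rate, then read one raw frame,
 * either through the mmap ring or with a plain read(). */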
static int grab_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
    VideoData *s = s1->priv_data;
    INT64 curtime, delay;
    struct timespec ts;
    int first;
    INT64 per_frame = (INT64_C(1000000) * FRAME_RATE_BASE) / s->frame_rate;
    int dropped = 0;

    /* Calculate the time of the next frame */
    s->time_frame += per_frame;

    /* wait based on the frame rate */
    for(first = 1;; first = 0) {
        curtime = av_gettime();
        delay = s->time_frame - curtime;
        if (delay <= 0) {
            if (delay < -per_frame) {
                /* printf("grabbing is %d frames late (dropping)\n", (int) -(delay / 16666)); */
                dropped = 1;
                s->time_frame += per_frame;
            }
            break;
        }
        ts.tv_sec = delay / 1000000;
        ts.tv_nsec = (delay % 1000000) * 1000;
        nanosleep(&ts, NULL);
    }

    if (av_new_packet(pkt, s->frame_size) < 0)
        return -EIO;

    if (dropped)
        pkt->flags |= PKT_FLAG_DROPPED_FRAME;

    /* read one frame */
    if (s->use_mmap) {
        return v4l_mm_read_picture(s, pkt->data);
    } else {
        if (read(s->fd, pkt->data, pkt->size) != pkt->size)
            return -EIO;
        return s->frame_size;
    }
}
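/* Unmap the capture buffers, restore the saved audio settings and close
 * the device. */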
static int grab_read_close(AVFormatContext *s1)
{
    VideoData *s = s1->priv_data;

    if (s->use_mmap)
        munmap(video_buf, gb_buffers.size);

    /* restore audio settings */
    ioctl(s->fd, VIDIOCSAUDIO, &audio_saved);

    close(s->fd);
    return 0;
}
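/* Positional initializers: name, long name, priv_data_size, read_probe
 * (none needed for a grab device), then the read_header / read_packet /
 * read_close callbacks. */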
static AVInputFormat video_grab_device_format = {
    "video_grab_device",
    "video grab",
    sizeof(VideoData),
    NULL,
    grab_read_header,
    grab_read_packet,
    grab_read_close,
    .flags = AVFMT_NOFILE,
};

/*
 * Done below so we can register the aiw grabber
 * /
int video_grab_init(void)
{
    av_register_input_format(&video_grab_device_format);
    return 0;
}
*/
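/* All-In-Wonder (km read-based) grabber: reads YUYV fields from the
 * device with read() and converts them to planar YUV420P. */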
typedef struct {
    int fd;
    int frame_format; /* see VIDEO_PALETTE_xxx */
    int width, height;
    int frame_rate;
    INT64 time_frame;
    int frame_size;
    int deint;
    int halfw;
    UINT8 *src_mem;
    UINT8 *lum_m4_mem;
} AIWVideoData;
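/* Open the device read-only, force the YUV422 (YUY2) palette and check
 * that the requested size is one of the supported full, double-height
 * (deinterlace) or half-width geometries. */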
static int aiw_grab_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
    AIWVideoData *s = s1->priv_data;
    AVStream *st;
    int width, height;
    int video_fd, frame_size;
    int ret, frame_rate;
    int desired_palette;

    if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
        return -1;

    width = ap->width;
    height = ap->height;
    frame_rate = ap->frame_rate;

    st = av_new_stream(s1, 0);
    if (!st)
        return -ENOMEM;

    s->width = width;
    s->height = height;
    s->frame_rate = frame_rate;

    video_fd = open(v4l_device, O_RDONLY | O_NONBLOCK);
    if (video_fd < 0) {
        perror(v4l_device);
        goto fail;
    }

    if (ioctl(video_fd,VIDIOCGCAP,&video_cap) < 0) {
        perror("VIDIOCGCAP");
        goto fail;
    }

    if (!(video_cap.type & VID_TYPE_CAPTURE)) {
        fprintf(stderr, "Fatal: grab device does not handle capture\n");
        goto fail;
    }

    desired_palette = -1;
    if (st->codec.pix_fmt == PIX_FMT_YUV420P) {
        desired_palette = VIDEO_PALETTE_YUV420P;
    } else if (st->codec.pix_fmt == PIX_FMT_YUV422) {
        desired_palette = VIDEO_PALETTE_YUV422;
    } else if (st->codec.pix_fmt == PIX_FMT_BGR24) {
        desired_palette = VIDEO_PALETTE_RGB24;
    }

    /* unmute audio */

    ret = ioctl(video_fd,VIDIOCGMBUF,&gb_buffers);
    if (ret < 0) {
        /* try to use read based access */
        struct video_window win;
        struct video_picture pict;
        int val;

        win.x = 0;
        win.y = 0;
        win.width = width;
        win.height = height;
        win.chromakey = -1;
        win.flags = 0;

        ioctl(video_fd, VIDIOCSWIN, &win);

        ioctl(video_fd, VIDIOCGPICT, &pict);
#if 0
        printf("v4l: colour=%d hue=%d brightness=%d contrast=%d whiteness=%d\n",
               pict.colour,
               pict.hue,
               pict.brightness,
               pict.contrast,
               pict.whiteness);
#endif
        /* try to choose a suitable video format */
        pict.palette=VIDEO_PALETTE_YUV422;
        ret = ioctl(video_fd, VIDIOCSPICT, &pict);
        if (ret < 0) {
            fprintf(stderr,"Could Not Find YUY2 capture window.\n");
            goto fail;
        }
        if ((width == video_cap.maxwidth && height == video_cap.maxheight) ||
            (width == video_cap.maxwidth && height == video_cap.maxheight*2) ||
            (width == video_cap.maxwidth/2 && height == video_cap.maxheight)) {

            s->deint=0;
            s->halfw=0;
            if (height == video_cap.maxheight*2) s->deint=1;
            if (width == video_cap.maxwidth/2) s->halfw=1;
        } else {
            fprintf(stderr,"\nIncorrect Grab Size Supplied - Supported Sizes Are:\n");
            fprintf(stderr," %dx%d %dx%d %dx%d\n\n",
                    video_cap.maxwidth,video_cap.maxheight,
                    video_cap.maxwidth,video_cap.maxheight*2,
                    video_cap.maxwidth/2,video_cap.maxheight);
            goto fail;
        }

        s->frame_format = pict.palette;

        val = 1;
        ioctl(video_fd, VIDIOCCAPTURE, &val);

        s->time_frame = av_gettime();
    } else {
        fprintf(stderr,"mmap-based capture will not work with this grab.\n");
        goto fail;
    }

    frame_size = (width * height * 3) / 2;
    st->codec.pix_fmt = PIX_FMT_YUV420P;
    s->fd = video_fd;
    s->frame_size = frame_size;

    st->codec.codec_type = CODEC_TYPE_VIDEO;
    st->codec.codec_id = CODEC_ID_RAWVIDEO;
    st->codec.width = width;
    st->codec.height = height;
    st->codec.frame_rate = frame_rate;

    if (s->halfw == 0) {
        s->src_mem = av_malloc(s->width*2);
    } else {
        s->src_mem = av_malloc(s->width*4);
    }
    if (!s->src_mem) goto fail;

    s->lum_m4_mem = av_malloc(s->width);
    if (!s->lum_m4_mem) {
        av_free(s->src_mem);
        goto fail;
    }

    return 0;
 fail:
    if (video_fd >= 0)
        close(video_fd);
    av_free(st);
    return -EIO;
}

//#ifdef HAVE_MMX
//#undef HAVE_MMX
//#endif
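/* Line conversion helpers, in MMX and plain C variants: LINE_WITH_UV and
 * LINE_NO_UV split 16 YUYV pixels (32 bytes) into planar luma and chroma,
 * the _AVG versions additionally average horizontal pixel pairs for the
 * half-width mode, and DEINT_LINE_LUM interpolates the missing luma lines
 * when deinterlacing. */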
#ifdef HAVE_MMX
#include "../libavcodec/i386/mmx.h"

#define LINE_WITH_UV \
        movq_m2r(ptr[0],mm0); \
        movq_m2r(ptr[8],mm1); \
        movq_r2r(mm0, mm4); \
        punpcklbw_r2r(mm1,mm0); \
        punpckhbw_r2r(mm1,mm4); \
        movq_r2r(mm0,mm5); \
        punpcklbw_r2r(mm4,mm0); \
        punpckhbw_r2r(mm4,mm5); \
        movq_r2r(mm0,mm1); \
        punpcklbw_r2r(mm5,mm1); \
        movq_r2m(mm1,lum[0]); \
        movq_m2r(ptr[16],mm2); \
        movq_m2r(ptr[24],mm1); \
        movq_r2r(mm2,mm4); \
        punpcklbw_r2r(mm1,mm2); \
        punpckhbw_r2r(mm1,mm4); \
        movq_r2r(mm2,mm3); \
        punpcklbw_r2r(mm4,mm2); \
        punpckhbw_r2r(mm4,mm3); \
        movq_r2r(mm2,mm1); \
        punpcklbw_r2r(mm3,mm1); \
        movq_r2m(mm1,lum[8]); \
        punpckhdq_r2r(mm2,mm0); \
        punpckhdq_r2r(mm3,mm5); \
        movq_r2m(mm0,cb[0]); \
        movq_r2m(mm5,cr[0]);

#define LINE_NO_UV \
        movq_m2r(ptr[0],mm0);\
        movq_m2r(ptr[8],mm1);\
        movq_r2r(mm0, mm4);\
        punpcklbw_r2r(mm1,mm0); \
        punpckhbw_r2r(mm1,mm4);\
        movq_r2r(mm0,mm5);\
        punpcklbw_r2r(mm4,mm0);\
        punpckhbw_r2r(mm4,mm5);\
        movq_r2r(mm0,mm1);\
        punpcklbw_r2r(mm5,mm1);\
        movq_r2m(mm1,lum[0]);\
        movq_m2r(ptr[16],mm2);\
        movq_m2r(ptr[24],mm1);\
        movq_r2r(mm2,mm4);\
        punpcklbw_r2r(mm1,mm2);\
        punpckhbw_r2r(mm1,mm4);\
        movq_r2r(mm2,mm3);\
        punpcklbw_r2r(mm4,mm2);\
        punpckhbw_r2r(mm4,mm3);\
        movq_r2r(mm2,mm1);\
        punpcklbw_r2r(mm3,mm1);\
        movq_r2m(mm1,lum[8]);

#define LINE_WITHUV_AVG \
        movq_m2r(ptr[0], mm0);\
        movq_m2r(ptr[8], mm1);\
        movq_r2r(mm0, mm4);\
        punpcklbw_r2r(mm1,mm0);\
        punpckhbw_r2r(mm1,mm4);\
        movq_r2r(mm0,mm5);\
        punpcklbw_r2r(mm4,mm0);\
        punpckhbw_r2r(mm4,mm5);\
        movq_r2r(mm0,mm1);\
        movq_r2r(mm5,mm2);\
        punpcklbw_r2r(mm7,mm1);\
        punpcklbw_r2r(mm7,mm2);\
        paddw_r2r(mm6,mm1);\
        paddw_r2r(mm2,mm1);\
        psraw_i2r(1,mm1);\
        packuswb_r2r(mm7,mm1);\
        movd_r2m(mm1,lum[0]);\
        movq_m2r(ptr[16],mm2);\
        movq_m2r(ptr[24],mm1);\
        movq_r2r(mm2,mm4);\
        punpcklbw_r2r(mm1,mm2);\
        punpckhbw_r2r(mm1,mm4);\
        movq_r2r(mm2,mm3);\
        punpcklbw_r2r(mm4,mm2);\
        punpckhbw_r2r(mm4,mm3);\
        movq_r2r(mm2,mm1);\
        movq_r2r(mm3,mm4);\
        punpcklbw_r2r(mm7,mm1);\
        punpcklbw_r2r(mm7,mm4);\
        paddw_r2r(mm6,mm1);\
        paddw_r2r(mm4,mm1);\
        psraw_i2r(1,mm1);\
        packuswb_r2r(mm7,mm1);\
        movd_r2m(mm1,lum[4]);\
        punpckhbw_r2r(mm7,mm0);\
        punpckhbw_r2r(mm7,mm2);\
        paddw_r2r(mm6,mm0);\
        paddw_r2r(mm2,mm0);\
        psraw_i2r(1,mm0);\
        packuswb_r2r(mm7,mm0);\
        punpckhbw_r2r(mm7,mm5);\
        punpckhbw_r2r(mm7,mm3);\
        paddw_r2r(mm6,mm5);\
        paddw_r2r(mm3,mm5);\
        psraw_i2r(1,mm5);\
        packuswb_r2r(mm7,mm5);\
        movd_r2m(mm0,cb[0]);\
        movd_r2m(mm5,cr[0]);

#define LINE_NOUV_AVG \
        movq_m2r(ptr[0],mm0);\
        movq_m2r(ptr[8],mm1);\
        pand_r2r(mm5,mm0);\
        pand_r2r(mm5,mm1);\
        pmaddwd_r2r(mm6,mm0);\
        pmaddwd_r2r(mm6,mm1);\
        packssdw_r2r(mm1,mm0);\
        paddw_r2r(mm6,mm0);\
        psraw_i2r(1,mm0);\
        movq_m2r(ptr[16],mm2);\
        movq_m2r(ptr[24],mm3);\
        pand_r2r(mm5,mm2);\
        pand_r2r(mm5,mm3);\
        pmaddwd_r2r(mm6,mm2);\
        pmaddwd_r2r(mm6,mm3);\
        packssdw_r2r(mm3,mm2);\
        paddw_r2r(mm6,mm2);\
        psraw_i2r(1,mm2);\
        packuswb_r2r(mm2,mm0);\
        movq_r2m(mm0,lum[0]);

#define DEINT_LINE_LUM(ptroff) \
        movd_m2r(lum_m4[(ptroff)],mm0);\
        movd_m2r(lum_m3[(ptroff)],mm1);\
        movd_m2r(lum_m2[(ptroff)],mm2);\
        movd_m2r(lum_m1[(ptroff)],mm3);\
        movd_m2r(lum[(ptroff)],mm4);\
        punpcklbw_r2r(mm7,mm0);\
        movd_r2m(mm2,lum_m4[(ptroff)]);\
        punpcklbw_r2r(mm7,mm1);\
        punpcklbw_r2r(mm7,mm2);\
        punpcklbw_r2r(mm7,mm3);\
        punpcklbw_r2r(mm7,mm4);\
        psllw_i2r(2,mm1);\
        psllw_i2r(1,mm2);\
        paddw_r2r(mm6,mm1);\
        psllw_i2r(2,mm3);\
        paddw_r2r(mm2,mm1);\
        paddw_r2r(mm4,mm0);\
        paddw_r2r(mm3,mm1);\
        psubusw_r2r(mm0,mm1);\
        psrlw_i2r(3,mm1);\
        packuswb_r2r(mm7,mm1);\
        movd_r2m(mm1,lum_m2[(ptroff)]);

#else
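/* Plain C fallbacks of the same conversion macros, used when MMX is not
 * available. */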
#include "../libavcodec/dsputil.h"

#define LINE_WITH_UV \
        lum[0]=ptr[0];lum[1]=ptr[2];lum[2]=ptr[4];lum[3]=ptr[6];\
        cb[0]=ptr[1];cb[1]=ptr[5];\
        cr[0]=ptr[3];cr[1]=ptr[7];\
        lum[4]=ptr[8];lum[5]=ptr[10];lum[6]=ptr[12];lum[7]=ptr[14];\
        cb[2]=ptr[9];cb[3]=ptr[13];\
        cr[2]=ptr[11];cr[3]=ptr[15];\
        lum[8]=ptr[16];lum[9]=ptr[18];lum[10]=ptr[20];lum[11]=ptr[22];\
        cb[4]=ptr[17];cb[5]=ptr[21];\
        cr[4]=ptr[19];cr[5]=ptr[23];\
        lum[12]=ptr[24];lum[13]=ptr[26];lum[14]=ptr[28];lum[15]=ptr[30];\
        cb[6]=ptr[25];cb[7]=ptr[29];\
        cr[6]=ptr[27];cr[7]=ptr[31];

#define LINE_NO_UV \
        lum[0]=ptr[0];lum[1]=ptr[2];lum[2]=ptr[4];lum[3]=ptr[6];\
        lum[4]=ptr[8];lum[5]=ptr[10];lum[6]=ptr[12];lum[7]=ptr[14];\
        lum[8]=ptr[16];lum[9]=ptr[18];lum[10]=ptr[20];lum[11]=ptr[22];\
        lum[12]=ptr[24];lum[13]=ptr[26];lum[14]=ptr[28];lum[15]=ptr[30];

#define LINE_WITHUV_AVG \
        sum=(ptr[0]+ptr[2]+1) >> 1;lum[0]=sum; \
        sum=(ptr[4]+ptr[6]+1) >> 1;lum[1]=sum; \
        sum=(ptr[1]+ptr[5]+1) >> 1;cb[0]=sum; \
        sum=(ptr[3]+ptr[7]+1) >> 1;cr[0]=sum; \
        sum=(ptr[8]+ptr[10]+1) >> 1;lum[2]=sum; \
        sum=(ptr[12]+ptr[14]+1) >> 1;lum[3]=sum; \
        sum=(ptr[9]+ptr[13]+1) >> 1;cb[1]=sum; \
        sum=(ptr[11]+ptr[15]+1) >> 1;cr[1]=sum; \
        sum=(ptr[16]+ptr[18]+1) >> 1;lum[4]=sum; \
        sum=(ptr[20]+ptr[22]+1) >> 1;lum[5]=sum; \
        sum=(ptr[17]+ptr[21]+1) >> 1;cb[2]=sum; \
        sum=(ptr[19]+ptr[23]+1) >> 1;cr[2]=sum; \
        sum=(ptr[24]+ptr[26]+1) >> 1;lum[6]=sum; \
        sum=(ptr[28]+ptr[30]+1) >> 1;lum[7]=sum; \
        sum=(ptr[25]+ptr[29]+1) >> 1;cb[3]=sum; \
        sum=(ptr[27]+ptr[31]+1) >> 1;cr[3]=sum;

#define LINE_NOUV_AVG \
        sum=(ptr[0]+ptr[2]+1) >> 1;lum[0]=sum; \
        sum=(ptr[4]+ptr[6]+1) >> 1;lum[1]=sum; \
        sum=(ptr[8]+ptr[10]+1) >> 1;lum[2]=sum; \
        sum=(ptr[12]+ptr[14]+1) >> 1;lum[3]=sum; \
        sum=(ptr[16]+ptr[18]+1) >> 1;lum[4]=sum; \
        sum=(ptr[20]+ptr[22]+1) >> 1;lum[5]=sum; \
        sum=(ptr[24]+ptr[26]+1) >> 1;lum[6]=sum; \
        sum=(ptr[28]+ptr[30]+1) >> 1;lum[7]=sum;

#define DEINT_LINE_LUM(ptroff) \
        sum=(-lum_m4[(ptroff)]+(lum_m3[(ptroff)]<<2)+(lum_m2[(ptroff)]<<1)+(lum_m1[(ptroff)]<<2)-lum[(ptroff)]); \
        lum_m4[(ptroff)]=lum_m2[(ptroff)];\
        lum_m2[(ptroff)]=cm[(sum+4)>>3];\
        sum=(-lum_m4[(ptroff)+1]+(lum_m3[(ptroff)+1]<<2)+(lum_m2[(ptroff)+1]<<1)+(lum_m1[(ptroff)+1]<<2)-lum[(ptroff)+1]); \
        lum_m4[(ptroff)+1]=lum_m2[(ptroff)+1];\
        lum_m2[(ptroff)+1]=cm[(sum+4)>>3];\
        sum=(-lum_m4[(ptroff)+2]+(lum_m3[(ptroff)+2]<<2)+(lum_m2[(ptroff)+2]<<1)+(lum_m1[(ptroff)+2]<<2)-lum[(ptroff)+2]); \
        lum_m4[(ptroff)+2]=lum_m2[(ptroff)+2];\
        lum_m2[(ptroff)+2]=cm[(sum+4)>>3];\
        sum=(-lum_m4[(ptroff)+3]+(lum_m3[(ptroff)+3]<<2)+(lum_m2[(ptroff)+3]<<1)+(lum_m1[(ptroff)+3]<<2)-lum[(ptroff)+3]); \
        lum_m4[(ptroff)+3]=lum_m2[(ptroff)+3];\
        lum_m2[(ptroff)+3]=cm[(sum+4)>>3];

#endif
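/* Same frame-rate pacing as grab_read_packet, then read the interlaced
 * YUYV fields with read() and convert them into the packet's YUV420P
 * buffer, optionally halving the width or deinterlacing. */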
static int aiw_grab_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
    AIWVideoData *s = s1->priv_data;
    INT64 curtime, delay;
    struct timespec ts;
    int first;
    INT64 per_frame = (INT64_C(1000000) * FRAME_RATE_BASE) / s->frame_rate;
    int dropped = 0;

    /* Calculate the time of the next frame */
    s->time_frame += per_frame;

    /* wait based on the frame rate */
    for(first = 1;; first = 0) {
        curtime = av_gettime();
        delay = s->time_frame - curtime;
        if (delay <= 0) {
            if (delay < -per_frame) {
                /* printf("grabbing is %d frames late (dropping)\n", (int) -(delay / 16666)); */
                dropped = 1;
                s->time_frame += per_frame;
            }
            break;
        }
        ts.tv_sec = delay / 1000000;
        ts.tv_nsec = (delay % 1000000) * 1000;
        nanosleep(&ts, NULL);
    }

    if (av_new_packet(pkt, s->frame_size) < 0)
        return -EIO;

    if (dropped)
        pkt->flags |= PKT_FLAG_DROPPED_FRAME;

    /* read fields */
    {
        UINT8 *ptr, *lum, *cb, *cr;
        int h;
#ifndef HAVE_MMX
        int sum;
#endif
        UINT8* src = s->src_mem;
        UINT8 *ptrend = &src[s->width*2];
        lum=&pkt->data[0];
        cb=&lum[s->width*s->height];
        cr=&cb[(s->width*s->height)/4];
        if (s->deint == 0 && s->halfw == 0) {
            while (read(s->fd,src,s->width*2) < 0) {
                usleep(100);
            }
            for (h = 0; h < s->height-2; h+=2) {
                for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16, cb+=8, cr+=8) {
                    LINE_WITH_UV
                }
                read(s->fd,src,s->width*2);
                for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16) {
                    LINE_NO_UV
                }
                read(s->fd,src,s->width*2);
            }
            /*
             * Do last two lines
             */
            for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16, cb+=8, cr+=8) {
                LINE_WITH_UV
            }
            read(s->fd,src,s->width*2);
            for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16) {
                LINE_NO_UV
            }
            /* drop second field */
            while (read(s->fd,src,s->width*2) < 0) {
                usleep(100);
            }
            for (h = 0; h < s->height - 1; h++) {
                read(s->fd,src,s->width*2);
            }
        } else if (s->halfw == 1) {
#ifdef HAVE_MMX
            mmx_t rounder;
            mmx_t masker;
            rounder.uw[0]=1;
            rounder.uw[1]=1;
            rounder.uw[2]=1;
            rounder.uw[3]=1;
            masker.ub[0]=0xff;
            masker.ub[1]=0;
            masker.ub[2]=0xff;
            masker.ub[3]=0;
            masker.ub[4]=0xff;
            masker.ub[5]=0;
            masker.ub[6]=0xff;
            masker.ub[7]=0;
            pxor_r2r(mm7,mm7);
            movq_m2r(rounder,mm6);
#endif
            while (read(s->fd,src,s->width*4) < 0) {
                usleep(100);
            }
            ptrend = &src[s->width*4];
            for (h = 0; h < s->height-2; h+=2) {
                for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=8, cb+=4, cr+=4) {
                    LINE_WITHUV_AVG
                }
                read(s->fd,src,s->width*4);
#ifdef HAVE_MMX
                movq_m2r(masker,mm5);
#endif
                for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=8) {
                    LINE_NOUV_AVG
                }
                read(s->fd,src,s->width*4);
            }
            /*
             * Do last two lines
             */
            for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=8, cb+=4, cr+=4) {
                LINE_WITHUV_AVG
            }
            read(s->fd,src,s->width*4);
#ifdef HAVE_MMX
            movq_m2r(masker,mm5);
#endif
            for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=8) {
                LINE_NOUV_AVG
            }
            /* drop second field */
            while (read(s->fd,src,s->width*4) < 0) {
                usleep(100);
            }
            for (h = 0; h < s->height - 1; h++) {
                read(s->fd,src,s->width*4);
            }
        } else {
            UINT8 *lum_m1, *lum_m2, *lum_m3, *lum_m4;
#ifdef HAVE_MMX
            mmx_t rounder;
            rounder.uw[0]=4;
            rounder.uw[1]=4;
            rounder.uw[2]=4;
            rounder.uw[3]=4;
            movq_m2r(rounder,mm6);
            pxor_r2r(mm7,mm7);
#else
            UINT8 *cm = cropTbl + MAX_NEG_CROP;
#endif

            /* read two fields and deinterlace them */
            while (read(s->fd,src,s->width*2) < 0) {
                usleep(100);
            }
            for (h = 0; h < (s->height/2)-2; h+=2) {
                for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16, cb+=8, cr+=8) {
                    LINE_WITH_UV
                }
                read(s->fd,src,s->width*2);
                /* skip a luminance line - will be filled in later */
                lum += s->width;
                for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16, cb+=8, cr+=8) {
                    LINE_WITH_UV
                }
                /* skip a luminance line - will be filled in later */
                lum += s->width;
                read(s->fd,src,s->width*2);
            }
            /*
             * Do last two lines
             */
            for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16, cb+=8, cr+=8) {
                LINE_WITH_UV
            }
            /* skip a luminance line - will be filled in later */
            lum += s->width;
            read(s->fd,src,s->width*2);
            for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16, cb+=8, cr+=8) {
                LINE_WITH_UV
            }
            /*
             *
             * SECOND FIELD
             *
             */
            lum=&pkt->data[s->width];
            while (read(s->fd,src,s->width*2) < 0) {
                usleep(10);
            }
            /* First (and last) two lines not interlaced */
            for (h = 0; h < 2; h++) {
                for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16) {
                    LINE_NO_UV
                }
                read(s->fd,src,s->width*2);
                /* skip a luminance line */
                lum += s->width;
            }
            lum_m1=&lum[-s->width];
            lum_m2=&lum_m1[-s->width];
            lum_m3=&lum_m2[-s->width];
            memmove(s->lum_m4_mem,&lum_m3[-s->width],s->width);
            for (; h < (s->height/2)-1; h++) {
                lum_m4=s->lum_m4_mem;
                for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16,lum_m1+=16,lum_m2+=16,lum_m3+=16,lum_m4+=16) {
                    LINE_NO_UV

                    DEINT_LINE_LUM(0)
                    DEINT_LINE_LUM(4)
                    DEINT_LINE_LUM(8)
                    DEINT_LINE_LUM(12)
                }
                read(s->fd,src,s->width*2);
                /* skip a luminance line */
                lum += s->width;
                lum_m1 += s->width;
                lum_m2 += s->width;
                lum_m3 += s->width;
                // lum_m4 += s->width;
            }
            /*
             * Do last line
             */
            lum_m4=s->lum_m4_mem;
            for (ptr = &src[0]; ptr < ptrend; ptr+=32, lum+=16, lum_m1+=16, lum_m2+=16, lum_m3+=16, lum_m4+=16) {
                LINE_NO_UV

                DEINT_LINE_LUM(0)
                DEINT_LINE_LUM(4)
                DEINT_LINE_LUM(8)
                DEINT_LINE_LUM(12)
            }
        }
#ifdef HAVE_MMX
        emms();
#endif
    }
    return s->frame_size;
}
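/* Release the conversion buffers and close the device. */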
static int aiw_grab_read_close(AVFormatContext *s1)
{
    AIWVideoData *s = s1->priv_data;

    close(s->fd);
    av_free(s->lum_m4_mem);
    av_free(s->src_mem);

    return 0;
}

static AVInputFormat aiw_grab_device_format = {
    "aiw_grab_device",
    "All-In-Wonder (km read-based) video grab",
    sizeof(AIWVideoData),
    NULL,
    aiw_grab_read_header,
    aiw_grab_read_packet,
    aiw_grab_read_close,
    .flags = AVFMT_NOFILE,
};

int video_grab_init(void)
{
    av_register_input_format(&video_grab_device_format);
    av_register_input_format(&aiw_grab_device_format);
    return 0;
}