/*
 * Linux video grab interface
 * Copyright (c) 2000,2001 Gerard Lantau.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */
#include "avformat.h"
#include <linux/videodev.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <time.h>
#include <errno.h>   /* errno, EAGAIN, EINTR, EIO, ENOMEM used below */

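/* Video4Linux (v4l1) frame grabber exposed as a libav input device format:
   grab_read_header() opens and configures the grab device,
   grab_read_packet() returns one raw video frame per packet, and
   grab_read_close() restores the device state and closes it. */
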
typedef struct {
    int fd;
    int frame_format;  /* see VIDEO_PALETTE_xxx */
    int use_mmap;
    int width, height;
    int frame_rate;    /* frames per second, in FRAME_RATE_BASE units */
    INT64 time_frame;  /* time (in us) at which the next frame is due */
    int frame_size;    /* size in bytes of one frame in the chosen palette */
} VideoData;

const char *v4l_device = "/dev/video";

/* XXX: move all that to the context */

static struct video_capability video_cap;
static UINT8 *video_buf;
static struct video_mbuf gb_buffers;
static struct video_mmap gb_buf;
static struct video_audio audio, audio_saved;
static int gb_frame = 0;

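/* Open the grab device, negotiate a capture palette and start grabbing.
   Two capture paths are supported: memory-mapped capture (VIDIOCMCAPTURE /
   VIDIOCSYNC) when the driver implements VIDIOCGMBUF, and plain read()
   based capture otherwise. */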
static int grab_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
    VideoData *s;
    AVStream *st;
    int width, height;
    int video_fd, frame_size;
    int ret, frame_rate;
    int desired_palette;

    if (!ap || ap->width <= 0 || ap->height <= 0 || ap->frame_rate <= 0)
        return -1;

    width = ap->width;
    height = ap->height;
    frame_rate = ap->frame_rate;

    s = av_mallocz(sizeof(VideoData));
    if (!s)
        return -ENOMEM;
    st = av_mallocz(sizeof(AVStream));
    if (!st) {
        free(s);
        return -ENOMEM;
    }
    s1->priv_data = s;
    s1->nb_streams = 1;
    s1->streams[0] = st;

    s->width = width;
    s->height = height;
    s->frame_rate = frame_rate;

    video_fd = open(v4l_device, O_RDWR);
    if (video_fd < 0) {
        perror(v4l_device);
        goto fail;
    }

    if (ioctl(video_fd, VIDIOCGCAP, &video_cap) < 0) {
        perror("VIDIOCGCAP");
        goto fail;
    }

    if (!(video_cap.type & VID_TYPE_CAPTURE)) {
        fprintf(stderr, "Fatal: grab device does not handle capture\n");
        goto fail;
    }

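    /* Map the stream's pixel format to a preferred v4l palette; -1 means
       no preference, in which case the fallback ladders below pick whatever
       the driver accepts. */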
    desired_palette = -1;
    if (st->codec.pix_fmt == PIX_FMT_YUV420P) {
        desired_palette = VIDEO_PALETTE_YUV420P;
    } else if (st->codec.pix_fmt == PIX_FMT_YUV422) {
        desired_palette = VIDEO_PALETTE_YUV422;
    } else if (st->codec.pix_fmt == PIX_FMT_BGR24) {
        desired_palette = VIDEO_PALETTE_RGB24;
    }

    /* unmute audio */
    ioctl(video_fd, VIDIOCGAUDIO, &audio);
    memcpy(&audio_saved, &audio, sizeof(audio));
    audio.flags &= ~VIDEO_AUDIO_MUTE;
    ioctl(video_fd, VIDIOCSAUDIO, &audio);

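    /* Probe for mmap-based capture: VIDIOCGMBUF reports the driver's
       capture buffers. If the ioctl fails, fall back to read() capture. */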
    ret = ioctl(video_fd, VIDIOCGMBUF, &gb_buffers);
    if (ret < 0) {
        /* try to use read based access */
        struct video_window win;
        struct video_picture pict;
        int val;

        win.x = 0;
        win.y = 0;
        win.width = width;
        win.height = height;
        win.chromakey = -1;
        win.flags = 0;

        ioctl(video_fd, VIDIOCSWIN, &win);

        ioctl(video_fd, VIDIOCGPICT, &pict);
#if 0
        printf("v4l: colour=%d hue=%d brightness=%d contrast=%d whiteness=%d\n",
               pict.colour,
               pict.hue,
               pict.brightness,
               pict.contrast,
               pict.whiteness);
#endif
        /* try to choose a suitable video format */
        pict.palette = desired_palette;
        if (desired_palette == -1 || (ret = ioctl(video_fd, VIDIOCSPICT, &pict)) < 0) {
            pict.palette = VIDEO_PALETTE_YUV420P;
            ret = ioctl(video_fd, VIDIOCSPICT, &pict);
            if (ret < 0) {
                pict.palette = VIDEO_PALETTE_YUV422;
                ret = ioctl(video_fd, VIDIOCSPICT, &pict);
                if (ret < 0) {
                    pict.palette = VIDEO_PALETTE_RGB24;
                    ret = ioctl(video_fd, VIDIOCSPICT, &pict);
                    if (ret < 0)
                        goto fail1;
                }
            }
        }

        s->frame_format = pict.palette;

        val = 1;
        ioctl(video_fd, VIDIOCCAPTURE, &val);

        s->time_frame = gettime();
        s->use_mmap = 0;
    } else {
        video_buf = mmap(0, gb_buffers.size, PROT_READ | PROT_WRITE, MAP_SHARED, video_fd, 0);
        if (video_buf == MAP_FAILED) {
            perror("mmap");
            goto fail;
        }
        gb_frame = 0;
        s->time_frame = gettime();

        /* start to grab the first frame */
        gb_buf.frame = gb_frame % gb_buffers.frames;
        gb_buf.height = height;
        gb_buf.width = width;
        gb_buf.format = desired_palette;

        if (desired_palette == -1 || (ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf)) < 0) {
            gb_buf.format = VIDEO_PALETTE_YUV420P;

            ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
            if (ret < 0 && errno != EAGAIN) {
                /* try YUV422 */
                gb_buf.format = VIDEO_PALETTE_YUV422;

                ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
                if (ret < 0 && errno != EAGAIN) {
                    /* try RGB24 */
                    gb_buf.format = VIDEO_PALETTE_RGB24;
                    ret = ioctl(video_fd, VIDIOCMCAPTURE, &gb_buf);
                }
            }
        }
        if (ret < 0) {
            if (errno != EAGAIN) {
            fail1:
                fprintf(stderr, "Fatal: grab device does not support suitable format\n");
            } else {
                fprintf(stderr, "Fatal: grab device does not receive any video signal\n");
            }
            goto fail;
        }
        s->frame_format = gb_buf.format;
        s->use_mmap = 1;
    }

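    /* Derive the per-frame buffer size and the libav pixel format from
       the palette that was actually negotiated above. */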
    switch(s->frame_format) {
    case VIDEO_PALETTE_YUV420P:
        frame_size = (width * height * 3) / 2;
        st->codec.pix_fmt = PIX_FMT_YUV420P;
        break;
    case VIDEO_PALETTE_YUV422:
        frame_size = width * height * 2;
        st->codec.pix_fmt = PIX_FMT_YUV422;
        break;
    case VIDEO_PALETTE_RGB24:
        frame_size = width * height * 3;
        st->codec.pix_fmt = PIX_FMT_BGR24; /* NOTE: v4l uses BGR24, not RGB24 ! */
        break;
    default:
        goto fail;
    }
    s->fd = video_fd;
    s->frame_size = frame_size;

    st->codec.codec_id = CODEC_ID_RAWVIDEO;
    st->codec.width = width;
    st->codec.height = height;
    st->codec.frame_rate = frame_rate;

    return 0;
 fail:
    if (video_fd >= 0)
        close(video_fd);
    free(st);
    free(s);
    return -EIO;
}

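/* mmap capture path: queue the next buffer with VIDIOCMCAPTURE, wait for
   the current one with VIDIOCSYNC, then copy the finished frame out of the
   mmap'ed buffer. Returns the frame size in bytes, or -EIO on error. */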
static int v4l_mm_read_picture(VideoData *s, UINT8 *buf)
{
    UINT8 *ptr;
    struct timeval tv_s, tv_e;
    int delay;

    /* Setup to capture the next frame */
    gb_buf.frame = (gb_frame + 1) % gb_buffers.frames;
    if (ioctl(s->fd, VIDIOCMCAPTURE, &gb_buf) < 0) {
        if (errno == EAGAIN)
            fprintf(stderr, "Cannot Sync\n");
        else
            perror("VIDIOCMCAPTURE");
        return -EIO;
    }

    gettimeofday(&tv_s, 0);

    while (ioctl(s->fd, VIDIOCSYNC, &gb_frame) < 0 &&
           (errno == EAGAIN || errno == EINTR));

    /*
    gettimeofday(&tv_e, 0);

    delay = (tv_e.tv_sec - tv_s.tv_sec) * 1000000 + tv_e.tv_usec - tv_s.tv_usec;
    if (delay > 10000)
        printf("VIDIOCSYNC took %d us\n", delay);
    */

    ptr = video_buf + gb_buffers.offsets[gb_frame];
    memcpy(buf, ptr, s->frame_size);

    /* This is now the grabbing frame */
    gb_frame = gb_buf.frame;

    return s->frame_size;
}

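/* Deliver one raw video frame per packet, paced in software: the next
   capture time is kept in s->time_frame (in microseconds) and advanced by
   one frame period per call; the call sleeps until that time and flags the
   packet as dropped when grabbing has fallen more than a frame behind. */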
static int grab_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
    VideoData *s = s1->priv_data;
    INT64 curtime, delay;
    struct timespec ts;
    int first;
    INT64 per_frame = (INT64_C(1000000) * FRAME_RATE_BASE) / s->frame_rate;
    int dropped = 0;

    /* Calculate the time of the next frame */
    s->time_frame += per_frame;

    /* wait based on the frame rate */
    for(first = 1;; first = 0) {
        curtime = gettime();
        delay = s->time_frame - curtime;
        if (delay <= 0) {
            if (delay < -per_frame) {
                /* printf("grabbing is %d frames late (dropping)\n", (int) -(delay / 16666)); */
                dropped = 1;
                s->time_frame += per_frame;
            }
            break;
        }
        ts.tv_sec = delay / 1000000;
        ts.tv_nsec = (delay % 1000000) * 1000;
        nanosleep(&ts, NULL);
    }

    if (av_new_packet(pkt, s->frame_size) < 0)
        return -EIO;

    if (dropped)
        pkt->flags |= PKT_FLAG_DROPPED_FRAME;

    /* read one frame */
    if (s->use_mmap) {
        return v4l_mm_read_picture(s, pkt->data);
    } else {
        if (read(s->fd, pkt->data, pkt->size) != pkt->size)
            return -EIO;
        return s->frame_size;
    }
}

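/* Restore the audio flags saved at open time and close the grab device. */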
static int grab_read_close(AVFormatContext *s1)
{
    VideoData *s = s1->priv_data;

    /* restore audio settings */
    ioctl(s->fd, VIDIOCSAUDIO, &audio_saved);

    close(s->fd);
    free(s);
    return 0;
}

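/* Input "device format" entry for the v4l grabber. By position in this
   era's AVFormat struct, the initializers below are presumably: name,
   description, MIME type, file extensions, default audio and video codec,
   the three write callbacks (unused for a grab device), the three read
   callbacks, an optional read_seek, and the format flags (AVFMT_NOFILE:
   the device does its own I/O, so no byte-stream context is opened). */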
AVFormat video_grab_device_format = {
    "video_grab_device",
    "video grab",
    "",
    "",
    CODEC_ID_NONE,
    CODEC_ID_NONE,
    NULL,
    NULL,
    NULL,

    grab_read_header,
    grab_read_packet,
    grab_read_close,
    NULL,
    AVFMT_NOFILE,
};