h264_metadata: Add option to delete filler data
[libav.git] / libavdevice / avfoundation.m
1 /*
2 * AVFoundation input device
3 * Copyright (c) 2015 Luca Barbato
4 * Alexandre Lision
5 *
6 * This file is part of Libav.
7 *
8 * Libav is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU Lesser General Public
10 * License as published by the Free Software Foundation; either
11 * version 2.1 of the License, or (at your option) any later version.
12 *
13 * Libav is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * Lesser General Public License for more details.
17 *
18 * You should have received a copy of the GNU Lesser General Public
19 * License along with Libav; if not, write to the Free Software
20 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21 */
22
23 #import <AVFoundation/AVFoundation.h>
24 #include <pthread.h>
25
26 #include "libavformat/avformat.h"
27 #include "libavformat/internal.h"
28
29 #include "libavutil/log.h"
30 #include "libavutil/mathematics.h"
31 #include "libavutil/opt.h"
32 #include "libavutil/parseutils.h"
33 #include "libavutil/pixdesc.h"
34 #include "libavutil/time.h"
35
36 #include "avdevice.h"
37
/* One entry of the libav <-> Core Video pixel format translation table. */
struct AVPixelFormatMap {
    enum AVPixelFormat pix_fmt;   /* libav pixel format */
    OSType core_video_fmt;        /* matching Core Video format constant */
};
42
/* Translation table between libav pixel formats and Core Video pixel
 * format constants.  Terminated by an AV_PIX_FMT_NONE entry; both lookup
 * helpers below stop at that sentinel. */
static const struct AVPixelFormatMap pixel_format_map[] = {
    { AV_PIX_FMT_ABGR,         kCVPixelFormatType_32ABGR                      },
    { AV_PIX_FMT_ARGB,         kCVPixelFormatType_32ARGB                      },
    { AV_PIX_FMT_BGR24,        kCVPixelFormatType_24BGR                       },
    { AV_PIX_FMT_BGR48BE,      kCVPixelFormatType_48RGB                       },
    { AV_PIX_FMT_BGRA,         kCVPixelFormatType_32BGRA                      },
    { AV_PIX_FMT_MONOBLACK,    kCVPixelFormatType_1Monochrome                 },
    { AV_PIX_FMT_NV12,         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange },
    { AV_PIX_FMT_RGB24,        kCVPixelFormatType_24RGB                       },
    { AV_PIX_FMT_RGB555BE,     kCVPixelFormatType_16BE555                     },
    { AV_PIX_FMT_RGB555LE,     kCVPixelFormatType_16LE555                     },
    { AV_PIX_FMT_RGB565BE,     kCVPixelFormatType_16BE565                     },
    { AV_PIX_FMT_RGB565LE,     kCVPixelFormatType_16LE565                     },
    { AV_PIX_FMT_RGBA,         kCVPixelFormatType_32RGBA                      },
    { AV_PIX_FMT_UYVY422,      kCVPixelFormatType_422YpCbCr8                  },
    { AV_PIX_FMT_YUV420P,      kCVPixelFormatType_420YpCbCr8Planar            },
    { AV_PIX_FMT_YUV422P10,    kCVPixelFormatType_422YpCbCr10                 },
    { AV_PIX_FMT_YUV422P16,    kCVPixelFormatType_422YpCbCr16                 },
    { AV_PIX_FMT_YUV444P,      kCVPixelFormatType_444YpCbCr8                  },
    { AV_PIX_FMT_YUV444P10,    kCVPixelFormatType_444YpCbCr10                 },
    { AV_PIX_FMT_YUVA444P,     kCVPixelFormatType_4444YpCbCrA8R               },
    { AV_PIX_FMT_YUVA444P16LE, kCVPixelFormatType_4444AYpCbCr16               },
    { AV_PIX_FMT_YUYV422,      kCVPixelFormatType_422YpCbCr8_yuvs             },
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    /* kCVPixelFormatType_OneComponent8 is only available on 10.8+ */
    { AV_PIX_FMT_GRAY8,        kCVPixelFormatType_OneComponent8               },
#endif
    { AV_PIX_FMT_NONE, 0 }
};
71
/* Map a Core Video pixel format constant to the equivalent libav pixel
 * format.  Returns AV_PIX_FMT_NONE when there is no mapping. */
static enum AVPixelFormat core_video_to_pix_fmt(OSType core_video_fmt)
{
    const struct AVPixelFormatMap *entry;

    for (entry = pixel_format_map; entry->pix_fmt != AV_PIX_FMT_NONE; entry++)
        if (entry->core_video_fmt == core_video_fmt)
            return entry->pix_fmt;

    return AV_PIX_FMT_NONE;
}
80
/* Map a libav pixel format to the equivalent Core Video pixel format
 * constant.  Returns 0 when there is no mapping. */
static OSType pix_fmt_to_core_video(enum AVPixelFormat pix_fmt)
{
    const struct AVPixelFormatMap *entry;

    for (entry = pixel_format_map; entry->pix_fmt != AV_PIX_FMT_NONE; entry++)
        if (entry->pix_fmt == pix_fmt)
            return entry->core_video_fmt;

    return 0;
}
89
/* Private context of the AVFoundation capture demuxer. */
typedef struct AVFoundationCaptureContext {
    AVClass *class;
    /* AVOptions */
    int list_devices;            /* bitmask of device types to list, then exit */
    int list_formats;            /* nonzero: list pixel formats, then exit */
    char *pixel_format;          /* requested pixel format name (may be replaced
                                  * by the negotiated one during setup) */
    char *video_size;            /* String describing video size */
    char *framerate;             /* String describing the framerate */

    int video_stream_index;      /* index of the single video stream we create */
    int width, height;           /* parsed/negotiated frame dimensions */
    AVRational internal_framerate; /* parsed framerate, {0,1} if unset */

    int64_t first_pts;           /* wall-clock time at read_header, pts origin */
    int frames_captured;         /* frames delivered by the capture callback */
    /* frame_lock protects current_frame and frames_captured, which are
     * shared between the capture dispatch queue and the demuxer thread;
     * frame_wait_cond signals arrival of a new frame. */
    pthread_mutex_t frame_lock;
    pthread_cond_t frame_wait_cond;

    /* ARC-compatible pointers to ObjC objects */
    CFTypeRef session;           /* AVCaptureSession */
    CFTypeRef video_output;      /* AVCaptureVideoDataOutput */
    CFTypeRef video_delegate;    /* VideoCapture delegate object */
    CVImageBufferRef current_frame; /* most recent frame, owned (CFRetain'd) */
} AVFoundationCaptureContext;
114
#define AUDIO_DEVICES 1
#define VIDEO_DEVICES 2
/* Parenthesized so the macro is safe inside larger expressions
 * (e.g. "x & ALL_DEVICES" would otherwise parse as "(x & 1) | 2"). */
#define ALL_DEVICES   (AUDIO_DEVICES | VIDEO_DEVICES)

#define OFFSET(x) offsetof(AVFoundationCaptureContext, x)
#define DEC AV_OPT_FLAG_DECODING_PARAM
/* AVOptions exposed by the demuxer.  "list_devices" and "list_formats"
 * make read_header print the requested information and return
 * AVERROR_EXIT instead of starting a capture. */
static const AVOption options[] = {
    { "list_devices", "List available devices and exit", OFFSET(list_devices), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, DEC, "list_devices" },
    { "all", "Show all the supported devices", OFFSET(list_devices), AV_OPT_TYPE_CONST, { .i64 = ALL_DEVICES }, 0, INT_MAX, DEC, "list_devices" },
    { "audio", "Show only the audio devices", OFFSET(list_devices), AV_OPT_TYPE_CONST, { .i64 = AUDIO_DEVICES }, 0, INT_MAX, DEC, "list_devices" },
    { "video", "Show only the video devices", OFFSET(list_devices), AV_OPT_TYPE_CONST, { .i64 = VIDEO_DEVICES }, 0, INT_MAX, DEC, "list_devices" },
    { "list_formats", "List available formats and exit", OFFSET(list_formats), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, DEC, "list_formats" },
    { "pixel_format", "Preferred pixel format", OFFSET(pixel_format), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, DEC },
    { "video_size", "A string describing frame size, such as 640x480 or hd720.", OFFSET(video_size), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, DEC },
    { "framerate", "A string representing desired framerate", OFFSET(framerate), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, DEC },
    { NULL },
};
132
/* Log the uniqueID, localized name and every supported format of each
 * capture device of the given media type. */
static void list_capture_devices_by_type(AVFormatContext *s, NSString *type)
{
    NSArray *available = [AVCaptureDevice devicesWithMediaType:type];

    av_log(s, AV_LOG_INFO, "Type: %s\n", [type UTF8String]);

    for (AVCaptureDevice *dev in available) {
        av_log(s, AV_LOG_INFO, "uniqueID: %s\nname: %s\nformat:\n",
               [[dev uniqueID] UTF8String],
               [[dev localizedName] UTF8String]);

        for (AVCaptureDeviceFormat *fmt in dev.formats)
            av_log(s, AV_LOG_INFO, "\t%s\n",
                   [[NSString stringWithFormat: @ "%@", fmt] UTF8String]);
    }
}
148
/* List audio and/or video devices depending on the list_devices option
 * bitmask, then tell the caller to exit. */
static int avfoundation_list_capture_devices(AVFormatContext *s)
{
    AVFoundationCaptureContext *ctx = s->priv_data;
    int wanted = ctx->list_devices;

    if (wanted & AUDIO_DEVICES)
        list_capture_devices_by_type(s, AVMediaTypeAudio);
    if (wanted & VIDEO_DEVICES)
        list_capture_devices_by_type(s, AVMediaTypeVideo);

    return AVERROR_EXIT;
}
161
/* Print every Core Video pixel format supported by a video data output
 * that also has a libav equivalent, then tell the caller to exit. */
static int list_formats(AVFormatContext *s)
{
    av_log(s, AV_LOG_VERBOSE, "Supported pixel formats (first is more efficient):\n");
    AVCaptureVideoDataOutput *out = [[AVCaptureVideoDataOutput alloc] init];

    for (NSNumber *fmt_number in [out availableVideoCVPixelFormatTypes]) {
        OSType cv_fmt = [fmt_number intValue];
        enum AVPixelFormat pix_fmt = core_video_to_pix_fmt(cv_fmt);

        /* Skip formats libav cannot represent. */
        if (pix_fmt == AV_PIX_FMT_NONE)
            continue;

        av_log(s, AV_LOG_VERBOSE, " %s: %d\n",
               av_get_pix_fmt_name(pix_fmt),
               cv_fmt);
    }
    return AVERROR_EXIT;
}
178
/* Acquire the lock protecting current_frame/frames_captured, shared
 * between the capture dispatch queue and the demuxer thread. */
static void lock_frames(AVFoundationCaptureContext *ctx)
{
    pthread_mutex_lock(&ctx->frame_lock);
}
183
/* Release the lock taken by lock_frames(). */
static void unlock_frames(AVFoundationCaptureContext *ctx)
{
    pthread_mutex_unlock(&ctx->frame_lock);
}
188
/* Sample-buffer delegate: receives captured video frames from the
 * AVCaptureVideoDataOutput and hands them to the demuxer through the
 * shared AVFoundationCaptureContext.  The context is borrowed, not
 * owned, by this object. */
@interface VideoCapture : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
{
    AVFoundationCaptureContext *_context;
}

- (id)initWithContext:(AVFoundationCaptureContext *)context;

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
    fromConnection:(AVCaptureConnection *)connection;

@end
201
@implementation VideoCapture

- (id)initWithContext:(AVFoundationCaptureContext *)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

/* Delegate callback, runs on the capture dispatch queue: replace the
 * previously stored frame with the newly captured one and wake up a
 * reader blocked in avfoundation_read_packet(). */
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
    fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef buf;
    lock_frames(_context);

    if (_context->current_frame != nil) {
        CFRelease(_context->current_frame);
        /* Clear the pointer so a bail-out below cannot leave a dangling
         * reference that would be double-released later. */
        _context->current_frame = nil;
    }

    buf = CMSampleBufferGetImageBuffer(videoFrame);
    if (!buf) {
        /* Must not return while still holding the mutex, otherwise the
         * next callback (and the demuxer thread) deadlock. */
        unlock_frames(_context);
        return;
    }

    CFRetain(buf);

    _context->current_frame = buf;

    /* Update the counter under the lock so get_video_config()'s poll of
     * frames_captured observes a consistent value. */
    ++_context->frames_captured;

    pthread_cond_signal(&_context->frame_wait_cond);

    unlock_frames(_context);
}

@end
239
/**
 * Configure the video device.
 *
 * Pick a capture format matching the requested video size (or the first
 * reported format when no size was requested) and, if a framerate was
 * requested, a frame rate range containing it; otherwise the maximum
 * rate of the chosen format's first range is used.  On success the
 * selection is applied to the device, which is left locked for
 * configuration (unlocked later in setup_streams()).
 *
 * Returns true on success, false when no matching format/framerate was
 * found or the device could not be locked.
 */
static bool configure_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFoundationCaptureContext *ctx = s->priv_data;
    AVCaptureDeviceFormat *selected_format = nil;
    AVFrameRateRange *selected_range = nil;
    double framerate = av_q2d(ctx->internal_framerate);
    /* Tolerance for floating point framerate comparisons. */
    double epsilon = 0.00000001;

    for (AVCaptureDeviceFormat *format in[video_device formats]) {
        CMFormatDescriptionRef formatDescription;
        CMVideoDimensions dimensions;

        formatDescription = (CMFormatDescriptionRef)format.formatDescription;
        dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

        /* Match any size when none was requested; note that the first
         * candidate overwrites ctx->width/height, so subsequent
         * iterations only consider formats of that same size. */
        if ((ctx->width == 0 && ctx->height == 0) ||
            (dimensions.width == ctx->width && dimensions.height == ctx->height)) {
            av_log(s, AV_LOG_VERBOSE, "Trying video size %dx%d\n",
                   dimensions.width, dimensions.height);
            ctx->width = dimensions.width;
            ctx->height = dimensions.height;
            selected_format = format;
            if (framerate) {
                av_log(s, AV_LOG_VERBOSE, "Checking support for framerate %f\n",
                       framerate);
                /* Accept the first range that brackets the requested rate. */
                for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges)
                    if (range.minFrameRate <= (framerate + epsilon) &&
                        range.maxFrameRate >= (framerate - epsilon)) {
                        selected_range = range;
                        break;
                    }
            } else {
                /* No rate requested: take the fastest rate of this
                 * format's first range and stop searching. */
                selected_range = format.videoSupportedFrameRateRanges[0];
                framerate = selected_range.maxFrameRate;
                break;
            }

            if (selected_format && selected_range)
                break;
        }
    }

    if (!selected_format) {
        av_log(s, AV_LOG_ERROR, "Selected video size (%dx%d) is not supported by the device\n",
               ctx->width, ctx->height);
        return false;
    } else {
        av_log(s, AV_LOG_VERBOSE, "Setting video size to %dx%d\n",
               ctx->width, ctx->height);
    }

    if (framerate && !selected_range) {
        av_log(s, AV_LOG_ERROR, "Selected framerate (%f) is not supported by the device\n",
               framerate);
        return false;
    } else {
        av_log(s, AV_LOG_VERBOSE, "Setting framerate to %f\n",
               framerate);
    }

    if ([video_device lockForConfiguration : NULL] == YES) {
        [video_device setActiveFormat : selected_format];
        /* NOTE(review): CMTimeMake truncates the double framerate to an
         * int, so non-integer rates (e.g. 29.97) lose precision here —
         * consider a rational timescale; confirm intended behavior. */
        [video_device setActiveVideoMinFrameDuration : CMTimeMake(1, framerate)];
        [video_device setActiveVideoMaxFrameDuration : CMTimeMake(1, framerate)];
    } else {
        av_log(s, AV_LOG_ERROR, "Could not lock device for configuration\n");
        return false;
    }
    return true;
}
313
/* Log every size/framerate-range combination the device supports; used
 * as a hint after a failed device configuration. */
static void print_supported_formats(AVFormatContext *s, AVCaptureDevice *device)
{
    av_log(s, AV_LOG_WARNING, "Supported modes:\n");

    for (AVCaptureDeviceFormat *fmt in [device formats]) {
        CMFormatDescriptionRef desc = (CMFormatDescriptionRef)fmt.formatDescription;
        CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(desc);

        for (AVFrameRateRange *range in fmt.videoSupportedFrameRateRanges)
            av_log(s, AV_LOG_WARNING, "  %dx%d@[%f %f]fps\n",
                   dim.width, dim.height,
                   range.minFrameRate, range.maxFrameRate);
    }
}
330
/**
 * Attach a capture device to the session: configure the device, add its
 * input, and for video devices create a video data output with a
 * negotiated pixel format and install the frame delegate.
 *
 * On success ctx->video_output and ctx->video_delegate hold retained
 * references (released in destroy_context()).  Returns 0 on success or
 * a negative AVERROR code.
 */
static int setup_stream(AVFormatContext *s, AVCaptureDevice *device)
{
    AVFoundationCaptureContext *ctx = s->priv_data;
    NSError *__autoreleasing error = nil;
    AVCaptureDeviceInput *input;
    AVCaptureSession *session = (__bridge AVCaptureSession *)ctx->session;

    av_log(s, AV_LOG_VERBOSE, "Setting up stream for device %s\n", [[device uniqueID] UTF8String]);

    if (!configure_video_device(s, device)) {
        av_log(s, AV_LOG_ERROR, "device configuration failed\n");
        print_supported_formats(s, device);
        return AVERROR(EINVAL);
    }

    // add the input devices
    input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                  error:&error];
    if (!input) {
        av_log(s, AV_LOG_ERROR, "%s\n",
               [[error localizedDescription] UTF8String]);
        return AVERROR_UNKNOWN;
    }

    if ([session canAddInput : input]) {
        [session addInput : input];
    } else {
        av_log(s, AV_LOG_ERROR, "Cannot add video input to capture session\n");
        return AVERROR(EINVAL);
    }

    // add the output devices
    if ([device hasMediaType : AVMediaTypeVideo]) {
        AVCaptureVideoDataOutput *out = [[AVCaptureVideoDataOutput alloc] init];
        NSNumber *core_video_fmt = nil;
        if (!out) {
            av_log(s, AV_LOG_ERROR, "Failed to init AV video output\n");
            return AVERROR(EINVAL);
        }

        [out setAlwaysDiscardsLateVideoFrames : YES];

        if (ctx->pixel_format) {
            // Try to use specified pixel format
            core_video_fmt = [NSNumber numberWithInt:pix_fmt_to_core_video(av_get_pix_fmt(ctx->pixel_format))];
            if ([[out availableVideoCVPixelFormatTypes] indexOfObject : core_video_fmt] != NSNotFound) {
                av_log(s, AV_LOG_VERBOSE, "Pixel format %s supported!\n", ctx->pixel_format);
            } else {
                core_video_fmt = nil;
            }
        }

        if (!ctx->pixel_format || !core_video_fmt) {
            av_log(s, AV_LOG_VERBOSE, "Pixel format not supported or not provided, overriding...\n");
            for (NSNumber *cv_pixel_format in[out availableVideoCVPixelFormatTypes]) {
                OSType cv_fmt = [cv_pixel_format intValue];
                enum AVPixelFormat pix_fmt = core_video_to_pix_fmt(cv_fmt);
                // Use the first one in the list, it will be the most effective
                if (pix_fmt != AV_PIX_FMT_NONE) {
                    core_video_fmt = cv_pixel_format;
                    /* Free any user-supplied (but unsupported) string
                     * before replacing it, otherwise it is leaked. */
                    av_freep(&ctx->pixel_format);
                    ctx->pixel_format = av_strdup(av_get_pix_fmt_name(pix_fmt));
                    if (!ctx->pixel_format)
                        return AVERROR(ENOMEM);
                    break;
                }
            }
        }

        // fail if there is no appropriate pixel format
        if (!core_video_fmt) {
            return AVERROR(EINVAL);
        } else {
            av_log(s, AV_LOG_VERBOSE, "Using %s.\n",
                   ctx->pixel_format);
        }

        NSDictionary *capture_dict = [NSDictionary dictionaryWithObject:core_video_fmt
                                                                 forKey:(const NSString *)kCVPixelBufferPixelFormatTypeKey];
        [out setVideoSettings : capture_dict];

        VideoCapture *delegate = [[VideoCapture alloc] initWithContext:ctx];

        dispatch_queue_t queue = dispatch_queue_create("avf_queue", NULL);
        [out setSampleBufferDelegate : delegate queue : queue];

        if ([session canAddOutput : out]) {
            [session addOutput : out];
            /* Keep retained references so the objects outlive this scope;
             * balanced by CFRelease in destroy_context(). */
            ctx->video_output   = (__bridge_retained CFTypeRef)out;
            ctx->video_delegate = (__bridge_retained CFTypeRef)delegate;
        } else {
            av_log(s, AV_LOG_ERROR, "can't add video output to capture session\n");
            return AVERROR(EINVAL);
        }
    }

    return 0;
}
426
/* Create the single video stream and fill its codec parameters from the
 * first captured frame.  Blocks (spinning the run loop) until the
 * capture callback has delivered at least one frame. */
static int get_video_config(AVFormatContext *s)
{
    AVFoundationCaptureContext *ctx = (AVFoundationCaptureContext *)s->priv_data;
    CVImageBufferRef image_buffer;
    CGSize image_buffer_size;
    AVStream *stream = avformat_new_stream(s, NULL);

    if (!stream) {
        av_log(s, AV_LOG_ERROR, "Failed to create AVStream\n");
        return AVERROR(EINVAL);
    }

    // Take stream info from the first frame.
    // NOTE(review): frames_captured is polled here without holding
    // frame_lock — racy but tolerated, the loop only needs eventual
    // visibility of a nonzero value.
    while (ctx->frames_captured < 1)
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);

    lock_frames(ctx);

    ctx->video_stream_index = stream->index;

    /* Timestamps are produced in microseconds (see read_packet). */
    avpriv_set_pts_info(stream, 64, 1, 1000000);

    image_buffer = ctx->current_frame;
    image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);

    stream->codecpar->codec_id = AV_CODEC_ID_RAWVIDEO;
    stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
    stream->codecpar->width = (int)image_buffer_size.width;
    stream->codecpar->height = (int)image_buffer_size.height;
    stream->codecpar->format = av_get_pix_fmt(ctx->pixel_format);

    /* The probe frame is consumed; the next read_packet waits for a
     * fresh one. */
    CFRelease(ctx->current_frame);
    ctx->current_frame = nil;

    unlock_frames(ctx);

    return 0;
}
465
/* Stop the capture session and release every resource owned by the
 * context.  Safe to call with partially initialized state. */
static void destroy_context(AVFoundationCaptureContext *ctx)
{
    AVCaptureSession *session = (__bridge AVCaptureSession *)ctx->session;
    [session stopRunning];

    /* Balance the __bridge_retained transfers performed during setup;
     * without these CFRelease calls the session, the video output and
     * the delegate are leaked. */
    if (ctx->session)
        CFRelease(ctx->session);
    ctx->session = NULL;

    if (ctx->video_output)
        CFRelease(ctx->video_output);
    ctx->video_output = NULL;

    if (ctx->video_delegate)
        CFRelease(ctx->video_delegate);
    ctx->video_delegate = NULL;

    pthread_mutex_destroy(&ctx->frame_lock);
    pthread_cond_destroy(&ctx->frame_wait_cond);

    if (ctx->current_frame) {
        CFRelease(ctx->current_frame);
        ctx->current_frame = NULL;
    }
}
480
/* Fall back to the system default capture device (video only for now)
 * and set up a stream on it. */
static int setup_default_stream(AVFormatContext *s)
{
    for (NSString *media_type in @[AVMediaTypeVideo]) {
        AVCaptureDevice *dev = [AVCaptureDevice defaultDeviceWithMediaType:media_type];
        if (!dev)
            continue;
        av_log(s, AV_LOG_VERBOSE, "Using default device %s\n",
               [[dev uniqueID] UTF8String]);
        return setup_stream(s, dev);
    }
    return AVERROR(EINVAL);
}
494
/**
 * Try to open device given in filename
 * Two supported formats: "device_unique_id" or "[device_unique_id]"
 *
 * Returns the matching AVCaptureDevice, or NULL when the name cannot be
 * resolved.
 */
static AVCaptureDevice *create_device(AVFormatContext *s)
{
    NSString *filename;
    NSError *__autoreleasing error = nil;
    NSRegularExpression *exp;
    NSArray *matches;
    AVCaptureDevice *device;

    filename = [NSString stringWithFormat:@ "%s", s->filename];

    /* First try the raw string as a unique ID. */
    if ((device = [AVCaptureDevice deviceWithUniqueID:filename])) {
        av_log(s, AV_LOG_VERBOSE, "Device with name %s found\n", [filename UTF8String]);
        return device;
    }

    // Remove '[]' from the device name
    NSString *pat = @"(?<=\\[).*?(?=\\])";
    exp = [NSRegularExpression regularExpressionWithPattern:pat
                                                    options:0
                                                      error:&error];
    if (!exp) {
        av_log(s, AV_LOG_ERROR, "%s\n",
               [[error localizedDescription] UTF8String]);
        return NULL;
    }

    matches = [exp matchesInString:filename options:0
                             range:NSMakeRange(0, [filename length])];

    if (matches.count > 0) {
        /* Only the first bracketed ID is considered; the loop returns on
         * its first iteration either way. */
        for (NSTextCheckingResult *match in matches) {
            NSRange range = [match rangeAtIndex:0];
            NSString *uniqueID = [filename substringWithRange:NSMakeRange(range.location, range.length)];
            av_log(s, AV_LOG_VERBOSE, "opening device with ID: %s\n", [uniqueID UTF8String]);
            if (!(device = [AVCaptureDevice deviceWithUniqueID:uniqueID])) {
                /* Terminate the log line: the original message lacked the
                 * trailing newline every other av_log in this file has. */
                av_log(s, AV_LOG_ERROR, "Device with name %s not found\n", [filename UTF8String]);
                return NULL;
            }
            return device;
        }
    }
    return NULL;
}
542
/* Initialize locking, create the capture session, attach the requested
 * (or default) device, start the session and probe the video stream
 * configuration.  Returns 0 on success or a negative AVERROR code. */
static int setup_streams(AVFormatContext *s)
{
    AVFoundationCaptureContext *ctx = s->priv_data;
    int ret;
    /* Must be initialized: on the "default" path below no device is
     * assigned here, and messaging an uninitialized pointer at the
     * unlockForConfiguration call is undefined behavior.  Messaging nil
     * is a safe no-op. */
    AVCaptureDevice *device = nil;

    pthread_mutex_init(&ctx->frame_lock, NULL);
    pthread_cond_init(&ctx->frame_wait_cond, NULL);

    /* Retained reference, released in destroy_context(). */
    ctx->session = (__bridge_retained CFTypeRef)[[AVCaptureSession alloc] init];

    if (!strncmp(s->filename, "default", 7)) {
        ret = setup_default_stream(s);
    } else {
        device = create_device(s);
        if (device) {
            ret = setup_stream(s, device);
        } else {
            av_log(s, AV_LOG_ERROR, "No matches for %s\n", s->filename);
            ret = setup_default_stream(s);
        }
    }

    if (ret < 0) {
        av_log(s, AV_LOG_ERROR, "No device could be added\n");
        return ret;
    }

    av_log(s, AV_LOG_VERBOSE, "Starting session!\n");
    [(__bridge AVCaptureSession *)ctx->session startRunning];

    // Session is started, unlock device
    [device unlockForConfiguration];

    av_log(s, AV_LOG_VERBOSE, "Checking video config\n");
    if (get_video_config(s)) {
        destroy_context(ctx);
        return AVERROR(EIO);
    }

    return 0;
}
585
/* Demuxer read_header callback: record the capture start time, handle
 * the list_devices/list_formats shortcuts, validate the user options
 * and start the capture session. */
static int avfoundation_read_header(AVFormatContext *s)
{
    AVFoundationCaptureContext *ctx = s->priv_data;

    ctx->first_pts = av_gettime();
    ctx->internal_framerate = (AVRational) { 0, 1 };

    if (ctx->list_devices)
        return avfoundation_list_capture_devices(s);
    if (ctx->list_formats)
        return list_formats(s);

    if (ctx->pixel_format &&
        av_get_pix_fmt(ctx->pixel_format) == AV_PIX_FMT_NONE) {
        av_log(s, AV_LOG_ERROR, "No such input format: %s.\n",
               ctx->pixel_format);
        return AVERROR(EINVAL);
    }

    if (ctx->video_size &&
        av_parse_video_size(&ctx->width, &ctx->height, ctx->video_size) < 0) {
        av_log(s, AV_LOG_ERROR, "Could not parse video size '%s'.\n",
               ctx->video_size);
        return AVERROR(EINVAL);
    }

    if (ctx->framerate &&
        av_parse_video_rate(&ctx->internal_framerate, ctx->framerate) < 0) {
        av_log(s, AV_LOG_ERROR, "Could not parse framerate '%s'.\n",
               ctx->framerate);
        return AVERROR(EINVAL);
    }

    return setup_streams(s);
}
624
/* Demuxer read_packet callback: wait for the capture delegate to
 * deliver a frame, then copy its pixels into a new packet stamped with
 * a wall-clock pts (microseconds since read_header). */
static int avfoundation_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVFoundationCaptureContext *ctx = (AVFoundationCaptureContext *)s->priv_data;

    do {
        lock_frames(ctx);

        if (ctx->current_frame != nil) {
            if (av_new_packet(pkt, (int)CVPixelBufferGetDataSize(ctx->current_frame)) < 0) {
                /* Release the mutex before bailing out, otherwise the
                 * capture callback deadlocks on the next frame. */
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_pts,
                                               AV_TIME_BASE_Q,
                                               (AVRational) {1, 1000000 });
            pkt->stream_index = ctx->video_stream_index;
            pkt->flags |= AV_PKT_FLAG_KEY;

            CVPixelBufferLockBaseAddress(ctx->current_frame, 0);

            void *data = CVPixelBufferGetBaseAddress(ctx->current_frame);
            memcpy(pkt->data, data, pkt->size);

            CVPixelBufferUnlockBaseAddress(ctx->current_frame, 0);
            CFRelease(ctx->current_frame);
            ctx->current_frame = nil;
        } else {
            /* No frame yet: mark the packet empty and sleep until the
             * delegate signals a new frame. */
            pkt->data = NULL;
            pthread_cond_wait(&ctx->frame_wait_cond, &ctx->frame_lock);
        }

        unlock_frames(ctx);
    } while (!pkt->data);

    return 0;
}
661
/* Demuxer read_close callback: tear down the capture session and free
 * all context resources. */
static int avfoundation_read_close(AVFormatContext *s)
{
    AVFoundationCaptureContext *ctx = s->priv_data;

    av_log(s, AV_LOG_VERBOSE, "Closing session...\n");
    destroy_context(ctx);
    return 0;
}
669
/* AVClass wiring the options table into the demuxer's private context. */
static const AVClass avfoundation_class = {
    .class_name = "AVFoundation AVCaptureDevice indev",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};
676
/* Input device definition registered with libavdevice.  AVFMT_NOFILE:
 * the "filename" is a device identifier, not a real file. */
AVInputFormat ff_avfoundation_demuxer = {
    .name           = "avfoundation",
    .long_name      = NULL_IF_CONFIG_SMALL("AVFoundation AVCaptureDevice grab"),
    .priv_data_size = sizeof(AVFoundationCaptureContext),
    .read_header    = avfoundation_read_header,
    .read_packet    = avfoundation_read_packet,
    .read_close     = avfoundation_read_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &avfoundation_class,
};