Details | Last modification | View Log | RSS feed
Rev | Author | Line No. | Line |
---|---|---|---|
6147 | serge | 1 | /* |
2 | * QTKit input device |
||
3 | * Copyright (c) 2013 Vadim Kalinsky |
||
4 | * |
||
5 | * This file is part of FFmpeg. |
||
6 | * |
||
7 | * FFmpeg is free software; you can redistribute it and/or |
||
8 | * modify it under the terms of the GNU Lesser General Public |
||
9 | * License as published by the Free Software Foundation; either |
||
10 | * version 2.1 of the License, or (at your option) any later version. |
||
11 | * |
||
12 | * FFmpeg is distributed in the hope that it will be useful, |
||
13 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
||
14 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
||
15 | * Lesser General Public License for more details. |
||
16 | * |
||
17 | * You should have received a copy of the GNU Lesser General Public |
||
18 | * License along with FFmpeg; if not, write to the Free Software |
||
19 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
||
20 | */ |
||
21 | |||
22 | /** |
||
23 | * @file |
||
24 | * QTKit input device |
||
25 | * @author Vadim Kalinsky |
||
26 | */ |
||
27 | |||
28 | #if defined(__clang__) |
||
29 | #pragma clang diagnostic ignored "-Wdeprecated-declarations" |
||
30 | #endif |
||
31 | |||
#import <QTKit/QTKit.h>
#include <pthread.h>

#include "libavutil/pixdesc.h"
#include "libavutil/opt.h"
#include "libavformat/internal.h"
#include "libavutil/internal.h"
#include "libavutil/time.h"
#include "avdevice.h"
||
41 | |||
/* Timebase for generated packet timestamps, in ticks per second. */
#define QTKIT_TIMEBASE 100

/* AVRational form of QTKIT_TIMEBASE, used with av_rescale_q() when
 * converting av_gettime() microseconds to stream timestamps. */
static const AVRational kQTKitTimeBase_q = {
    .num = 1,
    .den = QTKIT_TIMEBASE
};
||
48 | |||
/* Private context of the QTKit input device (AVFormatContext.priv_data). */
typedef struct
{
    AVClass*        class;              // required first member for AVOptions

    float           frame_rate;         // requested capture rate ("frame_rate" option)
    int             frames_captured;    // frames delivered by the capture delegate so far
    int64_t         first_pts;          // av_gettime() at read_header; pts origin
    pthread_mutex_t frame_lock;         // protects current_frame and frame_wait_cond
    pthread_cond_t  frame_wait_cond;    // signaled when a new frame arrives
    id              qt_delegate;        // FFMPEG_FrameReceiver instance (retained)

    int             list_devices;       // "list_devices" option: print devices and exit
    int             video_device_index; // "video_device_index" option, -1 = unset

    QTCaptureSession*                 capture_session;
    QTCaptureDecompressedVideoOutput* video_output;
    CVImageBufferRef                  current_frame;   // latest frame, retained; nil when consumed
} CaptureContext;
||
67 | |||
/* Acquire the lock protecting ctx->current_frame. */
static void lock_frames(CaptureContext* ctx)
{
    pthread_mutex_lock(&ctx->frame_lock);
}
||
72 | |||
/* Release the lock protecting ctx->current_frame. */
static void unlock_frames(CaptureContext* ctx)
{
    pthread_mutex_unlock(&ctx->frame_lock);
}
||
77 | |||
/** FrameReceiver class - delegate for QTCaptureSession.
 *  Receives decompressed video frames from QTKit and hands them to the
 *  CaptureContext under its frame lock.
 */
@interface FFMPEG_FrameReceiver : NSObject
{
    CaptureContext* _context;   // owning device context (not retained)
}

- (id)initWithContext:(CaptureContext*)context;

/* QTCaptureDecompressedVideoOutput delegate callback, invoked by QTKit
 * for each captured frame. */
- (void)captureOutput:(QTCaptureOutput *)captureOutput
  didOutputVideoFrame:(CVImageBufferRef)videoFrame
     withSampleBuffer:(QTSampleBuffer *)sampleBuffer
       fromConnection:(QTCaptureConnection *)connection;

@end
||
93 | |||
@implementation FFMPEG_FrameReceiver

- (id)initWithContext:(CaptureContext*)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

/* Delivered by QTKit on its capture thread for every decompressed frame.
 * Replaces any frame the reader has not yet consumed (late frames are
 * simply dropped) and wakes a reader blocked in qtkit_read_packet(). */
- (void)captureOutput:(QTCaptureOutput *)captureOutput
  didOutputVideoFrame:(CVImageBufferRef)videoFrame
     withSampleBuffer:(QTSampleBuffer *)sampleBuffer
       fromConnection:(QTCaptureConnection *)connection
{
    lock_frames(_context);

    if (_context->current_frame != nil) {
        CVBufferRelease(_context->current_frame);
    }

    _context->current_frame = CVBufferRetain(videoFrame);

    /* Fix: frames_captured is polled from another thread in
     * qtkit_read_header(); it was previously incremented after
     * unlock_frames(), i.e. outside the lock, which is a data race.
     * Update it while the mutex is still held. */
    ++_context->frames_captured;

    pthread_cond_signal(&_context->frame_wait_cond);

    unlock_frames(_context);
}

@end
||
124 | |||
/* Tear down the capture session and free all context resources.
 * Safe to call on a partially-initialized context (read_header fail path)
 * and, after this fix, safe to call twice. */
static void destroy_context(CaptureContext* ctx)
{
    [ctx->capture_session stopRunning];

    [ctx->capture_session release];
    [ctx->video_output    release];
    [ctx->qt_delegate     release];

    ctx->capture_session = NULL;
    ctx->video_output    = NULL;
    ctx->qt_delegate     = NULL;

    /* Fix: release any pending frame BEFORE destroying the mutex that
     * guards it, and clear the pointer so it does not dangle — the old
     * code left current_frame pointing at a released buffer, which
     * would double-release on a repeated call. */
    if (ctx->current_frame) {
        CVBufferRelease(ctx->current_frame);
        ctx->current_frame = NULL;
    }

    pthread_mutex_destroy(&ctx->frame_lock);
    pthread_cond_destroy(&ctx->frame_wait_cond);
}
||
143 | |||
/**
 * Open a QTKit capture device, start the capture session and create a
 * single rawvideo (RGB24) stream whose dimensions are taken from the
 * first captured frame.
 *
 * Device selection order: the "video_device_index" option, a numeric
 * index given as the filename, a device-name prefix given as the
 * filename, or the default muxed/video capture device.
 *
 * @return 0 on success, AVERROR(EIO) on failure (also after the
 *         deliberate early exit when -list_devices was requested).
 */
static int qtkit_read_header(AVFormatContext *s)
{
    NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];

    CaptureContext* ctx = (CaptureContext*)s->priv_data;

    /* Wall-clock origin for packet timestamps (see qtkit_read_packet). */
    ctx->first_pts = av_gettime();

    pthread_mutex_init(&ctx->frame_lock, NULL);
    pthread_cond_init(&ctx->frame_wait_cond, NULL);

    // List devices if requested
    if (ctx->list_devices) {
        av_log(ctx, AV_LOG_INFO, "QTKit video devices:\n");
        NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
        for (QTCaptureDevice *device in devices) {
            const char *name = [[device localizedDisplayName] UTF8String];
            int index       = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
        goto fail;
    }

    // Find capture device
    QTCaptureDevice *video_device = nil;

    // check for device index given in filename
    if (ctx->video_device_index == -1) {
        sscanf(s->filename, "%d", &ctx->video_device_index);
    }

    if (ctx->video_device_index >= 0) {
        NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];

        if (ctx->video_device_index >= [devices count]) {
            av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
            goto fail;
        }

        video_device = [devices objectAtIndex:ctx->video_device_index];
    } else if (strncmp(s->filename, "", 1) &&
               strncmp(s->filename, "default", 7)) {
        /* Match the filename as a prefix of the localized device name. */
        NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];

        for (QTCaptureDevice *device in devices) {
            if (!strncmp(s->filename, [[device localizedDisplayName] UTF8String], strlen(s->filename))) {
                video_device = device;
                break;
            }
        }
        if (!video_device) {
            av_log(ctx, AV_LOG_ERROR, "Video device not found\n");
            goto fail;
        }
    } else {
        video_device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeMuxed];
    }

    BOOL success = [video_device open:nil];

    // Video capture device not found, looking for QTMediaTypeVideo
    if (!success) {
        video_device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
        success      = [video_device open:nil];

        if (!success) {
            av_log(s, AV_LOG_ERROR, "No QT capture device found\n");
            goto fail;
        }
    }

    NSString* dev_display_name = [video_device localizedDisplayName];
    av_log (s, AV_LOG_DEBUG, "'%s' opened\n", [dev_display_name UTF8String]);

    // Initialize capture session
    ctx->capture_session = [[QTCaptureSession alloc] init];

    QTCaptureDeviceInput* capture_dev_input = [[[QTCaptureDeviceInput alloc] initWithDevice:video_device] autorelease];
    success = [ctx->capture_session addInput:capture_dev_input error:nil];

    if (!success) {
        av_log (s, AV_LOG_ERROR, "Failed to add QT capture device to session\n");
        goto fail;
    }

    // Attaching output
    // FIXME: Allow for a user defined pixel format
    ctx->video_output = [[QTCaptureDecompressedVideoOutput alloc] init];

    NSDictionary *captureDictionary = [NSDictionary dictionaryWithObject:
                                       [NSNumber numberWithUnsignedInt:kCVPixelFormatType_24RGB]
                                       forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    [ctx->video_output setPixelBufferAttributes:captureDictionary];

    ctx->qt_delegate = [[FFMPEG_FrameReceiver alloc] initWithContext:ctx];

    [ctx->video_output setDelegate:ctx->qt_delegate];
    [ctx->video_output setAutomaticallyDropsLateVideoFrames:YES];
    [ctx->video_output setMinimumVideoFrameInterval:1.0/ctx->frame_rate];

    success = [ctx->capture_session addOutput:ctx->video_output error:nil];

    if (!success) {
        av_log (s, AV_LOG_ERROR, "can't add video output to capture session\n");
        goto fail;
    }

    [ctx->capture_session startRunning];

    // Take stream info from the first frame.
    while (ctx->frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        /* Fix: release the lock before bailing out — the fail path calls
         * destroy_context(), and pthread_mutex_destroy() on a mutex that
         * is still locked is undefined behavior. */
        unlock_frames(ctx);
        goto fail;
    }

    avpriv_set_pts_info(stream, 64, 1, QTKIT_TIMEBASE);

    stream->codec->codec_id   = AV_CODEC_ID_RAWVIDEO;
    stream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    stream->codec->width      = (int)CVPixelBufferGetWidth (ctx->current_frame);
    stream->codec->height     = (int)CVPixelBufferGetHeight(ctx->current_frame);
    stream->codec->pix_fmt    = AV_PIX_FMT_RGB24;

    CVBufferRelease(ctx->current_frame);
    ctx->current_frame = nil;

    unlock_frames(ctx);

    [pool release];

    return 0;

fail:
    [pool release];

    destroy_context(ctx);

    return AVERROR(EIO);
}
||
291 | |||
/**
 * Block until a captured frame is available, then copy it into @p pkt.
 *
 * pts/dts are the wall-clock time elapsed since read_header, rescaled
 * to the QTKIT_TIMEBASE stream timebase; every frame is a keyframe.
 *
 * @return 0 on success, AVERROR(EIO) if packet allocation fails.
 */
static int qtkit_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    CaptureContext* ctx = (CaptureContext*)s->priv_data;

    do {
        lock_frames(ctx);

        if (ctx->current_frame != nil) {
            if (av_new_packet(pkt, (int)CVPixelBufferGetDataSize(ctx->current_frame)) < 0) {
                /* Fix: do not return with frame_lock held — the old code
                 * leaked the mutex here, permanently deadlocking the
                 * capture delegate. */
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_pts, AV_TIME_BASE_Q, kQTKitTimeBase_q);
            pkt->stream_index = 0;
            pkt->flags       |= AV_PKT_FLAG_KEY;

            CVPixelBufferLockBaseAddress(ctx->current_frame, 0);

            void* data = CVPixelBufferGetBaseAddress(ctx->current_frame);
            memcpy(pkt->data, data, pkt->size);

            CVPixelBufferUnlockBaseAddress(ctx->current_frame, 0);
            CVBufferRelease(ctx->current_frame);
            ctx->current_frame = nil;
        } else {
            /* No frame yet: mark the packet empty and wait for the
             * delegate to signal a new frame. */
            pkt->data = NULL;
            pthread_cond_wait(&ctx->frame_wait_cond, &ctx->frame_lock);
        }

        unlock_frames(ctx);
    } while (!pkt->data);

    return 0;
}
||
326 | |||
/* AVInputFormat.read_close: tear down the capture session and context. */
static int qtkit_close(AVFormatContext *s)
{
    CaptureContext* ctx = (CaptureContext*)s->priv_data;

    destroy_context(ctx);

    return 0;
}
||
335 | |||
/* Device options. AVOption layout:
 * name, help, offset, type, default, min, max, flags, unit. */
static const AVOption options[] = {
    /* Fix: the flags field held AV_OPT_TYPE_VIDEO_RATE — an AVOptionType
     * enumerator, not an AV_OPT_FLAG_* bitmask — which is meaningless as
     * a flag set; use AV_OPT_FLAG_DECODING_PARAM like the sibling options. */
    { "frame_rate", "set frame rate", offsetof(CaptureContext, frame_rate), AV_OPT_TYPE_FLOAT, { .dbl = 30.0 }, 0.1, 30.0, AV_OPT_FLAG_DECODING_PARAM, NULL },
    { "list_devices", "list available devices", offsetof(CaptureContext, list_devices), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "true", "", 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "false", "", 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
    { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(CaptureContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { NULL },
};
||
344 | |||
/* AVClass for the option system and logging. */
static const AVClass qtkit_class = {
    .class_name = "QTKit input device",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};
||
352 | |||
/* Demuxer ("input device") definition registered with libavdevice.
 * AVFMT_NOFILE: the "filename" is a device index/name, not a real file. */
AVInputFormat ff_qtkit_demuxer = {
    .name           = "qtkit",
    .long_name      = NULL_IF_CONFIG_SMALL("QTKit input device"),
    .priv_data_size = sizeof(CaptureContext),
    .read_header    = qtkit_read_header,
    .read_packet    = qtkit_read_packet,
    .read_close     = qtkit_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &qtkit_class,
};