/*
 * Multiple format streaming server
 * Copyright (c) 2000, 2001, 2002 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
#define HAVE_AV_CONFIG_H
#include "avformat.h"

#include <stdarg.h>
#include <netinet/in.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/poll.h>
#include <errno.h>
#include <sys/time.h>
#include <time.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/wait.h>
#include <arpa/inet.h>
#include <netdb.h>
#include <ctype.h>
#include <signal.h>
#include <dlfcn.h>

#include "ffserver.h"

/* maximum number of simultaneous HTTP connections */
#define HTTP_MAX_CONNECTIONS 2000

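/* Each connection is a small state machine: the HTTPSTATE_* values
   cover the HTTP request/response and data-sending phases, and the
   RTSPSTATE_* values cover RTSP request handling on the same
   HTTPContext structure.  handle_connection() dispatches on this
   state on every poll() wakeup. */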
enum HTTPState {
    HTTPSTATE_WAIT_REQUEST,
    HTTPSTATE_SEND_HEADER,
    HTTPSTATE_SEND_DATA_HEADER,
    HTTPSTATE_SEND_DATA,          /* sending TCP or UDP data */
    HTTPSTATE_SEND_DATA_TRAILER,
    HTTPSTATE_RECEIVE_DATA,
    HTTPSTATE_WAIT_FEED,          /* wait for data from the feed */
    HTTPSTATE_WAIT,               /* wait before sending next packets */
    HTTPSTATE_WAIT_SHORT,         /* short wait for short term
                                     bandwidth limitation */
    HTTPSTATE_READY,

    RTSPSTATE_WAIT_REQUEST,
    RTSPSTATE_SEND_REPLY,
};

const char *http_state[] = {
    "HTTP_WAIT_REQUEST",
    "HTTP_SEND_HEADER",

    "SEND_DATA_HEADER",
    "SEND_DATA",
    "SEND_DATA_TRAILER",
    "RECEIVE_DATA",
    "WAIT_FEED",
    "WAIT",
    "WAIT_SHORT",
    "READY",

    "RTSP_WAIT_REQUEST",
    "RTSP_SEND_REPLY",
};

#define IOBUFFER_INIT_SIZE 8192

/* coef for exponential mean for bitrate estimation in statistics */
#define AVG_COEF 0.9

/* timeouts are in ms */
#define HTTP_REQUEST_TIMEOUT (15 * 1000)
#define RTSP_REQUEST_TIMEOUT (3600 * 24 * 1000)

#define SYNC_TIMEOUT (10 * 1000)

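/* Data-rate bookkeeping for a connection: update_datarate() keeps two
   (byte count, time) samples roughly 5 seconds apart, and
   compute_datarate() derives an average rate in bytes per second from
   the older sample. */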
typedef struct {
    INT64 count1, count2;
    long time1, time2;
} DataRateData;

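/* An HTTPContext is allocated in new_connection(), linked into the
   first_http_ctx list and freed in close_connection(); it carries the
   I/O buffer plus the feed input, output format, RTSP and RTP state of
   a single client. */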
/* context associated with one connection */
typedef struct HTTPContext {
    enum HTTPState state;
    int fd; /* socket file descriptor */
    struct sockaddr_in from_addr; /* origin */
    struct pollfd *poll_entry; /* used when polling */
    long timeout;
    UINT8 *buffer_ptr, *buffer_end;
    int http_error;
    struct HTTPContext *next;
    int got_key_frame; /* stream 0 => 1, stream 1 => 2, stream 2 => 4 */
    INT64 data_count;
    /* feed input */
    int feed_fd;
    /* input format handling */
    AVFormatContext *fmt_in;
    long start_time;            /* In milliseconds - this wraps fairly often */
    INT64 first_pts;            /* initial pts value */
    int pts_stream_index;       /* stream we choose as clock reference */
    /* output format handling */
    struct FFStream *stream;
    /* -1 is invalid stream */
    int feed_streams[MAX_STREAMS]; /* index of streams in the feed */
    int switch_feed_streams[MAX_STREAMS]; /* index of streams in the feed */
    int switch_pending;
    AVFormatContext fmt_ctx; /* instance of FFStream for one user */
    int last_packet_sent; /* true if last data packet was sent */
    int suppress_log;
    int bandwidth;
    DataRateData datarate;
    int wmp_client_id;
    char protocol[16];
    char method[16];
    char url[128];
    int buffer_size;
    UINT8 *buffer;
    int is_packetized; /* if true, the stream is packetized */
    int packet_stream_index; /* current stream for output in state machine */

    /* RTSP state specific */
    UINT8 *pb_buffer; /* XXX: use that in all the code */
    ByteIOContext *pb;
    int seq; /* RTSP sequence number */

    /* RTP state specific */
    enum RTSPProtocol rtp_protocol;
    char session_id[32]; /* session id */
    AVFormatContext *rtp_ctx[MAX_STREAMS];
    URLContext *rtp_handles[MAX_STREAMS];
    /* RTP short term bandwidth limitation */
    int packet_byte_count;
    int packet_start_time_us; /* used for short durations (a few
                                 seconds max) */
} HTTPContext;

/* each generated stream is described here */
enum StreamType {
    STREAM_TYPE_LIVE,
    STREAM_TYPE_STATUS,
    STREAM_TYPE_REDIRECT,
};

enum IPAddressAction {
    IP_ALLOW = 1,
    IP_DENY,
};

typedef struct IPAddressACL {
    struct IPAddressACL *next;
    enum IPAddressAction action;
    struct in_addr first;
    struct in_addr last;
} IPAddressACL;

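/* An FFStream describes either a <Stream> or a <Feed> section of
   ffserver.conf: feeds set is_feed and also appear in the global
   first_stream list, while first_feed chains the feeds only. */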
/* description of each stream of the ffserver.conf file */
typedef struct FFStream {
    enum StreamType stream_type;
    char filename[1024];     /* stream filename */
    struct FFStream *feed;   /* feed we are using (can be null if
                                coming from file) */
    AVOutputFormat *fmt;
    IPAddressACL *acl;
    int nb_streams;
    int prebuffer;      /* Number of milliseconds early to start */
    long max_time;      /* Number of milliseconds to run */
    int send_on_key;
    AVStream *streams[MAX_STREAMS];
    int feed_streams[MAX_STREAMS]; /* index of streams in the feed */
    char feed_filename[1024]; /* file name of the feed storage, or
                                 input file name for a stream */
    char author[512];
    char title[512];
    char copyright[512];
    char comment[512];
    pid_t pid;          /* Of ffmpeg process */
    time_t pid_start;   /* Of ffmpeg process */
    char **child_argv;
    struct FFStream *next;
    /* RTSP options */
    char *rtsp_option;
    /* feed specific */
    int feed_opened;    /* true if someone is writing to the feed */
    int is_feed;        /* true if it is a feed */
    int conns_served;
    INT64 bytes_served;
    INT64 feed_max_size;      /* maximum storage size */
    INT64 feed_write_index;   /* current write position in feed (it wraps round) */
    INT64 feed_size;          /* current size of feed */
    struct FFStream *next_feed;
} FFStream;

typedef struct FeedData {
    long long data_count;
    float avg_frame_size;   /* frame size averaged over last frames with exponential mean */
} FeedData;

struct sockaddr_in my_http_addr;
struct sockaddr_in my_rtsp_addr;

char logfilename[1024];
HTTPContext *first_http_ctx;
FFStream *first_feed;   /* contains only feeds */
FFStream *first_stream; /* contains all streams, including feeds */

static void new_connection(int server_fd, int is_rtsp);
static void close_connection(HTTPContext *c);

/* HTTP handling */
static int handle_connection(HTTPContext *c);
static int http_parse_request(HTTPContext *c);
static int http_send_data(HTTPContext *c);
static void compute_stats(HTTPContext *c);
static int open_input_stream(HTTPContext *c, const char *info);
static int http_start_receive_data(HTTPContext *c);
static int http_receive_data(HTTPContext *c);
static int compute_send_delay(HTTPContext *c);

/* RTSP handling */
static int rtsp_parse_request(HTTPContext *c);
static void rtsp_cmd_describe(HTTPContext *c, const char *url);
static void rtsp_cmd_setup(HTTPContext *c, const char *url, RTSPHeader *h);
static void rtsp_cmd_play(HTTPContext *c, const char *url, RTSPHeader *h);
static void rtsp_cmd_pause(HTTPContext *c, const char *url, RTSPHeader *h);
static void rtsp_cmd_teardown(HTTPContext *c, const char *url, RTSPHeader *h);

/* RTP handling */
static HTTPContext *rtp_new_connection(HTTPContext *rtsp_c,
                                       FFStream *stream, const char *session_id);
static int rtp_new_av_stream(HTTPContext *c,
                             int stream_index, struct sockaddr_in *dest_addr);

static const char *my_program_name;

static int ffserver_debug;
static int ffserver_daemon;
static int no_launch;
static int need_to_start_children;

int nb_max_connections;
int nb_connections;

int nb_max_bandwidth;
int nb_bandwidth;

static long cur_time;           // Making this global saves on passing it around everywhere

static long gettime_ms(void)
{
    struct timeval tv;

    gettimeofday(&tv,NULL);
    return (long long)tv.tv_sec * 1000 + (tv.tv_usec / 1000);
}

static FILE *logfile = NULL;

static void http_log(char *fmt, ...)
{
    va_list ap;
    va_start(ap, fmt);

    if (logfile) {
        vfprintf(logfile, fmt, ap);
        fflush(logfile);
    }
    va_end(ap);
}

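/* Write an access-log line in a common-log-like format
   (host - - [date] "METHOD url protocol" status bytes) for a finished
   connection, unless logging was suppressed for it. */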
static void log_connection(HTTPContext *c)
{
    char buf1[32], buf2[32], *p;
    time_t ti;

    if (c->suppress_log)
        return;

    /* XXX: reentrant function ? */
    p = inet_ntoa(c->from_addr.sin_addr);
    strcpy(buf1, p);
    ti = time(NULL);
    p = ctime(&ti);
    strcpy(buf2, p);
    p = buf2 + strlen(p) - 1;
    if (*p == '\n')
        *p = '\0';
    http_log("%s - - [%s] \"%s %s %s\" %d %lld\n",
             buf1, buf2, c->method, c->url, c->protocol, (c->http_error ? c->http_error : 200), c->data_count);
}

static void update_datarate(DataRateData *drd, INT64 count)
{
    if (!drd->time1 && !drd->count1) {
        drd->time1 = drd->time2 = cur_time;
        drd->count1 = drd->count2 = count;
    } else {
        if (cur_time - drd->time2 > 5000) {
            drd->time1 = drd->time2;
            drd->count1 = drd->count2;
            drd->time2 = cur_time;
            drd->count2 = count;
        }
    }
}

/* In bytes per second */
static int compute_datarate(DataRateData *drd, INT64 count)
{
    if (cur_time == drd->time1)
        return 0;

    return ((count - drd->count1) * 1000) / (cur_time - drd->time1);
}

static int get_longterm_datarate(DataRateData *drd, INT64 count)
{
    /* You get the first 3 seconds flat out */
    if (cur_time - drd->time1 < 3000)
        return 0;

    return compute_datarate(drd, count);
}


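/* Launch one ffmpeg child per feed that has a command line configured
   and is not already running: file descriptors 3..255 are closed,
   stdio is redirected to /dev/null unless debugging, and the ffmpeg
   binary is looked up next to my_program_name. */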
static void start_children(FFStream *feed)
{
    if (no_launch)
        return;

    for (; feed; feed = feed->next) {
        if (feed->child_argv && !feed->pid) {
            feed->pid_start = time(0);

            feed->pid = fork();

            if (feed->pid < 0) {
                fprintf(stderr, "Unable to create children\n");
                exit(1);
            }
            if (!feed->pid) {
                /* In child */
                char pathname[1024];
                char *slash;
                int i;

                for (i = 3; i < 256; i++) {
                    close(i);
                }

                if (!ffserver_debug) {
                    i = open("/dev/null", O_RDWR);
                    if (i)
                        dup2(i, 0);
                    dup2(i, 1);
                    dup2(i, 2);
                    if (i)
                        close(i);
                }

                pstrcpy(pathname, sizeof(pathname), my_program_name);

                slash = strrchr(pathname, '/');
                if (!slash) {
                    slash = pathname;
                } else {
                    slash++;
                }
                strcpy(slash, "ffmpeg");

                execvp(pathname, feed->child_argv);

                _exit(1);
            }
        }
    }
}

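/* The returned descriptor is bound to *my_addr, marked SO_REUSEADDR
   and non-blocking; -1 is returned on any error. */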
/* open a listening socket */
static int socket_open_listen(struct sockaddr_in *my_addr)
{
    int server_fd, tmp;

    server_fd = socket(AF_INET,SOCK_STREAM,0);
    if (server_fd < 0) {
        perror ("socket");
        return -1;
    }

    tmp = 1;
    setsockopt(server_fd, SOL_SOCKET, SO_REUSEADDR, &tmp, sizeof(tmp));

    if (bind (server_fd, (struct sockaddr *) my_addr, sizeof (*my_addr)) < 0) {
        perror ("bind");
        close(server_fd);
        return -1;
    }

    if (listen (server_fd, 5) < 0) {
        perror ("listen");
        close(server_fd);
        return -1;
    }
    fcntl(server_fd, F_SETFL, O_NONBLOCK);

    return server_fd;
}


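/* One iteration of the loop below builds a poll table containing the
   two listening sockets plus one entry per connection that needs I/O,
   polls with a delay bounded by the per-connection wait states, then
   runs handle_connection() on every context and accepts any pending
   HTTP or RTSP connections. */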
/* main loop of the http server */
static int http_server(void)
{
    int server_fd, ret, rtsp_server_fd, delay, delay1;
    struct pollfd poll_table[HTTP_MAX_CONNECTIONS + 2], *poll_entry;
    HTTPContext *c, *c_next;

    server_fd = socket_open_listen(&my_http_addr);
    if (server_fd < 0)
        return -1;

    rtsp_server_fd = socket_open_listen(&my_rtsp_addr);
    if (rtsp_server_fd < 0)
        return -1;

    http_log("ffserver started.\n");

    start_children(first_feed);

    first_http_ctx = NULL;
    nb_connections = 0;
    first_http_ctx = NULL;
    for(;;) {
        poll_entry = poll_table;
        poll_entry->fd = server_fd;
        poll_entry->events = POLLIN;
        poll_entry++;

        poll_entry->fd = rtsp_server_fd;
        poll_entry->events = POLLIN;
        poll_entry++;

        /* wait for events on each HTTP handle */
        c = first_http_ctx;
        delay = 1000;
        while (c != NULL) {
            int fd;
            fd = c->fd;
            switch(c->state) {
            case HTTPSTATE_SEND_HEADER:
            case RTSPSTATE_SEND_REPLY:
                c->poll_entry = poll_entry;
                poll_entry->fd = fd;
                poll_entry->events = POLLOUT;
                poll_entry++;
                break;
            case HTTPSTATE_SEND_DATA_HEADER:
            case HTTPSTATE_SEND_DATA:
            case HTTPSTATE_SEND_DATA_TRAILER:
                if (!c->is_packetized) {
                    /* for TCP, we output as much as we can (may need to put a limit) */
                    c->poll_entry = poll_entry;
                    poll_entry->fd = fd;
                    poll_entry->events = POLLOUT;
                    poll_entry++;
                } else {
                    /* not strictly correct, but currently cannot add
                       more than one fd in poll entry */
                    delay = 0;
                }
                break;
            case HTTPSTATE_WAIT_REQUEST:
            case HTTPSTATE_RECEIVE_DATA:
            case HTTPSTATE_WAIT_FEED:
            case RTSPSTATE_WAIT_REQUEST:
                /* need to catch errors */
                c->poll_entry = poll_entry;
                poll_entry->fd = fd;
                poll_entry->events = POLLIN;/* Maybe this will work */
                poll_entry++;
                break;
            case HTTPSTATE_WAIT:
                c->poll_entry = NULL;
                delay1 = compute_send_delay(c);
                if (delay1 < delay)
                    delay = delay1;
                break;
            case HTTPSTATE_WAIT_SHORT:
                c->poll_entry = NULL;
                delay1 = 10; /* one tick wait XXX: 10 ms assumed */
                if (delay1 < delay)
                    delay = delay1;
                break;
            default:
                c->poll_entry = NULL;
                break;
            }
            c = c->next;
        }

        /* wait for an event on one connection. We poll at least every
           second to handle timeouts */
        do {
            ret = poll(poll_table, poll_entry - poll_table, delay);
        } while (ret == -1);

        cur_time = gettime_ms();

        if (need_to_start_children) {
            need_to_start_children = 0;
            start_children(first_feed);
        }

        /* now handle the events */
        for(c = first_http_ctx; c != NULL; c = c_next) {
            c_next = c->next;
            if (handle_connection(c) < 0) {
                /* close and free the connection */
                log_connection(c);
                close_connection(c);
            }
        }

        poll_entry = poll_table;
        /* new HTTP connection request ? */
        if (poll_entry->revents & POLLIN) {
            new_connection(server_fd, 0);
        }
        poll_entry++;
        /* new RTSP connection request ? */
        if (poll_entry->revents & POLLIN) {
            new_connection(rtsp_server_fd, 1);
        }
    }
}

/* start waiting for a new HTTP/RTSP request */
static void start_wait_request(HTTPContext *c, int is_rtsp)
{
    c->buffer_ptr = c->buffer;
    c->buffer_end = c->buffer + c->buffer_size - 1; /* leave room for '\0' */

    if (is_rtsp) {
        c->timeout = cur_time + RTSP_REQUEST_TIMEOUT;
        c->state = RTSPSTATE_WAIT_REQUEST;
    } else {
        c->timeout = cur_time + HTTP_REQUEST_TIMEOUT;
        c->state = HTTPSTATE_WAIT_REQUEST;
    }
}

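/* Accept a pending connection on the HTTP or RTSP listening socket,
   make it non-blocking, allocate an HTTPContext with an
   IOBUFFER_INIT_SIZE buffer and prepend it to first_http_ctx. */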
static void new_connection(int server_fd, int is_rtsp)
{
    struct sockaddr_in from_addr;
    int fd, len;
    HTTPContext *c = NULL;

    len = sizeof(from_addr);
    fd = accept(server_fd, (struct sockaddr *)&from_addr,
                &len);
    if (fd < 0)
        return;
    fcntl(fd, F_SETFL, O_NONBLOCK);

    /* XXX: should output a warning page when coming
       close to the connection limit */
    if (nb_connections >= nb_max_connections)
        goto fail;

    /* add a new connection */
    c = av_mallocz(sizeof(HTTPContext));
    if (!c)
        goto fail;

    c->next = first_http_ctx;
    first_http_ctx = c;
    c->fd = fd;
    c->poll_entry = NULL;
    c->from_addr = from_addr;
    c->buffer_size = IOBUFFER_INIT_SIZE;
    c->buffer = av_malloc(c->buffer_size);
    if (!c->buffer)
        goto fail;
    nb_connections++;

    start_wait_request(c, is_rtsp);

    return;

 fail:
    if (c) {
        av_free(c->buffer);
        av_free(c);
    }
    close(fd);
}

static void close_connection(HTTPContext *c)
{
    HTTPContext **cp, *c1;
    int i, nb_streams;
    AVFormatContext *ctx;
    URLContext *h;
    AVStream *st;

    /* remove connection from list */
    cp = &first_http_ctx;
    while ((*cp) != NULL) {
        c1 = *cp;
        if (c1 == c) {
            *cp = c->next;
        } else {
            cp = &c1->next;
        }
    }

    /* remove connection associated resources */
    if (c->fd >= 0)
        close(c->fd);
    if (c->fmt_in) {
        /* close each frame parser */
        for(i=0;i<c->fmt_in->nb_streams;i++) {
            st = c->fmt_in->streams[i];
            if (st->codec.codec) {
                avcodec_close(&st->codec);
            }
        }
        av_close_input_file(c->fmt_in);
    }

    /* free RTP output streams if any */
    nb_streams = 0;
    if (c->stream)
        nb_streams = c->stream->nb_streams;

    for(i=0;i<nb_streams;i++) {
        ctx = c->rtp_ctx[i];
        if (ctx) {
            av_write_trailer(ctx);
            av_free(ctx);
        }
        h = c->rtp_handles[i];
        if (h) {
            url_close(h);
        }
    }

    nb_bandwidth -= c->bandwidth;
    av_freep(&c->pb_buffer);
    av_free(c->buffer);
    av_free(c);
    nb_connections--;
}

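/* Per-connection state machine step.  Returns 0 to keep the connection
   alive and a negative value to make the caller log and close it. */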
static int handle_connection(HTTPContext *c)
{
    int len, ret;

    switch(c->state) {
    case HTTPSTATE_WAIT_REQUEST:
    case RTSPSTATE_WAIT_REQUEST:
        /* timeout ? */
        if ((c->timeout - cur_time) < 0)
            return -1;
        if (c->poll_entry->revents & (POLLERR | POLLHUP))
            return -1;

        /* no need to read if no events */
        if (!(c->poll_entry->revents & POLLIN))
            return 0;
        /* read the data */
        len = read(c->fd, c->buffer_ptr, c->buffer_end - c->buffer_ptr);
        if (len < 0) {
            if (errno != EAGAIN && errno != EINTR)
                return -1;
        } else if (len == 0) {
            return -1;
        } else {
            /* search for end of request. XXX: not fully correct since garbage could come after the end */
            UINT8 *ptr;
            c->buffer_ptr += len;
            ptr = c->buffer_ptr;
            if ((ptr >= c->buffer + 2 && !memcmp(ptr-2, "\n\n", 2)) ||
                (ptr >= c->buffer + 4 && !memcmp(ptr-4, "\r\n\r\n", 4))) {
                /* request found : parse it and reply */
                if (c->state == HTTPSTATE_WAIT_REQUEST) {
                    ret = http_parse_request(c);
                } else {
                    ret = rtsp_parse_request(c);
                }
                if (ret < 0)
                    return -1;
            } else if (ptr >= c->buffer_end) {
                /* request too long: cannot do anything */
                return -1;
            }
        }
        break;

    case HTTPSTATE_SEND_HEADER:
        if (c->poll_entry->revents & (POLLERR | POLLHUP))
            return -1;

        /* no need to write if no events */
        if (!(c->poll_entry->revents & POLLOUT))
            return 0;
        len = write(c->fd, c->buffer_ptr, c->buffer_end - c->buffer_ptr);
        if (len < 0) {
            if (errno != EAGAIN && errno != EINTR) {
                /* error : close connection */
                av_freep(&c->pb_buffer);
                return -1;
            }
        } else {
            c->buffer_ptr += len;
            if (c->stream)
                c->stream->bytes_served += len;
            c->data_count += len;
            if (c->buffer_ptr >= c->buffer_end) {
                av_freep(&c->pb_buffer);
                /* if error, exit */
                if (c->http_error) {
                    return -1;
                }
                /* all the buffer was sent : synchronize to the incoming stream */
                c->state = HTTPSTATE_SEND_DATA_HEADER;
                c->buffer_ptr = c->buffer_end = c->buffer;
            }
        }
        break;

    case HTTPSTATE_SEND_DATA:
    case HTTPSTATE_SEND_DATA_HEADER:
    case HTTPSTATE_SEND_DATA_TRAILER:
        /* for packetized output, we consider we can always write (the
           input stream sets the speed). It may be better to verify
           that we do not rely too much on the kernel queues */
        if (!c->is_packetized) {
            if (c->poll_entry->revents & (POLLERR | POLLHUP))
                return -1;

            /* no need to read if no events */
            if (!(c->poll_entry->revents & POLLOUT))
                return 0;
        }
        if (http_send_data(c) < 0)
            return -1;
        break;
    case HTTPSTATE_RECEIVE_DATA:
        /* no need to read if no events */
        if (c->poll_entry->revents & (POLLERR | POLLHUP))
            return -1;
        if (!(c->poll_entry->revents & POLLIN))
            return 0;
        if (http_receive_data(c) < 0)
            return -1;
        break;
    case HTTPSTATE_WAIT_FEED:
        /* no need to read if no events */
        if (c->poll_entry->revents & (POLLIN | POLLERR | POLLHUP))
            return -1;

        /* nothing to do, we'll be woken up by incoming feed packets */
        break;

    case HTTPSTATE_WAIT:
        /* if the delay expired, we can send new packets */
        if (compute_send_delay(c) <= 0)
            c->state = HTTPSTATE_SEND_DATA;
        break;
    case HTTPSTATE_WAIT_SHORT:
        /* just return back to send data */
        c->state = HTTPSTATE_SEND_DATA;
        break;

    case RTSPSTATE_SEND_REPLY:
        if (c->poll_entry->revents & (POLLERR | POLLHUP)) {
            av_freep(&c->pb_buffer);
            return -1;
        }
        /* no need to write if no events */
        if (!(c->poll_entry->revents & POLLOUT))
            return 0;
        len = write(c->fd, c->buffer_ptr, c->buffer_end - c->buffer_ptr);
        if (len < 0) {
            if (errno != EAGAIN && errno != EINTR) {
                /* error : close connection */
                av_freep(&c->pb_buffer);
                return -1;
            }
        } else {
            c->buffer_ptr += len;
            c->data_count += len;
            if (c->buffer_ptr >= c->buffer_end) {
                /* all the buffer was sent : wait for a new request */
                av_freep(&c->pb_buffer);
                start_wait_request(c, 1);
            }
        }
        break;
    case HTTPSTATE_READY:
        /* nothing to do */
        break;
    default:
        return -1;
    }
    return 0;
}

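/* Scan a WMP request for a "Pragma: stream-switch-entry=N:R ..." header
   and store the requested rate index per stream in rates[]; streams
   that are not mentioned keep the value 0xff.  Returns 1 if such a
   header was found, 0 otherwise. */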
static int extract_rates(char *rates, int ratelen, const char *request)
{
    const char *p;

    for (p = request; *p && *p != '\r' && *p != '\n'; ) {
        if (strncasecmp(p, "Pragma:", 7) == 0) {
            const char *q = p + 7;

            while (*q && *q != '\n' && isspace(*q))
                q++;

            if (strncasecmp(q, "stream-switch-entry=", 20) == 0) {
                int stream_no;
                int rate_no;

                q += 20;

                memset(rates, 0xff, ratelen);

                while (1) {
                    while (*q && *q != '\n' && *q != ':')
                        q++;

                    if (sscanf(q, ":%d:%d", &stream_no, &rate_no) != 2) {
                        break;
                    }
                    stream_no--;
                    if (stream_no < ratelen && stream_no >= 0) {
                        rates[stream_no] = rate_no;
                    }

                    while (*q && *q != '\n' && !isspace(*q))
                        q++;
                }

                return 1;
            }
        }
        p = strchr(p, '\n');
        if (!p)
            break;

        p++;
    }

    return 0;
}

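/* Pick the feed stream that matches the codec, sample rate and frame
   size of the requested stream and whose bitrate is closest to the
   requested bit_rate (fastest one at or below it, otherwise the
   slowest one above it).  Returns the feed stream index or -1. */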
static int find_stream_in_feed(FFStream *feed, AVCodecContext *codec, int bit_rate)
{
    int i;
    int best_bitrate = 100000000;
    int best = -1;

    for (i = 0; i < feed->nb_streams; i++) {
        AVCodecContext *feed_codec = &feed->streams[i]->codec;

        if (feed_codec->codec_id != codec->codec_id ||
            feed_codec->sample_rate != codec->sample_rate ||
            feed_codec->width != codec->width ||
            feed_codec->height != codec->height) {
            continue;
        }

        /* Potential stream */

        /* We want the fastest stream less than bit_rate, or the slowest
         * faster than bit_rate
         */

        if (feed_codec->bit_rate <= bit_rate) {
            if (best_bitrate > bit_rate || feed_codec->bit_rate > best_bitrate) {
                best_bitrate = feed_codec->bit_rate;
                best = i;
            }
        } else {
            if (feed_codec->bit_rate < best_bitrate) {
                best_bitrate = feed_codec->bit_rate;
                best = i;
            }
        }
    }

    return best;
}

static int modify_current_stream(HTTPContext *c, char *rates)
{
    int i;
    FFStream *req = c->stream;
    int action_required = 0;

    for (i = 0; i < req->nb_streams; i++) {
        AVCodecContext *codec = &req->streams[i]->codec;

        switch(rates[i]) {
            case 0:
                c->switch_feed_streams[i] = req->feed_streams[i];
                break;
            case 1:
                c->switch_feed_streams[i] = find_stream_in_feed(req->feed, codec, codec->bit_rate / 2);
                break;
            case 2:
                /* Wants off or slow */
                c->switch_feed_streams[i] = find_stream_in_feed(req->feed, codec, codec->bit_rate / 4);
#ifdef WANTS_OFF
                /* This doesn't work well when it turns off the only stream! */
                c->switch_feed_streams[i] = -2;
                c->feed_streams[i] = -2;
#endif
                break;
        }

        if (c->switch_feed_streams[i] >= 0 && c->switch_feed_streams[i] != c->feed_streams[i])
            action_required = 1;
    }

    return action_required;
}


static void do_switch_stream(HTTPContext *c, int i)
{
    if (c->switch_feed_streams[i] >= 0) {
#ifdef PHILIP
        c->feed_streams[i] = c->switch_feed_streams[i];
#endif

        /* Now update the stream */
    }
    c->switch_feed_streams[i] = -1;
}

/* XXX: factorize in utils.c ? */
/* XXX: take care with different space meaning */
static void skip_spaces(const char **pp)
{
    const char *p;
    p = *pp;
    while (*p == ' ' || *p == '\t')
        p++;
    *pp = p;
}

static void get_word(char *buf, int buf_size, const char **pp)
{
    const char *p;
    char *q;

    p = *pp;
    skip_spaces(&p);
    q = buf;
    while (!isspace(*p) && *p != '\0') {
        if ((q - buf) < buf_size - 1)
            *q++ = *p;
        p++;
    }
    if (buf_size > 0)
        *q = '\0';
    *pp = p;
}

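/* ACL entries are evaluated in configuration order: the first address
   range that contains the client decides.  When nothing matches, the
   result is the opposite of the last rule's action (and "allow" for an
   empty list). */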
static int validate_acl(FFStream *stream, HTTPContext *c)
{
    enum IPAddressAction last_action = IP_DENY;
    IPAddressACL *acl;
    struct in_addr *src = &c->from_addr.sin_addr;

    for (acl = stream->acl; acl; acl = acl->next) {
        if (src->s_addr >= acl->first.s_addr && src->s_addr <= acl->last.s_addr) {
            return (acl->action == IP_ALLOW) ? 1 : 0;
        }
        last_action = acl->action;
    }

    /* Nothing matched, so return not the last action */
    return (last_action == IP_DENY) ? 1 : 0;
}

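/* http_parse_request() below extracts method, URL and protocol, maps
   the URL onto an FFStream (generating ASX/RAM/ASF/RTSP redirector
   playlists for the matching extensions), applies ACL and bandwidth
   checks, hands POST requests over to the feed receiver, and otherwise
   prepares the HTTP response header before switching the connection to
   HTTPSTATE_SEND_HEADER. */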
/* parse http request and prepare header */
static int http_parse_request(HTTPContext *c)
{
    char *p;
    int post;
    int doing_asx;
    int doing_asf_redirector;
    int doing_ram;
    int doing_rtsp_redirector;
    char cmd[32];
    char info[1024], *filename;
    char url[1024], *q;
    char protocol[32];
    char msg[1024];
    const char *mime_type;
    FFStream *stream;
    int i;
    char ratebuf[32];
    char *useragent = 0;

    p = c->buffer;
    get_word(cmd, sizeof(cmd), (const char **)&p);
    pstrcpy(c->method, sizeof(c->method), cmd);

    if (!strcmp(cmd, "GET"))
        post = 0;
    else if (!strcmp(cmd, "POST"))
        post = 1;
    else
        return -1;

    get_word(url, sizeof(url), (const char **)&p);
    pstrcpy(c->url, sizeof(c->url), url);

    get_word(protocol, sizeof(protocol), (const char **)&p);
    if (strcmp(protocol, "HTTP/1.0") && strcmp(protocol, "HTTP/1.1"))
        return -1;

    pstrcpy(c->protocol, sizeof(c->protocol), protocol);

    /* find the filename and the optional info string in the request */
    p = url;
    if (*p == '/')
        p++;
    filename = p;
    p = strchr(p, '?');
    if (p) {
        pstrcpy(info, sizeof(info), p);
        *p = '\0';
    } else {
        info[0] = '\0';
    }

    for (p = c->buffer; *p && *p != '\r' && *p != '\n'; ) {
        if (strncasecmp(p, "User-Agent:", 11) == 0) {
            useragent = p + 11;
            if (*useragent && *useragent != '\n' && isspace(*useragent))
                useragent++;
            break;
        }
        p = strchr(p, '\n');
        if (!p)
            break;

        p++;
    }

    if (strlen(filename) > 4 && strcmp(".asx", filename + strlen(filename) - 4) == 0) {
        doing_asx = 1;
        filename[strlen(filename)-1] = 'f';
    } else {
        doing_asx = 0;
    }

    if (strlen(filename) > 4 && strcmp(".asf", filename + strlen(filename) - 4) == 0 &&
        (!useragent || strncasecmp(useragent, "NSPlayer", 8) != 0)) {
        /* if this isn't WMP or lookalike, return the redirector file */
        doing_asf_redirector = 1;
    } else {
        doing_asf_redirector = 0;
    }

    if (strlen(filename) > 4 &&
        (strcmp(".rpm", filename + strlen(filename) - 4) == 0 ||
         strcmp(".ram", filename + strlen(filename) - 4) == 0)) {
        doing_ram = 1;
        strcpy(filename + strlen(filename)-2, "m");
    } else {
        doing_ram = 0;
    }

    if (strlen(filename) > 5 &&
        strcmp(".rtsp", filename + strlen(filename) - 5) == 0) {
        char file1[1024];
        char file2[1024];
        char *p;

        doing_rtsp_redirector = 1;
        /* compute filename by matching without the file extensions */
        pstrcpy(file1, sizeof(file1), filename);
        p = strrchr(file1, '.');
        if (p)
            *p = '\0';
        for(stream = first_stream; stream != NULL; stream = stream->next) {
            pstrcpy(file2, sizeof(file2), stream->filename);
            p = strrchr(file2, '.');
            if (p)
                *p = '\0';
            if (!strcmp(file1, file2)) {
                pstrcpy(url, sizeof(url), stream->filename);
                filename = url;
                break;
            }
        }
    } else {
        doing_rtsp_redirector = 0;
    }

    stream = first_stream;
    while (stream != NULL) {
        if (!strcmp(stream->filename, filename) && validate_acl(stream, c))
            break;
        stream = stream->next;
    }
    if (stream == NULL) {
        sprintf(msg, "File '%s' not found", url);
        goto send_error;
    }

    c->stream = stream;
    memcpy(c->feed_streams, stream->feed_streams, sizeof(c->feed_streams));
    memset(c->switch_feed_streams, -1, sizeof(c->switch_feed_streams));

    if (stream->stream_type == STREAM_TYPE_REDIRECT) {
        c->http_error = 301;
        q = c->buffer;
        q += sprintf(q, "HTTP/1.0 301 Moved\r\n");
        q += sprintf(q, "Location: %s\r\n", stream->feed_filename);
        q += sprintf(q, "Content-type: text/html\r\n");
        q += sprintf(q, "\r\n");
        q += sprintf(q, "<html><head><title>Moved</title></head><body>\r\n");
        q += sprintf(q, "You should be <a href=\"%s\">redirected</a>.\r\n", stream->feed_filename);
        q += sprintf(q, "</body></html>\r\n");

        /* prepare output buffer */
        c->buffer_ptr = c->buffer;
        c->buffer_end = q;
        c->state = HTTPSTATE_SEND_HEADER;
        return 0;
    }

    /* If this is WMP, get the rate information */
    if (extract_rates(ratebuf, sizeof(ratebuf), c->buffer)) {
        if (modify_current_stream(c, ratebuf)) {
            for (i = 0; i < sizeof(c->feed_streams) / sizeof(c->feed_streams[0]); i++) {
                if (c->switch_feed_streams[i] >= 0)
                    do_switch_stream(c, i);
            }
        }
    }

    if (post == 0 && stream->stream_type == STREAM_TYPE_LIVE) {
        /* See if we meet the bandwidth requirements */
        for(i=0;i<stream->nb_streams;i++) {
            AVStream *st = stream->streams[i];
            switch(st->codec.codec_type) {
            case CODEC_TYPE_AUDIO:
                c->bandwidth += st->codec.bit_rate;
                break;
            case CODEC_TYPE_VIDEO:
                c->bandwidth += st->codec.bit_rate;
                break;
            default:
                av_abort();
            }
        }
    }

    c->bandwidth /= 1000;
    nb_bandwidth += c->bandwidth;

    if (post == 0 && nb_max_bandwidth < nb_bandwidth) {
        c->http_error = 200;
        q = c->buffer;
        q += sprintf(q, "HTTP/1.0 200 Server too busy\r\n");
        q += sprintf(q, "Content-type: text/html\r\n");
        q += sprintf(q, "\r\n");
        q += sprintf(q, "<html><head><title>Too busy</title></head><body>\r\n");
        q += sprintf(q, "The server is too busy to serve your request at this time.<p>\r\n");
        q += sprintf(q, "The bandwidth being served (including your stream) is %dkbit/sec, and this exceeds the limit of %dkbit/sec\r\n",
                     nb_bandwidth, nb_max_bandwidth);
        q += sprintf(q, "</body></html>\r\n");

        /* prepare output buffer */
        c->buffer_ptr = c->buffer;
        c->buffer_end = q;
        c->state = HTTPSTATE_SEND_HEADER;
        return 0;
    }

    if (doing_asx || doing_ram || doing_asf_redirector ||
        doing_rtsp_redirector) {
        char *hostinfo = 0;

        for (p = c->buffer; *p && *p != '\r' && *p != '\n'; ) {
            if (strncasecmp(p, "Host:", 5) == 0) {
                hostinfo = p + 5;
                break;
            }
            p = strchr(p, '\n');
            if (!p)
                break;

            p++;
        }

        if (hostinfo) {
            char *eoh;
            char hostbuf[260];

            while (isspace(*hostinfo))
                hostinfo++;

            eoh = strchr(hostinfo, '\n');
            if (eoh) {
                if (eoh[-1] == '\r')
                    eoh--;

                if (eoh - hostinfo < sizeof(hostbuf) - 1) {
                    memcpy(hostbuf, hostinfo, eoh - hostinfo);
                    hostbuf[eoh - hostinfo] = 0;

                    c->http_error = 200;
                    q = c->buffer;
                    if (doing_asx) {
                        q += sprintf(q, "HTTP/1.0 200 ASX Follows\r\n");
                        q += sprintf(q, "Content-type: video/x-ms-asf\r\n");
                        q += sprintf(q, "\r\n");
                        q += sprintf(q, "<ASX Version=\"3\">\r\n");
                        q += sprintf(q, "<!-- Autogenerated by ffserver -->\r\n");
                        q += sprintf(q, "<ENTRY><REF HREF=\"http://%s/%s%s\"/></ENTRY>\r\n",
                                     hostbuf, filename, info);
                        q += sprintf(q, "</ASX>\r\n");
                    } else if (doing_ram) {
                        q += sprintf(q, "HTTP/1.0 200 RAM Follows\r\n");
                        q += sprintf(q, "Content-type: audio/x-pn-realaudio\r\n");
                        q += sprintf(q, "\r\n");
                        q += sprintf(q, "# Autogenerated by ffserver\r\n");
                        q += sprintf(q, "http://%s/%s%s\r\n",
                                     hostbuf, filename, info);
                    } else if (doing_asf_redirector) {
                        q += sprintf(q, "HTTP/1.0 200 ASF Redirect follows\r\n");
                        q += sprintf(q, "Content-type: video/x-ms-asf\r\n");
                        q += sprintf(q, "\r\n");
                        q += sprintf(q, "[Reference]\r\n");
                        q += sprintf(q, "Ref1=http://%s/%s%s\r\n",
                                     hostbuf, filename, info);
                    } else if (doing_rtsp_redirector) {
                        char hostname[256], *p;
                        /* extract only hostname */
                        pstrcpy(hostname, sizeof(hostname), hostbuf);
                        p = strrchr(hostname, ':');
                        if (p)
                            *p = '\0';
                        q += sprintf(q, "HTTP/1.0 200 RTSP Redirect follows\r\n");
                        /* XXX: incorrect mime type ? */
                        q += sprintf(q, "Content-type: application/x-rtsp\r\n");
                        q += sprintf(q, "\r\n");
                        q += sprintf(q, "rtsp://%s:%d/%s\r\n",
                                     hostname, ntohs(my_rtsp_addr.sin_port),
                                     filename);
                    } else {
                        av_abort();
                    }

                    /* prepare output buffer */
                    c->buffer_ptr = c->buffer;
                    c->buffer_end = q;
                    c->state = HTTPSTATE_SEND_HEADER;
                    return 0;
                }
            }
        }

        sprintf(msg, "ASX/RAM file not handled");
        goto send_error;
    }

    stream->conns_served++;

    /* XXX: add authentication and IP match here */

    if (post) {
        /* if post, it means a feed is being sent */
        if (!stream->is_feed) {
            /* However it might be a status report from WMP! Let's log the data
             * as it might come in handy one day
             */
            char *logline = 0;
            int client_id = 0;

            for (p = c->buffer; *p && *p != '\r' && *p != '\n'; ) {
                if (strncasecmp(p, "Pragma: log-line=", 17) == 0) {
                    logline = p;
                    break;
                }
                if (strncasecmp(p, "Pragma: client-id=", 18) == 0) {
                    client_id = strtol(p + 18, 0, 10);
                }
                p = strchr(p, '\n');
                if (!p)
                    break;

                p++;
            }

            if (logline) {
                char *eol = strchr(logline, '\n');

                logline += 17;

                if (eol) {
                    if (eol[-1] == '\r')
                        eol--;
                    http_log("%.*s\n", eol - logline, logline);
                    c->suppress_log = 1;
                }
            }

#ifdef DEBUG_WMP
            http_log("\nGot request:\n%s\n", c->buffer);
#endif

            if (client_id && extract_rates(ratebuf, sizeof(ratebuf), c->buffer)) {
                HTTPContext *wmpc;

                /* Now we have to find the client_id */
                for (wmpc = first_http_ctx; wmpc; wmpc = wmpc->next) {
                    if (wmpc->wmp_client_id == client_id)
                        break;
                }

                if (wmpc) {
                    if (modify_current_stream(wmpc, ratebuf)) {
                        wmpc->switch_pending = 1;
                    }
                }
            }

            sprintf(msg, "POST command not handled");
            goto send_error;
        }
        if (http_start_receive_data(c) < 0) {
            sprintf(msg, "could not open feed");
            goto send_error;
        }
        c->http_error = 0;
        c->state = HTTPSTATE_RECEIVE_DATA;
        return 0;
    }

#ifdef DEBUG_WMP
    if (strcmp(stream->filename + strlen(stream->filename) - 4, ".asf") == 0) {
        http_log("\nGot request:\n%s\n", c->buffer);
    }
#endif

    if (c->stream->stream_type == STREAM_TYPE_STATUS)
        goto send_stats;

    /* open input stream */
    if (open_input_stream(c, info) < 0) {
        sprintf(msg, "Input stream corresponding to '%s' not found", url);
        goto send_error;
    }

    /* prepare http header */
    q = c->buffer;
    q += sprintf(q, "HTTP/1.0 200 OK\r\n");
    mime_type = c->stream->fmt->mime_type;
    if (!mime_type)
        mime_type = "application/x-octet_stream";
    q += sprintf(q, "Pragma: no-cache\r\n");

    /* for asf, we need extra headers */
    if (!strcmp(c->stream->fmt->name,"asf_stream")) {
        /* Need to allocate a client id */

        c->wmp_client_id = random() & 0x7fffffff;

        q += sprintf(q, "Server: Cougar 4.1.0.3923\r\nCache-Control: no-cache\r\nPragma: client-id=%d\r\nPragma: features=\"broadcast\"\r\n", c->wmp_client_id);
        mime_type = "application/octet-stream";
    }
    q += sprintf(q, "Content-Type: %s\r\n", mime_type);
    q += sprintf(q, "\r\n");

    /* prepare output buffer */
    c->http_error = 0;
    c->buffer_ptr = c->buffer;
    c->buffer_end = q;
    c->state = HTTPSTATE_SEND_HEADER;
    return 0;
 send_error:
    c->http_error = 404;
    q = c->buffer;
    q += sprintf(q, "HTTP/1.0 404 Not Found\r\n");
    q += sprintf(q, "Content-type: %s\r\n", "text/html");
    q += sprintf(q, "\r\n");
    q += sprintf(q, "<HTML>\n");
    q += sprintf(q, "<HEAD><TITLE>404 Not Found</TITLE></HEAD>\n");
    q += sprintf(q, "<BODY>%s</BODY>\n", msg);
    q += sprintf(q, "</HTML>\n");

    /* prepare output buffer */
    c->buffer_ptr = c->buffer;
    c->buffer_end = q;
    c->state = HTTPSTATE_SEND_HEADER;
    return 0;
 send_stats:
    compute_stats(c);
    c->http_error = 200; /* horrible : we use this value to avoid
                            going to the send data state */
    c->state = HTTPSTATE_SEND_HEADER;
    return 0;
}

static void fmt_bytecount(ByteIOContext *pb, INT64 count)
{
    static const char *suffix = " kMGTP";
    const char *s;

    for (s = suffix; count >= 100000 && s[1]; count /= 1000, s++) {
    }

    url_fprintf(pb, "%lld%c", count, *s);
}

static void compute_stats(HTTPContext *c)
{
    HTTPContext *c1;
    FFStream *stream;
    char *p;
    time_t ti;
    int i, len;
    ByteIOContext pb1, *pb = &pb1;

    if (url_open_dyn_buf(pb) < 0) {
        /* XXX: return an error ? */
        c->buffer_ptr = c->buffer;
        c->buffer_end = c->buffer;
        return;
    }

    url_fprintf(pb, "HTTP/1.0 200 OK\r\n");
    url_fprintf(pb, "Content-type: %s\r\n", "text/html");
    url_fprintf(pb, "Pragma: no-cache\r\n");
    url_fprintf(pb, "\r\n");

    url_fprintf(pb, "<HEAD><TITLE>FFServer Status</TITLE>\n");
    if (c->stream->feed_filename) {
        url_fprintf(pb, "<link rel=\"shortcut icon\" href=\"%s\">\n", c->stream->feed_filename);
    }
    url_fprintf(pb, "</HEAD>\n<BODY>");
    url_fprintf(pb, "<H1>FFServer Status</H1>\n");
    /* format status */
    url_fprintf(pb, "<H2>Available Streams</H2>\n");
    url_fprintf(pb, "<TABLE cellspacing=0 cellpadding=4>\n");
    url_fprintf(pb, "<TR><Th valign=top>Path<th align=left>Served<br>Conns<Th><br>bytes<Th valign=top>Format<Th>Bit rate<br>kbits/s<Th align=left>Video<br>kbits/s<th><br>Codec<Th align=left>Audio<br>kbits/s<th><br>Codec<Th align=left valign=top>Feed\n");
    stream = first_stream;
    while (stream != NULL) {
        char sfilename[1024];
        char *eosf;

        if (stream->feed != stream) {
            pstrcpy(sfilename, sizeof(sfilename) - 10, stream->filename);
            eosf = sfilename + strlen(sfilename);
            if (eosf - sfilename >= 4) {
                if (strcmp(eosf - 4, ".asf") == 0) {
                    strcpy(eosf - 4, ".asx");
                } else if (strcmp(eosf - 3, ".rm") == 0) {
                    strcpy(eosf - 3, ".ram");
                } else if (stream->fmt == &rtp_mux) {
                    /* generate a sample RTSP redirector - maybe should
                       generate a .sdp file ? */
                    eosf = strrchr(sfilename, '.');
                    if (!eosf)
                        eosf = sfilename + strlen(sfilename);
                    strcpy(eosf, ".rtsp");
                }
            }

            url_fprintf(pb, "<TR><TD><A HREF=\"/%s\">%s</A> ",
                        sfilename, stream->filename);
            url_fprintf(pb, "<td align=right> %d <td align=right> ",
                        stream->conns_served);
            fmt_bytecount(pb, stream->bytes_served);
            switch(stream->stream_type) {
            case STREAM_TYPE_LIVE:
                {
                    int audio_bit_rate = 0;
                    int video_bit_rate = 0;
                    char *audio_codec_name = "";
                    char *video_codec_name = "";
                    char *audio_codec_name_extra = "";
                    char *video_codec_name_extra = "";

                    for(i=0;i<stream->nb_streams;i++) {
                        AVStream *st = stream->streams[i];
                        AVCodec *codec = avcodec_find_encoder(st->codec.codec_id);
                        switch(st->codec.codec_type) {
                        case CODEC_TYPE_AUDIO:
                            audio_bit_rate += st->codec.bit_rate;
                            if (codec) {
                                if (*audio_codec_name)
                                    audio_codec_name_extra = "...";
                                audio_codec_name = codec->name;
                            }
                            break;
                        case CODEC_TYPE_VIDEO:
                            video_bit_rate += st->codec.bit_rate;
                            if (codec) {
                                if (*video_codec_name)
                                    video_codec_name_extra = "...";
                                video_codec_name = codec->name;
                            }
                            break;
                        default:
                            av_abort();
                        }
                    }
                    url_fprintf(pb, "<TD align=center> %s <TD align=right> %d <TD align=right> %d <TD> %s %s <TD align=right> %d <TD> %s %s",
                                stream->fmt->name,
                                (audio_bit_rate + video_bit_rate) / 1000,
                                video_bit_rate / 1000, video_codec_name, video_codec_name_extra,
                                audio_bit_rate / 1000, audio_codec_name, audio_codec_name_extra);
                    if (stream->feed) {
                        url_fprintf(pb, "<TD>%s", stream->feed->filename);
                    } else {
                        url_fprintf(pb, "<TD>%s", stream->feed_filename);
                    }
                    url_fprintf(pb, "\n");
                }
                break;
            default:
                url_fprintf(pb, "<TD align=center> - <TD align=right> - <TD align=right> - <td><td align=right> - <TD>\n");
                break;
            }
        }
        stream = stream->next;
    }
    url_fprintf(pb, "</TABLE>\n");

    stream = first_stream;
    while (stream != NULL) {
        if (stream->feed == stream) {
            url_fprintf(pb, "<h2>Feed %s</h2>", stream->filename);
            if (stream->pid) {
                url_fprintf(pb, "Running as pid %d.\n", stream->pid);

#if defined(linux) && !defined(CONFIG_NOCUTILS)
                {
                    FILE *pid_stat;
                    char ps_cmd[64];

                    /* This is somewhat linux specific I guess */
                    snprintf(ps_cmd, sizeof(ps_cmd),
                             "ps -o \"%%cpu,cputime\" --no-headers %d",
                             stream->pid);

                    pid_stat = popen(ps_cmd, "r");
                    if (pid_stat) {
                        char cpuperc[10];
                        char cpuused[64];

                        if (fscanf(pid_stat, "%10s %64s", cpuperc,
                                   cpuused) == 2) {
                            url_fprintf(pb, "Currently using %s%% of the cpu. Total time used %s.\n",
                                        cpuperc, cpuused);
                        }
                        fclose(pid_stat);
                    }
                }
#endif

                url_fprintf(pb, "<p>");
            }
            url_fprintf(pb, "<table cellspacing=0 cellpadding=4><tr><th>Stream<th>type<th>kbits/s<th align=left>codec<th align=left>Parameters\n");

            for (i = 0; i < stream->nb_streams; i++) {
                AVStream *st = stream->streams[i];
                AVCodec *codec = avcodec_find_encoder(st->codec.codec_id);
                char *type = "unknown";
                char parameters[64];

                parameters[0] = 0;

                switch(st->codec.codec_type) {
                case CODEC_TYPE_AUDIO:
                    type = "audio";
                    break;
                case CODEC_TYPE_VIDEO:
                    type = "video";
                    sprintf(parameters, "%dx%d, q=%d-%d, fps=%d", st->codec.width, st->codec.height,
                            st->codec.qmin, st->codec.qmax, st->codec.frame_rate / FRAME_RATE_BASE);
                    break;
                default:
                    av_abort();
                }
                url_fprintf(pb, "<tr><td align=right>%d<td>%s<td align=right>%d<td>%s<td>%s\n",
                            i, type, st->codec.bit_rate/1000, codec ? codec->name : "", parameters);
            }
            url_fprintf(pb, "</table>\n");

        }
        stream = stream->next;
    }

#if 0
    {
        float avg;
        AVCodecContext *enc;
        char buf[1024];

        /* feed status */
        stream = first_feed;
        while (stream != NULL) {
            url_fprintf(pb, "<H1>Feed '%s'</H1>\n", stream->filename);
            url_fprintf(pb, "<TABLE>\n");
            url_fprintf(pb, "<TR><TD>Parameters<TD>Frame count<TD>Size<TD>Avg bitrate (kbits/s)\n");
            for(i=0;i<stream->nb_streams;i++) {
                AVStream *st = stream->streams[i];
                FeedData *fdata = st->priv_data;
                enc = &st->codec;

                avcodec_string(buf, sizeof(buf), enc);
                avg = fdata->avg_frame_size * (float)enc->rate * 8.0;
                if (enc->codec->type == CODEC_TYPE_AUDIO && enc->frame_size > 0)
                    avg /= enc->frame_size;
                url_fprintf(pb, "<TR><TD>%s <TD> %d <TD> %Ld <TD> %0.1f\n",
                            buf, enc->frame_number, fdata->data_count, avg / 1000.0);
            }
            url_fprintf(pb, "</TABLE>\n");
            stream = stream->next_feed;
        }
    }
#endif

    /* connection status */
    url_fprintf(pb, "<H2>Connection Status</H2>\n");

    url_fprintf(pb, "Number of connections: %d / %d<BR>\n",
                nb_connections, nb_max_connections);

    url_fprintf(pb, "Bandwidth in use: %dk / %dk<BR>\n",
                nb_bandwidth, nb_max_bandwidth);

    url_fprintf(pb, "<TABLE>\n");
    url_fprintf(pb, "<TR><th>#<th>File<th>IP<th>Proto<th>State<th>Target bits/sec<th>Actual bits/sec<th>Bytes transferred\n");
    c1 = first_http_ctx;
    i = 0;
    while (c1 != NULL) {
        int bitrate;
        int j;

        bitrate = 0;
        if (c1->stream) {
            for (j = 0; j < c1->stream->nb_streams; j++) {
                if (!c1->stream->feed) {
                    bitrate += c1->stream->streams[j]->codec.bit_rate;
                } else {
                    if (c1->feed_streams[j] >= 0) {
                        bitrate += c1->stream->feed->streams[c1->feed_streams[j]]->codec.bit_rate;
                    }
                }
            }
        }

        i++;
        p = inet_ntoa(c1->from_addr.sin_addr);
        url_fprintf(pb, "<TR><TD><B>%d</B><TD>%s%s<TD>%s<TD>%s<TD>%s<td align=right>",
1680 | i, | |
1681 | c1->stream ? c1->stream->filename : "", | |
1682 | c1->state == HTTPSTATE_RECEIVE_DATA ? "(input)" : "", | |
1683 | p, | |
1684 | c1->protocol, | |
1685 | http_state[c1->state]); | |
1686 | fmt_bytecount(pb, bitrate); | |
1687 | url_fprintf(pb, "<td align=right>"); | |
1688 | fmt_bytecount(pb, compute_datarate(&c1->datarate, c1->data_count) * 8); | |
1689 | url_fprintf(pb, "<td align=right>"); | |
1690 | fmt_bytecount(pb, c1->data_count); | |
1691 | url_fprintf(pb, "\n"); | |
85f07f22 FB |
1692 | c1 = c1->next; |
1693 | } | |
2effd274 | 1694 | url_fprintf(pb, "</TABLE>\n"); |
85f07f22 FB |
1695 | |
1696 | /* date */ | |
1697 | ti = time(NULL); | |
1698 | p = ctime(&ti); | |
2effd274 FB |
1699 | url_fprintf(pb, "<HR size=1 noshade>Generated at %s", p); |
1700 | url_fprintf(pb, "</BODY>\n</HTML>\n"); | |
85f07f22 | 1701 | |
2effd274 FB |
1702 | len = url_close_dyn_buf(pb, &c->pb_buffer); |
1703 | c->buffer_ptr = c->pb_buffer; | |
1704 | c->buffer_end = c->pb_buffer + len; | |
85f07f22 FB |
1705 | } |
1706 | ||
2effd274 FB |
1707 | /* check if the parser needs to be opened for stream i */ |
1708 | static void open_parser(AVFormatContext *s, int i) | |
85f07f22 | 1709 | { |
2effd274 FB |
1710 | AVStream *st = s->streams[i]; |
1711 | AVCodec *codec; | |
31def229 | 1712 | |
2effd274 FB |
1713 | if (!st->codec.codec) { |
1714 | codec = avcodec_find_decoder(st->codec.codec_id); | |
1715 | if (codec && (codec->capabilities & CODEC_CAP_PARSE_ONLY)) { | |
1716 | st->codec.parse_only = 1; | |
1717 | if (avcodec_open(&st->codec, codec) < 0) { | |
1718 | st->codec.parse_only = 0; | |
1719 | } | |
cde25790 PG |
1720 | } |
1721 | } | |
85f07f22 FB |
1722 | } |
1723 | ||
1724 | static int open_input_stream(HTTPContext *c, const char *info) | |
1725 | { | |
1726 | char buf[128]; | |
1727 | char input_filename[1024]; | |
1728 | AVFormatContext *s; | |
2effd274 | 1729 | int buf_size, i; |
85f07f22 FB |
1730 | INT64 stream_pos; |
1731 | ||
1732 | /* find file name */ | |
1733 | if (c->stream->feed) { | |
1734 | strcpy(input_filename, c->stream->feed->feed_filename); | |
1735 | buf_size = FFM_PACKET_SIZE; | |
1736 | /* compute position (absolute time) */ | |
1737 | if (find_info_tag(buf, sizeof(buf), "date", info)) { | |
1738 | stream_pos = parse_date(buf, 0); | |
f747e6d3 PG |
1739 | } else if (find_info_tag(buf, sizeof(buf), "buffer", info)) { |
1740 | int prebuffer = strtol(buf, 0, 10); | |
59eb2ed1 | 1741 | stream_pos = av_gettime() - prebuffer * (INT64)1000000; |
85f07f22 | 1742 | } else { |
59eb2ed1 | 1743 | stream_pos = av_gettime() - c->stream->prebuffer * (INT64)1000; |
85f07f22 FB |
1744 | } |
1745 | } else { | |
1746 | strcpy(input_filename, c->stream->feed_filename); | |
1747 | buf_size = 0; | |
1748 | /* compute position (relative time) */ | |
1749 | if (find_info_tag(buf, sizeof(buf), "date", info)) { | |
1750 | stream_pos = parse_date(buf, 1); | |
1751 | } else { | |
1752 | stream_pos = 0; | |
1753 | } | |
1754 | } | |
1755 | if (input_filename[0] == '\0') | |
1756 | return -1; | |
1757 | ||
8256c0a3 PG |
1758 | #if 0 |
1759 | { time_t when = stream_pos / 1000000; | |
1760 | http_log("Stream pos = %lld, time=%s", stream_pos, ctime(&when)); | |
1761 | } | |
1762 | #endif | |
1763 | ||
85f07f22 | 1764 | /* open stream */ |
2effd274 FB |
1765 | if (av_open_input_file(&s, input_filename, NULL, buf_size, NULL) < 0) { |
1766 | http_log("%s not found", input_filename); | |
85f07f22 | 1767 | return -1; |
2effd274 | 1768 | } |
85f07f22 | 1769 | c->fmt_in = s; |
2effd274 FB |
1770 | |
1771 | /* open each parser */ | |
1772 | for(i=0;i<s->nb_streams;i++) | |
1773 | open_parser(s, i); | |
1774 | ||
1775 | /* choose the stream used as clock source for packet sending | |
1776 | (we favor the video stream if one is present) */ | |
1777 | c->pts_stream_index = 0; | |
1778 | for(i=0;i<c->stream->nb_streams;i++) { | |
1779 | if (c->pts_stream_index == 0 && | |
1780 | c->stream->streams[i]->codec.codec_type == CODEC_TYPE_VIDEO) { | |
1781 | c->pts_stream_index = i; | |
1782 | } | |
1783 | } | |
85f07f22 | 1784 | |
bd7cf6ad FB |
1785 | if (c->fmt_in->iformat->read_seek) { |
1786 | c->fmt_in->iformat->read_seek(c->fmt_in, stream_pos); | |
85f07f22 | 1787 | } |
2effd274 FB |
1788 | /* set the start time (needed for maxtime and RTP packet timing) */ |
1789 | c->start_time = cur_time; | |
1790 | c->first_pts = AV_NOPTS_VALUE; | |
85f07f22 FB |
1791 | return 0; |
1792 | } | |
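/* Editor's note (illustrative, not part of the original source): the
 * 'info' argument is the query string of the requested URL, so a client
 * can position the stream with requests such as
 *     http://example-host:8090/feed1.ffm?buffer=20
 *         ("buffer" is read with strtol() as a number of seconds of
 *          prebuffering, i.e. start roughly 20 seconds in the past)
 *     http://example-host:8090/file.mpg?date=<date string>
 *         (the value is handed to parse_date(); absolute for feeds,
 *          relative for plain files)
 * Host, port and stream names here are hypothetical, and the exact date
 * syntax accepted by parse_date() is not defined in this file. */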
1793 | ||
2effd274 FB |
1794 | /* currently deactivated because the new PTS handling is not | |
1795 | yet satisfactory */ | |
1796 | //#define AV_READ_FRAME | |
1797 | #ifdef AV_READ_FRAME | |
85f07f22 | 1798 | |
2effd274 FB |
1799 | /* XXX: generalize this in ffmpeg for picture/audio/data. Currently | |
1800 | the returned packet MUST NOT be freed */ | |
1801 | int av_read_frame(AVFormatContext *s, AVPacket *pkt) | |
1802 | { | |
1803 | AVStream *st; | |
1804 | int len, ret, old_nb_streams, i; | |
f747e6d3 | 1805 | |
2effd274 FB |
1806 | /* see if remaining frames must be parsed */ |
1807 | for(;;) { | |
1808 | if (s->cur_len > 0) { | |
1809 | st = s->streams[s->cur_pkt.stream_index]; | |
1810 | len = avcodec_parse_frame(&st->codec, &pkt->data, &pkt->size, | |
1811 | s->cur_ptr, s->cur_len); | |
1812 | if (len < 0) { | |
1813 | /* error: get next packet */ | |
1814 | s->cur_len = 0; | |
1815 | } else { | |
1816 | s->cur_ptr += len; | |
1817 | s->cur_len -= len; | |
1818 | if (pkt->size) { | |
1819 | /* init pts counter if not done */ | |
1820 | if (st->pts.den == 0) { | |
1821 | switch(st->codec.codec_type) { | |
1822 | case CODEC_TYPE_AUDIO: | |
1823 | st->pts_incr = (INT64)s->pts_den; | |
1824 | av_frac_init(&st->pts, st->pts.val, 0, | |
1825 | (INT64)s->pts_num * st->codec.sample_rate); | |
1826 | break; | |
1827 | case CODEC_TYPE_VIDEO: | |
1828 | st->pts_incr = (INT64)s->pts_den * FRAME_RATE_BASE; | |
1829 | av_frac_init(&st->pts, st->pts.val, 0, | |
1830 | (INT64)s->pts_num * st->codec.frame_rate); | |
1831 | break; | |
1832 | default: | |
1833 | av_abort(); | |
1834 | } | |
1835 | } | |
1836 | ||
1837 | /* a frame was read: return it */ | |
1838 | pkt->pts = st->pts.val; | |
1839 | #if 0 | |
1840 | printf("add pts=%Lx num=%Lx den=%Lx incr=%Lx\n", | |
1841 | st->pts.val, st->pts.num, st->pts.den, st->pts_incr); | |
1842 | #endif | |
1843 | switch(st->codec.codec_type) { | |
1844 | case CODEC_TYPE_AUDIO: | |
1845 | av_frac_add(&st->pts, st->pts_incr * st->codec.frame_size); | |
1846 | break; | |
1847 | case CODEC_TYPE_VIDEO: | |
1848 | av_frac_add(&st->pts, st->pts_incr); | |
1849 | break; | |
1850 | default: | |
1851 | av_abort(); | |
1852 | } | |
1853 | pkt->stream_index = s->cur_pkt.stream_index; | |
1854 | /* we use the codec indication because it is | |
1855 | more accurate than the demux flags */ | |
1856 | pkt->flags = 0; | |
1857 | if (st->codec.key_frame) | |
1858 | pkt->flags |= PKT_FLAG_KEY; | |
1859 | return 0; | |
1860 | } | |
85f07f22 FB |
1861 | } |
1862 | } else { | |
2effd274 FB |
1863 | /* free previous packet */ |
1864 | av_free_packet(&s->cur_pkt); | |
1865 | ||
1866 | old_nb_streams = s->nb_streams; | |
1867 | ret = av_read_packet(s, &s->cur_pkt); | |
1868 | if (ret) | |
1869 | return ret; | |
1870 | /* open a parser for each new stream */ | |
1871 | for(i = old_nb_streams; i < s->nb_streams; i++) | |
1872 | open_parser(s, i); | |
1873 | st = s->streams[s->cur_pkt.stream_index]; | |
1874 | ||
1875 | /* update current pts (XXX: dts handling) from packet, or | |
1876 | use current pts if none given */ | |
1877 | if (s->cur_pkt.pts != AV_NOPTS_VALUE) { | |
1878 | av_frac_set(&st->pts, s->cur_pkt.pts); | |
1879 | } else { | |
1880 | s->cur_pkt.pts = st->pts.val; | |
1881 | } | |
1882 | if (!st->codec.codec) { | |
1883 | /* no codec opened: just return the raw packet */ | |
1884 | *pkt = s->cur_pkt; | |
1885 | ||
1886 | /* no codec opened: just update the pts assuming the packet | |
1887 | holds one frame; the packet is freed on the next call */ | |
1888 | if (st->pts.den == 0) { | |
1889 | switch(st->codec.codec_type) { | |
1890 | case CODEC_TYPE_AUDIO: | |
1891 | st->pts_incr = (INT64)s->pts_den * st->codec.frame_size; | |
1892 | av_frac_init(&st->pts, st->pts.val, 0, | |
1893 | (INT64)s->pts_num * st->codec.sample_rate); | |
1894 | break; | |
1895 | case CODEC_TYPE_VIDEO: | |
1896 | st->pts_incr = (INT64)s->pts_den * FRAME_RATE_BASE; | |
1897 | av_frac_init(&st->pts, st->pts.val, 0, | |
1898 | (INT64)s->pts_num * st->codec.frame_rate); | |
1899 | break; | |
1900 | default: | |
1901 | av_abort(); | |
1902 | } | |
1903 | } | |
1904 | av_frac_add(&st->pts, st->pts_incr); | |
1905 | return 0; | |
1906 | } else { | |
1907 | s->cur_ptr = s->cur_pkt.data; | |
1908 | s->cur_len = s->cur_pkt.size; | |
85f07f22 FB |
1909 | } |
1910 | } | |
2effd274 FB |
1911 | } |
1912 | } | |
1913 | ||
1914 | static int compute_send_delay(HTTPContext *c) | |
1915 | { | |
1916 | INT64 cur_pts, delta_pts, next_pts; | |
1917 | int delay1; | |
1918 | ||
1919 | /* compute current pts value from system time */ | |
1920 | cur_pts = ((INT64)(cur_time - c->start_time) * c->fmt_in->pts_den) / | |
1921 | (c->fmt_in->pts_num * 1000LL); | |
1922 | /* compute the delta from the stream chosen as the | |
1923 | main clock (we do this to avoid using explicit | |
1924 | buffers for exact packet reordering on each | |
1925 | stream) */ | |
1926 | /* XXX: really need to fix the number of streams */ | |
1927 | if (c->pts_stream_index >= c->fmt_in->nb_streams) | |
1928 | next_pts = cur_pts; | |
1929 | else | |
1930 | next_pts = c->fmt_in->streams[c->pts_stream_index]->pts.val; | |
1931 | delta_pts = next_pts - cur_pts; | |
1932 | if (delta_pts <= 0) { | |
1933 | delay1 = 0; | |
1934 | } else { | |
1935 | delay1 = (delta_pts * 1000 * c->fmt_in->pts_num) / c->fmt_in->pts_den; | |
1936 | } | |
1937 | return delay1; | |
1938 | } | |
1939 | #else | |
1940 | ||
1941 | /* simple fallbacks used when AV_READ_FRAME is not defined */ | |
1942 | int av_read_frame(AVFormatContext *s, AVPacket *pkt) | |
1943 | { | |
1944 | return av_read_packet(s, pkt); | |
1945 | } | |
1946 | ||
1947 | static int compute_send_delay(HTTPContext *c) | |
1948 | { | |
a782f209 PG |
1949 | int datarate = 8 * get_longterm_datarate(&c->datarate, c->data_count); |
1950 | ||
1951 | if (datarate > c->bandwidth * 2000) { | |
1952 | return 1000; | |
1953 | } | |
2effd274 FB |
1954 | return 0; |
1955 | } | |
1956 | ||
1957 | #endif | |
1958 | ||
1959 | static int http_prepare_data(HTTPContext *c) | |
1960 | { | |
1961 | int i, len, ret; | |
1962 | AVFormatContext *ctx; | |
1963 | ||
1964 | switch(c->state) { | |
1965 | case HTTPSTATE_SEND_DATA_HEADER: | |
1966 | memset(&c->fmt_ctx, 0, sizeof(c->fmt_ctx)); | |
1967 | pstrcpy(c->fmt_ctx.author, sizeof(c->fmt_ctx.author), | |
1968 | c->stream->author); | |
1969 | pstrcpy(c->fmt_ctx.comment, sizeof(c->fmt_ctx.comment), | |
1970 | c->stream->comment); | |
1971 | pstrcpy(c->fmt_ctx.copyright, sizeof(c->fmt_ctx.copyright), | |
1972 | c->stream->copyright); | |
1973 | pstrcpy(c->fmt_ctx.title, sizeof(c->fmt_ctx.title), | |
1974 | c->stream->title); | |
1975 | ||
1976 | /* open output stream by using specified codecs */ | |
1977 | c->fmt_ctx.oformat = c->stream->fmt; | |
1978 | c->fmt_ctx.nb_streams = c->stream->nb_streams; | |
1979 | for(i=0;i<c->fmt_ctx.nb_streams;i++) { | |
1980 | AVStream *st; | |
1981 | st = av_mallocz(sizeof(AVStream)); | |
1982 | c->fmt_ctx.streams[i] = st; | |
1983 | /* if file or feed, then just take streams from FFStream struct */ | |
1984 | if (!c->stream->feed || | |
1985 | c->stream->feed == c->stream) | |
1986 | memcpy(st, c->stream->streams[i], sizeof(AVStream)); | |
1987 | else | |
1988 | memcpy(st, c->stream->feed->streams[c->stream->feed_streams[i]], | |
1989 | sizeof(AVStream)); | |
1990 | st->codec.frame_number = 0; /* XXX: should be done in | |
1991 | AVStream, not in codec */ | |
1992 | } | |
1993 | c->got_key_frame = 0; | |
1994 | ||
1995 | /* prepare header and save header data in a stream */ | |
1996 | if (url_open_dyn_buf(&c->fmt_ctx.pb) < 0) { | |
1997 | /* XXX: potential leak */ | |
1998 | return -1; | |
1999 | } | |
2000 | c->fmt_ctx.pb.is_streamed = 1; | |
2001 | ||
2002 | av_write_header(&c->fmt_ctx); | |
2003 | ||
2004 | len = url_close_dyn_buf(&c->fmt_ctx.pb, &c->pb_buffer); | |
2005 | c->buffer_ptr = c->pb_buffer; | |
2006 | c->buffer_end = c->pb_buffer + len; | |
2007 | ||
2008 | c->state = HTTPSTATE_SEND_DATA; | |
85f07f22 FB |
2009 | c->last_packet_sent = 0; |
2010 | break; | |
2011 | case HTTPSTATE_SEND_DATA: | |
2012 | /* find a new packet */ | |
85f07f22 FB |
2013 | { |
2014 | AVPacket pkt; | |
2effd274 | 2015 | |
85f07f22 FB |
2016 | /* read a packet from the input stream */ |
2017 | if (c->stream->feed) { | |
2018 | ffm_set_write_index(c->fmt_in, | |
2019 | c->stream->feed->feed_write_index, | |
2020 | c->stream->feed->feed_size); | |
2021 | } | |
ec3b2232 PG |
2022 | |
2023 | if (c->stream->max_time && | |
2ac887ba | 2024 | c->stream->max_time + c->start_time - cur_time < 0) { |
ec3b2232 PG |
2025 | /* We have timed out */ |
2026 | c->state = HTTPSTATE_SEND_DATA_TRAILER; | |
85f07f22 | 2027 | } else { |
a782f209 | 2028 | if (1 || c->is_packetized) { |
2effd274 FB |
2029 | if (compute_send_delay(c) > 0) { |
2030 | c->state = HTTPSTATE_WAIT; | |
2031 | return 1; /* state changed */ | |
2032 | } | |
2033 | } | |
2034 | if (av_read_frame(c->fmt_in, &pkt) < 0) { | |
2035 | if (c->stream->feed && c->stream->feed->feed_opened) { | |
2036 | /* if coming from a feed, we have reached the end of the | |
2037 | ffm file, so we must wait for more data */ | |
2038 | c->state = HTTPSTATE_WAIT_FEED; | |
2039 | return 1; /* state changed */ | |
2040 | } else { | |
2041 | /* must send trailer now because eof or error */ | |
2042 | c->state = HTTPSTATE_SEND_DATA_TRAILER; | |
2043 | } | |
2044 | } else { | |
2045 | /* update first pts if needed */ | |
2046 | if (c->first_pts == AV_NOPTS_VALUE) | |
2047 | c->first_pts = pkt.pts; | |
2048 | ||
2049 | /* send it to the appropriate stream */ | |
2050 | if (c->stream->feed) { | |
2051 | /* if coming from a feed, select the right stream */ | |
2052 | if (c->switch_pending) { | |
2053 | c->switch_pending = 0; | |
2054 | for(i=0;i<c->stream->nb_streams;i++) { | |
2055 | if (c->switch_feed_streams[i] == pkt.stream_index) { | |
2056 | if (pkt.flags & PKT_FLAG_KEY) { | |
2057 | do_switch_stream(c, i); | |
2058 | } | |
2059 | } | |
2060 | if (c->switch_feed_streams[i] >= 0) { | |
2061 | c->switch_pending = 1; | |
2062 | } | |
2063 | } | |
2064 | } | |
cde25790 | 2065 | for(i=0;i<c->stream->nb_streams;i++) { |
2effd274 FB |
2066 | if (c->feed_streams[i] == pkt.stream_index) { |
2067 | pkt.stream_index = i; | |
cde25790 | 2068 | if (pkt.flags & PKT_FLAG_KEY) { |
2effd274 FB |
2069 | c->got_key_frame |= 1 << i; |
2070 | } | |
2071 | /* See if we have all the key frames, then | |
2072 | * we start to send. This logic is not quite | |
2073 | * right, but it works for the case of a | |
2074 | * single video stream with one or more | |
2075 | * audio streams (for which every frame is | |
2076 | * typically a key frame). | |
2077 | */ | |
2078 | if (!c->stream->send_on_key || | |
2079 | ((c->got_key_frame + 1) >> c->stream->nb_streams)) { | |
2080 | goto send_it; | |
cde25790 | 2081 | } |
cde25790 PG |
2082 | } |
2083 | } | |
2effd274 FB |
2084 | } else { |
2085 | AVCodecContext *codec; | |
2086 | ||
2087 | send_it: | |
2088 | /* specific handling for RTP: we use several | |
2089 | output streams (one for each RTP | |
2090 | connection). XXX: need more abstract handling */ | |
2091 | if (c->is_packetized) { | |
2092 | c->packet_stream_index = pkt.stream_index; | |
2093 | ctx = c->rtp_ctx[c->packet_stream_index]; | |
2094 | codec = &ctx->streams[0]->codec; | |
2095 | } else { | |
2096 | ctx = &c->fmt_ctx; | |
2097 | /* Fudge here */ | |
2098 | codec = &ctx->streams[pkt.stream_index]->codec; | |
85f07f22 | 2099 | } |
2effd274 FB |
2100 | |
2101 | codec->key_frame = ((pkt.flags & PKT_FLAG_KEY) != 0); | |
2102 | ||
f747e6d3 | 2103 | #ifdef PJSG |
2effd274 FB |
2104 | if (codec->codec_type == CODEC_TYPE_AUDIO) { |
2105 | codec->frame_size = (codec->sample_rate * pkt.duration + 500000) / 1000000; | |
2106 | /* printf("Calculated size %d, from sr %d, duration %d\n", codec->frame_size, codec->sample_rate, pkt.duration); */ | |
2107 | } | |
2108 | #endif | |
2109 | ||
2110 | if (c->is_packetized) { | |
2111 | ret = url_open_dyn_packet_buf(&ctx->pb, | |
2112 | url_get_max_packet_size(c->rtp_handles[c->packet_stream_index])); | |
2113 | c->packet_byte_count = 0; | |
2114 | c->packet_start_time_us = av_gettime(); | |
2115 | } else { | |
2116 | ret = url_open_dyn_buf(&ctx->pb); | |
2117 | } | |
2118 | if (ret < 0) { | |
2119 | /* XXX: potential leak */ | |
2120 | return -1; | |
2121 | } | |
90dca141 | 2122 | if (av_write_frame(ctx, pkt.stream_index, pkt.data, pkt.size)) { |
2effd274 FB |
2123 | c->state = HTTPSTATE_SEND_DATA_TRAILER; |
2124 | } | |
2125 | ||
2126 | len = url_close_dyn_buf(&ctx->pb, &c->pb_buffer); | |
2127 | c->buffer_ptr = c->pb_buffer; | |
2128 | c->buffer_end = c->pb_buffer + len; | |
2129 | ||
2130 | codec->frame_number++; | |
f747e6d3 | 2131 | } |
2effd274 FB |
2132 | #ifndef AV_READ_FRAME |
2133 | av_free_packet(&pkt); | |
f747e6d3 | 2134 | #endif |
85f07f22 | 2135 | } |
85f07f22 FB |
2136 | } |
2137 | } | |
2138 | break; | |
2139 | default: | |
2140 | case HTTPSTATE_SEND_DATA_TRAILER: | |
2141 | /* last packet test ? */ | |
2effd274 | 2142 | if (c->last_packet_sent || c->is_packetized) |
85f07f22 | 2143 | return -1; |
2effd274 | 2144 | ctx = &c->fmt_ctx; |
85f07f22 | 2145 | /* prepare header */ |
2effd274 FB |
2146 | if (url_open_dyn_buf(&ctx->pb) < 0) { |
2147 | /* XXX: potential leak */ | |
2148 | return -1; | |
2149 | } | |
2150 | av_write_trailer(ctx); | |
2151 | len = url_close_dyn_buf(&ctx->pb, &c->pb_buffer); | |
2152 | c->buffer_ptr = c->pb_buffer; | |
2153 | c->buffer_end = c->pb_buffer + len; | |
2154 | ||
85f07f22 FB |
2155 | c->last_packet_sent = 1; |
2156 | break; | |
2157 | } | |
2158 | return 0; | |
2159 | } | |
2160 | ||
2effd274 FB |
2161 | /* in bit/s */ |
2162 | #define SHORT_TERM_BANDWIDTH 8000000 | |
2163 | ||
85f07f22 | 2164 | /* should convert the format at the same time */ |
5eb765ef | 2165 | static int http_send_data(HTTPContext *c) |
85f07f22 | 2166 | { |
2effd274 FB |
2167 | int len, ret, dt; |
2168 | ||
85f07f22 | 2169 | while (c->buffer_ptr >= c->buffer_end) { |
2effd274 | 2170 | av_freep(&c->pb_buffer); |
5eb765ef | 2171 | ret = http_prepare_data(c); |
85f07f22 FB |
2172 | if (ret < 0) |
2173 | return -1; | |
2174 | else if (ret == 0) { | |
a6e14edd | 2175 | continue; |
85f07f22 FB |
2176 | } else { |
2177 | /* state change requested */ | |
2178 | return 0; | |
2179 | } | |
2180 | } | |
2181 | ||
2effd274 FB |
2182 | if (c->buffer_ptr < c->buffer_end) { |
2183 | if (c->is_packetized) { | |
2184 | /* RTP/UDP data output */ | |
2185 | len = c->buffer_end - c->buffer_ptr; | |
2186 | if (len < 4) { | |
2187 | /* fail safe - should never happen */ | |
2188 | fail1: | |
2189 | c->buffer_ptr = c->buffer_end; | |
2190 | return 0; | |
2191 | } | |
2192 | len = (c->buffer_ptr[0] << 24) | | |
2193 | (c->buffer_ptr[1] << 16) | | |
2194 | (c->buffer_ptr[2] << 8) | | |
2195 | (c->buffer_ptr[3]); | |
2196 | if (len > (c->buffer_end - c->buffer_ptr)) | |
2197 | goto fail1; | |
2198 | ||
2199 | /* short term bandwidth limitation */ | |
2200 | dt = av_gettime() - c->packet_start_time_us; | |
2201 | if (dt < 1) | |
2202 | dt = 1; | |
2203 | ||
2204 | if ((c->packet_byte_count + len) * (INT64)1000000 >= | |
2205 | (SHORT_TERM_BANDWIDTH / 8) * (INT64)dt) { | |
2206 | /* bandwidth overflow : wait at most one tick and retry */ | |
2207 | c->state = HTTPSTATE_WAIT_SHORT; | |
2208 | return 0; | |
f747e6d3 | 2209 | } |
2effd274 FB |
2210 | |
2211 | c->buffer_ptr += 4; | |
2212 | url_write(c->rtp_handles[c->packet_stream_index], | |
2213 | c->buffer_ptr, len); | |
f747e6d3 | 2214 | c->buffer_ptr += len; |
2effd274 FB |
2215 | c->packet_byte_count += len; |
2216 | } else { | |
2217 | /* TCP data output */ | |
2218 | len = write(c->fd, c->buffer_ptr, c->buffer_end - c->buffer_ptr); | |
2219 | if (len < 0) { | |
2220 | if (errno != EAGAIN && errno != EINTR) { | |
2221 | /* error : close connection */ | |
2222 | return -1; | |
2223 | } else { | |
2224 | return 0; | |
2225 | } | |
2226 | } else { | |
2227 | c->buffer_ptr += len; | |
2228 | } | |
85f07f22 | 2229 | } |
2effd274 FB |
2230 | c->data_count += len; |
2231 | update_datarate(&c->datarate, c->data_count); | |
2232 | if (c->stream) | |
2233 | c->stream->bytes_served += len; | |
85f07f22 FB |
2234 | } |
2235 | return 0; | |
2236 | } | |
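/* Editor's note on the packetized branch above (the byte values are made up):
 * the dynamic packet buffer appears to store every RTP packet preceded by its
 * size as a 32-bit big-endian integer, which is exactly what the four shifts
 * above decode. The buffer thus looks like
 *
 *   [00 00 05 dc][1500-byte RTP packet][00 00 00 40][64-byte RTP packet]...
 *
 * Each 4-byte prefix is skipped (c->buffer_ptr += 4) and the following 'len'
 * bytes are written to the RTP handle of the current stream. */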
2237 | ||
2238 | static int http_start_receive_data(HTTPContext *c) | |
2239 | { | |
2240 | int fd; | |
2241 | ||
2242 | if (c->stream->feed_opened) | |
2243 | return -1; | |
2244 | ||
2245 | /* open feed */ | |
2246 | fd = open(c->stream->feed_filename, O_RDWR); | |
2247 | if (fd < 0) | |
2248 | return -1; | |
2249 | c->feed_fd = fd; | |
2250 | ||
2251 | c->stream->feed_write_index = ffm_read_write_index(fd); | |
2252 | c->stream->feed_size = lseek(fd, 0, SEEK_END); | |
2253 | lseek(fd, 0, SEEK_SET); | |
2254 | ||
2255 | /* init buffer input */ | |
2256 | c->buffer_ptr = c->buffer; | |
2257 | c->buffer_end = c->buffer + FFM_PACKET_SIZE; | |
2258 | c->stream->feed_opened = 1; | |
2259 | return 0; | |
2260 | } | |
2261 | ||
2262 | static int http_receive_data(HTTPContext *c) | |
2263 | { | |
85f07f22 FB |
2264 | HTTPContext *c1; |
2265 | ||
a6e14edd PG |
2266 | if (c->buffer_end > c->buffer_ptr) { |
2267 | int len; | |
2268 | ||
2269 | len = read(c->fd, c->buffer_ptr, c->buffer_end - c->buffer_ptr); | |
2270 | if (len < 0) { | |
2271 | if (errno != EAGAIN && errno != EINTR) { | |
2272 | /* error : close connection */ | |
2273 | goto fail; | |
2274 | } | |
2275 | } else if (len == 0) { | |
2276 | /* end of connection : close it */ | |
2277 | goto fail; | |
2278 | } else { | |
2279 | c->buffer_ptr += len; | |
2280 | c->data_count += len; | |
5eb765ef | 2281 | update_datarate(&c->datarate, c->data_count); |
a6e14edd PG |
2282 | } |
2283 | } | |
2284 | ||
85f07f22 | 2285 | if (c->buffer_ptr >= c->buffer_end) { |
f747e6d3 | 2286 | FFStream *feed = c->stream; |
85f07f22 FB |
2287 | /* a packet has been received: write it to the store, unless | |
2288 | it is a header packet */ | |
2289 | if (c->data_count > FFM_PACKET_SIZE) { | |
85f07f22 FB |
2290 | |
2291 | // printf("writing pos=0x%Lx size=0x%Lx\n", feed->feed_write_index, feed->feed_size); | |
2292 | /* XXX: use llseek or url_seek */ | |
2293 | lseek(c->feed_fd, feed->feed_write_index, SEEK_SET); | |
2294 | write(c->feed_fd, c->buffer, FFM_PACKET_SIZE); | |
2295 | ||
2296 | feed->feed_write_index += FFM_PACKET_SIZE; | |
2297 | /* update file size */ | |
2298 | if (feed->feed_write_index > c->stream->feed_size) | |
2299 | feed->feed_size = feed->feed_write_index; | |
2300 | ||
2301 | /* handle wrap around if max file size reached */ | |
2302 | if (feed->feed_write_index >= c->stream->feed_max_size) | |
2303 | feed->feed_write_index = FFM_PACKET_SIZE; | |
2304 | ||
2305 | /* write index */ | |
2306 | ffm_write_write_index(c->feed_fd, feed->feed_write_index); | |
2307 | ||
2308 | /* wake up any waiting connections */ | |
2309 | for(c1 = first_http_ctx; c1 != NULL; c1 = c1->next) { | |
2310 | if (c1->state == HTTPSTATE_WAIT_FEED && | |
2311 | c1->stream->feed == c->stream->feed) { | |
2312 | c1->state = HTTPSTATE_SEND_DATA; | |
2313 | } | |
2314 | } | |
f747e6d3 PG |
2315 | } else { |
2316 | /* We have a header in our hands that contains useful data */ | |
2317 | AVFormatContext s; | |
bd7cf6ad | 2318 | AVInputFormat *fmt_in; |
f747e6d3 PG |
2319 | ByteIOContext *pb = &s.pb; |
2320 | int i; | |
2321 | ||
2322 | memset(&s, 0, sizeof(s)); | |
2323 | ||
2324 | url_open_buf(pb, c->buffer, c->buffer_end - c->buffer, URL_RDONLY); | |
2325 | pb->buf_end = c->buffer_end; /* ?? */ | |
2326 | pb->is_streamed = 1; | |
2327 | ||
bd7cf6ad FB |
2328 | /* use feed output format name to find corresponding input format */ |
2329 | fmt_in = av_find_input_format(feed->fmt->name); | |
2330 | if (!fmt_in) | |
2331 | goto fail; | |
2332 | ||
ec3b2232 PG |
2333 | s.priv_data = av_mallocz(fmt_in->priv_data_size); |
2334 | if (!s.priv_data) | |
2335 | goto fail; | |
2336 | ||
bd7cf6ad | 2337 | if (fmt_in->read_header(&s, 0) < 0) { |
ec3b2232 | 2338 | av_freep(&s.priv_data); |
f747e6d3 PG |
2339 | goto fail; |
2340 | } | |
2341 | ||
2342 | /* Now we have the actual streams */ | |
2343 | if (s.nb_streams != feed->nb_streams) { | |
ec3b2232 | 2344 | av_freep(&s.priv_data); |
f747e6d3 PG |
2345 | goto fail; |
2346 | } | |
2347 | for (i = 0; i < s.nb_streams; i++) { | |
bd7cf6ad FB |
2348 | memcpy(&feed->streams[i]->codec, |
2349 | &s.streams[i]->codec, sizeof(AVCodecContext)); | |
f747e6d3 | 2350 | } |
ec3b2232 | 2351 | av_freep(&s.priv_data); |
85f07f22 FB |
2352 | } |
2353 | c->buffer_ptr = c->buffer; | |
2354 | } | |
2355 | ||
85f07f22 FB |
2356 | return 0; |
2357 | fail: | |
2358 | c->stream->feed_opened = 0; | |
2359 | close(c->feed_fd); | |
2360 | return -1; | |
2361 | } | |
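/* Editor's note (illustrative, the numeric values are assumptions): the feed
 * file acts as a circular buffer of fixed-size FFM_PACKET_SIZE packets. The
 * first packet holds the header, which is why the wrap-around above resets
 * feed_write_index to FFM_PACKET_SIZE rather than to 0. For example, if
 * FFM_PACKET_SIZE were 4096 and feed_max_size 1 MB, write offsets would cycle
 * 4096, 8192, ..., 1044480 and then 4096 again, with ffm_write_write_index()
 * persisting the current offset so that readers know where the newest packet
 * is. */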
2362 | ||
2effd274 FB |
2363 | /********************************************************************/ |
2364 | /* RTSP handling */ | |
2365 | ||
2366 | static void rtsp_reply_header(HTTPContext *c, enum RTSPStatusCode error_number) | |
2367 | { | |
2368 | const char *str; | |
2369 | time_t ti; | |
2370 | char *p; | |
2371 | char buf2[32]; | |
2372 | ||
2373 | switch(error_number) { | |
2374 | #define DEF(n, c, s) case c: str = s; break; | |
2375 | #include "rtspcodes.h" | |
2376 | #undef DEF | |
2377 | default: | |
2378 | str = "Unknown Error"; | |
2379 | break; | |
2380 | } | |
2381 | ||
2382 | url_fprintf(c->pb, "RTSP/1.0 %d %s\r\n", error_number, str); | |
2383 | url_fprintf(c->pb, "CSeq: %d\r\n", c->seq); | |
2384 | ||
2385 | /* output GMT time */ | |
2386 | ti = time(NULL); | |
2387 | p = ctime(&ti); | |
2388 | strcpy(buf2, p); | |
2389 | p = buf2 + strlen(p) - 1; | |
2390 | if (*p == '\n') | |
2391 | *p = '\0'; | |
2392 | url_fprintf(c->pb, "Date: %s GMT\r\n", buf2); | |
2393 | } | |
2394 | ||
2395 | static void rtsp_reply_error(HTTPContext *c, enum RTSPStatusCode error_number) | |
2396 | { | |
2397 | rtsp_reply_header(c, error_number); | |
2398 | url_fprintf(c->pb, "\r\n"); | |
2399 | } | |
2400 | ||
2401 | static int rtsp_parse_request(HTTPContext *c) | |
2402 | { | |
2403 | const char *p, *p1, *p2; | |
2404 | char cmd[32]; | |
2405 | char url[1024]; | |
2406 | char protocol[32]; | |
2407 | char line[1024]; | |
2408 | ByteIOContext pb1; | |
2409 | int len; | |
2410 | RTSPHeader header1, *header = &header1; | |
2411 | ||
2412 | c->buffer_ptr[0] = '\0'; | |
2413 | p = c->buffer; | |
2414 | ||
2415 | get_word(cmd, sizeof(cmd), &p); | |
2416 | get_word(url, sizeof(url), &p); | |
2417 | get_word(protocol, sizeof(protocol), &p); | |
2418 | ||
2419 | pstrcpy(c->method, sizeof(c->method), cmd); | |
2420 | pstrcpy(c->url, sizeof(c->url), url); | |
2421 | pstrcpy(c->protocol, sizeof(c->protocol), protocol); | |
2422 | ||
2423 | c->pb = &pb1; | |
2424 | if (url_open_dyn_buf(c->pb) < 0) { | |
2425 | /* XXX: cannot do more */ | |
2426 | c->pb = NULL; /* safety */ | |
2427 | return -1; | |
2428 | } | |
2429 | ||
2430 | /* check version name */ | |
2431 | if (strcmp(protocol, "RTSP/1.0") != 0) { | |
2432 | rtsp_reply_error(c, RTSP_STATUS_VERSION); | |
2433 | goto the_end; | |
2434 | } | |
2435 | ||
2436 | /* parse each header line */ | |
2437 | memset(header, 0, sizeof(RTSPHeader)); | |
2438 | /* skip to next line */ | |
2439 | while (*p != '\n' && *p != '\0') | |
2440 | p++; | |
2441 | if (*p == '\n') | |
2442 | p++; | |
2443 | while (*p != '\0') { | |
2444 | p1 = strchr(p, '\n'); | |
2445 | if (!p1) | |
2446 | break; | |
2447 | p2 = p1; | |
2448 | if (p2 > p && p2[-1] == '\r') | |
2449 | p2--; | |
2450 | /* skip empty line */ | |
2451 | if (p2 == p) | |
2452 | break; | |
2453 | len = p2 - p; | |
2454 | if (len > sizeof(line) - 1) | |
2455 | len = sizeof(line) - 1; | |
2456 | memcpy(line, p, len); | |
2457 | line[len] = '\0'; | |
2458 | rtsp_parse_line(header, line); | |
2459 | p = p1 + 1; | |
2460 | } | |
2461 | ||
2462 | /* handle sequence number */ | |
2463 | c->seq = header->seq; | |
2464 | ||
2465 | if (!strcmp(cmd, "DESCRIBE")) { | |
2466 | rtsp_cmd_describe(c, url); | |
2467 | } else if (!strcmp(cmd, "SETUP")) { | |
2468 | rtsp_cmd_setup(c, url, header); | |
2469 | } else if (!strcmp(cmd, "PLAY")) { | |
2470 | rtsp_cmd_play(c, url, header); | |
2471 | } else if (!strcmp(cmd, "PAUSE")) { | |
2472 | rtsp_cmd_pause(c, url, header); | |
2473 | } else if (!strcmp(cmd, "TEARDOWN")) { | |
2474 | rtsp_cmd_teardown(c, url, header); | |
2475 | } else { | |
2476 | rtsp_reply_error(c, RTSP_STATUS_METHOD); | |
2477 | } | |
2478 | the_end: | |
2479 | len = url_close_dyn_buf(c->pb, &c->pb_buffer); | |
2480 | c->pb = NULL; /* safety */ | |
2481 | if (len < 0) { | |
2482 | /* XXX: cannot do more */ | |
2483 | return -1; | |
2484 | } | |
2485 | c->buffer_ptr = c->pb_buffer; | |
2486 | c->buffer_end = c->pb_buffer + len; | |
2487 | c->state = RTSPSTATE_SEND_REPLY; | |
2488 | return 0; | |
2489 | } | |
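/* Editor's example of a request accepted by the parser above (host, port and
 * stream name are hypothetical):
 *
 *   SETUP rtsp://example-host:5454/test1/streamid=0 RTSP/1.0
 *   CSeq: 2
 *   Transport: RTP/AVP;unicast;client_port=5000-5001
 *
 * The first line provides cmd, url and protocol; every following header line
 * is passed to rtsp_parse_line(), and the reply built by the rtsp_cmd_*
 * handler is accumulated in a dynamic buffer before the connection switches
 * to RTSPSTATE_SEND_REPLY. */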
2490 | ||
2491 | static int prepare_sdp_description(HTTPContext *c, | |
2492 | FFStream *stream, UINT8 **pbuffer) | |
2493 | { | |
2494 | ByteIOContext pb1, *pb = &pb1; | |
2495 | struct sockaddr_in my_addr; | |
2496 | int len, i, payload_type; | |
2497 | const char *ipstr, *title, *mediatype; | |
2498 | AVStream *st; | |
2499 | ||
2500 | len = sizeof(my_addr); | |
2501 | getsockname(c->fd, (struct sockaddr *)&my_addr, &len); | |
2502 | ipstr = inet_ntoa(my_addr.sin_addr); | |
2503 | ||
2504 | if (url_open_dyn_buf(pb) < 0) | |
2505 | return -1; | |
2506 | ||
2507 | /* general media info */ | |
2508 | ||
2509 | url_fprintf(pb, "v=0\n"); | |
2510 | url_fprintf(pb, "o=- 0 0 IN IP4 %s\n", ipstr); | |
2511 | title = stream->title; | |
2512 | if (title[0] == '\0') | |
2513 | title = "No Title"; | |
2514 | url_fprintf(pb, "s=%s\n", title); | |
2515 | if (stream->comment[0] != '\0') | |
2516 | url_fprintf(pb, "i=%s\n", stream->comment); | |
2517 | ||
2518 | /* for each stream, we output the necessary info */ | |
2519 | for(i = 0; i < stream->nb_streams; i++) { | |
2520 | st = stream->streams[i]; | |
2521 | switch(st->codec.codec_type) { | |
2522 | case CODEC_TYPE_AUDIO: | |
2523 | mediatype = "audio"; | |
2524 | break; | |
2525 | case CODEC_TYPE_VIDEO: | |
2526 | mediatype = "video"; | |
2527 | break; | |
2528 | default: | |
2529 | mediatype = "application"; | |
2530 | break; | |
2531 | } | |
2532 | /* XXX: the port indication is not correct (but should be correct | |
2533 | for broadcast) */ | |
2534 | payload_type = rtp_get_payload_type(&st->codec); | |
2535 | ||
2536 | url_fprintf(pb, "m=%s %d RTP/AVP %d\n", | |
2537 | mediatype, 0, payload_type); | |
2538 | url_fprintf(pb, "a=control:streamid=%d\n", i); | |
2539 | } | |
2540 | return url_close_dyn_buf(pb, pbuffer); | |
2541 | } | |
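/* Editor's example of the SDP text generated above for a stream with one
 * video and one audio component (the IP address and the RTP payload type
 * numbers are illustrative, not computed here):
 *
 *   v=0
 *   o=- 0 0 IN IP4 192.168.0.1
 *   s=No Title
 *   m=video 0 RTP/AVP 32
 *   a=control:streamid=0
 *   m=audio 0 RTP/AVP 14
 *   a=control:streamid=1
 *
 * The port field is deliberately emitted as 0 (see the XXX note above); the
 * actual ports are negotiated later during the RTSP SETUP exchange. */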
2542 | ||
2543 | static void rtsp_cmd_describe(HTTPContext *c, const char *url) | |
2544 | { | |
2545 | FFStream *stream; | |
2546 | char path1[1024]; | |
2547 | const char *path; | |
2548 | UINT8 *content; | |
2549 | int content_length; | |
2550 | ||
2551 | /* find which url is asked */ | |
2552 | url_split(NULL, 0, NULL, 0, NULL, path1, sizeof(path1), url); | |
2553 | path = path1; | |
2554 | if (*path == '/') | |
2555 | path++; | |
2556 | ||
2557 | for(stream = first_stream; stream != NULL; stream = stream->next) { | |
2558 | if (!stream->is_feed && stream->fmt == &rtp_mux && | |
2559 | !strcmp(path, stream->filename)) { | |
2560 | goto found; | |
2561 | } | |
2562 | } | |
2563 | /* no stream found */ | |
2564 | rtsp_reply_error(c, RTSP_STATUS_SERVICE); /* XXX: right error ? */ | |
2565 | return; | |
2566 | ||
2567 | found: | |
2568 | /* prepare the media description in sdp format */ | |
2569 | content_length = prepare_sdp_description(c, stream, &content); | |
2570 | if (content_length < 0) { | |
2571 | rtsp_reply_error(c, RTSP_STATUS_INTERNAL); | |
2572 | return; | |
2573 | } | |
2574 | rtsp_reply_header(c, RTSP_STATUS_OK); | |
2575 | url_fprintf(c->pb, "Content-Type: application/sdp\r\n"); | |
2576 | url_fprintf(c->pb, "Content-Length: %d\r\n", content_length); | |
2577 | url_fprintf(c->pb, "\r\n"); | |
2578 | put_buffer(c->pb, content, content_length); | |
2579 | } | |
2580 | ||
2581 | static HTTPContext *find_rtp_session(const char *session_id) | |
2582 | { | |
2583 | HTTPContext *c; | |
2584 | ||
2585 | if (session_id[0] == '\0') | |
2586 | return NULL; | |
2587 | ||
2588 | for(c = first_http_ctx; c != NULL; c = c->next) { | |
2589 | if (!strcmp(c->session_id, session_id)) | |
2590 | return c; | |
2591 | } | |
2592 | return NULL; | |
2593 | } | |
2594 | ||
2595 | RTSPTransportField *find_transport(RTSPHeader *h, enum RTSPProtocol protocol) | |
2596 | { | |
2597 | RTSPTransportField *th; | |
2598 | int i; | |
2599 | ||
2600 | for(i=0;i<h->nb_transports;i++) { | |
2601 | th = &h->transports[i]; | |
2602 | if (th->protocol == protocol) | |
2603 | return th; | |
2604 | } | |
2605 | return NULL; | |
2606 | } | |
2607 | ||
2608 | static void rtsp_cmd_setup(HTTPContext *c, const char *url, | |
2609 | RTSPHeader *h) | |
2610 | { | |
2611 | FFStream *stream; | |
2612 | int stream_index, port; | |
2613 | char buf[1024]; | |
2614 | char path1[1024]; | |
2615 | const char *path; | |
2616 | HTTPContext *rtp_c; | |
2617 | RTSPTransportField *th; | |
2618 | struct sockaddr_in dest_addr; | |
2619 | RTSPActionServerSetup setup; | |
2620 | ||
2621 | /* find which url is asked */ | |
2622 | url_split(NULL, 0, NULL, 0, NULL, path1, sizeof(path1), url); | |
2623 | path = path1; | |
2624 | if (*path == '/') | |
2625 | path++; | |
2626 | ||
2627 | /* now check each stream */ | |
2628 | for(stream = first_stream; stream != NULL; stream = stream->next) { | |
2629 | if (!stream->is_feed && stream->fmt == &rtp_mux) { | |
2630 | /* accept aggregate filenames only if single stream */ | |
2631 | if (!strcmp(path, stream->filename)) { | |
2632 | if (stream->nb_streams != 1) { | |
2633 | rtsp_reply_error(c, RTSP_STATUS_AGGREGATE); | |
2634 | return; | |
2635 | } | |
2636 | stream_index = 0; | |
2637 | goto found; | |
2638 | } | |
2639 | ||
2640 | for(stream_index = 0; stream_index < stream->nb_streams; | |
2641 | stream_index++) { | |
2642 | snprintf(buf, sizeof(buf), "%s/streamid=%d", | |
2643 | stream->filename, stream_index); | |
2644 | if (!strcmp(path, buf)) | |
2645 | goto found; | |
2646 | } | |
2647 | } | |
2648 | } | |
2649 | /* no stream found */ | |
2650 | rtsp_reply_error(c, RTSP_STATUS_SERVICE); /* XXX: right error ? */ | |
2651 | return; | |
2652 | found: | |
2653 | ||
2654 | /* generate session id if needed */ | |
2655 | if (h->session_id[0] == '\0') { | |
2656 | snprintf(h->session_id, sizeof(h->session_id), | |
2657 | "%08x%08x", (int)random(), (int)random()); | |
2658 | } | |
2659 | ||
2660 | /* find rtp session, and create it if none found */ | |
2661 | rtp_c = find_rtp_session(h->session_id); | |
2662 | if (!rtp_c) { | |
2663 | rtp_c = rtp_new_connection(c, stream, h->session_id); | |
2664 | if (!rtp_c) { | |
2665 | rtsp_reply_error(c, RTSP_STATUS_BANDWIDTH); | |
2666 | return; | |
2667 | } | |
2668 | ||
2669 | /* open input stream */ | |
2670 | if (open_input_stream(rtp_c, "") < 0) { | |
2671 | rtsp_reply_error(c, RTSP_STATUS_INTERNAL); | |
2672 | return; | |
2673 | } | |
2674 | ||
2675 | /* always prefer UDP */ | |
2676 | th = find_transport(h, RTSP_PROTOCOL_RTP_UDP); | |
2677 | if (!th) { | |
2678 | th = find_transport(h, RTSP_PROTOCOL_RTP_TCP); | |
2679 | if (!th) { | |
2680 | rtsp_reply_error(c, RTSP_STATUS_TRANSPORT); | |
2681 | return; | |
2682 | } | |
2683 | } | |
2684 | rtp_c->rtp_protocol = th->protocol; | |
2685 | } | |
2686 | ||
2687 | /* check that the stream is consistent (needed because several SETUP | |
2688 | requests may be issued for a given file) */ | |
2689 | if (rtp_c->stream != stream) { | |
2690 | rtsp_reply_error(c, RTSP_STATUS_SERVICE); | |
2691 | return; | |
2692 | } | |
2693 | ||
2694 | /* test if stream is already set up */ | |
2695 | if (rtp_c->rtp_ctx[stream_index]) { | |
2696 | rtsp_reply_error(c, RTSP_STATUS_STATE); | |
2697 | return; | |
2698 | } | |
2699 | ||
2700 | /* check transport */ | |
2701 | th = find_transport(h, rtp_c->rtp_protocol); | |
2702 | if (!th || (th->protocol == RTSP_PROTOCOL_RTP_UDP && | |
2703 | th->client_port_min <= 0)) { | |
2704 | rtsp_reply_error(c, RTSP_STATUS_TRANSPORT); | |
2705 | return; | |
2706 | } | |
2707 | ||
2708 | /* setup default options */ | |
2709 | setup.transport_option[0] = '\0'; | |
2710 | dest_addr = rtp_c->from_addr; | |
2711 | dest_addr.sin_port = htons(th->client_port_min); | |
2712 | ||
2713 | /* add transport option if needed */ | |
2714 | if (ff_rtsp_callback) { | |
2715 | setup.ipaddr = ntohl(dest_addr.sin_addr.s_addr); | |
2716 | if (ff_rtsp_callback(RTSP_ACTION_SERVER_SETUP, rtp_c->session_id, | |
2717 | (char *)&setup, sizeof(setup), | |
2718 | stream->rtsp_option) < 0) { | |
2719 | rtsp_reply_error(c, RTSP_STATUS_TRANSPORT); | |
2720 | return; | |
2721 | } | |
2722 | dest_addr.sin_addr.s_addr = htonl(setup.ipaddr); | |
2723 | } | |
2724 | ||
2725 | /* setup stream */ | |
2726 | if (rtp_new_av_stream(rtp_c, stream_index, &dest_addr) < 0) { | |
2727 | rtsp_reply_error(c, RTSP_STATUS_TRANSPORT); | |
2728 | return; | |
2729 | } | |
2730 | ||
2731 | /* now everything is OK, so we can send the connection parameters */ | |
2732 | rtsp_reply_header(c, RTSP_STATUS_OK); | |
2733 | /* session ID */ | |
2734 | url_fprintf(c->pb, "Session: %s\r\n", rtp_c->session_id); | |
2735 | ||
2736 | switch(rtp_c->rtp_protocol) { | |
2737 | case RTSP_PROTOCOL_RTP_UDP: | |
2738 | port = rtp_get_local_port(rtp_c->rtp_handles[stream_index]); | |
2739 | url_fprintf(c->pb, "Transport: RTP/AVP/UDP;unicast;" | |
2740 | "client_port=%d-%d;server_port=%d-%d", | |
2741 | th->client_port_min, th->client_port_min + 1, | |
2742 | port, port + 1); | |
2743 | break; | |
2744 | case RTSP_PROTOCOL_RTP_TCP: | |
2745 | url_fprintf(c->pb, "Transport: RTP/AVP/TCP;interleaved=%d-%d", | |
2746 | stream_index * 2, stream_index * 2 + 1); | |
2747 | break; | |
2748 | default: | |
2749 | break; | |
2750 | } | |
2751 | if (setup.transport_option[0] != '\0') { | |
2752 | url_fprintf(c->pb, ";%s", setup.transport_option); | |
2753 | } | |
2754 | url_fprintf(c->pb, "\r\n"); | |
2755 | ||
2756 | ||
2757 | url_fprintf(c->pb, "\r\n"); | |
2758 | } | |
2759 | ||
2760 | ||
2761 | /* find an rtp connection by using the session ID. Check consistency | |
2762 | with filename */ | |
2763 | static HTTPContext *find_rtp_session_with_url(const char *url, | |
2764 | const char *session_id) | |
2765 | { | |
2766 | HTTPContext *rtp_c; | |
2767 | char path1[1024]; | |
2768 | const char *path; | |
2769 | ||
2770 | rtp_c = find_rtp_session(session_id); | |
2771 | if (!rtp_c) | |
2772 | return NULL; | |
2773 | ||
2774 | /* find which url is asked */ | |
2775 | url_split(NULL, 0, NULL, 0, NULL, path1, sizeof(path1), url); | |
2776 | path = path1; | |
2777 | if (*path == '/') | |
2778 | path++; | |
2779 | if (strcmp(path, rtp_c->stream->filename) != 0) | |
2780 | return NULL; | |
2781 | return rtp_c; | |
2782 | } | |
2783 | ||
2784 | static void rtsp_cmd_play(HTTPContext *c, const char *url, RTSPHeader *h) | |
2785 | { | |
2786 | HTTPContext *rtp_c; | |
2787 | ||
2788 | rtp_c = find_rtp_session_with_url(url, h->session_id); | |
2789 | if (!rtp_c) { | |
2790 | rtsp_reply_error(c, RTSP_STATUS_SESSION); | |
2791 | return; | |
2792 | } | |
2793 | ||
2794 | if (rtp_c->state != HTTPSTATE_SEND_DATA && | |
2795 | rtp_c->state != HTTPSTATE_WAIT_FEED && | |
2796 | rtp_c->state != HTTPSTATE_READY) { | |
2797 | rtsp_reply_error(c, RTSP_STATUS_STATE); | |
2798 | return; | |
2799 | } | |
2800 | ||
2801 | rtp_c->state = HTTPSTATE_SEND_DATA; | |
2802 | ||
2803 | /* now everything is OK, so we can send the connection parameters */ | |
2804 | rtsp_reply_header(c, RTSP_STATUS_OK); | |
2805 | /* session ID */ | |
2806 | url_fprintf(c->pb, "Session: %s\r\n", rtp_c->session_id); | |
2807 | url_fprintf(c->pb, "\r\n"); | |
2808 | } | |
2809 | ||
2810 | static void rtsp_cmd_pause(HTTPContext *c, const char *url, RTSPHeader *h) | |
2811 | { | |
2812 | HTTPContext *rtp_c; | |
2813 | ||
2814 | rtp_c = find_rtp_session_with_url(url, h->session_id); | |
2815 | if (!rtp_c) { | |
2816 | rtsp_reply_error(c, RTSP_STATUS_SESSION); | |
2817 | return; | |
2818 | } | |
2819 | ||
2820 | if (rtp_c->state != HTTPSTATE_SEND_DATA && | |
2821 | rtp_c->state != HTTPSTATE_WAIT_FEED) { | |
2822 | rtsp_reply_error(c, RTSP_STATUS_STATE); | |
2823 | return; | |
2824 | } | |
2825 | ||
2826 | rtp_c->state = HTTPSTATE_READY; | |
2827 | ||
2828 | /* now everything is OK, so we can send the connection parameters */ | |
2829 | rtsp_reply_header(c, RTSP_STATUS_OK); | |
2830 | /* session ID */ | |
2831 | url_fprintf(c->pb, "Session: %s\r\n", rtp_c->session_id); | |
2832 | url_fprintf(c->pb, "\r\n"); | |
2833 | } | |
2834 | ||
2835 | static void rtsp_cmd_teardown(HTTPContext *c, const char *url, RTSPHeader *h) | |
2836 | { | |
2837 | HTTPContext *rtp_c; | |
2838 | ||
2839 | rtp_c = find_rtp_session_with_url(url, h->session_id); | |
2840 | if (!rtp_c) { | |
2841 | rtsp_reply_error(c, RTSP_STATUS_SESSION); | |
2842 | return; | |
2843 | } | |
2844 | ||
2845 | /* abort the session */ | |
2846 | close_connection(rtp_c); | |
2847 | ||
2848 | if (ff_rtsp_callback) { | |
2849 | ff_rtsp_callback(RTSP_ACTION_SERVER_TEARDOWN, rtp_c->session_id, | |
2850 | NULL, 0, | |
2851 | rtp_c->stream->rtsp_option); | |
2852 | } | |
2853 | ||
2854 | /* now everything is OK, so we can send the connection parameters */ | |
2855 | rtsp_reply_header(c, RTSP_STATUS_OK); | |
2856 | /* session ID */ | |
2857 | url_fprintf(c->pb, "Session: %s\r\n", rtp_c->session_id); | |
2858 | url_fprintf(c->pb, "\r\n"); | |
2859 | } | |
2860 | ||
2861 | ||
2862 | /********************************************************************/ | |
2863 | /* RTP handling */ | |
2864 | ||
2865 | static HTTPContext *rtp_new_connection(HTTPContext *rtsp_c, | |
2866 | FFStream *stream, const char *session_id) | |
2867 | { | |
2868 | HTTPContext *c = NULL; | |
2869 | ||
2870 | /* XXX: should output a warning page when coming | |
2871 | close to the connection limit */ | |
2872 | if (nb_connections >= nb_max_connections) | |
2873 | goto fail; | |
2874 | ||
2875 | /* add a new connection */ | |
2876 | c = av_mallocz(sizeof(HTTPContext)); | |
2877 | if (!c) | |
2878 | goto fail; | |
2879 | ||
2880 | c->fd = -1; | |
2881 | c->poll_entry = NULL; | |
2882 | c->from_addr = rtsp_c->from_addr; | |
2883 | c->buffer_size = IOBUFFER_INIT_SIZE; | |
2884 | c->buffer = av_malloc(c->buffer_size); | |
2885 | if (!c->buffer) | |
2886 | goto fail; | |
2887 | nb_connections++; | |
2888 | c->stream = stream; | |
2889 | pstrcpy(c->session_id, sizeof(c->session_id), session_id); | |
2890 | c->state = HTTPSTATE_READY; | |
2891 | c->is_packetized = 1; | |
2892 | /* protocol is shown in statistics */ | |
2893 | pstrcpy(c->protocol, sizeof(c->protocol), "RTP"); | |
2894 | ||
2895 | c->next = first_http_ctx; | |
2896 | first_http_ctx = c; | |
2897 | return c; | |
2898 | ||
2899 | fail: | |
2900 | if (c) { | |
2901 | av_free(c->buffer); | |
2902 | av_free(c); | |
2903 | } | |
2904 | return NULL; | |
2905 | } | |
2906 | ||
2907 | /* add a new RTP stream in an RTP connection (used in RTSP SETUP | |
2908 | command). if dest_addr is NULL, then TCP tunneling in RTSP is | |
2909 | used. */ | |
2910 | static int rtp_new_av_stream(HTTPContext *c, | |
2911 | int stream_index, struct sockaddr_in *dest_addr) | |
2912 | { | |
2913 | AVFormatContext *ctx; | |
2914 | AVStream *st; | |
2915 | char *ipaddr; | |
2916 | URLContext *h = NULL; /* NULL so the fail path can safely test it */ | |
2917 | UINT8 *dummy_buf; | |
2918 | ||
2919 | /* now we can open the relevant output stream */ | |
2920 | ctx = av_mallocz(sizeof(AVFormatContext)); | |
2921 | if (!ctx) | |
2922 | return -1; | |
2923 | ctx->oformat = &rtp_mux; | |
2924 | ||
2925 | st = av_mallocz(sizeof(AVStream)); | |
2926 | if (!st) | |
2927 | goto fail; | |
2928 | ctx->nb_streams = 1; | |
2929 | ctx->streams[0] = st; | |
2930 | ||
2931 | if (!c->stream->feed || | |
2932 | c->stream->feed == c->stream) { | |
2933 | memcpy(st, c->stream->streams[stream_index], sizeof(AVStream)); | |
2934 | } else { | |
2935 | memcpy(st, | |
2936 | c->stream->feed->streams[c->stream->feed_streams[stream_index]], | |
2937 | sizeof(AVStream)); | |
2938 | } | |
2939 | ||
2940 | if (dest_addr) { | |
2941 | /* build destination RTP address */ | |
2942 | ipaddr = inet_ntoa(dest_addr->sin_addr); | |
2943 | ||
2944 | snprintf(ctx->filename, sizeof(ctx->filename), | |
2945 | "rtp://%s:%d", ipaddr, ntohs(dest_addr->sin_port)); | |
2946 | ||
2947 | printf("open %s\n", ctx->filename); | |
2948 | ||
2949 | if (url_open(&h, ctx->filename, URL_WRONLY) < 0) | |
2950 | goto fail; | |
2951 | c->rtp_handles[stream_index] = h; | |
2952 | } else { | |
2953 | goto fail; | |
2954 | } | |
2955 | ||
2956 | /* normally, no packets should be output here, but the packet size may be checked */ | |
2957 | if (url_open_dyn_packet_buf(&ctx->pb, | |
2958 | url_get_max_packet_size(h)) < 0) { | |
2959 | /* XXX: close stream */ | |
2960 | goto fail; | |
2961 | } | |
2962 | if (av_write_header(ctx) < 0) { | |
2963 | fail: | |
2964 | if (h) | |
2965 | url_close(h); | |
2966 | av_free(ctx); | |
2967 | return -1; | |
2968 | } | |
2969 | url_close_dyn_buf(&ctx->pb, &dummy_buf); | |
2970 | av_free(dummy_buf); | |
2971 | ||
2972 | c->rtp_ctx[stream_index] = ctx; | |
2973 | return 0; | |
2974 | } | |
2975 | ||
2976 | /********************************************************************/ | |
2977 | /* ffserver initialization */ | |
2978 | ||
2979 | AVStream *add_av_stream1(FFStream *stream, AVCodecContext *codec) | |
2980 | { | |
2981 | AVStream *fst; | |
2982 | ||
2983 | fst = av_mallocz(sizeof(AVStream)); | |
2984 | if (!fst) | |
2985 | return NULL; | |
2986 | fst->priv_data = av_mallocz(sizeof(FeedData)); | |
2987 | memcpy(&fst->codec, codec, sizeof(AVCodecContext)); | |
2988 | stream->streams[stream->nb_streams++] = fst; | |
2989 | return fst; | |
2990 | } | |
2991 | ||
85f07f22 FB |
2992 | /* return the stream number in the feed */ |
2993 | int add_av_stream(FFStream *feed, | |
2994 | AVStream *st) | |
2995 | { | |
2996 | AVStream *fst; | |
2997 | AVCodecContext *av, *av1; | |
2998 | int i; | |
2999 | ||
3000 | av = &st->codec; | |
3001 | for(i=0;i<feed->nb_streams;i++) { | |
3002 | st = feed->streams[i]; | |
3003 | av1 = &st->codec; | |
f747e6d3 PG |
3004 | if (av1->codec_id == av->codec_id && |
3005 | av1->codec_type == av->codec_type && | |
85f07f22 FB |
3006 | av1->bit_rate == av->bit_rate) { |
3007 | ||
3008 | switch(av->codec_type) { | |
3009 | case CODEC_TYPE_AUDIO: | |
3010 | if (av1->channels == av->channels && | |
3011 | av1->sample_rate == av->sample_rate) | |
3012 | goto found; | |
3013 | break; | |
3014 | case CODEC_TYPE_VIDEO: | |
3015 | if (av1->width == av->width && | |
3016 | av1->height == av->height && | |
3017 | av1->frame_rate == av->frame_rate && | |
3018 | av1->gop_size == av->gop_size) | |
3019 | goto found; | |
3020 | break; | |
f747e6d3 | 3021 | default: |
ec3b2232 | 3022 | av_abort(); |
85f07f22 FB |
3023 | } |
3024 | } | |
3025 | } | |
3026 | ||
2effd274 | 3027 | fst = add_av_stream1(feed, av); |
85f07f22 FB |
3028 | if (!fst) |
3029 | return -1; | |
85f07f22 FB |
3030 | return feed->nb_streams - 1; |
3031 | found: | |
3032 | return i; | |
3033 | } | |
3034 | ||
2effd274 FB |
3035 | void remove_stream(FFStream *stream) |
3036 | { | |
3037 | FFStream **ps; | |
3038 | ps = &first_stream; | |
3039 | while (*ps != NULL) { | |
3040 | if (*ps == stream) { | |
3041 | *ps = (*ps)->next; | |
3042 | } else { | |
3043 | ps = &(*ps)->next; | |
3044 | } | |
3045 | } | |
3046 | } | |
3047 | ||
3048 | /* compute the needed AVStream for each file */ | |
3049 | void build_file_streams(void) | |
3050 | { | |
3051 | FFStream *stream, *stream_next; | |
3052 | AVFormatContext *infile; | |
3053 | int i; | |
3054 | ||
3055 | /* gather all streams */ | |
3056 | for(stream = first_stream; stream != NULL; stream = stream_next) { | |
3057 | stream_next = stream->next; | |
3058 | if (stream->stream_type == STREAM_TYPE_LIVE && | |
3059 | !stream->feed) { | |
3060 | /* the stream comes from a file */ | |
3061 | /* try to open the file */ | |
3062 | /* open stream */ | |
3063 | if (av_open_input_file(&infile, stream->feed_filename, | |
3064 | NULL, 0, NULL) < 0) { | |
3065 | http_log("%s not found", stream->feed_filename); | |
3066 | /* remove stream (no need to spend more time on it) */ | |
3067 | fail: | |
3068 | remove_stream(stream); | |
3069 | } else { | |
3070 | /* find all the AVStreams inside and reference them in | |
3071 | 'stream' */ | |
3072 | if (av_find_stream_info(infile) < 0) { | |
3073 | http_log("Could not find codec parameters from '%s'", | |
3074 | stream->feed_filename); | |
3075 | av_close_input_file(infile); | |
3076 | goto fail; | |
3077 | } | |
3078 | for(i=0;i<infile->nb_streams;i++) { | |
3079 | add_av_stream1(stream, &infile->streams[i]->codec); | |
3080 | } | |
3081 | av_close_input_file(infile); | |
3082 | } | |
3083 | } | |
3084 | } | |
3085 | } | |
3086 | ||
85f07f22 FB |
3087 | /* compute the needed AVStream for each feed */ |
3088 | void build_feed_streams(void) | |
3089 | { | |
3090 | FFStream *stream, *feed; | |
3091 | int i; | |
3092 | ||
3093 | /* gather all streams */ | |
3094 | for(stream = first_stream; stream != NULL; stream = stream->next) { | |
3095 | feed = stream->feed; | |
3096 | if (feed) { | |
3097 | if (!stream->is_feed) { | |
2effd274 | 3098 | /* we handle a stream coming from a feed */ |
85f07f22 FB |
3099 | for(i=0;i<stream->nb_streams;i++) { |
3100 | stream->feed_streams[i] = add_av_stream(feed, stream->streams[i]); | |
3101 | } | |
cde25790 PG |
3102 | } |
3103 | } | |
3104 | } | |
3105 | ||
3106 | /* gather all streams */ | |
3107 | for(stream = first_stream; stream != NULL; stream = stream->next) { | |
3108 | feed = stream->feed; | |
3109 | if (feed) { | |
3110 | if (stream->is_feed) { | |
85f07f22 FB |
3111 | for(i=0;i<stream->nb_streams;i++) { |
3112 | stream->feed_streams[i] = i; | |
3113 | } | |
3114 | } | |
3115 | } | |
3116 | } | |
3117 | ||
3118 | /* create feed files if needed */ | |
3119 | for(feed = first_feed; feed != NULL; feed = feed->next_feed) { | |
3120 | int fd; | |
3121 | ||
59eb2ed1 PG |
3122 | if (url_exist(feed->feed_filename)) { |
3123 | /* See if it matches */ | |
3124 | AVFormatContext *s; | |
3125 | int matches = 0; | |
3126 | ||
3127 | if (av_open_input_file(&s, feed->feed_filename, NULL, FFM_PACKET_SIZE, NULL) >= 0) { | |
3128 | /* Now see if it matches */ | |
3129 | if (s->nb_streams == feed->nb_streams) { | |
3130 | matches = 1; | |
3131 | for(i=0;i<s->nb_streams;i++) { | |
3132 | AVStream *sf, *ss; | |
3133 | sf = feed->streams[i]; | |
3134 | ss = s->streams[i]; | |
3135 | ||
3136 | if (sf->index != ss->index || | |
3137 | sf->id != ss->id) { | |
3138 | printf("Index & Id do not match for stream %d\n", i); | |
3139 | matches = 0; | |
3140 | } else { | |
3141 | AVCodecContext *ccf, *ccs; | |
3142 | ||
3143 | ccf = &sf->codec; | |
3144 | ccs = &ss->codec; | |
3145 | #define CHECK_CODEC(x) (ccf->x != ccs->x) | |
3146 | ||
3147 | if (CHECK_CODEC(codec) || CHECK_CODEC(codec_type)) { | |
3148 | printf("Codecs do not match for stream %d\n", i); | |
3149 | matches = 0; | |
3150 | } else if (CHECK_CODEC(bit_rate) || CHECK_CODEC(flags)) { | |
3151 | printf("Codec bitrates do not match for stream %d\n", i); | |
3152 | matches = 0; | |
3153 | } else if (ccf->codec_type == CODEC_TYPE_VIDEO) { | |
3154 | if (CHECK_CODEC(frame_rate) || | |
3155 | CHECK_CODEC(width) || | |
3156 | CHECK_CODEC(height)) { | |
3157 | printf("Codec width, height and framerate do not match for stream %d\n", i); | |
3158 | matches = 0; | |
3159 | } | |
3160 | } else if (ccf->codec_type == CODEC_TYPE_AUDIO) { | |
3161 | if (CHECK_CODEC(sample_rate) || | |
3162 | CHECK_CODEC(channels) || | |
3163 | CHECK_CODEC(frame_size)) { | |
3164 | printf("Codec sample_rate, channels, frame_size do not match for stream %d\n", i); | |
3165 | matches = 0; | |
3166 | } | |
3167 | } else { | |
3168 | printf("Unknown codec type\n"); | |
3169 | matches = 0; | |
3170 | } | |
3171 | } | |
3172 | if (!matches) { | |
3173 | break; | |
3174 | } | |
3175 | } | |
3176 | } else { | |
3177 | printf("Deleting feed file '%s' as stream counts differ (%d != %d)\n", | |
3178 | feed->feed_filename, s->nb_streams, feed->nb_streams); | |
3179 | } | |
3180 | ||
3181 | av_close_input_file(s); | |
3182 | } else { | |
3183 | printf("Deleting feed file '%s' as it appears to be corrupt\n", | |
3184 | feed->feed_filename); | |
3185 | } | |
3186 | if (!matches) | |
3187 | unlink(feed->feed_filename); | |
3188 | } | |
85f07f22 FB |
3189 | if (!url_exist(feed->feed_filename)) { |
3190 | AVFormatContext s1, *s = &s1; | |
3191 | ||
3192 | /* only write the header of the ffm file */ | |
3193 | if (url_fopen(&s->pb, feed->feed_filename, URL_WRONLY) < 0) { | |
3194 | fprintf(stderr, "Could not open output feed file '%s'\n", | |
3195 | feed->feed_filename); | |
3196 | exit(1); | |
3197 | } | |
bd7cf6ad | 3198 | s->oformat = feed->fmt; |
85f07f22 FB |
3199 | s->nb_streams = feed->nb_streams; |
3200 | for(i=0;i<s->nb_streams;i++) { | |
3201 | AVStream *st; | |
3202 | st = feed->streams[i]; | |
3203 | s->streams[i] = st; | |
3204 | } | |
bd7cf6ad FB |
3205 | av_write_header(s); |
3206 | /* XXX: need better api */ | |
3207 | av_freep(&s->priv_data); | |
85f07f22 FB |
3208 | url_fclose(&s->pb); |
3209 | } | |
3210 | /* get feed size and write index */ | |
3211 | fd = open(feed->feed_filename, O_RDONLY); | |
3212 | if (fd < 0) { | |
3213 | fprintf(stderr, "Could not open output feed file '%s'\n", | |
3214 | feed->feed_filename); | |
3215 | exit(1); | |
3216 | } | |
3217 | ||
3218 | feed->feed_write_index = ffm_read_write_index(fd); | |
3219 | feed->feed_size = lseek(fd, 0, SEEK_END); | |
3220 | /* ensure that we do not wrap before the end of file */ | |
3221 | if (feed->feed_max_size < feed->feed_size) | |
3222 | feed->feed_max_size = feed->feed_size; | |
3223 | ||
3224 | close(fd); | |
3225 | } | |
3226 | } | |
3227 | ||
3228 | static void get_arg(char *buf, int buf_size, const char **pp) | |
3229 | { | |
3230 | const char *p; | |
3231 | char *q; | |
3232 | int quote; | |
3233 | ||
3234 | p = *pp; | |
3235 | while (isspace(*p)) p++; | |
3236 | q = buf; | |
3237 | quote = 0; | |
3238 | if (*p == '\"' || *p == '\'') | |
3239 | quote = *p++; | |
3240 | for(;;) { | |
3241 | if (quote) { | |
3242 | if (*p == quote) | |
3243 | break; | |
3244 | } else { | |
3245 | if (isspace(*p)) | |
3246 | break; | |
3247 | } | |
3248 | if (*p == '\0') | |
3249 | break; | |
3250 | if ((q - buf) < buf_size - 1) | |
3251 | *q++ = *p; | |
3252 | p++; | |
3253 | } | |
3254 | *q = '\0'; | |
3255 | if (quote && *p == quote) | |
3256 | p++; | |
3257 | *pp = p; | |
3258 | } | |
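/*
 * Illustrative usage sketch (not part of the original source): how
 * get_arg() splits a config line into whitespace-separated, optionally
 * quoted tokens. The sample line and buffer size are assumptions chosen
 * only for this example.
 */
#if 0
static void get_arg_example(void)
{
    const char *p = "Launch ffmpeg \"-title 'my stream'\" -b 64";
    char tok[64];

    get_arg(tok, sizeof(tok), &p);   /* tok = "Launch" */
    get_arg(tok, sizeof(tok), &p);   /* tok = "ffmpeg" */
    get_arg(tok, sizeof(tok), &p);   /* tok = "-title 'my stream'" (outer quotes stripped) */
    get_arg(tok, sizeof(tok), &p);   /* tok = "-b" */
    get_arg(tok, sizeof(tok), &p);   /* tok = "64" */
}
#endif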
3259 | ||
3260 | /* add a codec and set the default parameters */ | |
3261 | void add_codec(FFStream *stream, AVCodecContext *av) | |
3262 | { | |
3263 | AVStream *st; | |
3264 | ||
3265 | /* compute default parameters */ | |
3266 | switch(av->codec_type) { | |
3267 | case CODEC_TYPE_AUDIO: | |
3268 | if (av->bit_rate == 0) | |
3269 | av->bit_rate = 64000; | |
3270 | if (av->sample_rate == 0) | |
3271 | av->sample_rate = 22050; | |
3272 | if (av->channels == 0) | |
3273 | av->channels = 1; | |
3274 | break; | |
3275 | case CODEC_TYPE_VIDEO: | |
3276 | if (av->bit_rate == 0) | |
3277 | av->bit_rate = 64000; | |
3278 | if (av->frame_rate == 0) | |
3279 | av->frame_rate = 5 * FRAME_RATE_BASE; | |
3280 | if (av->width == 0 || av->height == 0) { | |
3281 | av->width = 160; | |
3282 | av->height = 128; | |
3283 | } | |
ba9b374f | 3284 | /* Bitrate tolerance is less for streaming */ |
42a63c6a PG |
3285 | if (av->bit_rate_tolerance == 0) |
3286 | av->bit_rate_tolerance = av->bit_rate / 4; | |
3287 | if (av->qmin == 0) | |
3288 | av->qmin = 3; | |
3289 | if (av->qmax == 0) | |
3290 | av->qmax = 31; | |
3291 | if (av->max_qdiff == 0) | |
3292 | av->max_qdiff = 3; | |
ba9b374f J |
3293 | av->qcompress = 0.5; |
3294 | av->qblur = 0.5; | |
68d7eef9 | 3295 | |
a782f209 PG |
3296 | if (!av->rc_eq) |
3297 | av->rc_eq = "tex^qComp"; | |
3298 | if (!av->i_quant_factor) | |
b3a391e8 | 3299 | av->i_quant_factor = -0.8; |
a782f209 PG |
3300 | if (!av->b_quant_factor) |
3301 | av->b_quant_factor = 1.25; | |
3302 | if (!av->b_quant_offset) | |
3303 | av->b_quant_offset = 1.25; | |
3304 | ||
3305 | ||
85f07f22 | 3306 | break; |
f747e6d3 | 3307 | default: |
ec3b2232 | 3308 | av_abort(); |
85f07f22 FB |
3309 | } |
3310 | ||
3311 | st = av_mallocz(sizeof(AVStream)); | |
3312 | if (!st) | |
3313 | return; | |
3314 | stream->streams[stream->nb_streams++] = st; | |
3315 | memcpy(&st->codec, av, sizeof(AVCodecContext)); | |
3316 | } | |
3317 | ||
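/*
 * Minimal usage sketch (an assumption, mirroring what parse_ffconfig()
 * does when it reaches </Stream>): the caller zeroes an AVCodecContext,
 * sets only the fields given in the config, and add_codec() fills in the
 * remaining defaults. The codec id and bitrate below are hypothetical.
 */
#if 0
static void add_codec_example(FFStream *stream)
{
    AVCodecContext enc;

    memset(&enc, 0, sizeof(enc));
    enc.codec_type = CODEC_TYPE_VIDEO;
    enc.codec_id = CODEC_ID_MPEG1VIDEO; /* hypothetical choice */
    enc.bit_rate = 256000;              /* all other fields left at 0 */
    add_codec(stream, &enc);            /* width/height, qmin/qmax, etc. get defaults */
}
#endif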
f747e6d3 PG |
3318 | int opt_audio_codec(const char *arg) |
3319 | { | |
3320 | AVCodec *p; | |
3321 | ||
3322 | p = first_avcodec; | |
3323 | while (p) { | |
3324 | if (!strcmp(p->name, arg) && p->type == CODEC_TYPE_AUDIO) | |
3325 | break; | |
3326 | p = p->next; | |
3327 | } | |
3328 | if (p == NULL) { | |
3329 | return CODEC_ID_NONE; | |
3330 | } | |
3331 | ||
3332 | return p->id; | |
3333 | } | |
3334 | ||
3335 | int opt_video_codec(const char *arg) | |
3336 | { | |
3337 | AVCodec *p; | |
3338 | ||
3339 | p = first_avcodec; | |
3340 | while (p) { | |
3341 | if (!strcmp(p->name, arg) && p->type == CODEC_TYPE_VIDEO) | |
3342 | break; | |
3343 | p = p->next; | |
3344 | } | |
3345 | if (p == NULL) { | |
3346 | return CODEC_ID_NONE; | |
3347 | } | |
3348 | ||
3349 | return p->id; | |
3350 | } | |
3351 | ||
2effd274 FB |
3352 | /* simplistic plugin support */ |
3353 | ||
3354 | void load_module(const char *filename) | |
3355 | { | |
3356 | void *dll; | |
3357 | void (*init_func)(void); | |
3358 | dll = dlopen(filename, RTLD_NOW); | |
3359 | if (!dll) { | |
3360 | fprintf(stderr, "Could not load module '%s' - %s\n", | |
3361 | filename, dlerror()); | |
3362 | return; | |
3363 | } | |
3364 | ||
3365 | init_func = dlsym(dll, "ffserver_module_init"); | |
3366 | if (!init_func) { | |
3367 | fprintf(stderr, "%s: init function 'ffserver_module_init()' not found\n", | 
3368 | filename); | 
3369 | dlclose(dll); | 
3370 | return; | 
3371 | } | 
3372 | ||
3373 | init_func(); | |
3374 | } | |
3375 | ||
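/*
 * Sketch of the module side of this mechanism (not in the original
 * source): a plugin is a shared object exporting ffserver_module_init(),
 * which load_module() resolves with dlsym() and calls once. What the
 * init function actually registers is up to the module; the body below
 * is only a placeholder.
 */
#if 0
/* built separately, e.g.: gcc -shared -fPIC -o my_module.so my_module.c */
#include <stdio.h>

void ffserver_module_init(void)
{
    /* register custom streams or handlers here */
    fprintf(stderr, "my_module: loaded\n");
}
#endif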
85f07f22 FB |
3376 | int parse_ffconfig(const char *filename) |
3377 | { | |
3378 | FILE *f; | |
3379 | char line[1024]; | |
3380 | char cmd[64]; | |
3381 | char arg[1024]; | |
3382 | const char *p; | |
3383 | int val, errors, line_num; | |
cde25790 | 3384 | FFStream **last_stream, *stream, *redirect; |
85f07f22 FB |
3385 | FFStream **last_feed, *feed; |
3386 | AVCodecContext audio_enc, video_enc; | |
3387 | int audio_id, video_id; | |
3388 | ||
3389 | f = fopen(filename, "r"); | |
3390 | if (!f) { | |
3391 | perror(filename); | |
3392 | return -1; | |
3393 | } | |
3394 | ||
3395 | errors = 0; | |
3396 | line_num = 0; | |
3397 | first_stream = NULL; | |
3398 | last_stream = &first_stream; | |
3399 | first_feed = NULL; | |
3400 | last_feed = &first_feed; | |
3401 | stream = NULL; | |
3402 | feed = NULL; | |
cde25790 | 3403 | redirect = NULL; |
85f07f22 FB |
3404 | audio_id = CODEC_ID_NONE; |
3405 | video_id = CODEC_ID_NONE; | |
3406 | for(;;) { | |
3407 | if (fgets(line, sizeof(line), f) == NULL) | |
3408 | break; | |
3409 | line_num++; | |
3410 | p = line; | |
3411 | while (isspace(*p)) | |
3412 | p++; | |
3413 | if (*p == '\0' || *p == '#') | |
3414 | continue; | |
3415 | ||
3416 | get_arg(cmd, sizeof(cmd), &p); | |
3417 | ||
3418 | if (!strcasecmp(cmd, "Port")) { | |
3419 | get_arg(arg, sizeof(arg), &p); | |
2effd274 | 3420 | my_http_addr.sin_port = htons (atoi(arg)); |
85f07f22 FB |
3421 | } else if (!strcasecmp(cmd, "BindAddress")) { |
3422 | get_arg(arg, sizeof(arg), &p); | |
2effd274 FB |
3423 | if (!inet_aton(arg, &my_http_addr.sin_addr)) { |
3424 | fprintf(stderr, "%s:%d: Invalid IP address: %s\n", | |
3425 | filename, line_num, arg); | |
3426 | errors++; | |
3427 | } | |
3428 | } else if (!strcasecmp(cmd, "NoDaemon")) { | |
3429 | ffserver_daemon = 0; | |
3430 | } else if (!strcasecmp(cmd, "RTSPPort")) { | |
3431 | get_arg(arg, sizeof(arg), &p); | |
3432 | my_rtsp_addr.sin_port = htons (atoi(arg)); | |
3433 | } else if (!strcasecmp(cmd, "RTSPBindAddress")) { | |
3434 | get_arg(arg, sizeof(arg), &p); | |
3435 | if (!inet_aton(arg, &my_rtsp_addr.sin_addr)) { | |
85f07f22 FB |
3436 | fprintf(stderr, "%s:%d: Invalid IP address: %s\n", |
3437 | filename, line_num, arg); | |
3438 | errors++; | |
3439 | } | |
3440 | } else if (!strcasecmp(cmd, "MaxClients")) { | |
3441 | get_arg(arg, sizeof(arg), &p); | |
3442 | val = atoi(arg); | |
3443 | if (val < 1 || val > HTTP_MAX_CONNECTIONS) { | |
3444 | fprintf(stderr, "%s:%d: Invalid MaxClients: %s\n", | |
3445 | filename, line_num, arg); | |
3446 | errors++; | |
3447 | } else { | |
3448 | nb_max_connections = val; | |
3449 | } | |
42a63c6a PG |
3450 | } else if (!strcasecmp(cmd, "MaxBandwidth")) { |
3451 | get_arg(arg, sizeof(arg), &p); | |
3452 | val = atoi(arg); | |
3453 | if (val < 10 || val > 100000) { | |
3454 | fprintf(stderr, "%s:%d: Invalid MaxBandwidth: %s\n", | |
3455 | filename, line_num, arg); | |
3456 | errors++; | |
3457 | } else { | |
3458 | nb_max_bandwidth = val; | |
3459 | } | |
85f07f22 FB |
3460 | } else if (!strcasecmp(cmd, "CustomLog")) { |
3461 | get_arg(logfilename, sizeof(logfilename), &p); | |
3462 | } else if (!strcasecmp(cmd, "<Feed")) { | |
3463 | /*********************************************/ | |
3464 | /* Feed related options */ | |
3465 | char *q; | |
3466 | if (stream || feed) { | |
3467 | fprintf(stderr, "%s:%d: Already in a tag\n", | |
3468 | filename, line_num); | |
3469 | } else { | |
3470 | feed = av_mallocz(sizeof(FFStream)); | |
3471 | /* add in stream list */ | |
3472 | *last_stream = feed; | |
3473 | last_stream = &feed->next; | |
3474 | /* add in feed list */ | |
3475 | *last_feed = feed; | |
3476 | last_feed = &feed->next_feed; | |
3477 | ||
3478 | get_arg(feed->filename, sizeof(feed->filename), &p); | |
3479 | q = strrchr(feed->filename, '>'); | |
3480 | if (q) | 
3481 | *q = '\0'; | |
3482 | feed->fmt = guess_format("ffm", NULL, NULL); | |
3483 | /* default feed file */ | 
3484 | snprintf(feed->feed_filename, sizeof(feed->feed_filename), | |
3485 | "/tmp/%s.ffm", feed->filename); | |
3486 | feed->feed_max_size = 5 * 1024 * 1024; | |
3487 | feed->is_feed = 1; | |
3488 | feed->feed = feed; /* self feeding :-) */ | |
3489 | } | |
cde25790 PG |
3490 | } else if (!strcasecmp(cmd, "Launch")) { |
3491 | if (feed) { | |
3492 | int i; | |
3493 | ||
3494 | feed->child_argv = (char **) av_mallocz(64 * sizeof(char *)); | |
3495 | ||
3496 | feed->child_argv[0] = av_malloc(7); | |
3497 | strcpy(feed->child_argv[0], "ffmpeg"); | |
3498 | ||
3499 | for (i = 1; i < 62; i++) { | |
3500 | char argbuf[256]; | |
3501 | ||
3502 | get_arg(argbuf, sizeof(argbuf), &p); | |
3503 | if (!argbuf[0]) | |
3504 | break; | |
3505 | ||
3506 | feed->child_argv[i] = av_malloc(strlen(argbuf) + 1); | 
3507 | strcpy(feed->child_argv[i], argbuf); | |
3508 | } | |
3509 | ||
3510 | feed->child_argv[i] = av_malloc(30 + strlen(feed->filename)); | |
3511 | ||
3512 | snprintf(feed->child_argv[i], 30 + strlen(feed->filename), "http://127.0.0.1:%d/%s", | 
2effd274 | 3513 | ntohs(my_http_addr.sin_port), feed->filename); |
cde25790 | 3514 | } |
85f07f22 FB |
3515 | } else if (!strcasecmp(cmd, "File")) { |
3516 | if (feed) { | |
3517 | get_arg(feed->feed_filename, sizeof(feed->feed_filename), &p); | |
3518 | } else if (stream) { | |
3519 | get_arg(stream->feed_filename, sizeof(stream->feed_filename), &p); | |
3520 | } | |
3521 | } else if (!strcasecmp(cmd, "FileMaxSize")) { | |
3522 | if (feed) { | |
3523 | const char *p1; | |
3524 | double fsize; | |
3525 | ||
3526 | get_arg(arg, sizeof(arg), &p); | |
3527 | p1 = arg; | |
3528 | fsize = strtod(p1, (char **)&p1); | |
3529 | switch(toupper(*p1)) { | |
3530 | case 'K': | |
3531 | fsize *= 1024; | |
3532 | break; | |
3533 | case 'M': | |
3534 | fsize *= 1024 * 1024; | |
3535 | break; | |
3536 | case 'G': | |
3537 | fsize *= 1024 * 1024 * 1024; | |
3538 | break; | |
3539 | } | |
3540 | feed->feed_max_size = (INT64)fsize; | |
3541 | } | |
3542 | } else if (!strcasecmp(cmd, "</Feed>")) { | |
3543 | if (!feed) { | |
3544 | fprintf(stderr, "%s:%d: No corresponding <Feed> for </Feed>\n", | |
3545 | filename, line_num); | |
3546 | errors++; | |
59eb2ed1 | 3547 | #if 0 |
f747e6d3 PG |
3548 | } else { |
3549 | /* Make sure that we start out clean */ | |
42a63c6a PG |
3550 | if (unlink(feed->feed_filename) < 0 |
3551 | && errno != ENOENT) { | |
3552 | fprintf(stderr, "%s:%d: Unable to clean old feed file '%s': %s\n", | |
3553 | filename, line_num, feed->feed_filename, strerror(errno)); | |
3554 | errors++; | |
3555 | } | |
59eb2ed1 | 3556 | #endif |
85f07f22 FB |
3557 | } |
3558 | feed = NULL; | |
3559 | } else if (!strcasecmp(cmd, "<Stream")) { | |
3560 | /*********************************************/ | |
3561 | /* Stream related options */ | |
3562 | char *q; | |
3563 | if (stream || feed) { | |
3564 | fprintf(stderr, "%s:%d: Already in a tag\n", | |
3565 | filename, line_num); | |
3566 | } else { | |
3567 | stream = av_mallocz(sizeof(FFStream)); | |
3568 | *last_stream = stream; | |
3569 | last_stream = &stream->next; | |
3570 | ||
3571 | get_arg(stream->filename, sizeof(stream->filename), &p); | |
3572 | q = strrchr(stream->filename, '>'); | |
3573 | if (q) | 
3574 | *q = '\0'; | |
8256c0a3 | 3575 | stream->fmt = guess_stream_format(NULL, stream->filename, NULL); |
85f07f22 FB |
3576 | memset(&audio_enc, 0, sizeof(AVCodecContext)); |
3577 | memset(&video_enc, 0, sizeof(AVCodecContext)); | |
3578 | audio_id = CODEC_ID_NONE; | |
3579 | video_id = CODEC_ID_NONE; | |
3580 | if (stream->fmt) { | |
3581 | audio_id = stream->fmt->audio_codec; | |
3582 | video_id = stream->fmt->video_codec; | |
3583 | } | |
3584 | } | |
3585 | } else if (!strcasecmp(cmd, "Feed")) { | |
3586 | get_arg(arg, sizeof(arg), &p); | |
3587 | if (stream) { | |
3588 | FFStream *sfeed; | |
3589 | ||
3590 | sfeed = first_feed; | |
3591 | while (sfeed != NULL) { | |
3592 | if (!strcmp(sfeed->filename, arg)) | |
3593 | break; | |
3594 | sfeed = sfeed->next_feed; | |
3595 | } | |
3596 | if (!sfeed) { | |
3597 | fprintf(stderr, "%s:%d: feed '%s' not defined\n", | |
3598 | filename, line_num, arg); | |
3599 | } else { | |
3600 | stream->feed = sfeed; | |
3601 | } | |
3602 | } | |
3603 | } else if (!strcasecmp(cmd, "Format")) { | |
3604 | get_arg(arg, sizeof(arg), &p); | |
3605 | if (!strcmp(arg, "status")) { | |
3606 | stream->stream_type = STREAM_TYPE_STATUS; | |
3607 | stream->fmt = NULL; | |
3608 | } else { | |
3609 | stream->stream_type = STREAM_TYPE_LIVE; | |
dd2af5aa FB |
3610 | /* jpeg cannot be used here, so use single frame jpeg */ |
3611 | if (!strcmp(arg, "jpeg")) | |
3612 | strcpy(arg, "singlejpeg"); | |
8256c0a3 | 3613 | stream->fmt = guess_stream_format(arg, NULL, NULL); |
85f07f22 FB |
3614 | if (!stream->fmt) { |
3615 | fprintf(stderr, "%s:%d: Unknown Format: %s\n", | |
3616 | filename, line_num, arg); | |
3617 | errors++; | |
3618 | } | |
3619 | } | |
3620 | if (stream->fmt) { | |
3621 | audio_id = stream->fmt->audio_codec; | |
3622 | video_id = stream->fmt->video_codec; | |
3623 | } | |
cde25790 PG |
3624 | } else if (!strcasecmp(cmd, "FaviconURL")) { |
3625 | if (stream && stream->stream_type == STREAM_TYPE_STATUS) { | |
3626 | get_arg(stream->feed_filename, sizeof(stream->feed_filename), &p); | |
3627 | } else { | |
3628 | fprintf(stderr, "%s:%d: FaviconURL only permitted for status streams\n", | |
3629 | filename, line_num); | |
3630 | errors++; | |
3631 | } | |
2ac887ba PG |
3632 | } else if (!strcasecmp(cmd, "Author")) { |
3633 | if (stream) { | |
3634 | get_arg(stream->author, sizeof(stream->author), &p); | |
3635 | } | |
3636 | } else if (!strcasecmp(cmd, "Comment")) { | |
3637 | if (stream) { | |
3638 | get_arg(stream->comment, sizeof(stream->comment), &p); | |
3639 | } | |
3640 | } else if (!strcasecmp(cmd, "Copyright")) { | |
3641 | if (stream) { | |
3642 | get_arg(stream->copyright, sizeof(stream->copyright), &p); | |
3643 | } | |
3644 | } else if (!strcasecmp(cmd, "Title")) { | |
3645 | if (stream) { | |
3646 | get_arg(stream->title, sizeof(stream->title), &p); | |
3647 | } | |
42a63c6a PG |
3648 | } else if (!strcasecmp(cmd, "Preroll")) { |
3649 | get_arg(arg, sizeof(arg), &p); | |
3650 | if (stream) { | |
8256c0a3 | 3651 | stream->prebuffer = atof(arg) * 1000; |
42a63c6a | 3652 | } |
79c4ea3c PG |
3653 | } else if (!strcasecmp(cmd, "StartSendOnKey")) { |
3654 | if (stream) { | |
3655 | stream->send_on_key = 1; | |
3656 | } | |
f747e6d3 PG |
3657 | } else if (!strcasecmp(cmd, "AudioCodec")) { |
3658 | get_arg(arg, sizeof(arg), &p); | |
3659 | audio_id = opt_audio_codec(arg); | |
3660 | if (audio_id == CODEC_ID_NONE) { | |
3661 | fprintf(stderr, "%s:%d: Unknown AudioCodec: %s\n", | |
3662 | filename, line_num, arg); | |
3663 | errors++; | |
3664 | } | |
3665 | } else if (!strcasecmp(cmd, "VideoCodec")) { | |
3666 | get_arg(arg, sizeof(arg), &p); | |
3667 | video_id = opt_video_codec(arg); | |
3668 | if (video_id == CODEC_ID_NONE) { | |
3669 | fprintf(stderr, "%s:%d: Unknown VideoCodec: %s\n", | |
3670 | filename, line_num, arg); | |
3671 | errors++; | |
3672 | } | |
ec3b2232 PG |
3673 | } else if (!strcasecmp(cmd, "MaxTime")) { |
3674 | get_arg(arg, sizeof(arg), &p); | |
3675 | if (stream) { | |
8256c0a3 | 3676 | stream->max_time = atof(arg) * 1000; |
ec3b2232 | 3677 | } |
85f07f22 FB |
3678 | } else if (!strcasecmp(cmd, "AudioBitRate")) { |
3679 | get_arg(arg, sizeof(arg), &p); | |
3680 | if (stream) { | |
3681 | audio_enc.bit_rate = atoi(arg) * 1000; | |
3682 | } | |
3683 | } else if (!strcasecmp(cmd, "AudioChannels")) { | |
3684 | get_arg(arg, sizeof(arg), &p); | |
3685 | if (stream) { | |
3686 | audio_enc.channels = atoi(arg); | |
3687 | } | |
3688 | } else if (!strcasecmp(cmd, "AudioSampleRate")) { | |
3689 | get_arg(arg, sizeof(arg), &p); | |
3690 | if (stream) { | |
3691 | audio_enc.sample_rate = atoi(arg); | |
3692 | } | |
81e0d0b4 MH |
3693 | } else if (!strcasecmp(cmd, "AudioQuality")) { |
3694 | get_arg(arg, sizeof(arg), &p); | |
3695 | if (stream) { | |
3696 | audio_enc.quality = atof(arg) * 1000; | |
3697 | } | |
85f07f22 FB |
3698 | } else if (!strcasecmp(cmd, "VideoBitRate")) { |
3699 | get_arg(arg, sizeof(arg), &p); | |
3700 | if (stream) { | |
3701 | video_enc.bit_rate = atoi(arg) * 1000; | |
3702 | } | |
3703 | } else if (!strcasecmp(cmd, "VideoSize")) { | |
3704 | get_arg(arg, sizeof(arg), &p); | |
3705 | if (stream) { | |
3706 | parse_image_size(&video_enc.width, &video_enc.height, arg); | |
3707 | if ((video_enc.width % 16) != 0 || | |
3708 | (video_enc.height % 16) != 0) { | |
3709 | fprintf(stderr, "%s:%d: Image size must be a multiple of 16\n", | |
3710 | filename, line_num); | |
3711 | errors++; | |
3712 | } | |
3713 | } | |
3714 | } else if (!strcasecmp(cmd, "VideoFrameRate")) { | |
3715 | get_arg(arg, sizeof(arg), &p); | |
3716 | if (stream) { | |
3717 | video_enc.frame_rate = (int)(strtod(arg, NULL) * FRAME_RATE_BASE); | |
3718 | } | |
3719 | } else if (!strcasecmp(cmd, "VideoGopSize")) { | |
3720 | get_arg(arg, sizeof(arg), &p); | |
3721 | if (stream) { | |
3722 | video_enc.gop_size = atoi(arg); | |
3723 | } | |
3724 | } else if (!strcasecmp(cmd, "VideoIntraOnly")) { | |
3725 | if (stream) { | |
3726 | video_enc.gop_size = 1; | |
3727 | } | |
e7f9c674 J |
3728 | } else if (!strcasecmp(cmd, "VideoHighQuality")) { |
3729 | if (stream) { | |
3730 | video_enc.flags |= CODEC_FLAG_HQ; | |
3731 | } | |
42a63c6a | 3732 | } else if (!strcasecmp(cmd, "VideoQDiff")) { |
a782f209 | 3733 | get_arg(arg, sizeof(arg), &p); |
42a63c6a PG |
3734 | if (stream) { |
3735 | video_enc.max_qdiff = atoi(arg); | |
3736 | if (video_enc.max_qdiff < 1 || video_enc.max_qdiff > 31) { | |
3737 | fprintf(stderr, "%s:%d: VideoQDiff out of range\n", | |
3738 | filename, line_num); | |
3739 | errors++; | |
3740 | } | |
3741 | } | |
3742 | } else if (!strcasecmp(cmd, "VideoQMax")) { | |
a782f209 | 3743 | get_arg(arg, sizeof(arg), &p); |
42a63c6a PG |
3744 | if (stream) { |
3745 | video_enc.qmax = atoi(arg); | |
3746 | if (video_enc.qmax < 1 || video_enc.qmax > 31) { | |
3747 | fprintf(stderr, "%s:%d: VideoQMax out of range\n", | |
3748 | filename, line_num); | |
3749 | errors++; | |
3750 | } | |
3751 | } | |
3752 | } else if (!strcasecmp(cmd, "VideoQMin")) { | |
a782f209 | 3753 | get_arg(arg, sizeof(arg), &p); |
42a63c6a PG |
3754 | if (stream) { |
3755 | video_enc.qmin = atoi(arg); | |
3756 | if (video_enc.qmin < 1 || video_enc.qmin > 31) { | |
3757 | fprintf(stderr, "%s:%d: VideoQMin out of range\n", | |
3758 | filename, line_num); | |
3759 | errors++; | |
3760 | } | |
3761 | } | |
6b10e6e4 RFI |
3762 | } else if (!strcasecmp(cmd, "LumaElim")) { |
3763 | get_arg(arg, sizeof(arg), &p); | |
3764 | if (stream) { | |
3765 | video_enc.luma_elim_threshold = atoi(arg); | |
3766 | } | |
3767 | } else if (!strcasecmp(cmd, "ChromaElim")) { | |
3768 | get_arg(arg, sizeof(arg), &p); | |
3769 | if (stream) { | |
3770 | video_enc.chroma_elim_threshold = atoi(arg); | |
3771 | } | |
3772 | } else if (!strcasecmp(cmd, "LumiMask")) { | |
3773 | get_arg(arg, sizeof(arg), &p); | |
3774 | if (stream) { | |
3775 | video_enc.lumi_masking = atof(arg); | |
3776 | } | |
3777 | } else if (!strcasecmp(cmd, "DarkMask")) { | |
3778 | get_arg(arg, sizeof(arg), &p); | |
3779 | if (stream) { | |
3780 | video_enc.dark_masking = atof(arg); | |
3781 | } | |
85f07f22 FB |
3782 | } else if (!strcasecmp(cmd, "NoVideo")) { |
3783 | video_id = CODEC_ID_NONE; | |
3784 | } else if (!strcasecmp(cmd, "NoAudio")) { | |
3785 | audio_id = CODEC_ID_NONE; | |
8256c0a3 PG |
3786 | } else if (!strcasecmp(cmd, "ACL")) { |
3787 | IPAddressACL acl; | |
3788 | struct hostent *he; | |
3789 | ||
3790 | get_arg(arg, sizeof(arg), &p); | |
3791 | if (strcasecmp(arg, "allow") == 0) { | |
3792 | acl.action = IP_ALLOW; | |
3793 | } else if (strcasecmp(arg, "deny") == 0) { | |
3794 | acl.action = IP_DENY; | |
3795 | } else { | |
3796 | fprintf(stderr, "%s:%d: ACL action '%s' is not ALLOW or DENY\n", | |
3797 | filename, line_num, arg); | |
3798 | errors++; | |
3799 | } | |
3800 | ||
3801 | get_arg(arg, sizeof(arg), &p); | |
3802 | ||
3803 | he = gethostbyname(arg); | |
3804 | if (!he) { | |
3805 | fprintf(stderr, "%s:%d: ACL refers to invalid host or ip address '%s'\n", | |
3806 | filename, line_num, arg); | |
3807 | errors++; | |
3808 | } else { | |
3809 | /* Only take the first */ | |
3810 | acl.first = *(struct in_addr *) he->h_addr_list[0]; | |
3811 | acl.last = acl.first; | |
3812 | } | |
3813 | ||
3814 | get_arg(arg, sizeof(arg), &p); | |
3815 | ||
3816 | if (arg[0]) { | |
3817 | he = gethostbyname(arg); | |
3818 | if (!he) { | |
3819 | fprintf(stderr, "%s:%d: ACL refers to invalid host or ip address '%s'\n", | |
3820 | filename, line_num, arg); | |
3821 | errors++; | |
3822 | } else { | |
3823 | /* Only take the first */ | |
3824 | acl.last = *(struct in_addr *) he->h_addr_list[0]; | |
3825 | } | |
3826 | } | |
3827 | ||
3828 | if (!errors) { | |
3829 | IPAddressACL *nacl = (IPAddressACL *) av_mallocz(sizeof(*nacl)); | |
3830 | IPAddressACL **naclp = 0; | |
3831 | ||
3832 | *nacl = acl; | |
3833 | nacl->next = 0; | |
3834 | ||
3835 | if (stream) { | |
3836 | naclp = &stream->acl; | |
3837 | } else if (feed) { | |
3838 | naclp = &feed->acl; | |
3839 | } else { | |
3840 | fprintf(stderr, "%s:%d: ACL found not in <stream> or <feed>\n", | |
3841 | filename, line_num); | |
3842 | errors++; | |
3843 | } | |
3844 | ||
3845 | if (naclp) { | |
3846 | while (*naclp) | |
3847 | naclp = &(*naclp)->next; | |
3848 | ||
3849 | *naclp = nacl; | |
3850 | } | |
3851 | } | |
2effd274 FB |
3852 | } else if (!strcasecmp(cmd, "RTSPOption")) { |
3853 | get_arg(arg, sizeof(arg), &p); | |
3854 | if (stream) { | |
3855 | av_freep(&stream->rtsp_option); | |
3856 | /* XXX: av_strdup ? */ | |
3857 | stream->rtsp_option = av_malloc(strlen(arg) + 1); | |
3858 | if (stream->rtsp_option) { | |
3859 | strcpy(stream->rtsp_option, arg); | |
3860 | } | |
3861 | } | |
85f07f22 FB |
3862 | } else if (!strcasecmp(cmd, "</Stream>")) { |
3863 | if (!stream) { | |
3864 | fprintf(stderr, "%s:%d: No corresponding <Stream> for </Stream>\n", | |
3865 | filename, line_num); | |
3866 | errors++; | |
3867 | } else if (stream->feed && stream->fmt && | 
3868 | strcmp(stream->fmt->name, "ffm") != 0) { | 
3869 | if (audio_id != CODEC_ID_NONE) { | |
3870 | audio_enc.codec_type = CODEC_TYPE_AUDIO; | |
3871 | audio_enc.codec_id = audio_id; | |
3872 | add_codec(stream, &audio_enc); | |
3873 | } | |
3874 | if (video_id != CODEC_ID_NONE) { | |
3875 | video_enc.codec_type = CODEC_TYPE_VIDEO; | |
3876 | video_enc.codec_id = video_id; | |
3877 | add_codec(stream, &video_enc); | |
3878 | } | |
3879 | } | |
3880 | stream = NULL; | |
cde25790 PG |
3881 | } else if (!strcasecmp(cmd, "<Redirect")) { |
3882 | /*********************************************/ | |
3883 | char *q; | |
3884 | if (stream || feed || redirect) { | |
3885 | fprintf(stderr, "%s:%d: Already in a tag\n", | |
3886 | filename, line_num); | |
3887 | errors++; | |
3888 | } else { | |
3889 | redirect = av_mallocz(sizeof(FFStream)); | |
3890 | *last_stream = redirect; | |
3891 | last_stream = &redirect->next; | |
3892 | ||
3893 | get_arg(redirect->filename, sizeof(redirect->filename), &p); | |
3894 | q = strrchr(redirect->filename, '>'); | |
3895 | if (q) | 
3896 | *q = '\0'; | |
3897 | redirect->stream_type = STREAM_TYPE_REDIRECT; | |
3898 | } | |
3899 | } else if (!strcasecmp(cmd, "URL")) { | |
3900 | if (redirect) { | |
3901 | get_arg(redirect->feed_filename, sizeof(redirect->feed_filename), &p); | |
3902 | } | |
3903 | } else if (!strcasecmp(cmd, "</Redirect>")) { | |
3904 | if (!redirect) { | |
3905 | fprintf(stderr, "%s:%d: No corresponding <Redirect> for </Redirect>\n", | |
3906 | filename, line_num); | |
3907 | errors++; | |
3908 | } else if (!redirect->feed_filename[0]) { | 
3910 | fprintf(stderr, "%s:%d: No URL found for <Redirect>\n", | |
3911 | filename, line_num); | |
3912 | errors++; | |
3913 | } | |
3914 | redirect = NULL; | |
2effd274 FB |
3915 | } else if (!strcasecmp(cmd, "LoadModule")) { |
3916 | get_arg(arg, sizeof(arg), &p); | |
3917 | load_module(arg); | |
85f07f22 FB |
3918 | } else { |
3919 | fprintf(stderr, "%s:%d: Incorrect keyword: '%s'\n", | |
3920 | filename, line_num, cmd); | |
3921 | errors++; | |
3922 | } | |
3923 | } | |
3924 | ||
3925 | fclose(f); | |
3926 | if (errors) | |
3927 | return -1; | |
3928 | else | |
3929 | return 0; | |
3930 | } | |
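/*
 * Illustrative configuration fragment exercising the directives parsed
 * above. Paths, sizes and codec names are examples only, not defaults
 * taken from the source:
 *
 *   Port 8090
 *   BindAddress 0.0.0.0
 *   MaxClients 100
 *   MaxBandwidth 1000
 *
 *   <Feed test.ffm>
 *   File /tmp/test.ffm
 *   FileMaxSize 5M
 *   ACL allow 127.0.0.1
 *   </Feed>
 *
 *   <Stream test.mpg>
 *   Feed test.ffm
 *   Format mpeg
 *   AudioCodec mp2
 *   AudioBitRate 64
 *   VideoBitRate 256
 *   VideoSize 352x288
 *   VideoFrameRate 25
 *   </Stream>
 *
 *   <Redirect index.html>
 *   URL http://www.ffmpeg.org/
 *   </Redirect>
 */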
3931 | ||
3932 | ||
85f07f22 FB |
3933 | #if 0 |
3934 | static void write_packet(FFCodec *ffenc, | |
3935 | UINT8 *buf, int size) | |
3936 | { | |
3937 | PacketHeader hdr; | |
3938 | AVCodecContext *enc = &ffenc->enc; | |
3939 | UINT8 *wptr; | |
3940 | mk_header(&hdr, enc, size); | |
3941 | wptr = http_fifo.wptr; | |
3942 | fifo_write(&http_fifo, (UINT8 *)&hdr, sizeof(hdr), &wptr); | |
3943 | fifo_write(&http_fifo, buf, size, &wptr); | |
3944 | /* atomic modification of wptr */ | |
3945 | http_fifo.wptr = wptr; | |
3946 | ffenc->data_count += size; | |
3947 | ffenc->avg_frame_size = ffenc->avg_frame_size * |