[asterisk-commits] phsultan: branch phsultan/rtmp-support r278022 - in /team/phsultan/rtmp-suppo...

SVN commits to the Asterisk project asterisk-commits at lists.digium.com
Tue Jul 20 07:18:46 CDT 2010


Author: phsultan
Date: Tue Jul 20 07:18:41 2010
New Revision: 278022

URL: http://svnview.digium.com/svn/asterisk?view=rev&rev=278022
Log:
Added new application FFPlayback

Added:
    team/phsultan/rtmp-support/apps/app_ffplayback.c   (with props)
Modified:
    team/phsultan/rtmp-support/channels/chan_sip.c
    team/phsultan/rtmp-support/include/asterisk/rtp_engine.h
    team/phsultan/rtmp-support/main/rtp_engine.c
    team/phsultan/rtmp-support/res/res_rtp_asterisk.c

Added: team/phsultan/rtmp-support/apps/app_ffplayback.c
URL: http://svnview.digium.com/svn/asterisk/team/phsultan/rtmp-support/apps/app_ffplayback.c?view=auto&rev=278022
==============================================================================
--- team/phsultan/rtmp-support/apps/app_ffplayback.c (added)
+++ team/phsultan/rtmp-support/apps/app_ffplayback.c Tue Jul 20 07:18:41 2010
@@ -1,0 +1,693 @@
+/*
+ * Asterisk -- An open source telephony toolkit.
+ *
+ * Copyright (C) 2010, Philippe Sultan
+ *
+ * Philippe Sultan <philippe.sultan at inria.fr> 
+ *
+ * See http://www.asterisk.org for more information about
+ * the Asterisk project. Please do not directly contact
+ * any of the maintainers of this project for assistance;
+ * the project provides a web site, mailing lists and IRC
+ * channels for your use.
+ *
+ * This program is free software, distributed under the terms of
+ * the GNU General Public License Version 2. See the LICENSE file
+ * at the top of the source tree.
+ *
+ */
+
+/*! \file
+ *
+ * \brief Media stream playback application
+ *
+ * \author\verbatim Philippe Sultan <philippe.sultan at inria.fr> \endverbatim
+ * 
+ * Playback a media stream
+ * \ingroup applications
+ */
+
+/*** MODULEINFO
+	<defaultenabled>yes</defaultenabled>
+	<depend>avcodec</depend>
+	<depend>avformat</depend>
+	<depend>swscale</depend>
+ ***/
+
+#include "asterisk.h"
+
+ASTERISK_FILE_VERSION(__FILE__, "$Revision$")
+
+#include <libavcodec/avcodec.h>
+#include <libavformat/avformat.h>
+#include <libswscale/swscale.h>
+
+#include "asterisk/channel.h"
+#include "asterisk/module.h"
+#include "asterisk/app.h"
+#include "asterisk/rtp_engine.h"
+#include "asterisk/translate.h"
+
+static int ff_playstream(struct ast_channel *chan, char *url);
+static void *video_thread(void *data);
+static void *audio_thread(void *data);
+static int open_decoding_context(AVCodecContext *cctx, int codecid);
+static int open_encoding_context(AVCodecContext *cctx, int codecid);
+static int open_context(AVCodecContext *cctx, int codecid, int decoding);
+static AVStream *add_video_stream(AVFormatContext *oc, enum CodecID codec_id, int width, int height);
+static struct SwsContext *init_img_converter(AVCodecContext *in_vcctx, AVStream *video_st);
+
+/*** DOCUMENTATION
+	<application name="FFPlayback" language="en_US">
+		<synopsis>
+			Plays back an audio/video stream.
+		</synopsis>
+		<syntax>
+		<parameter name="url" required="true">
+			<para> URL can point to a file stored locally, an HTTP
+			or RTMP resource. Some examples :
+			</para>
+			<enumlist>
+			<enum name="/home/username/file.flv">
+				<para>Plays the file named <literal>file.flv</literal>
+				stored locally under the <literal>/home/username</literal>
+				directory.</para>
+			</enum>
+			<enum name="http://servername.domain.com/file.mp4">
+				<para>Plays the file named <literal>file.mp4</literal>
+				stored on a remote HTTP server.</para>
+			</enum>
+			<enum name="rtmp://servername.domain.com/applicationName/playpath">
+				<para>Plays the file named <literal>playpath</literal>
+				stored on a remote RTMP server, and accessible through the
+				<literal>applicationName</literal> AS3 application.</para>
+				<para><replaceable>applicationName</replaceable> refers to the AS3 application
+				hosted in the RTMP server, and <replaceable>playpath</replaceable> refers to the name
+				of the file to play.</para>
+			</enum>
+			</enumlist>
+		</parameter>
+		</syntax>
+		<description>
+			<para>FFPlayback connects to the URL specified as a parameter,
+			and plays the audio/video stream to the calling channel.</para>
+		</description>
+	</application>
+ ***/
+
+/*! \brief Playback state handed to audio_thread() / video_thread().
+ *
+ * ff_playstream() fills one instance per media stream; rtp_fctx and
+ * rtp_fmt are set for the video stream only (audio frames are written
+ * straight to the channel, video goes through FFmpeg's RTP muxer).
+ */
+struct stream_info {
+	struct ast_channel *chan;	/*!< Channel being played to */
+	AVCodecContext *in_cctx;	/*!< Decoder context of the input stream */
+	AVFormatContext *fctx;		/*!< Demuxer for the input URL */
+	AVFormatContext *rtp_fctx;	/*!< FFmpeg RTP muxer context (video only, else NULL) */
+	AVOutputFormat *rtp_fmt;	/*!< FFmpeg RTP output format (video only, else NULL) */
+	int stream_index;		/*!< Index of this stream inside fctx */
+};
+
+/*! Dialplan application name. */
+static char *app = "FFPlayback";
+
+/*! \brief FFPlayback dialplan application entry point.
+ *
+ * \param chan Channel the application runs on.
+ * \param data Argument string; exactly one argument (the URL) is expected.
+ * \retval 0 on success (as reported by ff_playstream()).
+ * \retval -1 on bad arguments or playback failure.
+ */
+static int ffplayback_exec(struct ast_channel *chan, const char *data)
+{
+	int res = 0;
+	char *parse;
+	AST_DECLARE_APP_ARGS(args,
+		AST_APP_ARG(url);
+	);
+
+	if (ast_strlen_zero(data)) {
+		ast_log(LOG_WARNING, "%s requires an argument (url)\n", app);
+		return -1;
+	}
+
+	/* Parse a stack copy: AST_STANDARD_APP_ARGS() writes into its input. */
+	parse = ast_strdupa(data);
+
+	AST_STANDARD_APP_ARGS(args, parse);
+
+	if (args.argc > 1) {
+		ast_log(LOG_WARNING, "%s requires a single argument (url)\n", app);
+		return -1;
+	}
+
+	if (!ast_strlen_zero(args.url)) {
+		ast_log(LOG_NOTICE, "url value is : %s\n", args.url);
+	}
+
+	res = ff_playstream(chan, args.url);
+
+	return res;
+}
+
+/*! \brief Open \a url with FFmpeg and play its audio/video to \a chan.
+ *
+ * Audio is decoded/resampled in audio_thread() and written to the
+ * channel as Asterisk frames.  Video is re-encoded to H.263 in
+ * video_thread() and sent through FFmpeg's RTP muxer, which replaces
+ * Asterisk's own video RTP socket (FFmpeg implements RFC 4629
+ * packetization, Asterisk does not yet).
+ *
+ * \param chan RTP capable channel to play to.
+ * \param url  Local path, HTTP or RTMP URL of the stream.
+ * \retval 0 on success (playback finished or the caller hung up).
+ * \retval -1 on failure.
+ */
+static int ff_playstream(struct ast_channel *chan, char *url)
+{
+	int res = -1;
+	AVFormatContext *vfctx = NULL;		/* Demuxer used by the video thread */
+	AVFormatContext *afctx = NULL;		/* Demuxer used by the audio thread */
+	AVCodecContext *in_vcctx = NULL;	/* Input video codec context */
+	AVCodecContext *in_acctx = NULL;	/* Input audio codec context */
+	int i;
+	int vstream = -1;
+	int astream = -1;
+	pthread_t vthread = AST_PTHREADT_NULL;
+	pthread_t athread = AST_PTHREADT_NULL;
+
+	/* rtp related stuff */
+	AVOutputFormat *rtp_fmt = NULL;
+	AVFormatContext *rtp_fctx = NULL;
+	AVStream *video_st = NULL;
+	URLContext *h = NULL;
+	struct sockaddr_in a_rtpaddr;
+	struct sockaddr_in v_rtpaddr;
+	struct sockaddr_in local_v_rtpaddr;
+	struct ast_rtp_glue *glue;
+	/* Per-call instances: these were previously 'static', which would
+	 * have been shared (and clobbered) by concurrent invocations. */
+	struct ast_rtp_instance *artp = NULL;
+	struct ast_rtp_instance *vrtp = NULL;
+	char rtp_vurl[200];
+
+	struct stream_info ainfo;
+	struct stream_info vinfo;
+
+	memset(&ainfo, 0, sizeof(ainfo));
+	memset(&vinfo, 0, sizeof(vinfo));
+
+	glue = ast_rtp_instance_get_glue(chan->tech->type);
+	if (!glue) {
+		ast_log(LOG_WARNING, "Channel is not RTP capable, can't playback.\n");
+		return res;
+	}
+
+	if (glue->get_rtp_info(chan, &artp) == AST_RTP_GLUE_RESULT_FORBID || !artp) {
+		ast_log(LOG_WARNING, "RTP audio forbidden on this channel.\n");
+		return res;
+	}
+
+	if (glue->get_vrtp_info(chan, &vrtp) == AST_RTP_GLUE_RESULT_FORBID || !vrtp) {
+		ast_log(LOG_WARNING, "RTP video forbidden on this channel.\n");
+		return res;
+	}
+
+	ast_rtp_instance_get_remote_address(artp, &a_rtpaddr);
+	ast_rtp_instance_get_remote_address(vrtp, &v_rtpaddr);
+
+	ast_log(LOG_NOTICE, "Audio RTP address : %s - port %d\n", ast_inet_ntoa(a_rtpaddr.sin_addr), ntohs(a_rtpaddr.sin_port));
+	ast_log(LOG_NOTICE, "Video RTP address : %s - port %d\n", ast_inet_ntoa(v_rtpaddr.sin_addr), ntohs(v_rtpaddr.sin_port));
+
+	rtp_fmt = av_guess_format("rtp", NULL, NULL);
+	if (!rtp_fmt) {
+		/* FFmpeg was built without the RTP muxer */
+		ast_log(LOG_WARNING, "Could not find the RTP output format\n");
+		return res;
+	}
+	rtp_fmt->video_codec = CODEC_ID_H263;
+
+	rtp_fctx = avformat_alloc_context();
+	if (!rtp_fctx) {
+		ast_log(LOG_WARNING, "Memory error\n");
+		return res;
+	}
+
+	rtp_fctx->oformat = rtp_fmt;
+
+	/* Now, we will replace Asterisk's video RTP socket with the one that 
+	 * has been created by FFMPEG's. Why? Well FFMPEG implements video RTP
+	 * packetization (RFC 4629), and Asterisk does not, yet */
+	ast_rtp_instance_get_local_address(vrtp, &local_v_rtpaddr);
+	ast_log(LOG_NOTICE, "Local video RTP address : %s - port %d\n", ast_inet_ntoa(local_v_rtpaddr.sin_addr), ntohs(local_v_rtpaddr.sin_port));
+	/* NOTE(review): the peer host comes from the *audio* address
+	 * (a_rtpaddr); both usually match for SIP, but v_rtpaddr.sin_addr
+	 * looks like the intended value -- confirm. */
+	snprintf(rtp_vurl, sizeof(rtp_vurl), "rtp://%s:%d?localrtpport=%d", ast_inet_ntoa(a_rtpaddr.sin_addr), ntohs(v_rtpaddr.sin_port), ntohs(local_v_rtpaddr.sin_port));
+
+	/* Stop Asterisk's video RTP structure */
+	ao2_ref(vrtp, -1);	/* drop the reference taken by get_vrtp_info() */
+	ast_rtp_instance_destroy(vrtp);
+	glue->set_vrtp_null(chan);
+	/* Give the RTP engine time to release the socket before FFmpeg
+	 * rebinds it -- TODO: replace with deterministic synchronization. */
+	usleep(1000000);
+
+	/* Connect to the video RTP peer */
+	snprintf(rtp_fctx->filename, sizeof(rtp_fctx->filename), "%s", rtp_vurl);
+	if (url_open(&h, rtp_fctx->filename, URL_WRONLY) < 0) {
+		ast_log(LOG_WARNING, "Could not open URL : %s\n", rtp_fctx->filename);
+		goto safeout;
+	}
+	if (url_fdopen(&rtp_fctx->pb, h) < 0) {
+		ast_log(LOG_WARNING, "Could not open URL : %s\n", rtp_fctx->filename);
+		goto safeout;
+	}
+
+	/* Open two demuxers on the same URL so the audio and video threads
+	 * can each read packets at their own pace. */
+	if (av_open_input_file(&afctx, url, NULL, 0, NULL) != 0) {
+		ast_log(LOG_WARNING, "Could not open stream at location %s\n", url);
+		goto safeout;
+	}
+
+	if (av_open_input_file(&vfctx, url, NULL, 0, NULL) != 0) {
+		ast_log(LOG_WARNING, "Could not open stream at location %s\n", url);
+		goto safeout;
+	}
+
+	/* av_find_stream_info() returns a negative value on error.  The
+	 * previous '!av_find_stream_info(...)' test treated the successful
+	 * 0 return as a failure. */
+	if (av_find_stream_info(afctx) < 0 || av_find_stream_info(vfctx) < 0) {
+		ast_log(LOG_WARNING, "Could not find stream information\n");
+		goto safeout;
+	}
+
+	ast_log(LOG_NOTICE, "Playing stream at location %s\n", url);
+	if (vfctx->duration != AV_NOPTS_VALUE) {
+		int hours, mins, secs, us;
+		secs = vfctx->duration / AV_TIME_BASE;
+		us = vfctx->duration % AV_TIME_BASE;
+		mins = secs / 60;
+		secs %= 60;
+		hours = mins / 60;
+		mins %= 60;
+		ast_log(LOG_NOTICE, "Duration : %02d:%02d:%02d.%02d\n", hours, mins, secs, (100 * us) / AV_TIME_BASE);
+	} else {
+		ast_log(LOG_NOTICE, "Duration : N/A\n");
+	}
+
+	/* Find the first video stream */
+	for (i = 0; i < vfctx->nb_streams; i++) {
+		if (vfctx->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO) {
+			vstream = i;
+			vinfo.stream_index = vstream;
+			break;
+		}
+	}
+
+	/* Find the first audio stream */
+	for (i = 0; i < afctx->nb_streams; i++) {
+		if (afctx->streams[i]->codec->codec_type == CODEC_TYPE_AUDIO) {
+			astream = i;
+			ainfo.stream_index = astream;
+			break;
+		}
+	}
+	if (vstream < 0 || astream < 0) {
+		ast_log(LOG_WARNING, "Could not open audio and video streams\n");
+		goto safeout;
+	}
+
+	/* Set our codec contexts based on the stream info we want to play */
+	in_vcctx = vfctx->streams[vstream]->codec;
+	in_acctx = afctx->streams[astream]->codec;
+
+	if (!open_decoding_context(in_vcctx, in_vcctx->codec_id)) {
+		in_vcctx = NULL;	/* never opened; do not avcodec_close() it */
+		goto safeout;
+	}
+
+	if (!open_decoding_context(in_acctx, in_acctx->codec_id)) {
+		in_acctx = NULL;	/* never opened; do not avcodec_close() it */
+		goto safeout;
+	}
+
+	ast_log(LOG_NOTICE, "Audio codec time base info : %d/%d\n", in_acctx->time_base.num, in_acctx->time_base.den);
+	ast_log(LOG_NOTICE, "Audio stream time base info : %d/%d\n", afctx->streams[astream]->time_base.num, afctx->streams[astream]->time_base.den);
+	ast_log(LOG_NOTICE, "Audio stream frame rate info : %d/%d\n", afctx->streams[astream]->r_frame_rate.num, afctx->streams[astream]->r_frame_rate.den);
+	ast_log(LOG_NOTICE, "Video codec time base info : %d/%d\n", in_vcctx->time_base.num, in_vcctx->time_base.den);
+	ast_log(LOG_NOTICE, "Video stream time base info : %d/%d\n", vfctx->streams[vstream]->time_base.num, vfctx->streams[vstream]->time_base.den);
+	ast_log(LOG_NOTICE, "Video stream frame rate info : %d/%d\n", vfctx->streams[vstream]->r_frame_rate.num, vfctx->streams[vstream]->r_frame_rate.den);
+
+	ainfo.chan = chan;
+	ainfo.in_cctx = in_acctx;
+	ainfo.fctx = afctx;
+	ainfo.rtp_fctx = NULL;
+	ainfo.rtp_fmt = NULL;
+
+	vinfo.chan = chan;
+	vinfo.in_cctx = in_vcctx;
+	vinfo.fctx = vfctx;
+	vinfo.rtp_fctx = rtp_fctx;
+	vinfo.rtp_fmt = rtp_fmt;
+
+	if (ast_pthread_create(&vthread, NULL, video_thread, &vinfo) < 0) {
+		ast_log(LOG_WARNING, "Could not create video thread\n");
+		goto safeout;
+	}
+
+	if (ast_pthread_create(&athread, NULL, audio_thread, &ainfo) < 0) {
+		ast_log(LOG_WARNING, "Could not create audio thread\n");
+		/* vinfo lives on this stack; wait for the video thread before
+		 * unwinding, or it would read freed memory. */
+		pthread_join(vthread, NULL);
+		goto safeout;
+	}
+
+	pthread_join(vthread, NULL);
+	pthread_join(athread, NULL);
+
+	/* Previously res was never set on success, so the application
+	 * always reported failure to the dialplan. */
+	res = 0;
+
+safeout:
+
+	if (in_vcctx) {
+		avcodec_close(in_vcctx);
+	}
+	if (in_acctx) {
+		avcodec_close(in_acctx);
+	}
+	/* NOTE(review): video_st is created inside video_thread() and never
+	 * propagated back here, so these two checks never fire today. */
+	if (video_st && video_st->codec) {
+		avcodec_close(video_st->codec);
+	}
+	if (video_st) {
+		av_freep(video_st);
+	}
+	if (h) {
+		url_close(h);
+	}
+	/* Close the demuxers and free the RTP muxer context; the previous
+	 * version leaked all three on both success and error paths. */
+	if (afctx) {
+		av_close_input_file(afctx);
+	}
+	if (vfctx) {
+		av_close_input_file(vfctx);
+	}
+	if (rtp_fctx) {
+		av_free(rtp_fctx);
+	}
+
+	return res;
+}
+
+/*! \brief Thread body: decode the input video and send it out over RTP.
+ *
+ * Reads packets from stream_info.fctx, decodes the video stream,
+ * rescales to 176x144, re-encodes to H.263 and writes the result
+ * through FFmpeg's RTP muxer (stream_info.rtp_fctx).  Frames are paced
+ * to the input timestamps because the peer ignores RTP timestamps (see
+ * the rate-emulation comment in the loop).
+ *
+ * \param data A struct stream_info describing the video stream.
+ * \return Always NULL; exits via pthread_exit().
+ */
+static void *video_thread(void *data)
+{
+	struct ast_channel *chan = NULL;
+	AVCodecContext *in_vcctx = NULL;
+	AVFormatContext *fctx = NULL;
+	AVFormatContext *rtp_fctx = NULL;
+	AVOutputFormat *rtp_fmt = NULL;
+	struct SwsContext *img_convert_ctx = NULL;
+	AVStream *video_st = NULL;
+	AVFrame *frame; 
+	AVFrame *frame_rgb;
+	AVPacket packet;
+	int frameFinished;
+	int num_bytes;
+	uint8_t *in_buffer;
+	int out_bufsize;	/* Size reserved for the decoded video frame */
+	int out_size;		/* Actual size of the encoded video frame */
+	uint8_t *out_buffer;	/* Encoded frame */
+	struct ast_frame outf;
+	uint64_t start_time;	/* The time when the app was called */
+	uint64_t pts;		/* Presentation timestamp from the input stream */	
+	uint64_t now;		/* The elapsed time from start_time */
+	int pps;		/* The number of frames per second in the input stream */
+	int vstream = -1;
+	struct stream_info *aux = data;
+
+	chan = aux->chan;
+	fctx = aux->fctx;
+	rtp_fctx = aux->rtp_fctx;
+	in_vcctx = aux->in_cctx;
+	rtp_fmt = aux->rtp_fmt;
+	vstream = aux->stream_index;
+
+	/* Get the video RTP stream structure (176x144 = QCIF) */
+	video_st = add_video_stream(rtp_fctx, rtp_fmt->video_codec, 176, 144);
+	if (!video_st) {
+		ast_log(LOG_WARNING, "Could not create video RTP stream.\n");
+		return NULL;
+	}
+
+	/* Set the output parameters (must be done even if no parameters). */
+	if (av_set_parameters(rtp_fctx, NULL) < 0) {
+		ast_log(LOG_WARNING, "Invalid output format parameters.\n");
+		return NULL;
+	}
+
+	if (!open_encoding_context(video_st->codec, CODEC_ID_H263)) {
+		ast_log(LOG_WARNING, "Could not open encoding context.\n");
+		return NULL;
+	}
+
+	/* Zero out our outgoing frame */
+	memset(&outf, 0, sizeof(outf));
+
+	/* Allocate space for out_buffer, which must be larger than 
+	 * FF_MIN_BUFFER_SIZE (16384) */ 
+	/* NOTE(review): the ast_malloc() results below are unchecked, and
+	 * neither out_buffer nor in_buffer is ever freed (leaked per call). */
+	out_bufsize = 20000;
+	out_buffer = ast_malloc(out_bufsize);
+
+	/* Allocate video frame structures */
+	frame = avcodec_alloc_frame();
+	if (!frame) {
+		ast_log(LOG_WARNING, "Memory allocation problem\n");
+		goto safeout;
+	}
+
+	frame_rgb = avcodec_alloc_frame();
+
+	if (!frame_rgb) {
+		ast_log(LOG_WARNING, "Memory allocation problem\n");
+		goto safeout;
+	}
+
+	/* Determine required buffer size and allocate space for in_buffer */
+	num_bytes = avpicture_get_size(PIX_FMT_YUV420P, in_vcctx->width, in_vcctx->height);
+	in_buffer = ast_malloc(num_bytes);
+
+	/* Assign appropriate parts of buffer to image planes in frame_rgb */
+	avpicture_fill((AVPicture *)frame_rgb, in_buffer, PIX_FMT_YUV420P, in_vcctx->width, in_vcctx->height);
+
+	/* Allocate image converter context */
+	img_convert_ctx = init_img_converter(in_vcctx, video_st);
+	if(img_convert_ctx == NULL) {
+		ast_log(LOG_WARNING, "Could not initialize the conversion context\n");
+		goto safeout;
+	}
+
+	/* Write header */
+	av_write_header(rtp_fctx);
+
+	/* Mark the start time */
+	start_time = av_gettime();
+
+	ast_log(LOG_NOTICE, "Now reading video input...\n");
+	while(!ast_check_hangup(chan) && av_read_frame(fctx, &packet)>=0) {
+		/* NOTE(review): both 'continue' paths below skip the
+		 * av_free_packet() at the bottom of the loop, leaking the
+		 * packet allocated by av_read_frame(). */
+		if(packet.stream_index != vstream) {
+			continue;
+		}
+		int ret;
+		ret = avcodec_decode_video(in_vcctx, frame, &frameFinished, packet.data, packet.size);
+
+		/* Did we get a complete video frame ? */
+		if(!frameFinished) {
+			continue;
+		}
+
+		/* The peer that launched this application isn't a real video
+		 * player, and therefore ignores the timestamps marked in frames.
+		 * So we have to feed him with frames coming at a pace that equals
+		 * the one that's marked in the input stream.
+		 * This is similar to the -re (rate emulation) option in ffmpeg. */
+		//pps = fctx->streams[vstream]->r_frame_rate.num ? fctx->streams[vstream]->r_frame_rate.num : 25;
+		pps = fctx->streams[vstream]->time_base.den;
+		now = av_gettime() - start_time;
+		pts = av_rescale(packet.pts, 1000000, pps);
+		if (pts > now) {
+			usleep(pts - now);
+		}
+
+		/* Rescale image */
+		ret = sws_scale(img_convert_ctx, frame->data, frame->linesize, 0, in_vcctx->height, frame_rgb->data, frame_rgb->linesize);
+
+		/* Encode the frame */
+		out_size = avcodec_encode_video(video_st->codec, out_buffer, out_bufsize, frame_rgb);
+		if (out_size) {
+			AVPacket pkt;
+			av_init_packet(&pkt);
+
+			pkt.stream_index= video_st->index;
+			pkt.data= out_buffer;
+			pkt.size= out_size;
+			ret = av_interleaved_write_frame(rtp_fctx, &pkt);
+		}
+		/* Free the packet that was allocated by av_read_frame */
+		av_free_packet(&packet);
+	}
+
+	/* write the trailer, if any.  the trailer must be written
+	 * before you close the CodecContexts open when you wrote the
+	 * header; otherwise write_trailer may try to use memory that
+	 * was freed on av_codec_close() */
+	av_write_trailer(rtp_fctx);
+
+	ast_log(LOG_NOTICE, "Ended reading video.\n");
+
+safeout:
+	if (img_convert_ctx) {
+		sws_freeContext(img_convert_ctx);
+	}
+	/* NOTE(review): if the first avcodec_alloc_frame() fails, frame_rgb
+	 * is still uninitialized here, so av_free(frame_rgb) reads an
+	 * indeterminate pointer -- initialize both to NULL at declaration. */
+	av_free(frame);
+	av_free(frame_rgb);
+
+	pthread_exit(NULL);
+
+	return NULL;
+}
+
+/*! \brief Thread body: decode the input audio and write it to the channel.
+ *
+ * Packets belonging to the audio stream are decoded, resampled to
+ * 16 kHz mono signed linear, translated to the channel's write format
+ * and written to the channel, paced to the input timestamps (rate
+ * emulation, like ffmpeg's -re option).
+ *
+ * \param data A struct stream_info describing the audio stream.
+ * \return Always NULL; exits via pthread_exit().
+ */
+static void *audio_thread(void *data)
+{
+	struct ast_channel *chan = NULL;
+	AVCodecContext *in_acctx = NULL;
+	AVPacket packet;
+	AVFormatContext *fctx = NULL;
+	ReSampleContext *resample_context = NULL;
+	struct ast_frame aframe;
+	struct ast_trans_pvt *trans_pvt;
+	uint64_t start_time;	/* The time when the app was called */
+	uint64_t pts;		/* Presentation timestamp from the input stream */	
+	uint64_t now;		/* The elapsed time from start_time */
+	int pps;		/* The number of frames per second in the input stream */
+	int astream = -1;
+	struct stream_info *aux = data;
+
+	chan = aux->chan;
+	fctx = aux->fctx;
+	in_acctx = aux->in_cctx;
+	astream = aux->stream_index;
+
+	/* Init audio resampling context: input format -> 16 kHz mono S16 */
+	/* NOTE(review): the result is not checked for NULL before use. */
+	resample_context = av_audio_resample_init(
+			1, in_acctx->channels,
+			16000, in_acctx->sample_rate,
+			SAMPLE_FMT_S16, in_acctx->sample_fmt,
+			16, 10, 1, 0.8); 
+
+	/* Init audio frame */
+	/* NOTE(review): ast_translator_build_path() may return NULL; also
+	 * not checked before use. */
+	trans_pvt = ast_translator_build_path(chan->writeformat, AST_FORMAT_SLINEAR16);
+	aframe.frametype = AST_FRAME_VOICE;
+	aframe.subclass.codec = chan->writeformat;
+	aframe.src = "FFPlayback";
+
+	/* Mark the start time */
+	start_time = av_gettime();
+
+	ast_log(LOG_WARNING, "Now reading audio input...\n");
+	while(!ast_check_hangup(chan) && av_read_frame(fctx, &packet)>=0) {
+		int ret;
+		uint16_t *rawsamples;
+		uint16_t samples[1024];
+		int rawsampleslen = AVCODEC_MAX_AUDIO_FRAME_SIZE;
+		struct ast_frame *list;
+		struct ast_frame *cur;
+
+		/* Is this a packet from the video stream? */
+		/* NOTE(review): this 'continue' skips the av_free_packet()
+		 * at the bottom of the loop, leaking the packet. */
+		if(packet.stream_index != astream) {
+			continue;
+		}
+
+		/* The peer that launched this application isn't a real video
+		 * player, and therefore ignores the timestamps marked in frames.
+		 * So we have to feed him with frames coming at a pace that equals
+		 * the one that's marked in the input stream.
+		 * This is similar to the -re (rate emulation) option in ffmpeg. */
+		//pps = fctx->streams[vstream]->r_frame_rate.num ? fctx->streams[vstream]->r_frame_rate.num : 25;
+		pps = fctx->streams[astream]->time_base.den;
+		now = av_gettime() - start_time;
+		pts = av_rescale(packet.pts, 1000000, pps);
+		if (pts > now) {
+			usleep(pts - now);
+		}
+
+		/* NOTE(review): 'samples' is a fixed 1024-entry stack buffer and
+		 * audio_resample()'s output size is not bounded by it -- confirm
+		 * it cannot overflow for large decoded frames.  The datalen/
+		 * samples accounting below also looks off by the 2-byte sample
+		 * size (audio_resample() returns a sample count) -- verify. */
+		rawsamples = av_malloc(rawsampleslen);
+		ret = avcodec_decode_audio3(in_acctx, rawsamples, &rawsampleslen, &packet);
+		ret = audio_resample(resample_context, samples, rawsamples, rawsampleslen);
+		aframe.datalen = ret;
+		aframe.samples = ret/2;
+		aframe.data.ptr = samples;
+		aframe.delivery.tv_sec = 0;
+		aframe.delivery.tv_usec = 0;
+
+		list = ast_translate(trans_pvt, &aframe, 0);
+		for (cur = list; cur; cur = AST_LIST_NEXT(cur, frame_list)) {
+			if (ast_write(chan, cur)) {
+				ast_log(LOG_WARNING, "Unable to write frame to channel %s\n", chan->name);
+				break;
+			}
+		}
+		/* NOTE(review): the frame list returned by ast_translate() is
+		 * never freed with ast_frfree(). */
+
+		/* Free the packet that was allocated by av_read_frame */
+		av_free_packet(&packet);
+		av_free(rawsamples);
+	}
+
+	ast_log(LOG_NOTICE, "Ended reading audio.\n");
+
+	if (resample_context) {
+		audio_resample_close(resample_context);
+	}
+	ast_translator_free_path(trans_pvt);
+
+	pthread_exit(NULL);
+
+	return NULL;
+}
+
+/*! \brief Build a swscale context converting decoded input frames to the
+ * output stream's dimensions (destination pixel format fixed to YUV420P,
+ * bicubic scaling).
+ *
+ * \param in_vcctx Decoder context (source width/height/pix_fmt).
+ * \param video_st Output RTP stream (destination width/height).
+ * \return A new SwsContext, or NULL on failure (checked by the caller).
+ */
+static struct SwsContext *init_img_converter(AVCodecContext *in_vcctx, AVStream *video_st)
+{
+	int in_width = in_vcctx->width;
+	int in_height = in_vcctx->height;
+	int out_width = video_st->codec->width;
+	int out_height = video_st->codec->height;
+
+	return  sws_getContext(in_width, in_height, 
+			in_vcctx->pix_fmt, 
+			out_width, out_height, PIX_FMT_YUV420P, SWS_BICUBIC,
+			NULL, NULL, NULL);
+
+}
+
+/*! \brief Find and open a codec on \a cctx.
+ *
+ * \param cctx     Codec context to open.
+ * \param codecid  FFmpeg codec id to look up.
+ * \param decoding Non-zero to open a decoder, zero to open an encoder.
+ * \retval 1 on success.
+ * \retval 0 on failure.
+ *
+ * Every caller tests the result with '!open_...()', so failure must be
+ * a falsy value.  The previous version returned -1 on failure, which is
+ * truthy and silently defeated all of those error checks.
+ */
+static int open_context(AVCodecContext *cctx, int codecid, int decoding)
+{
+	AVCodec *codec = NULL;
+
+	codec = decoding ? avcodec_find_decoder(codecid) : avcodec_find_encoder(codecid);
+	if (!codec) {
+		ast_log(LOG_WARNING, "Could not find any codec to decode stream. codecid : %d\n", codecid);
+		return 0;
+	}
+
+	if (avcodec_open(cctx, codec) < 0) {
+		ast_log(LOG_WARNING, "Could not open codec with id %d\n", codecid);
+		return 0;
+	}
+
+	return 1;
+}
+
+/*! \brief Open an encoder with id \a codecid on \a cctx; thin wrapper
+ * around open_context() with decoding == 0. */
+static int open_encoding_context(AVCodecContext *cctx, int codecid)
+{
+	return open_context(cctx, codecid, 0);
+}
+
+/*! \brief Open a decoder with id \a codecid on \a cctx; thin wrapper
+ * around open_context() with decoding == 1. */
+static int open_decoding_context(AVCodecContext *cctx, int codecid)
+{
+	return open_context(cctx, codecid, 1);
+}
+
+/*! \brief Create a video output stream on muxer \a oc and preset its
+ * codec parameters.
+ *
+ * \param oc       Output (RTP) format context the stream is added to.
+ * \param codec_id Video codec to use (H.263 in this module).
+ * \param width    Encoded picture width in pixels.
+ * \param height   Encoded picture height in pixels.
+ * \return The new AVStream (owned by \a oc), or NULL on allocation failure.
+ */
+static AVStream *add_video_stream(AVFormatContext *oc, enum CodecID codec_id, int width, int height)
+{
+	AVCodecContext *c;
+	AVStream *st;
+
+	st = av_new_stream(oc, 0);
+	if (!st) {
+		ast_log(LOG_WARNING, "Memory error\n");
+		return NULL;
+	}
+
+	c = st->codec;
+	c->codec_id = codec_id;
+	c->codec_type = AVMEDIA_TYPE_VIDEO;
+	c->bit_rate = 90000;	/* target bitrate, bits per second */
+	c->width = width;
+	c->height = height;
+	c->time_base = (AVRational){1,25};	/* fixed 25 fps output clock */
+	c->pix_fmt = PIX_FMT_YUV420P;
+
+	st->time_base = c->time_base;
+	return st;
+}
+
+/*! \brief Module unload: unregister the dialplan application. */
+static int unload_module(void)
+{
+	return ast_unregister_application(app);
+}
+
+/*! \brief Module load: initialize FFmpeg and register the application.
+ * \retval AST_MODULE_LOAD_SUCCESS on success.
+ * \retval AST_MODULE_LOAD_DECLINE if the application cannot be registered.
+ */
+static int load_module(void)
+{
+
+	/* Register all FFmpeg codecs and (de)muxers before first use. */
+	avcodec_init();
+	av_register_all();
+
+	return ast_register_application_xml(app, ffplayback_exec) ? 
+		AST_MODULE_LOAD_DECLINE : AST_MODULE_LOAD_SUCCESS;
+}
+
+AST_MODULE_INFO_STANDARD(ASTERISK_GPL_KEY, "FFPlayback Application");

Propchange: team/phsultan/rtmp-support/apps/app_ffplayback.c
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: team/phsultan/rtmp-support/apps/app_ffplayback.c
------------------------------------------------------------------------------
    svn:keywords = Author Date Id Revision

Propchange: team/phsultan/rtmp-support/apps/app_ffplayback.c
------------------------------------------------------------------------------
    svn:mime-type = text/plain

Modified: team/phsultan/rtmp-support/channels/chan_sip.c
URL: http://svnview.digium.com/svn/asterisk/team/phsultan/rtmp-support/channels/chan_sip.c?view=diff&rev=278022&r1=278021&r2=278022
==============================================================================
--- team/phsultan/rtmp-support/channels/chan_sip.c (original)
+++ team/phsultan/rtmp-support/channels/chan_sip.c Tue Jul 20 07:18:41 2010
@@ -27283,6 +27283,20 @@
 	return res;
 }
 
+/*! \brief Forget the video RTP instance stored on the channel's SIP pvt.
+ *
+ * \param chan SIP channel whose pvt->vrtp should be cleared.
+ * \retval 1 on success.
+ * \retval -1 if the channel has no SIP private structure.
+ */
+static int sip_set_vrtp_null(struct ast_channel *chan)
+{
+	struct sip_pvt *p = NULL;
+
+	/* The previous check was inverted: it bailed out when tech_pvt WAS
+	 * set, and then dereferenced the NULL pointer when it was not. */
+	if (!(p = chan->tech_pvt)) {
+		return -1;
+	}
+
+	sip_pvt_lock(p);
+	p->vrtp = NULL;
+	sip_pvt_unlock(p);
+
+	return 1;
+}
+
 static enum ast_rtp_glue_result sip_get_vrtp_peer(struct ast_channel *chan, struct ast_rtp_instance **instance)
 {
 	struct sip_pvt *p = NULL;
@@ -27424,6 +27438,7 @@
 	.type = "SIP",
 	.get_rtp_info = sip_get_rtp_peer,
 	.get_vrtp_info = sip_get_vrtp_peer,
+	.set_vrtp_null = sip_set_vrtp_null,
 	.get_trtp_info = sip_get_trtp_peer,
 	.update_peer = sip_set_rtp_peer,
 	.get_codec = sip_get_codec,

Modified: team/phsultan/rtmp-support/include/asterisk/rtp_engine.h
URL: http://svnview.digium.com/svn/asterisk/team/phsultan/rtmp-support/include/asterisk/rtp_engine.h?view=diff&rev=278022&r1=278021&r2=278022
==============================================================================
--- team/phsultan/rtmp-support/include/asterisk/rtp_engine.h (original)
+++ team/phsultan/rtmp-support/include/asterisk/rtp_engine.h Tue Jul 20 07:18:41 2010
@@ -396,6 +396,10 @@
 	 */
 	enum ast_rtp_glue_result (*get_rtp_info)(struct ast_channel *chan, struct ast_rtp_instance **instance);
 	/*!
+	 * \brief Callback for resetting the video RTP instance to NULL
+	 */
+	int (*set_vrtp_null)(struct ast_channel *chan);
+	/*!
 	 * \brief Callback for retrieving the RTP instance carrying video
 	 * \note This function increases the reference count on the returned RTP instance.
 	 */

Modified: team/phsultan/rtmp-support/main/rtp_engine.c
URL: http://svnview.digium.com/svn/asterisk/team/phsultan/rtmp-support/main/rtp_engine.c?view=diff&rev=278022&r1=278021&r2=278022
==============================================================================
--- team/phsultan/rtmp-support/main/rtp_engine.c (original)
+++ team/phsultan/rtmp-support/main/rtp_engine.c Tue Jul 20 07:18:41 2010
@@ -290,6 +290,11 @@
 
 int ast_rtp_instance_destroy(struct ast_rtp_instance *instance)
 {
+	if (!ast_rtp_instance_get_data(instance)) {
+		ast_debug(1, "RTP instance has already been destroyed, not doing anything.\n");	
+		return 0;
+	}
+
 	ao2_ref(instance, -1);
 
 	return 0;
@@ -758,6 +763,10 @@
 
 void ast_rtp_instance_stop(struct ast_rtp_instance *instance)
 {
+	if (!ast_rtp_instance_get_data(instance)) {
+		return;
+	}
+
 	if (instance->engine->stop) {
 		instance->engine->stop(instance);
 	}
@@ -1526,6 +1535,10 @@
 
 int ast_rtp_instance_get_stats(struct ast_rtp_instance *instance, struct ast_rtp_instance_stats *stats, enum ast_rtp_instance_stat stat)
 {
+	if (!instance || !instance->engine || !ast_rtp_instance_get_data(instance)) {
+		return -1;
+	}
+
 	return instance->engine->get_stat ? instance->engine->get_stat(instance, stats, stat) : -1;
 }
 

Modified: team/phsultan/rtmp-support/res/res_rtp_asterisk.c
URL: http://svnview.digium.com/svn/asterisk/team/phsultan/rtmp-support/res/res_rtp_asterisk.c?view=diff&rev=278022&r1=278021&r2=278022
==============================================================================
--- team/phsultan/rtmp-support/res/res_rtp_asterisk.c (original)
+++ team/phsultan/rtmp-support/res/res_rtp_asterisk.c Tue Jul 20 07:18:41 2010
@@ -495,6 +495,10 @@
 {
 	struct ast_rtp *rtp = ast_rtp_instance_get_data(instance);
 
+	if (!rtp) {
+		return 0;
+	}
+
 	/* Destroy the smoother that was smoothing out audio if present */
 	if (rtp->smoother) {
 		ast_smoother_free(rtp->smoother);
@@ -518,8 +522,12 @@
 		ast_free(rtp->red);
 	}
 
+	ast_rtp_instance_set_data(instance, NULL);
+
 	/* Finally destroy ourselves */
 	ast_free(rtp);
+
+	ast_log(LOG_NOTICE, "Destroyed RTP struct in RTP instance.\n");
 
 	return 0;
 }
@@ -2532,6 +2540,10 @@
 	struct ast_rtp *rtp = ast_rtp_instance_get_data(instance);
 	struct ast_sockaddr addr = { {0,} };
 
+	if (!rtp) {
+		return;
+	}
+
 	if (rtp->rtcp) {
 		AST_SCHED_DEL(rtp->sched, rtp->rtcp->schedid);
 	}




More information about the asterisk-commits mailing list