soc.2008.vv: 3969ff3f: Refactored PurpleMedia to make creating ...
maiku at soc.pidgin.im
Fri Jun 6 03:50:45 EDT 2008
-----------------------------------------------------------------
Revision: 3969ff3f40069748728f9ee2376a4bab3f089d04
Ancestor: 2613c0f3d419d43b6a6bcdf98b7298d0e6a54dbf
Author: maiku at soc.pidgin.im
Date: 2008-06-06T07:43:03
Branch: im.pidgin.soc.2008.vv
URL: http://d.pidgin.im/viewmtn/revision/info/3969ff3f40069748728f9ee2376a4bab3f089d04
Modified files:
finch/gntmedia.c finch/gntmedia.h libpurple/marshallers.list
libpurple/media.c libpurple/media.h
libpurple/protocols/jabber/google.c
libpurple/protocols/jabber/jabber.c
libpurple/protocols/jabber/jingle.c pidgin/gtkconv.c
pidgin/gtkmedia.c pidgin/gtkmedia.h pidgin/gtkprefs.c
ChangeLog:
Refactored PurpleMedia so that creating audio and video sessions is virtually identical. Audio, video, and combined audio/video sessions now work. Also added videotestsrc to the video plugin preference.
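
For context (a rough sketch, not part of the patch): the refactored API keys streams, codecs, and candidates off a caller-chosen session id instead of the old per-audio/per-video accessors, and a single GStreamer pipeline backs the whole call. A protocol plugin would now drive it roughly like the jingle.c changes below; the helper name and its parameters here are invented for illustration.

#include "media.h"          /* libpurple/media.h as modified in this revision */
#include "mediamanager.h"   /* purple_media_manager_get()/_create_media() */
#include <gst/gst.h>

/* Illustrative only: start an audio/video call with the new
 * per-session-id calls.  Error handling omitted. */
static void
example_start_av_call(PurpleConnection *gc, const gchar *remote_jid,
		GList *remote_audio_codecs, GList *remote_candidates)
{
	PurpleMedia *media = purple_media_manager_create_media(
			purple_media_manager_get(), gc,
			"fsrtpconference", remote_jid);

	/* one stream per session id; "rawudp" until the libnice
	 * transmitter is finished */
	purple_media_add_stream(media, "audio-content", remote_jid,
			PURPLE_MEDIA_AUDIO, "rawudp");
	purple_media_add_stream(media, "video-content", remote_jid,
			PURPLE_MEDIA_VIDEO, "rawudp");

	/* codecs and candidates are now exchanged per session id and
	 * participant name */
	purple_media_set_remote_codecs(media, "audio-content", remote_jid,
			remote_audio_codecs);
	purple_media_add_remote_candidates(media, "audio-content", remote_jid,
			remote_candidates);

	/* a front end attaches its source/sink bins the same way, e.g.
	 * purple_media_set_src(media, "audio-content", sendbin) and
	 * purple_media_set_sink(media, "audio-content", recvbin) */

	/* a single pipeline now carries every session */
	gst_element_set_state(purple_media_get_pipeline(media),
			GST_STATE_PLAYING);
}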
-------------- next part --------------
============================================================
--- finch/gntmedia.c 662232a705ce7ae14be24de511688c23a832a853
+++ finch/gntmedia.c 4497102a55b3c20a62b493df4d0938911d2ddb5f
@@ -126,13 +126,13 @@ finch_media_class_init (FinchMediaClass
"Send level",
"The GstElement of this media's send 'level'",
GST_TYPE_ELEMENT,
- G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE));
+ G_PARAM_READWRITE));
g_object_class_install_property(gobject_class, PROP_RECV_LEVEL,
g_param_spec_object("recv-level",
"Receive level",
"The GstElement of this media's recv 'level'",
GST_TYPE_ELEMENT,
- G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE));
+ G_PARAM_READWRITE));
finch_media_signals[MESSAGE] = g_signal_new("message", G_TYPE_FROM_CLASS(klass),
G_SIGNAL_RUN_LAST, 0, NULL, NULL,
@@ -217,7 +217,26 @@ finch_media_ready_cb(PurpleMedia *media,
static void
finch_media_ready_cb(PurpleMedia *media, FinchMedia *gntmedia)
{
- GstElement *element = purple_media_get_audio_pipeline(media);
+ GstElement *element = purple_media_get_pipeline(media);
+
+ GstElement *sendbin, *sendlevel;
+ GstElement *recvbin, *recvlevel;
+
+ GList *sessions = purple_media_get_session_names(media);
+
+ purple_media_audio_init_src(&sendbin, &sendlevel);
+ purple_media_audio_init_recv(&recvbin, &recvlevel);
+
+ for (; sessions; sessions = sessions->next) {
+ purple_media_set_src(media, sessions->data, sendbin);
+ purple_media_set_sink(media, sessions->data, recvbin);
+ }
+ g_list_free(sessions);
+
+	g_object_set(gntmedia, "send-level", sendlevel,
+			"recv-level", recvlevel,
+ NULL);
+
gst_bus_add_signal_watch(GST_BUS(gst_pipeline_get_bus(GST_PIPELINE(element))));
g_signal_connect(G_OBJECT(gst_pipeline_get_bus(GST_PIPELINE(element))), "message", G_CALLBACK(level_message_cb), gntmedia);
}
@@ -377,12 +396,10 @@ GntWidget *
}
GntWidget *
-finch_media_new(PurpleMedia *media, GstElement *sendlevel, GstElement *recvlevel)
+finch_media_new(PurpleMedia *media)
{
return GNT_WIDGET(g_object_new(finch_media_get_type(),
"media", media,
- "send-level", sendlevel,
- "recv-level", recvlevel,
"vertical", FALSE,
"homogeneous", FALSE,
NULL));
@@ -399,22 +416,14 @@ finch_new_media(PurpleMediaManager *mana
static void
finch_new_media(PurpleMediaManager *manager, PurpleMedia *media, gpointer null)
{
- GstElement *sendbin, *sendlevel;
- GstElement *recvbin, *recvlevel;
GntWidget *gntmedia;
PurpleConversation *conv;
- purple_media_audio_init_src(&sendbin, &sendlevel);
- purple_media_audio_init_recv(&recvbin, &recvlevel);
-
- purple_media_set_audio_src(media, sendbin);
- purple_media_set_audio_sink(media, recvbin);
-
conv = purple_conversation_new(PURPLE_CONV_TYPE_IM,
purple_connection_get_account(purple_media_get_connection(media)),
purple_media_get_screenname(media));
- gntmedia = finch_media_new(media, sendlevel, recvlevel);
+ gntmedia = finch_media_new(media);
g_signal_connect(G_OBJECT(gntmedia), "message", G_CALLBACK(gntmedia_message_cb), conv);
FINCH_MEDIA(gntmedia)->priv->conv = conv;
finch_conversation_set_info_widget(conv, gntmedia);
============================================================
--- finch/gntmedia.h eac76d1bfdb827e50d872b602baedbac35123bd4
+++ finch/gntmedia.h 223e03dd3e32762269888d304d145f268f398de5
@@ -63,7 +63,7 @@ GType finch_media_get_type(void);
GType finch_media_get_type(void);
-GntWidget *finch_media_new(PurpleMedia *media, GstElement *send_level, GstElement *recv_level);
+GntWidget *finch_media_new(PurpleMedia *media);
void finch_media_manager_init(void);
============================================================
--- libpurple/marshallers.list 1a7907ac8dc61b478097e360c2c2f57240d4f483
+++ libpurple/marshallers.list d1f9f33832013c4030d1df1d2b977be2cb81d358
@@ -1 +1,2 @@ VOID:BOXED,BOXED
VOID:BOXED,BOXED
+VOID:POINTER,POINTER,OBJECT
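
This new marshaller backs the "new-candidate" signal added to PurpleMedia in media.c below, which is emitted with the session id, the participant name, and the freshly discovered FsCandidate. A handler (hypothetical, not part of this revision) would look like the following sketch:

#include "media.h"
#include <gst/farsight/fs-conference-iface.h>

/* Sketch only: parameters follow VOID:POINTER,POINTER,OBJECT -- two
 * pointers (session id, participant name) and the candidate object. */
static void
example_new_candidate_cb(PurpleMedia *media, gchar *sess_id, gchar *name,
		FsCandidate *candidate, gpointer user_data)
{
	/* e.g. trickle this candidate to the remote side as soon as it
	 * is discovered, instead of waiting for candidates-prepared */
}

/* connected once the PurpleMedia exists:
 *   g_signal_connect(G_OBJECT(media), "new-candidate",
 *                    G_CALLBACK(example_new_candidate_cb), NULL);
 */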
============================================================
--- libpurple/media.c b642d75b03c700a0d585d5914c35806e39bc7f32
+++ libpurple/media.c 71590385ed72de68140055bf22cb003071dc009d
@@ -38,34 +38,31 @@
#include <gst/interfaces/propertyprobe.h>
#include <gst/farsight/fs-conference-iface.h>
+struct _PurpleMediaSession
+{
+ gchar *id;
+ PurpleMedia *media;
+ GstElement *src;
+ GstElement *sink;
+ FsSession *session;
+ GHashTable *streams; /* FsStream list map to participant's name */
+ FsMediaType type;
+ GHashTable *local_candidates; /* map to participant's name? */
+ FsCandidate *local_candidate;
+ FsCandidate *remote_candidate;
+};
+
struct _PurpleMediaPrivate
{
FsConference *conference;
char *name;
PurpleConnection *connection;
- GstElement *audio_src;
- GstElement *audio_sink;
- GstElement *video_src;
- GstElement *video_sink;
- FsSession *audio_session;
- FsSession *video_session;
+ GHashTable *sessions; /* PurpleMediaSession table */
+ GHashTable *participants; /* FsParticipant table */
- GList *participants; /* FsParticipant list */
- GList *audio_streams; /* FsStream list */
- GList *video_streams; /* FsStream list */
-
- /* might be able to just combine these two */
- GstElement *audio_pipeline;
- GstElement *video_pipeline;
-
- /* this will need to be stored/handled per stream
- * once having multiple streams is supported */
- GList *local_candidates;
-
- FsCandidate *local_candidate;
- FsCandidate *remote_candidate;
+ GstElement *pipeline;
};
#define PURPLE_MEDIA_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj), PURPLE_TYPE_MEDIA, PurpleMediaPrivate))
@@ -88,6 +85,7 @@ enum {
REJECT,
GOT_HANGUP,
GOT_ACCEPT,
+ NEW_CANDIDATE,
CANDIDATES_PREPARED,
CANDIDATE_PAIR,
LAST_SIGNAL
@@ -99,12 +97,6 @@ enum {
PROP_FS_CONFERENCE,
PROP_NAME,
PROP_CONNECTION,
- PROP_AUDIO_SRC,
- PROP_AUDIO_SINK,
- PROP_VIDEO_SRC,
- PROP_VIDEO_SINK,
- PROP_VIDEO_SESSION,
- PROP_AUDIO_SESSION
};
GType
@@ -160,48 +152,6 @@ purple_media_class_init (PurpleMediaClas
"The PurpleConnection associated with this session",
G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE));
- g_object_class_install_property(gobject_class, PROP_AUDIO_SRC,
- g_param_spec_object("audio-src",
- "Audio source",
- "The GstElement used to source audio",
- GST_TYPE_ELEMENT,
- G_PARAM_READWRITE));
-
- g_object_class_install_property(gobject_class, PROP_AUDIO_SINK,
- g_param_spec_object("audio-sink",
- "Audio sink",
- "The GstElement used to sink audio",
- GST_TYPE_ELEMENT,
- G_PARAM_READWRITE));
-
- g_object_class_install_property(gobject_class, PROP_VIDEO_SRC,
- g_param_spec_object("video-src",
- "Video source",
- "The GstElement used to source video",
- GST_TYPE_ELEMENT,
- G_PARAM_READWRITE));
-
- g_object_class_install_property(gobject_class, PROP_VIDEO_SINK,
- g_param_spec_object("video-sink",
- "Audio source",
- "The GstElement used to sink video",
- GST_TYPE_ELEMENT,
- G_PARAM_READWRITE));
-
- g_object_class_install_property(gobject_class, PROP_VIDEO_SESSION,
- g_param_spec_object("video-session",
- "Video stream",
- "The FarsightStream used for video",
- FS_TYPE_SESSION,
- G_PARAM_READWRITE));
-
- g_object_class_install_property(gobject_class, PROP_AUDIO_SESSION,
- g_param_spec_object("audio-session",
- "Audio stream",
- "The FarsightStream used for audio",
- FS_TYPE_SESSION,
- G_PARAM_READWRITE));
-
purple_media_signals[READY] = g_signal_new("ready", G_TYPE_FROM_CLASS(klass),
G_SIGNAL_RUN_LAST, 0, NULL, NULL,
g_cclosure_marshal_VOID__VOID,
@@ -230,6 +180,11 @@ purple_media_class_init (PurpleMediaClas
G_SIGNAL_RUN_LAST, 0, NULL, NULL,
g_cclosure_marshal_VOID__VOID,
G_TYPE_NONE, 0);
+ purple_media_signals[NEW_CANDIDATE] = g_signal_new("new-candidate", G_TYPE_FROM_CLASS(klass),
+ G_SIGNAL_RUN_LAST, 0, NULL, NULL,
+ purple_smarshal_VOID__POINTER_POINTER_OBJECT,
+ G_TYPE_NONE, 3, G_TYPE_POINTER,
+ G_TYPE_POINTER, FS_TYPE_CANDIDATE);
purple_media_signals[CANDIDATES_PREPARED] = g_signal_new("candidates-prepared", G_TYPE_FROM_CLASS(klass),
G_SIGNAL_RUN_LAST, 0, NULL, NULL,
g_cclosure_marshal_VOID__VOID,
@@ -254,59 +209,15 @@ purple_media_finalize (GObject *media)
purple_media_finalize (GObject *media)
{
PurpleMediaPrivate *priv = PURPLE_MEDIA_GET_PRIVATE(media);
- GList *iter;
purple_debug_info("media","purple_media_finalize\n");
g_free(priv->name);
- if (priv->audio_pipeline) {
- gst_element_set_state(priv->audio_pipeline, GST_STATE_NULL);
- gst_object_unref(priv->audio_pipeline);
+ if (priv->pipeline) {
+ gst_element_set_state(priv->pipeline, GST_STATE_NULL);
+ gst_object_unref(priv->pipeline);
}
- if (priv->video_pipeline) {
- gst_element_set_state(priv->video_pipeline, GST_STATE_NULL);
- gst_object_unref(priv->video_pipeline);
- }
- if (priv->audio_src)
- gst_object_unref(priv->audio_src);
- if (priv->audio_sink)
- gst_object_unref(priv->audio_sink);
- if (priv->video_src)
- gst_object_unref(priv->video_src);
- if (priv->video_sink)
- gst_object_unref(priv->video_sink);
-
- for (iter = priv->audio_streams; iter; iter = g_list_next(iter)) {
- g_object_unref(iter->data);
- }
- g_list_free(priv->audio_streams);
-
- for (iter = priv->video_streams; iter; iter = g_list_next(iter)) {
- g_object_unref(iter->data);
- }
- g_list_free(priv->video_streams);
-
- if (priv->audio_session)
- g_object_unref(priv->audio_session);
- if (priv->video_session)
- g_object_unref(priv->video_session);
-
- for (iter = priv->participants; iter; iter = g_list_next(iter)) {
- g_object_unref(iter->data);
- }
- g_list_free(priv->participants);
-
- for (iter = priv->local_candidates; iter; iter = g_list_next(iter)) {
- g_free(iter->data);
- }
- g_list_free(priv->local_candidates);
-
- if (priv->local_candidate)
- g_free(priv->local_candidate);
- if (priv->remote_candidate)
- g_free(priv->remote_candidate);
-
gst_object_unref(priv->conference);
parent_class->finalize(media);
@@ -334,47 +245,6 @@ purple_media_set_property (GObject *obje
case PROP_CONNECTION:
media->priv->connection = g_value_get_pointer(value);
break;
- case PROP_AUDIO_SRC:
- if (media->priv->audio_src)
- gst_object_unref(media->priv->audio_src);
- media->priv->audio_src = g_value_get_object(value);
- gst_object_ref(media->priv->audio_src);
- gst_bin_add(GST_BIN(purple_media_get_audio_pipeline(media)),
- media->priv->audio_src);
- break;
- case PROP_AUDIO_SINK:
- if (media->priv->audio_sink)
- gst_object_unref(media->priv->audio_sink);
- media->priv->audio_sink = g_value_get_object(value);
- gst_object_ref(media->priv->audio_sink);
- gst_bin_add(GST_BIN(purple_media_get_audio_pipeline(media)),
- media->priv->audio_sink);
- break;
- case PROP_VIDEO_SRC:
- if (media->priv->video_src)
- gst_object_unref(media->priv->video_src);
- media->priv->video_src = g_value_get_object(value);
- gst_object_ref(media->priv->video_src);
- break;
- case PROP_VIDEO_SINK:
- if (media->priv->video_sink)
- gst_object_unref(media->priv->video_sink);
- media->priv->video_sink = g_value_get_object(value);
- gst_object_ref(media->priv->video_sink);
- break;
- case PROP_VIDEO_SESSION:
- if (media->priv->video_session)
- g_object_unref(media->priv->video_session);
- media->priv->video_session = g_value_get_object(value);
- gst_object_ref(media->priv->video_session);
- break;
- case PROP_AUDIO_SESSION:
- if (media->priv->audio_session)
- g_object_unref(media->priv->audio_session);
- media->priv->audio_session = g_value_get_object(value);
- gst_object_ref(media->priv->audio_session);
- break;
-
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@@ -399,25 +269,6 @@ purple_media_get_property (GObject *obje
case PROP_CONNECTION:
g_value_set_pointer(value, media->priv->connection);
break;
- case PROP_AUDIO_SRC:
- g_value_set_object(value, media->priv->audio_src);
- break;
- case PROP_AUDIO_SINK:
- g_value_set_object(value, media->priv->audio_sink);
- break;
- case PROP_VIDEO_SRC:
- g_value_set_object(value, media->priv->video_src);
- break;
- case PROP_VIDEO_SINK:
- g_value_set_object(value, media->priv->video_sink);
- break;
- case PROP_VIDEO_SESSION:
- g_value_set_object(value, media->priv->video_session);
- break;
- case PROP_AUDIO_SESSION:
- g_value_set_object(value, media->priv->audio_session);
- break;
-
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@@ -425,6 +276,97 @@ purple_media_get_property (GObject *obje
}
+static PurpleMediaSession*
+purple_media_get_session(PurpleMedia *media, const gchar *sess_id)
+{
+ return (PurpleMediaSession*) (media->priv->sessions) ?
+ g_hash_table_lookup(media->priv->sessions, sess_id) : NULL;
+}
+
+static FsParticipant*
+purple_media_get_participant(PurpleMedia *media, const gchar *name)
+{
+ return (FsParticipant*) (media->priv->participants) ?
+ g_hash_table_lookup(media->priv->participants, name) : NULL;
+}
+
+static FsStream*
+purple_media_session_get_stream(PurpleMediaSession *session, const gchar *name)
+{
+ return (FsStream*) (session->streams) ?
+ g_hash_table_lookup(session->streams, name) : NULL;
+}
+
+static GList*
+purple_media_session_get_local_candidates(PurpleMediaSession *session, const gchar *name)
+{
+ return (GList*) (session->local_candidates) ?
+ g_hash_table_lookup(session->local_candidates, name) : NULL;
+}
+
+static void
+purple_media_add_session(PurpleMedia *media, PurpleMediaSession *session)
+{
+ if (!media->priv->sessions) {
+ purple_debug_info("media", "Creating hash table for sessions\n");
+ media->priv->sessions = g_hash_table_new(g_str_hash, g_str_equal);
+ }
+ g_hash_table_insert(media->priv->sessions, g_strdup(session->id), session);
+}
+
+static FsParticipant *
+purple_media_add_participant(PurpleMedia *media, const gchar *name)
+{
+ FsParticipant *participant = purple_media_get_participant(media, name);
+
+ if (participant)
+ return participant;
+
+ participant = fs_conference_new_participant(media->priv->conference, g_strdup(name), NULL);
+
+ if (!media->priv->participants) {
+ purple_debug_info("media", "Creating hash table for participants\n");
+ media->priv->participants = g_hash_table_new(g_str_hash, g_str_equal);
+ }
+
+ g_hash_table_insert(media->priv->participants, g_strdup(name), participant);
+
+ return participant;
+}
+
+static void
+purple_media_insert_stream(PurpleMediaSession *session, const gchar *name, FsStream *stream)
+{
+ if (!session->streams) {
+ purple_debug_info("media", "Creating hash table for streams\n");
+ session->streams = g_hash_table_new(g_str_hash, g_str_equal);
+ }
+
+ g_hash_table_insert(session->streams, g_strdup(name), stream);
+}
+
+static void
+purple_media_insert_local_candidate(PurpleMediaSession *session, const gchar *name,
+ FsCandidate *candidate)
+{
+ GList *candidates = purple_media_session_get_local_candidates(session, name);
+
+ candidates = g_list_append(candidates, candidate);
+
+ if (!session->local_candidates) {
+ purple_debug_info("media", "Creating hash table for local candidates\n");
+ session->local_candidates = g_hash_table_new(g_str_hash, g_str_equal);
+ }
+
+ g_hash_table_insert(session->local_candidates, g_strdup(name), candidates);
+}
+
+GList *
+purple_media_get_session_names(PurpleMedia *media)
+{
+ return g_hash_table_get_keys(media->priv->sessions);
+}
+
void
purple_media_get_elements(PurpleMedia *media, GstElement **audio_src, GstElement **audio_sink,
GstElement **video_src, GstElement **video_sink)
@@ -441,70 +383,57 @@ void
}
void
-purple_media_set_audio_src(PurpleMedia *media, GstElement *audio_src)
+purple_media_set_src(PurpleMedia *media, const gchar *sess_id, GstElement *src)
{
- g_object_set(G_OBJECT(media), "audio-src", audio_src, NULL);
-}
+ PurpleMediaSession *session = purple_media_get_session(media, sess_id);
+ GstPad *sinkpad;
+ GstPad *srcpad;
+
+ if (session->src)
+ gst_object_unref(session->src);
+ session->src = src;
+ gst_bin_add(GST_BIN(purple_media_get_pipeline(media)),
+ session->src);
-void
-purple_media_set_audio_sink(PurpleMedia *media, GstElement *audio_sink)
-{
- g_object_set(G_OBJECT(media), "audio-sink", audio_sink, NULL);
+ g_object_get(session->session, "sink-pad", &sinkpad, NULL);
+ srcpad = gst_element_get_static_pad(src, "ghostsrc");
+ purple_debug_info("media", "connecting pad: %s\n",
+ gst_pad_link(srcpad, sinkpad) == GST_PAD_LINK_OK
+ ? "success" : "failure");
}
void
-purple_media_set_video_src(PurpleMedia *media, GstElement *video_src)
+purple_media_set_sink(PurpleMedia *media, const gchar *sess_id, GstElement *sink)
{
- g_object_set(G_OBJECT(media), "video-src", video_src, NULL);
+ PurpleMediaSession *session = purple_media_get_session(media, sess_id);
+ if (session->sink)
+ gst_object_unref(session->sink);
+ session->sink = sink;
+ gst_bin_add(GST_BIN(purple_media_get_pipeline(media)),
+ session->sink);
}
-void
-purple_media_set_video_sink(PurpleMedia *media, GstElement *video_sink)
-{
- g_object_set(G_OBJECT(media), "video-sink", video_sink, NULL);
-}
-
GstElement *
-purple_media_get_audio_src(PurpleMedia *media)
+purple_media_get_src(PurpleMedia *media, const gchar *sess_id)
{
- GstElement *ret;
- g_object_get(G_OBJECT(media), "audio-src", &ret, NULL);
- return ret;
+ return purple_media_get_session(media, sess_id)->src;
}
GstElement *
-purple_media_get_audio_sink(PurpleMedia *media)
+purple_media_get_sink(PurpleMedia *media, const gchar *sess_id)
{
- GstElement *ret;
- g_object_get(G_OBJECT(media), "audio-sink", &ret, NULL);
- return ret;
+	return purple_media_get_session(media, sess_id)->sink;
}
GstElement *
-purple_media_get_video_src(PurpleMedia *media)
+purple_media_get_pipeline(PurpleMedia *media)
{
- GstElement *ret;
- g_object_get(G_OBJECT(media), "video-src", &ret, NULL);
- return ret;
-}
-
-GstElement *
-purple_media_get_video_sink(PurpleMedia *media)
-{
- GstElement *ret;
- g_object_get(G_OBJECT(media), "video-sink", &ret, NULL);
- return ret;
-}
-
-GstElement *
-purple_media_get_audio_pipeline(PurpleMedia *media)
-{
- if (!media->priv->audio_pipeline) {
- media->priv->audio_pipeline = gst_pipeline_new(media->priv->name);
- gst_bin_add(GST_BIN(media->priv->audio_pipeline), GST_ELEMENT(media->priv->conference));
+ if (!media->priv->pipeline) {
+ media->priv->pipeline = gst_pipeline_new(media->priv->name);
+ gst_bin_add(GST_BIN(media->priv->pipeline), GST_ELEMENT(media->priv->conference));
}
- return media->priv->audio_pipeline;
+ return media->priv->pipeline;
}
PurpleConnection *
@@ -672,7 +601,7 @@ purple_media_audio_init_src(GstElement *
purple_debug_info("media", "purple_media_audio_init_src\n");
- *sendbin = gst_bin_new("sendbin");
+ *sendbin = gst_bin_new("purplesendaudiobin");
src = gst_element_factory_make("alsasrc", "asrc");
*sendlevel = gst_element_factory_make("level", "sendlevel");
gst_bin_add_many(GST_BIN(*sendbin), src, *sendlevel, NULL);
@@ -700,6 +629,46 @@ void
}
void
+purple_media_video_init_src(GstElement **sendbin)
+{
+ GstElement *src;
+ GstPad *pad;
+ GstPad *ghost;
+ const gchar *video_plugin = purple_prefs_get_string("/purple/media/video/plugin");
+ const gchar *video_device = purple_prefs_get_string("/purple/media/video/device");
+
+ purple_debug_info("media", "purple_media_video_init_src\n");
+
+ *sendbin = gst_bin_new("purplesendvideobin");
+ src = gst_element_factory_make(video_plugin, "videosrc");
+ gst_bin_add(GST_BIN(*sendbin), src);
+
+ if (!strcmp(video_plugin, "videotestsrc")) {
+ /* unless is-live is set to true it doesn't throttle videotestsrc */
+ g_object_set (G_OBJECT(src), "is-live", TRUE, NULL);
+ }
+ pad = gst_element_get_pad(src, "src");
+ ghost = gst_ghost_pad_new("ghostsrc", pad);
+ gst_element_add_pad(*sendbin, ghost);
+
+ /* set current video device on "src"... */
+ if (video_device) {
+ GList *devices = purple_media_get_devices(src);
+ GList *dev = devices;
+ purple_debug_info("media", "Setting device of GstElement src to %s\n",
+ video_device);
+ for (; dev ; dev = dev->next) {
+ GValue *device = (GValue *) dev->data;
+ char *name = purple_media_get_device_name(src, device);
+ if (strcmp(name, video_device) == 0) {
+ purple_media_element_set_device(src, device);
+ }
+ g_free(name);
+ }
+ }
+}
+
+void
purple_media_audio_init_recv(GstElement **recvbin, GstElement **recvlevel)
{
GstElement *sink;
@@ -707,7 +676,7 @@ purple_media_audio_init_recv(GstElement
purple_debug_info("media", "purple_media_audio_init_recv\n");
- *recvbin = gst_bin_new("pidginrecvbin");
+ *recvbin = gst_bin_new("pidginrecvaudiobin");
sink = gst_element_factory_make("alsasink", "asink");
g_object_set(G_OBJECT(sink), "sync", FALSE, NULL);
*recvlevel = gst_element_factory_make("level", "recvlevel");
@@ -721,20 +690,54 @@ purple_media_audio_init_recv(GstElement
purple_debug_info("media", "purple_media_audio_init_recv end\n");
}
+void
+purple_media_video_init_recv(GstElement **recvbin)
+{
+ GstElement *sink;
+ GstPad *pad, *ghost;
+
+ purple_debug_info("media", "purple_media_video_init_recv\n");
+
+ *recvbin = gst_bin_new("pidginrecvvideobin");
+ sink = gst_element_factory_make("autovideosink", "purplevideosink");
+ gst_bin_add(GST_BIN(*recvbin), sink);
+ pad = gst_element_get_pad(sink, "sink");
+ ghost = gst_ghost_pad_new("ghostsink", pad);
+ gst_element_add_pad(*recvbin, ghost);
+
+ purple_debug_info("media", "purple_media_video_init_recv end\n");
+}
+
static void
purple_media_new_local_candidate(FsStream *stream,
FsCandidate *local_candidate,
- PurpleMedia *media)
+ PurpleMediaSession *session)
{
+ gchar *name;
+ FsParticipant *participant;
purple_debug_info("media", "got new local candidate: %s\n", local_candidate->candidate_id);
- media->priv->local_candidates = g_list_append(media->priv->local_candidates,
- fs_candidate_copy(local_candidate));
+ g_object_get(stream, "participant", &participant, NULL);
+ g_object_get(participant, "cname", &name, NULL);
+ g_object_unref(participant);
+
+ purple_media_insert_local_candidate(session, name, fs_candidate_copy(local_candidate));
+
+ g_signal_emit(session->media, purple_media_signals[NEW_CANDIDATE],
+ 0, session->id, name, fs_candidate_copy(local_candidate));
+
+ g_free(name);
}
static void
-purple_media_candidates_prepared(FsStream *stream, PurpleMedia *media)
+purple_media_candidates_prepared(FsStream *stream, PurpleMediaSession *session)
{
- g_signal_emit(media, purple_media_signals[CANDIDATES_PREPARED], 0);
+ gchar *name;
+ FsParticipant *participant;
+ g_object_get(stream, "participant", &participant, NULL);
+ g_object_get(participant, "cname", &name, NULL);
+ g_object_unref(participant);
+ g_signal_emit(session->media, purple_media_signals[CANDIDATES_PREPARED], 0);
+ g_free(name);
}
/* callback called when a pair of transport candidates (local and remote)
@@ -743,107 +746,111 @@ purple_media_candidate_pair_established(
purple_media_candidate_pair_established(FsStream *stream,
FsCandidate *native_candidate,
FsCandidate *remote_candidate,
- PurpleMedia *media)
+ PurpleMediaSession *session)
{
- media->priv->local_candidate = fs_candidate_copy(native_candidate);
- media->priv->remote_candidate = fs_candidate_copy(remote_candidate);
+ session->local_candidate = fs_candidate_copy(native_candidate);
+ session->remote_candidate = fs_candidate_copy(remote_candidate);
purple_debug_info("media", "candidate pair established\n");
- g_signal_emit(media, purple_media_signals[CANDIDATE_PAIR], 0,
- media->priv->local_candidate,
- media->priv->remote_candidate);
+ g_signal_emit(session->media, purple_media_signals[CANDIDATE_PAIR], 0,
+ session->local_candidate,
+ session->remote_candidate);
}
static void
purple_media_src_pad_added(FsStream *stream, GstPad *srcpad,
- FsCodec *codec, PurpleMedia *media)
+ FsCodec *codec, PurpleMediaSession *session)
{
- GstElement *pipeline = purple_media_get_audio_pipeline(media);
- GstPad *sinkpad = gst_element_get_static_pad(purple_media_get_audio_sink(media), "ghostsink");
+ GstElement *pipeline = purple_media_get_pipeline(session->media);
+ GstPad *sinkpad = gst_element_get_static_pad(session->sink, "ghostsink");
purple_debug_info("media", "connecting new src pad: %s\n",
gst_pad_link(srcpad, sinkpad) == GST_PAD_LINK_OK ? "success" : "failure");
gst_element_set_state(pipeline, GST_STATE_PLAYING);
}
static gboolean
-purple_media_add_stream_internal(PurpleMedia *media, FsSession **session, GList **streams,
- GstElement *src, const gchar *who, FsMediaType type,
- FsStreamDirection type_direction, const gchar *transmitter)
+purple_media_add_stream_internal(PurpleMedia *media, const gchar *sess_id,
+ const gchar *who, FsMediaType type,
+ FsStreamDirection type_direction,
+ const gchar *transmitter)
{
- char *cname = NULL;
+ PurpleMediaSession *session = purple_media_get_session(media, sess_id);
FsParticipant *participant = NULL;
- GList *l = NULL;
FsStream *stream = NULL;
- FsParticipant *p = NULL;
FsStreamDirection *direction = NULL;
- FsSession *s = NULL;
- if (!*session) {
+ if (!session) {
GError *err = NULL;
- *session = fs_conference_new_session(media->priv->conference, type, &err);
+ GList *codec_conf;
+ session = g_new0(PurpleMediaSession, 1);
+
+ session->session = fs_conference_new_session(media->priv->conference, type, &err);
+
if (err != NULL) {
purple_debug_error("media", "Error creating session: %s\n", err->message);
g_error_free(err);
purple_conv_present_error(who,
purple_connection_get_account(purple_media_get_connection(media)),
_("Error creating session."));
+ g_free(session);
return FALSE;
}
- if (src) {
- GstPad *sinkpad;
- GstPad *srcpad;
- g_object_get(*session, "sink-pad", &sinkpad, NULL);
- srcpad = gst_element_get_static_pad(src, "ghostsrc");
- purple_debug_info("media", "connecting pad: %s\n",
- gst_pad_link(srcpad, sinkpad) == GST_PAD_LINK_OK
- ? "success" : "failure");
- }
- }
-
- for (l = media->priv->participants; l != NULL; l = g_list_next(l)) {
- g_object_get(l->data, "cname", cname, NULL);
- if (!strcmp(cname, who)) {
- g_free(cname);
- participant = l->data;
- break;
- }
- g_free(cname);
- }
+ /*
+ * None of these three worked for me. THEORA is known to
+ * not work as of at least Farsight2 0.0.2
+ */
+ codec_conf = g_list_prepend(NULL, fs_codec_new(FS_CODEC_ID_DISABLE,
+ "THEORA", FS_MEDIA_TYPE_VIDEO, 90000));
+ codec_conf = g_list_prepend(codec_conf, fs_codec_new(FS_CODEC_ID_DISABLE,
+ "MPV", FS_MEDIA_TYPE_VIDEO, 90000));
+ codec_conf = g_list_prepend(codec_conf, fs_codec_new(FS_CODEC_ID_DISABLE,
+ "H264", FS_MEDIA_TYPE_VIDEO, 90000));
- if (!participant) {
- participant = fs_conference_new_participant(media->priv->conference, (gchar*)who, NULL);
- media->priv->participants = g_list_prepend(media->priv->participants, participant);
- }
-
- for (l = *streams; l != NULL; l = g_list_next(l)) {
- g_object_get(l->data, "participant", &p, "direction", &direction, "session", &s, NULL);
+ /* XXX: SPEEX has a latency of 5 or 6 seconds for me */
+#if 0
+ /* SPEEX is added through the configuration */
+ codec_conf = g_list_prepend(codec_conf, fs_codec_new(FS_CODEC_ID_ANY,
+ "SPEEX", FS_MEDIA_TYPE_AUDIO, 8000));
+ codec_conf = g_list_prepend(codec_conf, fs_codec_new(FS_CODEC_ID_ANY,
+ "SPEEX", FS_MEDIA_TYPE_AUDIO, 16000));
+#endif
- if (participant == p && *session == s) {
- stream = l->data;
- break;
- }
+ g_object_set(G_OBJECT(session->session), "local-codecs-config",
+ codec_conf, NULL);
+
+ fs_codec_list_destroy(codec_conf);
+
+ session->id = g_strdup(sess_id);
+ session->media = media;
+ session->type = type;
+
+ purple_media_add_session(media, session);
}
+ participant = purple_media_add_participant(media, who);
+
+ stream = purple_media_session_get_stream(session, who);
+
if (!stream) {
- stream = fs_session_new_stream(*session, participant,
+ stream = fs_session_new_stream(session->session, participant,
type_direction, transmitter, 0, NULL, NULL);
- *streams = g_list_prepend(*streams, stream);
+ purple_media_insert_stream(session, who, stream);
/* callback for new local candidate (new local candidate retreived) */
g_signal_connect(G_OBJECT(stream),
- "new-local-candidate", G_CALLBACK(purple_media_new_local_candidate), media);
+ "new-local-candidate", G_CALLBACK(purple_media_new_local_candidate), session);
/* callback for source pad added (new stream source ready) */
g_signal_connect(G_OBJECT(stream),
- "src-pad-added", G_CALLBACK(purple_media_src_pad_added), media);
+ "src-pad-added", G_CALLBACK(purple_media_src_pad_added), session);
/* callback for local candidates prepared (local candidates ready to send) */
g_signal_connect(G_OBJECT(stream),
"local-candidates-prepared",
- G_CALLBACK(purple_media_candidates_prepared), media);
+ G_CALLBACK(purple_media_candidates_prepared), session);
/* callback for new active candidate pair (established connection) */
g_signal_connect(G_OBJECT(stream),
"new-active-candidate-pair",
- G_CALLBACK(purple_media_candidate_pair_established), media);
+ G_CALLBACK(purple_media_candidate_pair_established), session);
} else if (*direction != type_direction) {
/* change direction */
g_object_set(stream, "direction", type_direction, NULL);
@@ -853,7 +860,7 @@ gboolean
}
gboolean
-purple_media_add_stream(PurpleMedia *media, const gchar *who,
+purple_media_add_stream(PurpleMedia *media, const gchar *sess_id, const gchar *who,
PurpleMediaStreamType type,
const gchar *transmitter)
{
@@ -869,9 +876,7 @@ purple_media_add_stream(PurpleMedia *med
else
type_direction = FS_DIRECTION_NONE;
- if (!purple_media_add_stream_internal(media, &media->priv->audio_session,
- &media->priv->audio_streams,
- media->priv->audio_src, who,
+ if (!purple_media_add_stream_internal(media, sess_id, who,
FS_MEDIA_TYPE_AUDIO, type_direction,
transmitter)) {
return FALSE;
@@ -887,9 +892,7 @@ purple_media_add_stream(PurpleMedia *med
else
type_direction = FS_DIRECTION_NONE;
- if (!purple_media_add_stream_internal(media, &media->priv->video_session,
- &media->priv->video_streams,
- media->priv->video_src, who,
+ if (!purple_media_add_stream_internal(media, sess_id, who,
FS_MEDIA_TYPE_VIDEO, type_direction,
transmitter)) {
return FALSE;
@@ -899,76 +902,74 @@ void
}
void
-purple_media_remove_stream(PurpleMedia *media, const gchar *who, PurpleMediaStreamType type)
+purple_media_remove_stream(PurpleMedia *media, const gchar *sess_id, const gchar *who)
{
}
-static FsStream *
-purple_media_get_audio_stream(PurpleMedia *media, const gchar *name)
+PurpleMediaStreamType
+purple_media_get_session_type(PurpleMedia *media, const gchar *sess_id)
{
- GList *streams = media->priv->audio_streams;
- for (; streams; streams = streams->next) {
- FsParticipant *participant;
- gchar *cname;
- g_object_get(streams->data, "participant", &participant, NULL);
- g_object_get(participant, "cname", &cname, NULL);
-
- if (!strcmp(cname, name)) {
- return streams->data;
- }
- }
-
- return NULL;
+ PurpleMediaSession *session = purple_media_get_session(media, sess_id);
+ return session->type;
}
GList *
-purple_media_get_local_audio_codecs(PurpleMedia *media)
+purple_media_get_local_codecs(PurpleMedia *media, const gchar *sess_id)
{
GList *codecs;
- g_object_get(G_OBJECT(media->priv->audio_session), "local-codecs", &codecs, NULL);
+ g_object_get(G_OBJECT(purple_media_get_session(media, sess_id)->session),
+ "local-codecs", &codecs, NULL);
return codecs;
}
GList *
-purple_media_get_local_audio_candidates(PurpleMedia *media)
+purple_media_get_local_candidates(PurpleMedia *media, const gchar *sess_id, const gchar *name)
{
- return media->priv->local_candidates;
+ PurpleMediaSession *session = purple_media_get_session(media, sess_id);
+ return purple_media_session_get_local_candidates(session, name);
}
GList *
-purple_media_get_negotiated_audio_codecs(PurpleMedia *media)
+purple_media_get_negotiated_codecs(PurpleMedia *media, const gchar *sess_id)
{
+ PurpleMediaSession *session = purple_media_get_session(media, sess_id);
GList *codec_intersection;
- g_object_get(media->priv->audio_session, "negotiated-codecs", &codec_intersection, NULL);
+ g_object_get(session->session, "negotiated-codecs", &codec_intersection, NULL);
return codec_intersection;
}
void
-purple_media_add_remote_audio_candidates(PurpleMedia *media, const gchar *name, GList *remote_candidates)
+purple_media_add_remote_candidates(PurpleMedia *media, const gchar *sess_id,
+ const gchar *name, GList *remote_candidates)
{
- FsStream *stream = purple_media_get_audio_stream(media, name);
+ PurpleMediaSession *session = purple_media_get_session(media, sess_id);
+ FsStream *stream = purple_media_session_get_stream(session, name);
GList *candidates = remote_candidates;
for (; candidates; candidates = candidates->next)
fs_stream_add_remote_candidate(stream, candidates->data, NULL);
}
FsCandidate *
-purple_media_get_local_candidate(PurpleMedia *media)
+purple_media_get_local_candidate(PurpleMedia *media, const gchar *sess_id, const gchar *name)
{
- return media->priv->local_candidate;
+ PurpleMediaSession *session = purple_media_get_session(media, sess_id);
+ return session->local_candidate;
}
FsCandidate *
-purple_media_get_remote_candidate(PurpleMedia *media)
+purple_media_get_remote_candidate(PurpleMedia *media, const gchar *sess_id, const gchar *name)
{
- return media->priv->remote_candidate;
+ PurpleMediaSession *session = purple_media_get_session(media, sess_id);
+ return session->remote_candidate;
}
void
-purple_media_set_remote_audio_codecs(PurpleMedia *media, const gchar *name, GList *codecs)
+purple_media_set_remote_codecs(PurpleMedia *media, const gchar *sess_id, const gchar *name, GList *codecs)
{
- fs_stream_set_remote_codecs(purple_media_get_audio_stream(media, name), codecs, NULL);
+ PurpleMediaSession *session = purple_media_get_session(media, sess_id);
+ FsStream *stream = purple_media_session_get_stream(session, name);
+ fs_stream_set_remote_codecs(stream, codecs, NULL);
}
#endif /* USE_VV */
============================================================
--- libpurple/media.h 5a198dd5b14ebd199b2811cc955d9f1e06a0bcf6
+++ libpurple/media.h 147731438eade59e0c49313feb546be10ffc9b0e
@@ -48,6 +48,7 @@ typedef struct _PurpleMediaPrivate Purpl
typedef struct _PurpleMedia PurpleMedia;
typedef struct _PurpleMediaClass PurpleMediaClass;
typedef struct _PurpleMediaPrivate PurpleMediaPrivate;
+typedef struct _PurpleMediaSession PurpleMediaSession;
typedef enum {
PURPLE_MEDIA_RECV_AUDIO = 1 << 0,
@@ -71,20 +72,18 @@ GType purple_media_get_type(void);
GType purple_media_get_type(void);
+GList *purple_media_get_session_names(PurpleMedia *media);
+
void purple_media_get_elements(PurpleMedia *media, GstElement **audio_src, GstElement **audio_sink,
GstElement **video_src, GstElement **video_sink);
-void purple_media_set_audio_src(PurpleMedia *media, GstElement *video_src);
-void purple_media_set_audio_sink(PurpleMedia *media, GstElement *video_src);
-void purple_media_set_video_src(PurpleMedia *media, GstElement *video_src);
-void purple_media_set_video_sink(PurpleMedia *media, GstElement *video_src);
+void purple_media_set_src(PurpleMedia *media, const gchar *sess_id, GstElement *src);
+void purple_media_set_sink(PurpleMedia *media, const gchar *sess_id, GstElement *src);
-GstElement *purple_media_get_audio_src(PurpleMedia *media);
-GstElement *purple_media_get_audio_sink(PurpleMedia *media);
-GstElement *purple_media_get_video_src(PurpleMedia *media);
-GstElement *purple_media_get_video_sink(PurpleMedia *media);
+GstElement *purple_media_get_src(PurpleMedia *media, const gchar *sess_id);
+GstElement *purple_media_get_sink(PurpleMedia *media, const gchar *sess_id);
-GstElement *purple_media_get_audio_pipeline(PurpleMedia *media);
+GstElement *purple_media_get_pipeline(PurpleMedia *media);
PurpleConnection *purple_media_get_connection(PurpleMedia *media);
const char *purple_media_get_screenname(PurpleMedia *media);
@@ -111,21 +110,24 @@ void purple_media_audio_init_recv(GstEle
void purple_media_video_init_src(GstElement **sendbin);
void purple_media_audio_init_recv(GstElement **recvbin, GstElement **recvlevel);
+void purple_media_video_init_recv(GstElement **sendbin);
-gboolean purple_media_add_stream(PurpleMedia *media, const gchar *who,
+gboolean purple_media_add_stream(PurpleMedia *media, const gchar *sess_id, const gchar *who,
PurpleMediaStreamType type, const gchar *transmitter);
-void purple_media_remove_stream(PurpleMedia *media, const gchar *who, PurpleMediaStreamType type);
+void purple_media_remove_stream(PurpleMedia *media, const gchar *sess_id, const gchar *who);
-GList *purple_media_get_local_audio_candidates(PurpleMedia *media);
-GList *purple_media_get_negotiated_audio_codecs(PurpleMedia *media);
+PurpleMediaStreamType purple_media_get_session_type(PurpleMedia *media, const gchar *sess_id);
-GList *purple_media_get_local_audio_codecs(PurpleMedia *media);
-void purple_media_add_remote_audio_candidates(PurpleMedia *media, const gchar *name,
- GList *remote_candidates);
-FsCandidate *purple_media_get_local_candidate(PurpleMedia *media);
-FsCandidate *purple_media_get_remote_candidate(PurpleMedia *media);
-void purple_media_set_remote_audio_codecs(PurpleMedia *media, const gchar *name, GList *codecs);
+GList *purple_media_get_negotiated_codecs(PurpleMedia *media, const gchar *sess_id);
+GList *purple_media_get_local_codecs(PurpleMedia *media, const gchar *sess_id);
+void purple_media_add_remote_candidates(PurpleMedia *media, const gchar *sess_id,
+ const gchar *name, GList *remote_candidates);
+GList *purple_media_get_local_candidates(PurpleMedia *media, const gchar *sess_id, const gchar *name);
+FsCandidate *purple_media_get_local_candidate(PurpleMedia *media, const gchar *sess_id, const gchar *name);
+FsCandidate *purple_media_get_remote_candidate(PurpleMedia *media, const gchar *sess_id, const gchar *name);
+void purple_media_set_remote_codecs(PurpleMedia *media, const gchar *sess_id, const gchar *name, GList *codecs);
+
G_END_DECLS
#endif /* USE_VV */
============================================================
--- libpurple/protocols/jabber/google.c 4ca67713c6066ec6708666551a602523cacc5a13
+++ libpurple/protocols/jabber/google.c 0eb4fd5a7580eb238ee60dc451a1321cf32bd8be
@@ -102,7 +102,7 @@ google_session_send_accept(GoogleSession
google_session_send_accept(GoogleSession *session)
{
xmlnode *sess, *desc, *payload;
- GList *codecs = purple_media_get_negotiated_audio_codecs(session->media);
+ GList *codecs = purple_media_get_negotiated_codecs(session->media, "google-voice");
JabberIq *iq = jabber_iq_new(session->js, JABBER_IQ_SET);
xmlnode_set_attrib(iq->node, "to", session->remote_jid);
@@ -124,7 +124,7 @@ google_session_send_accept(GoogleSession
fs_codec_list_destroy(codecs);
jabber_iq_send(iq);
- gst_element_set_state(purple_media_get_audio_pipeline(session->media), GST_STATE_PLAYING);
+ gst_element_set_state(purple_media_get_pipeline(session->media), GST_STATE_PLAYING);
}
static void
@@ -160,7 +160,8 @@ google_session_candidates_prepared (Purp
google_session_candidates_prepared (PurpleMedia *media, GoogleSession *session)
{
JabberIq *iq = jabber_iq_new(session->js, JABBER_IQ_SET);
- GList *candidates = purple_media_get_local_audio_candidates(session->media);
+ GList *candidates = purple_media_get_local_candidates(session->media, "google-voice",
+ session->remote_jid);
FsCandidate *transport;
xmlnode *sess;
xmlnode *candidate;
@@ -217,7 +218,7 @@ google_session_handle_initiate(JabberStr
"fsrtpconference", session->remote_jid);
/* "rawudp" will need to be changed to "nice" when libnice is finished */
- purple_media_add_stream(session->media, session->remote_jid,
+ purple_media_add_stream(session->media, "google-voice", session->remote_jid,
PURPLE_MEDIA_AUDIO, "rawudp");
desc_element = xmlnode_get_child(sess, "description");
@@ -234,7 +235,7 @@ google_session_handle_initiate(JabberStr
codecs = g_list_append(codecs, codec);
}
- purple_media_set_remote_audio_codecs(session->media, session->remote_jid, codecs);
+ purple_media_set_remote_codecs(session->media, "google-voice", session->remote_jid, codecs);
g_signal_connect_swapped(G_OBJECT(session->media), "accepted",
G_CALLBACK(google_session_send_accept), session);
@@ -282,7 +283,7 @@ google_session_handle_candidates(JabberS
list = g_list_append(list, info);
}
- purple_media_add_remote_audio_candidates(session->media, session->remote_jid, list);
+ purple_media_add_remote_candidates(session->media, "google-voice", session->remote_jid, list);
fs_candidate_list_destroy(list);
result = jabber_iq_new(js, JABBER_IQ_RESULT);
============================================================
--- libpurple/protocols/jabber/jabber.c eef1ae9332a52493a28465e310ef6c69f815b687
+++ libpurple/protocols/jabber/jabber.c d88b85130b9dca29d5a133e1e724b15469be6393
@@ -2391,7 +2391,6 @@ gboolean jabber_can_do_media(PurpleConne
purple_debug_error("jabber", "Could not find buddy\n");
return FALSE;
}
-#if 0 /* These can be added once we support video */
/* XMPP will only support two-way media, AFAIK... */
if (type == (PURPLE_MEDIA_AUDIO | PURPLE_MEDIA_VIDEO)) {
purple_debug_info("jabber",
@@ -2399,21 +2398,17 @@ gboolean jabber_can_do_media(PurpleConne
return (jabber_buddy_has_capability(jb, XEP_0167_CAP) ||
jabber_buddy_has_capability(jb, GTALK_CAP)) &&
jabber_buddy_has_capability(jb, XEP_0180_CAP);
- } else
-#endif
- if (type == (PURPLE_MEDIA_AUDIO)) {
+ } else if (type == (PURPLE_MEDIA_AUDIO)) {
purple_debug_info("jabber",
"Checking audio XEP support for %s\n", who);
return jabber_buddy_has_capability(jb, XEP_0167_CAP) ||
jabber_buddy_has_capability(jb, GTALK_CAP);
- }
-#if 0
- else if (type == (PURPLE_MEDIA_VIDEO)) {
+ } else if (type == (PURPLE_MEDIA_VIDEO)) {
purple_debug_info("jabber",
"Checking video XEP support for %s\n", who);
return jabber_buddy_has_capability(jb, XEP_0180_CAP);
}
-#endif
+
return FALSE;
}
============================================================
--- libpurple/protocols/jabber/jingle.c 57c03c2a3cf5260165c3ba6cad9002e09c017314
+++ libpurple/protocols/jabber/jingle.c 3ff941f6f79cbae767c175e9ed749a8c68e7db34
@@ -278,24 +278,31 @@ static GList *
}
static GList *
-jabber_jingle_get_codecs(const xmlnode *description)
+jabber_jingle_get_codecs(xmlnode *description)
{
GList *codecs = NULL;
xmlnode *codec_element = NULL;
const char *encoding_name,*id, *clock_rate;
FsCodec *codec;
-
+ FsMediaType type = !strcmp(xmlnode_get_namespace(description), JINGLE_VIDEO) ?
+ FS_MEDIA_TYPE_VIDEO : FS_MEDIA_TYPE_AUDIO;
+
for (codec_element = xmlnode_get_child(description, "payload-type") ;
codec_element ;
codec_element = xmlnode_get_next_twin(codec_element)) {
encoding_name = xmlnode_get_attrib(codec_element, "name");
+
id = xmlnode_get_attrib(codec_element, "id");
clock_rate = xmlnode_get_attrib(codec_element, "clockrate");
codec = fs_codec_new(atoi(id), encoding_name,
- FS_MEDIA_TYPE_AUDIO,
+ type,
clock_rate ? atoi(clock_rate) : 0);
- codecs = g_list_append(codecs, codec);
+ purple_debug_info("jingle", "codec: %i, %s, %s, %i\n", codec->id,
+ codec->encoding_name, codec->media_type == FS_MEDIA_TYPE_AUDIO ?
+ "FS_MEDIA_TYPE_AUDIO" : codec->media_type == FS_MEDIA_TYPE_VIDEO ?
+ "FS_MEDIA_TYPE_VIDEO" : "FS_MEDIA_TYPE_NONE", codec->clock_rate);
+ codecs = g_list_append(codecs, codec);
}
return codecs;
}
@@ -397,8 +404,9 @@ jabber_jingle_session_add_payload_types(
{
JingleSession *session = jabber_jingle_session_content_get_session(jsc);
PurpleMedia *media = jabber_jingle_session_get_media(session);
- /* change this to the generic function when PurpleMedia supports video */
- GList *codecs = purple_media_get_local_audio_codecs(media);
+ /* should this be local_codecs or negotiated-codecs? */
+ GList *codecs = purple_media_get_local_codecs(media,
+ jabber_jingle_session_content_get_name(jsc));
for (; codecs ; codecs = codecs->next) {
FsCodec *codec = (FsCodec*)codecs->data;
@@ -407,8 +415,7 @@ jabber_jingle_session_add_payload_types(
g_snprintf(id, sizeof(id), "%d", codec->id);
g_snprintf(clockrate, sizeof(clockrate), "%d", codec->clock_rate);
- g_snprintf(channels, sizeof(channels), "%d",
- codec->channels == 0 ? 1 : codec->channels);
+ g_snprintf(channels, sizeof(channels), "%d", codec->channels);
xmlnode_set_attrib(payload, "name", codec->encoding_name);
xmlnode_set_attrib(payload, "id", id);
@@ -569,36 +576,8 @@ jabber_jingle_session_create_iq(const Ji
jabber_jingle_session_get_remote_jid(session));
return result;
}
-
+#if 0
static xmlnode *
-jabber_jingle_session_create_description(const JingleSession *sess)
-{
- GList *codecs = purple_media_get_local_audio_codecs(sess->media);
- xmlnode *description = xmlnode_new("description");
-
- xmlnode_set_namespace(description, JINGLE_AUDIO);
-
- /* get codecs */
- for (; codecs ; codecs = codecs->next) {
- FsCodec *codec = (FsCodec*)codecs->data;
- char id[8], clockrate[10], channels[10];
- xmlnode *payload = xmlnode_new_child(description, "payload-type");
-
- g_snprintf(id, sizeof(id), "%d", codec->id);
- g_snprintf(clockrate, sizeof(clockrate), "%d", codec->clock_rate);
- g_snprintf(channels, sizeof(channels), "%d", codec->channels);
-
- xmlnode_set_attrib(payload, "name", codec->encoding_name);
- xmlnode_set_attrib(payload, "id", id);
- xmlnode_set_attrib(payload, "clockrate", clockrate);
- xmlnode_set_attrib(payload, "channels", channels);
- }
-
- fs_codec_list_destroy(codecs);
- return description;
-}
-
-static xmlnode *
jabber_jingle_session_create_content_accept(const JingleSession *sess)
{
xmlnode *jingle =
@@ -649,6 +628,7 @@ jabber_jingle_session_create_content_rep
return jingle;
}
+#endif
static JabberIq *
jabber_jingle_session_create_session_accept(const JingleSession *session,
@@ -744,7 +724,7 @@ jabber_jingle_session_create_transport_i
jabber_jingle_session_add_candidate_iceudp(transport, candidate, NULL);
return request;
}
-
+#if 0
static void
jabber_jingle_session_send_content_accept(JingleSession *session)
{
@@ -757,7 +737,7 @@ jabber_jingle_session_send_content_accep
xmlnode_insert_child(result->node, jingle);
jabber_iq_send(result);
}
-
+#endif
static void
jabber_jingle_session_send_session_accept(JingleSession *session)
{
@@ -766,8 +746,10 @@ jabber_jingle_session_send_session_accep
GList *contents = jabber_jingle_session_get_contents(session);
for (; contents; contents = contents->next) {
JingleSessionContent *jsc = contents->data;
- GList *candidates = purple_media_get_local_audio_candidates(
- jabber_jingle_session_get_media(session));
+ GList *candidates = purple_media_get_local_candidates(
+ media,
+ jabber_jingle_session_content_get_name(jsc),
+ jabber_jingle_session_get_remote_jid(session));
purple_debug_info("jabber",
"jabber_session_candidates_prepared: %d candidates\n",
g_list_length(candidates));
@@ -778,14 +760,21 @@ jabber_jingle_session_send_session_accep
jabber_iq_send(result);
}
fs_candidate_list_destroy(candidates);
+ purple_debug_info("jingle", "codec intersection: %i\n",
+ g_list_length(purple_media_get_negotiated_codecs(media,
+ jabber_jingle_session_content_get_name(jsc))));
+ jabber_iq_send(jabber_jingle_session_create_session_accept(session,
+ purple_media_get_local_candidate(media,
+ jabber_jingle_session_content_get_name(jsc),
+ jabber_jingle_session_get_remote_jid(session)),
+ purple_media_get_remote_candidate(media,
+ jabber_jingle_session_content_get_name(jsc),
+ jabber_jingle_session_get_remote_jid(session))));
}
- jabber_iq_send(jabber_jingle_session_create_session_accept(session,
- purple_media_get_local_candidate(media),
- purple_media_get_remote_candidate(media)));
purple_debug_info("jabber", "Sent session accept, starting stream\n");
- gst_element_set_state(purple_media_get_audio_pipeline(session->media), GST_STATE_PLAYING);
+ gst_element_set_state(purple_media_get_pipeline(session->media), GST_STATE_PLAYING);
session->session_started = TRUE;
}
@@ -822,7 +811,8 @@ jabber_jingle_session_content_create_med
jabber_jingle_session_content_create_internal(session,
"audio-content", "initiator", sender,
TRANSPORT_ICEUDP, JINGLE_AUDIO);
- } else if (type & PURPLE_MEDIA_VIDEO) {
+ }
+ if (type & PURPLE_MEDIA_VIDEO) {
if (type == PURPLE_MEDIA_SEND_VIDEO)
strcpy(sender, "initiator");
else if (type == PURPLE_MEDIA_RECV_VIDEO)
@@ -837,9 +827,8 @@ jabber_jingle_session_content_create_par
static void
jabber_jingle_session_content_create_parse(JingleSession *session,
- xmlnode *jingle)
+ xmlnode *content)
{
- xmlnode *content = xmlnode_get_child(jingle, "content");
xmlnode *description = xmlnode_get_child(content, "description");
xmlnode *transport = xmlnode_get_child(content, "transport");
@@ -907,6 +896,7 @@ jabber_jingle_session_initiate_media_int
const char *remote_jid)
{
PurpleMedia *media = NULL;
+ GList *contents = jabber_jingle_session_get_contents(session);
media = purple_media_manager_create_media(purple_media_manager_get(),
session->js->gc, "fsrtpconference", remote_jid);
@@ -916,12 +906,29 @@ jabber_jingle_session_initiate_media_int
return FALSE;
}
- /* this will need to be changed to "nice" once the libnice transmitter is finished */
- if (!purple_media_add_stream(media, remote_jid, PURPLE_MEDIA_AUDIO, "rawudp")) {
- purple_debug_error("jabber", "Couldn't create audio stream\n");
- purple_media_reject(media);
- return FALSE;
+ for (; contents; contents = contents->next) {
+ JingleSessionContent *jsc = contents->data;
+ gboolean result = FALSE;
+
+ /* these will need to be changed to "nice" once the libnice transmitter is finished */
+ if (jabber_jingle_session_content_is_type(jsc, JINGLE_AUDIO)) {
+ result = purple_media_add_stream(media, "audio-content", remote_jid,
+ PURPLE_MEDIA_AUDIO, "rawudp");
+ purple_debug_info("jingle", "Created Jingle audio session\n");
+ }
+ else if (jabber_jingle_session_content_is_type(jsc, JINGLE_VIDEO)) {
+ result = purple_media_add_stream(media, "video-content", remote_jid,
+ PURPLE_MEDIA_VIDEO, "rawudp");
+ purple_debug_info("jingle", "Created Jingle video session\n");
+ }
+
+ if (!result) {
+ purple_debug_error("jabber", "Couldn't create stream\n");
+ purple_media_reject(media);
+ return FALSE;
+ }
}
+ g_list_free(contents);
jabber_jingle_session_set_remote_jid(session, remote_jid);
jabber_jingle_session_set_initiator(session, initiator);
@@ -967,8 +974,10 @@ jabber_jingle_session_initiate_result_cb
contents = jabber_jingle_session_get_contents(session);
for (; contents; contents = contents->next) {
JingleSessionContent *jsc = contents->data;
- GList *candidates = purple_media_get_local_audio_candidates(
- jabber_jingle_session_get_media(session));
+ GList *candidates = purple_media_get_local_candidates(
+ jabber_jingle_session_get_media(session),
+ jabber_jingle_session_content_get_name(jsc),
+ jabber_jingle_session_get_remote_jid(session));
purple_debug_info("jabber",
"jabber_session_candidates_prepared: %d candidates\n",
g_list_length(candidates));
@@ -1012,6 +1021,8 @@ jabber_jingle_session_initiate_media(Jab
}
session = jabber_jingle_session_create(js);
+ jabber_jingle_session_content_create_media(session, type);
+
/* set ourselves as initiator */
me = g_strdup_printf("%s@%s/%s", js->user->node, js->user->domain, js->user->resource);
@@ -1025,8 +1036,6 @@ jabber_jingle_session_initiate_media(Jab
g_free(jid);
g_free(me);
- jabber_jingle_session_content_create_media(session, type);
-
/* create request */
request = jabber_jingle_session_create_session_initiate(session);
jabber_iq_set_callback(request, jabber_jingle_session_initiate_result_cb, NULL);
@@ -1064,6 +1073,7 @@ jabber_jingle_session_handle_content_rep
void
jabber_jingle_session_handle_content_replace(JabberStream *js, xmlnode *packet)
{
+#if 0
xmlnode *jingle = xmlnode_get_child(packet, "jingle");
const char *sid = xmlnode_get_attrib(jingle, "sid");
JingleSession *session = jabber_jingle_session_find_by_id(js, sid);
@@ -1086,6 +1096,7 @@ jabber_jingle_session_handle_content_rep
jabber_iq_send(accept);
}
+#endif
}
void
@@ -1110,60 +1121,67 @@ jabber_jingle_session_handle_session_acc
jabber_jingle_session_get_remote_jid(session));
jabber_iq_set_id(result, xmlnode_get_attrib(packet, "id"));
- description = xmlnode_get_child(content, "description");
- transport = xmlnode_get_child(content, "transport");
+ for (content = xmlnode_get_child(jingle, "content"); content;
+ content = xmlnode_get_next_twin(content)) {
+ description = xmlnode_get_child(content, "description");
+ transport = xmlnode_get_child(content, "transport");
- /* fetch codecs from remote party */
- purple_debug_info("jabber", "get codecs from session-accept\n");
- remote_codecs = jabber_jingle_get_codecs(description);
- purple_debug_info("jabber", "get transport candidates from session accept\n");
- remote_transports = jabber_jingle_get_candidates(transport);
+ /* fetch codecs from remote party */
+ purple_debug_info("jabber", "get codecs from session-accept\n");
+ remote_codecs = jabber_jingle_get_codecs(description);
+ purple_debug_info("jabber", "get transport candidates from session accept\n");
+ remote_transports = jabber_jingle_get_candidates(transport);
- purple_debug_info("jabber", "Got %d codecs from responder\n",
- g_list_length(remote_codecs));
- purple_debug_info("jabber", "Got %d transport candidates from responder\n",
- g_list_length(remote_transports));
+ purple_debug_info("jabber", "Got %d codecs from responder\n",
+ g_list_length(remote_codecs));
+ purple_debug_info("jabber", "Got %d transport candidates from responder\n",
+ g_list_length(remote_transports));
- purple_debug_info("jabber", "Setting remote codecs on stream\n");
+ purple_debug_info("jabber", "Setting remote codecs on stream\n");
- purple_media_set_remote_audio_codecs(session->media,
- jabber_jingle_session_get_remote_jid(session),
- remote_codecs);
+ purple_media_set_remote_codecs(session->media,
+ xmlnode_get_attrib(content, "name"),
+ jabber_jingle_session_get_remote_jid(session),
+ remote_codecs);
- codec_intersection = purple_media_get_negotiated_audio_codecs(session->media);
- purple_debug_info("jabber", "codec_intersection contains %d elems\n",
- g_list_length(codec_intersection));
- /* get the top codec */
- if (g_list_length(codec_intersection) > 0) {
- top = (FsCodec *) codec_intersection->data;
- purple_debug_info("jabber", "Found a suitable codec on stream = %d\n",
- top->id);
+ codec_intersection = purple_media_get_negotiated_codecs(session->media,
+ xmlnode_get_attrib(content, "name"));
+ purple_debug_info("jabber", "codec_intersection contains %d elems\n",
+ g_list_length(codec_intersection));
+ /* get the top codec */
+ if (g_list_length(codec_intersection) > 0) {
+ top = (FsCodec *) codec_intersection->data;
+ purple_debug_info("jabber", "Found a suitable codec on stream = %d\n",
+ top->id);
- /* we have found a suitable codec, but we will not start the stream
- just yet, wait for transport negotiation to complete... */
- }
- /* if we also got transport candidates, add them to our streams
- list of known remote candidates */
- if (g_list_length(remote_transports) > 0) {
- purple_media_add_remote_audio_candidates(session->media,
- jabber_jingle_session_get_remote_jid(session),
- remote_transports);
- fs_candidate_list_destroy(remote_transports);
- }
- if (g_list_length(codec_intersection) == 0 &&
- g_list_length(remote_transports)) {
- /* we didn't get any candidates and the codec intersection is empty,
- this means this was not a content-accept message and we couldn't
- find any suitable codecs, should return error and hang up */
+ /* we have found a suitable codec, but we will not start the stream
+ just yet, wait for transport negotiation to complete... */
+ }
+ /* if we also got transport candidates, add them to our streams
+ list of known remote candidates */
+ if (g_list_length(remote_transports) > 0) {
+ purple_media_add_remote_candidates(session->media,
+ xmlnode_get_attrib(content, "name"),
+ jabber_jingle_session_get_remote_jid(session),
+ remote_transports);
+ fs_candidate_list_destroy(remote_transports);
+ }
+ if (g_list_length(codec_intersection) == 0 &&
+ g_list_length(remote_transports)) {
+ /* we didn't get any candidates and the codec intersection is empty,
+ this means this was not a content-accept message and we couldn't
+ find any suitable codecs, should return error and hang up */
- }
+ }
- g_list_free(codec_intersection);
+ fs_codec_list_destroy(codec_intersection);
+ }
+
if (!strcmp(action, "session-accept")) {
purple_media_got_accept(jabber_jingle_session_get_media(session));
purple_debug_info("jabber", "Got session-accept, starting stream\n");
- gst_element_set_state(purple_media_get_audio_pipeline(session->media),
+ gst_element_set_state(purple_media_get_pipeline(session->media),
GST_STATE_PLAYING);
}
@@ -1204,30 +1222,35 @@ jabber_jingle_session_handle_session_ini
purple_debug_error("jabber", "Jingle session with id={%s} already exists\n", sid);
return;
}
+
session = jabber_jingle_session_create_by_id(js, sid);
- /* init media */
- content = xmlnode_get_child(jingle, "content");
- if (!content) {
- purple_debug_error("jabber", "jingle tag must contain content tag\n");
- /* should send error here */
- return;
- }
+ for (content = xmlnode_get_child(jingle, "content"); content;
+ content = xmlnode_get_next_twin(content)) {
+ /* init media */
+ if (!content) {
+ purple_debug_error("jabber", "jingle tag must contain content tag\n");
+ /* should send error here */
+ return;
+ }
- description = xmlnode_get_child(content, "description");
+ description = xmlnode_get_child(content, "description");
- if (!description) {
- purple_debug_error("jabber", "content tag must contain description tag\n");
- /* we should create an error iq here */
- return;
- }
+ if (!description) {
+ purple_debug_error("jabber", "content tag must contain description tag\n");
+ /* we should create an error iq here */
+ return;
+ }
- transport = xmlnode_get_child(content, "transport");
+ transport = xmlnode_get_child(content, "transport");
- if (!transport) {
- purple_debug_error("jingle", "content tag must contain transport tag\n");
- /* we should create an error iq here */
- return;
+ if (!transport) {
+ purple_debug_error("jingle", "content tag must contain transport tag\n");
+ /* we should create an error iq here */
+ return;
+ }
+
+ jabber_jingle_session_content_create_parse(session, content);
}
if (!jabber_jingle_session_initiate_media_internal(session, initiator, initiator)) {
@@ -1237,12 +1260,31 @@ jabber_jingle_session_handle_session_ini
return;
}
- jabber_jingle_session_content_create_parse(session, jingle);
+ for (content = xmlnode_get_child(jingle, "content"); content;
+ content = xmlnode_get_next_twin(content)) {
+ /* init media */
+ if (!content) {
+ purple_debug_error("jabber", "jingle tag must contain content tag\n");
+ /* should send error here */
+ return;
+ }
- codecs = jabber_jingle_get_codecs(description);
+ description = xmlnode_get_child(content, "description");
- purple_media_set_remote_audio_codecs(session->media, initiator, codecs);
+ if (!description) {
+ purple_debug_error("jabber", "content tag must contain description tag\n");
+ /* we should create an error iq here */
+ return;
+ }
+ codecs = jabber_jingle_get_codecs(description);
+ purple_media_set_remote_codecs(session->media,
+ xmlnode_get_attrib(content, "name"),
+ initiator, codecs);
+ purple_debug_info("jingle", "codec intersection: %i\n",
+ g_list_length(purple_media_get_negotiated_codecs(session->media,
+ xmlnode_get_attrib(content, "name"))));
+ }
jabber_iq_send(jabber_jingle_session_create_ack(js, packet));
jabber_iq_send(jabber_jingle_session_create_session_info(session, "ringing"));
}
@@ -1261,7 +1303,7 @@ jabber_jingle_session_handle_session_ter
/* maybe we should look at the reasoncode to determine if it was
a hangup or a reject, and call different callbacks to purple_media */
- gst_element_set_state(purple_media_get_audio_pipeline(session->media), GST_STATE_NULL);
+ gst_element_set_state(purple_media_get_pipeline(session->media), GST_STATE_NULL);
purple_media_got_hangup(jabber_jingle_session_get_media(session));
jabber_iq_send(jabber_jingle_session_create_ack(js, packet));
@@ -1289,9 +1331,10 @@ jabber_jingle_session_handle_transport_i
/* add candidates to our list of remote candidates */
if (g_list_length(remote_candidates) > 0) {
- purple_media_add_remote_audio_candidates(session->media,
- xmlnode_get_attrib(packet, "from"),
- remote_candidates);
+ purple_media_add_remote_candidates(session->media,
+ xmlnode_get_attrib(content, "name"),
+ xmlnode_get_attrib(packet, "from"),
+ remote_candidates);
fs_candidate_list_destroy(remote_candidates);
}
}
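
(A note on the jingle.c changes above: the audio-specific calls are replaced by per-session variants keyed on the <content> element's "name" attribute. Below is a minimal sketch of feeding one content's remote codecs and candidates into PurpleMedia with the new API. The JingleSession type name and the way remote_codecs/remote_transports get parsed are assumptions; only the purple_media_* calls, the "name" attribute lookup, and the remote-jid helper mirror the diff.)

/* Sketch: apply one <content> element's remote data to an existing
 * PurpleMedia using the per-session (content-name keyed) API. */
static void
apply_remote_content(JingleSession *session, xmlnode *content,
                     GList *remote_codecs, GList *remote_transports)
{
    const char *name = xmlnode_get_attrib(content, "name");
    const char *who  = jabber_jingle_session_get_remote_jid(session);

    /* remote codecs now go to the stream named after this content */
    purple_media_set_remote_codecs(session->media, name, who, remote_codecs);

    /* any transport candidates are added to the same stream */
    if (remote_transports != NULL) {
        purple_media_add_remote_candidates(session->media, name, who,
                                           remote_transports);
        fs_candidate_list_destroy(remote_transports);
    }
}
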
============================================================
--- pidgin/gtkconv.c 5bf396c9468651aa50280e9c3c606c2fdec03db2
+++ pidgin/gtkconv.c 6238b27895efd8c63c56ad05cf5d0427dfb4f499
@@ -7701,7 +7701,7 @@ menu_initiate_audio_video_call_cb(gpoint
PurpleMedia *media =
serv_initiate_media(gc,
purple_conversation_get_name(conv),
- PURPLE_MEDIA_AUDIO & PURPLE_MEDIA_VIDEO);
+ PURPLE_MEDIA_AUDIO | PURPLE_MEDIA_VIDEO);
purple_media_wait(media);
}
@@ -7709,19 +7709,10 @@ pidgin_conv_new_media_cb(PurpleMediaMana
static void
pidgin_conv_new_media_cb(PurpleMediaManager *manager, PurpleMedia *media, gpointer nul)
{
- GstElement *sendbin, *sendlevel;
- GstElement *recvbin, *recvlevel;
-
GtkWidget *gtkmedia;
PurpleConversation *conv;
PidginConversation *gtkconv;
- purple_media_audio_init_src(&sendbin, &sendlevel);
- purple_media_audio_init_recv(&recvbin, &recvlevel);
-
- purple_media_set_audio_src(media, sendbin);
- purple_media_set_audio_sink(media, recvbin);
-
conv = purple_conversation_new(PURPLE_CONV_TYPE_IM,
purple_connection_get_account(purple_media_get_connection(media)),
purple_media_get_screenname(media));
@@ -7729,7 +7720,7 @@ pidgin_conv_new_media_cb(PurpleMediaMana
if (gtkconv->gtkmedia)
gtk_widget_destroy(gtkconv->gtkmedia);
- gtkmedia = pidgin_media_new(media, sendlevel, recvlevel);
+ gtkmedia = pidgin_media_new(media);
gtk_box_pack_start(GTK_BOX(gtkconv->topvbox), gtkmedia, FALSE, FALSE, 0);
gtk_widget_show(gtkmedia);
g_signal_connect(G_OBJECT(gtkmedia), "message", G_CALLBACK(pidgin_gtkmedia_message_cb), conv);
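
(The & to | change in menu_initiate_audio_video_call_cb matters because the media types are bit flags: ANDing two distinct single-bit flags yields 0, i.e. no media requested at all, while ORing them requests a combined audio/video session. The concrete values below are illustrative assumptions; the real constants live in libpurple/media.h.)

/* Illustrative values only, assumed to be single-bit flags. */
#define PURPLE_MEDIA_AUDIO (1 << 0)
#define PURPLE_MEDIA_VIDEO (1 << 1)

/* PURPLE_MEDIA_AUDIO & PURPLE_MEDIA_VIDEO == 0x0  -> requests nothing     */
/* PURPLE_MEDIA_AUDIO | PURPLE_MEDIA_VIDEO == 0x3  -> requests both streams */
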
============================================================
--- pidgin/gtkmedia.c 3fa04f11d5468e8a4c5bcf5b8f000a55ece89145
+++ pidgin/gtkmedia.c 169a4b38a8e634eb113d3b4a0ba2b210a23e59c8
@@ -133,13 +133,13 @@ pidgin_media_class_init (PidginMediaClas
"Send level",
"The GstElement of this media's send 'level'",
GST_TYPE_ELEMENT,
- G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE));
+ G_PARAM_READWRITE));
g_object_class_install_property(gobject_class, PROP_RECV_LEVEL,
g_param_spec_object("recv-level",
"Receive level",
"The GstElement of this media's recv 'level'",
GST_TYPE_ELEMENT,
- G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE));
+ G_PARAM_READWRITE));
pidgin_media_signals[MESSAGE] = g_signal_new("message", G_TYPE_FROM_CLASS(klass),
G_SIGNAL_RUN_LAST, 0, NULL, NULL,
@@ -224,7 +224,7 @@ pidgin_media_disconnect_levels(PurpleMed
static void
pidgin_media_disconnect_levels(PurpleMedia *media, PidginMedia *gtkmedia)
{
- GstElement *element = purple_media_get_audio_pipeline(media);
+ GstElement *element = purple_media_get_pipeline(media);
gulong handler_id = g_signal_handler_find(G_OBJECT(gst_pipeline_get_bus(GST_PIPELINE(element))),
G_SIGNAL_MATCH_FUNC | G_SIGNAL_MATCH_DATA, 0, 0,
NULL, G_CALLBACK(level_message_cb), gtkmedia);
@@ -256,10 +256,40 @@ pidgin_media_ready_cb(PurpleMedia *media
static void
pidgin_media_ready_cb(PurpleMedia *media, PidginMedia *gtkmedia)
{
- GstElement *element = purple_media_get_audio_pipeline(media);
+ GstElement *element = purple_media_get_pipeline(media);
+
+ GstElement *audiosendbin, *audiosendlevel;
+ GstElement *audiorecvbin, *audiorecvlevel;
+ GstElement *videosendbin;
+ GstElement *videorecvbin;
+
+ GList *sessions = purple_media_get_session_names(media);
+
+ purple_media_audio_init_src(&audiosendbin, &audiosendlevel);
+ purple_media_audio_init_recv(&audiorecvbin, &audiorecvlevel);
+
+ purple_media_video_init_src(&videosendbin);
+ purple_media_video_init_recv(&videorecvbin);
+
+ for (; sessions; sessions = sessions->next) {
+ if (purple_media_get_session_type(media, sessions->data) == FS_MEDIA_TYPE_AUDIO) {
+ purple_media_set_src(media, sessions->data, audiosendbin);
+ purple_media_set_sink(media, sessions->data, audiorecvbin);
+ } else if (purple_media_get_session_type(media, sessions->data) == FS_MEDIA_TYPE_VIDEO) {
+ purple_media_set_src(media, sessions->data, videosendbin);
+ purple_media_set_sink(media, sessions->data, videorecvbin);
+ }
+ }
+ g_list_free(sessions);
+
+ if (audiosendlevel && audiorecvlevel) {
+ g_object_set(gtkmedia, "send-level", audiosendlevel,
+ "recv-level", audiorecvlevel,
+ NULL);
+ }
+
gst_bus_add_signal_watch(GST_BUS(gst_pipeline_get_bus(GST_PIPELINE(element))));
g_signal_connect(G_OBJECT(gst_pipeline_get_bus(GST_PIPELINE(element))), "message", G_CALLBACK(level_message_cb), gtkmedia);
- printf("\n\nbus: %p\n", gst_pipeline_get_bus(GST_PIPELINE(element)));
}
static void
@@ -376,11 +406,10 @@ GtkWidget *
}
GtkWidget *
-pidgin_media_new(PurpleMedia *media, GstElement *sendlevel, GstElement *recvlevel)
+pidgin_media_new(PurpleMedia *media)
{
- PidginMedia *gtkmedia = g_object_new(pidgin_media_get_type(), "media", media,
- "send-level", sendlevel,
- "recv-level", recvlevel, NULL);
+ PidginMedia *gtkmedia = g_object_new(pidgin_media_get_type(),
+ "media", media, NULL);
return GTK_WIDGET(gtkmedia);
}
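
(Dropping G_PARAM_CONSTRUCT_ONLY from the send-level/recv-level properties is what allows pidgin_media_ready_cb to attach the level elements after the widget already exists; construct-only properties can only be supplied to g_object_new() and a later g_object_set() would be ignored with a warning. A minimal sketch of the new flow follows; creating the levels directly with gst_element_factory_make("level", ...) is an assumption for illustration, the real code gets them from purple_media_audio_init_src/recv as shown above.)

/* Widget is created without any level elements ... */
GtkWidget *gtkmedia = pidgin_media_new(media);

/* ... and the levels are set later, once the pipeline is ready.  This
 * g_object_set() call is only legal because the properties are no
 * longer construct-only. */
GstElement *sendlevel = gst_element_factory_make("level", NULL);
GstElement *recvlevel = gst_element_factory_make("level", NULL);
g_object_set(gtkmedia, "send-level", sendlevel,
                       "recv-level", recvlevel,
                       NULL);
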
============================================================
--- pidgin/gtkmedia.h 45bc503a761d8b9ef0825d81361cba538637fbd8
+++ pidgin/gtkmedia.h be5ac6f0e1d7ce0e844b111c3c28431761e24b83
@@ -59,7 +59,7 @@ GType pidgin_media_get_type(void);
GType pidgin_media_get_type(void);
-GtkWidget *pidgin_media_new(PurpleMedia *media, GstElement *send_level, GstElement *recv_level);
+GtkWidget *pidgin_media_new(PurpleMedia *media);
G_END_DECLS
============================================================
--- pidgin/gtkprefs.c 2192f7728149877e5496577dc63729c015dcfaed
+++ pidgin/gtkprefs.c 3fca23c1ef9d1fab47d89352b911586966be97d9
@@ -2198,6 +2198,7 @@ media_page()
_("Default"), "gconfvideosrc",
_("Video4Linux"), "v4lsrc",
_("Video4Linux2"), "v4l2src",
+ _("Video Test Source"), "videotestsrc",
NULL);
gtk_size_group_add_widget(sg, dd);
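
(The new "Video Test Source" preference entry maps to GStreamer's stock videotestsrc element, which generates test patterns such as SMPTE color bars and is useful for trying video sessions without a camera. A small sketch for checking that the element is installed, assuming the GStreamer 0.10-era setup the rest of this code uses:)

#include <gst/gst.h>

/* Returns TRUE if the videotestsrc element can be instantiated. */
static gboolean
have_videotestsrc(void)
{
    GstElement *src = gst_element_factory_make("videotestsrc", NULL);
    if (src == NULL)
        return FALSE;
    gst_object_unref(src);
    return TRUE;
}
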