-#ifdef HAVE_GSTREAMER
-
/* note: this requires gstreamer 0.10.x and a big list of plugins. */
/* it's currently hardcoded to use a big-endian alsasink as sink. */
#include <lib/base/ebase.h>
#include <gst/pbutils/missing-plugins.h>
#include <sys/stat.h>
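+/* seconds a http source may stall before sourceTimeout() reports an EOF */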
+#define HTTP_TIMEOUT 10
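+/* caps accepted by the subtitle sink ghost pad: plain and pango text, DVD subpictures and Blu-ray PGS bitmaps */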
+static GstStaticPadTemplate subsinktemplate = GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS("text/plain; text/x-pango-markup; video/x-dvd-subpicture; subpicture/x-pgs"));
+
// eServiceFactoryMP3
eServiceFactoryMP3::eServiceFactoryMP3()
extensions.push_back("mp4");
extensions.push_back("mov");
extensions.push_back("m4a");
- extensions.push_back("m2ts");
sc->addServiceFactory(eServiceFactoryMP3::id, this, extensions);
}
{
m_seekTimeout = eTimer::create(eApp);
m_subtitle_sync_timer = eTimer::create(eApp);
+ m_streamingsrc_timeout = 0;
m_subtitle_hide_timer = eTimer::create(eApp);
m_stream_tags = 0;
m_currentAudioStream = -1;
m_currentTrickRatio = 0;
m_subs_to_pull = 0;
m_buffer_size = 1*1024*1024;
+ m_prev_decoder_time = -1;
+ m_decoder_time_valid_state = 0;
+
CONNECT(m_seekTimeout->timeout, eServiceMP3::seekTimeoutCB);
CONNECT(m_subtitle_sync_timer->timeout, eServiceMP3::pushSubtitles);
CONNECT(m_subtitle_hide_timer->timeout, eServiceMP3::hideSubtitles);
if (!ext)
ext = filename;
- sourceStream sourceinfo;
- sourceinfo.is_video = FALSE;
- sourceinfo.audiotype = atUnknown;
+ m_sourceinfo.is_video = FALSE;
+ m_sourceinfo.audiotype = atUnknown;
if ( (strcasecmp(ext, ".mpeg") && strcasecmp(ext, ".mpg") && strcasecmp(ext, ".vob") && strcasecmp(ext, ".bin") && strcasecmp(ext, ".dat") ) == 0 )
{
- sourceinfo.containertype = ctMPEGPS;
- sourceinfo.is_video = TRUE;
+ m_sourceinfo.containertype = ctMPEGPS;
+ m_sourceinfo.is_video = TRUE;
}
else if ( strcasecmp(ext, ".ts") == 0 )
{
- sourceinfo.containertype = ctMPEGTS;
- sourceinfo.is_video = TRUE;
+ m_sourceinfo.containertype = ctMPEGTS;
+ m_sourceinfo.is_video = TRUE;
}
else if ( strcasecmp(ext, ".mkv") == 0 )
{
- sourceinfo.containertype = ctMKV;
- sourceinfo.is_video = TRUE;
+ m_sourceinfo.containertype = ctMKV;
+ m_sourceinfo.is_video = TRUE;
}
else if ( strcasecmp(ext, ".avi") == 0 || strcasecmp(ext, ".divx") == 0)
{
- sourceinfo.containertype = ctAVI;
- sourceinfo.is_video = TRUE;
+ m_sourceinfo.containertype = ctAVI;
+ m_sourceinfo.is_video = TRUE;
}
else if ( strcasecmp(ext, ".mp4") == 0 || strcasecmp(ext, ".mov") == 0 || strcasecmp(ext, ".m4v") == 0)
{
- sourceinfo.containertype = ctMP4;
- sourceinfo.is_video = TRUE;
+ m_sourceinfo.containertype = ctMP4;
+ m_sourceinfo.is_video = TRUE;
}
else if ( strcasecmp(ext, ".m4a") == 0 )
{
- sourceinfo.containertype = ctMP4;
- sourceinfo.audiotype = atAAC;
+ m_sourceinfo.containertype = ctMP4;
+ m_sourceinfo.audiotype = atAAC;
}
else if ( strcasecmp(ext, ".mp3") == 0 )
- sourceinfo.audiotype = atMP3;
+ m_sourceinfo.audiotype = atMP3;
else if ( (strncmp(filename, "/autofs/", 8) || strncmp(filename+strlen(filename)-13, "/track-", 7) || strcasecmp(ext, ".wav")) == 0 )
- sourceinfo.containertype = ctCDA;
+ m_sourceinfo.containertype = ctCDA;
if ( strcasecmp(ext, ".dat") == 0 )
{
- sourceinfo.containertype = ctVCD;
- sourceinfo.is_video = TRUE;
+ m_sourceinfo.containertype = ctVCD;
+ m_sourceinfo.is_video = TRUE;
}
- if ( (strncmp(filename, "http://", 7)) == 0 || (strncmp(filename, "udp://", 6)) == 0 || (strncmp(filename, "rtp://", 6)) == 0 || (strncmp(filename, "https://", 8)) == 0 || (strncmp(filename, "mms://", 6)) == 0 || (strncmp(filename, "rtsp://", 7)) == 0 )
- sourceinfo.is_streaming = TRUE;
+	if ( (strncmp(filename, "http://", 7)) == 0 || (strncmp(filename, "udp://", 6)) == 0 || (strncmp(filename, "rtp://", 6)) == 0 || (strncmp(filename, "https://", 8)) == 0 || (strncmp(filename, "mms://", 6)) == 0 || (strncmp(filename, "rtsp://", 7)) == 0 || (strncmp(filename, "rtspt://", 8)) == 0 )
+ m_sourceinfo.is_streaming = TRUE;
gchar *uri;
- if ( sourceinfo.is_streaming )
+ if ( m_sourceinfo.is_streaming )
{
uri = g_strdup_printf ("%s", filename);
+		m_streamingsrc_timeout = eTimer::create(eApp);
+ CONNECT(m_streamingsrc_timeout->timeout, eServiceMP3::sourceTimeout);
+
+ std::string config_str;
+ if( ePythonConfigQuery::getConfigValue("config.mediaplayer.useAlternateUserAgent", config_str) == 0 )
+ {
+ if ( config_str == "True" )
+ ePythonConfigQuery::getConfigValue("config.mediaplayer.alternateUserAgent", m_useragent);
+ }
+ if ( m_useragent.length() == 0 )
+ m_useragent = "Dream Multimedia Dreambox Enigma2 Mediaplayer";
}
- else if ( sourceinfo.containertype == ctCDA )
+ else if ( m_sourceinfo.containertype == ctCDA )
{
int i_track = atoi(filename+18);
uri = g_strdup_printf ("cdda://%i", i_track);
}
- else if ( sourceinfo.containertype == ctVCD )
+ else if ( m_sourceinfo.containertype == ctVCD )
{
int fd = open(filename,O_RDONLY);
char tmp[128*1024];
g_object_set (G_OBJECT (m_gst_playbin), "uri", uri, NULL);
- int flags = 0x47; // ( == GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_TEXT )
+ int flags = 0x47; // ( GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_TEXT );
g_object_set (G_OBJECT (m_gst_playbin), "flags", flags, NULL);
g_free(uri);
m_gst_subtitlebin = gst_bin_new("subtitle_bin");
+
+ if ( m_gst_playbin )
+ {
+ GstElement *appsink = gst_element_factory_make("appsink", "subtitle_sink");
- GstElement *appsink = gst_element_factory_make("appsink", "subtitle_sink");
- GstElement *fakesink = gst_element_factory_make("fakesink", "subtitle_fakesink");
-
- if (!appsink)
- eDebug("eServiceMP3::sorry, can't play: missing gst-plugin-appsink");
-// <<<<<<< HEAD
-// else
-// {
-// m_subs_to_pull_handler_id = g_signal_connect (appsink, "new-buffer", G_CALLBACK (gstCBsubtitleAvail), this);
-// g_object_set (G_OBJECT (appsink), "caps", gst_caps_from_string("text/plain; text/x-pango-markup"), NULL);
-// g_object_set (G_OBJECT (m_gst_playbin), "text-sink", appsink, NULL);
-// }
-// =======
-
- GstElement *dvdsubdec = gst_element_factory_make("dvdsubdec", "vobsubtitle_decoder");
- if ( !dvdsubdec )
- eDebug("eServiceMP3::sorry, can't play: missing gst-plugin-dvdsub");
-
- gst_bin_add_many(GST_BIN(m_gst_subtitlebin), dvdsubdec, appsink, fakesink, NULL);
- GstPad *ghostpad = gst_ghost_pad_new("sink", gst_element_get_static_pad (fakesink, "sink"));
-// // GstPad *ghostpad = gst_ghost_pad_new("sink", gst_element_get_static_pad (dvdsubdec, "sink"));
- gst_element_add_pad (m_gst_subtitlebin, ghostpad);
- eDebug("eServiceMP3::construct dvdsubdec=%p, appsink=%p, fakesink=%p, ghostpad=%p,", dvdsubdec, appsink, fakesink, ghostpad);
-
- g_signal_connect (ghostpad, "notify::caps", G_CALLBACK (gstCBsubtitleCAPS), this);
+ if (!appsink)
+ eDebug("eServiceMP3::sorry, can't play: missing gst-plugin-appsink");
- GstCaps* caps = gst_caps_from_string("text/plain; text/x-pango-markup; video/x-raw-rgb");
- g_object_set (G_OBJECT (appsink), "caps", caps, NULL);
- g_object_set (G_OBJECT (dvdsubdec), "singlebuffer", TRUE, NULL);
- gst_caps_unref(caps);
+ GstElement *dvdsubdec = gst_element_factory_make("dvdsubdec", "vobsubtitle_decoder");
+ if ( dvdsubdec )
+ {
+ gst_bin_add_many(GST_BIN(m_gst_subtitlebin), dvdsubdec, appsink, NULL);
+ g_object_set (G_OBJECT (dvdsubdec), "singlebuffer", TRUE, NULL);
+ }
+ else
+ {
+ eDebug("eServiceMP3::missing gst-plugin-dvdsub, no vob subtitle support!");
+ gst_bin_add(GST_BIN(m_gst_subtitlebin), appsink);
+ }
- g_object_set (G_OBJECT (m_gst_playbin), "text-sink", m_gst_subtitlebin, NULL);
- m_subs_to_pull_handler_id = g_signal_connect (appsink, "new-buffer", G_CALLBACK (gstCBsubtitleAvail), this);
+ GstPadTemplate *templ;
+ templ = gst_static_pad_template_get (&subsinktemplate);
+
+ GstPad *ghostpad = gst_ghost_pad_new_no_target_from_template("sink", templ);
+ gst_element_add_pad (m_gst_subtitlebin, ghostpad);
- if ( m_gst_playbin )
- {
+ GstCaps* caps = gst_caps_from_string("text/plain; text/x-pango-markup; video/x-raw-rgb; subpicture/x-pgs");
+ g_object_set (G_OBJECT (appsink), "caps", caps, NULL);
+ gst_caps_unref(caps);
+
+ g_object_set (G_OBJECT (appsink), "async", FALSE, NULL);
+ g_object_set (G_OBJECT (appsink), "sync", TRUE, NULL);
+ g_object_set (G_OBJECT (appsink), "emit-signals", TRUE, NULL);
+ g_object_set (G_OBJECT (appsink), "ts-offset", 0 * GST_SECOND, NULL);
+
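+		/* override the ghost pad's getcaps/acceptcaps/bufferalloc/event/chain functions; the original bufferalloc, chain and event handlers are saved so they can be chained up to */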
+ g_object_set_data (G_OBJECT (ghostpad), "application-instance", this);
+ g_signal_connect (G_OBJECT (ghostpad), "notify::caps", G_CALLBACK (gstGhostpadHasCAPS), this);
+ gst_pad_set_getcaps_function (ghostpad, gstGhostpadGetCAPS);
+ gst_pad_set_acceptcaps_function (ghostpad, gstGhostpadAcceptCAPS);
+ m_ghost_pad_buffer_alloc = GST_PAD_BUFFERALLOCFUNC(ghostpad);
+ m_ghost_pad_chain_function = GST_PAD_CHAINFUNC(ghostpad);
+ m_ghost_pad_subtitle_sink_event = GST_PAD_EVENTFUNC(ghostpad);
+ gst_pad_set_bufferalloc_function (ghostpad, GST_DEBUG_FUNCPTR(gstGhostpadBufferAlloc));
+ gst_pad_set_event_function (ghostpad, GST_DEBUG_FUNCPTR(gstGhostpadSinkEvent));
+ gst_pad_set_chain_function (ghostpad, GST_DEBUG_FUNCPTR(gstGhostpadChainFunction));
+ m_gst_prev_subtitle_caps = gst_caps_new_empty();
+
+ g_object_set (G_OBJECT (m_gst_playbin), "text-sink", m_gst_subtitlebin, NULL);
+ m_subs_to_pull_handler_id = g_signal_connect (appsink, "new-buffer", G_CALLBACK (gstCBsubtitleAvail), this);
+
gst_bus_set_sync_handler(gst_pipeline_get_bus (GST_PIPELINE (m_gst_playbin)), gstBusSyncHandler, this);
char srt_filename[strlen(filename)+1];
strncpy(srt_filename,filename,strlen(filename)-3);
subs.language_code = std::string("und");
m_subtitleStreams.push_back(subs);
}
+ if ( m_sourceinfo.is_streaming )
+ {
+ g_signal_connect (G_OBJECT (m_gst_playbin), "notify::source", G_CALLBACK (gstHTTPSourceSetAgent), this);
+ }
} else
{
m_event((iPlayableService*)this, evUser+12);
}
delete m_subtitle_widget;
+ gst_caps_unref(this->m_gst_prev_subtitle_caps);
// disconnect sync handler callback
gst_bus_set_sync_handler(gst_pipeline_get_bus (GST_PIPELINE (m_gst_playbin)), NULL, NULL);
return 0;
}
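+/* fired when a streaming source did not reach PLAYING within HTTP_TIMEOUT seconds; report an EOF to the player */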
+void eServiceMP3::sourceTimeout()
+{
+ eDebug("eServiceMP3::http source timeout! issuing eof...");
+ m_event((iPlayableService*)this, evEOF);
+}
+
RESULT eServiceMP3::stop()
{
ASSERT(m_state != stIdle);
if (m_state == stStopped)
return -1;
+
+ //GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(m_gst_playbin),GST_DEBUG_GRAPH_SHOW_ALL,"e2-playbin");
eDebug("eServiceMP3::stop %s", m_ref.path.c_str());
gst_element_set_state(m_gst_playbin, GST_STATE_NULL);
if (!(ret = seekToImpl(to)))
{
m_subtitle_pages.clear();
+ m_prev_decoder_time = -1;
+ m_decoder_time_valid_state = 0;
m_subs_to_pull = 0;
}
}
/* pos is in nanoseconds. we have 90 000 pts per second. */
pts = pos / 11111;
- eDebug("gst_element_query_position %lld pts (%lld ms)", pts, pos/1000000);
+// eDebug("gst_element_query_position %lld pts (%lld ms)", pts, pos/1000000);
return 0;
}
type = stSSA;
else if ( !strcmp(g_type, "text/plain") )
type = stPlainText;
+ else if ( !strcmp(g_type, "subpicture/x-pgs") )
+ type = stPGS;
else
eDebug("getSubtitleType::unsupported subtitle caps %s (%s)", g_type, g_codec);
}
source = GST_MESSAGE_SRC(msg);
sourceName = gst_object_get_name(source);
-#if 1
+#if 0
if (gst_message_get_structure(msg))
{
gchar *string = gst_structure_to_string(gst_message_get_structure(msg));
} break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
{
- GstElement *appsink = gst_bin_get_by_name(GST_BIN(m_gst_subtitlebin), "subtitle_sink");
+ GstElement *appsink = gst_bin_get_by_name(GST_BIN(m_gst_subtitlebin), "subtitle_sink");
// GstElement *appsink = gst_bin_get_by_name(GST_BIN(m_gst_playbin), "subtitle_sink");
- if (appsink)
- {
- g_object_set (G_OBJECT (appsink), "max-buffers", 2, NULL);
- g_object_set (G_OBJECT (appsink), "sync", FALSE, NULL);
- g_object_set (G_OBJECT (appsink), "async", FALSE, NULL);
- g_object_set (G_OBJECT (appsink), "emit-signals", TRUE, NULL);
- eDebug("eServiceMP3::appsink properties set!");
- gst_object_unref(appsink);
- }
+ if (appsink)
+ {
+ g_object_set (G_OBJECT (appsink), "max-buffers", 2, NULL);
+ g_object_set (G_OBJECT (appsink), "sync", FALSE, NULL);
+ g_object_set (G_OBJECT (appsink), "emit-signals", TRUE, NULL);
+ eDebug("eServiceMP3::appsink properties set!");
+ gst_object_unref(appsink);
+ }
setAC3Delay(ac3_delay);
setPCMDelay(pcm_delay);
} break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
{
+ if ( m_sourceinfo.is_streaming && m_streamingsrc_timeout )
+ m_streamingsrc_timeout->stop();
} break;
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
{
gchar *g_codec = NULL, *g_lang = NULL;
g_signal_emit_by_name (m_gst_playbin, "get-text-tags", i, &tags);
subtitleStream subs;
- int ret;
+// int ret;
g_lang = g_strdup_printf ("und");
if ( tags && gst_is_tag_list(tags) )
g_free (g_lang);
}
m_event((iPlayableService*)this, evUpdatedEventInfo);
+ break;
}
case GST_MESSAGE_ELEMENT:
{
gst_message_parse_buffering(msg, &(m_bufferInfo.bufferPercent));
gst_message_parse_buffering_stats(msg, &mode, &(m_bufferInfo.avgInRate), &(m_bufferInfo.avgOutRate), &(m_bufferInfo.bufferingLeft));
m_event((iPlayableService*)this, evBuffering);
+ break;
+ }
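+	/* on creation of a souphttpsrc streaming thread: arm the source timeout and pass HTTP_TIMEOUT on to the element itself */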
+ case GST_MESSAGE_STREAM_STATUS:
+ {
+ GstStreamStatusType type;
+ GstElement *owner;
+ gst_message_parse_stream_status (msg, &type, &owner);
+ if ( type == GST_STREAM_STATUS_TYPE_CREATE && m_sourceinfo.is_streaming )
+ {
+ if ( GST_IS_PAD(source) )
+ owner = gst_pad_get_parent_element(GST_PAD(source));
+ else if ( GST_IS_ELEMENT(source) )
+ owner = GST_ELEMENT(source);
+ else
+ owner = 0;
+ if ( owner )
+ {
+ GstElementFactory *factory = gst_element_get_factory(GST_ELEMENT(owner));
+ const gchar *name = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory));
+ if (!strcmp(name, "souphttpsrc"))
+ {
+ m_streamingsrc_timeout->start(HTTP_TIMEOUT*1000, true);
+ g_object_set (G_OBJECT (owner), "timeout", HTTP_TIMEOUT, NULL);
+ eDebug("eServiceMP3::GST_STREAM_STATUS_TYPE_CREATE -> setting timeout on %s to %is", name, HTTP_TIMEOUT);
+ }
+
+ }
+ if ( GST_IS_PAD(source) )
+ gst_object_unref(owner);
+ }
+ break;
}
default:
break;
GstBusSyncReply eServiceMP3::gstBusSyncHandler(GstBus *bus, GstMessage *message, gpointer user_data)
{
eServiceMP3 *_this = (eServiceMP3*)user_data;
- _this->m_pump.send(1);
+ _this->m_pump.send(Message(1));
/* wake */
return GST_BUS_PASS;
}
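+/* playbin2 "notify::source" callback: apply the configured user agent to the freshly created http source element */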
+void eServiceMP3::gstHTTPSourceSetAgent(GObject *object, GParamSpec *unused, gpointer user_data)
+{
+ eServiceMP3 *_this = (eServiceMP3*)user_data;
+ GstElement *source;
+ g_object_get(_this->m_gst_playbin, "source", &source, NULL);
+ g_object_set (G_OBJECT (source), "user-agent", _this->m_useragent.c_str(), NULL);
+ gst_object_unref(source);
+}
+
audiotype_t eServiceMP3::gstCheckAudioPad(GstStructure* structure)
{
if (!structure)
return atUnknown;
}
-void eServiceMP3::gstPoll(const int &msg)
+void eServiceMP3::gstPoll(const Message &msg)
{
- /* ok, we have a serious problem here. gstBusSyncHandler sends
- us the wakup signal, but likely before it was posted.
- the usleep, an EVIL HACK (DON'T DO THAT!!!) works around this.
-
- I need to understand the API a bit more to make this work
- proplerly. */
- if (msg == 1)
+ if (msg.type == 1)
{
GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (m_gst_playbin));
GstMessage *message;
- usleep(1);
- while ((message = gst_bus_pop (bus)))
+		while ((message = gst_bus_pop(bus)))
{
gstBusCall(bus, message);
gst_message_unref (message);
}
}
- else
+ else if (msg.type == 2)
pullSubtitle();
+ else if (msg.type == 3)
+ gstGhostpadHasCAPS_synced(msg.d.pad);
+ else
+ eDebug("gstPoll unhandled Message %d\n", msg.type);
}
eAutoInitPtr<eServiceFactoryMP3> init_eServiceFactoryMP3(eAutoInitNumbers::service+1, "eServiceFactoryMP3");
eServiceMP3 *_this = (eServiceMP3*)user_data;
eSingleLocker l(_this->m_subs_to_pull_lock);
++_this->m_subs_to_pull;
- _this->m_pump.send(2);
+ _this->m_pump.send(Message(2));
}
-void eServiceMP3::gstCBsubtitleCAPS(GObject *obj, GParamSpec *pspec, gpointer user_data)
+gboolean eServiceMP3::gstGhostpadSinkEvent(GstPad * pad, GstEvent * event)
{
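+	/* event handler installed on the subtitle ghost pad: swallows the custom subtitleoverlay flush event and keeps m_gst_subtitle_segment in sync with flush/newsegment events before handing the event to the saved original handler */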
- eDebug("gstCBsubtitleCAPS:: signal::caps callback obj=%p", obj);
+// eDebug("eServiceMP3::gstGhostpadSinkEvent %s", gst_structure_get_name (event->structure));
- eServiceMP3 *_this = (eServiceMP3*)user_data;
- eDebug("gstCBsubtitleCAPS:: m_currentSubtitleStream=%i, m_subtitleStreams.size()=%i", _this->m_currentSubtitleStream, _this->m_subtitleStreams.size());
+// eServiceMP3 *_this = (eServiceMP3*) (gst_pad_get_parent (pad));
+ eServiceMP3 *_this = (eServiceMP3*) g_object_get_data (G_OBJECT (pad), "application-instance");
+ gboolean ret;
+ GstFormat format;
- if ( _this->m_currentSubtitleStream >= _this->m_subtitleStreams.size() )
+ if (GST_EVENT_TYPE (event) == GST_EVENT_CUSTOM_DOWNSTREAM_OOB && event->structure && strcmp (gst_structure_get_name (event->structure), "subtitleoverlay-flush-subtitle") == 0)
{
- eDebug("return");
- return;
- }
-
- subtitleStream subs = _this->m_subtitleStreams[_this->m_currentSubtitleStream];
-
- if ( subs.type == stUnknown )
+ eDebug ("Custom subtitle flush event");
+// GST_SUBTITLE_OVERLAY_LOCK (self);
+// self->subtitle_flush = TRUE;
+// self->subtitle_error = FALSE;
+// if (self->subtitle_block_pad)
+// gst_pad_set_blocked_async_full (self->subtitle_block_pad, TRUE,
+// _pad_blocked_cb, gst_object_ref (self),
+// (GDestroyNotify) gst_object_unref);
+// if (self->video_block_pad)
+// gst_pad_set_blocked_async_full (self->video_block_pad, TRUE,
+// _pad_blocked_cb, gst_object_ref (self),
+// (GDestroyNotify) gst_object_unref);
+// GST_SUBTITLE_OVERLAY_UNLOCK (self);
+//
+ gst_event_unref (event);
+ event = NULL;
+ ret = TRUE;
+ goto out;
+ } else if (GST_EVENT_TYPE (event) == GST_EVENT_NEWSEGMENT)
{
- GstTagList *tags;
- eDebug("gstCBsubtitleCAPS::m_subtitleStreams[%i].type == stUnknown...", _this->m_currentSubtitleStream);
-
- gchar *g_lang;
- g_signal_emit_by_name (_this->m_gst_playbin, "get-text-tags", _this->m_currentSubtitleStream, &tags);
-
- g_lang = g_strdup_printf ("und");
- if ( tags && gst_is_tag_list(tags) )
- gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &g_lang);
- subs.language_code = std::string(g_lang);
+ gst_event_parse_new_segment_full (event, NULL, NULL, NULL, &format, NULL, NULL, NULL);
+ if (_this->m_gst_subtitle_segment.format != GST_FORMAT_UNDEFINED && _this->m_gst_subtitle_segment.format != format)
+ {
+ eDebug("Subtitle segment format changed: %s -> %s", gst_format_get_name(_this->m_gst_subtitle_segment.format), gst_format_get_name(format));
+ gst_segment_init (&_this->m_gst_subtitle_segment, GST_FORMAT_UNDEFINED);
+ }
+ }
- subs.type = getSubtitleType(GST_PAD(obj));
-
- _this->m_subtitleStreams[_this->m_currentSubtitleStream] = subs;
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_STOP:
+ eDebug("Resetting subtitle segment because of flush-stop");
+ gst_segment_init (&_this->m_gst_subtitle_segment, GST_FORMAT_UNDEFINED);
+ /* fall through */
+ case GST_EVENT_FLUSH_START:
+ case GST_EVENT_NEWSEGMENT:
+ case GST_EVENT_EOS:
+// eDebug("GST_EVENT_FLUSH_START GST_EVENT_NEWSEGMENT GST_EVENT_EOS");
+ /* Add our event marker to make sure no events from here go ever outside
+ * the element, they're only interesting for our internal elements */
+// event =
+// GST_EVENT_CAST (gst_mini_object_make_writable (GST_MINI_OBJECT_CAST
+// (event)));
+// if (!event->structure) {
+// event->structure =
+// gst_structure_id_empty_new (_subtitle_overlay_event_marker_id);
+// gst_structure_set_parent_refcount (event->structure,
+// &event->mini_object.refcount);
+// }
+// gst_structure_id_set (event->structure, _subtitle_overlay_event_marker_id,
+// G_TYPE_BOOLEAN, TRUE, NULL);
+ break;
+ default:
+ eDebug("GST_EVENT_TYPE other: %i", GST_EVENT_TYPE (event));
+ break;
+ }
+
+ ret = _this->m_ghost_pad_subtitle_sink_event (pad, gst_event_ref (event));
+// eDebug("original EVENTFUNC returned %i", ret);
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_NEWSEGMENT) {
+ gboolean update;
+ gdouble rate, applied_rate;
+ gint64 start, stop, position;
+
+ GST_DEBUG_OBJECT (pad, "Newsegment event: %" GST_PTR_FORMAT,
+ event->structure);
+ gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
+ &format, &start, &stop, &position);
+
+ GST_DEBUG_OBJECT (pad, "Old subtitle segment: %" GST_SEGMENT_FORMAT,
+ &_this->m_gst_subtitle_segment);
+ if (_this->m_gst_subtitle_segment.format != format) {
+ GST_DEBUG_OBJECT (pad, "Subtitle segment format changed: %s -> %s",
+ gst_format_get_name (_this->m_gst_subtitle_segment.format),
+ gst_format_get_name (format));
+ gst_segment_init (&_this->m_gst_subtitle_segment, format);
+ }
+
+ gst_segment_set_newsegment_full (&_this->m_gst_subtitle_segment, update, rate,
+ applied_rate, format, start, stop, position);
+ GST_DEBUG_OBJECT (pad, "New subtitle segment: %" GST_SEGMENT_FORMAT,
+ &_this->m_gst_subtitle_segment);
+ }
+ gst_event_unref (event);
+//
+out:
+// gst_object_unref (_this);
+ return ret;
+}
+
+GstCaps* eServiceMP3::gstGhostpadGetCAPS(GstPad * pad)
+{
+// eDebug("eServiceMP3::gstGhostpadGetCAPS");
+ return gst_static_pad_template_get_caps(&subsinktemplate);
+}
+
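+/* only accept caps that intersect with subsinktemplate */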
+gboolean eServiceMP3::gstGhostpadAcceptCAPS(GstPad * pad, GstCaps * caps)
+{
+ GstCaps *templ_caps = gst_static_pad_template_get_caps (&subsinktemplate);
+ gboolean ret = gst_caps_can_intersect (templ_caps, caps);
+
+// eDebug("gstGhostpadAcceptCAPS templ=%s, given=%s ret=%i", gst_caps_to_string(templ_caps), gst_caps_to_string(caps), ret);
+ gst_caps_unref (templ_caps);
- g_free (g_lang);
- }
+ return ret;
}
-void eServiceMP3::gstCBsubtitleLink(subtype_t type, gpointer user_data)
+void eServiceMP3::gstGhostpadLink(gpointer user_data, GstCaps * caps)
{
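+	/* retarget the ghost pad according to the new caps: DVD subpictures are routed through dvdsubdec into the appsink, every other subtitle format goes straight to the appsink */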
+ GstStructure *s;
+ GstPad *sinkpad;
eServiceMP3 *_this = (eServiceMP3*)user_data;
+
+ // FIXME: Need to cache events from the ghostpad and pass them forward
+ // now... and keep track of the segment and pass newsegment events
+ // downstream.
+ s = gst_caps_get_structure (caps, 0);
+
+ GstPad *ghostpad = gst_element_get_static_pad(_this->m_gst_subtitlebin, "sink");
+ GstElement *appsink = gst_bin_get_by_name(GST_BIN(_this->m_gst_subtitlebin), "subtitle_sink");
+ GstElement *dvdsubdec = gst_bin_get_by_name(GST_BIN(_this->m_gst_subtitlebin), "vobsubtitle_decoder");
- if ( type == stVOB )
+ gst_ghost_pad_set_target(GST_GHOST_PAD(ghostpad), NULL);
+	if ( dvdsubdec )
+		gst_element_unlink(dvdsubdec, appsink);
+ int ret = -1;
+
+ if ( gst_structure_has_name (s, "video/x-dvd-subpicture") && dvdsubdec )
{
- GstPad *ghostpad = gst_element_get_static_pad(_this->m_gst_subtitlebin, "sink");
- GstElement *dvdsubdec = gst_bin_get_by_name(GST_BIN(_this->m_gst_subtitlebin), "vobsubtitle_decoder");
- GstPad *subdecsinkpad = gst_element_get_static_pad (dvdsubdec, "sink");
- int ret = gst_ghost_pad_set_target((GstGhostPad*)ghostpad, subdecsinkpad);
- GstElement *appsink = gst_bin_get_by_name(GST_BIN(_this->m_gst_subtitlebin), "subtitle_sink");
- ret += gst_element_link(dvdsubdec, appsink);
- eDebug("gstCBsubtitleLink:: dvdsubdec=%p, subdecsinkpad=%p, ghostpad=%p, link=%i", dvdsubdec, subdecsinkpad, ghostpad, ret);
+ sinkpad = gst_element_get_static_pad (dvdsubdec, "sink");
+ ret = gst_element_link_pads (dvdsubdec, "src", appsink, "sink");
+// eDebug("gstGhostpadLink:: dvdsubdec+appsink = %i", ret);
}
- else if ( type < stVOB && type > stUnknown )
+ else
{
- GstPad *ghostpad = gst_element_get_static_pad(_this->m_gst_subtitlebin, "sink");
- GstElement *appsink = gst_bin_get_by_name(GST_BIN(_this->m_gst_subtitlebin), "subtitle_sink");
- GstPad *appsinkpad = gst_element_get_static_pad (appsink, "sink");
- GstElement *dvdsubdec = gst_bin_get_by_name(GST_BIN(_this->m_gst_subtitlebin), "vobsubtitle_decoder");
- gst_element_unlink(dvdsubdec, appsink);
- int ret = gst_ghost_pad_set_target((GstGhostPad*)ghostpad, appsinkpad);
- eDebug("gstCBsubtitleLink:: appsink=%p, appsinkpad=%p, ghostpad=%p, link=%i", appsink, appsinkpad, ghostpad, ret);
+ sinkpad = gst_element_get_static_pad (appsink, "sink");
+// eDebug("gstGhostpadLink:: appsink");
}
- else
+
+ gst_ghost_pad_set_target (GST_GHOST_PAD(ghostpad), sinkpad);
+}
+
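+/* bufferalloc on the ghost pad: if the caps changed since the last buffer, relink the internal elements before delegating to the original bufferalloc function */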
+GstFlowReturn eServiceMP3::gstGhostpadBufferAlloc(GstPad *pad, guint64 offset, guint size, GstCaps *caps, GstBuffer **buf)
+{
+ eServiceMP3 *_this = (eServiceMP3*) g_object_get_data (G_OBJECT (pad), "application-instance");
+
+// eDebug("eServiceMP3::gstGhostpadBufferAlloc prevcaps=%s newcaps=%s", gst_caps_to_string(_this->m_gst_prev_subtitle_caps), gst_caps_to_string(caps));
+ if (!GST_PAD_CAPS (pad) || !gst_caps_is_equal (_this->m_gst_prev_subtitle_caps, caps))
+ gstGhostpadLink (_this, caps);
+
+ return _this->m_ghost_pad_buffer_alloc (pad, offset, size, caps, buf);
+}
+
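+/* "notify::caps" callback, runs in a streaming thread; defer the actual work to the main thread via the message pump */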
+void eServiceMP3::gstGhostpadHasCAPS(GstPad *pad, GParamSpec * unused, gpointer user_data)
+{
+ eServiceMP3 *_this = (eServiceMP3*)user_data;
+
+ gst_object_ref (pad);
+
+ _this->m_pump.send(Message(3, pad));
+}
+
+// after messagepump
+void eServiceMP3::gstGhostpadHasCAPS_synced(GstPad *pad)
+{
+ GstCaps *caps;
+
+ g_object_get (G_OBJECT (pad), "caps", &caps, NULL);
+
+// eDebug("gstGhostpadHasCAPS:: signal::caps = %s", gst_caps_to_string(caps));
+
+ if (caps)
{
- GstPad *ghostpad = gst_element_get_static_pad(_this->m_gst_subtitlebin, "sink");
- GstElement *fakesink = gst_bin_get_by_name(GST_BIN(_this->m_gst_subtitlebin), "subtitle_fakesink");
- GstPad *fakesinkpad = gst_element_get_static_pad (fakesink, "sink");
- int ret = gst_ghost_pad_set_target((GstGhostPad*)ghostpad, fakesinkpad);
- eDebug("gstCBsubtitleLink:: unsupported subtitles ... throwing them into fakesink");
+ subtitleStream subs;
+
+// eDebug("gstGhostpadHasCAPS_synced %p %d", pad, m_subtitleStreams.size());
+
+		if (!m_subtitleStreams.empty() && m_currentSubtitleStream >= 0 && m_currentSubtitleStream < (int)m_subtitleStreams.size())
+ subs = m_subtitleStreams[m_currentSubtitleStream];
+ else {
+ subs.type = stUnknown;
+ subs.pad = pad;
+ }
+
+ if ( subs.type == stUnknown )
+ {
+ GstTagList *tags;
+// eDebug("gstGhostpadHasCAPS::m_subtitleStreams[%i].type == stUnknown...", m_currentSubtitleStream);
+
+ gchar *g_lang;
+ g_signal_emit_by_name (m_gst_playbin, "get-text-tags", m_currentSubtitleStream, &tags);
+
+ g_lang = g_strdup_printf ("und");
+ if ( tags && gst_is_tag_list(tags) )
+ gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &g_lang);
+
+ subs.language_code = std::string(g_lang);
+ GstPad *ghostpad = gst_element_get_static_pad(m_gst_subtitlebin, "sink");
+ subs.type = getSubtitleType(ghostpad);
+
+			if (!m_subtitleStreams.empty() && m_currentSubtitleStream >= 0 && m_currentSubtitleStream < (int)m_subtitleStreams.size())
+ m_subtitleStreams[m_currentSubtitleStream] = subs;
+ else
+ m_subtitleStreams.push_back(subs);
+
+ g_free (g_lang);
+ }
+
+// eDebug("gstGhostpadHasCAPS:: m_gst_prev_subtitle_caps=%s equal=%i",gst_caps_to_string(m_gst_prev_subtitle_caps),gst_caps_is_equal(m_gst_prev_subtitle_caps, caps));
+
+ if (!GST_PAD_CAPS (pad) || !gst_caps_is_equal (m_gst_prev_subtitle_caps, caps))
+ gstGhostpadLink(this, caps);
+
+ m_gst_prev_subtitle_caps = gst_caps_copy(caps);
+
+ gst_caps_unref (caps);
}
+
+ gst_object_unref (pad);
}
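+/* chain function on the ghost pad; passes the buffer on to the saved original chain function (the local buffer copy is only used by the commented-out debug output) */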
+GstFlowReturn eServiceMP3::gstGhostpadChainFunction(GstPad * pad, GstBuffer * buffer)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ eServiceMP3 *_this = (eServiceMP3*)g_object_get_data (G_OBJECT (pad), "application-instance");
+
+// gint64 buf_pos = GST_BUFFER_TIMESTAMP(buffer);
+// gint64 duration_ns = GST_BUFFER_DURATION(buffer);
+ size_t len = GST_BUFFER_SIZE(buffer);
+
+ unsigned char line[len+1];
+ memcpy(line, GST_BUFFER_DATA(buffer), len);
+ line[len] = 0;
+// eDebug("gstGhostpadChainFunction buffer: '%s' caps: %s ", line, gst_caps_to_string(GST_BUFFER_CAPS(buffer)));
+
+ ret = _this->m_ghost_pad_chain_function(pad, buffer);
+// eDebug("original chain func returns %i", ret);
+ return ret;
+}
+
+
+// void eServiceMP3::gstCBsubtitleLink(GObject *obj, GParamSpec *pspec, gpointer user_data)
+// {
+//
+// eServiceMP3 *_this = (eServiceMP3*)user_data;
+// eDebug("gstCBsubtitleCAPS:: m_currentSubtitleStream=%i, m_subtitleStreams.size()=%i", _this->m_currentSubtitleStream, _this->m_subtitleStreams.size());
+//
+// if ( _this->m_currentSubtitleStream >= (int)_this->m_subtitleStreams.size() )
+// {
+// eDebug("return invalid stream count");
+// return;
+// }
+//
+// subtitleStream subs = _this->m_subtitleStreams[_this->m_currentSubtitleStream];
+//
+// if ( subs.type == stUnknown )
+// {
+// GstTagList *tags;
+// eDebug("gstCBsubtitleCAPS::m_subtitleStreams[%i].type == stUnknown...", _this->m_currentSubtitleStream);
+//
+// gchar *g_lang;
+// g_signal_emit_by_name (_this->m_gst_playbin, "get-text-tags", _this->m_currentSubtitleStream, &tags);
+//
+// g_lang = g_strdup_printf ("und");
+// if ( tags && gst_is_tag_list(tags) )
+// gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &g_lang);
+// subs.language_code = std::string(g_lang);
+//
+// subs.type = getSubtitleType(GST_PAD(obj));
+//
+// _this->m_subtitleStreams[_this->m_currentSubtitleStream] = subs;
+//
+// g_free (g_lang);
+// }
+//
+// gstCBsubtitleLink(subs.type, _this);
+// }
+
+// void eServiceMP3::gstCBsubtitleLink(subtype_t type, gpointer user_data)
+// {
+// eServiceMP3 *_this = (eServiceMP3*)user_data;
+//
+// if ( type == stVOB )
+// {
+// GstPad *ghostpad = gst_element_get_static_pad(_this->m_gst_subtitlebin, "sink");
+// GstElement *dvdsubdec = gst_bin_get_by_name(GST_BIN(_this->m_gst_subtitlebin), "vobsubtitle_decoder");
+// GstPad *subdecsinkpad = gst_element_get_static_pad (dvdsubdec, "sink");
+// int ret = gst_ghost_pad_set_target((GstGhostPad*)ghostpad, subdecsinkpad);
+// GstElement *appsink = gst_bin_get_by_name(GST_BIN(_this->m_gst_subtitlebin), "subtitle_sink");
+// ret += gst_element_link(dvdsubdec, appsink);
+// eDebug("gstCBsubtitleLink:: dvdsubdec=%p, subdecsinkpad=%p, ghostpad=%p, set target & link=%i", dvdsubdec, subdecsinkpad, ghostpad, ret);
+// }
+// else if ( type < stVOB && type > stUnknown )
+// {
+// GstPad *ghostpad = gst_element_get_static_pad(_this->m_gst_subtitlebin, "sink");
+// GstElement *appsink = gst_bin_get_by_name(GST_BIN(_this->m_gst_subtitlebin), "subtitle_sink");
+// GstPad *appsinkpad = gst_element_get_static_pad (appsink, "sink");
+// GstElement *dvdsubdec = gst_bin_get_by_name(GST_BIN(_this->m_gst_subtitlebin), "vobsubtitle_decoder");
+// gst_element_unlink(dvdsubdec, appsink);
+// int ret = gst_ghost_pad_set_target((GstGhostPad*)ghostpad, appsinkpad);
+// eDebug("gstCBsubtitleLink:: appsink=%p, appsinkpad=%p, ghostpad=%p, set target=%i", appsink, appsinkpad, ghostpad, ret);
+// }
+// else
+// {
+// eDebug("gstCBsubtitleLink:: unsupported subtitles");
+// }
+// }
+/*
+gboolean eServiceMP3::gstCBsubtitleDrop(GstPad *pad, GstBuffer *buffer, gpointer user_data)
+{
+ eDebug("gstCBsubtitleDrop");
+
+ gint64 buf_pos = GST_BUFFER_TIMESTAMP(buffer);
+ gint64 duration_ns = GST_BUFFER_DURATION(buffer);
+ size_t len = GST_BUFFER_SIZE(buffer);
+
+ unsigned char line[len+1];
+ memcpy(line, GST_BUFFER_DATA(buffer), len);
+ line[len] = 0;
+ eDebug("dropping buffer '%s' ", line);
+ return false;
+}*/
+
+
void eServiceMP3::pullSubtitle()
{
GstElement *appsink = gst_bin_get_by_name(GST_BIN(m_gst_subtitlebin), "subtitle_sink");
if ( m_subtitleStreams[m_currentSubtitleStream].type < stVOB )
{
unsigned char line[len+1];
+ SubtitlePage page;
memcpy(line, GST_BUFFER_DATA(buffer), len);
line[len] = 0;
eDebug("got new text subtitle @ buf_pos = %lld ns (in pts=%lld): '%s' ", buf_pos, buf_pos/11111, line);
- ePangoSubtitlePage* page = new ePangoSubtitlePage;
gRGB rgbcol(0xD0,0xD0,0xD0);
- page->m_elements.push_back(ePangoSubtitlePageElement(rgbcol, (const char*)line));
- page->show_pts = buf_pos / 11111L;
- page->m_timeout = duration_ns / 1000000;
- SubtitlePage subtitlepage;
- subtitlepage.pango_page = page;
- subtitlepage.vob_page = NULL;
- m_subtitle_pages.push_back(subtitlepage);
- pushSubtitles();
+ page.type = SubtitlePage::Pango;
+ page.pango_page.m_elements.push_back(ePangoSubtitlePageElement(rgbcol, (const char*)line));
+ page.pango_page.m_show_pts = buf_pos / 11111L;
+ page.pango_page.m_timeout = duration_ns / 1000000;
+ m_subtitle_pages.push_back(page);
+ if (m_subtitle_pages.size()==1)
+ pushSubtitles();
}
- else
+ else if ( m_subtitleStreams[m_currentSubtitleStream].type == stVOB )
{
+ SubtitlePage page;
eDebug("got new subpicture @ buf_pos = %lld ns (in pts=%lld), duration=%lld ns, len=%i bytes. ", buf_pos, buf_pos/11111, duration_ns, len);
- eVobSubtitlePage* page = new eVobSubtitlePage;
- eSize size = eSize(720, 576);
- page->m_pixmap = new gPixmap(size, 32, 0);
- // ePtr<gPixmap> pixmap;
- // pixmap = new gPixmap(size, 32, 1); /* allocate accel surface (if possible) */
- memcpy(page->m_pixmap->surface->data, GST_BUFFER_DATA(buffer), len);
- page->show_pts = buf_pos / 11111L;
- page->m_timeout = duration_ns / 1000;
- SubtitlePage subtitlepage;
- subtitlepage.vob_page = page;
- subtitlepage.pango_page = NULL;
- m_subtitle_pages.push_back(subtitlepage);
- pushSubtitles();
+ page.type = SubtitlePage::Vob;
+ page.vob_page.m_pixmap = new gPixmap(eSize(720, 576), 32, 1);
+ memcpy(page.vob_page.m_pixmap->surface->data, GST_BUFFER_DATA(buffer), len);
+ page.vob_page.m_show_pts = buf_pos / 11111L;
+ page.vob_page.m_timeout = duration_ns / 1000;
+ m_subtitle_pages.push_back(page);
+ if (m_subtitle_pages.size()==1)
+ pushSubtitles();
+ }
+ else
+ {
+ eDebug("unsupported subpicture... ignoring");
}
}
gst_buffer_unref(buffer);
void eServiceMP3::pushSubtitles()
{
- pts_t running_pts;
while ( !m_subtitle_pages.empty() )
{
- SubtitlePage frontpage = m_subtitle_pages.front();
- gint64 diff_ms;
-
+ SubtitlePage &frontpage = m_subtitle_pages.front();
+ pts_t running_pts;
+ gint64 diff_ms = 0;
+ gint64 show_pts;
+
getPlayPosition(running_pts);
-
- if ( frontpage.pango_page != 0 )
- {
- diff_ms = ( frontpage.pango_page->show_pts - running_pts ) / 90;
- eDebug("eServiceMP3::pushSubtitles TEXT show_pts = %lld running_pts = %lld diff = %lld", frontpage.pango_page->show_pts, running_pts, diff_ms);
- }
-
- if ( frontpage.vob_page != 0 )
- {
- diff_ms = ( frontpage.vob_page->show_pts - running_pts ) / 90;
- eDebug("eServiceMP3::pushSubtitles VOB show_pts = %lld running_pts = %lld diff = %lld", frontpage.vob_page->show_pts, running_pts, diff_ms);
+
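+		/* only trust the decoder position once it has changed across a few consecutive polls (m_prev_decoder_time is reset on seek and on subtitle switch); until then re-check every 25ms */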
+ if (m_decoder_time_valid_state < 4) {
+ ++m_decoder_time_valid_state;
+ if (m_prev_decoder_time == running_pts)
+ m_decoder_time_valid_state = 0;
+ if (m_decoder_time_valid_state < 4) {
+// if (m_decoder_time_valid_state)
+// eDebug("%d: decoder time not valid! prev %lld, now %lld\n", m_decoder_time_valid_state, m_prev_decoder_time/90, running_pts/90);
+// else
+// eDebug("%d: decoder time not valid! now %lld\n", m_decoder_time_valid_state, running_pts/90);
+ m_subtitle_sync_timer->start(25, true);
+ m_prev_decoder_time = running_pts;
+ break;
+ }
}
-
+
+ if (frontpage.type == SubtitlePage::Pango)
+ show_pts = frontpage.pango_page.m_show_pts;
+ else
+ show_pts = frontpage.vob_page.m_show_pts;
+
+ diff_ms = ( show_pts - running_pts ) / 90;
+ eDebug("check subtitle: decoder: %lld, show_pts: %lld, diff: %lld ms", running_pts/90, show_pts/90, diff_ms);
+
if ( diff_ms < -100 )
{
- GstFormat fmt = GST_FORMAT_TIME;
- gint64 now;
- if ( gst_element_query_position(m_gst_playbin, &fmt, &now) != -1 )
- {
- now /= 11111;
- diff_ms = abs((now - running_pts) / 90);
- eDebug("diff < -100ms check decoder/pipeline diff: decoder: %lld, pipeline: %lld, diff: %lld", running_pts, now, diff_ms);
- if (diff_ms > 100000)
- {
- eDebug("high decoder/pipeline difference.. assume decoder has now started yet.. check again in 1sec");
- m_subtitle_sync_timer->start(1000, true);
- break;
- }
- }
- else
- eDebug("query position for decoder/pipeline check failed!");
- eDebug("subtitle to late... drop");
+ eDebug("subtitle too late... drop");
m_subtitle_pages.pop_front();
}
else if ( diff_ms > 20 )
{
- eDebug("start recheck timer");
- m_subtitle_sync_timer->start(diff_ms > 1000 ? 1000 : diff_ms, true);
+ eDebug("start timer");
+ m_subtitle_sync_timer->start(diff_ms, true);
break;
}
else // immediate show
{
if ( m_subtitle_widget )
{
- if ( frontpage.pango_page != 0)
- {
- eDebug("immediate show pango subtitle line");
- m_subtitle_widget->setPage(*(frontpage.pango_page));
- }
- else if ( frontpage.vob_page != 0)
+ eDebug("show!\n");
+ if ( frontpage.type == SubtitlePage::Pango)
+ m_subtitle_widget->setPage(frontpage.pango_page);
+ else
{
- m_subtitle_widget->setPixmap(frontpage.vob_page->m_pixmap, eRect(0, 0, 720, 576));
- eDebug("blit vobsub pixmap... hide in %i ms", frontpage.vob_page->m_timeout);
- m_subtitle_hide_timer->start(frontpage.vob_page->m_timeout, true);
+ m_subtitle_widget->setPixmap(frontpage.vob_page.m_pixmap, eRect(0, 0, 720, 576));
+ eDebug("blit vobsub pixmap... hide in %i ms", frontpage.vob_page.m_timeout);
+ m_subtitle_hide_timer->start(frontpage.vob_page.m_timeout, true);
}
m_subtitle_widget->show();
}
void eServiceMP3::hideSubtitles()
{
- eDebug("eServiceMP3::hideSubtitles()");
+// eDebug("eServiceMP3::hideSubtitles()");
if ( m_subtitle_widget )
m_subtitle_widget->hide();
}
RESULT eServiceMP3::enableSubtitles(eWidget *parent, ePyObject tuple)
{
- eDebug ("eServiceMP3::enableSubtitles m_currentSubtitleStream=%i",m_currentSubtitleStream);
+// eDebug ("eServiceMP3::enableSubtitles m_currentSubtitleStream=%i this=%p",m_currentSubtitleStream, this);
ePyObject entry;
int tuplesize = PyTuple_Size(tuple);
int pid, type;
gint text_pid = 0;
eSingleLocker l(m_subs_to_pull_lock);
+// GstPad *pad = 0;
+// g_signal_emit_by_name (m_gst_playbin, "get-text-pad", m_currentSubtitleStream, &pad);
+// gst_element_get_static_pad(m_gst_subtitlebin, "sink");
+// gulong subprobe_handler_id = gst_pad_add_buffer_probe (pad, G_CALLBACK (gstCBsubtitleDrop), NULL);
+
if (!PyTuple_Check(tuple))
goto error_out;
if (tuplesize < 1)
goto error_out;
type = PyInt_AsLong(entry);
- eDebug ("eServiceMP3::enableSubtitles new pid=%i",pid);
-// if (m_currentSubtitleStream != pid)
-// {
-
+// eDebug ("eServiceMP3::enableSubtitles new pid=%i",pid);
+ if (m_currentSubtitleStream != pid)
+ {
g_object_set (G_OBJECT (m_gst_playbin), "current-text", pid, NULL);
-eDebug ("eServiceMP3::enableSubtitles g_object_set");
+// eDebug ("eServiceMP3::enableSubtitles g_object_set current-text = %i", pid);
m_currentSubtitleStream = pid;
m_subs_to_pull = 0;
+ m_prev_decoder_time = -1;
m_subtitle_pages.clear();
-eDebug ("eServiceMP3::enableSubtitles cleared");
-// }
-
- gstCBsubtitleLink(m_subtitleStreams[m_currentSubtitleStream].type, this);
+ }
m_subtitle_widget = 0;
m_subtitle_widget = new eSubtitleWidget(parent);
g_object_get (G_OBJECT (m_gst_playbin), "current-text", &text_pid, NULL);
eDebug ("eServiceMP3::switched to subtitle stream %i", text_pid);
+// gst_pad_remove_buffer_probe (pad, subprobe_handler_id);
+
m_event((iPlayableService*)this, evUpdatedInfo);
return 0;
{
eDebug("eServiceMP3::disableSubtitles");
m_subtitle_pages.clear();
- eDebug("eServiceMP3::disableSubtitles cleared");
delete m_subtitle_widget;
- eDebug("eServiceMP3::disableSubtitles deleted");
m_subtitle_widget = 0;
- eDebug("eServiceMP3::disableSubtitles nulled");
return 0;
}
PyObject *eServiceMP3::getSubtitleList()
{
- eDebug("eServiceMP3::getSubtitleList");
+// eDebug("eServiceMP3::getSubtitleList");
ePyObject l = PyList_New(0);
int stream_idx = 0;
{
subtype_t type = IterSubtitleStream->type;
ePyObject tuple = PyTuple_New(5);
- eDebug("eServiceMP3::getSubtitleList idx=%i type=%i, code=%s", stream_idx, int(type), (IterSubtitleStream->language_code).c_str());
+// eDebug("eServiceMP3::getSubtitleList idx=%i type=%i, code=%s", stream_idx, int(type), (IterSubtitleStream->language_code).c_str());
PyTuple_SET_ITEM(tuple, 0, PyInt_FromLong(2));
PyTuple_SET_ITEM(tuple, 1, PyInt_FromLong(stream_idx));
PyTuple_SET_ITEM(tuple, 2, PyInt_FromLong(int(type)));
}
}
-#else
-#warning gstreamer not available, not building media player
-#endif