extensions.push_back("mp4");
extensions.push_back("mov");
extensions.push_back("m4a");
+ extensions.push_back("m2ts");
sc->addServiceFactory(eServiceFactoryMP3::id, this, extensions);
}
RESULT eServiceFactoryMP3::play(const eServiceReference &ref, ePtr<iPlayableService> &ptr)
{
// check resources...
- ptr = new eServiceMP3(ref.path.c_str(),ref.getName().c_str());
+ ptr = new eServiceMP3(ref);
return 0;
}
// eServiceMP3
-eServiceMP3::eServiceMP3(const char *filename, const char *title): m_filename(filename), m_title(title), m_pump(eApp, 1)
+eServiceMP3::eServiceMP3(eServiceReference ref)
+ :m_ref(ref), m_pump(eApp, 1)
{
m_seekTimeout = eTimer::create(eApp);
m_subtitle_sync_timer = eTimer::create(eApp);
m_currentSubtitleStream = 0;
m_subtitle_widget = 0;
m_currentTrickRatio = 0;
+ m_subs_to_pull = 0;
+ m_buffer_size = 1*1024*1024;
CONNECT(m_seekTimeout->timeout, eServiceMP3::seekTimeoutCB);
CONNECT(m_subtitle_sync_timer->timeout, eServiceMP3::pushSubtitles);
CONNECT(m_pump.recv_msg, eServiceMP3::gstPoll);
m_state = stIdle;
eDebug("eServiceMP3::construct!");
-
+
+ const char *filename = m_ref.path.c_str();
const char *ext = strrchr(filename, '.');
if (!ext)
ext = filename;
sourceinfo.containertype = ctVCD;
sourceinfo.is_video = TRUE;
}
- if ( (strncmp(filename, "http://", 7)) == 0 || (strncmp(filename, "udp://", 6)) == 0 || (strncmp(filename, "rtsp://", 7)) == 0 )
+ if ( (strncmp(filename, "http://", 7)) == 0 || (strncmp(filename, "udp://", 6)) == 0 || (strncmp(filename, "rtp://", 6)) == 0 || (strncmp(filename, "https://", 8)) == 0 || (strncmp(filename, "mms://", 6)) == 0 || (strncmp(filename, "rtsp://", 7)) == 0 )
sourceinfo.is_streaming = TRUE;
gchar *uri;
eDebug("eServiceMP3::sorry, can't play: missing gst-plugin-appsink");
else
{
- g_object_set (G_OBJECT (subsink), "emit-signals", TRUE, NULL);
g_signal_connect (subsink, "new-buffer", G_CALLBACK (gstCBsubtitleAvail), this);
g_object_set (G_OBJECT (m_gst_playbin), "text-sink", subsink, NULL);
}
}
gst_element_set_state (m_gst_playbin, GST_STATE_PLAYING);
+ setBufferSize(m_buffer_size);
}
eServiceMP3::~eServiceMP3()
ASSERT(m_state != stIdle);
if (m_state == stStopped)
return -1;
- eDebug("eServiceMP3::stop %s", m_filename.c_str());
+ eDebug("eServiceMP3::stop %s", m_ref.path.c_str());
gst_element_set_state(m_gst_playbin, GST_STATE_NULL);
m_state = stStopped;
return 0;
RESULT eServiceMP3::seekTo(pts_t to)
{
- m_subtitle_pages.clear();
-
if (!m_gst_playbin)
return -1;
eDebug("eServiceMP3::seekTo failed");
return -1;
}
+
+ m_subtitle_pages.clear();
+ eSingleLocker l(m_subs_to_pull_lock);
+ m_subs_to_pull = 0;
+
return 0;
}
RESULT eServiceMP3::getPlayPosition(pts_t &pts)
{
+ GstFormat fmt = GST_FORMAT_TIME;
+ /* initialize: the "get-decoder-time" signal emission may leave pos
+    untouched when the sink cannot answer, so never read it uninitialized */
+ gint64 pos = 0;
+ GstElement *sink;
+ pts = 0;
+
if (!m_gst_playbin)
return -1;
if (m_state != stRunning)
return -1;
- GstFormat fmt = GST_FORMAT_TIME;
- gint64 len;
-
- if (!gst_element_query_position(m_gst_playbin, &fmt, &len))
+ g_object_get (G_OBJECT (m_gst_playbin), "audio-sink", &sink, NULL);
+
+ if (!sink)
+ g_object_get (G_OBJECT (m_gst_playbin), "video-sink", &sink, NULL);
+
+ if (!sink)
return -1;
- /* len is in nanoseconds. we have 90 000 pts per second. */
- pts = len / 11111;
+ gchar *name = gst_element_get_name(sink);
+ gboolean use_get_decoder_time = strstr(name, "dvbaudiosink") || strstr(name, "dvbvideosink");
+ g_free(name);
+
+ if (use_get_decoder_time)
+ g_signal_emit_by_name(sink, "get-decoder-time", &pos);
+
+ gst_object_unref(sink);
+
+ if (!use_get_decoder_time && !gst_element_query_position(m_gst_playbin, &fmt, &pos)) {
+ eDebug("gst_element_query_position failed in getPlayPosition");
+ return -1;
+ }
+
+ /* pos is in nanoseconds. we have 90 000 pts per second. */
+ pts = pos / 11111;
return 0;
}
RESULT eServiceMP3::getName(std::string &name)
{
- if (m_title.empty())
+ std::string title = m_ref.getName();
+ if (title.empty())
{
- name = m_filename;
+ name = m_ref.path;
size_t n = name.rfind('/');
if (n != std::string::npos)
name = name.substr(n + 1);
}
else
- name = m_title;
+ name = title;
return 0;
}
switch (w)
{
+ case sServiceref: return m_ref;
case sVideoHeight: return m_height;
case sVideoWidth: return m_width;
case sFrameRate: return m_framerate;
else
eDebug("eServiceMP3::gst_message from %s: %s (without structure)", sourceName, GST_MESSAGE_TYPE_NAME(msg));
#endif
- if ( GST_MESSAGE_TYPE (msg) == GST_MESSAGE_STATE_CHANGED )
+ switch (GST_MESSAGE_TYPE (msg))
{
- // only the pipeline message
- if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
- return;
-
- GstState old_state, new_state;
- gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
-
- if(old_state == new_state)
- return;
-
- eDebug("eServiceMP3::state transition %s -> %s", gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
-
- GstStateChange transition = (GstStateChange)GST_STATE_TRANSITION(old_state, new_state);
-
- switch(transition)
+ case GST_MESSAGE_EOS:
+ m_event((iPlayableService*)this, evEOF);
+ break;
+ case GST_MESSAGE_STATE_CHANGED:
{
- case GST_STATE_CHANGE_NULL_TO_READY:
- {
- }
+ if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
break;
- case GST_STATE_CHANGE_READY_TO_PAUSED:
- {
-
- } break;
- case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
- {
- } break;
- case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
- {
+ GstState old_state, new_state;
+ gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
+
+ if(old_state == new_state)
+ break;
- } break;
- case GST_STATE_CHANGE_PAUSED_TO_READY:
- {
-
- } break;
- case GST_STATE_CHANGE_READY_TO_NULL:
+ eDebug("eServiceMP3::state transition %s -> %s", gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
+
+ GstStateChange transition = (GstStateChange)GST_STATE_TRANSITION(old_state, new_state);
+
+ switch(transition)
{
-
- } break;
- }
- }
-
- switch (GST_MESSAGE_TYPE (msg))
- {
- case GST_MESSAGE_EOS:
- m_event((iPlayableService*)this, evEOF);
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ {
+ } break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ {
+ GstElement *sink;
+ g_object_get (G_OBJECT (m_gst_playbin), "text-sink", &sink, NULL);
+ if (sink)
+ {
+ g_object_set (G_OBJECT (sink), "max-buffers", 2, NULL);
+ g_object_set (G_OBJECT (sink), "sync", FALSE, NULL);
+ g_object_set (G_OBJECT (sink), "async", FALSE, NULL);
+ g_object_set (G_OBJECT (sink), "emit-signals", TRUE, NULL);
+ gst_object_unref(sink);
+ }
+ } break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ {
+ } break;
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ {
+ } break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ {
+ } break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ {
+ } break;
+ }
break;
+ }
case GST_MESSAGE_ERROR:
{
gchar *debug;
GError *err;
-
gst_message_parse_error (msg, &err, &debug);
g_free (debug);
eWarning("Gstreamer error: %s (%i) from %s", err->message, err->code, sourceName );
if ( err->domain == GST_STREAM_ERROR )
{
+ eDebug("err->code %d", err->code);
if ( err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND )
{
if ( g_strrstr(sourceName, "videosink") )
eDebug("eServiceMP3::/tmp/.id3coverart %d bytes written ", ret);
m_event((iPlayableService*)this, evUser+13);
}
-
gst_tag_list_free(tags);
m_event((iPlayableService*)this, evUpdatedInfo);
break;
}
case GST_MESSAGE_ASYNC_DONE:
{
+ if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
+ break;
+
GstTagList *tags;
gint i, active_idx, n_video = 0, n_audio = 0, n_text = 0;
GstPad* pad = 0;
g_signal_emit_by_name (m_gst_playbin, "get-audio-pad", i, &pad);
GstCaps* caps = gst_pad_get_negotiated_caps(pad);
+ if (!caps)
+ continue;
GstStructure* str = gst_caps_get_structure(caps, 0);
-gchar *g_type;
-g_type = gst_structure_get_name(str);
-eDebug("AUDIO STRUCT=%s", g_type);
+ gchar *g_type;
+ g_type = gst_structure_get_name(str);
+ eDebug("AUDIO STRUCT=%s", g_type);
audio.type = gstCheckAudioPad(str);
g_codec = g_strdup(g_type);
g_lang = g_strdup_printf ("und");
{
gst_tag_list_get_string(tags, GST_TAG_AUDIO_CODEC, &g_codec);
gst_tag_list_get_string(tags, GST_TAG_LANGUAGE_CODE, &g_lang);
+ gst_tag_list_free(tags);
}
audio.language_code = std::string(g_lang);
audio.codec = std::string(g_codec);
m_audioStreams.push_back(audio);
g_free (g_lang);
g_free (g_codec);
+ gst_caps_unref(caps);
}
for (i = 0; i < n_text; i++)
}
}
}
+ break;
+ }
+ case GST_MESSAGE_BUFFERING:
+ {
+ GstBufferingMode mode;
+ gst_message_parse_buffering(msg, &(m_bufferInfo.bufferPercent));
+ gst_message_parse_buffering_stats(msg, &mode, &(m_bufferInfo.avgInRate), &(m_bufferInfo.avgOutRate), &(m_bufferInfo.bufferingLeft));
+ m_event((iPlayableService*)this, evBuffering);
}
default:
break;
if ( gst_structure_has_name (structure, "audio/mpeg"))
{
- gint mpegversion, layer = -1;
+ gint mpegversion, layer = -1;
if (!gst_structure_get_int (structure, "mpegversion", &mpegversion))
return atUnknown;
switch (mpegversion) {
- case 1:
+ case 1:
{
gst_structure_get_int (structure, "layer", &layer);
if ( layer == 3 )
return atUnknown;
}
-void eServiceMP3::gstPoll(const int&)
+void eServiceMP3::gstPoll(const int &msg)
{
/* ok, we have a serious problem here. gstBusSyncHandler sends
us the wakup signal, but likely before it was posted.
I need to understand the API a bit more to make this work
proplerly. */
- usleep(1);
-
- GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (m_gst_playbin));
- GstMessage *message;
- while ((message = gst_bus_pop (bus)))
+ if (msg == 1)
{
- gstBusCall(bus, message);
- gst_message_unref (message);
+ GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (m_gst_playbin));
+ GstMessage *message;
+ usleep(1);
+ while ((message = gst_bus_pop (bus)))
+ {
+ gstBusCall(bus, message);
+ gst_message_unref (message);
+ }
}
+ else
+ pullSubtitle();
}
eAutoInitPtr<eServiceFactoryMP3> init_eServiceFactoryMP3(eAutoInitNumbers::service+1, "eServiceFactoryMP3");
void eServiceMP3::gstCBsubtitleAvail(GstElement *appsink, gpointer user_data)
{
eServiceMP3 *_this = (eServiceMP3*)user_data;
- GstBuffer *buffer;
- g_signal_emit_by_name (appsink, "pull-buffer", &buffer);
- if (buffer)
+ eSingleLocker l(_this->m_subs_to_pull_lock);
+ ++_this->m_subs_to_pull;
+ _this->m_pump.send(2);
+}
+
+void eServiceMP3::pullSubtitle()
+{
+ GstElement *sink;
+ g_object_get (G_OBJECT (m_gst_playbin), "text-sink", &sink, NULL);
+ if (sink)
{
- GstFormat fmt = GST_FORMAT_TIME;
- gint64 buf_pos = GST_BUFFER_TIMESTAMP(buffer);
- gint64 duration_ns = GST_BUFFER_DURATION(buffer);
- size_t len = GST_BUFFER_SIZE(buffer);
- unsigned char line[len+1];
- memcpy(line, GST_BUFFER_DATA(buffer), len);
- line[len] = 0;
-// eDebug("got new subtitle @ buf_pos = %lld ns (in pts=%lld): '%s' ", buf_pos, buf_pos/11111, line);
- if ( _this->m_subtitle_widget )
+ while (m_subs_to_pull && m_subtitle_pages.size() < 2)
{
- ePangoSubtitlePage page;
- gRGB rgbcol(0xD0,0xD0,0xD0);
- page.m_elements.push_back(ePangoSubtitlePageElement(rgbcol, (const char*)line));
- page.show_pts = buf_pos / 11111L;
- page.m_timeout = duration_ns / 1000000;
- _this->m_subtitle_pages.push_back(page);
- _this->pushSubtitles();
+ GstBuffer *buffer;
+ {
+ eSingleLocker l(m_subs_to_pull_lock);
+ --m_subs_to_pull;
+ }
+ g_signal_emit_by_name (sink, "pull-buffer", &buffer);
+ if (buffer)
+ {
+ gint64 buf_pos = GST_BUFFER_TIMESTAMP(buffer);
+ gint64 duration_ns = GST_BUFFER_DURATION(buffer);
+ size_t len = GST_BUFFER_SIZE(buffer);
+ unsigned char line[len+1];
+ memcpy(line, GST_BUFFER_DATA(buffer), len);
+ line[len] = 0;
+ eDebug("got new subtitle @ buf_pos = %lld ns (in pts=%lld): '%s' ", buf_pos, buf_pos/11111, line);
+ ePangoSubtitlePage page;
+ gRGB rgbcol(0xD0,0xD0,0xD0);
+ page.m_elements.push_back(ePangoSubtitlePageElement(rgbcol, (const char*)line));
+ page.show_pts = buf_pos / 11111L;
+ page.m_timeout = duration_ns / 1000000;
+ m_subtitle_pages.push_back(page);
+ pushSubtitles();
+ gst_buffer_unref(buffer);
+ }
}
+ gst_object_unref(sink);
}
+ else
+ eDebug("no subtitle sink!");
}
void eServiceMP3::pushSubtitles()
{
ePangoSubtitlePage page;
- GstClockTime base_time;
pts_t running_pts;
- GstElement *syncsink;
- g_object_get (G_OBJECT (m_gst_playbin), "audio-sink", &syncsink, NULL);
- GstClock *clock;
- clock = gst_element_get_clock (syncsink);
while ( !m_subtitle_pages.empty() )
{
+ getPlayPosition(running_pts);
page = m_subtitle_pages.front();
-
- base_time = gst_element_get_base_time (syncsink);
- running_pts = gst_clock_get_time (clock) / 11111L;
gint64 diff_ms = ( page.show_pts - running_pts ) / 90;
-// eDebug("eServiceMP3::pushSubtitles show_pts = %lld running_pts = %lld diff = %lld", page.show_pts, running_pts, diff_ms);
- if ( diff_ms > 20 )
+ eDebug("eServiceMP3::pushSubtitles show_pts = %lld running_pts = %lld diff = %lld", page.show_pts, running_pts, diff_ms);
+ if (diff_ms < -100)
+ {
+ GstFormat fmt = GST_FORMAT_TIME;
+ gint64 now;
+ /* gst_element_query_position() returns a gboolean (0.10 API), so test its
+    truth value; comparing against -1 was always true and hid query failures */
+ if (gst_element_query_position(m_gst_playbin, &fmt, &now))
+ {
+ now /= 11111;
+ /* llabs, not abs: the difference is a 64 bit value and abs() would truncate to int */
+ diff_ms = llabs((now - running_pts) / 90);
+ eDebug("diff < -100ms check decoder/pipeline diff: decoder: %lld, pipeline: %lld, diff: %lld", running_pts, now, diff_ms);
+ if (diff_ms > 100000)
+ {
+ eDebug("high decoder/pipeline difference.. assume decoder has not started yet.. check again in 1sec");
+ m_subtitle_sync_timer->start(1000, true);
+ break;
+ }
+ }
+ else
+ eDebug("query position for decoder/pipeline check failed!");
+ eDebug("subtitle too late... drop");
+ m_subtitle_pages.pop_front();
+ }
+ else if ( diff_ms > 20 )
{
-// eDebug("m_subtitle_sync_timer->start(%lld,1)", diff_ms);
- m_subtitle_sync_timer->start(diff_ms, 1);
+// eDebug("start recheck timer");
+ m_subtitle_sync_timer->start(diff_ms > 1000 ? 1000 : diff_ms, true);
break;
}
- else
+ else // immediate show
{
- m_subtitle_widget->setPage(page);
+ if (m_subtitle_widget)
+ m_subtitle_widget->setPage(page);
m_subtitle_pages.pop_front();
}
- } ;
-
- gst_object_unref (clock);
+ }
+ if (m_subtitle_pages.empty())
+ pullSubtitle();
}
RESULT eServiceMP3::enableSubtitles(eWidget *parent, ePyObject tuple)
goto error_out;
type = PyInt_AsLong(entry);
- g_object_set (G_OBJECT (m_gst_playbin), "current-text", pid, NULL);
- m_currentSubtitleStream = pid;
+ if (m_currentSubtitleStream != pid)
+ {
+ g_object_set (G_OBJECT (m_gst_playbin), "current-text", pid, NULL);
+ m_currentSubtitleStream = pid;
+ eSingleLocker l(m_subs_to_pull_lock);
+ m_subs_to_pull = 0;
+ m_subtitle_pages.clear();
+ }
+ m_subtitle_widget = 0;
m_subtitle_widget = new eSubtitleWidget(parent);
m_subtitle_widget->resize(parent->size()); /* full size */
g_object_get (G_OBJECT (m_gst_playbin), "current-text", &text_pid, NULL);
eDebug ("eServiceMP3::switched to subtitle stream %i", text_pid);
- m_subtitle_pages.clear();
+
return 0;
PyObject *eServiceMP3::getCachedSubtitle()
{
- eDebug("eServiceMP3::getCachedSubtitle");
+// eDebug("eServiceMP3::getCachedSubtitle");
Py_RETURN_NONE;
}
return l;
}
+RESULT eServiceMP3::streamed(ePtr<iStreamedService> &ptr)
+{
+ ptr = this;
+ return 0;
+}
+
+PyObject *eServiceMP3::getBufferCharge()
+{
+ ePyObject tuple = PyTuple_New(5);
+ PyTuple_SET_ITEM(tuple, 0, PyInt_FromLong(m_bufferInfo.bufferPercent));
+ PyTuple_SET_ITEM(tuple, 1, PyInt_FromLong(m_bufferInfo.avgInRate));
+ PyTuple_SET_ITEM(tuple, 2, PyInt_FromLong(m_bufferInfo.avgOutRate));
+ PyTuple_SET_ITEM(tuple, 3, PyInt_FromLong(m_bufferInfo.bufferingLeft));
+ PyTuple_SET_ITEM(tuple, 4, PyInt_FromLong(m_buffer_size));
+ return tuple;
+}
+
+/* Remember the requested stream buffer size and forward it to playbin2's
+   "buffer-size" property (bytes). Guard against a missing pipeline, matching
+   the null checks the other member functions in this file perform. */
+int eServiceMP3::setBufferSize(int size)
+{
+ m_buffer_size = size;
+ if (m_gst_playbin)
+ g_object_set (G_OBJECT (m_gst_playbin), "buffer-size", m_buffer_size, NULL);
+ return 0;
+}
+
+
#else
#warning gstreamer not available, not building media player
#endif