/* for subtitles */
#include <lib/gui/esubtitle.h>
-#ifndef GST_SEEK_FLAG_SKIP
-#warning Compiling for legacy gstreamer, things will break
-#define GST_SEEK_FLAG_SKIP 0
-#define GST_TAG_HOMEPAGE ""
-#endif
-
// eServiceFactoryMP3
eServiceFactoryMP3::eServiceFactoryMP3()
extensions.push_back("vob");
extensions.push_back("wav");
extensions.push_back("wave");
+ extensions.push_back("m4v");
extensions.push_back("mkv");
extensions.push_back("avi");
extensions.push_back("divx");
RESULT deleteFromDisk(int simulate);
RESULT getListOfFilenames(std::list<std::string> &);
+ RESULT reindex();
};
DEFINE_REF(eMP3ServiceOfflineOperations);
return 0;
}
+RESULT eMP3ServiceOfflineOperations::reindex()
+{
+ return -1;
+}
+
RESULT eServiceFactoryMP3::offlineOperations(const eServiceReference &ref, ePtr<iServiceOfflineOperations> &ptr)
{
m_seekTimeout = eTimer::create(eApp);
m_subtitle_sync_timer = eTimer::create(eApp);
m_stream_tags = 0;
- m_currentAudioStream = 0;
+ m_currentAudioStream = -1;
m_currentSubtitleStream = 0;
m_subtitle_widget = 0;
m_currentTrickRatio = 0;
+ m_subs_to_pull = 0;
m_buffer_size = 1*1024*1024;
CONNECT(m_seekTimeout->timeout, eServiceMP3::seekTimeoutCB);
CONNECT(m_subtitle_sync_timer->timeout, eServiceMP3::pushSubtitles);
sourceinfo.containertype = ctAVI;
sourceinfo.is_video = TRUE;
}
- else if ( strcasecmp(ext, ".mp4") == 0 || strcasecmp(ext, ".mov") == 0)
+ else if ( strcasecmp(ext, ".mp4") == 0 || strcasecmp(ext, ".mov") == 0 || strcasecmp(ext, ".m4v") == 0)
{
sourceinfo.containertype = ctMP4;
sourceinfo.is_video = TRUE;
eDebug("eServiceMP3::sorry, can't play: missing gst-plugin-appsink");
else
{
- g_object_set (G_OBJECT (subsink), "emit-signals", TRUE, NULL);
- g_signal_connect (subsink, "new-buffer", G_CALLBACK (gstCBsubtitleAvail), this);
+ m_subs_to_pull_handler_id = g_signal_connect (subsink, "new-buffer", G_CALLBACK (gstCBsubtitleAvail), this);
g_object_set (G_OBJECT (m_gst_playbin), "text-sink", subsink, NULL);
}
m_gst_playbin = 0;
}
- gst_element_set_state (m_gst_playbin, GST_STATE_PLAYING);
setBufferSize(m_buffer_size);
}
eServiceMP3::~eServiceMP3()
{
+ // disconnect subtitle callback
+ GstElement *sink;
+ g_object_get (G_OBJECT (m_gst_playbin), "text-sink", &sink, NULL);
+ if (sink)
+ {
+ g_signal_handler_disconnect (sink, m_subs_to_pull_handler_id);
+ gst_object_unref(sink);
+ }
+
delete m_subtitle_widget;
+
+ // disconnect sync handler callback
+ gst_bus_set_sync_handler(gst_pipeline_get_bus (GST_PIPELINE (m_gst_playbin)), NULL, NULL);
+
if (m_state == stRunning)
stop();
-
+
if (m_stream_tags)
gst_tag_list_free(m_stream_tags);
}
}
-DEFINE_REF(eServiceMP3);
+DEFINE_REF(eServiceMP3);
RESULT eServiceMP3::connectEvent(const Slot2<void,iPlayableService*,int> &event, ePtr<eConnection> &connection)
{
RESULT eServiceMP3::start()
{
ASSERT(m_state == stIdle);
-
+
m_state = stRunning;
if (m_gst_playbin)
{
eDebug("eServiceMP3::starting pipeline");
gst_element_set_state (m_gst_playbin, GST_STATE_PLAYING);
}
+
m_event(this, evStart);
+
return 0;
}
RESULT eServiceMP3::stop()
{
ASSERT(m_state != stIdle);
+
if (m_state == stStopped)
return -1;
+
eDebug("eServiceMP3::stop %s", m_ref.path.c_str());
gst_element_set_state(m_gst_playbin, GST_STATE_NULL);
m_state = stStopped;
+
return 0;
}
{
if (!m_gst_playbin || m_state != stRunning)
return -1;
- GstStateChangeReturn res = gst_element_set_state(m_gst_playbin, GST_STATE_PAUSED);
- if (res == GST_STATE_CHANGE_ASYNC)
- {
- pts_t ppos;
- getPlayPosition(ppos);
- seekTo(ppos);
- }
+
+ gst_element_set_state(m_gst_playbin, GST_STATE_PAUSED);
+
return 0;
}
RESULT eServiceMP3::unpause()
{
- m_subtitle_pages.clear();
if (!m_gst_playbin || m_state != stRunning)
return -1;
- GstStateChangeReturn res;
- res = gst_element_set_state(m_gst_playbin, GST_STATE_PLAYING);
+ gst_element_set_state(m_gst_playbin, GST_STATE_PLAYING);
+
return 0;
}
{
if (!m_gst_playbin)
return -1;
+
if (m_state != stRunning)
return -1;
-
+
GstFormat fmt = GST_FORMAT_TIME;
gint64 len;
return 0;
}
-RESULT eServiceMP3::seekTo(pts_t to)
+RESULT eServiceMP3::seekToImpl(pts_t to)
{
- m_subtitle_pages.clear();
-
- if (!m_gst_playbin)
- return -1;
-
/* convert pts to nanoseconds */
gint64 time_nanoseconds = to * 11111LL;
if (!gst_element_seek (m_gst_playbin, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
eDebug("eServiceMP3::seekTo failed");
return -1;
}
+
return 0;
}
+RESULT eServiceMP3::seekTo(pts_t to)
+{
+ RESULT ret = -1;
+
+ if (m_gst_playbin) {
+		eSingleLocker l(m_subs_to_pull_lock); // needed so we don't handle incoming subtitles during a seek!
+ if (!(ret = seekToImpl(to)))
+ {
+ m_subtitle_pages.clear();
+ m_subs_to_pull = 0;
+ }
+ }
+
+ return ret;
+}
+
+
RESULT eServiceMP3::trickSeek(gdouble ratio)
{
if (!m_gst_playbin)
return seekRelative(0, 0);
GstEvent *s_event;
- GstSeekFlags flags;
+ int flags;
flags = GST_SEEK_FLAG_NONE;
- flags |= GstSeekFlags (GST_SEEK_FLAG_FLUSH);
+ flags |= GST_SEEK_FLAG_FLUSH;
// flags |= GstSeekFlags (GST_SEEK_FLAG_ACCURATE);
- flags |= GstSeekFlags (GST_SEEK_FLAG_KEY_UNIT);
+ flags |= GST_SEEK_FLAG_KEY_UNIT;
// flags |= GstSeekFlags (GST_SEEK_FLAG_SEGMENT);
// flags |= GstSeekFlags (GST_SEEK_FLAG_SKIP);
if ( ratio >= 0 )
{
- s_event = gst_event_new_seek (ratio, GST_FORMAT_TIME, flags, GST_SEEK_TYPE_SET, pos, GST_SEEK_TYPE_SET, len);
+ s_event = gst_event_new_seek (ratio, GST_FORMAT_TIME, (GstSeekFlags)flags, GST_SEEK_TYPE_SET, pos, GST_SEEK_TYPE_SET, len);
eDebug("eServiceMP3::trickSeek with rate %lf to %" GST_TIME_FORMAT " ", ratio, GST_TIME_ARGS (pos));
}
else
{
- s_event = gst_event_new_seek (ratio, GST_FORMAT_TIME, GST_SEEK_FLAG_SKIP|GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_NONE, -1, GST_SEEK_TYPE_NONE, -1);
+ s_event = gst_event_new_seek (ratio, GST_FORMAT_TIME, (GstSeekFlags)(GST_SEEK_FLAG_SKIP|GST_SEEK_FLAG_FLUSH), GST_SEEK_TYPE_NONE, -1, GST_SEEK_TYPE_NONE, -1);
}
if (!gst_element_send_event ( GST_ELEMENT (m_gst_playbin), s_event))
RESULT eServiceMP3::getPlayPosition(pts_t &pts)
{
+ GstFormat fmt = GST_FORMAT_TIME;
+ gint64 pos;
+ GstElement *sink;
+ pts = 0;
+
if (!m_gst_playbin)
return -1;
if (m_state != stRunning)
return -1;
- GstFormat fmt = GST_FORMAT_TIME;
- gint64 len;
-
- if (!gst_element_query_position(m_gst_playbin, &fmt, &len))
+ g_object_get (G_OBJECT (m_gst_playbin), "audio-sink", &sink, NULL);
+
+ if (!sink)
+ g_object_get (G_OBJECT (m_gst_playbin), "video-sink", &sink, NULL);
+
+ if (!sink)
return -1;
- /* len is in nanoseconds. we have 90 000 pts per second. */
- pts = len / 11111;
+ gchar *name = gst_element_get_name(sink);
+ gboolean use_get_decoder_time = strstr(name, "dvbaudiosink") || strstr(name, "dvbvideosink");
+ g_free(name);
+
+ if (use_get_decoder_time)
+ g_signal_emit_by_name(sink, "get-decoder-time", &pos);
+
+ gst_object_unref(sink);
+
+ if (!use_get_decoder_time && !gst_element_query_position(m_gst_playbin, &fmt, &pos)) {
+ eDebug("gst_element_query_position failed in getPlayPosition");
+ return -1;
+ }
+
+ /* pos is in nanoseconds. we have 90 000 pts per second. */
+ pts = pos / 11111;
return 0;
}
default:
break;
}
- gdouble value;
- if ( !tag || !m_stream_tags )
- value = 0.0;
- PyObject *pyValue;
+
if ( isBuffer )
{
const GValue *gv_buffer = gst_tag_list_get_value_index(m_stream_tags, tag, 0);
{
GstBuffer *buffer;
buffer = gst_value_get_buffer (gv_buffer);
- pyValue = PyBuffer_FromMemory(GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
+ return PyBuffer_FromMemory(GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
}
}
else
{
+ gdouble value = 0.0;
gst_tag_list_get_double(m_stream_tags, tag, &value);
- pyValue = PyFloat_FromDouble(value);
+ return PyFloat_FromDouble(value);
}
- return pyValue;
+ return 0;
}
RESULT eServiceMP3::audioChannel(ePtr<iAudioChannelSelection> &ptr)
int eServiceMP3::getCurrentTrack()
{
+ if (m_currentAudioStream == -1)
+ g_object_get (G_OBJECT (m_gst_playbin), "current-audio", &m_currentAudioStream, NULL);
return m_currentAudioStream;
}
RESULT eServiceMP3::selectTrack(unsigned int i)
{
- int ret = selectAudioStream(i);
- /* flush */
pts_t ppos;
getPlayPosition(ppos);
- seekTo(ppos);
+ ppos -= 90000;
+ if (ppos < 0)
+ ppos = 0;
+
+ int ret = selectAudioStream(i);
+ if (!ret) {
+ /* flush */
+ seekTo(ppos);
+ }
return ret;
}
case GST_MESSAGE_STATE_CHANGED:
{
if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
- return;
+ break;
GstState old_state, new_state;
gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
if(old_state == new_state)
- return;
+ break;
eDebug("eServiceMP3::state transition %s -> %s", gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
} break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
{
+ GstElement *sink;
+ g_object_get (G_OBJECT (m_gst_playbin), "text-sink", &sink, NULL);
+ if (sink)
+ {
+ g_object_set (G_OBJECT (sink), "max-buffers", 2, NULL);
+ g_object_set (G_OBJECT (sink), "sync", FALSE, NULL);
+ g_object_set (G_OBJECT (sink), "async", FALSE, NULL);
+ g_object_set (G_OBJECT (sink), "emit-signals", TRUE, NULL);
+ gst_object_unref(sink);
+ }
} break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
{
{
gchar *debug;
GError *err;
-
gst_message_parse_error (msg, &err, &debug);
g_free (debug);
eWarning("Gstreamer error: %s (%i) from %s", err->message, err->code, sourceName );
}
case GST_MESSAGE_ASYNC_DONE:
{
+ if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
+ break;
+
GstTagList *tags;
gint i, active_idx, n_video = 0, n_audio = 0, n_text = 0;
eDebug("eServiceMP3::async-done - %d video, %d audio, %d subtitle", n_video, n_audio, n_text);
+ if ( n_video + n_audio <= 0 )
+ stop();
+
active_idx = 0;
m_audioStreams.clear();
if (!caps)
continue;
GstStructure* str = gst_caps_get_structure(caps, 0);
-gchar *g_type;
-g_type = gst_structure_get_name(str);
-eDebug("AUDIO STRUCT=%s", g_type);
+ const gchar *g_type = gst_structure_get_name(str);
+ eDebug("AUDIO STRUCT=%s", g_type);
audio.type = gstCheckAudioPad(str);
g_codec = g_strdup(g_type);
g_lang = g_strdup_printf ("und");
if (strstr(eventname, "Changed"))
m_event((iPlayableService*)this, evVideoProgressiveChanged);
}
- g_free(eventname);
}
}
break;
if ( gst_structure_has_name (structure, "audio/mpeg"))
{
- gint mpegversion, layer = -1;
+ gint mpegversion, layer = -1;
if (!gst_structure_get_int (structure, "mpegversion", &mpegversion))
return atUnknown;
switch (mpegversion) {
- case 1:
+ case 1:
{
gst_structure_get_int (structure, "layer", &layer);
if ( layer == 3 )
return atUnknown;
}
-void eServiceMP3::gstPoll(const int&)
+void eServiceMP3::gstPoll(const int &msg)
{
	/* ok, we have a serious problem here. gstBusSyncHandler sends
	   us the wakeup signal, but likely before it was posted.
	   I need to understand the API a bit more to make this work
	   properly. */
- usleep(1);
-
- GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (m_gst_playbin));
- GstMessage *message;
- while ((message = gst_bus_pop (bus)))
+ if (msg == 1)
{
- gstBusCall(bus, message);
- gst_message_unref (message);
+ GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (m_gst_playbin));
+ GstMessage *message;
+ usleep(1);
+ while ((message = gst_bus_pop (bus)))
+ {
+ gstBusCall(bus, message);
+ gst_message_unref (message);
+ }
}
+ else
+ pullSubtitle();
}
eAutoInitPtr<eServiceFactoryMP3> init_eServiceFactoryMP3(eAutoInitNumbers::service+1, "eServiceFactoryMP3");
void eServiceMP3::gstCBsubtitleAvail(GstElement *appsink, gpointer user_data)
{
eServiceMP3 *_this = (eServiceMP3*)user_data;
- GstBuffer *buffer;
- g_signal_emit_by_name (appsink, "pull-buffer", &buffer);
- if (buffer)
+ eSingleLocker l(_this->m_subs_to_pull_lock);
+ ++_this->m_subs_to_pull;
+ _this->m_pump.send(2);
+}
+
+void eServiceMP3::pullSubtitle()
+{
+ GstElement *sink;
+ g_object_get (G_OBJECT (m_gst_playbin), "text-sink", &sink, NULL);
+ if (sink)
{
- GstFormat fmt = GST_FORMAT_TIME;
- gint64 buf_pos = GST_BUFFER_TIMESTAMP(buffer);
- gint64 duration_ns = GST_BUFFER_DURATION(buffer);
- size_t len = GST_BUFFER_SIZE(buffer);
- unsigned char line[len+1];
- memcpy(line, GST_BUFFER_DATA(buffer), len);
- line[len] = 0;
-// eDebug("got new subtitle @ buf_pos = %lld ns (in pts=%lld): '%s' ", buf_pos, buf_pos/11111, line);
- if ( _this->m_subtitle_widget )
+ while (m_subs_to_pull && m_subtitle_pages.size() < 2)
{
- ePangoSubtitlePage page;
- gRGB rgbcol(0xD0,0xD0,0xD0);
- page.m_elements.push_back(ePangoSubtitlePageElement(rgbcol, (const char*)line));
- page.show_pts = buf_pos / 11111L;
- page.m_timeout = duration_ns / 1000000;
- _this->m_subtitle_pages.push_back(page);
- _this->pushSubtitles();
+ GstBuffer *buffer;
+ {
+ eSingleLocker l(m_subs_to_pull_lock);
+ --m_subs_to_pull;
+ g_signal_emit_by_name (sink, "pull-buffer", &buffer);
+ }
+ if (buffer)
+ {
+ gint64 buf_pos = GST_BUFFER_TIMESTAMP(buffer);
+ gint64 duration_ns = GST_BUFFER_DURATION(buffer);
+ size_t len = GST_BUFFER_SIZE(buffer);
+ unsigned char line[len+1];
+ memcpy(line, GST_BUFFER_DATA(buffer), len);
+ line[len] = 0;
+ eDebug("got new subtitle @ buf_pos = %lld ns (in pts=%lld): '%s' ", buf_pos, buf_pos/11111, line);
+ ePangoSubtitlePage page;
+ gRGB rgbcol(0xD0,0xD0,0xD0);
+ page.m_elements.push_back(ePangoSubtitlePageElement(rgbcol, (const char*)line));
+ page.show_pts = buf_pos / 11111L;
+ page.m_timeout = duration_ns / 1000000;
+ m_subtitle_pages.push_back(page);
+ pushSubtitles();
+ gst_buffer_unref(buffer);
+ }
}
+ gst_object_unref(sink);
}
+ else
+ eDebug("no subtitle sink!");
}
void eServiceMP3::pushSubtitles()
{
ePangoSubtitlePage page;
- GstClockTime base_time;
pts_t running_pts;
- GstElement *syncsink;
- g_object_get (G_OBJECT (m_gst_playbin), "audio-sink", &syncsink, NULL);
- GstClock *clock;
- clock = gst_element_get_clock (syncsink);
while ( !m_subtitle_pages.empty() )
{
+ getPlayPosition(running_pts);
page = m_subtitle_pages.front();
-
- base_time = gst_element_get_base_time (syncsink);
- running_pts = gst_clock_get_time (clock) / 11111L;
gint64 diff_ms = ( page.show_pts - running_pts ) / 90;
-// eDebug("eServiceMP3::pushSubtitles show_pts = %lld running_pts = %lld diff = %lld", page.show_pts, running_pts, diff_ms);
- if ( diff_ms > 20 )
+ eDebug("eServiceMP3::pushSubtitles show_pts = %lld running_pts = %lld diff = %lld", page.show_pts, running_pts, diff_ms);
+ if (diff_ms < -100)
+ {
+ GstFormat fmt = GST_FORMAT_TIME;
+ gint64 now;
+ if (gst_element_query_position(m_gst_playbin, &fmt, &now) != -1)
+ {
+ now /= 11111;
+ diff_ms = abs((now - running_pts) / 90);
+ eDebug("diff < -100ms check decoder/pipeline diff: decoder: %lld, pipeline: %lld, diff: %lld", running_pts, now, diff_ms);
+ if (diff_ms > 100000)
+ {
+ eDebug("high decoder/pipeline difference.. assume decoder has now started yet.. check again in 1sec");
+ m_subtitle_sync_timer->start(1000, true);
+ break;
+ }
+ }
+ else
+ eDebug("query position for decoder/pipeline check failed!");
+ eDebug("subtitle to late... drop");
+ m_subtitle_pages.pop_front();
+ }
+ else if ( diff_ms > 20 )
{
-// eDebug("m_subtitle_sync_timer->start(%lld,1)", diff_ms);
- m_subtitle_sync_timer->start(diff_ms, 1);
+// eDebug("start recheck timer");
+ m_subtitle_sync_timer->start(diff_ms > 1000 ? 1000 : diff_ms, true);
break;
}
- else
+ else // immediate show
{
- m_subtitle_widget->setPage(page);
+ if (m_subtitle_widget)
+ m_subtitle_widget->setPage(page);
m_subtitle_pages.pop_front();
}
}
- gst_object_unref (clock);
- gst_object_unref (syncsink);
+ if (m_subtitle_pages.empty())
+ pullSubtitle();
}
RESULT eServiceMP3::enableSubtitles(eWidget *parent, ePyObject tuple)
goto error_out;
type = PyInt_AsLong(entry);
- g_object_set (G_OBJECT (m_gst_playbin), "current-text", pid, NULL);
- m_currentSubtitleStream = pid;
+ if (m_currentSubtitleStream != pid)
+ {
+ eSingleLocker l(m_subs_to_pull_lock);
+ g_object_set (G_OBJECT (m_gst_playbin), "current-text", pid, NULL);
+ m_currentSubtitleStream = pid;
+ m_subs_to_pull = 0;
+ m_subtitle_pages.clear();
+ }
+ m_subtitle_widget = 0;
m_subtitle_widget = new eSubtitleWidget(parent);
m_subtitle_widget->resize(parent->size()); /* full size */
g_object_get (G_OBJECT (m_gst_playbin), "current-text", &text_pid, NULL);
eDebug ("eServiceMP3::switched to subtitle stream %i", text_pid);
- m_subtitle_pages.clear();
return 0;