Liquidsoap + GStreamer restreaming - liquidsoap

I'm trying to restream my channel to another one with Liquidsoap and GStreamer. I want to do that to have a fallback, so another source is used if the first one is not active. This is what I have:
set("frame.video.width", 1920)
set("frame.video.height", 1080)
#set("frame.video.samplerate", 30)
set("gstreamer.add_borders", false)
set("clock.allow_streaming_errors",false)
s = single("rtmp://link_to_rtmp_stream/test")
s = fallback([s, blank()])
output.gstreamer.audio_video(
  video_pipeline=
    "videoconvert ! x264enc bitrate=4000 ! video/x-h264,profile=baseline ! queue ! mux.",
  audio_pipeline=
    "audioconvert ! voaacenc bitrate=128000 ! queue ! mux.",
  pipeline=
    "flvmux name=mux ! rtmpsink location=\"rtmp://wherewewhantstream.com live=1\"",
  s)
I have one problem: getting the first stream recognized. The script doesn't recognize it because it thinks it's a file. How can I get the RTMP stream recognized and decoded?

Try this: define a GStreamer-based RTMP input instead of using single(), which expects a file:
set("frame.video.width", 1920)
set("frame.video.height", 1080)
#set("frame.video.samplerate", 30)
set("gstreamer.add_borders", false)
set("clock.allow_streaming_errors",false)
def gstreamer.rtmp(~id="", uri) =
  pipeline = "rtmpsrc location=#{uri} ! tee name=t"
  audio_pipeline = "t. ! queue"
  video_pipeline = "t. ! queue"
  input.gstreamer.audio_video(id=id, pipeline=pipeline,
    audio_pipeline=audio_pipeline, video_pipeline=video_pipeline)
end
s = gstreamer.rtmp("rtmp://link_to_rtmp_stream/test")
s = fallback([s, blank()])
output.gstreamer.audio_video(
  video_pipeline=
    "videoconvert ! x264enc bitrate=4000 ! video/x-h264,profile=baseline ! queue ! mux.",
  audio_pipeline=
    "audioconvert ! voaacenc bitrate=128000 ! queue ! mux.",
  pipeline=
    "flvmux name=mux ! rtmpsink location=\"rtmp://wherewewhantstream.com live=1\"",
  s)
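If the RTMP input still fails inside Liquidsoap, it can help to verify the GStreamer side on its own first. Here is a minimal sketch in Python (the URL is the placeholder from the question) that builds the same rtmpsrc source with decodebin and plays it to auto sinks; if this works, the problem is on the Liquidsoap side rather than in GStreamer:

import gi
gi.require_version("Gst", "1.0")
from gi.repository import Gst, GLib

Gst.init(None)
# Same RTMP source as the Liquidsoap input above; decodebin picks the
# demuxer and decoders automatically.
pipeline = Gst.parse_launch(
    "rtmpsrc location=rtmp://link_to_rtmp_stream/test ! decodebin name=d "
    "d. ! queue ! videoconvert ! autovideosink "
    "d. ! queue ! audioconvert ! autoaudiosink"
)
pipeline.set_state(Gst.State.PLAYING)
GLib.MainLoop().run()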

Related

gstreamer 1.x clockoverlay h264 won't show clock

I have a working pipeline using interpipe:
gstd-client pipeline_create recording_pipe interpipesrc name=rec_intpsrc listen-to=cam_src is-live=true allow-renegotiation=true enable-sync=false format=time ! queue max-size-buffers=3 leaky=downstream flush-on-eos=true ! imxvpuenc_h264 bitrate=4096 ! h264parse ! splitmuxsink location=/tmp/rec_%03d.mpg max-size-time=60000000000 max-files=4 muxer=mpegtsmux
Now I want to overlay a clock, but I can't achieve it. I've tried many variants, like:
gstd-client pipeline_create recording_pipe interpipesrc name=rec_intpsrc listen-to=cam_src is-live=true allow-renegotiation=true enable-sync=false format=time ! queue max-size-buffers=3 leaky=downstream flush-on-eos=true ! clockoverly ! imxvpuenc_h264 bitrate=4096 ! h264parse ! splitmuxsink location=/tmp/rec_%03d.mpg max-size-time=60000000000 max-files=4 muxer=mpegtsmux
Files are saved and managed, but without the overlay. For reference, I've got a fully working stream:
gstd-client pipeline_create stream_pipe interpipesrc name=stream_intpsrc listen-to=cam_src is-live=true allow-renegotiation=true enable-sync=false ! queue max-size-buffers=3 leaky=downstream ! clockoverlay shaded-background=true font-desc="Nimbus mono" valignment=1 ! textoverlay shaded-background=true font-desc="Nimbus mono" valignment=1 halignment=2 text="Live" ! imxvpuenc_h264 ! mpegtsmux alignment=0 ! tcpserversink host=0.0.0.0 port=1234 sync=true
Any ideas?
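Two things stand out. First, the recording variant spells the element clockoverly; the element is clockoverlay, and a pipeline containing an unknown element name would normally fail to build, so double-check what was actually sent to gstd. Second, clockoverlay operates on raw video, so an explicit videoconvert in front of it can help. Below is a minimal sketch in Python, using videotestsrc and x264enc as stand-ins for the interpipe source and imxvpuenc_h264 (those substitutions are assumptions, not the original setup), that shows the overlay placed before the encoder:

import gi
gi.require_version("Gst", "1.0")
from gi.repository import Gst, GLib

Gst.init(None)
# clockoverlay must see raw video, so it sits between videoconvert and the encoder.
pipeline = Gst.parse_launch(
    "videotestsrc is-live=true ! videoconvert ! "
    "clockoverlay shaded-background=true font-desc=\"Nimbus mono\" valignment=1 ! "
    "videoconvert ! x264enc ! h264parse ! "
    "splitmuxsink location=/tmp/rec_%03d.mpg max-size-time=60000000000 "
    "max-files=4 muxer=mpegtsmux"
)
pipeline.set_state(Gst.State.PLAYING)
GLib.MainLoop().run()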

H.264 RTSP using an Android phone

I would like to create a VR app, so I created an RTSP server linked to my ZED Mini. It works if I use an H.265 encoder, but the problem is that the RTSP stream only plays in the VLC app on an iPhone 7 or in VLC on a Windows 8 computer; the Onvifer app on my Huawei P7 Android phone cannot use this RTSP address at all. I need the Huawei P7 for my project, as I am going to create the app that connects to this RTSP server.
From my checking, some Android devices do not support H.265 decoding, so I decided to use H.264. I have been googling a lot for a few weeks but have become frustrated at not finding a working H.264 setup.
This is the code, which I amended from test-readme.c:
#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>

int main (int argc, char *argv[])
{
  GMainLoop *loop;
  GstRTSPServer *server;
  GstRTSPMountPoints *mounts;
  GstRTSPMediaFactory *factory;

  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* create a server instance */
  server = gst_rtsp_server_new ();

  /* get the mount points for this server; every server has a default object
   * that is used to map URI mount points to media factories */
  mounts = gst_rtsp_server_get_mount_points (server);

  /* make a media factory for a test stream. The default media factory can use
   * gst-launch syntax to create pipelines. Any launch line works as long as it
   * contains elements named pay%d; each such element becomes a stream. */
  factory = gst_rtsp_media_factory_new ();

  /* working case for a test video stream:
   * gst_rtsp_media_factory_set_launch (factory, "( videotestsrc is-live=1 ! x264enc ! rtph264pay name=pay0 pt=96 )");
   *
   * working case for an external camera:
   * gst_rtsp_media_factory_set_launch (factory, "( v4l2src is-live=1 device=/dev/video1 ! video/x-raw, width=(int)720, height=(int)480, framerate=30/1, format=I420 ! timeoverlay ! omxh265enc ! rtph265pay name=pay0 pt=96 )");
   *
   * working case for the JX2 camera:
   * gst_rtsp_media_factory_set_launch (factory, "( nvcamerasrc sensor-id=0 ! video/x-raw(memory:NVMM), width=1920, height=1080, framerate=30/1, format=I420 ! nvvidconv flip-method=4 ! video/x-raw, width=(int)720, height=(int)480, framerate=30/1, format=I420 ! timeoverlay ! omxh265enc ! rtph265pay name=pay0 pt=96 )");
   *
   * working case for the ZED Mini camera with H.265:
   * gst_rtsp_media_factory_set_launch (factory, "( v4l2src is-live=1 device=/dev/video1 ! video/x-raw, width=2560, height=720, framerate=30/1, format=I420 ! nvvidconv ! video/x-raw, width=(int)720, height=(int)480, framerate=30/1, format=I420 ! timeoverlay ! omxh265enc ! rtph265pay name=pay0 pt=96 )");
   */

  /* failing case: ZED Mini camera with H.264 */
  gst_rtsp_media_factory_set_launch (factory, "( v4l2src is-live=1 device=/dev/video1 ! video/x-raw, width=2560, height=720, framerate=30/1, format=I420 ! nvvidconv ! video/x-raw, width=(int)720, height=(int)480, framerate=30/1, format=NV12 ! omxh264enc bitrate=10000000 ! rtph264pay name=pay0 pt=96 )");

  gst_rtsp_media_factory_set_shared (factory, TRUE);

  /* attach the test factory to the /test URL */
  gst_rtsp_mount_points_add_factory (mounts, "/test", factory);

  /* we don't need the ref to the mapper anymore */
  g_object_unref (mounts);

  /* attach the server to the default main context */
  gst_rtsp_server_attach (server, NULL);

  /* start serving */
  g_print ("stream ready at rtsp://172.16.124.75:8554/test\n");
  g_main_loop_run (loop);
  return 0;
}
This code works for the test video stream, the JX2 camera, a simple low-end USB camera, and also the ZED Mini camera using H.265. I need it to work using H.264; some element must be missing or wrong here:
gst_rtsp_media_factory_set_launch (factory, "( v4l2src is-live=1 device=/dev/video1 ! video/x-raw, width=2560, height=720, framerate=30/1, format=I420 ! nvvidconv ! video/x-raw, width=(int)720, height=(int)480, framerate=30/1, format=NV12 ! omxh264enc bitrate=10000000 ! rtph264pay name=pay0 pt=96 )");
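One avenue worth trying (an assumption, not a verified fix): Android hardware decoders are often strict about in-band parameter sets and profiles, so repeating SPS/PPS in the stream and adding h264parse before the payloader sometimes makes a stream playable where the bare encoder output was not. Below is a sketch of the same server using the GstRtspServer Python bindings; the device path and caps are carried over from the question, and the insert-sps-pps property only exists if your omxh264enc build (e.g. NVIDIA's gst-omx) exposes it:

import gi
gi.require_version("Gst", "1.0")
gi.require_version("GstRtspServer", "1.0")
from gi.repository import Gst, GstRtspServer, GLib

Gst.init(None)
server = GstRtspServer.RTSPServer()
factory = GstRtspServer.RTSPMediaFactory()
# insert-sps-pps=true repeats SPS/PPS in the stream (some Android decoders
# require this); h264parse normalizes the H.264 stream before payloading.
factory.set_launch(
    "( v4l2src device=/dev/video1 "
    "! video/x-raw, width=2560, height=720, framerate=30/1, format=I420 "
    "! nvvidconv ! video/x-raw, width=720, height=480, format=NV12 "
    "! omxh264enc bitrate=10000000 insert-sps-pps=true "
    "! h264parse ! rtph264pay name=pay0 pt=96 )"
)
factory.set_shared(True)
server.get_mount_points().add_factory("/test", factory)
server.attach(None)
print("stream ready at rtsp://0.0.0.0:8554/test")
GLib.MainLoop().run()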

gstreamer-1.0 on Raspberry Pi: cannot decode H.264 stream

I'm trying to run a gstreamer-1.0 Python script (see below; it works fine on an Ubuntu laptop) on a Raspberry Pi. However, it seems to be unable to decode the stream:
0:00:11.237415476 9605 0xafb0cc60 ERROR vaapidecode ../../../gst/vaapi/gstvaapidecode.c:1025:gst_vaapidecode_ensure_allowed_caps: failed to retrieve VA display
0:00:11.239490439 9605 0xafb0cc60 WARN decodebin gstdecodebin2.c:2087:connect_pad:<decodebin0> Link failed on pad vaapidecode0:sink
0:00:11.244097356 9605 0xafb0cc60 WARN uridecodebin gsturidecodebin.c:939:unknown_type_cb:<decoder> warning: No decoder available for type 'video/x-h264, stream-format=(string)byte-stream, alignment=(string)nal, width=(int)426, height=(int)240, framerate=(fraction)30/1, parsed=(boolean)true, pixel-aspect-ratio=(fraction)1/1, level=(string)2.1, profile=(string)main'.
I searched for information about the error (the results didn't enlighten me) and the warnings, but couldn't find much advice beyond installing gstreamer1.0-libav, which was already installed, so the decoder should be available.
What might be wrong here and how do I fix it?
This is the script:
#!/usr/bin/env python
# GST_DEBUG=3,python:5,gnl*:5 python 01_parsepipeline.py http://www.ustream.tv/channel/17074538 worst novideo.png
from __future__ import print_function
import sys
import gi
# Declare the required GStreamer version before importing Gst
gi.require_version("Gst", "1.0")
from gi.repository import GObject as gobject, Gst as gst
from livestreamer import Livestreamer, StreamError, PluginError, NoPluginError
import cv2
import numpy

def exit(msg):
    print(msg, file=sys.stderr)
    sys.exit()
class Player(object):
    def __init__(self):
        self.fd = None
        self.mainloop = gobject.MainLoop()
        # This creates a playbin pipeline and using the appsrc source
        # we can feed it our stream data
        self.pipeline = gst.parse_launch('uridecodebin uri=appsrc:// name=decoder \
            decoder. ! videorate ! video/x-raw,framerate=1/1 ! tee name=t \
            t. ! queue ! videoconvert ! video/x-raw,format=RGB ! appsink name=appsink \
            decoder. ! queue ! audioconvert ! fakesink')
        if self.pipeline is None:
            exit("couldn't build pipeline")
        decoder = self.pipeline.get_by_name('decoder')
        if decoder is None:
            exit("couldn't get decoder")
        decoder.connect("source-setup", self.on_source_setup)
        vsink = self.pipeline.get_by_name('appsink')
        if vsink is None:
            exit("couldn't get sink")
        vsink.set_property("emit-signals", True)
        vsink.set_property("max-buffers", 1)
        vsink.connect("new-sample", self.on_new_sample)
        # Creates a bus and set callbacks to receive errors
        self.bus = self.pipeline.get_bus()
        self.bus.add_signal_watch()
        self.bus.connect("message::eos", self.on_eos)
        self.bus.connect("message::error", self.on_error)
    def on_new_sample(self, sink):
        sample = sink.emit("pull-sample")
        buf = sample.get_buffer()
        caps = sample.get_caps()
        height = caps.get_structure(0).get_value('height')
        width = caps.get_structure(0).get_value('width')
        (result, mapinfo) = buf.map(gst.MapFlags.READ)
        if result:
            arr = numpy.ndarray(
                (height, width, 3),
                buffer=buf.extract_dup(0, buf.get_size()),
                dtype=numpy.uint8)
            resized_refimage = cv2.resize(refArray, (width, height))
            diff = cv2.norm(arr, resized_refimage, cv2.NORM_L2)
            buf.unmap(mapinfo)
            s = "diff = " + str(diff)
            print(s)
        return gst.FlowReturn.OK
    def exit(self, msg):
        self.stop()
        exit(msg)

    def stop(self):
        # Stop playback and exit mainloop
        self.pipeline.set_state(gst.State.NULL)
        self.mainloop.quit()
        # Close the stream
        if self.fd:
            self.fd.close()
    def play(self, stream):
        # Attempt to open the stream
        try:
            self.fd = stream.open()
        except StreamError as err:
            self.exit("Failed to open stream: {0}".format(err))
        # Start playback
        self.pipeline.set_state(gst.State.PLAYING)
        self.mainloop.run()

    def on_source_setup(self, element, source):
        # When this callback is called the appsrc expects
        # us to feed it more data
        print("source setup")
        source.connect("need-data", self.on_source_need_data)
        print("done")

    def on_pad_added(self, element, pad):
        string = pad.query_caps(None).to_string()
        print(string)
        if string.startswith('video/'):
            #type = pad.get_caps()[0].get_name()
            #print(type)
            #if type.startswith("video"):
            pad.link(self.vconverter.get_static_pad("sink"))
    def on_source_need_data(self, source, length):
        # Attempt to read data from the stream
        try:
            data = self.fd.read(length)
        except IOError as err:
            self.exit("Failed to read data from stream: {0}".format(err))
        # If data is empty it's the end of stream
        if not data:
            source.emit("end-of-stream")
            return
        # Convert the Python bytes into a GStreamer Buffer
        # and then push it to the appsrc
        buf = gst.Buffer.new_wrapped(data)
        source.emit("push-buffer", buf)
        #print("sent " + str(length) + " bytes")

    def on_eos(self, bus, msg):
        # Stop playback on end of stream
        self.stop()

    def on_error(self, bus, msg):
        # Print error message and exit on error
        error = msg.parse_error()[1]
        self.exit(error)
def main():
    if len(sys.argv) < 4:
        exit("Usage: {0} <url> <quality> <reference png image path>".format(sys.argv[0]))
    # Initialize GStreamer (the version requirement is declared at import time above)
    gobject.threads_init()
    gst.init(None)
    # Collect arguments
    url = sys.argv[1]
    quality = sys.argv[2]
    refImage = sys.argv[3]
    global refArray
    image = cv2.imread(refImage)
    refArray = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    # refArray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    refArray = cv2.blur(refArray, (3, 3))
    # Create the Livestreamer session
    livestreamer = Livestreamer()
    # Enable logging
    livestreamer.set_loglevel("debug")
    livestreamer.set_logoutput(sys.stdout)
    # Attempt to fetch streams
    try:
        streams = livestreamer.streams(url)
    except NoPluginError:
        exit("Livestreamer is unable to handle the URL '{0}'".format(url))
    except PluginError as err:
        exit("Plugin error: {0}".format(err))
    if not streams:
        exit("No streams found on URL '{0}'".format(url))
    # Look for specified stream
    if quality not in streams:
        exit("Unable to find '{0}' stream on URL '{1}'".format(quality, url))
    # We found the stream
    stream = streams[quality]
    # Create the player and start playback
    player = Player()
    # Blocks until playback is done
    player.play(stream)

if __name__ == "__main__":
    main()
The pipeline you've set up appears to be trying to invoke a VAAPI decoder to hardware-decode the H.264 stream; VAAPI isn't available on the Raspberry Pi, since the closed-source X server doesn't implement it. You may be able to use omxh264dec from the gstreamer1.0-omx package instead.
If not, you could do software H.264 decoding, but that will be slower (maybe unacceptably slow on a Raspberry Pi).
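If decodebin keeps picking vaapidecode even though it cannot work, one option (a sketch, assuming gstreamer1.0-omx is installed) is to lower vaapidecode's rank at runtime so autoplugging skips it; this could run near the top of the script above, after gst.init():

import gi
gi.require_version("Gst", "1.0")
from gi.repository import Gst

Gst.init(None)
# Lower vaapidecode's rank so decodebin/uridecodebin stop autoplugging it.
vaapi = Gst.Registry.get().lookup_feature("vaapidecode")
if vaapi is not None:
    vaapi.set_rank(Gst.Rank.NONE)
# Prefer the Pi's hardware decoder if it is present.
omx = Gst.Registry.get().lookup_feature("omxh264dec")
if omx is not None:
    omx.set_rank(Gst.Rank.PRIMARY + 1)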

Gstreamer No RTP

I tried to compile a static binary using the latest GStreamer libs, 1.8.0. I want to take an incoming RTSP stream and write it to a file. The pipeline is:
rtspsrc location=rtsp://X.X.X.X/ protocols=GST_RTSP_LOWER_TRANS_TCP ! queue ! rtph264depay ! h264parse ! flvmux name=\"mux\" streamable=\"true\" ! fakesink
Running the compiled binary results in an error:
rtpbasedepayload gstrtpbasedepayload.c:484:gst_rtp_base_depayload_handle_buffer: error: No RTP format was negotiated.
#include <string.h>
#include <gst/gst.h>

/* The original post omitted these definitions; this is an assumed minimal
 * shape for the data structure, and cb_message / registerGstStaticPlugins
 * are defined elsewhere in the poster's static build. */
typedef struct _CustomData {
  gboolean is_live;
  GstElement *pipeline;
  GMainLoop *loop;
} CustomData;

extern void registerGstStaticPlugins (void);
extern void cb_message (GstBus *bus, GstMessage *msg, CustomData *data);

int main (int argc, char *argv[])
{
  GstElement *pipeline;
  GstBus *bus;
  GstStateChangeReturn ret;
  GMainLoop *main_loop;
  CustomData data;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);
  registerGstStaticPlugins ();

  /* Initialize our data structure */
  memset (&data, 0, sizeof (data));

  /* Build the pipeline */
  pipeline = gst_parse_launch ("rtspsrc location=rtsp://X.X.X.X/ protocols=GST_RTSP_LOWER_TRANS_TCP ! queue ! rtph264depay ! h264parse ! flvmux name=\"mux\" streamable=\"true\" ! fakesink", NULL);
  bus = gst_element_get_bus (pipeline);

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  } else if (ret == GST_STATE_CHANGE_NO_PREROLL) {
    data.is_live = TRUE;
  }

  main_loop = g_main_loop_new (NULL, FALSE);
  data.loop = main_loop;
  data.pipeline = pipeline;

  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (cb_message), &data);
  g_main_loop_run (main_loop);

  /* Free resources */
  g_main_loop_unref (main_loop);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
Complete output: http://pastebin.com/Ln06d0iP
Since the source is RTSP with SDP data, I shouldn't need to set caps manually. The interesting part is that running this pipeline with GStreamer 0.10 works fine.
Fixed it myself. GStreamer doesn't complain about missing plugins if you don't use them directly in the pipeline. Statically registering the udp and rtpmanager plugins solved the problem.
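For reference, a sketch of what that static registration can look like, assuming the plugins were built for static linking (the declarations go at file scope and the registration runs after gst_init; this mirrors the registerGstStaticPlugins() call in the code above):

#include <gst/gst.h>

/* Declarations for statically linked plugins. */
GST_PLUGIN_STATIC_DECLARE (udp);
GST_PLUGIN_STATIC_DECLARE (rtpmanager);

/* Call this after gst_init(); rtspsrc needs these plugins at runtime even
 * though they never appear in the pipeline string. */
static void
registerGstStaticPlugins (void)
{
  GST_PLUGIN_STATIC_REGISTER (udp);
  GST_PLUGIN_STATIC_REGISTER (rtpmanager);
}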

PySNMP: stop and restart a trap receiver

I am trying to create a PySNMP daemon. I want the ability to start, stop, and restart the thread the daemon runs on. I am having trouble cleaning up the socket, the notification receiver, and the transport dispatcher.
I am using a PySNMP v1/v2c trap receiver:
import threading

# Assumed setup: the original post omitted the imports and the snmpEngine instance
from pysnmp.entity import engine, config
from pysnmp.carrier.asynsock.dgram import udp
from pysnmp.entity.rfc3413 import ntfrcv

snmpEngine = engine.SnmpEngine()

class trapReceiverThread(threading.Thread):
    def __init__(self):
        try:
            trapworking = snmpEngine.transportDispatcher.jobsArePending()
        except:
            trapworking = -1
        if trapworking == 0:
            snmpEngine.transportDispatcher.jobStarted(1)
        elif trapworking == -1:
            print "starting"
            # UDP over IPv4, first listening interface/port
            config.addV1System(snmpEngine, 'my-area', 'public')
            # SecurityName <-> CommunityName mapping
            print "d0"
            config.addSocketTransport(
                snmpEngine,
                udp.domainName + (1,),
                udp.UdpTransport().openServerMode(('localhost', 162))
            )
            ntfrcv.NotificationReceiver(snmpEngine, cbFun)
            snmpEngine.transportDispatcher.jobStarted(1)
        else:
            print "Trap receiver already started."

    def run(self):
        try:
            snmpEngine.transportDispatcher.runDispatcher()
        except:
            print "fail"
            snmpEngine.transportDispatcher.closeDispatcher()
            raise
def cbFun(snmpEngine, stateReference, contextEngineId, contextName,
          varBinds, cbCtx):
    transportDomain, transportAddress = snmpEngine.msgAndPduDsp.getTransportInfo(stateReference)
    print('Notification from %s, ContextEngineId "%s", ContextName "%s"' % (
        transportAddress, contextEngineId.prettyPrint(),
        contextName.prettyPrint()))
    for obj in varBinds:
        print obj
trapStatus = threading.Thread(target=trapReceiverThread().run)
trapStatus.daemon = True

def start():
    global trapStatus
    if not trapStatus.isAlive():
        try:
            trapStatus.start()
        except:
            # Threads cannot be restarted, so create a fresh one
            trapStatus = threading.Thread(target=trapReceiverThread().run)
            trapStatus.start()

def stop():
    if snmpEngine.transportDispatcher.jobsArePending():
        print "stopping"
        """
        CODE to stop SocketTransport, transportDispatcher, and NotificationReceiver
        """
        snmpEngine.transportDispatcher.jobFinished(1)
        trapStatus.join()

def restart():
    stop()
    start()
Since the trap receiver is tied to the transportDispatcher, the process can be stopped by finishing job #1, unregistering the receive callback and transport, and thereby releasing the port:
transportDispatcher.jobFinished(1)
transportDispatcher.unregisterRecvCbFun(recvId=None)
transportDispatcher.unregisterTransport(udp.domainName)
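Putting that together, here is a sketch of what the stop() placeholder above might look like (an assumption built from the calls in this answer; note the transport is unregistered with the same udp.domainName + (1,) key used when it was added, which also closes the socket on port 162):

def stop():
    dispatcher = snmpEngine.transportDispatcher
    if dispatcher.jobsArePending():
        print "stopping"
        dispatcher.jobFinished(1)                              # mark our job as done
        dispatcher.unregisterRecvCbFun(recvId=None)            # detach the notification receiver
        dispatcher.unregisterTransport(udp.domainName + (1,))  # release the UDP socket
        trapStatus.join()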