{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "原文： http://gstreamer-devel.966125.n4.nabble.com/Write-opencv-frames-into-gstreamer-rtsp-server-pipeline-td4685382.html"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 一"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
     "# First attempt (from the thread): push OpenCV webcam frames into a\n",
     "# GStreamer RTSP server via appsrc.  Kept as-is for the narrative; the\n",
     "# later cells fix the problems annotated below.\n",
     "from threading import Thread\n",
     "from time import clock  # NOTE(review): time.clock was removed in Python 3.8; use time.perf_counter there\n",
     "\n",
     "import cv2\n",
     "import gi\n",
     "\n",
     "gi.require_version('Gst', '1.0')\n",
     "gi.require_version('GstRtspServer', '1.0')\n",
     "from gi.repository import Gst, GstRtspServer, GObject\n",
     "\n",
     "\n",
     "class SensorFactory(GstRtspServer.RTSPMediaFactory):\n",
     "    # Media factory whose pipeline reads raw frames from an appsrc.\n",
     "    def __init__(self, **properties):\n",
     "        super(SensorFactory, self).__init__(**properties)\n",
     "        # NOTE(review): 'appsrc !video/x-raw' is missing the 'caps=' keyword and\n",
     "        # a space before '!', so these caps are never applied to the appsrc.\n",
     "        # The appsrc 'format' property is also never set to TIME even though\n",
     "        # PTS values are written onto the buffers below.  Fixed in later cells.\n",
     "        self.launch_string = 'appsrc !video/x-raw, width=320,height=240,framerate=30/1' \\\n",
     "                             '! videoconvert ! x264enc speed-preset=ultrafast tune=zerolatency' \\\n",
     "                             '! rtph264pay config-interval=1 name=pay0 pt=96'\n",
     "        self.pipeline = Gst.parse_launch(self.launch_string)\n",
     "        # NOTE(review): index-based lookup is fragile; the later cells name the\n",
     "        # element ('appsrc name=source') and fetch it with get_child_by_name.\n",
     "        self.appsrc = self.pipeline.get_child_by_index(4)\n",
     "\n",
     "    def do_create_element(self, url):\n",
     "        # Called once per client; returning the same pre-built pipeline means\n",
     "        # every client reuses one element tree (factory is set_shared below).\n",
     "        return self.pipeline\n",
     "\n",
     "\n",
     "class GstServer(GstRtspServer.RTSPServer):\n",
     "    # RTSP server exposing the factory above at rtsp://<host>:8554/test.\n",
     "    def __init__(self, **properties):\n",
     "        super(GstServer, self).__init__(**properties)\n",
     "        self.factory = SensorFactory()\n",
     "        self.factory.set_shared(True)\n",
     "        self.get_mount_points().add_factory(\"/test\", self.factory)\n",
     "        self.attach(None)\n",
     "\n",
     "\n",
     "GObject.threads_init()  # deprecated no-op since PyGObject 3.11\n",
     "Gst.init(None)\n",
     "\n",
     "server = GstServer()\n",
     "\n",
     "# Run the GLib main loop on a background thread so the capture loop below\n",
     "# can own the main thread.\n",
     "loop = GObject.MainLoop()\n",
     "th = Thread(target=loop.run)\n",
     "th.start()\n",
     "\n",
     "print('Thread started')\n",
     "\n",
     "cap = cv2.VideoCapture(0)\n",
     "\n",
     "print(cap.isOpened())\n",
     "\n",
     "frame_number = 0\n",
     "\n",
     "fps = 30\n",
     "duration = 1 / fps\n",
     "\n",
     "timestamp = clock()\n",
     "\n",
     "while cap.isOpened():\n",
     "    ret, frame = cap.read()\n",
     "    if ret:\n",
     "\n",
     "        print('Writing buffer')\n",
     "\n",
     "        data = frame.tostring()  # NOTE(review): ndarray.tostring is deprecated; tobytes is the replacement\n",
     "\n",
     "        buf = Gst.Buffer.new_allocate(None, len(data), None)\n",
     "        buf.fill(0, data)\n",
     "        # NOTE(review): GstBuffer.duration and pts are expected in nanoseconds;\n",
     "        # assigning the fps value (30) and a wall-clock delta here is why the\n",
     "        # timestamps are wrong in this first version (see the corrected cells).\n",
     "        buf.duration = fps\n",
     "        timestamp = clock() - timestamp\n",
     "        buf.pts = buf.dts = int(timestamp)\n",
     "        buf.offset = frame_number\n",
     "        frame_number += 1\n",
     "        retval = server.factory.appsrc.emit('push-buffer', buf)\n",
     "        print(retval)\n",
     "\n",
     "        if cv2.waitKey(1) & 0xFF == ord('q'):\n",
     "            break\n",
     "\n",
     "cap.release()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 二\n",
    "\n",
    "I would suggest checking out this example if you haven't done so already:\n",
    "\n",
    "https://github.com/GStreamer/gst-rtsp-server/blob/master/examples/test-appsrc.c\n",
    "\n",
    "It is in C and I apologize I am not familiar with the python interface.\n",
    "Two things I notice is it doesn't look like you are setting the appsrc format to time.   By default it is bytes.  You can change that like this (C++):\n",
    "\n",
    "gst_util_set_object_arg(G_OBJECT(video_appsrc), \"format\", \"time\");\n",
    "\n",
    "Since you are setting the PTS on the frames, you need to do this.  You might be getting invalid/unexpected format errors because of this.\n",
    "I would also recommend setting up pushing frames on the \"media-configured\" signal on the media factory (assuming you are using a live source).  You can stop pushing frames when you get GST_FLOW_NOT_OK when you push your frames.\n",
    "This way you can start pushing frames only when a client connects.  And stop pushing when they disconnect.\n",
    "\n",
    "It also does not look like you are setting the caps on the appsrc.  I am fairly certain you need to do this.\n",
    "You might also want to post what log output and what errors you are getting so people can better help you.\n",
    "\n",
    "Good luck, appsrc rtsp servers are pretty challenging."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "---\n",
    "\n",
    "First and foremost, thank you for answering. You are very helpful.\n",
     "I changed some stuff like you suggested.\n",
    "\n",
    "1) I started using need-data.\n",
    "\n",
    "2) I set the caps on the appsrc\n",
    "\n",
    "3) I changed the format\n",
    "\n",
    "But I can display only the first frame, then I get the following error\n",
    "\n",
    "x264 [error]: baseline profile doesn't support 4:4:4\n",
    "\n",
    "My code is the following."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
     "# Second attempt: the appsrc now has caps and format=TIME and frames are\n",
     "# pushed from the 'need-data' signal.  This version still fails after the\n",
     "# first frame with 'x264 [error]: baseline profile doesn't support 4:4:4'\n",
     "# because raw BGR input reaches x264enc as 4:4:4 -- the final cell forces an\n",
     "# I420 conversion before the encoder.  Annotated; code left unchanged.\n",
     "from threading import Thread\n",
     "from time import clock  # NOTE(review): time.clock was removed in Python 3.8\n",
     "\n",
     "import cv2\n",
     "import gi\n",
     "\n",
     "gi.require_version('Gst', '1.0')\n",
     "gi.require_version('GstRtspServer', '1.0')\n",
     "from gi.repository import Gst, GstRtspServer, GObject\n",
     "\n",
     "\n",
     "class SensorFactory(GstRtspServer.RTSPMediaFactory):\n",
     "    # Media factory that pushes webcam frames into a named appsrc.\n",
     "    def __init__(self, **properties):\n",
     "        super(SensorFactory, self).__init__(**properties)\n",
     "        self.cap = cv2.VideoCapture(0)\n",
     "        self.number_frames = 0\n",
     "        self.fps = 30\n",
     "        # NOTE(review): 1/fps*1000 is milliseconds; GStreamer buffer timestamps\n",
     "        # are nanoseconds (the final cell uses Gst.SECOND instead).\n",
     "        self.duration = 1 / self.fps * 1000\n",
     "        self.timestamp = clock()\n",
     "        self.launch_string = 'appsrc name=source caps=video/x-raw,format=BGR,width=640,height=480,framerate=30/1 ' \\\n",
     "                             '! videoconvert ! x264enc speed-preset=ultrafast tune=zerolatency ' \\\n",
     "                             '! rtph264pay config-interval=1 name=pay0 pt=96'\n",
     "        self.pipeline = Gst.parse_launch(self.launch_string)\n",
     "        self.appsrc = self.pipeline.get_child_by_name('source')\n",
     "        self.appsrc.get_property('caps').fixate()\n",
     "        self.appsrc.set_property('format', Gst.Format.TIME)\n",
     "        # NOTE(review): this is the element's bus; error/state/EOS messages are\n",
     "        # posted on the pipeline bus, so these handlers may never be called.\n",
     "        self.bus = self.appsrc.get_bus()\n",
     "        self.appsrc.connect('need-data', self.on_need_data)\n",
     "        self.bus.connect('message::error', self.on_error)\n",
     "        self.bus.connect('message::state-changed', self.on_status_changed)\n",
     "        self.bus.connect('message::eos', self.on_eos)\n",
     "\n",
     "    def on_need_data(self, src, lenght):  # 'lenght' (sic): size hint from appsrc, unused\n",
     "        if self.cap.isOpened():\n",
     "            ret, frame = self.cap.read()\n",
     "            if ret:\n",
     "                data = frame.tostring()  # NOTE(review): deprecated; tobytes() in the final cell\n",
     "\n",
     "                buf = Gst.Buffer.new_allocate(None, len(data), None)\n",
     "                buf.fill(0, data)\n",
     "                # NOTE(review): duration should be nanoseconds per frame, not the fps value.\n",
     "                buf.duration = self.fps\n",
     "                timestamp = self.number_frames * self.duration\n",
     "                buf.pts = buf.dts = int(timestamp)\n",
     "                buf.offset = self.number_frames\n",
     "                self.number_frames += 1\n",
     "                # NOTE(review): pushes through the global 'server' instead of the\n",
     "                # 'src' argument; the final cell uses src directly.\n",
     "                retval = server.factory.appsrc.emit('push-buffer', buf)\n",
     "                if retval != Gst.FlowReturn.OK:\n",
     "                    print(retval)\n",
     "\n",
     "    def on_status_changed(self, bus, message):\n",
     "        # Log state-change messages observed on the bus.\n",
     "        msg = message.parse_state_changed()\n",
     "        print('status_changed message -> {}'.format(msg))\n",
     "\n",
     "    def on_eos(self, bus, message):\n",
     "        # Log end-of-stream messages.\n",
     "        print('eos message -> {}'.format(message))\n",
     "\n",
     "    def on_error(self, bus, message):\n",
     "        # Log the debug string attached to error messages.\n",
     "        print('error message -> {}'.format(message.parse_error().debug))\n",
     "\n",
     "    def do_create_element(self, url):\n",
     "        # Every client gets the same pre-built pipeline (factory is shared).\n",
     "        return self.pipeline\n",
     "\n",
     "\n",
     "class GstServer(GstRtspServer.RTSPServer):\n",
     "    # RTSP server exposing the factory at rtsp://<host>:8554/test.\n",
     "    def __init__(self, **properties):\n",
     "        super(GstServer, self).__init__(**properties)\n",
     "        self.factory = SensorFactory()\n",
     "        self.factory.set_shared(True)\n",
     "        self.get_mount_points().add_factory(\"/test\", self.factory)\n",
     "        self.attach(None)\n",
     "\n",
     "\n",
     "GObject.threads_init()  # deprecated no-op since PyGObject 3.11\n",
     "Gst.init(None)\n",
     "\n",
     "server = GstServer()\n",
     "\n",
     "loop = GObject.MainLoop()\n",
     "loop.run()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 三\n",
    "\n",
    "I found the solution, I was not setting the offset of the buffer correctly.\n",
    "Here's the code for anyone who's facing the same problem or has a somewhat\n",
    "similar one."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "#!/usr/bin/env python\n",
    "# install gir1.2-gst-rtsp-server-1.0\n",
    "\n",
    "import cv2\n",
    "import gi\n",
    "\n",
    "gi.require_version('Gst', '1.0')\n",
    "gi.require_version('GstRtspServer', '1.0')\n",
    "from gi.repository import Gst, GstRtspServer, GObject\n",
    "\n",
    "\n",
    "class SensorFactory(GstRtspServer.RTSPMediaFactory):\n",
    "    def __init__(self, **properties):\n",
    "        super(SensorFactory, self).__init__(**properties)\n",
    "        self.cap = cv2.VideoCapture(\n",
    "            'rtsp://admin:admin777@10.86.77.23:554/h264/ch1/sub/av_stream')\n",
    "        print(self.cap.isOpened(), self.cap)\n",
    "        self.number_frames = 0\n",
    "        self.fps = 20\n",
    "        self.duration = 1 / self.fps * Gst.SECOND  # duration of a frame in nanoseconds\n",
    "        self.launch_string = 'appsrc name=source is-live=true block=true format=GST_FORMAT_TIME ' \\\n",
    "                             'caps=video/x-raw,format=BGR,width=704,height=576,framerate={}/1 ' \\\n",
    "                             '! videoconvert ! video/x-raw,format=I420 ' \\\n",
    "                             '! x264enc speed-preset=ultrafast tune=zerolatency ' \\\n",
    "                             '! rtph264pay config-interval=1 name=pay0 pt=96'.format(self.fps)\n",
    "\n",
    "    def __del__(self):\n",
    "        self.cap.release()\n",
    "\n",
    "    def on_need_data(self, src, lenght):\n",
    "        if self.cap.isOpened():\n",
    "            ret, frame = self.cap.read()\n",
    "            if ret:\n",
    "                cv2.rectangle(frame, (0, 0), (50, 50), (0, 255, 0), 3)\n",
    "                data = frame.tostring()\n",
    "                buf = Gst.Buffer.new_allocate(None, len(data), None)\n",
    "                buf.fill(0, data)\n",
    "                buf.duration = self.duration\n",
    "                timestamp = self.number_frames * self.duration\n",
    "                buf.pts = buf.dts = int(timestamp)\n",
    "                buf.offset = timestamp\n",
    "                self.number_frames += 1\n",
    "                retval = src.emit('push-buffer', buf)\n",
    "                print(\n",
    "                    'pushed buffer, frame {}, duration {} ns, durations {} s'.\n",
    "                    format(self.number_frames, self.duration,\n",
    "                           self.duration / Gst.SECOND))\n",
    "                if retval != Gst.FlowReturn.OK:\n",
    "                    print(retval)\n",
    "\n",
    "    def do_create_element(self, url):\n",
    "        return Gst.parse_launch(self.launch_string)\n",
    "\n",
    "    def do_configure(self, rtsp_media):\n",
    "        self.number_frames = 0\n",
    "        appsrc = rtsp_media.get_element().get_child_by_name('source')\n",
    "        appsrc.connect('need-data', self.on_need_data)\n",
    "\n",
    "\n",
    "class GstServer(GstRtspServer.RTSPServer):\n",
    "    def __init__(self, **properties):\n",
    "        super(GstServer, self).__init__(**properties)\n",
    "        self.factory = SensorFactory()\n",
    "        self.factory.set_shared(True)\n",
    "        self.get_mount_points().add_factory(\"/test\", self.factory)\n",
    "        self.attach(None)\n",
    "\n",
    "\n",
    "GObject.threads_init()\n",
    "Gst.init(None)\n",
    "\n",
    "server = GstServer()\n",
    "\n",
    "loop = GObject.MainLoop()\n",
    "loop.run()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 从pipeline中获取数据\n",
    "\n",
    "## [地址](http://www.itkeyword.com/doc/9781951603792297x436/reading-a-h264-rtsp-stream-into-python-and-opencv)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import gi\n",
    "gi.require_version('Gst', '1.0')\n",
    "from gi.repository import GObject, Gst\n",
    "import numpy as np\n",
    "import cv2\n",
    "\n",
    "GObject.threads_init()\n",
    "Gst.init(None)\n",
    "\n",
    "\n",
    "def YUV_stream2RGB_frame(data):\n",
    "\n",
    "    w = 640\n",
    "    h = 368\n",
    "    size = w * h\n",
    "\n",
    "    stream = np.fromstring(data,\n",
    "                           np.uint8)  #convert data form string to numpy array\n",
    "\n",
    "    #Y bytes  will start form 0 and end in size-1\n",
    "    y = stream[0:size].reshape(\n",
    "        h, w)  # create the y channel same size as the image\n",
    "\n",
    "    #U bytes will start from size and end at size+size/4 as its size = framesize/4\n",
    "    u = stream[size:(size + (size / 4))].reshape(\n",
    "        (h / 2), (w / 2))  # create the u channel  itssize=framesize/4\n",
    "\n",
    "    #up-sample the u channel to be the same size as the y channel and frame using pyrUp func in opencv2\n",
    "    u_upsize = cv2.pyrUp(u)\n",
    "\n",
    "    #do the same for v channel\n",
    "    v = stream[(size + (size / 4)):].reshape((h / 2), (w / 2))\n",
    "    v_upsize = cv2.pyrUp(v)\n",
    "\n",
    "    #create the 3-channel frame using cv2.merge func watch for the order\n",
    "    yuv = cv2.merge((y, u_upsize, v_upsize))\n",
    "\n",
    "    #Convert TO RGB format\n",
    "    rgb = cv2.cvtColor(yuv, cv2.cv.CV_YCrCb2RGB)\n",
    "\n",
    "    #show frame\n",
    "    cv2.imshow(\"show\", rgb)\n",
    "    cv2.waitKey(5)\n",
    "\n",
    "\n",
    "def on_new_buffer(appsink):\n",
    "\n",
    "    sample = appsink.emit('pull-sample')\n",
    "    #get the buffer\n",
    "    buf = sample.get_buffer()\n",
    "    #extract data stream as string\n",
    "    data = buf.extract_dup(0, buf.get_size())\n",
    "    YUV_stream2RGB_frame(data)\n",
    "    return False\n",
    "\n",
    "\n",
    "def Init():\n",
    "\n",
    "    CLI = \"rtspsrc name=src location=rtsp://192.168.1.20:554/live/ch01_0 latency=10   !decodebin ! appsink name=sink\"\n",
    "\n",
    "    #simplest way to create a pipline\n",
    "    pipline = Gst.parse_launch(CLI)\n",
    "\n",
    "    #getting the sink by its name set in CLI\n",
    "    appsink = pipline.get_by_name(\"sink\")\n",
    "\n",
    "    #setting some important properties of appsnik\n",
    "    appsink.set_property(\"max-buffers\",\n",
    "                         20)  # prevent the app to consume huge part of memory\n",
    "    appsink.set_property('emit-signals', True)  #tell sink to emit signals\n",
    "    appsink.set_property('sync',\n",
    "                         False)  #no sync to make decoding as fast as possible\n",
    "\n",
    "    appsink.connect('new-sample',\n",
    "                    on_new_buffer)  #connect signal to callable func\n",
    "\n",
    "\n",
    "def run():\n",
    "    pipline.set_state(Gst.State.PLAYING)\n",
    "    GObject.MainLoop.run()\n",
    "\n",
    "\n",
    "Init()\n",
    "run()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.3"
  },
  "toc": {
   "base_numbering": 1,
   "nav_menu": {},
   "number_sections": true,
   "sideBar": true,
   "skip_h1_title": false,
   "title_cell": "Table of Contents",
   "title_sidebar": "Contents",
   "toc_cell": false,
   "toc_position": {},
   "toc_section_display": true,
   "toc_window_display": true
  },
  "varInspector": {
   "cols": {
    "lenName": 16,
    "lenType": 16,
    "lenVar": 40
   },
   "kernels_config": {
    "python": {
     "delete_cmd_postfix": "",
     "delete_cmd_prefix": "del ",
     "library": "var_list.py",
     "varRefreshCmd": "print(var_dic_list())"
    },
    "r": {
     "delete_cmd_postfix": ") ",
     "delete_cmd_prefix": "rm(",
     "library": "var_list.r",
     "varRefreshCmd": "cat(var_dic_list()) "
    }
   },
   "types_to_exclude": [
    "module",
    "function",
    "builtin_function_or_method",
    "instance",
    "_Feature"
   ],
   "window_display": false
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
