Create rtsp stream based on opencv images in python

I once did a similar thing: reading frames from an RTSP server and processing them within OpenCV. For some reason I could not use VideoCapture of cv2, it did not work. So my solution was to use ffmpeg to convert the RTSP input into a stream of bitmaps; for my problem it was OK to read the grayscale image with 1 byte per pixel.

The basic implementation idea was:

  1. Running ffmpeg process, which is my start_reading() method;
  2. Having a thread which reads bytes from ffmpeg’s stdout frame by frame within a pipe;
  3. Having a property of the class which returns the last frame from ffmpeg. Note that this is asynchronous reading, as you can see from the code, but it worked fine for me;

Here’s my code (it’s python3 but should be easily convertible to 2.7).

import subprocess
import shlex
import time
from threading import Thread
import os
import numpy as np
import logging


class FFMPEGVideoReader(object):
    """Read raw grayscale frames from an RTSP stream via an ffmpeg subprocess.

    ffmpeg is asked for raw 8-bit grayscale video on stdout; a background
    thread accumulates one frame (width*height bytes) at a time, and the
    ``frame`` property hands the newest complete frame to the caller as a
    numpy array of shape (height, width).
    """

    # Seconds to wait for a fresh frame before restarting ffmpeg.
    # (The original code referenced self.MAX_FRAME_WAIT without ever
    # defining it, which raised AttributeError on a stalled stream.)
    MAX_FRAME_WAIT = 5.0

    def __init__(self, rtsp_url: str, width: int = 320, height: int = 180) -> None:
        super().__init__()
        self.rtsp_url = rtsp_url
        self.width = width
        self.height = height
        self.process = None
        # Raw bytes of the most recent complete frame; None until the
        # reader thread has delivered one.
        self._last_chunk = None
        self.frame_number = -1
        self._last_frame_read = -1
        # Start the reader thread last so it sees fully-initialized state.
        self._stdout_reader = Thread(target=self._receive_output, name="stdout_reader", daemon=True)
        self._stdout_reader.start()

    def start_reading(self):
        """(Re)start the ffmpeg subprocess that feeds raw frames to stdout."""
        if self.process is not None:
            self.process.kill()
            self.process = None
        # Customize your input/output params here
        command = 'ffmpeg -i {rtsp} -f rawvideo -r 4 -pix_fmt gray -vf scale={width}:{height} -'.format(
            rtsp=self.rtsp_url, width=self.width, height=self.height)
        logging.debug('Opening ffmpeg process with command "%s"', command)
        args = shlex.split(command)
        # subprocess.DEVNULL instead of an open(os.devnull) handle that the
        # original leaked on every restart.
        self.process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)

    def _receive_output(self):
        """Background thread: read exactly one frame's worth of bytes at a time.

        Pipe reads may return fewer bytes than requested, so accumulate until
        a complete frame is available; a short/empty read means ffmpeg exited
        (EOF) and the partial data is discarded rather than published.
        """
        chunksize = self.width * self.height

        while True:
            while self.process is None:
                time.sleep(1)
            buf = b''
            while len(buf) < chunksize:
                data = self.process.stdout.read(chunksize - len(buf))
                if not data:  # EOF: ffmpeg terminated
                    break
                buf += data
            if len(buf) == chunksize:
                self._last_chunk = buf
                self.frame_number += 1
            else:
                # Incomplete frame (stream ended) -- back off briefly so we
                # don't spin while waiting for start_reading() to restart.
                time.sleep(0.1)

    @property
    def frame(self):
        """Block until a frame newer than the last one returned is available.

        Returns a (height, width) uint8 numpy array. If no new frame arrives
        within MAX_FRAME_WAIT seconds, the ffmpeg process is restarted and
        waiting resumes.
        """
        started = time.time()
        while self._last_frame_read == self.frame_number or self._last_chunk is None:
            time.sleep(0.125)  # Put your FPS threshold here
            if time.time() - started > self.MAX_FRAME_WAIT:
                logging.warning('Reloading ffmpeg process...')
                self.start_reading()
                started = time.time()
        self._last_frame_read = self.frame_number

        vec = np.frombuffer(self._last_chunk, dtype=np.dtype('uint8'))
        return np.reshape(vec, (self.height, self.width))


if __name__ == '__main__':
    # Demo driver: continuously grab the newest frame from the camera and
    # dump it to disk as a .npy file.
    logging.basicConfig(level=logging.DEBUG)
    reader = FFMPEGVideoReader('rtsp://192.168.1.10:554/onvif2', width=320, height=180)
    reader.start_reading()

    while True:
        print('update')
        np.save('frame.npy', reader.frame)

If you need color images, then you need to change the pix_fmt in the ffmpeg command, read (width * height * channels) bytes per frame, and then reshape the buffer correctly with one more axis.

Leave a Comment

Hata!: SQLSTATE[HY000] [1045] Access denied for user 'divattrend_liink'@'localhost' (using password: YES)