I can stream videos frame by frame to an OpenGL Texture2D OK in python (pi3d module, example in pi3d_demos/VideoWalk.py) but I've noticed that it gradually leaks memory. Below is a stripped down version of the code that shows the problem.
Can anyone see where I'm leaking? The memory seems to be recovered when python stops. I've tried explicitly setting things to None
or calling the garbage collector manually.
#!/usr/bin/python
import os
import numpy as np
import subprocess
import threading
import time
import json
def get_dimensions(video_path):
probe_cmd = f'ffprobe -v error -show_entries stream=width,height,avg_frame_rate -of json "{video_path}"'
probe_result = subprocess.check_output(probe_cmd, shell=True, text=True)
video_info_list = [vinfo for vinfo in json.loads(probe_result)['streams'] if 'width' in vinfo]
if len(video_info_list) > 0:
video_info = video_info_list[0] # use first if more than one!
return(video_info['width'], video_info['height'])
else:
return None
class VideoStreamer:
def __init__(self, video_path):
self.flag = False # use to signal new texture
self.kill_thread = False
self.command = [ 'ffmpeg', '-i', video_path, '-f', 'image2pipe',
'-pix_fmt', 'rgb24', '-vcodec', 'rawvideo', '-']
dimensions = get_dimensions(video_path)
if dimensions is not None:
(self.W, self.H) = dimensions
self.P = 3
self.image = np.zeros((self.H, self.W, self.P), dtype='uint8')
self.t = threading.Thread(target=self.pipe_thread)
self.t.start()
else: # couldn't get dimensions for some reason - assume not able to read video
self.W = 240
self.H = 180
self.P = 3
self.image = np.zeros((self.H, self.W, self.P), dtype='uint8')
self.t = None
def pipe_thread(self):
pipe = None
while not self.kill_thread:
st_tm = time.time()
if pipe is None:
pipe = subprocess.Popen(self.command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=-1)
self.image = np.frombuffer(pipe.stdout.read(self.H * self.W * self.P), dtype='uint8') # overwrite array
pipe.stdout.flush() # presumably nothing else has arrived since read()
pipe.stderr.flush() # ffmpeg sends commentary to stderr
if len(self.image) < self.H * self.W * self.P: # end of video, reload
pipe.terminate()
pipe = None
else:
self.image.shape = (self.H, self.W, self.P)
self.flag = True
step = time.time() - st_tm
time.sleep(max(0.04 - step, 0.0)) # adding fps info to ffmpeg doesn't seem to have any effect
if pipe is not None:
pipe.terminate()
pipe = None
def kill(self):
self.kill_thread = True
if self.t is not None:
self.t.join()
vs = None
try:
while True:
for (path, _, videos) in os.walk("/home/patrick/Pictures/videos"):
for video in videos:
print(video)
os.system("free") # shows gradually declining memory available
vs = VideoStreamer(os.path.join(path, video))
for i in range(500):
tries = 0
while not vs.flag and tries < 5:
time.sleep(0.001)
tries += 1
# at this point vs.image is a numpy array HxWxP bytes
vs.flag = False
vs.kill()
except KeyboardInterrupt:
if vs is not None:
vs.kill()
os.system("free")
OK there were two factors that confused me. The first was a bug in pcmanfm
used by Wayland on bookworm on the Raspberry Pi (at least) that was hanging onto image data.
The second was due to the timing of releasing stream handles and this latter can be fixed by using a context manager (i.e. with Popen(...stdout=PIPE) as pipe:
as probably should always be done but isn't often shown explicitly for subprocess.Popen
. So as above but the threaded part:
def pipe_thread(self):
while not self.kill_thread:
with subprocess.Popen(self.command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=-1) as pipe:
while pipe.poll() is None and not self.kill_thread:
st_tm = time.time()
self.flag = False
self.image = np.frombuffer(pipe.stdout.read(self.H * self.W * self.P), dtype='uint8') # overwrite array
self.image.shape = (self.H, self.W, self.P)
self.flag = True
step = time.time() - st_tm
time.sleep(max(0.04 - step, 0.0)) # adding fps info to ffmpeg doesn't seem to have any effect