Add fps counter to dual camera example
jetsonhacks committed Mar 24, 2020
1 parent 32c8681 commit bacee3d
Showing 2 changed files with 243 additions and 1 deletion.
2 changes: 1 addition & 1 deletion dual_camera.py
@@ -173,7 +173,7 @@ def start_cameras():
 
             _ , left_image=left_camera.read()
             _ , right_image=right_camera.read()
-            camera_images = np.vstack((left_image, right_image))
+            camera_images = np.hstack((left_image, right_image))
             cv2.imshow("CSI Cameras", camera_images)
 
             # This also acts as
242 changes: 242 additions & 0 deletions instrumented/dual_camera_fps.py
@@ -0,0 +1,242 @@
# MIT License
# Copyright (c) 2019,2020 JetsonHacks
# See license
# A very simple code snippet
# Using two CSI cameras (such as the Raspberry Pi Version 2) connected to an
# NVIDIA Jetson Nano Developer Kit (Rev B01) using OpenCV
# Drivers for the camera and OpenCV are included in the base image in JetPack 4.3+

# This script will open a window and place the camera stream from each camera in it,
# arranged horizontally.
# The camera streams are each read in their own thread, as when done sequentially there
# is a noticeable lag
# For better performance, the next step would be to experiment with having the window display
# in a separate thread

import cv2
import threading
import numpy as np

# gstreamer_pipeline returns a GStreamer pipeline for capturing from the CSI camera
# Flip the image by setting the flip_method (most common values: 0 and 2)
# display_width and display_height determine the size of each camera pane in the window on the screen
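# With the defaults in gstreamer_pipeline() below, the generated pipeline string is:
#   nvarguscamerasrc sensor-id=0 sensor-mode=3 ! video/x-raw(memory:NVMM),
#   width=(int)1280, height=(int)720, format=(string)NV12, framerate=(fraction)60/1 !
#   nvvidconv flip-method=0 ! video/x-raw, width=(int)1280, height=(int)720,
#   format=(string)BGRx ! videoconvert ! video/x-raw, format=(string)BGR ! appsink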

left_camera = None
right_camera = None

# Let's use a repeating Timer for counting FPS
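# threading.Timer normally fires its callback once; overriding run() below makes it
# fire every self.interval seconds until cancel() is called.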
class RepeatTimer(threading.Timer):
    def run(self):
        while not self.finished.wait(self.interval):
            self.function(*self.args, **self.kwargs)


class CSI_Camera:

    def __init__(self):
        # Initialize instance variables
        # OpenCV video capture element
        self.video_capture = None
        # The last captured image from the camera
        self.frame = None
        self.grabbed = False
        # The thread where the video capture runs
        self.read_thread = None
        self.read_lock = threading.Lock()
        self.running = False
        self.fps_timer = None
        self.frames_read = 0
        self.frames_displayed = 0
        self.last_frames_read = 0
        self.last_frames_displayed = 0


    def open(self, gstreamer_pipeline_string):
        try:
            self.video_capture = cv2.VideoCapture(
                gstreamer_pipeline_string, cv2.CAP_GSTREAMER
            )

        except RuntimeError:
            self.video_capture = None
            print("Unable to open camera")
            print("Pipeline: " + gstreamer_pipeline_string)
            return
        # Grab the first frame to start the video capturing
        self.grabbed, self.frame = self.video_capture.read()

    def start(self):
        if self.running:
            print('Video capturing is already running')
            return None
        # create a thread to read the camera image
        if self.video_capture is not None:
            self.running = True
            self.read_thread = threading.Thread(target=self.updateCamera)
            self.read_thread.start()
        return self

    def stop(self):
        self.running = False
        if self.read_thread is not None:
            self.read_thread.join()

    def updateCamera(self):
        # This is the thread to read images from the camera
        while self.running:
            try:
                grabbed, frame = self.video_capture.read()
                with self.read_lock:
                    self.grabbed = grabbed
                    self.frame = frame
                    self.frames_read += 1
            except RuntimeError:
                print("Could not read image from camera")
        # FIX ME - stop and cleanup thread
        # Something bad happened


    def read(self):
        with self.read_lock:
            frame = self.frame.copy()
            grabbed = self.grabbed
        return grabbed, frame
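
    # Note on threading: read() and updateCamera() run on different threads, so
    # read_lock serializes access to self.frame and self.grabbed, and read() hands
    # back a copy of the frame so the caller can draw on it without racing the
    # capture thread.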

    def release(self):
        if self.video_capture is not None:
            self.video_capture.release()
            self.video_capture = None
        # Kill the timer
        if self.fps_timer is not None:
            self.fps_timer.cancel()
            self.fps_timer.join()
        # Now kill the thread
        if self.read_thread is not None:
            self.read_thread.join()
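
    # FPS bookkeeping: the capture thread increments frames_read for every frame pulled
    # from the camera, and the main loop increments frames_displayed for every frame shown.
    # Once a second the RepeatTimer calls update_fps_stats(), which copies the counts into
    # last_frames_read / last_frames_displayed and resets them, so the last_* values are
    # per-second rates.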

    def update_fps_stats(self):
        self.last_frames_read = self.frames_read
        self.last_frames_displayed = self.frames_displayed
        # Start the next measurement cycle
        self.frames_read = 0
        self.frames_displayed = 0

    def start_counting_fps(self):
        self.fps_timer = RepeatTimer(1.0, self.update_fps_stats)
        self.fps_timer.start()


# Frame rate and sensor mode for the CSI camera on the Nano are currently set through the GStreamer pipeline.
# Here we directly select sensor_mode 3 (1280x720, 59.9999 fps)
def gstreamer_pipeline(
    sensor_id=0,
    sensor_mode=3,
    capture_width=1280,
    capture_height=720,
    display_width=1280,
    display_height=720,
    framerate=60,
    flip_method=0,
):
    return (
        "nvarguscamerasrc sensor-id=%d sensor-mode=%d ! "
        "video/x-raw(memory:NVMM), "
        "width=(int)%d, height=(int)%d, "
        "format=(string)NV12, framerate=(fraction)%d/1 ! "
        "nvvidconv flip-method=%d ! "
        "video/x-raw, width=(int)%d, height=(int)%d, format=(string)BGRx ! "
        "videoconvert ! "
        "video/x-raw, format=(string)BGR ! appsink"
        % (
            sensor_id,
            sensor_mode,
            capture_width,
            capture_height,
            framerate,
            flip_method,
            display_width,
            display_height,
        )
    )

# Simple draw label on an image; in our case, the video frame
def draw_label(cv_image, label_text, label_position):
    font_face = cv2.FONT_HERSHEY_SIMPLEX
    scale = 0.5
    color = (255, 255, 255)
    thickness = 1
    # You can get the size of the string with cv2.getTextSize here
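    # e.g.: (text_width, text_height), baseline = cv2.getTextSize(label_text, font_face, scale, thickness)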
    cv2.putText(cv_image, label_text, label_position, font_face, scale, color, thickness, cv2.LINE_AA)

# Read a frame from the camera, and draw the FPS on the image if desired
# Return an image
def read_camera(camera, display_fps):
    _, camera_image = camera.read()
    if display_fps:
        draw_label(camera_image, "Frames Displayed (PS): " + str(camera.last_frames_displayed), (10, 20))
        draw_label(camera_image, "Frames Read (PS): " + str(camera.last_frames_read), (10, 40))
    return camera_image


def start_cameras():
    left_camera = CSI_Camera()
    left_camera.open(
        gstreamer_pipeline(
            sensor_id=0,
            sensor_mode=3,
            flip_method=0,
            display_height=540,
            display_width=960,
        )
    )
    left_camera.start()

    right_camera = CSI_Camera()
    right_camera.open(
        gstreamer_pipeline(
            sensor_id=1,
            sensor_mode=3,
            flip_method=0,
            display_height=540,
            display_width=960,
        )
    )
    right_camera.start()

    cv2.namedWindow("CSI Cameras", cv2.WINDOW_AUTOSIZE)

    if (
        not left_camera.video_capture.isOpened()
        or not right_camera.video_capture.isOpened()
    ):
        # Cameras did not open, or no camera attached
        print("Unable to open any cameras")
        # Clean up and exit
        left_camera.stop()
        left_camera.release()
        right_camera.stop()
        right_camera.release()
        raise SystemExit(0)
    try:
        # Start counting the number of frames read and displayed
        left_camera.start_counting_fps()
        right_camera.start_counting_fps()
        while cv2.getWindowProperty("CSI Cameras", 0) >= 0:
            left_image = read_camera(left_camera, True)
            right_image = read_camera(right_camera, True)
            # We place both images side by side to show in the window
            camera_images = np.hstack((left_image, right_image))
            cv2.imshow("CSI Cameras", camera_images)
            left_camera.frames_displayed += 1
            right_camera.frames_displayed += 1
            # This also acts as a frame limiter
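            # (waitKey(25) blocks for up to 25 ms, so the loop runs at most ~40 times per second)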
            keyCode = cv2.waitKey(25) & 0xFF
            # Stop the program on the ESC key
            if keyCode == 27:
                break

    finally:
        left_camera.stop()
        left_camera.release()
        right_camera.stop()
        right_camera.release()
        cv2.destroyAllWindows()


if __name__ == "__main__":
    start_cameras()
