First successful camera feed

This commit is contained in:
Tempest 2025-12-01 09:42:34 +07:00
parent da4f7073dc
commit 7af789a1d6

View File

@ -1,85 +1,51 @@
import sys
import threading
import gi
import logging
from flask import Flask, Response, render_template_string

# GStreamer dependencies
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GLib

# --- Configuration ---
CAMERA_1_SERIAL = "40650847"
CAMERA_2_SERIAL = "40653314"
STREAM_WIDTH = 1920      # per-camera frame size fed into the stream muxer
STREAM_HEIGHT = 1080
TILED_WIDTH = 1920       # resolution of the combined (tiled) web output
TILED_HEIGHT = 1080

# --- Flask Setup ---
app = Flask(__name__)
# Latest encoded frame produced by the GStreamer thread; read by Flask handlers.
frame_buffer = None
# Guards frame_buffer across the GStreamer and Flask threads.
buffer_lock = threading.Lock()
def discover_cameras():
    """Enumerate all attached Basler cameras via pypylon.

    Returns:
        list[str]: the serial numbers of every detected device.

    Exits the process with status 1 when no camera is present, since the
    pipeline cannot be built without at least one source.
    """
    factory = pylon.TlFactory.GetInstance()
    serials = [dev.GetSerialNumber() for dev in factory.EnumerateDevices()]
    if not serials:
        print("CRITICAL ERROR: No Basler cameras detected via Pylon.")
        sys.exit(1)
    print(f"Discovered {len(serials)} cameras: {serials}")
    return serials
class GStreamerPipeline(threading.Thread): class GStreamerPipeline(threading.Thread):
def __init__(self, camera_serials): def __init__(self):
super().__init__() super().__init__()
self.camera_serials = camera_serials
self.loop = GLib.MainLoop() self.loop = GLib.MainLoop()
self.pipeline = None self.pipeline = None
def calculate_grid(self, num_cams):
"""
Calculates the most square-like grid (rows, cols) for N cameras.
"""
rows = int(math.ceil(math.sqrt(num_cams)))
cols = int(math.ceil(num_cams / rows))
return rows, cols
def run(self): def run(self):
Gst.init(None) Gst.init(None)
self.build_dynamic_pipeline() self.build_pipeline()
self.pipeline.set_state(Gst.State.PLAYING) self.pipeline.set_state(Gst.State.PLAYING)
try: try:
self.loop.run() self.loop.run()
except Exception as e: except Exception as e:
print(f"Error in GStreamer loop: {e}") print(f"Error: {e}")
finally: finally:
self.pipeline.set_state(Gst.State.NULL) self.pipeline.set_state(Gst.State.NULL)
def on_new_sample(self, sink): def on_new_sample(self, sink):
"""
Callback: grabs the already-encoded JPEG from the pipeline.
"""
sample = sink.emit("pull-sample") sample = sink.emit("pull-sample")
if not sample: if not sample: return Gst.FlowReturn.ERROR
return Gst.FlowReturn.ERROR
buffer = sample.get_buffer() buffer = sample.get_buffer()
success, map_info = buffer.map(Gst.MapFlags.READ) success, map_info = buffer.map(Gst.MapFlags.READ)
if not success: if not success: return Gst.FlowReturn.ERROR
return Gst.FlowReturn.ERROR
global frame_buffer global frame_buffer
with buffer_lock: with buffer_lock:
@ -88,65 +54,56 @@ class GStreamerPipeline(threading.Thread):
buffer.unmap(map_info) buffer.unmap(map_info)
return Gst.FlowReturn.OK return Gst.FlowReturn.OK
def build_dynamic_pipeline(self): def build_pipeline(self):
num_cams = len(self.camera_serials) # FIX: Added 'compute-hw=1' to nvvideoconvert.
rows, cols = self.calculate_grid(num_cams) # This forces the conversion to happen on the GPU (CUDA) instead of the VIC,
# which fixes the "RGB/BGR not supported" error.
print(f"Building pipeline for {num_cams} cameras (Grid: {cols}x{rows})") # Source 1
src1 = (
# 1. Construct Sources f"pylonsrc device-serial-number={CAMERA_1_SERIAL} "
# We need to build N pylonsrc elements, each linking to a specific pad on the muxer. "cam::TriggerMode=Off cam::AcquisitionFrameRateEnable=true cam::AcquisitionFrameRate=30.0 ! "
sources_str = "" "videoconvert ! "
for i, serial in enumerate(self.camera_serials): "nvvideoconvert compute-hw=1 ! "
# We explicitly link to muxer sink pad: m.sink_0, m.sink_1, etc. "m.sink_0 "
sources_str += (
f"pylonsrc camera-device-serial-number={serial} ! "
f"videoconvert ! nvvideoconvert ! m.sink_{i} "
) )
# 2. Configure Muxer # Source 2
# batch-size must match number of cameras src2 = (
muxer_str = ( f"pylonsrc device-serial-number={CAMERA_2_SERIAL} "
f"nvstreammux name=m batch-size={num_cams} " "cam::TriggerMode=Off cam::AcquisitionFrameRateEnable=true cam::AcquisitionFrameRate=30.0 ! "
f"width={STREAM_WIDTH} height={STREAM_HEIGHT} live-source=1 " "videoconvert ! "
"nvvideoconvert compute-hw=1 ! "
"m.sink_1 "
) )
# 3. Configure Tiler # Muxer -> Tiler -> Output
# This combines the batch into one 2D image processing = (
tiler_str = ( f"nvstreammux name=m batch-size=2 width={STREAM_WIDTH} height={STREAM_HEIGHT} live-source=1 ! "
f"nvmultistreamtiler width={WEB_OUTPUT_WIDTH} height={WEB_OUTPUT_HEIGHT} " f"nvmultistreamtiler width={TILED_WIDTH} height={TILED_HEIGHT} rows=2 columns=1 ! "
f"rows={rows} columns={cols} " "nvvideoconvert ! "
) "video/x-raw, format=I420 ! "
# 4. Final Processing (Convert -> JPEG -> AppSink)
output_str = (
"nvvideoconvert ! video/x-raw, format=I420 ! "
"jpegenc quality=85 ! " "jpegenc quality=85 ! "
"appsink name=sink emit-signals=True sync=False max-buffers=1 drop=True" "appsink name=sink emit-signals=True sync=False max-buffers=1 drop=True"
) )
# Combine all parts pipeline_str = f"{src1} {src2} {processing}"
full_pipeline_str = f"{sources_str} {muxer_str} ! {tiler_str} ! {output_str}"
print(f"Pipeline String:\n{full_pipeline_str}") print(f"Launching Pipeline (GPU Mode)...")
self.pipeline = Gst.parse_launch(pipeline_str)
self.pipeline = Gst.parse_launch(full_pipeline_str)
# Link callback
appsink = self.pipeline.get_by_name("sink") appsink = self.pipeline.get_by_name("sink")
appsink.connect("new-sample", self.on_new_sample) appsink.connect("new-sample", self.on_new_sample)
# --- Flask ---
@app.route('/')
def index():
    """Serve a minimal dark page that embeds the MJPEG stream."""
    return render_template_string('''
    <html><body style="background:#111; color:white; text-align:center;">
    <h1>Basler Feed</h1>
    <img src="{{ url_for('video_feed') }}" style="border: 2px solid green; max-width:90%;">
    </body></html>
    ''')
@app.route('/video_feed') @app.route('/video_feed')
@ -157,21 +114,12 @@ def video_feed():
if frame_buffer: if frame_buffer:
yield (b'--frame\r\n' yield (b'--frame\r\n'
b'Content-Type: image/jpeg\r\n\r\n' + frame_buffer + b'\r\n') b'Content-Type: image/jpeg\r\n\r\n' + frame_buffer + b'\r\n')
GLib.usleep(15000) # ~60fps poll cap GLib.usleep(33000)
return Response(generate(), mimetype='multipart/x-mixed-replace; boundary=frame') return Response(generate(), mimetype='multipart/x-mixed-replace; boundary=frame')
# --- Main ---
if __name__ == "__main__":
    # Run the capture pipeline on a daemon thread so the process exits with Flask.
    gst_thread = GStreamerPipeline()
    gst_thread.daemon = True
    gst_thread.start()

    # threaded=True lets Flask serve the MJPEG stream to multiple clients.
    app.run(host='0.0.0.0', port=5000, debug=False, threaded=True)