From 35ddf9f844f856d25bc0f2ca37b84fb39f2260ec Mon Sep 17 00:00:00 2001 From: Tempest Date: Mon, 1 Dec 2025 15:25:57 +0700 Subject: [PATCH] 3 cameras confirmed available --- src/detectionSoftware/run.py | 247 +++++++++++++++++++---------------- 1 file changed, 138 insertions(+), 109 deletions(-) diff --git a/src/detectionSoftware/run.py b/src/detectionSoftware/run.py index 1c2ba497..fe5e0e89 100644 --- a/src/detectionSoftware/run.py +++ b/src/detectionSoftware/run.py @@ -3,13 +3,16 @@ import subprocess import threading import time import gc -from flask import Flask, Response, render_template_string +import json +from flask import Flask, Response, render_template_string, jsonify -# --- PART 1: ADAPTIVE DETECTION --- +# --- CONFIGURATION --- +TARGET_NUM_CAMS = 3 +DEFAULT_W = 1280 +DEFAULT_H = 720 + +# --- PART 1: DETECTION --- def scan_connected_cameras(): - """ - Returns a list of serials ['400...', '400...'] and their config. - """ print("--- Scanning for Basler Cameras ---") detection_script = """ import sys @@ -17,83 +20,57 @@ try: from pypylon import pylon tl_factory = pylon.TlFactory.GetInstance() devices = tl_factory.EnumerateDevices() - if not devices: print("NONE") else: - # Collect all serials serials = [d.GetSerialNumber() for d in devices] - - # Open the first one just to check capabilities/resolution cam = pylon.InstantCamera(tl_factory.CreateDevice(devices[0])) cam.Open() - - # Check Binning support - supported = 0 try: cam.BinningHorizontal.Value = 2 cam.BinningVertical.Value = 2 + w = cam.Width.GetValue() + h = cam.Height.GetValue() cam.BinningHorizontal.Value = 1 cam.BinningVertical.Value = 1 supported = 1 except: - pass - - w = cam.Width.GetValue() - h = cam.Height.GetValue() + w = cam.Width.GetValue() + h = cam.Height.GetValue() + supported = 0 cam.Close() - - # Output format: SERIAL1,SERIAL2|WIDTH|HEIGHT|BINNING_SUPPORTED print(f"{','.join(serials)}|{w}|{h}|{supported}") - -except Exception as e: - print(f"ERROR:{e}") +except Exception: + 
+    print("NONE")
 """
{WEB_WIDTH}x{WEB_HEIGHT}") +print(f"LAYOUT: {TARGET_NUM_CAMS} Slots | Detected: {ACTUAL_CAMS_COUNT} Cams") # --- FLASK & GSTREAMER --- import gi @@ -103,6 +80,9 @@ from gi.repository import Gst, GLib app = Flask(__name__) frame_buffer = None buffer_lock = threading.Lock() +current_fps = 0.0 +frame_count = 0 +start_time = time.time() class GStreamerPipeline(threading.Thread): def __init__(self): @@ -122,9 +102,17 @@ class GStreamerPipeline(threading.Thread): self.pipeline.set_state(Gst.State.NULL) def on_new_sample(self, sink): + global frame_count, start_time, current_fps sample = sink.emit("pull-sample") if not sample: return Gst.FlowReturn.ERROR + frame_count += 1 + # Calculate FPS every 30 frames + if frame_count % 30 == 0: + elapsed = time.time() - start_time + current_fps = 30 / elapsed if elapsed > 0 else 0 + start_time = time.time() + buffer = sample.get_buffer() success, map_info = buffer.map(Gst.MapFlags.READ) if not success: return Gst.FlowReturn.ERROR @@ -137,21 +125,9 @@ class GStreamerPipeline(threading.Thread): return Gst.FlowReturn.OK def build_pipeline(self): - # Handle 0 Cameras gracefully (Placeholder) - if NUM_CAMS == 0: - print("Launching Placeholder Pipeline (No Cameras)...") - # Uses 'videotestsrc' to generate a test pattern so the web UI works - pipeline_str = ( - f"videotestsrc pattern=smpte ! video/x-raw,width={WEB_WIDTH},height={WEB_HEIGHT},framerate=30/1 ! " - "jpegenc ! appsink name=sink emit-signals=True sync=False max-buffers=1 drop=True" - ) - self.pipeline = Gst.parse_launch(pipeline_str) - appsink = self.pipeline.get_by_name("sink") - appsink.connect("new-sample", self.on_new_sample) - return - - # Settings - settings = ( + # 1. 
CAMERA SETTINGS + # Note: We run cameras at 60 FPS for internal stability + cam_settings = ( "cam::TriggerMode=Off " "cam::AcquisitionFrameRateEnable=true cam::AcquisitionFrameRate=60.0 " "cam::ExposureAuto=Off " @@ -160,60 +136,110 @@ class GStreamerPipeline(threading.Thread): "cam::DeviceLinkThroughputLimitMode=Off " ) if BINNING_SUPPORTED: - settings += "cam::BinningHorizontal=2 cam::BinningVertical=2 " - - # Pre-scaler (Crucial for stability) - pre_scale = ( - "nvvideoconvert compute-hw=1 ! " - "video/x-raw(memory:NVMM), format=NV12, " - f"width={INTERNAL_WIDTH}, height={INTERNAL_HEIGHT} ! " - ) + cam_settings += "cam::BinningHorizontal=2 cam::BinningVertical=2 " - # 1. GENERATE SOURCES DYNAMICALLY sources_str = "" - for i, serial in enumerate(DETECTED_SERIALS): - sources_str += ( - f"pylonsrc device-serial-number={serial} {settings} ! " - "video/x-raw,format=GRAY8 ! " - "videoconvert ! " - "video/x-raw,format=I420 ! " - "nvvideoconvert compute-hw=1 ! " - "video/x-raw(memory:NVMM) ! " - f"{pre_scale}" - f"m.sink_{i} " # Link to the correct pad (0, 1, 2...) - ) + + for i in range(TARGET_NUM_CAMS): + if i < len(DETECTED_SERIALS): + # --- REAL CAMERA SOURCE --- + serial = DETECTED_SERIALS[i] + print(f"Slot {i}: Linking Camera {serial}") + + pre_scale = ( + "nvvideoconvert compute-hw=1 ! " + f"video/x-raw(memory:NVMM), format=NV12, width={INTERNAL_WIDTH}, height={INTERNAL_HEIGHT}, framerate=60/1 ! " + ) + + source = ( + f"pylonsrc device-serial-number={serial} {cam_settings} ! " + "video/x-raw,format=GRAY8 ! " + "videoconvert ! " + "video/x-raw,format=I420 ! " + "nvvideoconvert compute-hw=1 ! " + "video/x-raw(memory:NVMM) ! 
" + f"{pre_scale}" + f"m.sink_{i} " + ) + else: + # --- DISCONNECTED PLACEHOLDER --- + print(f"Slot {i}: Creating Placeholder (Synchronized)") + + # FIX 1: Add 'videorate' to enforce strict timing on the fake source + # This prevents the placeholder from running too fast/slow and jittering the muxer + + source = ( + f"videotestsrc pattern=black is-live=true ! " + f"videorate ! " # <--- TIMING ENFORCER + f"video/x-raw,width={INTERNAL_WIDTH},height={INTERNAL_HEIGHT},format=I420,framerate=60/1 ! " + f"textoverlay text=\"DISCONNECTED\" valignment=center halignment=center font-desc=\"Sans, 48\" ! " + "nvvideoconvert compute-hw=1 ! " + f"video/x-raw(memory:NVMM),format=NV12,width={INTERNAL_WIDTH},height={INTERNAL_HEIGHT},framerate=60/1 ! " + f"m.sink_{i} " + ) + + sources_str += source - # 2. CONFIGURE MUXER & TILER - # Batch size MUST match number of cameras + # 3. MUXER & PROCESSING + # FIX 2: batched-push-timeout=33000 + # This tells the muxer: "If you have data, send it every 33ms (30fps). Don't wait forever." + + # FIX 3: Output Videorate + # We process internally at 60fps (best for camera driver), but we DROP to 30fps + # for the web stream. This makes the network stream buttery smooth and consistent. + processing = ( - f"nvstreammux name=m batch-size={NUM_CAMS} width={INTERNAL_WIDTH} height={INTERNAL_HEIGHT} live-source=1 ! " - f"nvmultistreamtiler width={WEB_WIDTH} height={WEB_HEIGHT} rows=1 columns={NUM_CAMS} ! " + f"nvstreammux name=m batch-size={TARGET_NUM_CAMS} width={INTERNAL_WIDTH} height={INTERNAL_HEIGHT} " + f"live-source=1 batched-push-timeout=33000 ! " # <--- TIMEOUT FIX + f"nvmultistreamtiler width={WEB_WIDTH} height={WEB_HEIGHT} rows=1 columns={TARGET_NUM_CAMS} ! " "nvvideoconvert compute-hw=1 ! " "video/x-raw(memory:NVMM) ! " + "videorate drop-only=true ! " # <--- DROPPING FRAMES CLEANLY + "video/x-raw(memory:NVMM), framerate=30/1 ! " # <--- Force 30 FPS Output f"nvjpegenc quality=60 ! 
" "appsink name=sink emit-signals=True sync=False max-buffers=1 drop=True" ) pipeline_str = f"{sources_str} {processing}" - print(f"Launching ADAPTIVE Pipeline ({NUM_CAMS} Cameras)...") + print(f"Launching SMOOTH Pipeline...") self.pipeline = Gst.parse_launch(pipeline_str) - appsink = self.pipeline.get_by_name("sink") appsink.connect("new-sample", self.on_new_sample) -# --- Flask Routes --- +# --- FLASK --- @app.route('/') def index(): return render_template_string(''' - -

Basler Feed ({{ num }} Cameras)

- {% if num == 0 %} -

NO CAMERAS DETECTED

- {% endif %} - - - ''', num=NUM_CAMS) + + + + + +

Basler 3-Cam (Smooth)

+
+
FPS: --
+ +
+ + + + ''') @app.route('/video_feed') def video_feed(): @@ -222,14 +248,17 @@ def video_feed(): while True: with buffer_lock: if frame_buffer: - yield (b'--frame\r\n' - b'Content-Type: image/jpeg\r\n\r\n' + frame_buffer + b'\r\n') - time.sleep(0.016) + yield (b'--frame\r\n' b'Content-Type: image/jpeg\r\n\r\n' + frame_buffer + b'\r\n') + # Sleep 33ms (30 FPS) + time.sleep(0.033) count += 1 if count % 200 == 0: gc.collect() - return Response(generate(), mimetype='multipart/x-mixed-replace; boundary=frame') +@app.route('/get_fps') +def get_fps(): + return jsonify(fps=round(current_fps, 1)) + if __name__ == "__main__": subprocess.run([sys.executable, "-c", "import gc; gc.collect()"]) gst_thread = GStreamerPipeline()